├── .editorconfig ├── .gitattributes ├── .github └── workflows │ ├── build.yml │ ├── quick_test.yml │ └── tests.yml ├── .gitignore ├── .readthedocs.yaml ├── DISCLAIMER ├── LICENSE ├── README.md ├── adcircpy ├── __init__.py ├── __main__.py ├── cmd │ ├── __init__.py │ ├── argument_parser.py │ ├── basecmd.py │ ├── best_track_file.py │ ├── best_track_run.py │ ├── diagnose.py │ ├── fort63.py │ ├── iter_smooth.py │ ├── plot_fort61.py │ ├── plot_maxele.py │ ├── plot_mesh.py │ ├── tidal_run.py │ └── tide_gen.py ├── driver.py ├── figures.py ├── forcing │ ├── __init__.py │ ├── base.py │ ├── bctypes.py │ ├── tides │ │ ├── __init__.py │ │ ├── dataset.py │ │ ├── hamtide.py │ │ ├── tides.py │ │ └── tpxo.py │ ├── waves │ │ ├── __init__.py │ │ ├── base.py │ │ └── ww3.py │ └── winds │ │ ├── __init__.py │ │ ├── _parametric │ │ ├── __init__.py │ │ ├── gao2015.py │ │ └── holland2010.py │ │ ├── atmesh.py │ │ ├── base.py │ │ ├── best_track.py │ │ └── owi.py ├── fort15.py ├── mesh │ ├── __init__.py │ ├── base.py │ ├── fort13.py │ ├── fort14.py │ ├── mesh.py │ └── parsers │ │ ├── __init__.py │ │ ├── grd.py │ │ └── sms2dm.py ├── outputs │ ├── __init__.py │ ├── base.py │ ├── collection.py │ ├── fort61.py │ ├── fort63.py │ └── maxele.py ├── plotting.py ├── server │ ├── __init__.py │ ├── base_config.py │ ├── driver_file.py │ ├── slurm_config.py │ └── ssh_config.py └── utilities.py ├── docs ├── Makefile ├── make.bat └── source │ ├── abstract.rst │ ├── acknowledgments.rst │ ├── basic_usage.rst │ ├── cli.rst │ ├── conf.py │ ├── figures │ ├── Fort14_UML.png │ ├── Grd_UML.png │ ├── classes_adcircpy.fort15.png │ ├── classes_adcircpy.mesh.AdcircMesh.png │ ├── fort14_triplot_example.png │ └── hsofs_mesh.png │ ├── index.rst │ ├── introduction.rst │ ├── python_api.rst │ ├── references.bib │ └── references.rst ├── examples ├── example_1.py ├── example_2.py ├── example_3.py ├── example_4.py └── example_5.ipynb ├── pyproject.toml └── tests ├── __init__.py ├── data ├── .gitignore ├── input │ ├── test_best_track_run │ │ └── stations.txt │ ├── test_from_atcf │ │ └── florence2018_atcf.trk │ ├── test_from_fort22 │ │ └── irma2017_fort.22 │ ├── test_import_stations │ │ ├── stations_1.txt │ │ ├── stations_2.txt │ │ └── stations_3.txt │ ├── test_no_internet │ │ └── fort.22 │ └── test_plot_besttrack │ │ └── florence2018_atcf.trk └── reference │ ├── example_1 │ ├── driver.sh │ ├── fort.14 │ └── fort.15 │ ├── example_2 │ ├── driver.sh │ ├── fort.13 │ ├── fort.14 │ ├── fort.15.coldstart │ └── fort.15.hotstart │ ├── example_3 │ ├── fort.14 │ ├── fort.15.coldstart │ ├── fort.15.hotstart │ ├── fort.22 │ └── slurm.job │ ├── example_4 │ ├── fort.14 │ ├── fort.15.coldstart │ ├── fort.15.hotstart │ └── slurm.job │ ├── test_Stations │ ├── stations_1.fort.15 │ ├── stations_2.fort.15 │ └── stations_3.fort.15 │ ├── test_best_track_run │ ├── driver.sh │ ├── fort.13 │ ├── fort.14 │ ├── fort.15.coldstart │ ├── fort.15.hotstart │ └── fort.22 │ ├── test_configuration │ ├── driver.sh │ ├── fort.14 │ ├── fort.15.coldstart │ └── fort.15.hotstart │ ├── test_open │ └── fort.14 │ ├── test_slurm_driver │ └── slurm.job │ ├── test_tidal_run │ ├── driver.sh │ ├── fort.14 │ ├── fort.15.coldstart │ └── fort.15.hotstart │ ├── test_tidal_run_cli │ ├── driver.sh │ ├── fort.14 │ ├── fort.15.coldstart │ └── fort.15.hotstart │ ├── test_tide_gen │ └── fort.15 │ └── test_write │ ├── test_AdcircMesh.2dm │ └── test_AdcircMesh.gr3 ├── test_adcirc_mesh.py ├── test_api.py ├── test_best_track_run.py ├── test_besttrack.py ├── test_configuration.py ├── test_examples.py ├── 
test_stations.py ├── test_tidal_run.py └── test_tide_gen.py /.editorconfig: -------------------------------------------------------------------------------- 1 | [*] 2 | charset = utf-8 3 | end_of_line = lf 4 | indent_size = 4 5 | indent_style = space 6 | insert_final_newline = true 7 | max_line_length = 127 8 | tab_width = 4 9 | ij_continuation_indent_size = 8 10 | ij_formatter_off_tag = @formatter:off 11 | ij_formatter_on_tag = @formatter:on 12 | ij_formatter_tags_enabled = false 13 | ij_smart_tabs = false 14 | ij_visual_guides = 79, 127 15 | ij_wrap_on_typing = false 16 | 17 | [.editorconfig] 18 | ij_editorconfig_align_group_field_declarations = false 19 | ij_editorconfig_space_after_colon = false 20 | ij_editorconfig_space_after_comma = true 21 | ij_editorconfig_space_before_colon = false 22 | ij_editorconfig_space_before_comma = false 23 | ij_editorconfig_spaces_around_assignment_operators = true 24 | 25 | [{*.bash, *.job, *.sh, *.zsh}] 26 | indent_size = 2 27 | tab_width = 2 28 | ij_shell_binary_ops_start_line = false 29 | ij_shell_keep_column_alignment_padding = false 30 | ij_shell_minify_program = false 31 | ij_shell_redirect_followed_by_space = false 32 | ij_shell_switch_cases_indented = false 33 | 34 | [{*.py, *.pyw, soundings}] 35 | max_line_length = 79 36 | ij_python_align_collections_and_comprehensions = true 37 | ij_python_align_multiline_imports = true 38 | ij_python_align_multiline_parameters = true 39 | ij_python_align_multiline_parameters_in_calls = true 40 | ij_python_blank_line_at_file_end = true 41 | ij_python_blank_lines_after_imports = 1 42 | ij_python_blank_lines_after_local_imports = 0 43 | ij_python_blank_lines_around_class = 1 44 | ij_python_blank_lines_around_method = 1 45 | ij_python_blank_lines_around_top_level_classes_functions = 2 46 | ij_python_blank_lines_before_first_method = 0 47 | ij_python_dict_alignment = 0 48 | ij_python_dict_new_line_after_left_brace = false 49 | ij_python_dict_new_line_before_right_brace = false 50 | ij_python_dict_wrapping = 5 51 | ij_python_from_import_new_line_after_left_parenthesis = false 52 | ij_python_from_import_new_line_before_right_parenthesis = false 53 | ij_python_from_import_parentheses_force_if_multiline = false 54 | ij_python_from_import_trailing_comma_if_multiline = false 55 | ij_python_from_import_wrapping = 1 56 | ij_python_hang_closing_brackets = false 57 | ij_python_keep_blank_lines_in_code = 1 58 | ij_python_keep_blank_lines_in_declarations = 1 59 | ij_python_keep_indents_on_empty_lines = false 60 | ij_python_keep_line_breaks = true 61 | ij_python_new_line_after_colon = false 62 | ij_python_new_line_after_colon_multi_clause = true 63 | ij_python_optimize_imports_always_split_from_imports = false 64 | ij_python_optimize_imports_case_insensitive_order = false 65 | ij_python_optimize_imports_join_from_imports_with_same_source = true 66 | ij_python_optimize_imports_sort_by_type_first = false 67 | ij_python_optimize_imports_sort_imports = true 68 | ij_python_optimize_imports_sort_names_in_from_imports = true 69 | ij_python_space_after_comma = true 70 | ij_python_space_after_number_sign = true 71 | ij_python_space_after_py_colon = true 72 | ij_python_space_before_backslash = true 73 | ij_python_space_before_comma = false 74 | ij_python_space_before_for_semicolon = false 75 | ij_python_space_before_lbracket = false 76 | ij_python_space_before_method_call_parentheses = false 77 | ij_python_space_before_method_parentheses = false 78 | ij_python_space_before_number_sign = true 79 | ij_python_space_before_py_colon = false 
80 | ij_python_space_within_empty_method_call_parentheses = false 81 | ij_python_space_within_empty_method_parentheses = false 82 | ij_python_spaces_around_additive_operators = true 83 | ij_python_spaces_around_assignment_operators = true 84 | ij_python_spaces_around_bitwise_operators = true 85 | ij_python_spaces_around_eq_in_keyword_argument = false 86 | ij_python_spaces_around_eq_in_named_parameter = false 87 | ij_python_spaces_around_equality_operators = true 88 | ij_python_spaces_around_multiplicative_operators = true 89 | ij_python_spaces_around_power_operator = true 90 | ij_python_spaces_around_relational_operators = true 91 | ij_python_spaces_around_shift_operators = true 92 | ij_python_spaces_within_braces = false 93 | ij_python_spaces_within_brackets = false 94 | ij_python_spaces_within_method_call_parentheses = false 95 | ij_python_spaces_within_method_parentheses = false 96 | ij_python_use_continuation_indent_for_arguments = true 97 | ij_python_use_continuation_indent_for_collection_and_comprehensions = false 98 | ij_python_wrap_long_lines = false 99 | 100 | [{*.yaml, *.yml}] 101 | indent_size = 2 102 | ij_yaml_keep_indents_on_empty_lines = false 103 | ij_yaml_keep_line_breaks = true 104 | ij_yaml_space_before_colon = false 105 | ij_yaml_spaces_within_braces = true 106 | ij_yaml_spaces_within_brackets = true 107 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.png filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | jobs: 9 | publish: 10 | name: publish package to PyPI 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: checkout repository 14 | uses: actions/checkout@v2 15 | - name: install Poetry 16 | uses: abatilo/actions-poetry@v2.1.3 17 | - name: install Dunamai 18 | run: pip install dunamai 19 | - name: extract version from VCS 20 | run: poetry version $(dunamai from any) 21 | - name: build wheel and source 22 | run: poetry build 23 | - name: upload wheel and source 24 | run: poetry publish --username __token__ --password ${{ secrets.PYPI_TOKEN }} 25 | -------------------------------------------------------------------------------- /.github/workflows/quick_test.yml: -------------------------------------------------------------------------------- 1 | name: quick test 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - main 7 | paths: 8 | - '**.py' 9 | - '.github/workflows/quick_test.yml' 10 | - 'pyproject.toml' 11 | 12 | jobs: 13 | lint: 14 | name: lint 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: clone repository 18 | uses: actions/checkout@v3 19 | - name: install Python 20 | uses: actions/setup-python@v4 21 | with: 22 | python-version: '3.11' 23 | - name: load cached Python installation 24 | id: cache 25 | uses: actions/cache@v3 26 | with: 27 | path: ${{ env.pythonLocation }} 28 | key: lint-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }} 29 | - name: install linters 30 | run: pip install flake8 oitnb 31 | - name: lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. 
The GitHub editor is 127 chars wide 36 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: lint with oitnb 38 | run: oitnb . --check 39 | test: 40 | needs: lint 41 | name: quick test 42 | runs-on: ubuntu-latest 43 | steps: 44 | - name: clone repository 45 | uses: actions/checkout@v3 46 | - name: install Python 47 | uses: actions/setup-python@v4 48 | with: 49 | python-version: '3.11' 50 | - name: load cached Python installation 51 | id: cache 52 | uses: actions/cache@v3 53 | with: 54 | path: ${{ env.pythonLocation }} 55 | key: test-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }} 56 | - name: install dependencies 57 | run: | 58 | sudo apt update 59 | sudo apt install libhdf5-dev 60 | sudo apt install libnetcdf-dev 61 | pip install ".[testing]" 62 | python -m pip install --upgrade --force-reinstall --no-deps --no-cache-dir netcdf4 --no-binary netcdf4 63 | - name: run tests 64 | run: pytest --numprocesses auto 65 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '**.py' 9 | - '.github/workflows/tests.yml' 10 | - 'pyproject.toml' 11 | pull_request: 12 | branches: 13 | - main 14 | 15 | jobs: 16 | lint: 17 | name: lint 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: clone repository 21 | uses: actions/checkout@v3 22 | - name: install Python 23 | uses: actions/setup-python@v4 24 | with: 25 | python-version: '3.11' 26 | - name: load cached Python installation 27 | id: cache 28 | uses: actions/cache@v3 29 | with: 30 | path: ${{ env.pythonLocation }} 31 | key: lint-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }} 32 | - name: install linters 33 | run: pip install flake8 oitnb 34 | - name: lint with flake8 35 | run: | 36 | # stop the build if there are Python syntax errors or undefined names 37 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 38 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 39 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 40 | - name: lint with oitnb 41 | run: oitnb . 
--check 42 | test: 43 | needs: lint 44 | name: test 45 | runs-on: ${{ matrix.os }} 46 | strategy: 47 | matrix: 48 | os: [ ubuntu-latest, macos-latest ] 49 | python-version: [ '3.8', '3.9', '3.10', '3.11' ] 50 | exclude: 51 | - os: macos-latest 52 | python-version: '3.11' 53 | steps: 54 | - name: clone repository 55 | uses: actions/checkout@v3 56 | - name: install Python 57 | uses: actions/setup-python@v4 58 | with: 59 | python-version: ${{ matrix.python-version }} 60 | - name: load cached Python installation 61 | id: cache 62 | uses: actions/cache@v3 63 | with: 64 | path: ${{ env.pythonLocation }} 65 | key: test-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }} 66 | - name: install linux dependencies 67 | run: | 68 | sudo apt update 69 | sudo apt install libhdf5-dev 70 | sudo apt install libnetcdf-dev 71 | if: runner.os == 'Linux' 72 | - name: install macos dependencies 73 | run: | 74 | brew install hdf5 75 | brew install netcdf 76 | if: runner.os == 'macOS' 77 | - name: install repo 78 | run: | 79 | pip install ".[testing]" 80 | python -m pip install --upgrade --force-reinstall --no-deps --no-cache-dir netcdf4 --no-binary netcdf4 81 | - name: run tests 82 | run: pytest --numprocesses auto 83 | test_with_coverage: 84 | needs: [ lint, test ] 85 | name: test with coverage 86 | runs-on: ubuntu-latest 87 | steps: 88 | - name: clone repository 89 | uses: actions/checkout@v3 90 | - name: install Python 91 | uses: actions/setup-python@v4 92 | with: 93 | python-version: '3.11' 94 | - name: load cached Python installation 95 | id: cache 96 | uses: actions/cache@v3 97 | with: 98 | path: ${{ env.pythonLocation }} 99 | key: test-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }} 100 | - name: install linux dependencies 101 | run: | 102 | sudo apt update 103 | sudo apt install libhdf5-dev 104 | sudo apt install libnetcdf-dev 105 | if: runner.os == 'Linux' 106 | - name: install macos dependencies 107 | run: | 108 | brew install hdf5 109 | brew install netcdf 110 | if: runner.os == 'macOS' 111 | - name: install repo 112 | run: | 113 | pip install ".[testing]" 114 | python -m pip install --upgrade --force-reinstall --no-deps --no-cache-dir netcdf4 --no-binary netcdf4 115 | - name: run tests with coverage 116 | run: pytest --numprocesses auto --cov . 
--cov-report xml:coverage.xml 117 | - name: show coverage report 118 | run: coverage report 119 | - name: Upload coverage reports to Codecov 120 | uses: codecov/codecov-action@v4.0.1 121 | with: 122 | token: ${{ secrets.CODECOV_TOKEN }} 123 | fail_ci_if_error: true 124 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | examples/output 2 | examples/data 3 | tests/data/input 4 | tests/data/output 5 | 6 | # Created by https://www.gitignore.io/api/python 7 | # Edit at https://www.gitignore.io/?templates=python 8 | 9 | ### Python ### 10 | # Byte-compiled / optimized / DLL files 11 | __pycache__/ 12 | *.py[cod] 13 | *$py.class 14 | 15 | # C extensions 16 | *.so 17 | 18 | # Distribution / packaging 19 | .Python 20 | build/ 21 | develop-eggs/ 22 | dist/ 23 | downloads/ 24 | eggs/ 25 | .eggs/ 26 | #lib/ 27 | lib64/ 28 | parts/ 29 | sdist/ 30 | var/ 31 | wheels/ 32 | pip-wheel-metadata/ 33 | share/python-wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .nox/ 53 | .coverage 54 | .coverage.* 55 | .cache 56 | nosetests.xml 57 | coverage.xml 58 | *.cover 59 | .hypothesis/ 60 | .pytest_cache/ 61 | 62 | # Translations 63 | *.mo 64 | *.pot 65 | 66 | # Django stuff: 67 | *.log 68 | local_settings.py 69 | db.sqlite3 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not 98 | # install all needed dependencies. 
99 | #Pipfile.lock 100 | 101 | # celery beat schedule file 102 | celerybeat-schedule 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # End of https://www.gitignore.io/api/python 135 | /.idea/ 136 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | version: 2 6 | 7 | build: 8 | os: ubuntu-20.04 9 | tools: 10 | python: "3.10" 11 | 12 | sphinx: 13 | configuration: docs/source/conf.py 14 | 15 | python: 16 | install: 17 | - method: pip 18 | path: . 19 | extra_requirements: 20 | - documentation 21 | -------------------------------------------------------------------------------- /DISCLAIMER: -------------------------------------------------------------------------------- 1 | THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 2 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /adcircpy/__init__.py: -------------------------------------------------------------------------------- 1 | import matplotlib as mpl 2 | from pandas.plotting import register_matplotlib_converters 3 | 4 | from adcircpy.driver import AdcircRun 5 | from adcircpy.forcing import TidalSource, Tides, WaveForcing, WindForcing 6 | from adcircpy.fort15 import Fort15 7 | from adcircpy.mesh import AdcircMesh 8 | 9 | __all__ = [ 10 | 'AdcircMesh', 11 | 'AdcircRun', 12 | 'Tides', 13 | 'TidalSource', 14 | 'WaveForcing', 15 | 'WindForcing', 16 | 'Fort15', 17 | ] 18 | 19 | mpl.rcParams['agg.path.chunksize'] = 10000 20 | register_matplotlib_converters() 21 | 22 | try: 23 | import colored_traceback 24 | 25 | colored_traceback.add_hook(always=True) 26 | except ImportError: 27 | pass 28 | -------------------------------------------------------------------------------- /adcircpy/__main__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oceanmodeling/adcircpy/0eb84de6e743b4b6c8f2514dba07646b883ecc08/adcircpy/__main__.py -------------------------------------------------------------------------------- /adcircpy/cmd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oceanmodeling/adcircpy/0eb84de6e743b4b6c8f2514dba07646b883ecc08/adcircpy/cmd/__init__.py -------------------------------------------------------------------------------- /adcircpy/cmd/best_track_file.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import argparse 3 | from datetime import datetime 4 | from pathlib import Path 5 | 6 | from adcircpy.forcing.winds.best_track import BestTrackForcing 7 | 8 | 9 | def parse_args(): 10 | parser = argparse.ArgumentParser( 11 | description='generate `fort.22` information from HURDAT2 data' 12 | ) 13 | parser.add_argument( 14 | 'storm', 15 | help='Can be StormYYYY (eg. 
Sandy2012), ' 16 | 'a storm id from the HURDAT2 table (ftp://ftp.nhc.noaa.gov/atcf/archive/storm.table), or ' 17 | 'a file path to a track file in HURDAT2 format', 18 | ) 19 | parser.add_argument('--save-path', help='path to which to write fort.22') 20 | parser.add_argument('--start-date', help='format is %%Y%%m%%d%%H') 21 | parser.add_argument('--end-date', help='format is %%Y%%m%%d%%H') 22 | parser.add_argument( 23 | '--quiet', '-q', action='store_true', default=False, help='suppress console output', 24 | ) 25 | parser.add_argument( 26 | '--plot-track', 27 | action='store_true', 28 | default=False, 29 | help='show a simple plot of the track', 30 | ) 31 | parser.add_argument('--nws', help='which NWS forcing option to use') 32 | return parser.parse_args() 33 | 34 | 35 | def main(): 36 | args = parse_args() 37 | 38 | if 'fort.22' in args.storm: 39 | bt = BestTrackForcing.from_fort22( 40 | args.storm, nws=int(args.nws), start_date=args.start_date, end_date=args.end_date, 41 | ) 42 | else: 43 | bt = BestTrackForcing( 44 | args.storm, 45 | nws=int(args.nws) if args.nws is not None else 20, 46 | start_date=datetime.strptime(args.start_date, '%Y%m%d%H') 47 | if args.start_date is not None 48 | else None, 49 | end_date=datetime.strptime(args.end_date, '%Y%m%d%H') 50 | if args.end_date is not None 51 | else None, 52 | ) 53 | 54 | # print fort22 55 | if not args.quiet: 56 | print(str(bt)) 57 | 58 | # show cheap plot 59 | if args.plot_track: 60 | bt.plot_track(show=True) 61 | 62 | # save fort22 63 | if args.save_path is not None: 64 | with open(Path(args.save_path), 'w') as output_file: 65 | output_file.write(str(bt)) 66 | 67 | 68 | if __name__ == '__main__': 69 | main() 70 | -------------------------------------------------------------------------------- /adcircpy/cmd/best_track_run.py: -------------------------------------------------------------------------------- 1 | # import argparse 2 | from datetime import datetime, timedelta 3 | import logging 4 | 5 | from pytz import timezone 6 | 7 | from adcircpy.cmd import argument_parser 8 | from adcircpy.cmd.basecmd import AdcircCommand 9 | from adcircpy.forcing.winds.best_track import BestTrackForcing 10 | from adcircpy.utilities import get_logger 11 | 12 | LOGGER = get_logger(__name__) 13 | 14 | 15 | class BestTrackRunCommand(AdcircCommand): 16 | def __init__(self, args): 17 | 18 | LOGGER.info('Init BestTrackRunCommand') 19 | super().__init__(args) 20 | 21 | LOGGER.info(f'Init BestTrackForcing for {self.args.storm_id}') 22 | bt = BestTrackForcing(self.args.storm_id) 23 | 24 | LOGGER.info('Clip BestTrackForcing to bbox') 25 | if self.args.clip: 26 | bt.clip_to_bbox(self.mesh.get_bbox(output_type='bbox'), self.mesh.crs) 27 | 28 | if args.start_date is None: 29 | self.start_date = bt.start_date 30 | else: 31 | self.start_date = datetime.strptime(args.start_date, '%Y-%m-%dT%H') 32 | 33 | if args.run_days is None: 34 | self.end_date = bt.end_date 35 | else: 36 | self.end_date = self.start_date + timedelta(days=args.run_days) 37 | 38 | bt.start_date = self.start_date 39 | bt.end_date = self.end_date 40 | 41 | self.mesh.add_forcing(bt) 42 | 43 | 44 | def main(): 45 | args = argument_parser.get_parser('best_track').parse_args() 46 | logging.basicConfig( 47 | level={'warning': logging.WARNING, 'info': logging.INFO, 'debug': logging.DEBUG,}[ 48 | args.log_level 49 | ], 50 | format='[%(asctime)s] %(name)s %(levelname)s: %(message)s', 51 | # force=True, 52 | ) 53 | logging.Formatter.converter = lambda *args: datetime.now(tz=timezone('UTC')).timetuple() 54 | 
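    # build the best-track ADCIRC configuration from the parsed arguments and execute the run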
BestTrackRunCommand(args).run() 55 | -------------------------------------------------------------------------------- /adcircpy/cmd/diagnose.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | 3 | import numpy as np 4 | 5 | 6 | def parse(logfile): 7 | logfile = pathlib.Path(logfile).resolve() 8 | with open(logfile, 'r') as f: 9 | lines = "".join(f.readlines()) 10 | elmax = list() 11 | speedmax = list() 12 | index = list() 13 | _lines = lines.split('** ERROR: Elevation.gt.ErrorElev, ADCIRC stopping. **\n') 14 | line0 = "".join(_lines[0]).split('\n') 15 | for line in line0: 16 | if '** WARNING: Elevation.gt.WarnElev **' in line: 17 | elmax.append(float(line.split('AT NODE')[0].split('=')[-1])) 18 | speedmax.append(float(line.split('SPEEDMAX =')[0].split('AT NODE')[-1])) 19 | index.append( 20 | np.abs(int(line.split('AT NODE')[-1].split('ON MYPROC')[0].strip())) - 1 21 | ) 22 | return elmax, speedmax, index 23 | -------------------------------------------------------------------------------- /adcircpy/cmd/fort63.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from ast import literal_eval 3 | 4 | import matplotlib.pyplot as plt 5 | 6 | from adcircpy.cmd import diagnose 7 | from adcircpy.outputs.fort63 import Fort63 8 | 9 | 10 | def plot(fort63, args): 11 | fort63.index = args.index 12 | ax = fort63.tricontourf(vmin=args.vmin, vmax=args.vmax, cbar=True,) 13 | if args.plot_elements: 14 | ax.triplot(fort63.triangulation, color='k', linewidth=0.1) 15 | if args.diagnose is not None: 16 | elmax, speedmax, index = diagnose.parse(args.diagnose) 17 | ax.scatter( 18 | fort63.x[index], 19 | fort63.y[index], 20 | # c=elmax, 21 | marker='o', 22 | edgecolor='r', 23 | facecolor='none', 24 | ) 25 | ax.set_ylim(ymin=args.ymin, ymax=args.ymax, auto=True) 26 | ax.set_xlim(xmin=args.xmin, xmax=args.xmax, auto=True) 27 | plt.show() 28 | 29 | 30 | def animation(fort63, args): 31 | fort63.animation( 32 | save=args.save_path, 33 | show=not args.no_show, 34 | xmin=args.xmin, 35 | xmax=args.xmax, 36 | ymin=args.ymin, 37 | ymax=args.ymax, 38 | start_frame=args.start_index, 39 | end_frame=args.end_index, 40 | fps=args.fps, 41 | figsize=args.figsize, 42 | elements=args.plot_elements, 43 | ) 44 | 45 | 46 | def export(fort63, args): 47 | fort63.index = args.index 48 | fort63.export(args.output, overwrite=args.overwrite) 49 | 50 | 51 | def main(): 52 | args = parse_args() 53 | fort63 = Fort63( 54 | args.fort63, 55 | # fort14=args.fort14 56 | ) 57 | {'plot': plot, 'animate': animation, 'export': export}[args.mode](fort63, args) 58 | 59 | 60 | def parse_args(): 61 | parser = argparse.ArgumentParser( 62 | description='Program to see a quick plot of an ADCIRC fort63 file.' 63 | ) 64 | parser.add_argument('fort63', help='Path to fort.63 file.') 65 | 66 | subparsers = parser.add_subparsers(dest='mode') 67 | subparsers.required = True 68 | 69 | # data plotting subparsers 70 | plot = subparsers.add_parser('plot') 71 | plot.add_argument('index', type=int, default=-1) 72 | plot.add_argument('--no-show', action='store_true') 73 | _help = 'Path to fort.14 file (required if fort63 files is not netcdf).' 
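    # note: ASCII fort.63 output carries no node coordinates, so plotting it requires the mesh geometry from a fort.14 file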
74 | plot.add_argument('--fort14', help=_help) 75 | plot.add_argument('--title', help='Plot title override.') 76 | plot.add_argument('--vmin', type=float) 77 | plot.add_argument('--vmax', type=float) 78 | plot.add_argument('--start-index', type=int) 79 | plot.add_argument('--end-index', type=int) 80 | plot.add_argument('--plot-elements', action='store_true') 81 | plot.add_argument('--diagnose') 82 | plot.add_argument('--save-path') 83 | plot.add_argument('--xmin', type=float) 84 | plot.add_argument('--xmax', type=float) 85 | plot.add_argument('--ymin', type=float) 86 | plot.add_argument('--ymax', type=float) 87 | 88 | # animation 89 | anim = subparsers.add_parser('animate') 90 | anim.add_argument('--fps', type=int, default=5) 91 | anim.add_argument('--figsize', type=literal_eval) 92 | anim.add_argument('--start-index', type=int, default=0) 93 | anim.add_argument('--end-index', type=int, default=-1) 94 | anim.add_argument('--no-show', action='store_true') 95 | anim.add_argument('--save-path') 96 | anim.add_argument('--plot-elements', action='store_true') 97 | anim.add_argument('--xmin', type=float) 98 | anim.add_argument('--xmax', type=float) 99 | anim.add_argument('--ymin', type=float) 100 | anim.add_argument('--ymax', type=float) 101 | 102 | # export 103 | export = subparsers.add_parser('export') 104 | export.add_argument('output') 105 | export.add_argument('index', type=int) 106 | export.add_argument('--overwrite', action='store_true') 107 | 108 | return parser.parse_args() 109 | 110 | 111 | if __name__ == '__main__': 112 | main() 113 | -------------------------------------------------------------------------------- /adcircpy/cmd/iter_smooth.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Takes as input the logfile of an ADCIRC run and attempts to avoid the blowup 4 | by assigning the minimum topobathy value of the group of nodes where the blowup 5 | occurred. 6 | """ 7 | import argparse 8 | from glob import glob 9 | import pathlib 10 | 11 | import matplotlib.pyplot as plt 12 | 13 | from adcircpy.cmd import diagnose 14 | from adcircpy.mesh import AdcircMesh 15 | 16 | 17 | def parse_args(): 18 | parser = argparse.ArgumentParser() 19 | parser.add_argument('base_dir') 20 | parser.add_argument('--log-filename', default='sbatch.log') 21 | args = parser.parse_args() 22 | args.base_dir = pathlib.Path(args.base_dir).resolve() 23 | assert args.base_dir.is_dir() 24 | return args 25 | 26 | 27 | def main(): 28 | args = parse_args() 29 | iter_dir = args.base_dir / 'iter' 30 | iter_dir.mkdir(exist_ok=True) 31 | logs = glob(str(args.base_dir / '**' / args.log_filename), recursive=True) 32 | base_dir = args.base_dir 33 | if len(logs) == 0: 34 | msg = 'No log file found!' 35 | raise Exception(msg) 36 | elif len(logs) == 1: 37 | log_file = base_dir / args.log_filename 38 | else: 39 | msg = 'More than 1 logfile' 40 | raise NotImplementedError(msg) 41 | 42 | elmax, speedmax, indexes = diagnose.parse(log_file) 43 | 44 | if len(indexes) == 0: 45 | msg = 'Congratulations, your mesh did not blow up with ADCIRC. ' 46 | msg += " That's a feat."
47 | print(msg) 48 | exit() 49 | mesh = AdcircMesh.open(base_dir / 'fort.14') 50 | # mesh.values[indexes] = np.min(mesh.values[indexes]) 51 | ax = mesh.make_plot() 52 | ax.triplot(mesh.triangulation, color='k', linewidth=0.05) 53 | ax.scatter(mesh.x[indexes], mesh.y[indexes], edgecolor='r', facecolor='none') 54 | plt.show() 55 | return 0 56 | 57 | 58 | if __name__ == '__main__': 59 | exit(main()) 60 | -------------------------------------------------------------------------------- /adcircpy/cmd/plot_fort61.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import argparse 3 | import os 4 | from pathlib import Path 5 | 6 | import matplotlib.pyplot as plt 7 | import pandas 8 | from searvey.coops import COOPS_Station 9 | 10 | from adcircpy.outputs import Fort61 11 | 12 | 13 | def parse_args(): 14 | parser = argparse.ArgumentParser( 15 | description='Program to see a quick plot of an ADCIRC fort.61 station output file.' 16 | ) 17 | parser.add_argument('path', help='Path to ADCIRC fort.61 or fort.61.nc file.') 18 | parser.add_argument( 19 | 'vertical_datum', 20 | choices=['MHHW', 'MHW', 'MTL', 'MSL', 'MLW', 'MLLW', 'NAVD88', 'STND'], 21 | help='Tidal station datum, must match vertical datum of mesh.', 22 | ) 23 | show = parser.add_mutually_exclusive_group(required=False) 24 | show.add_argument( 25 | '--show', 26 | dest='show', 27 | action='store_true', 28 | help='Shows plots to screen as they are generated (default).', 29 | ) 30 | show.add_argument( 31 | '--no-show', 32 | dest='show', 33 | action='store_false', 34 | help='Prevents the plots from showing to screen. ' 35 | + 'Useful for only saving the plots without showing them.', 36 | ) 37 | parser.add_argument('--coops-only', action='store_true', help='only plot stations for which CO-OPS observations are available.') 38 | parser.add_argument( 39 | '--save', 40 | help='Directory where to save plots. ' 41 | + "Will be created if it doesn't exist. " 42 | + 'It will also overwrite files unless --resume-save is used.', 43 | ) 44 | parser.add_argument( 45 | '--resume-save', 46 | action='store_true', 47 | help='Directory where to save plots. 
' + "Will be created if it doesn't exist.", 48 | ) 49 | return parser.parse_args() 50 | 51 | 52 | def main(): 53 | args = parse_args() 54 | fort61 = Fort61(args.path) 55 | 56 | start_date = fort61.datetime[0] 57 | end_date = fort61.datetime[-1] 58 | 59 | for station_id, data in fort61: 60 | if args.save is not None: 61 | fname = str(Path(str(Path(args.save)) + '/{}.png'.format(station_id))) 62 | if args.resume_save and os.path.isfile(fname): 63 | continue 64 | 65 | station = COOPS_Station(station_id) 66 | 67 | station_data = station.get( 68 | start_date=start_date, end_date=end_date, datum=args.vertical_datum 69 | ) 70 | 71 | if args.coops_only and not pandas.isna(station_data['v']).all(): 72 | plt.plot( 73 | station_data['t'], station_data['v'], label='COOPS', color='b', linewidth=0.7 74 | ) 75 | plt.plot( 76 | fort61.datetime, data['values'], label='ADCIRC', color='r', linewidth=0.7, 77 | ) 78 | elif not args.coops_only: 79 | plt.plot( 80 | fort61.datetime, data['values'], label='ADCIRC', color='r', linewidth=0.7, 81 | ) 82 | 83 | fig = plt.gcf() 84 | if fig.get_axes(): 85 | fig.set_size_inches(18.5, 10.5) 86 | fig.gca().set_xlim(fort61.datetime[0], fort61.datetime[-1]) 87 | plt.ylabel('water level [meters, {}]'.format(args.vertical_datum)) 88 | plt.title('{}\n{}'.format(station.nos_id, station.name)) 89 | plt.legend() 90 | if args.save is not None: 91 | os.makedirs(str(Path(args.save)), exist_ok=True) 92 | fig.savefig(fname, dpi=300, bbox_inches='tight') 93 | if args.show: 94 | plt.show() 95 | plt.close(fig) 96 | 97 | 98 | if __name__ == '__main__': 99 | main() 100 | -------------------------------------------------------------------------------- /adcircpy/cmd/plot_maxele.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | import matplotlib.pyplot as plt 4 | 5 | from adcircpy.outputs import Maxele, MaximumElevationTimes 6 | 7 | 8 | def parse_args(): 9 | parser = argparse.ArgumentParser( 10 | description='Program to generate a plot of an ADCIRC `maxele` file.' 
11 | ) 12 | parser.add_argument('maxele', help='Path to maxele file.') 13 | parser.add_argument( 14 | '--plot_timestep_of_maxele', 15 | help='Plot the timestep of the maximum elevation value', 16 | action='store_true', 17 | ) 18 | parser.add_argument( 19 | '--fort14', help='Path to fort.14 file (required if maxele files is not netcdf).', 20 | ) 21 | parser.add_argument('--title', help='Plot title override.') 22 | parser.add_argument('--vmin', type=float) 23 | parser.add_argument('--vmax', type=float) 24 | parser.add_argument('--cmap', type=str, default='jet') 25 | parser.add_argument('--levels', type=int, default=256) 26 | return parser.parse_args() 27 | 28 | 29 | def main(): 30 | args = parse_args() 31 | if args.plot_timestep_of_maxele: 32 | maxele = MaximumElevationTimes(args.maxele) 33 | else: 34 | maxele = Maxele(args.maxele) 35 | maxele.tricontourf( 36 | vmin=args.vmin, vmax=args.vmax, cmap=args.cmap, levels=args.levels, cbar=True 37 | ) 38 | plt.show() 39 | -------------------------------------------------------------------------------- /adcircpy/cmd/plot_mesh.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import argparse 3 | import pathlib 4 | 5 | import matplotlib.pyplot as plt 6 | 7 | from adcircpy import AdcircMesh 8 | 9 | 10 | class PlotMeshCommand: 11 | def __init__(self, args: argparse.Namespace): 12 | mesh = AdcircMesh.open(args.mesh, crs=args.crs) 13 | fig = plt.figure() 14 | ax = fig.add_subplot(111) 15 | if args.no_topobathy is False: 16 | mesh.make_plot(axes=ax, vmin=args.vmin, vmax=args.vmax) 17 | if args.show_elements: 18 | mesh.triplot(axes=ax) 19 | if args.plot_boundaries: 20 | mesh.boundaries.gdf.plot(ax=ax) 21 | plt.show(block=True) 22 | 23 | 24 | def diagnose(logfile): 25 | import numpy as np 26 | 27 | logfile = pathlib.Path(logfile).resolve() 28 | with open(logfile, 'r') as f: 29 | lines = "".join(f.readlines()) 30 | elmax = list() 31 | speedmax = list() 32 | index = list() 33 | _lines = lines.split('** ERROR: Elevation.gt.ErrorElev, ADCIRC stopping. **\n') 34 | line0 = "".join(_lines[0]).split('\n') 35 | for line in line0: 36 | if '** WARNING: Elevation.gt.WarnElev **' in line: 37 | elmax.append(float(line.split('AT NODE')[0].split('=')[-1])) 38 | speedmax.append(float(line.split('SPEEDMAX =')[0].split('AT NODE')[-1])) 39 | index.append( 40 | np.abs(int(line.split('AT NODE')[-1].split('ON MYPROC')[0].strip())) - 1 41 | ) 42 | return elmax, speedmax, index 43 | 44 | 45 | def parse_args(): 46 | parser = argparse.ArgumentParser( 47 | description='Program to see a quick plot of an ADCIRC mesh.' 
48 | ) 49 | parser.add_argument('mesh', help='ADCIRC mesh file path.') 50 | parser.add_argument('--crs', help='ADCIRC mesh crs.') 51 | parser.add_argument('--show-elements', action='store_true', default=False) 52 | parser.add_argument('--no-topobathy', action='store_true', default=False) 53 | parser.add_argument('--vmin', type=float) 54 | parser.add_argument('--vmax', type=float) 55 | parser.add_argument('--plot-boundaries', action='store_true') 56 | parser.add_argument('--diagnose') 57 | return parser.parse_args() 58 | 59 | 60 | def main(): 61 | PlotMeshCommand(parse_args()) 62 | 63 | 64 | if __name__ == '__main__': 65 | main() 66 | -------------------------------------------------------------------------------- /adcircpy/cmd/tidal_run.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | import logging 3 | 4 | from pytz import timezone 5 | 6 | from adcircpy.cmd import argument_parser 7 | from adcircpy.cmd.basecmd import AdcircCommand 8 | 9 | 10 | class TidalRunCommand(AdcircCommand): 11 | """CLI wrapper for AdcircCommand to generate tidal only runs""" 12 | 13 | def __init__(self, args): 14 | super().__init__(args) 15 | self.start_date = datetime.strptime(self.args.start_date, '%Y-%m-%dT%H:%M:%S') 16 | self.end_date = self.start_date + timedelta(days=self.args.run_days) 17 | 18 | 19 | def main(): 20 | args = argument_parser.get_parser('tidal').parse_args() 21 | if len(args.constituents) == 0: 22 | args.constituents = ['all'] 23 | logging.basicConfig( 24 | level={'warning': logging.WARNING, 'info': logging.INFO, 'debug': logging.DEBUG,}[ 25 | args.log_level 26 | ], 27 | format='[%(asctime)s] %(name)s %(levelname)s: %(message)s', 28 | force=True, 29 | ) 30 | logging.Formatter.converter = lambda *args: datetime.now(tz=timezone('UTC')).timetuple() 31 | TidalRunCommand(args).run() 32 | -------------------------------------------------------------------------------- /adcircpy/cmd/tide_gen.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | import argparse 3 | from datetime import datetime, timedelta 4 | import pathlib 5 | 6 | from adcircpy import AdcircMesh, Fort15 7 | from adcircpy.forcing.tides import Tides 8 | 9 | 10 | def parse_args(): 11 | parser = argparse.ArgumentParser() 12 | parser.add_argument('mesh') 13 | parser.add_argument('start_date', type=lambda x: datetime.strptime(x, '%Y-%m-%dT%H:%M:%S')) 14 | parser.add_argument('run_days', type=float) 15 | parser.add_argument('--output-file', type=pathlib.Path) 16 | parser.add_argument('--tidal-database', '--tidal-db', choices=['hamtide', 'tpxo']) 17 | parser.add_argument('--mesh-crs') 18 | return parser.parse_args() 19 | 20 | 21 | def main(): 22 | args = parse_args() 23 | tides = Tides(tidal_source=args.tidal_database) 24 | tides.use_all() 25 | tides.start_date = args.start_date 26 | tides.end_date = tides.start_date + timedelta(days=args.run_days) 27 | mesh = AdcircMesh.open(args.mesh, crs=args.mesh_crs) 28 | mesh.add_forcing(tides) 29 | fort15 = Fort15(mesh) 30 | if args.output_file is not None: 31 | with open(args.output_file, 'w') as f: 32 | f.write(fort15.get_tidal_forcing()) 33 | else: 34 | print(fort15.get_tidal_forcing()) 35 | 36 | 37 | if __name__ == '__main__': 38 | main() 39 | -------------------------------------------------------------------------------- /adcircpy/figures.py: -------------------------------------------------------------------------------- 1 | from matplotlib import rcParams 2 | from matplotlib.colors import LinearSegmentedColormap, Normalize 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | 7 | def get_topobathy_kwargs(values, vmin, vmax, colors=256): 8 | vmin = np.min(values) if vmin is None else vmin 9 | vmax = np.max(values) if vmax is None else vmax 10 | if vmax <= 0.0: 11 | cmap = plt.cm.seismic 12 | col_val = 0.0 13 | levels = np.linspace(vmin, vmax, colors) 14 | else: 15 | wet_count = int(np.floor(colors * (float((values < 0.0).sum()) / float(values.size)))) 16 | col_val = float(wet_count) / colors 17 | dry_count = colors - wet_count 18 | colors_undersea = plt.cm.bwr(np.linspace(1.0, 0.0, wet_count)) 19 | colors_land = plt.cm.terrain(np.linspace(0.25, 1.0, dry_count)) 20 | colors = np.vstack((colors_undersea, colors_land)) 21 | cmap = LinearSegmentedColormap.from_list('cut_terrain', colors) 22 | wlevels = np.linspace(vmin, 0.0, wet_count, endpoint=False) 23 | dlevels = np.linspace(0.0, vmax, dry_count) 24 | levels = np.hstack((wlevels, dlevels)) 25 | if vmax > 0: 26 | norm = FixPointNormalize(sealevel=0.0, vmax=vmax, vmin=vmin, col_val=col_val) 27 | else: 28 | norm = None 29 | return { 30 | 'cmap': cmap, 31 | 'norm': norm, 32 | 'levels': levels, 33 | 'col_val': col_val, 34 | # 'extend': 'both' 35 | } 36 | 37 | 38 | def get_axes(axes, figsize=None, subplot=111): 39 | figsize = rcParams['figure.figsize'] if figsize is None else figsize 40 | if axes is None: 41 | fig = plt.figure(figsize=figsize) 42 | axes = fig.add_subplot(subplot) 43 | return axes 44 | 45 | 46 | class FixPointNormalize(Normalize): 47 | """ 48 | This class is used for plotting. The reason it is declared here is that 49 | it is used by more than one submodule. In the future, this class will be 50 | native part of matplotlib. This definiton will be removed once the native 51 | matplotlib definition becomes available. 52 | Inspired by https://stackoverflow.com/questions/20144529/shifted-colorbar-matplotlib 53 | Subclassing Normalize to obtain a colormap with a fixpoint 54 | somewhere in the middle of the colormap. 
55 | This may be useful for a `terrain` map, to set the "sea level" 56 | to a color in the blue/turquise range. 57 | """ 58 | 59 | def __init__(self, vmin=None, vmax=None, sealevel=0, col_val=0.5, clip=False): 60 | # sealevel is the fix point of the colormap (in data units) 61 | self.sealevel = sealevel 62 | # col_val is the color value in the range [0,1] that should represent 63 | # the sealevel. 64 | self.col_val = col_val 65 | Normalize.__init__(self, vmin, vmax, clip) 66 | 67 | def __call__(self, value, clip=None): 68 | x, y = [self.vmin, self.sealevel, self.vmax], [0, self.col_val, 1] 69 | if np.ma.is_masked(value) is False: 70 | value = np.ma.masked_invalid(value) 71 | return np.ma.masked_where(value.mask, np.interp(value, x, y)) 72 | 73 | 74 | def figure(f): 75 | def decorator(*argv, **kwargs): 76 | axes = get_axes(kwargs.get('axes', None), kwargs.get('figsize', None)) 77 | kwargs.update({'axes': axes}) 78 | axes = f(*argv, **kwargs) 79 | axes.axis('scaled') 80 | if kwargs.get('show', False): 81 | plt.show() 82 | return axes 83 | 84 | return decorator 85 | -------------------------------------------------------------------------------- /adcircpy/forcing/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.forcing.tides import HAMTIDE, TidalSource, Tides, TPXO 2 | from adcircpy.forcing.waves import WaveForcing, WaveWatch3DataForcing 3 | from adcircpy.forcing.winds import ( 4 | AtmosphericMeshForcing, 5 | BestTrackForcing, 6 | OwiForcing, 7 | WindForcing, 8 | ) 9 | 10 | __all__ = [ 11 | 'Tides', 12 | 'TidalSource', 13 | 'TPXO', 14 | 'HAMTIDE', 15 | 'WaveForcing', 16 | 'WaveWatch3DataForcing', 17 | 'WindForcing', 18 | 'BestTrackForcing', 19 | 'AtmosphericMeshForcing', 20 | 'OwiForcing', 21 | ] 22 | -------------------------------------------------------------------------------- /adcircpy/forcing/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from datetime import timedelta 3 | from os import PathLike 4 | 5 | 6 | class Forcing(ABC): 7 | def __init__(self, interval: timedelta): 8 | self.interval = interval 9 | 10 | @abstractmethod 11 | def write(self, directory: PathLike, overwrite: bool = False): 12 | raise NotImplementedError 13 | 14 | @property 15 | def interval(self) -> timedelta: 16 | return self.__interval 17 | 18 | @interval.setter 19 | def interval(self, interval: timedelta): 20 | if interval is not None and not isinstance(interval, timedelta): 21 | interval = timedelta(seconds=interval) 22 | self.__interval = interval 23 | 24 | def __eq__(self, other: 'Forcing') -> bool: 25 | return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ 26 | -------------------------------------------------------------------------------- /adcircpy/forcing/bctypes.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | 3 | 4 | class BoundaryCondition(metaclass=ABCMeta): 5 | @property 6 | @abstractmethod 7 | def btype(self): 8 | raise NotImplementedError 9 | 10 | 11 | class EtaBc(BoundaryCondition): 12 | @property 13 | def btype(self): 14 | return 'iettype' 15 | 16 | 17 | class VelBc(BoundaryCondition): 18 | @property 19 | def btype(self): 20 | return 'ifltype' 21 | 22 | 23 | class TempBc(BoundaryCondition): 24 | @property 25 | def btype(self): 26 | return 'itetype' 27 | 28 | 29 | class SalBc(BoundaryCondition): 30 | @property 31 | def btype(self): 32 | return 
'isatype' 33 | 34 | 35 | class TraceBc(BoundaryCondition): 36 | @property 37 | def btype(self): 38 | return 'itrtype' 39 | -------------------------------------------------------------------------------- /adcircpy/forcing/tides/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.forcing.tides.hamtide import HAMTIDE 2 | from adcircpy.forcing.tides.tides import TidalSource, Tides 3 | from adcircpy.forcing.tides.tpxo import TPXO 4 | 5 | __all__ = ['Tides', 'TidalSource', 'TPXO', 'HAMTIDE'] 6 | -------------------------------------------------------------------------------- /adcircpy/forcing/tides/dataset.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from os import PathLike 3 | from typing import List, Tuple 4 | 5 | import numpy as np 6 | 7 | 8 | class TidalDataset(ABC): 9 | def __init__(self, path: PathLike = None): 10 | """ 11 | create a new tidal dataset object 12 | :param path: file path or URL pointing to dataset location 13 | """ 14 | 15 | self.path = str(path) if path is not None else None 16 | 17 | def __call__( 18 | self, constituent: str, vertices: np.ndarray 19 | ) -> Tuple[np.ndarray, np.ndarray]: 20 | """ 21 | get tidal amplitude and phase 22 | :param constituent: tidal constituent 23 | :param vertices: XY locations at which to sample (Mx2) 24 | :return: amplitude and phase arrays at given locations 25 | """ 26 | return self.get_amplitude(constituent, vertices), self.get_phase(constituent, vertices) 27 | 28 | @abstractmethod 29 | def get_amplitude(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 30 | """ 31 | generate tidal amplitude 32 | :param constituent: tidal constituent 33 | :param vertices: XY locations at which to sample (Mx2) 34 | :return: amplitude at given locations 35 | """ 36 | raise NotImplementedError 37 | 38 | @abstractmethod 39 | def get_phase(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 40 | """ 41 | generate tidal phase 42 | :param constituent: tidal constituent 43 | :param vertices: XY locations at which to sample (Mx2) 44 | :return: phase at given locations 45 | """ 46 | raise NotImplementedError 47 | 48 | @property 49 | @abstractmethod 50 | def x(self) -> np.ndarray: 51 | """ 52 | :return: 1D array of X values of vertices 53 | """ 54 | raise NotImplementedError 55 | 56 | @property 57 | @abstractmethod 58 | def y(self) -> np.ndarray: 59 | """ 60 | :return: 1D array of Y values of vertices 61 | """ 62 | raise NotImplementedError 63 | 64 | @property 65 | @abstractmethod 66 | def constituents(self) -> List[str]: 67 | """ 68 | :return: list of constituents available on the data source. 
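        (e.g. ['M2', 'S2', 'N2', 'K1', 'O1', ...], depending on the tidal data source)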
69 | """ 70 | raise NotImplementedError 71 | 72 | @staticmethod 73 | def _assert_vertices(vertices: np.ndarray): 74 | """ 75 | :param vertices: list of XY locations 76 | :return: whether vertices are in XY format (Mx2) 77 | """ 78 | assert ( 79 | len(vertices.shape) == 2 and vertices.shape[1] == 2 80 | ), 'vertices must be of shape Mx2' 81 | 82 | def __eq__(self, other: 'TidalDataset') -> bool: 83 | return self.__class__ == other.__class__ and self.__dict__ == other.__dict__ 84 | -------------------------------------------------------------------------------- /adcircpy/forcing/tides/hamtide.py: -------------------------------------------------------------------------------- 1 | from os import PathLike 2 | from pathlib import Path 3 | 4 | from netCDF4 import Dataset 5 | import numpy as np 6 | from scipy.interpolate import griddata 7 | 8 | from adcircpy.forcing.tides.dataset import TidalDataset 9 | 10 | 11 | class HAMTIDE(TidalDataset): 12 | """ 13 | Taguchi, E., Stammer, D., & Zahel, W. (2010). Estimation of deep ocean tidal energy dissipation based on the high-resolution data-assimilative HAMTIDE model. J. geophys. Res. 14 | https://icdc.cen.uni-hamburg.de/en/hamtide.html 15 | """ 16 | 17 | OPENDAP_URL = 'https://icdc.cen.uni-hamburg.de/thredds/dodsC/ftpthredds/hamtide/' 18 | 19 | def __init__(self, hamtide_dataset_directory: PathLike = None): 20 | if hamtide_dataset_directory is None: 21 | hamtide_dataset_directory = self.OPENDAP_URL 22 | else: 23 | try: 24 | if Path(hamtide_dataset_directory).exists(): 25 | hamtide_dataset_directory = Path(hamtide_dataset_directory) 26 | if len(list(hamtide_dataset_directory.glob('*.nc'))) == 0: 27 | raise FileNotFoundError( 28 | f'no NetCDF files found at ' f'"{hamtide_dataset_directory}"' 29 | ) 30 | except OSError: 31 | raise ValueError('given resource must be a local path') 32 | 33 | super().__init__(hamtide_dataset_directory) 34 | 35 | datasets = {'elevation': {}, 'velocity': {}} 36 | for variable in datasets.keys(): 37 | datasets[variable].update( 38 | { 39 | constituent.lower(): {'path': None, 'dataset': None} 40 | for constituent in self.constituents 41 | } 42 | ) 43 | 44 | self.datasets = datasets 45 | 46 | def get_amplitude(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 47 | if not isinstance(vertices, np.ndarray): 48 | vertices = np.asarray(vertices) 49 | self._assert_vertices(vertices) 50 | return self._get_interpolation('elevation', 'AMPL', constituent, vertices) * 0.01 51 | 52 | def get_phase(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 53 | if not isinstance(vertices, np.ndarray): 54 | vertices = np.asarray(vertices) 55 | self._assert_vertices(vertices) 56 | return self._get_interpolation('elevation', 'PHAS', constituent, vertices) 57 | 58 | @property 59 | def x(self) -> np.ndarray: 60 | if not hasattr(self, '_x'): 61 | self._x = Dataset(self._prepend_path('k2.hamtide11a.nc'))['LON'][:].data 62 | return self._x 63 | 64 | @property 65 | def y(self) -> np.ndarray: 66 | if not hasattr(self, '_y'): 67 | self._y = Dataset(self._prepend_path('k2.hamtide11a.nc'))['LAT'][:].data 68 | return self._y 69 | 70 | @property 71 | def constituents(self): 72 | return ['S2', 'Q1', 'P1', 'O1', 'N2', 'M2', 'K2', 'K1'] 73 | 74 | def _get_dataset(self, variable: str, constituent: str) -> Dataset: 75 | data = self.datasets[variable][constituent.lower()] 76 | 77 | dataset = data['dataset'] 78 | if dataset is None: 79 | path = data['path'] 80 | if path is None: 81 | if variable == 'elevation': 82 | filename = 
f'{constituent.lower()}.hamtide11a.nc' 83 | elif variable == 'velocity': 84 | filename = f'HAMcurrent11a_{constituent.lower()}.nc' 85 | else: 86 | raise NotImplementedError( 87 | f'tidal variable "{variable}" ' f'not implemented' 88 | ) 89 | 90 | path = self._prepend_path(filename) 91 | 92 | try: 93 | dataset = Dataset(path) 94 | if data['path'] is None: 95 | self.datasets[variable][constituent.lower()]['path'] = path 96 | self.datasets[variable][constituent.lower()]['dataset'] = dataset 97 | except FileNotFoundError: 98 | raise FileNotFoundError( 99 | f'no dataset found for "{variable}" ' f'"{constituent}" at "{path}"' 100 | ) 101 | 102 | return dataset 103 | 104 | def _get_interpolation( 105 | self, variable: str, netcdf_variable: str, constituent: str, vertices: np.ndarray, 106 | ) -> np.ndarray: 107 | self._assert_vertices(vertices) 108 | 109 | xq = np.asarray([x + 360.0 if x < 0.0 else x for x in vertices[:, 0]]).flatten() 110 | yq = vertices[:, 1].flatten() 111 | dx = (self.x[-1] - self.x[0]) / len(self.x) 112 | xidx = np.logical_and(self.x >= np.min(xq) - 2.0 * dx, self.x <= np.max(xq) + 2.0 * dx) 113 | dy = (self.y[-1] - self.y[0]) / len(self.y) 114 | yidx = np.logical_and(self.y >= np.min(yq) - 2.0 * dy, self.y <= np.max(yq) + 2.0 * dy) 115 | xi, yi = np.meshgrid(self.x[xidx], self.y[yidx]) 116 | dataset = self._get_dataset(variable, constituent) 117 | zi = dataset[netcdf_variable][yidx, xidx] 118 | mask = ~zi.mask 119 | if mask.size == 1: 120 | mask = np.array(zi * mask, dtype=bool) 121 | xi = xi[mask].flatten() 122 | yi = yi[mask].flatten() 123 | zi = zi[mask].flatten() 124 | values = griddata((xi, yi), zi, (xq, yq), method='linear', fill_value=np.nan,) 125 | nan_idxs = np.where(np.isnan(values)) 126 | values[nan_idxs] = griddata( 127 | (xi, yi), zi, (xq[nan_idxs], yq[nan_idxs]), method='nearest', 128 | ) 129 | return values 130 | 131 | def _prepend_path(self, filename: str) -> str: 132 | if self.path is None: 133 | path = self.path 134 | elif isinstance(self.path, Path): 135 | path = self.path / filename 136 | else: 137 | path = f'{self.path}/{filename}' 138 | return path 139 | -------------------------------------------------------------------------------- /adcircpy/forcing/tides/tpxo.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os import PathLike 3 | from pathlib import Path 4 | 5 | import appdirs 6 | from netCDF4 import Dataset 7 | import numpy as np 8 | from scipy.interpolate import griddata 9 | 10 | from adcircpy.forcing.tides.dataset import TidalDataset 11 | 12 | TPXO_ENVIRONMENT_VARIABLE = 'TPXO_NCFILE' 13 | TPXO_FILENAME = 'h_tpxo9.v1.nc' 14 | 15 | 16 | class TPXO(TidalDataset): 17 | DEFAULT_PATH = Path(appdirs.user_data_dir('tpxo')) / TPXO_FILENAME 18 | 19 | def __init__(self, tpxo_dataset_filename: PathLike = None): 20 | if tpxo_dataset_filename is None: 21 | tpxo_environment_variable = os.getenv(TPXO_ENVIRONMENT_VARIABLE) 22 | if tpxo_environment_variable is not None: 23 | tpxo_dataset_filename = tpxo_environment_variable 24 | else: 25 | tpxo_dataset_filename = self.DEFAULT_PATH 26 | 27 | super().__init__(tpxo_dataset_filename) 28 | 29 | if self.path is not None: 30 | self.dataset = Dataset(self.path) 31 | else: 32 | raise FileNotFoundError( 33 | '\n'.join( 34 | [ 35 | f'No TPXO file found at "{self.path}".', 36 | 'New users will need to register and request a copy of ' 37 | f'the TPXO9 NetCDF file (specifically `{TPXO_FILENAME}`) ' 38 | 'from the authors at https://www.tpxo.net.', 39 | 'Once you obtain 
`h_tpxo9.v1.nc`, you can follow one of the following options: ', 40 | f'1) copy or symlink the file to "{self.path}"', 41 | f'2) set the environment variable `{TPXO_ENVIRONMENT_VARIABLE}` to point to the file', 42 | ] 43 | ) 44 | ) 45 | 46 | def get_amplitude(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 47 | if not isinstance(vertices, np.ndarray): 48 | vertices = np.asarray(vertices) 49 | self._assert_vertices(vertices) 50 | return self._get_interpolation(self.ha, constituent, vertices) 51 | 52 | def get_phase(self, constituent: str, vertices: np.ndarray) -> np.ndarray: 53 | if not isinstance(vertices, np.ndarray): 54 | vertices = np.asarray(vertices) 55 | self._assert_vertices(vertices) 56 | return self._get_interpolation(self.hp, constituent, vertices) 57 | 58 | @property 59 | def x(self) -> np.ndarray: 60 | return self.dataset['lon_z'][:, 0].data 61 | 62 | @property 63 | def y(self) -> np.ndarray: 64 | return self.dataset['lat_z'][0, :].data 65 | 66 | @property 67 | def ha(self) -> np.ndarray: 68 | return self.dataset['ha'][:] 69 | 70 | @property 71 | def hp(self) -> np.ndarray: 72 | return self.dataset['hp'][:] 73 | 74 | @property 75 | def constituents(self): 76 | if not hasattr(self, '_constituents'): 77 | self._constituents = [ 78 | c.capitalize() 79 | for c in self.dataset['con'][:] 80 | .astype('|S1') 81 | .tostring() 82 | .decode('utf-8') 83 | .split() 84 | ] 85 | return self._constituents 86 | 87 | def _get_interpolation( 88 | self, tpxo_array: np.ndarray, constituent: str, vertices: np.ndarray 89 | ): 90 | """ 91 | `tpxo_index_key` is either `ha` or `hp` based on the keys used 92 | internally in the TPXO NetCDF file. 93 | """ 94 | 95 | self._assert_vertices(vertices) 96 | constituents = list(map(lambda x: x.lower(), self.constituents)) 97 | constituent = constituents.index(constituent.lower()) 98 | zi = tpxo_array[constituent, :, :].flatten() 99 | xo = np.asarray([x + 360.0 for x in vertices[:, 0] if x < 0]).flatten() 100 | yo = vertices[:, 1].flatten() 101 | xi, yi = np.meshgrid(self.x, self.y, indexing='ij') 102 | xi = xi.flatten() 103 | yi = yi.flatten() 104 | dx = np.mean(np.diff(self.x)) 105 | dy = np.mean(np.diff(self.y)) 106 | # buffer window by 2 pixel units 107 | mask1 = np.logical_and( 108 | np.logical_and(xi >= np.min(xo) - 2 * dx, xi <= np.max(xo) + 2 * dx), 109 | np.logical_and(yi >= np.min(yo) - 2 * dy, yi <= np.max(yo) + 2 * dy), 110 | ) 111 | # remove junk values from input array 112 | mask2 = np.ma.masked_where(zi != 0.0, zi) 113 | iidx = np.where(np.logical_and(mask1, mask2)) 114 | values = griddata( 115 | (xi[iidx], yi[iidx]), zi[iidx], (xo, yo), method='linear', fill_value=np.nan, 116 | ) 117 | nan_idxs = np.where(np.isnan(values)) 118 | values[nan_idxs] = griddata( 119 | (xi[iidx], yi[iidx]), zi[iidx], (xo[nan_idxs], yo[nan_idxs]), method='nearest', 120 | ) 121 | return values 122 | -------------------------------------------------------------------------------- /adcircpy/forcing/waves/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.forcing.waves.base import WaveForcing 2 | from adcircpy.forcing.waves.ww3 import WaveWatch3DataForcing 3 | 4 | __all__ = [ 5 | 'WaveForcing', 6 | 'WaveWatch3DataForcing', 7 | ] 8 | -------------------------------------------------------------------------------- /adcircpy/forcing/waves/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from os import PathLike 3 | 4 | 
from adcircpy.forcing.base import Forcing 5 | 6 | 7 | class WaveForcing(Forcing, ABC): 8 | def __init__(self, nrs: int, interval_seconds: int): 9 | super().__init__(interval_seconds) 10 | self.NRS = nrs 11 | 12 | @abstractmethod 13 | def write(self, directory: PathLike, overwrite: bool = False): 14 | raise NotImplementedError 15 | -------------------------------------------------------------------------------- /adcircpy/forcing/waves/ww3.py: -------------------------------------------------------------------------------- 1 | from os import PathLike 2 | from pathlib import Path 3 | 4 | from adcircpy.forcing.waves import WaveForcing 5 | 6 | 7 | class WaveWatch3DataForcing(WaveForcing): 8 | def __init__(self, filename: PathLike, nrs: int = 5, interval_seconds: int = 3600): 9 | if not isinstance(filename, Path): 10 | filename = Path(filename) 11 | self.filename = filename 12 | super().__init__(nrs=nrs, interval_seconds=interval_seconds) 13 | 14 | def write(self, directory: PathLike, overwrite: bool = False): 15 | # ww3data is just a netCDF file so needs no fort.22 16 | pass 17 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.forcing.winds.atmesh import AtmosphericMeshForcing 2 | from adcircpy.forcing.winds.base import WindForcing 3 | from adcircpy.forcing.winds.best_track import BestTrackForcing 4 | from adcircpy.forcing.winds.owi import OwiForcing 5 | 6 | __all__ = [ 7 | 'WindForcing', 8 | 'BestTrackForcing', 9 | 'AtmosphericMeshForcing', 10 | 'OwiForcing', 11 | ] 12 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/_parametric/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oceanmodeling/adcircpy/0eb84de6e743b4b6c8f2514dba07646b883ecc08/adcircpy/forcing/winds/_parametric/__init__.py -------------------------------------------------------------------------------- /adcircpy/forcing/winds/_parametric/gao2015.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | 4 | def main(): 5 | pass 6 | 7 | 8 | if __name__ == '__main__': 9 | main() 10 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/_parametric/holland2010.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | from pyschism.forcing.winds.atcf import Bdeck 6 | from scipy import optimize 7 | 8 | 9 | def holland_B(hurdat, coriolis=True): 10 | # air_density = 1.225 11 | air_density = 1.15 12 | 13 | def with_coriolis(Vmax, Rmax, Pn, Pc, eye_lat): 14 | f = 2.0 * 7.2921e-5 * np.sin(np.radians(np.abs(eye_lat))) 15 | return (Vmax ** 2 + Vmax * Rmax * f * air_density * np.exp(1)) / (Pn - Pc) 16 | 17 | def no_coriolis(Vmax, Pn, Pc): 18 | return (Vmax ** 2 * air_density * np.exp(1)) / (Pn - Pc) 19 | 20 | for data in hurdat.values(): 21 | 22 | Pc = data['central_pressure'] 23 | Pn = data['background_pressure'] 24 | # avoid negative Holland B parameter as initial guess 25 | if Pn <= Pc: 26 | Pn = Pc + 1.0 27 | if coriolis: 28 | return with_coriolis( 29 | data['max_sustained_wind_speed'], 30 | data['radius_of_maximum_winds'], 31 | Pn, 32 | Pc, 33 | data['eye']['lat'], 34 | ) 35 | else: 36 | return no_coriolis(data['max_sustained_wind_speed'], Pn, Pc) 37 | 38 | 39 | def main(): 40 | storm_id = 'AL152017' 41 | # storm_id = 'AL182012' 42 | hurdat = Bdeck(storm_id).data 43 | for time, data in hurdat.items(): 44 | if len(data['isotachs'].keys()) != 4: 45 | continue 46 | # initial guesses 47 | Vmax = data['max_sustained_wind_speed'] 48 | Rmax = data['radius_of_maximum_winds'] 49 | # print(data) 50 | # exit() 51 | x = 1.0 52 | B = 1.0 53 | 54 | def holland2010(r, B, x): 55 | return Vmax * (((Rmax / r) ** B) * np.exp(1 - (Rmax / r) ** B)) ** x 56 | 57 | def V(B, x): 58 | def v(r): 59 | return holland2010(r, B, x) 60 | 61 | return v 62 | 63 | # B = holland_B(hurdat) 64 | for quad, isotachs in data['isotachs'].items(): 65 | xdata = [] 66 | ydata = [] 67 | for y, x in isotachs.items(): 68 | xdata.append(x) 69 | ydata.append(y) 70 | # xdata.append(Rmax) 71 | # ydata.append(Vmax) 72 | # add bounds 73 | bi = np.finfo(float).eps # avoid divide by zero 74 | bf = data['radius_of_last_closed_isobar'] 75 | bounds = (bi, bf) 76 | p0 = [B, x] 77 | # do curve fitting 78 | with warnings.catch_warnings(): 79 | warnings.simplefilter('ignore') 80 | popt, pcov = optimize.curve_fit( 81 | holland2010, 82 | xdata, 83 | ydata, 84 | p0=p0, 85 | # bounds=bounds, 86 | method='dogbox', 87 | ) 88 | print(popt) 89 | v = V(*popt) 90 | radii = np.linspace(bi, bf, num=500) 91 | res = [] 92 | for i in radii: 93 | res.append(v(i)) 94 | results = np.array(res) 95 | plt.plot(radii, results, label=quad) 96 | # plt.gca().axis('scaled') 97 | plt.legend() 98 | plt.show() 99 | plt.close(plt.gcf()) 100 | # Vmax 101 | # res = minimize(holland2010, x0=[Vmax, Rmax, B, 0.1] ) 102 | # print(res) 103 | 104 | 105 | def init(): 106 | if __name__ == '__main__': 107 | try: 108 | import colored_traceback 109 | 110 | colored_traceback.add_hook(always=True) 111 | except ModuleNotFoundError: 112 | pass 113 | main() 114 | 115 | 116 | init() 117 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/atmesh.py: -------------------------------------------------------------------------------- 1 | from os import PathLike 2 | from pathlib import Path 3 | 4 | from adcircpy.forcing.winds.base import WindForcing 5 | 6 | 7 | class 
AtmosphericMeshForcing(WindForcing): 8 | def __init__(self, filename: PathLike, nws: int = 5, interval_seconds: int = 3600): 9 | if not isinstance(filename, Path): 10 | filename = Path(filename) 11 | self.filename = filename 12 | super().__init__(nws=nws, interval_seconds=interval_seconds) 13 | 14 | def write(self, directory: PathLike, overwrite: bool = False): 15 | # atmesh is only a netCDF file so no fort.22 is needed 16 | pass 17 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from os import PathLike 3 | 4 | from adcircpy.forcing.base import Forcing 5 | 6 | 7 | class WindForcing(Forcing, ABC): 8 | def __init__(self, nws: int, interval_seconds: int): 9 | super().__init__(interval_seconds) 10 | self.NWS = nws 11 | 12 | @abstractmethod 13 | def write(self, directory: PathLike, overwrite: bool = False): 14 | raise NotImplementedError 15 | 16 | @classmethod 17 | def from_fort22(cls, fort22: PathLike, nws: int = None) -> 'WindForcing': 18 | raise NotImplementedError(f'reading `fort.22` is not implemented for {cls}') 19 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/best_track.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import io 3 | import logging 4 | import os 5 | from os import PathLike 6 | import pathlib 7 | from typing import Union 8 | 9 | from matplotlib import pyplot 10 | from matplotlib.axis import Axis 11 | from matplotlib.transforms import Bbox 12 | import numpy as numpy 13 | from pandas import DataFrame 14 | from pyproj import CRS, Transformer 15 | from shapely import ops 16 | from shapely.geometry import Point, Polygon 17 | from stormevents.nhc import VortexTrack 18 | import utm 19 | 20 | from adcircpy.forcing.winds.base import WindForcing 21 | from adcircpy.plotting import plot_coastline, plot_polygons 22 | 23 | 24 | class BestTrackForcing(VortexTrack, WindForcing): 25 | def __init__( 26 | self, 27 | storm: Union[str, PathLike, DataFrame, io.BytesIO], 28 | nws: int = None, 29 | interval_seconds: int = None, 30 | start_date: datetime = None, 31 | end_date: datetime = None, 32 | *args, 33 | **kwargs, 34 | ): 35 | if nws is None: 36 | nws = 20 37 | 38 | valid_nws_values = [8, 19, 20] 39 | assert ( 40 | nws in valid_nws_values 41 | ), f'ATCF BestTrack can only use `nws` values in {valid_nws_values}' 42 | 43 | if interval_seconds is None: 44 | interval_seconds = 3600 45 | 46 | VortexTrack.__init__( 47 | self, 48 | storm=storm, 49 | start_date=start_date, 50 | end_date=end_date, 51 | file_deck='b', 52 | advisories=['BEST'], 53 | ) 54 | WindForcing.__init__(self, nws=nws, interval_seconds=interval_seconds) 55 | 56 | @classmethod 57 | def from_fort22( 58 | cls, 59 | fort22: PathLike, 60 | nws: int = None, 61 | interval_seconds: int = None, 62 | start_date: datetime = None, 63 | end_date: datetime = None, 64 | ) -> 'WindForcing': 65 | instance = cls.from_file(path=fort22, start_date=start_date, end_date=end_date) 66 | WindForcing.__init__(instance, nws=nws, interval_seconds=interval_seconds) 67 | return instance 68 | 69 | def summary( 70 | self, output: Union[str, os.PathLike] = None, overwrite: bool = False, 71 | ): 72 | min_storm_speed = numpy.min(self.data['speed']) 73 | max_storm_speed = numpy.max(self.data['speed']) 74 | track_length = self.distance 75 | 
duration = self.duration 76 | min_central_pressure = numpy.min(self.data['central_pressure']) 77 | max_wind_speed = numpy.max(self.data['max_sustained_wind_speed']) 78 | start_loc = (self.data['longitude'][0], self.data['latitude'][0]) 79 | end_loc = (self.data['longitude'].iloc[-1], self.data['latitude'].iloc[-1]) 80 | f = [ 81 | f'Summary of storm: {self.nhc_code}', 82 | f'min./max. track speed: {min_storm_speed} m/s, {max_storm_speed} m/s', 83 | f'min. central pressure: {min_central_pressure} hPa', 84 | f'max. wind speed: {max_wind_speed} kts', 85 | f'Starting at: {start_loc} and ended at: {end_loc}', 86 | f'Total track length: {track_length:.2f} km', 87 | f'Total track duration: {duration:.2f} days', 88 | ] 89 | summary = '\n'.join(f) 90 | if output is not None: 91 | if not isinstance(output, pathlib.Path): 92 | output = pathlib.Path(output) 93 | if overwrite or not output.exists(): 94 | with open(output, 'w+') as fh: 95 | fh.write(summary) 96 | else: 97 | logging.debug(f'skipping existing file "{output}"') 98 | return summary 99 | 100 | def write(self, path: PathLike, overwrite: bool = False): 101 | VortexTrack.to_file(self, path=path, overwrite=overwrite) 102 | 103 | @property 104 | def NWS(self) -> int: 105 | try: 106 | return self.__NWS 107 | except AttributeError: 108 | return 20 109 | 110 | @NWS.setter 111 | def NWS(self, NWS: int): 112 | assert NWS in [8, 19, 20] 113 | self.__NWS = int(NWS) 114 | 115 | @property 116 | def BLADj(self) -> float: 117 | try: 118 | return self.__BLADj 119 | except AttributeError: 120 | return 0.9 121 | 122 | @BLADj.setter 123 | def BLADj(self, BLADj: float): 124 | BLADj = float(BLADj) 125 | assert BLADj >= 0 and BLADj <= 1 126 | self.__BLADj = BLADj 127 | 128 | @property 129 | def geofactor(self) -> float: 130 | try: 131 | return self.__geofactor 132 | except AttributeError: 133 | return 1 134 | 135 | @geofactor.setter 136 | def geofactor(self, geofactor: float): 137 | geofactor = float(geofactor) 138 | assert geofactor >= 0 and geofactor <= 1 139 | self.__geofactor = geofactor 140 | 141 | def clip_to_bbox(self, bbox, bbox_crs): 142 | msg = f'bbox must be a {Bbox} instance.' 
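# note: the records below are scanned in time order; the first one whose radius-of-last-closed-isobar circle (buffered in UTM, then reprojected to bbox_crs) intersects the box sets start_date, and the first later record that falls outside it sets end_date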
143 | assert isinstance(bbox, Bbox), msg 144 | bbox_pol = Polygon( 145 | [ 146 | [bbox.xmin, bbox.ymin], 147 | [bbox.xmax, bbox.ymin], 148 | [bbox.xmax, bbox.ymax], 149 | [bbox.xmin, bbox.ymax], 150 | [bbox.xmin, bbox.ymin], 151 | ] 152 | ) 153 | _switch = True 154 | unique_dates = numpy.unique(self.data['datetime']) 155 | _found_start_date = False 156 | for _datetime in unique_dates: 157 | records = self.data[self.data['datetime'] == _datetime] 158 | radii = records['radius_of_last_closed_isobar'].iloc[0] 159 | radii = 1852.0 * radii # convert to meters 160 | lon = records['longitude'].iloc[0] 161 | lat = records['latitude'].iloc[0] 162 | _, _, number, letter = utm.from_latlon(lat, lon) 163 | df_crs = CRS.from_epsg(4326) 164 | utm_crs = CRS.from_epsg(f'326{number}') 165 | transformer = Transformer.from_crs(df_crs, utm_crs, always_xy=True) 166 | p = Point(*transformer.transform(lon, lat)) 167 | pol = p.buffer(radii) 168 | transformer = Transformer.from_crs(utm_crs, bbox_crs, always_xy=True) 169 | pol = ops.transform(transformer.transform, pol) 170 | if _switch is True: 171 | if not pol.intersects(bbox_pol): 172 | continue 173 | else: 174 | self.start_date = records['datetime'].iloc[0] 175 | _found_start_date = True 176 | _switch = False 177 | continue 178 | 179 | else: 180 | if pol.intersects(bbox_pol): 181 | continue 182 | else: 183 | self.end_date = records['datetime'].iloc[0] 184 | break 185 | 186 | if _found_start_date is False: 187 | raise Exception(f'No data within mesh bounding box for storm {self.storm_id}.') 188 | 189 | def plot_track( 190 | self, 191 | axis: Axis = None, 192 | show: bool = False, 193 | color: str = 'k', 194 | coastline: bool = True, 195 | **kwargs, 196 | ): 197 | kwargs.update({'color': color}) 198 | if axis is None: 199 | fig = pyplot.figure() 200 | axis = fig.add_subplot(111) 201 | data = self.data 202 | for i, (_, row) in enumerate(data.iterrows()): 203 | # when dealing with nautical degrees, U is sine and V is cosine. 
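# (compass direction is measured clockwise from north, so the eastward component U uses the sine and the northward component V uses the cosine)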
204 | U = row['speed'] * numpy.sin(numpy.deg2rad(row['direction'])) 205 | V = row['speed'] * numpy.cos(numpy.deg2rad(row['direction'])) 206 | axis.quiver(row['longitude'], row['latitude'], U, V, **kwargs) 207 | if i % 6 == 0: 208 | axis.annotate( 209 | row['datetime'], (row['longitude'], row['latitude']), 210 | ) 211 | if show: 212 | axis.axis('scaled') 213 | if bool(coastline) is True: 214 | plot_coastline(axis, show) 215 | 216 | def plot_wind_swath(self, isotach: int, segments: int = 91): 217 | isotachs = self.isotachs(wind_speed=isotach, segments=segments) 218 | swath = self.wind_swaths(wind_speed=isotach, segments=segments)['BEST'] 219 | 220 | plot_polygons(isotachs) 221 | plot_polygons(swath) 222 | pyplot.suptitle( 223 | f'{self.nhc_code} - isotach {isotach} kt ({self.start_date} - {self.end_date})' 224 | ) 225 | pyplot.show() 226 | -------------------------------------------------------------------------------- /adcircpy/forcing/winds/owi.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from os import PathLike 3 | from pathlib import Path 4 | from typing import Any 5 | 6 | import numpy as np 7 | 8 | from adcircpy.forcing.winds.base import WindForcing 9 | 10 | 11 | class OwiForcing(WindForcing): 12 | def __init__(self, interval_seconds: int): 13 | super().__init__(12, interval_seconds) 14 | self.__basin_scale_pressure = None 15 | self.__basin_scale_winds = None 16 | self.__regional_scale_pressure = None 17 | self.__regional_scale_winds = None 18 | 19 | def write(self, directory: PathLike, overwrite: bool = False): 20 | if not isinstance(directory, Path): 21 | directory = Path(directory) 22 | output_filenames = { 23 | 'fort.22': self.fort22, 24 | 'fort.221': self.fort221, 25 | 'fort.222': self.fort222, 26 | 'fort.223': self.fort223, 27 | 'fort.224': self.fort224, 28 | } 29 | for output_filename, output_text in output_filenames.items(): 30 | output_filename = directory / output_filename 31 | if not output_filename.exists() or overwrite: 32 | with open(output_filename) as output_file: 33 | output_file.write(output_text) 34 | 35 | def make_plot(self): 36 | pass 37 | 38 | @property 39 | def start_date(self): 40 | try: 41 | return self.__start_date 42 | except AttributeError: 43 | return 44 | 45 | @property 46 | def end_date(self): 47 | try: 48 | return self.__end_date 49 | except AttributeError: 50 | return 51 | 52 | @property 53 | def datetime(self) -> np.ndarray: 54 | try: 55 | return self.__datetime 56 | except AttributeError: 57 | return 58 | 59 | def __set_datetime(self, value: np.ndarray): 60 | if self.datetime is not None: 61 | assert np.array_equal( 62 | self.datetime, value 63 | ), 'Dates of input files provided do not match.' 
64 | else: 65 | self.__datetime = np.asarray(value) 66 | 67 | @property 68 | def basin_scale_pressure(self): 69 | if self.__basin_scale_pressure is None: 70 | raise AttributeError('Must set basin_scale_pressure attribute.') 71 | return self.__basin_scale_pressure 72 | 73 | @basin_scale_pressure.setter 74 | def basin_scale_pressure(self, basin_scale_pressure): 75 | _ = self.__parse_fort22_p(basin_scale_pressure) 76 | self.__set_datetime(_['datetime']) 77 | 78 | @property 79 | def basin_scale_winds(self): 80 | if self.__basin_scale_winds is None: 81 | raise AttributeError('Must set basin_scale_winds attribute.') 82 | return self.__basin_scale_winds 83 | 84 | @basin_scale_winds.setter 85 | def basin_scale_winds(self, basin_scale_winds): 86 | _ = self.__parse_fort22_w(basin_scale_winds) 87 | self.__set_datetime(_['datetime']) 88 | 89 | @property 90 | def regional_scale_pressure(self): 91 | if self.__regional_scale_pressure is None: 92 | raise AttributeError('Must set regional_scale_pressure attribute.') 93 | return self.__regional_scale_pressure 94 | 95 | @regional_scale_pressure.setter 96 | def regional_scale_pressure(self, regional_scale_pressure): 97 | _ = self.__parse_fort22_p(regional_scale_pressure) 98 | self.__set_datetime(_['datetime']) 99 | 100 | @property 101 | def regional_scale_winds(self): 102 | if self.__regional_scale_winds is None: 103 | raise AttributeError('Must set regional_scale_winds attribute.') 104 | return self.__regional_scale_winds 105 | 106 | @regional_scale_winds.setter 107 | def regional_scale_winds(self, regional_scale_winds): 108 | _ = self.__parse_fort22_w(regional_scale_winds) 109 | self.__set_datetime(_['datetime']) 110 | 111 | @property 112 | def fort22(self) -> str: 113 | raise NotImplementedError 114 | 115 | @property 116 | def fort221(self) -> str: 117 | raise NotImplementedError 118 | 119 | @fort221.setter 120 | def fort221(self, fort221): 121 | self.basin_scale_pressure = fort221 122 | 123 | @property 124 | def fort222(self) -> str: 125 | raise NotImplementedError 126 | 127 | @fort222.setter 128 | def fort222(self, fort222): 129 | self.basin_scale_winds = fort222 130 | 131 | @property 132 | def fort223(self) -> str: 133 | raise NotImplementedError 134 | 135 | @fort223.setter 136 | def fort223(self, fort223): 137 | self.regional_scale_pressure = fort223 138 | 139 | @property 140 | def fort224(self) -> str: 141 | raise NotImplementedError 142 | 143 | @fort224.setter 144 | def fort224(self, fort224): 145 | self.regional_scale_winds = fort224 146 | 147 | @staticmethod 148 | def __parse_fort22_p(file: PathLike) -> {str: Any}: 149 | with open(file, 'r') as f: 150 | OWI = dict() 151 | OWI['datetime'] = list() 152 | OWI['values'] = list() 153 | OWI['header'] = f.readline().strip('\n') 154 | line = f.readline() 155 | OWI['iLat'] = int(line[6:9]) 156 | OWI['iLon'] = int(line[15:19]) 157 | OWI['DX'] = float(line[22:28]) 158 | OWI['DY'] = float(line[31:37]) 159 | OWI['SWLat'] = float(line[45:51]) 160 | OWI['SWLon'] = float(line[57:65]) 161 | OWI['datetime'].append(datetime.strptime(line[68:80], '%Y%m%d%H%M')) 162 | values = list() 163 | for line in f: 164 | if 'iLat' in line: 165 | OWI['datetime'].append(datetime.strptime(line[68:80], '%Y%m%d%H%M')) 166 | OWI['values'].append( 167 | np.asarray(values).reshape((OWI['iLon'], OWI['iLat'])) 168 | ) 169 | values = list() 170 | else: 171 | for n in [line[i : i + 10] for i in range(0, 80, 10)]: 172 | values.append(float(n)) 173 | return OWI 174 | 175 | @staticmethod 176 | def __parse_fort22_w(file: PathLike) -> {str: Any}: 
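# layout assumed here mirrors __parse_fort22_p above: one OWI header line per snapshot, followed by the u-velocity values and then the v-velocity values for the iLon x iLat grid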
177 | with open(file, 'r') as f: 178 | OWI = dict() 179 | OWI['datetime'] = list() 180 | OWI['values'] = dict() 181 | OWI['values']['u'] = list() 182 | OWI['values']['v'] = list() 183 | OWI['header'] = f.readline().strip('\n') 184 | line = f.readline() 185 | OWI['iLat'] = int(line[6:9]) 186 | OWI['iLon'] = int(line[15:19]) 187 | OWI['DX'] = float(line[22:28]) 188 | OWI['DY'] = float(line[31:37]) 189 | OWI['SWLat'] = float(line[45:51]) 190 | OWI['SWLon'] = float(line[57:65]) 191 | OWI['datetime'].append(datetime.strptime(line[68:80], '%Y%m%d%H%M')) 192 | values_u = list() 193 | values_v = list() 194 | shape = (OWI['iLon'], OWI['iLat']) 195 | size = OWI['iLon'] * OWI['iLat'] 196 | for line in f: 197 | if 'iLat' in line: 198 | OWI['datetime'].append(datetime.strptime(line[68:80], '%Y%m%d%H%M')) 199 | OWI['values']['u'].append(np.asarray(values_u).reshape(shape)) 200 | OWI['values']['v'].append(np.asarray(values_v).reshape(shape)) 201 | values_u = list() 202 | values_v = list() 203 | else: 204 | for n in [line[i : i + 10] for i in range(0, 80, 10)]: 205 | if len(values_u) != size: 206 | values_u.append(float(n)) 207 | else: 208 | values_v.append(float(n)) 209 | return OWI 210 | -------------------------------------------------------------------------------- /adcircpy/mesh/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.mesh.mesh import AdcircMesh 2 | 3 | __all__ = ['AdcircMesh'] 4 | -------------------------------------------------------------------------------- /adcircpy/mesh/parsers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oceanmodeling/adcircpy/0eb84de6e743b4b6c8f2514dba07646b883ecc08/adcircpy/mesh/parsers/__init__.py -------------------------------------------------------------------------------- /adcircpy/mesh/parsers/sms2dm.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from enum import Enum 3 | import logging 4 | from os import PathLike 5 | import pathlib 6 | 7 | 8 | class MeshGeometryType(Enum): 9 | TRIANGLE = 'E3T' 10 | QUADRILATERAL = 'E4Q' 11 | HEXAGON = 'E6T' 12 | OCTAGON = 'E8Q' 13 | NONAGON = 'E9Q' 14 | 15 | 16 | def read(path): 17 | mesh = {} 18 | with open(pathlib.Path(path), 'r') as f: 19 | f.readline() 20 | 21 | for line in f.readlines(): 22 | line = line.split() 23 | 24 | geom_type = line[0] 25 | if geom_type not in mesh: 26 | mesh[geom_type] = {} 27 | 28 | if geom_type in ['E3T', 'E4Q']: 29 | mesh[geom_type].update({line[1]: line[2:]}) 30 | elif geom_type in ['ND']: 31 | mesh[geom_type].update( 32 | {line[1]: (list(map(float, line[2:-1])), float(line[-1]))} 33 | ) 34 | return mesh 35 | 36 | 37 | def write(mesh: {str: {str: (float, float)}}, path: PathLike, overwrite: bool = False): 38 | if not isinstance(path, pathlib.Path): 39 | path = pathlib.Path(path) 40 | 41 | triangles = mesh[MeshGeometryType.TRIANGLE.value] 42 | triangles.insert(0, 'type', MeshGeometryType.TRIANGLE.value) 43 | triangles.insert(1, 'id', triangles.index) 44 | quadrilaterals = mesh[MeshGeometryType.QUADRILATERAL.value] 45 | quadrilaterals.insert(0, 'type', MeshGeometryType.QUADRILATERAL.value) 46 | quadrilaterals.insert(1, 'id', quadrilaterals.index) 47 | nodes = mesh['ND'] 48 | nodes.insert(0, 'type', 'ND') 49 | nodes.insert(1, 'id', nodes.index) 50 | 51 | if 'boundaries' in mesh: 52 | boundaries = mesh['boundaries'] 53 | boundaries.insert(0, 'type', 'NS') 54 | boundaries.iloc[:, 
2:] *= -1 55 | else: 56 | boundaries = None 57 | 58 | def float_format(value: float): 59 | return f'{value:<.16E}' 60 | 61 | if overwrite or not path.exists(): 62 | with open(path, 'w') as f: 63 | f.write('MESH2D\n') 64 | 65 | if len(triangles) > 0: 66 | logging.debug('writing triangles') 67 | start_time = datetime.now() 68 | triangles.to_string(f, header=False, index=False, justify='left') 69 | f.write('\n') 70 | logging.debug(f'wrote triangles in {datetime.now() - start_time}') 71 | 72 | if len(quadrilaterals) > 0: 73 | logging.debug('writing quadrilaterals') 74 | start_time = datetime.now() 75 | quadrilaterals.to_string(f, header=False, index=False, justify='left') 76 | f.write('\n') 77 | logging.debug(f'wrote quadrilaterals in {datetime.now() - start_time}') 78 | 79 | logging.debug('writing nodes') 80 | start_time = datetime.now() 81 | nodes.to_string( 82 | f, header=False, index=False, justify='left', float_format=float_format 83 | ) 84 | f.write('\n') 85 | logging.debug(f'wrote nodes in {datetime.now() - start_time}') 86 | 87 | if boundaries in mesh: 88 | logging.debug('writing boundaries') 89 | start_time = datetime.now() 90 | boundaries.to_string(f, header=False, index=False, justify='left') 91 | f.write('\n') 92 | logging.debug(f'wrote boundaries in {datetime.now() - start_time}') 93 | 94 | return 0 # for unittests 95 | else: 96 | logging.debug(f'skipping existing file "{path}"') 97 | return 1 98 | -------------------------------------------------------------------------------- /adcircpy/outputs/__init__.py: -------------------------------------------------------------------------------- 1 | # from adcircpy.outputs._OutputFactory import _OutputFactory 2 | # from adcircpy.outputs.ElevationStationsTimeseries \ 3 | # import ElevationStationsTimeseries 4 | from adcircpy.outputs.fort61 import ElevationStations, Fort61 5 | from adcircpy.outputs.fort63 import Fort63 6 | from adcircpy.outputs.maxele import Maxele, MaximumElevationTimes 7 | 8 | # from AdcircPy.Outputs.HarmonicConstituentsElevationStations import \ 9 | # HarmonicConstituentsElevationStations 10 | 11 | __all__ = [ 12 | # '_OutputFactory', 13 | # 'ElevationStationsTimeseries', 14 | 'Maxele', 15 | 'MaximumElevationTimes', 16 | 'ElevationStations', 17 | 'Fort61', 18 | 'Fort63', 19 | # 'HarmonicConstituentsElevationStations' 20 | ] 21 | -------------------------------------------------------------------------------- /adcircpy/outputs/collection.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | 3 | # from collections.abc import Mapping 4 | import pathlib 5 | 6 | from adcircpy.outputs.maxele import Maxele 7 | 8 | 9 | class OutputCollection( 10 | # Mapping 11 | ): 12 | def __init__( 13 | self, 14 | fort61=None, 15 | fort62=None, 16 | fort63=None, 17 | fort64=None, 18 | maxele=None, 19 | maxvel=None, 20 | crs=None, 21 | ): 22 | self._crs = crs 23 | self._maxele = maxele 24 | 25 | def __iter__(self): 26 | for name in self.get_output_types(): 27 | yield self._container[name] 28 | 29 | def __len__(self): 30 | return len(self.get_output_types()) 31 | 32 | def get_output(self, name): 33 | return self._container[name] 34 | 35 | def get_output_types(self): 36 | return [_ for _ in self._container.keys() if _ is not None] 37 | 38 | def _certify_output_type(self, inst, obj): 39 | # TODO: should use _filetype attribute instead or create a Enum class 40 | if isinstance(inst, obj): 41 | return inst 42 | elif isinstance(inst, (str, pathlib.Path)): 43 | return obj(inst, 
crs=self.crs) 44 | 45 | @property 46 | def maxele(self): 47 | return self._maxele 48 | 49 | @property 50 | def crs(self): 51 | return self._crs 52 | 53 | @property 54 | @lru_cache(maxsize=None) 55 | def _container(self): 56 | return {} 57 | 58 | @property 59 | def _maxele(self): 60 | return self._container['maxele'] 61 | 62 | @property 63 | def _crs(self): 64 | return self.__crs 65 | 66 | @_maxele.setter 67 | def _maxele(self, maxele): 68 | self._container['maxele'] = self._certify_output_type(maxele, Maxele) 69 | 70 | @_crs.setter 71 | def _crs(self, crs): 72 | self.__crs = crs 73 | -------------------------------------------------------------------------------- /adcircpy/outputs/fort61.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | import pathlib 3 | import uuid 4 | 5 | from netCDF4 import Dataset 6 | 7 | """ 8 | This class could probably be improved by following the suggestions found 9 | https://stackoverflow.com/questions/4014621/a-python-class-that-acts-like-dict 10 | """ 11 | 12 | 13 | class ElevationStations: 14 | def __init__(self, path): 15 | self._path = path 16 | 17 | def __iter__(self): 18 | for key, item in self.stations.items(): 19 | yield key, item 20 | 21 | def __repr__(self): 22 | print(self.stations) 23 | 24 | def _certify_netcdf_stations_file(self, nc): 25 | msg = f'Input file {self.path} is not an ADCIRC stations output file ' 26 | msg += '(fort.61.nc).' 27 | assert 'station_name' in nc.variables and 'zeta' in nc.variables, msg 28 | 29 | def _init_netcdf_stations(self): 30 | stations = dict() 31 | for idx, name in enumerate(self.nc['station_name']): 32 | name = "".join([s.decode('UTF-8') for s in name]).strip(' ') 33 | if len(name) == 0 or name in stations.keys(): 34 | name = uuid.uuid4().hex[:8] 35 | stations[name] = dict() 36 | stations[name]['x'] = float(self.nc['x'][idx]) 37 | stations[name]['y'] = float(self.nc['y'][idx]) 38 | stations[name]['values'] = self.nc['zeta'][:, idx] 39 | self.__stations = stations 40 | 41 | def _init_netcdf_datetime(self): 42 | base_date = self.nc['time'].base_date.split('!')[0].strip(' ') 43 | for fmt in ('%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M'): 44 | try: 45 | base_date = datetime.strptime(base_date, fmt) 46 | break 47 | except ValueError: 48 | pass 49 | if isinstance(base_date, str): 50 | msg = f'Could not parse input date {base_date}. ' 51 | msg += "Known formats are '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M'." 52 | raise IOError(msg) 53 | self.__datetime = [base_date + timedelta(seconds=float(s)) for s in self.nc['time']] 54 | 55 | def _init_ascii(self): 56 | msg = 'ASCII fort.61 files have not yet been implemented.' 
57 | raise NotImplementedError(msg) 58 | 59 | @property 60 | def path(self): 61 | return self._path 62 | 63 | @property 64 | def stations(self): 65 | return self._stations 66 | 67 | @property 68 | def datetime(self): 69 | return self._datetime 70 | 71 | @property 72 | def nc(self): 73 | return self._nc 74 | 75 | @property 76 | def _path(self): 77 | return self.__path 78 | 79 | @property 80 | def _stations(self): 81 | try: 82 | return self.__stations 83 | except AttributeError: 84 | if self.nc: 85 | self._init_netcdf_stations() 86 | else: 87 | self._init_ascii() 88 | return self.__stations 89 | 90 | @property 91 | def _nc(self): 92 | try: 93 | return self.__nc 94 | except AttributeError: 95 | try: 96 | nc = Dataset(self.path) 97 | except OSError: 98 | nc = False 99 | if nc: 100 | self._certify_netcdf_stations_file(nc) 101 | self.__nc = nc 102 | return self.__nc 103 | 104 | @property 105 | def _datetime(self): 106 | try: 107 | return self.__datetime 108 | except AttributeError: 109 | if self.nc: 110 | self._init_netcdf_datetime() 111 | else: 112 | self._init_ascii() 113 | return self.__datetime 114 | 115 | @_path.setter 116 | def _path(self, path): 117 | path = pathlib.Path(path) 118 | self.__path = path 119 | 120 | 121 | # alias 122 | Fort61 = ElevationStations 123 | -------------------------------------------------------------------------------- /adcircpy/outputs/fort63.py: -------------------------------------------------------------------------------- 1 | from adcircpy.outputs.base import ScalarSurfaceOutputTimeseries 2 | 3 | 4 | class Fort63(ScalarSurfaceOutputTimeseries): 5 | _filetype = 'fort.63' 6 | _cmap = 'jet' 7 | _levels = 256 8 | -------------------------------------------------------------------------------- /adcircpy/outputs/maxele.py: -------------------------------------------------------------------------------- 1 | from adcircpy.outputs.base import SurfaceOutput 2 | 3 | 4 | class Maxele(SurfaceOutput): 5 | _filetype = 'maxele' 6 | 7 | 8 | class MaximumElevationTimes(SurfaceOutput): 9 | _filetype = 'time_of_maxele' 10 | 11 | 12 | # import numpy as np 13 | # import pathlib 14 | # from netCDF4 import Dataset 15 | # import matplotlib.pyplot as plt 16 | # from adcircpy.mesh import grd 17 | # from adcircpy.mesh.base import _EuclideanMesh2D 18 | 19 | 20 | # class Maxele(_EuclideanMesh2D): 21 | 22 | # def __init__( 23 | # self, 24 | # coords, 25 | # triangles, 26 | # zeta_max, 27 | # time_of_zeta_max=None, 28 | # description='maxele', 29 | # crs=None, 30 | # ): 31 | # super().__init__(coords, triangles=triangles, values=zeta_max) 32 | # self._zeta_max = np.ma.masked_equal(super().values, 99999.0) 33 | # self._time_of_zeta_max = time_of_zeta_max 34 | 35 | # def make_plot( 36 | # self, 37 | # axes=None, 38 | # vmin=None, 39 | # vmax=None, 40 | # cmap='jet', 41 | # levels=256, 42 | # show=False, 43 | # title=None, 44 | # figsize=None, 45 | # extent=None, 46 | # cbar_label=None, 47 | # **kwargs 48 | # ): 49 | # if axes is None: 50 | # axes = plt.figure(figsize=figsize).add_subplot(111) 51 | # if vmin is None: 52 | # vmin = np.min(self.values) 53 | # if vmax is None: 54 | # vmax = np.max(self.values) 55 | # tri = self.triangulation 56 | # if np.any(self.values.mask): 57 | # tri_mask = np.any(self.values.mask[tri.triangles], axis=1) 58 | # tri.set_mask(tri_mask) 59 | # ax = axes.tricontourf( 60 | # tri, 61 | # self.values.data, 62 | # levels=levels, 63 | # cmap=cmap, 64 | # vmin=vmin, 65 | # vmax=vmax, 66 | # **kwargs 67 | # ) 68 | # plt.colorbar(ax, cmap=cmap) 69 | # 
axes.axis('scaled') 70 | # if extent is not None: 71 | # axes.axis(extent) 72 | # if show is True: 73 | # plt.show() 74 | # return axes 75 | 76 | # @classmethod 77 | # def open(cls, path, crs=None): 78 | # path = str(pathlib.Path(path).absolute()) 79 | # try: 80 | # return cls.from_netcdf(path, crs) 81 | # except OSError: 82 | # return cls.from_ascii(path, crs) 83 | 84 | # @classmethod 85 | # def from_netcdf(cls, path, crs=None): 86 | # nc = Dataset(path) 87 | # cls._certify_netcdf_maxele_file(nc) 88 | # coords = np.vstack([nc['x'][:].data, nc['y'][:].data]).T 89 | # triangles = nc['element'][:].data - 1 90 | # zeta_max = np.ma.masked_equal( 91 | # nc['zeta_max'][:], nc['zeta_max']._FillValue) 92 | # time_of_zeta_max = np.ma.masked_equal( 93 | # nc['time_of_zeta_max'][:], nc['time_of_zeta_max']._FillValue) 94 | # cls = cls( 95 | # coords, 96 | # triangles, 97 | # zeta_max, 98 | # time_of_zeta_max, 99 | # crs=crs 100 | # ) 101 | # cls._nc = nc 102 | # return cls 103 | 104 | # @classmethod 105 | # def from_ascii(cls, path, fort14, crs=None): 106 | # fort14 = cls.parse_gr3(fort14) 107 | # with open(path, 'r') as f: 108 | # line = f.readline() 109 | # line = f.readline().split() 110 | # NP = int(line[1]) 111 | # line = f.readline() 112 | # values = list() 113 | # for i in range(NP): 114 | # values.append(float(f.readline().split()[1])) 115 | # time_of_zeta_max = list() 116 | # for i in range(NP): 117 | # try: 118 | # time_of_zeta_max.append(float(f.readline().split()[1])) 119 | # except IndexError: 120 | # time_of_zeta_max.append(-99999.) 121 | # values = np.ma.masked_equal(values, -99999.) 122 | # time_of_zeta_max = np.ma.masked_equal(time_of_zeta_max, -99999.) 123 | # return cls( 124 | # np.vstack([fort14.pop('x'), fort14.pop('y')]).T, 125 | # fort14.pop('elements'), 126 | # values, 127 | # time_of_zeta_max, 128 | # crs) 129 | 130 | # @property 131 | # def values(self): 132 | # return self.zeta_max 133 | 134 | # @property 135 | # def zeta_max(self): 136 | # return self._zeta_max 137 | 138 | # @property 139 | # def time_of_zeta_max(self): 140 | # return self._time_of_zeta_max 141 | 142 | # @staticmethod 143 | # def _certify_netcdf_maxele_file(nc): 144 | # if ('zeta_max' not in nc.variables.keys() 145 | # and 'time_of_zeta_max' not in nc.variables.keys()): 146 | # raise Exception('Not a maxele file!') 147 | 148 | # @property 149 | # def _zeta_max(self): 150 | # return self.__zeta_max 151 | 152 | # @property 153 | # def _time_of_zeta_max(self): 154 | # return self.__time_of_zeta_max 155 | 156 | # @_zeta_max.setter 157 | # def _zeta_max(self, zeta_max): 158 | # self.__zeta_max = zeta_max 159 | 160 | # @_time_of_zeta_max.setter 161 | # def _time_of_zeta_max(self, time_of_zeta_max): 162 | # self.add_attribute('time_of_zeta_max') 163 | # self.set_attribute('time_of_zeta_max', time_of_zeta_max) 164 | -------------------------------------------------------------------------------- /adcircpy/plotting.py: -------------------------------------------------------------------------------- 1 | import io 2 | from os import PathLike 3 | import pathlib 4 | from typing import Union 5 | import zipfile 6 | 7 | import appdirs 8 | import geopandas 9 | from matplotlib import pyplot 10 | from matplotlib.axes import Axes 11 | from matplotlib import colormaps 12 | import numpy 13 | import requests 14 | from shapely.geometry import MultiPoint, MultiPolygon, Polygon 15 | from shapely.geometry import shape as shapely_shape 16 | 17 | 18 | def plot_polygon( 19 | geometry: Union[Polygon, MultiPolygon], 20 | fill: bool = 
False, 21 | axis: Axes = None, 22 | show: bool = False, 23 | **kwargs, 24 | ) -> Axes: 25 | """ 26 | Plot the given polygon. 27 | 28 | :param geometry: Shapely polygon (or multipolygon) 29 | :param axis: `pyplot` axis to plot to 30 | :param show: whether to show the plot 31 | """ 32 | 33 | if axis is None: 34 | axis = pyplot.gca() 35 | 36 | if 'c' not in kwargs: 37 | try: 38 | color = next(axis._get_lines.color_cycle) 39 | except AttributeError: 40 | color = 'r' 41 | kwargs['c'] = color 42 | 43 | if isinstance(geometry, dict): 44 | geometry = shapely_shape(geometry) 45 | 46 | if type(geometry) is Polygon: 47 | if fill: 48 | axis.fill(*geometry.exterior.xy, **kwargs) 49 | kwargs['c'] = 'w' 50 | for interior in geometry.interiors: 51 | axis.fill(*interior.xy, **kwargs) 52 | else: 53 | axis.plot(*geometry.exterior.xy, **kwargs) 54 | for interior in geometry.interiors: 55 | axis.plot(*interior.xy, **kwargs) 56 | elif type(geometry) is MultiPolygon: 57 | for polygon in geometry: 58 | plot_polygon(geometry=polygon, axis=axis, fill=fill, show=False, **kwargs) 59 | else: 60 | if fill: 61 | axis.fill(*geometry.xy, **kwargs) 62 | else: 63 | axis.plot(*geometry.xy, **kwargs) 64 | 65 | if show: 66 | pyplot.show() 67 | 68 | return axis 69 | 70 | 71 | def plot_polygons( 72 | geometries: [Polygon], 73 | colors: [str] = None, 74 | fill: bool = False, 75 | axis: Axes = None, 76 | show: bool = False, 77 | **kwargs, 78 | ) -> Axes: 79 | """ 80 | Plot the given polygons using the given colors. 81 | 82 | :param geometries: list of shapely polygons or multipolygons 83 | :param colors: colors to plot each region 84 | :param axis: `pyplot` axis to plot to 85 | :param show: whether to show the plot 86 | """ 87 | 88 | if axis is None: 89 | axis = pyplot.gca() 90 | 91 | if 'c' in kwargs: 92 | colors = [kwargs['c'] for _ in range(len(geometries))] 93 | elif colors is None: 94 | colors = [ 95 | colormaps['gist_rainbow'](color_index / len(geometries)) 96 | for color_index in range(len(geometries)) 97 | ] 98 | 99 | for geometry_index, geometry in enumerate(geometries): 100 | kwargs['c'] = colors[geometry_index] 101 | plot_polygon(geometry=geometry, fill=fill, axis=axis, **kwargs) 102 | 103 | if show: 104 | pyplot.show() 105 | 106 | return axis 107 | 108 | 109 | def plot_bounding_box( 110 | sw: (float, float), ne: (float, float), axis: Axes = None, show: bool = False, **kwargs, 111 | ) -> Axes: 112 | """ 113 | Plot the bounding box of the given extent. 114 | 115 | :param sw: XY coordinates of southwest corner 116 | :param ne: XY coordinates of northeast corner 117 | :param axis: `pyplot` axis to plot to 118 | :param show: whether to show the plot 119 | """ 120 | 121 | if axis is None: 122 | axis = pyplot.gca() 123 | 124 | corner_points = numpy.array([sw, (ne[0], sw[1]), ne, (sw[0], ne[1]), sw]) 125 | 126 | axis.plot(corner_points[:, 0], corner_points[:, 1], **kwargs) 127 | 128 | if show: 129 | pyplot.show() 130 | 131 | return axis 132 | 133 | 134 | def plot_points( 135 | points: Union[numpy.array, MultiPoint], 136 | index: int = 0, 137 | axis: Axes = None, 138 | show: bool = False, 139 | **kwargs, 140 | ) -> Axes: 141 | """ 142 | Create a scatter plot of the given points. 
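If the points array has more than two columns, column `index + 2` colors the markers unless `c` is passed explicitly.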
143 | 144 | :param points: N x M array of points 145 | :param index: zero-based index of vector layer to read 146 | :param axis: `pyplot` axis to plot to 147 | :param show: whether to show the plot 148 | """ 149 | 150 | if type(points) is MultiPoint: 151 | points = numpy.squeeze(numpy.stack((point._get_coords() for point in points), axis=0)) 152 | 153 | if axis is None: 154 | axis = pyplot.gca() 155 | 156 | if 'c' not in kwargs and points.shape[1] > 2: 157 | kwargs['c'] = points[:, index + 2] 158 | 159 | if 's' not in kwargs: 160 | kwargs['s'] = 2 161 | 162 | axis.scatter(points[:, 0], points[:, 1], **kwargs) 163 | 164 | if show: 165 | pyplot.show() 166 | 167 | return axis 168 | 169 | 170 | def download_coastline(overwrite: bool = False) -> pathlib.Path: 171 | data_directory = pathlib.Path(appdirs.user_data_dir('ne_coastline')) 172 | if not data_directory.exists(): 173 | data_directory.mkdir(exist_ok=True, parents=True) 174 | 175 | coastline_filename = data_directory / 'ne_110m_coastline.shp' 176 | 177 | if not coastline_filename.exists() or overwrite: 178 | # download and save if not present 179 | url = 'http://naciscdn.org/naturalearth/110m/physical/ne_110m_coastline.zip' 180 | response = requests.get(url, stream=True) 181 | with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file: 182 | for member_filename in zip_file.namelist(): 183 | file_data = zip_file.read(member_filename) 184 | with open(data_directory / member_filename, 'wb') as output_file: 185 | output_file.write(file_data) 186 | assert coastline_filename.exists(), 'coastline file not downloaded' 187 | 188 | return coastline_filename 189 | 190 | 191 | def plot_coastline(axis: Axes = None, show: bool = False, save_filename: PathLike = None): 192 | if axis is None: 193 | figure = pyplot.figure() 194 | axis = figure.add_subplot(1, 1, 1) 195 | 196 | coastline_filename = download_coastline() 197 | dataframe = geopandas.read_file(coastline_filename) 198 | dataframe.plot(ax=axis) 199 | 200 | if save_filename is not None: 201 | pyplot.savefig(save_filename) 202 | 203 | if show: 204 | pyplot.show() 205 | -------------------------------------------------------------------------------- /adcircpy/server/__init__.py: -------------------------------------------------------------------------------- 1 | from adcircpy.server.slurm_config import SlurmConfig 2 | from adcircpy.server.ssh_config import SSHConfig 3 | 4 | __all__ = [ 5 | 'SlurmConfig', 6 | 'SSHConfig', 7 | ] 8 | -------------------------------------------------------------------------------- /adcircpy/server/base_config.py: -------------------------------------------------------------------------------- 1 | class BaseServerConfig: 2 | pass 3 | -------------------------------------------------------------------------------- /adcircpy/server/slurm_config.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | import uuid 3 | 4 | from adcircpy.server.base_config import BaseServerConfig 5 | 6 | 7 | class SlurmConfig(BaseServerConfig): 8 | """ 9 | Object instance of a Slurm shell script (`*.job`). 
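The job script header is assembled from the `#SBATCH` directives (account, task count, wall time, partition, etc.) passed to the constructor.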
10 | """ 11 | 12 | def __init__( 13 | self, 14 | account: str, 15 | ntasks: int, 16 | walltime: timedelta, 17 | partition: str = None, 18 | filename: str = 'slurm.job', 19 | run_directory: str = '.', 20 | run_name: str = None, 21 | mail_type: str = None, 22 | mail_user: str = None, 23 | log_filename: str = None, 24 | modules: [str] = None, 25 | path_prefix: str = None, 26 | extra_commands: [str] = None, 27 | launcher: str = 'srun', 28 | nodes: int = None, 29 | ): 30 | """ 31 | Instantiate a new Slurm shell script (`*.job`). 32 | 33 | :param account: Slurm account name 34 | :param ntasks: number of total tasks for Slurm to run 35 | :param run_name: Slurm run name 36 | :param partition: partition to run on 37 | :param walltime: time delta 38 | :param driver_script_filename: file path to the driver shell script 39 | :param run_directory: directory to run in 40 | :param mail_type: email type 41 | :param mail_user: email address 42 | :param log_filename: file path to output log file 43 | :param modules: list of file paths to modules to load 44 | :param path_prefix: file path to prepend to the PATH 45 | :param extra_commands: list of extra shell commands to insert into script 46 | :param launcher: command to start processes on target system (`srun`, `ibrun`, etc.) 47 | :param nodes: number of total nodes 48 | """ 49 | self._account = account 50 | self._slurm_ntasks = ntasks 51 | self._run_name = run_name 52 | self._partition = partition 53 | self._walltime = walltime 54 | self._filename = filename 55 | self._run_directory = run_directory 56 | self._mail_type = mail_type 57 | self._mail_user = mail_user 58 | self._log_filename = log_filename 59 | self._modules = modules 60 | self._path_prefix = path_prefix 61 | self._extra_commands = extra_commands 62 | self._launcher = launcher 63 | self._nodes = nodes 64 | 65 | @property 66 | def nprocs(self): 67 | return self._slurm_ntasks 68 | 69 | @property 70 | def _walltime(self): 71 | return self.__walltime 72 | 73 | @_walltime.setter 74 | def _walltime(self, walltime): 75 | hours, remainder = divmod(walltime, timedelta(hours=1)) 76 | minutes, remainder = divmod(remainder, timedelta(minutes=1)) 77 | seconds = round(remainder / timedelta(seconds=1)) 78 | self.__walltime = f'{hours:02}:{minutes:02}:{seconds:02}' 79 | 80 | @property 81 | def _filename(self): 82 | return self.__filename 83 | 84 | @_filename.setter 85 | def _filename(self, filename): 86 | if filename is None: 87 | filename = 'slurm.job' 88 | self.__filename = filename 89 | 90 | @property 91 | def _run_name(self): 92 | return self.__run_name 93 | 94 | @_run_name.setter 95 | def _run_name(self, run_name): 96 | if run_name is None: 97 | run_name = uuid.uuid4().hex 98 | self.__run_name = run_name 99 | 100 | @property 101 | def _run_directory(self): 102 | return self.__run_directory 103 | 104 | @_run_directory.setter 105 | def _run_directory(self, run_directory): 106 | if run_directory is None: 107 | run_directory = '.' 
108 | self.__run_directory = run_directory 109 | 110 | @property 111 | def _log_filename(self): 112 | return self.__log_filename 113 | 114 | @_log_filename.setter 115 | def _log_filename(self, log_filename): 116 | if log_filename is None: 117 | log_filename = 'slurm.log' 118 | self.__log_filename = log_filename 119 | 120 | @property 121 | def _prefix(self): 122 | f = f'#SBATCH -D {self._run_directory}\n' f'#SBATCH -J {self._run_name}\n' 123 | 124 | if self._account is not None: 125 | f += f'#SBATCH -A {self._account}\n' 126 | if self._mail_type is not None: 127 | f += f'#SBATCH --mail-type={self._mail_type}\n' 128 | if self._mail_user is not None: 129 | f += f'#SBATCH --mail-user={self._mail_user}\n' 130 | if self._log_filename is not None: 131 | f += f'#SBATCH --output={self._log_filename}\n' 132 | 133 | f += f'#SBATCH -n {self._slurm_ntasks}\n' 134 | if self._nodes is not None: 135 | f += f'#SBATCH -N {self._nodes}\n' 136 | 137 | f += f'#SBATCH --time={self._walltime}\n' 138 | 139 | if self._partition is not None: 140 | f += f'#SBATCH --partition={self._partition}\n' 141 | 142 | f += '\nulimit -s unlimited\nset -e\n' 143 | 144 | if self._modules is not None: 145 | f += f'\n' f'module load {" ".join(module for module in self._modules)}\n' 146 | 147 | if self._path_prefix is not None: 148 | f += f'\n' f'PATH={self._path_prefix}:$PATH\n' 149 | 150 | if self._extra_commands is not None: 151 | f += '\n' 152 | for command in self._extra_commands: 153 | f += f'{command}\n' 154 | 155 | return f 156 | -------------------------------------------------------------------------------- /adcircpy/utilities.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from os import PathLike 4 | from pathlib import Path 5 | import sys 6 | import tarfile 7 | 8 | import pooch 9 | 10 | 11 | def download_mesh( 12 | url: str, directory: PathLike, known_hash: str = None, overwrite: bool = False 13 | ): 14 | if not isinstance(directory, Path): 15 | directory = Path(directory) 16 | if not directory.exists(): 17 | directory.mkdir(parents=True, exist_ok=True) 18 | 19 | if not (directory / 'fort.14').exists() or overwrite: 20 | logging.info(f'downloading mesh files to {directory}') 21 | extract_download( 22 | url, directory, ['fort.13', 'fort.14'], known_hash=known_hash, overwrite=overwrite 23 | ) 24 | 25 | return directory 26 | 27 | 28 | def extract_download( 29 | url: str, 30 | directory: PathLike, 31 | filenames: [str] = None, 32 | known_hash: str = None, 33 | overwrite: bool = False, 34 | ): 35 | if not isinstance(directory, Path): 36 | directory = Path(directory) 37 | 38 | if filenames is None: 39 | filenames = [] 40 | 41 | if not directory.exists(): 42 | directory.mkdir(parents=True, exist_ok=True) 43 | 44 | temporary_filename = directory / 'temp.tar.gz' 45 | logging.debug(f'downloading {url} -> {temporary_filename}') 46 | temporary_filename = pooch.retrieve(url, known_hash=known_hash, fname=temporary_filename) 47 | logging.debug(f'extracting {temporary_filename} -> {directory}') 48 | with tarfile.open(temporary_filename) as local_file: 49 | if len(filenames) > 0: 50 | for filename in filenames: 51 | if filename in local_file.getnames(): 52 | path = directory / filename 53 | if not path.exists() or overwrite: 54 | if path.exists(): 55 | os.remove(path) 56 | local_file.extract(filename, directory) 57 | else: 58 | local_file.extractall(directory) 59 | 60 | 61 | def get_logger( 62 | name: str, 63 | log_filename: PathLike = None, 64 | file_level: int = 
None, 65 | console_level: int = None, 66 | log_format: str = None, 67 | ) -> logging.Logger: 68 | if file_level is None: 69 | file_level = logging.DEBUG 70 | if console_level is None: 71 | console_level = logging.INFO 72 | logger = logging.getLogger(name) 73 | 74 | # check if logger is already configured 75 | if logger.level == logging.NOTSET and len(logger.handlers) == 0: 76 | # check if logger has a parent 77 | if '.' in name: 78 | if isinstance(logger.parent, logging.RootLogger): 79 | for existing_console_handler in [ 80 | handler 81 | for handler in logger.parent.handlers 82 | if not isinstance(handler, logging.FileHandler) 83 | ]: 84 | logger.parent.removeHandler(existing_console_handler) 85 | logger.parent = get_logger(name.rsplit('.', 1)[0]) 86 | else: 87 | # otherwise create a new split-console logger 88 | if console_level != logging.NOTSET: 89 | for existing_console_handler in [ 90 | handler 91 | for handler in logger.handlers 92 | if not isinstance(handler, logging.FileHandler) 93 | ]: 94 | logger.removeHandler(existing_console_handler) 95 | 96 | console_output = logging.StreamHandler(sys.stdout) 97 | console_output.setLevel(console_level) 98 | logger.addHandler(console_output) 99 | 100 | if log_filename is not None: 101 | file_handler = logging.FileHandler(log_filename) 102 | file_handler.setLevel(file_level) 103 | for existing_file_handler in [ 104 | handler for handler in logger.handlers if isinstance(handler, logging.FileHandler) 105 | ]: 106 | logger.removeHandler(existing_file_handler) 107 | logger.addHandler(file_handler) 108 | 109 | if log_format is None: 110 | log_format = '[%(asctime)s] %(name)-15s %(levelname)-8s: %(message)s' 111 | log_formatter = logging.Formatter(log_format) 112 | for handler in logger.handlers: 113 | handler.setFormatter(log_formatter) 114 | 115 | return logger 116 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/abstract.rst: -------------------------------------------------------------------------------- 1 | Abstract 2 | ======== 3 | 4 | The Advanced Circulation Model (ADCIRC) :footcite:p:`Luettich1992` is a Fortran program used for modeling ocean circulation due to tides, surface waves and atmospheric forcings. 5 | However, the input formats and configuration are inflexible and not straight forward for operational implementation, making rapid iteration of model testing, ensemble configuration, and model coupling complicated. 6 | Here, we introduce a flexible abstraction of model inputs and outputs written in Python, called ADCIRCpy :footcite:p:`JaimeCalzadaNOAA2021`, that provides a simpler user interface for automatically generating ADCIRC configuration to a variety of inputs and model scenarios. 7 | This documentation outlines 1. the needs for such an abstraction, 2. the peculiarities and challenges with the ADCIRC model that necessitate custom logic, and 3. methodologies for generalizing user input in such a way as to make generating model configurations consistent, fast, and efficient. 8 | 9 | `Python, ADCIRC, configuration generation, circulation modeling, user interface` 10 | 11 | .. footbibliography:: 12 | -------------------------------------------------------------------------------- /docs/source/acknowledgments.rst: -------------------------------------------------------------------------------- 1 | Acknowledgments 2 | =============== 3 | 4 | The authors would like to express their acknowledgment to Dr. Joannes Westerink for motivating this work, and to all the ADCIRC developers who have worked to make the ADCIRC software possible. 5 | -------------------------------------------------------------------------------- /docs/source/basic_usage.rst: -------------------------------------------------------------------------------- 1 | Basic Usage 2 | =========== 3 | 4 | `Example 1`_ (below) illustrates usage of the ADCIRCpy Python API to generate a "best track" parametric wind configuration for running ADCIRC on hurricane Sandy (2012). 5 | 6 | .. _Example 1: 7 | 8 | .. code-block:: python 9 | 10 | #!/usr/bin/env python 11 | 12 | from datetime import timedelta 13 | 14 | from adcircpy import AdcircMesh, AdcircRun, Tides 15 | from adcircpy.forcing.winds import BestTrackForcing 16 | from adcircpy.server import SlurmConfig 17 | 18 | # load an ADCIRC mesh grid from a `fort.14` file to a new mesh object 19 | mesh = AdcircMesh.open('fort.14', crs='epsg:4326') 20 | 21 | # add nodal attributes from a `fort.13` file to the mesh object 22 | mesh.import_nodal_attributes('fort.13') 23 | 24 | # create a tidal forcing object, using all constituents 25 | tidal_forcing = Tides() 26 | tidal_forcing.use_all() 27 | 28 | # add data from the tidal forcing object to the mesh object 29 | mesh.add_forcing(tidal_forcing) 30 | 31 | # create a wind forcing object for Hurricane Sandy (2012) 32 | wind_forcing = BestTrackForcing('Sandy2012') 33 | 34 | # add wind forcing data to the mesh object 35 | mesh.add_forcing(wind_forcing) 36 | 37 | # create a Slurm (HPC job manager) configuration object. 
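# (the account, partition, and e-mail values below are placeholders; substitute the settings for your own cluster)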
38 | slurm = SlurmConfig( 39 | account='account', 40 | ntasks=1000, 41 | run_name='ADCIRCpy documentation example', 42 | partition='partition', 43 | walltime=timedelta(hours=8), 44 | mail_type='all', 45 | mail_user='example@email.gov', 46 | log_filename='example.log', 47 | modules=['intel/2020', 'impi/2020', 'netcdf/4.7.2-parallel'], 48 | path_prefix='$HOME/adcirc/build', 49 | ) 50 | 51 | # create an ADCIRC run driver object 52 | driver = AdcircRun( 53 | mesh=mesh, 54 | server_config=slurm, 55 | spinup_time=timedelta(days=15), 56 | ) 57 | 58 | # write configuration files to the specified directory 59 | driver.write(output_directory="./model_inputs") 60 | 61 | The set of files generated by this configuration (``fort.14``, ``fort.15.coldstart``, ``fort.15.hotstart``, ``fort.22``, and ``slurm.job``) represents the minimum working example for an ADCIRC run using parametric winds. To submit this configuration to the Slurm job manager, run the following command in a shell with access to the ADCIRC binaries: 62 | 63 | .. code-block:: shell 64 | 65 | sbatch slurm.job 66 | 67 | Note that this setup generates model inputs based on default values, which are not necessarily optimal. For this reason, it is advisable that users check the outputs and introduce optimizations by modifying the parameters given to the Python API. For example, the user might want to check that the auto-computed timestep matches expectations from experience. Instead of manually modifying the ``fort.15`` file, the user can use the ADCIRCpy API to customize and optimize the configuration. If, for example, the model requires a specific timestep of two seconds, it suffices to execute the following statement: 68 | 69 | .. code-block:: python 70 | 71 | driver.timestep = 2 72 | 73 | before :code:`driver.write()`. Most of the ``fort.15`` options can be overridden directly by the user, with the exception of a small set of parameters that should be considered "private" in the context of the ``fort.15`` file. The following snippet combines several such customizations: 74 | 75 | .. code-block:: python 76 | 77 | # Modify timestep to 2 seconds. 78 | driver.timestep = 2. 79 | 80 | # Add a constant Manning's N coefficient field to the mesh 81 | mesh.mannings_n_at_sea_floor = mesh.coords.shape[0]*[0.025] 82 | 83 | # generate TAU0 factors 84 | mesh.generate_tau0() 85 | 86 | # Write new model configuration to disk. 87 | driver.write("model_inputs_modified", overwrite=True) 88 | 89 | After these modifications, the resulting directory contains ``fort.13``, ``fort.14``, ``fort.15.coldstart``, ``fort.15.hotstart``, ``fort.22``, and ``slurm.job``. The new ``fort.13`` includes the newly-added Manning's N and :code:`TAU0` factors. 90 | -------------------------------------------------------------------------------- /docs/source/cli.rst: -------------------------------------------------------------------------------- 1 | CLI Commands 2 | ============ 3 | 4 | The :code:`adcircpy` command line interface (CLI) provides commands for producing forecasts, tidal-only hindcasts, and "best track" hindcasts using parametric winds suitable for storm surge modeling, enabling rapid iteration and debugging. A single command generates sanitized input files through Python code, customized to the user's model scenario. 5 | 6 | ``tide_gen`` 7 | ------------ 8 | The :code:`tide_gen` command generates the tidal forcing table required in the ``fort.15`` file.
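The same tidal forcing can also be assembled directly through the Python API; a minimal sketch (using the classes shown in the package examples, with a placeholder mesh path) might look like:

.. code-block:: python

    from adcircpy import AdcircMesh, Tides

    # placeholder path; any valid ADCIRC mesh file will do
    mesh = AdcircMesh.open('fort.14', crs='epsg:4326')

    # select tidal constituents (individual constituents can be chosen
    # with use_constituent('M2'), etc.)
    tidal_forcing = Tides()
    tidal_forcing.use_all()

    # attach the tidal forcing to the mesh
    mesh.add_forcing(tidal_forcing)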
The generated table is suitable for pasting into a target ``fort.15`` file. Internally, this command calls the same functions invoked when generating a full model configuration, so it can be used to check that the tidal forcing output looks reasonable before generating the complete set of model inputs. 9 | 10 | .. code-block:: shell 11 | 12 | tide_gen /path/to/your/fort.14 '2021-02-26T00:00:00' 15 --mesh-crs='epsg:4326' 13 | 14 | .. program-output:: tide_gen -h 15 | 16 | ``tidal_run`` 17 | ------------- 18 | The :code:`tidal_run` entrypoint generates the necessary set of input files for forecast and hindcast tidal-only runs with any mesh. Most of the options that :code:`tidal_run` takes are also valid for the other command line entry points, so becoming familiar with the options of :code:`tidal_run` helps in understanding the more advanced CLI entry points. A tidal-only run is typically the first step in mesh validation and stability checks, so an entry point that can generate ADCIRC input files for tidal-only runs is particularly useful early in mesh development. 19 | 20 | .. code-block:: shell 21 | 22 | tidal_run \ 23 | /path/to/fort.14 \ 24 | $(date +"%Y-%m-%dT%H:%M:%S") \ 25 | 15 \ 26 | --spinup-days=5 \ 27 | --tau0-gen \ 28 | --crs=EPSG:4326 \ 29 | --constituents=all \ 30 | --timestep=10. \ 31 | --stations-file=stations.txt \ 32 | --elev-stat=6. \ 33 | --generate-linear-mannings 34 | 35 | .. program-output:: tidal_run -h 36 | 37 | ``best_track_run`` 38 | ------------------ 39 | The :code:`best_track_run` entry point generates a full parametric wind run for ADCIRC from the command line. This entry point essentially encapsulates most of the :code:`adcircpy` functionality in a single command. The following is an example of how to invoke this functionality from the bash shell. 40 | 41 | .. code-block:: shell 42 | 43 | best_track_run \ 44 | fort.14 \ 45 | Ike2008 \ 46 | --spinup-days=15 \ 47 | --crs=EPSG:4326 \ 48 | --fort13=fort.13 \ 49 | --output-directory=Ike2008 \ 50 | --constituents=major \ 51 | --skip-run \ 52 | --generate-linear-mannings \ 53 | --tau0-gen \ 54 | --timestep=1.0 \ 55 | --elev=60. \ 56 | --stations-file=coops.txt \ 57 | --elev-stat=6. \ 58 | --binaries-prefix=.python_env/bin \ 59 | --overwrite \ 60 | --use-slurm \ 61 | --account=nosofs \ 62 | --slurm-ntasks=800 \ 63 | --partition=orion \ 64 | --walltime=8 \ 65 | --mail-type=all \ 66 | --mail-user=jaime.calzada@noaa.gov \ 67 | --module=intel/2020 \ 68 | --module=impi/2020 \ 69 | --module=netcdf/4.7.2-parallel \ 70 | --log-level=info 71 | 72 | .. program-output:: best_track_run -h 73 | 74 | ``best_track_file`` 75 | ------------------- 76 | The :code:`best_track_file` entry point generates an ``aswip``-ready "best track" file. This uses the :code:`adcircpy.forcing.winds.BestTrackForcing` class. 77 | 78 | .. code-block:: shell 79 | 80 | best_track_file Sandy2012 81 | 82 | .. program-output:: best_track_file -h 83 | 84 | ``fort63`` 85 | ---------- 86 | .. program-output:: fort63 -h 87 | 88 | ``plot_maxele`` 89 | --------------- 90 | .. program-output:: plot_maxele -h 91 | 92 | ``plot_fort61`` 93 | --------------- 94 | .. code-block:: shell 95 | 96 | plot_fort61 /path/to/fort.61.nc MSL --show --coops-only 97 | 98 | .. program-output:: plot_fort61 -h 99 | 100 | ``plot_mesh`` 101 | ------------- 102 | .. code-block:: shell 103 | 104 | plot_mesh /path/to/fort.14 --show-elements 105 | 106 | ..
program-output:: plot_mesh -h 107 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | from datetime import datetime 13 | import os 14 | from os import PathLike 15 | from pathlib import Path 16 | import subprocess 17 | import sys 18 | 19 | from dunamai import Version 20 | from importlib.metadata import metadata 21 | 22 | 23 | def repository_root(path: PathLike = None) -> Path: 24 | if path is None: 25 | path = __file__ 26 | if not isinstance(path, Path): 27 | path = Path(path) 28 | if path.is_file(): 29 | path = path.parent 30 | if '.git' in (child.name for child in path.iterdir()) or path == path.parent: 31 | return path 32 | else: 33 | return repository_root(path.parent) 34 | 35 | 36 | sys.path.insert(0, str(repository_root())) 37 | 38 | subprocess.run( 39 | f'{sys.executable} -m pip install -U pip', 40 | shell=True, 41 | stdout=subprocess.DEVNULL, 42 | stderr=subprocess.DEVNULL, 43 | ) 44 | 45 | # -- Project information ----------------------------------------------------- 46 | md = metadata('adcircpy') 47 | 48 | project = md['name'] 49 | author = md['author'] 50 | copyright = f'{datetime.now().year}, {author}' 51 | 52 | # The full version, including alpha/beta/rc tags 53 | try: 54 | release = Version.from_any_vcs().serialize() 55 | except RuntimeError: 56 | release = os.environ.get('VERSION') 57 | 58 | # -- General configuration --------------------------------------------------- 59 | 60 | autoclass_content = 'both' # include both class docstring and __init__ 61 | autodoc_default_options = { 62 | # Make sure that any autodoc declarations show the right members 63 | 'members': True, 64 | 'inherited-members': True, 65 | 'private-members': True, 66 | 'member-order': 'bysource', 67 | 'exclude-members': '__weakref__', 68 | } 69 | autosummary_generate = True # Make _autosummary files and include them 70 | napoleon_numpy_docstring = False # Force consistency, leave only Google 71 | napoleon_use_rtype = False # More legible 72 | 73 | # Add any Sphinx extension module names here, as strings. They can be 74 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 75 | # ones. 76 | extensions = [ 77 | 'sphinxcontrib.bibtex', 78 | 'sphinxcontrib.programoutput', 79 | # Need the autodoc and autosummary packages to generate our docs. 80 | 'sphinx.ext.autodoc', 81 | 'sphinx.ext.autosummary', 82 | # The Napoleon extension allows for nicer argument formatting. 83 | 'sphinx.ext.napoleon', 84 | 'm2r2', 85 | ] 86 | 87 | # Add any paths that contain templates here, relative to this directory. 88 | templates_path = ['_templates'] 89 | 90 | # List of patterns, relative to source directory, that match files and 91 | # directories to ignore when looking for source files. 92 | # This pattern also affects html_static_path and html_extra_path. 
93 | exclude_patterns = [] 94 | 95 | # -- Options for HTML output ------------------------------------------------- 96 | 97 | # The theme to use for HTML and HTML Help pages. See the documentation for 98 | # a list of builtin themes. 99 | # 100 | html_theme = 'sphinx_rtd_theme' 101 | 102 | # Add any paths that contain custom static files (such as style sheets) here, 103 | # relative to this directory. They are copied after the builtin static files, 104 | # so a file named "default.css" will overwrite the builtin "default.css". 105 | html_static_path = ['_static'] 106 | 107 | # -- Extension configuration ------------------------------------------------- 108 | source_suffix = ['.rst', '.md'] 109 | bibtex_bibfiles = ['references.bib'] 110 | -------------------------------------------------------------------------------- /docs/source/figures/Fort14_UML.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:de37394ac5f475658ffc7738ff5108d58a4f2d7c39dca62b5a704fc03e79d14e 3 | size 132435 4 | -------------------------------------------------------------------------------- /docs/source/figures/Grd_UML.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e3b1e2a831158103077610286e1c420d36f29e4b32a929b9fe77fb74d64bf829 3 | size 143448 4 | -------------------------------------------------------------------------------- /docs/source/figures/classes_adcircpy.fort15.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:6d112d395ea7b67ef00e384ece27133910635a3ff1b44622d9be62ad7101d776 3 | size 246154 4 | -------------------------------------------------------------------------------- /docs/source/figures/classes_adcircpy.mesh.AdcircMesh.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:da5ac0116ddba81c7910820411ac6bd08de84be9d6ebaeb7a1e64d0f8cc9d54c 3 | size 335976 4 | -------------------------------------------------------------------------------- /docs/source/figures/fort14_triplot_example.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c419955fcbde2b9c20f72dda85f9c88ea1ce863324f92efb4c0bbfea4c046fdc 3 | size 38055 4 | -------------------------------------------------------------------------------- /docs/source/figures/hsofs_mesh.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:713272729c04ea74763147b228ab5ad0754d3406fef5f8e4024e6bab0f3ffd6e 3 | size 715420 4 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. toctree:: 2 | :hidden: 3 | 4 | abstract 5 | 6 | .. mdinclude:: ../../README.md 7 | 8 | .. 
toctree:: 9 | :hidden: 10 | 11 | introduction 12 | basic_usage 13 | python_api 14 | cli 15 | acknowledgments 16 | references 17 | -------------------------------------------------------------------------------- /docs/source/introduction.rst: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | The Advanced Circulation Model (ADCIRC) :footcite:p:`Luettich1992` is widely used to model tides, storm surge-related coastal inundation, and general ocean circulation. While it is one of the most popular coastal prediction models among coastal modelers, ADCIRC’s input files are highly complex and require a substantial amount of rote memorization. For example, the main model configuration (``fort.15``) file contains several interdependent entries; changing the value of a single line may drastically alter the contents of the rest of the file and even the locations of other configuration entries. Although the vast majority of the configuration file inputs are described in detail on the ADCIRC website, the configuration file itself has a relatively high level of cyclomatic complexity. Considering that, for the most part, the canonical ADCIRC workflow intends for users to modify configuration parameters "on-the-fly" in order to debug potential modeling issues, this complexity increases the difficulty of keeping the configuration options consistent with each other for even the simplest changes. For example, when the user changes the modeled timestep :code:`DTDP` in the ``fort.15`` file (perhaps the most common step when debugging convergence issues), they must also manually recompute several additional variables, namely :code:`NSPOOLE`, :code:`NSPOOLV`, :code:`NSPOOLM`, :code:`NSPOOLC`, etc., and enter them into the configuration alongside the initial change. This is but one of many examples of cyclomatically dependent configuration options in the ``fort.15`` file. The potential for human error arising from this interdependency, in which parameters must change together in order to maintain a consistent configuration, is one of the main issues ADCIRCpy aims to alleviate through its API. 5 | 6 | Many ADCIRC input files (besides ``fort.15``) require pre-processing before ADCIRC can parse them. To name one example, the parametric wind fields that ADCIRC computes depend on obtaining the relevant "best track" data from the National Hurricane Center (NHC) FTP archives. NHC provides these files in a format unreadable by ADCIRC; they must be sanitized before ADCIRC is able to parse them correctly. Several ADCIRC users have developed scripts for processing these files, but most of these are not user-friendly. For example, the official ADCIRC Surge Guidance System (ASGS) is composed of a variety of scripts written in several languages. As such, it is not straightforward to install and use, particularly for custom workflows outside the scope of the design of the ASGS developers. To this end, ADCIRCpy provides an advantage, in that the sanitized file can be generated with a single command from the shell or through a Python script calling the API, allowing the processing to be customized to virtually any modeling scenario. 7 | 8 | ADCIRCpy provides another convenience when generating the spatially-varying :code:`TAU0` factor. In the conventional (non-ADCIRCpy) workflow, the user compiles and runs a separate Fortran application (``tau0_gen.f90`` :footcite:p:`Weiver2008`) to generate :code:`TAU0`.
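As a point of comparison, a minimal sketch of generating :code:`TAU0` through ADCIRCpy (based on the calls shown in the package examples, assuming a mesh loaded from a ``fort.14`` file at a placeholder path) is:

.. code-block:: python

    from adcircpy import AdcircMesh

    # placeholder path to an existing ADCIRC mesh file
    mesh = AdcircMesh.open('fort.14', crs='epsg:4326')

    # compute the spatially-varying TAU0 nodal attribute; the values are
    # included in the fort.13 written with the rest of the configuration
    mesh.generate_tau0()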
In contrast, ADCIRCpy is able to both generate this data and integrate the results into model inputs without relying on the user to manually compile or invoke external tools. Other examples of external tools requiring separate compilation and execution on a typical ADCIRC workflow include ``tide_fac.f90`` (TPXO interpolation), etc. ADCIRCpy offers "pythonic" equivalents to all of these tools within a single framework that does not require Fortran compilation. 9 | 10 | ADCIRCpy was built with users and operational applications in mind, as a framework exposing both a Python API and a command line interface. The interface provides several ways to interact with ADCIRC, depending on the use case. The command line options offer a quick way of generating tidal-only hindcasts and forecasts, as well as "best track" configurations using parametric winds suitable for storm surge hindcasts. ADCIRCpy is open-source, `hosted and maintained on GitHub `_ by the Office of Coast Survey of `NOAA National Ocean Service Coastal Marine Modeling Branch (CMMB) `_, and can be installed from the Python Package Index (PyPI) by running the following command on a system with Python and :code:`pip`: 11 | 12 | .. code-block:: shell 13 | 14 | pip install adcircpy 15 | 16 | .. footbibliography:: 17 | -------------------------------------------------------------------------------- /docs/source/references.rst: -------------------------------------------------------------------------------- 1 | References 2 | ========== 3 | 4 | .. bibliography:: references.bib 5 | :all: 6 | -------------------------------------------------------------------------------- /examples/example_1.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from pathlib import Path 3 | import shutil 4 | import warnings 5 | 6 | from adcircpy import AdcircMesh, AdcircRun, Tides 7 | from adcircpy.utilities import download_mesh 8 | 9 | DATA_DIRECTORY = Path(__file__).parent.absolute() / 'data' 10 | INPUT_DIRECTORY = DATA_DIRECTORY / 'input' / 'shinnecock' 11 | OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_1' 12 | 13 | MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock' 14 | 15 | download_mesh( 16 | url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1', 17 | directory=MESH_DIRECTORY, 18 | known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4', 19 | ) 20 | 21 | # open mesh file 22 | mesh = AdcircMesh.open(MESH_DIRECTORY / 'fort.14', crs=4326) 23 | 24 | # initialize tidal forcing and constituents 25 | tidal_forcing = Tides() 26 | tidal_forcing.use_constituent('M2') 27 | tidal_forcing.use_constituent('N2') 28 | tidal_forcing.use_constituent('S2') 29 | tidal_forcing.use_constituent('K1') 30 | tidal_forcing.use_constituent('O1') 31 | mesh.add_forcing(tidal_forcing) 32 | 33 | # set simulation dates 34 | duration = timedelta(days=5) 35 | start_date = datetime(2015, 12, 14) 36 | end_date = start_date + duration 37 | 38 | # instantiate driver object 39 | driver = AdcircRun(mesh, start_date, end_date) 40 | 41 | # request outputs 42 | driver.set_elevation_surface_output(sampling_rate=timedelta(minutes=30)) 43 | driver.set_velocity_surface_output(sampling_rate=timedelta(minutes=30)) 44 | 45 | # override default options so the resulting `fort.15` matches the original Shinnecock test case options 46 | driver.timestep = 6.0 47 | driver.DRAMP = 2.0 48 | driver.TOUTGE = 3.8 49 | driver.TOUTGV = 3.8 50 | driver.smagorinsky = False 51 | 
driver.horizontal_mixing_coefficient = 5.0 52 | driver.gwce_solution_scheme = 'semi-implicit-legacy' 53 | 54 | if shutil.which('padcirc') is not None: 55 | driver.run(OUTPUT_DIRECTORY, overwrite=True) 56 | elif shutil.which('adcirc') is not None: 57 | driver.run(OUTPUT_DIRECTORY, overwrite=True, nproc=1) 58 | else: 59 | warnings.warn( 60 | 'ADCIRC binaries were not found in PATH. ' 61 | 'ADCIRC will not run. Writing files to disk...' 62 | ) 63 | driver.write(OUTPUT_DIRECTORY, overwrite=True) 64 | -------------------------------------------------------------------------------- /examples/example_2.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from pathlib import Path 3 | import shutil 4 | import warnings 5 | 6 | import numpy 7 | 8 | from adcircpy import AdcircMesh, AdcircRun, Tides 9 | from adcircpy.utilities import download_mesh 10 | 11 | DATA_DIRECTORY = Path(__file__).parent.absolute() / 'data' 12 | INPUT_DIRECTORY = DATA_DIRECTORY / 'input' 13 | OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_2' 14 | 15 | MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock' 16 | 17 | download_mesh( 18 | url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1', 19 | directory=MESH_DIRECTORY, 20 | known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4', 21 | ) 22 | 23 | # open mesh file 24 | mesh = AdcircMesh.open(MESH_DIRECTORY / 'fort.14', crs=4326) 25 | 26 | # generate tau0 factor 27 | mesh.generate_tau0() 28 | 29 | # also add Manning's N to the domain (constant for this example) 30 | mesh.mannings_n_at_sea_floor = numpy.full(mesh.values.shape, 0.025) 31 | 32 | # initialize tidal forcing and constituents 33 | tidal_forcing = Tides() 34 | tidal_forcing.use_constituent('M2') 35 | tidal_forcing.use_constituent('N2') 36 | tidal_forcing.use_constituent('S2') 37 | tidal_forcing.use_constituent('K1') 38 | tidal_forcing.use_constituent('O1') 39 | mesh.add_forcing(tidal_forcing) 40 | 41 | # set simulation dates 42 | spinup_time = timedelta(days=2) 43 | duration = timedelta(days=3) 44 | start_date = datetime(2015, 12, 14) + spinup_time 45 | end_date = start_date + duration 46 | 47 | # instantiate driver object 48 | driver = AdcircRun(mesh, start_date, end_date, spinup_time) 49 | 50 | # request outputs 51 | driver.set_elevation_surface_output(sampling_rate=timedelta(minutes=30)) 52 | driver.set_velocity_surface_output(sampling_rate=timedelta(minutes=30)) 53 | 54 | # override default options 55 | driver.timestep = 4.0 56 | 57 | if shutil.which('padcirc') is not None: 58 | driver.run(OUTPUT_DIRECTORY, overwrite=True) 59 | elif shutil.which('adcirc') is not None: 60 | driver.run(OUTPUT_DIRECTORY, overwrite=True, nproc=1) 61 | else: 62 | warnings.warn( 63 | 'ADCIRC binaries were not found in PATH. ' 64 | 'ADCIRC will not run. Writing files to disk...' 
65 | ) 66 | driver.write(OUTPUT_DIRECTORY, overwrite=True) 67 | -------------------------------------------------------------------------------- /examples/example_3.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from pathlib import Path 3 | 4 | from adcircpy import AdcircMesh, AdcircRun, Tides 5 | from adcircpy.forcing.winds import BestTrackForcing 6 | from adcircpy.server import SlurmConfig 7 | from adcircpy.utilities import download_mesh 8 | 9 | DATA_DIRECTORY = Path(__file__).parent.absolute() / 'data' 10 | INPUT_DIRECTORY = DATA_DIRECTORY / 'input' 11 | OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_3' 12 | 13 | MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock' 14 | 15 | download_mesh( 16 | url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1', 17 | directory=MESH_DIRECTORY, 18 | known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4', 19 | ) 20 | 21 | # open mesh file 22 | mesh = AdcircMesh.open(MESH_DIRECTORY / 'fort.14', crs=4326) 23 | 24 | # initialize tidal forcing and constituents 25 | tidal_forcing = Tides() 26 | tidal_forcing.use_all() 27 | mesh.add_forcing(tidal_forcing) 28 | 29 | # initialize wind forcing 30 | wind_forcing = BestTrackForcing('Sandy2012') 31 | mesh.add_forcing(wind_forcing) 32 | 33 | # initialize Slurm configuration 34 | slurm = SlurmConfig( 35 | account='account', 36 | ntasks=1000, 37 | run_name='adcircpy/examples/example_3.py', 38 | partition='partition', 39 | walltime=timedelta(hours=8), 40 | mail_type='all', 41 | mail_user='example@email.gov', 42 | log_filename='example_3.log', 43 | modules=['intel/2020', 'impi/2020', 'netcdf/4.7.2-parallel'], 44 | path_prefix='$HOME/adcirc/build', 45 | ) 46 | 47 | # set simulation dates 48 | spinup_time = timedelta(days=15) 49 | duration = timedelta(days=3) 50 | start_date = datetime(2012, 10, 21, 18) 51 | end_date = start_date + duration 52 | 53 | # instantiate driver object 54 | driver = AdcircRun(mesh, start_date, end_date, spinup_time, server_config=slurm) 55 | 56 | # write driver state to disk 57 | driver.write(OUTPUT_DIRECTORY, overwrite=True) 58 | -------------------------------------------------------------------------------- /examples/example_4.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from pathlib import Path 3 | 4 | from adcircpy import AdcircMesh, AdcircRun, Tides 5 | from adcircpy.forcing.waves.ww3 import WaveWatch3DataForcing 6 | from adcircpy.forcing.winds.atmesh import AtmosphericMeshForcing 7 | from adcircpy.server import SlurmConfig 8 | from adcircpy.utilities import download_mesh 9 | 10 | DATA_DIRECTORY = Path(__file__).parent.absolute() / 'data' 11 | INPUT_DIRECTORY = DATA_DIRECTORY / 'input' 12 | OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' / 'example_4' 13 | 14 | MESH_DIRECTORY = INPUT_DIRECTORY / 'shinnecock' 15 | 16 | download_mesh( 17 | url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1', 18 | directory=MESH_DIRECTORY, 19 | known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4', 20 | ) 21 | 22 | # open mesh file 23 | mesh = AdcircMesh.open(MESH_DIRECTORY / 'fort.14', crs=4326) 24 | 25 | # initialize tidal forcing and constituents 26 | tidal_forcing = Tides() 27 | tidal_forcing.use_all() 28 | mesh.add_forcing(tidal_forcing) 29 | 30 | # initialize atmospheric mesh forcings (for NUOPC coupling) 31 | 
wind_forcing = AtmosphericMeshForcing( 32 | filename='Wind_HWRF_SANDY_Nov2018_ExtendedSmoothT.nc', nws=17, interval_seconds=3600, 33 | ) 34 | mesh.add_forcing(wind_forcing) 35 | 36 | # initialize wave mesh forcings (for NUOPC coupling) 37 | wave_forcing = WaveWatch3DataForcing( 38 | filename='ww3.HWRF.NOV2018.2012_sxy.nc', nrs=5, interval_seconds=3600, 39 | ) 40 | mesh.add_forcing(wave_forcing) 41 | 42 | # initialize Slurm configuration 43 | slurm = SlurmConfig( 44 | account='account', 45 | ntasks=1000, 46 | run_name='adcircpy/examples/example_4.py', 47 | partition='partition', 48 | walltime=timedelta(hours=8), 49 | mail_type='all', 50 | mail_user='example@email.gov', 51 | log_filename='example_4.log', 52 | modules=['intel/2020', 'impi/2020', 'netcdf/4.7.2-parallel'], 53 | path_prefix='$HOME/adcirc/build', 54 | ) 55 | 56 | # instantiate driver object 57 | driver = AdcircRun( 58 | mesh=mesh, 59 | start_date=datetime.now(), 60 | end_date=timedelta(days=7), 61 | spinup_time=timedelta(days=5), 62 | server_config=slurm, 63 | ) 64 | 65 | # write driver state to disk 66 | driver.write(OUTPUT_DIRECTORY, overwrite=True) 67 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = 'adcircpy' 3 | version = '0.0.0' 4 | description = 'Python package for working with ADCIRC input and output files' 5 | authors = [ 6 | 'Zach Burnett ', 7 | 'Jaime R Calzada ', 8 | 'Soroosh Mani ', 9 | ] 10 | license = 'GPL-3.0-or-later' 11 | readme = 'README.md' 12 | repository = 'https://github.com/noaa-ocs-modeling/adcircpy.git' 13 | documentation = 'https://adcircpy.readthedocs.io' 14 | 15 | [build-system] 16 | requires = [ 17 | 'poetry-core>=1.0.0', 18 | 'poetry-dynamic-versioning', 19 | ] 20 | build-backend = 'poetry.core.masonry.api' 21 | 22 | [tool.poetry-dynamic-versioning] 23 | enable = true 24 | 25 | [tool.poetry.dependencies] 26 | python = '^3.8, <3.12' 27 | appdirs = '*' 28 | dunamai = { version = '*', optional = true } 29 | geopandas = '*' 30 | haversine = '*' 31 | matplotlib = '*' 32 | netCDF4 = '*' 33 | numpy = '*' 34 | pandas = '*' 35 | paramiko = '*' 36 | pooch = '*' 37 | psutil = '*' 38 | pyproj = '>=2.6' 39 | requests = '*' 40 | scipy = '<=1.12' 41 | searvey = '*' 42 | shapely = '*' 43 | stormevents = '>=2.3.2' # tests results based on speed fix 44 | typepigeon = '<2' # newer versions require code update 45 | utm = '*' 46 | isort = { version = '*', optional = true } 47 | oitnb = { version = '*', optional = true } 48 | pytest = { version = '*', optional = true } 49 | pytest-cov = { version = '*', optional = true } 50 | pytest-mock = { version = '*', optional = true } 51 | pytest-socket = { version = '*', optional = true } 52 | pytest-xdist = { version = '*', optional = true } 53 | m2r2 = { version = '*', optional = true } 54 | sphinx = { version = '*', optional = true } 55 | sphinx-rtd-theme = { version = '*', optional = true } 56 | sphinxcontrib-programoutput = { version = '*', optional = true } 57 | sphinxcontrib-bibtex = { version = '*', optional = true } 58 | 59 | [tool.poetry.extras] 60 | testing = ['pytest', 'pytest-cov', 'pytest-mock', 'pytest-socket', 'pytest-xdist'] 61 | development = ['isort', 'oitnb'] 62 | documentation = ['dunamai', 'm2r2', 'sphinx', 'sphinx-rtd-theme', 'sphinxcontrib-programoutput', 'sphinxcontrib-bibtex'] 63 | 64 | [tool.poetry.scripts] 65 | tidal_run = 'adcircpy.cmd.tidal_run:main' 66 | best_track_run = 
'adcircpy.cmd.best_track_run:main' 67 | best_track_file = 'adcircpy.cmd.best_track_file:main' 68 | plot_mesh = 'adcircpy.cmd.plot_mesh:main' 69 | plot_maxele = 'adcircpy.cmd.plot_maxele:main' 70 | plot_fort61 = 'adcircpy.cmd.plot_fort61:main' 71 | fort63 = 'adcircpy.cmd.fort63:main' 72 | tide_gen = 'adcircpy.cmd.tide_gen:main' 73 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os import PathLike 3 | from pathlib import Path 4 | import re 5 | from typing import Dict, List 6 | 7 | import pytest 8 | 9 | from adcircpy.utilities import download_mesh 10 | 11 | DATA_DIRECTORY = Path(__file__).parent.absolute().resolve() / 'data' 12 | INPUT_DIRECTORY = DATA_DIRECTORY / 'input' 13 | OUTPUT_DIRECTORY = DATA_DIRECTORY / 'output' 14 | REFERENCE_DIRECTORY = DATA_DIRECTORY / 'reference' 15 | 16 | 17 | @pytest.fixture 18 | def shinnecock_mesh_directory(worker_id) -> Path: 19 | mesh_directory = INPUT_DIRECTORY / 'shinnecock' 20 | download_mesh( 21 | url='https://www.dropbox.com/s/1wk91r67cacf132/NetCDF_shinnecock_inlet.tar.bz2?dl=1', 22 | directory=mesh_directory, 23 | known_hash='99d764541983bfee60d4176af48ed803d427dea61243fa22d3f4003ebcec98f4', 24 | ) 25 | 26 | return mesh_directory 27 | 28 | 29 | def check_reference_directory( 30 | test_directory: PathLike, 31 | reference_directory: PathLike, 32 | skip_lines: Dict[str, List[int]] = None, 33 | ): 34 | if not isinstance(test_directory, Path): 35 | test_directory = Path(test_directory) 36 | if not isinstance(reference_directory, Path): 37 | reference_directory = Path(reference_directory) 38 | if skip_lines is None: 39 | skip_lines = {} 40 | 41 | for reference_filename in reference_directory.iterdir(): 42 | if reference_filename.is_dir(): 43 | check_reference_directory( 44 | test_directory / reference_filename.name, reference_filename, skip_lines 45 | ) 46 | else: 47 | test_filename = test_directory / reference_filename.name 48 | 49 | with open(test_filename) as test_file, open(reference_filename) as reference_file: 50 | test_lines = list(test_file.readlines()) 51 | reference_lines = list(reference_file.readlines()) 52 | 53 | lines_to_skip = set() 54 | for file_mask, line_indices in skip_lines.items(): 55 | if ( 56 | file_mask in str(test_filename) 57 | or re.match(file_mask, str(test_filename)) 58 | and len(test_lines) > 0 59 | ): 60 | try: 61 | lines_to_skip.update( 62 | line_index % len(test_lines) for line_index in line_indices 63 | ) 64 | except ZeroDivisionError: 65 | continue 66 | 67 | for line_index in sorted(lines_to_skip, reverse=True): 68 | del test_lines[line_index], reference_lines[line_index] 69 | 70 | cwd = Path.cwd() 71 | assert '\n'.join(test_lines) == '\n'.join( 72 | reference_lines 73 | ), f'"{os.path.relpath(test_filename, cwd)}" != "{os.path.relpath(reference_filename, cwd)}"' 74 | -------------------------------------------------------------------------------- /tests/data/.gitignore: -------------------------------------------------------------------------------- 1 | NetCDF_Shinnecock_Inlet 2 | -------------------------------------------------------------------------------- /tests/data/input/test_import_stations/stations_2.txt: -------------------------------------------------------------------------------- 1 | 9 ! NSTAE 2 | -64.703300 32.373400 ! 2695540 3 | -66.982578 44.904494 ! 8410140 4 | -67.108530 44.871053 ! 8410714 5 | -67.125020 44.919910 ! 
8410715 6 | -67.140690 45.130070 ! 8410834 7 | -67.149430 44.822650 ! 8410864 8 | -67.187150 44.649400 ! 8411060 9 | -67.296800 44.640940 ! 8411250 10 | -72.577200 40.823000 ! 8512769 11 | 1 ! NSTAV 12 | -72.577200 40.823000 ! 8512769 13 | 0 ! NSTAM 14 | -------------------------------------------------------------------------------- /tests/data/input/test_import_stations/stations_3.txt: -------------------------------------------------------------------------------- 1 | 1 ! NSTAE NSTAV NSTAC 2 | -72.577200 40.823000 ! 8512769 3 | 0 ! NSTAM 4 | -------------------------------------------------------------------------------- /tests/data/reference/example_1/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=4 8 | 9 | main() { 10 | rm -rf work 11 | mkdir work 12 | cd work 13 | ln -sf ../fort.14 14 | ln -sf ../fort.13 15 | ln -sf ../fort.15 ./fort.15 16 | adcprep --np 4 --partmesh 17 | adcprep --np 4 --prepall 18 | mpiexec -n 4 padcirc 19 | clean_directory 20 | cd .. 21 | } 22 | 23 | clean_directory() { 24 | rm -rf PE* 25 | rm -rf partmesh.txt 26 | rm -rf metis_graph.txt 27 | rm -rf fort.13 28 | rm -rf fort.14 29 | rm -rf fort.15 30 | rm -rf fort.16 31 | rm -rf fort.80 32 | rm -rf fort.68.nc 33 | } 34 | 35 | main 36 | -------------------------------------------------------------------------------- /tests/data/reference/example_2/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=4 8 | 9 | main() { 10 | SECONDS=0 11 | run_coldstart_phase 12 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 13 | duration=$SECONDS 14 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 15 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 16 | exit -1 17 | else 18 | run_hotstart_phase 19 | duration=$SECONDS 20 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 21 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 22 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 23 | exit -1 24 | fi 25 | fi 26 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 27 | } 28 | 29 | run_coldstart_phase() { 30 | rm -rf coldstart 31 | mkdir coldstart 32 | cd coldstart 33 | ln -sf ../fort.14 34 | ln -sf ../fort.13 35 | ln -sf ../fort.15.coldstart ./fort.15 36 | adcprep --np 4 --partmesh 37 | adcprep --np 4 --prepall 38 | mpiexec -n 4 padcirc 2>&1 | tee ../padcirc.log 39 | clean_directory 40 | cd .. 41 | } 42 | 43 | run_hotstart_phase() { 44 | rm -rf hotstart 45 | mkdir hotstart 46 | cd hotstart 47 | ln -sf ../fort.14 48 | ln -sf ../fort.13 49 | ln -sf ../fort.15.hotstart ./fort.15 50 | ln -sf ../coldstart/fort.67.nc 51 | adcprep --np 4 --partmesh 52 | adcprep --np 4 --prepall 53 | mpiexec -n 4 padcirc 2>&1 | tee -a ../padcirc.log 54 | clean_directory 55 | cd .. 
56 | } 57 | 58 | clean_directory() { 59 | rm -rf PE* 60 | rm -rf partmesh.txt 61 | rm -rf metis_graph.txt 62 | rm -rf fort.13 63 | rm -rf fort.14 64 | rm -rf fort.15 65 | rm -rf fort.16 66 | rm -rf fort.80 67 | rm -rf fort.68.nc 68 | } 69 | 70 | main 71 | -------------------------------------------------------------------------------- /tests/data/reference/example_3/fort.22: -------------------------------------------------------------------------------- 1 | AL, 18, 2012102118, , BEST, 0, 143N, 774W, 25, 1006, LO, 0, , 0, 0, 0, 0, 1008, 180, 150, 35, 0, L, 0, , 224, 3, INVEST, 1 2 | AL, 18, 2012102200, , BEST, 0, 139N, 778W, 25, 1005, LO, 0, , 0, 0, 0, 0, 1008, 180, 150, 35, 0, L, 0, , 224, 3, INVEST, 2 3 | AL, 18, 2012102206, , BEST, 0, 135N, 782W, 25, 1003, LO, 0, , 0, 0, 0, 0, 1008, 225, 75, 35, 0, L, 0, , 224, 3, INVEST, 3 4 | AL, 18, 2012102212, , BEST, 0, 131N, 786W, 30, 1002, TD, 0, , 0, 0, 0, 0, 1007, 250, 75, 0, 0, L, 0, , 224, 3, EIGHTEEN, 4 5 | AL, 18, 2012102218, , BEST, 0, 127N, 787W, 35, 1000, TS, 34, NEQ, 50, 60, 0, 0, 1007, 250, 60, 45, 0, L, 0, , 194, 2, EIGHTEEN, 5 6 | AL, 18, 2012102300, , BEST, 0, 126N, 784W, 40, 998, TS, 34, NEQ, 50, 60, 0, 0, 1007, 250, 40, 50, 0, L, 0, , 109, 2, SANDY, 6 7 | AL, 18, 2012102306, , BEST, 0, 129N, 781W, 40, 998, TS, 34, NEQ, 70, 80, 0, 0, 1005, 240, 60, 50, 0, L, 0, , 44, 2, SANDY, 7 8 | AL, 18, 2012102312, , BEST, 0, 134N, 779W, 40, 995, TS, 34, NEQ, 100, 100, 0, 0, 1005, 240, 60, 55, 0, L, 0, , 21, 3, SANDY, 8 9 | AL, 18, 2012102318, , BEST, 0, 140N, 776W, 45, 993, TS, 34, NEQ, 100, 120, 0, 0, 1005, 240, 60, 55, 0, L, 0, , 26, 3, SANDY, 9 10 | AL, 18, 2012102400, , BEST, 0, 147N, 773W, 55, 990, TS, 34, NEQ, 100, 150, 40, 40, 1007, 300, 60, 60, 0, L, 0, , 23, 4, SANDY, 10 11 | AL, 18, 2012102400, , BEST, 0, 147N, 773W, 55, 990, TS, 50, NEQ, 0, 70, 0, 0, 1007, 300, 60, 60, 0, L, 0, , 23, 4, SANDY, 10 12 | AL, 18, 2012102406, , BEST, 0, 156N, 771W, 60, 987, TS, 34, NEQ, 100, 150, 50, 50, 1007, 300, 45, 65, 0, L, 0, , 12, 5, SANDY, 11 13 | AL, 18, 2012102406, , BEST, 0, 156N, 771W, 60, 987, TS, 50, NEQ, 50, 70, 20, 20, 1007, 300, 45, 65, 0, L, 0, , 12, 5, SANDY, 11 14 | AL, 18, 2012102412, , BEST, 0, 166N, 769W, 65, 981, HU, 34, NEQ, 120, 160, 70, 70, 1006, 285, 40, 75, 0, L, 0, , 11, 5, SANDY, 12 15 | AL, 18, 2012102412, , BEST, 0, 166N, 769W, 65, 981, HU, 50, NEQ, 50, 60, 40, 30, 1006, 285, 40, 75, 0, L, 0, , 11, 5, SANDY, 12 16 | AL, 18, 2012102412, , BEST, 0, 166N, 769W, 65, 981, HU, 64, NEQ, 20, 20, 0, 0, 1006, 285, 40, 75, 0, L, 0, , 11, 5, SANDY, 12 17 | AL, 18, 2012102418, , BEST, 0, 177N, 767W, 75, 972, HU, 34, NEQ, 150, 180, 70, 70, 1005, 290, 25, 85, 0, L, 0, , 10, 6, SANDY, 13 18 | AL, 18, 2012102418, , BEST, 0, 177N, 767W, 75, 972, HU, 50, NEQ, 50, 60, 40, 40, 1005, 290, 25, 85, 0, L, 0, , 10, 6, SANDY, 13 19 | AL, 18, 2012102418, , BEST, 0, 177N, 767W, 75, 972, HU, 64, NEQ, 25, 30, 20, 25, 1005, 290, 25, 85, 0, L, 0, , 10, 6, SANDY, 13 20 | -------------------------------------------------------------------------------- /tests/data/reference/example_3/slurm.job: -------------------------------------------------------------------------------- 1 | #!/bin/bash --login 2 | #SBATCH -D . 
3 | #SBATCH -J adcircpy/examples/example_3.py 4 | #SBATCH -A account 5 | #SBATCH --mail-type=all 6 | #SBATCH --mail-user=example@email.gov 7 | #SBATCH --output=example_3.log 8 | #SBATCH -n 1000 9 | #SBATCH --time=08:00:00 10 | #SBATCH --partition=partition 11 | 12 | ulimit -s unlimited 13 | set -e 14 | 15 | module load intel/2020 impi/2020 netcdf/4.7.2-parallel 16 | 17 | PATH=$HOME/adcirc/build:$PATH 18 | 19 | main() { 20 | SECONDS=0 21 | run_coldstart_phase 22 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." example_3.log; then 23 | duration=$SECONDS 24 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 25 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 26 | exit -1 27 | else 28 | run_hotstart_phase 29 | duration=$SECONDS 30 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." example_3.log; then 31 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 32 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 33 | exit -1 34 | fi 35 | fi 36 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 37 | } 38 | 39 | run_coldstart_phase() { 40 | rm -rf coldstart 41 | mkdir coldstart 42 | cd coldstart 43 | ln -sf ../fort.14 44 | ln -sf ../fort.13 45 | ln -sf ../fort.15.coldstart ./fort.15 46 | adcprep --np $SLURM_NTASKS --partmesh 47 | adcprep --np $SLURM_NTASKS --prepall 48 | srun padcirc 49 | clean_directory 50 | cd .. 51 | } 52 | 53 | run_hotstart_phase() { 54 | rm -rf hotstart 55 | mkdir hotstart 56 | cd hotstart 57 | ln -sf ../fort.14 58 | ln -sf ../fort.13 59 | ln -sf ../fort.15.hotstart ./fort.15 60 | ln -sf ../coldstart/fort.67.nc 61 | ln -sf ../fort.22 ./fort.22 62 | aswip 63 | mv NWS_20_fort.22 fort.22 64 | adcprep --np $SLURM_NTASKS --partmesh 65 | adcprep --np $SLURM_NTASKS --prepall 66 | srun padcirc 67 | clean_directory 68 | cd .. 69 | } 70 | 71 | clean_directory() { 72 | rm -rf PE* 73 | rm -rf partmesh.txt 74 | rm -rf metis_graph.txt 75 | rm -rf fort.13 76 | rm -rf fort.14 77 | rm -rf fort.15 78 | rm -rf fort.16 79 | rm -rf fort.80 80 | rm -rf fort.68.nc 81 | } 82 | 83 | main 84 | -------------------------------------------------------------------------------- /tests/data/reference/example_4/slurm.job: -------------------------------------------------------------------------------- 1 | #!/bin/bash --login 2 | #SBATCH -D . 3 | #SBATCH -J adcircpy/examples/example_4.py 4 | #SBATCH -A account 5 | #SBATCH --mail-type=all 6 | #SBATCH --mail-user=example@email.gov 7 | #SBATCH --output=example_4.log 8 | #SBATCH -n 1000 9 | #SBATCH --time=08:00:00 10 | #SBATCH --partition=partition 11 | 12 | ulimit -s unlimited 13 | set -e 14 | 15 | module load intel/2020 impi/2020 netcdf/4.7.2-parallel 16 | 17 | PATH=$HOME/adcirc/build:$PATH 18 | 19 | main() { 20 | SECONDS=0 21 | run_coldstart_phase 22 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." example_4.log; then 23 | duration=$SECONDS 24 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 25 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 26 | exit -1 27 | else 28 | run_hotstart_phase 29 | duration=$SECONDS 30 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." example_4.log; then 31 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 32 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 33 | exit -1 34 | fi 35 | fi 36 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 
37 | } 38 | 39 | run_coldstart_phase() { 40 | rm -rf coldstart 41 | mkdir coldstart 42 | cd coldstart 43 | ln -sf ../fort.14 44 | ln -sf ../fort.13 45 | ln -sf ../fort.15.coldstart ./fort.15 46 | adcprep --np $SLURM_NTASKS --partmesh 47 | adcprep --np $SLURM_NTASKS --prepall 48 | srun padcswan 49 | clean_directory 50 | cd .. 51 | } 52 | 53 | run_hotstart_phase() { 54 | rm -rf hotstart 55 | mkdir hotstart 56 | cd hotstart 57 | ln -sf ../fort.14 58 | ln -sf ../fort.13 59 | ln -sf ../fort.15.hotstart ./fort.15 60 | ln -sf ../coldstart/fort.67.nc 61 | ln -sf ../fort.22 ./fort.22 62 | aswip 63 | mv NWS_17_fort.22 fort.22 64 | adcprep --np $SLURM_NTASKS --partmesh 65 | adcprep --np $SLURM_NTASKS --prepall 66 | srun padcswan 67 | clean_directory 68 | cd .. 69 | } 70 | 71 | clean_directory() { 72 | rm -rf PE* 73 | rm -rf partmesh.txt 74 | rm -rf metis_graph.txt 75 | rm -rf fort.13 76 | rm -rf fort.14 77 | rm -rf fort.15 78 | rm -rf fort.16 79 | rm -rf fort.80 80 | rm -rf fort.68.nc 81 | } 82 | 83 | main 84 | -------------------------------------------------------------------------------- /tests/data/reference/test_Stations/stations_1.fort.15: -------------------------------------------------------------------------------- 1 | 12 ! NSTAE 2 | -78.91829681396484 33.654998779296875 ! 8661070 3 | -76.67066192626953 34.71733093261719 ! 8656483 4 | -79.9236068725586 32.78083038330078 ! 8665530 5 | -75.54818725585938 35.79568862915039 ! 8652587 6 | -77.95361328125 34.227500915527344 ! 8658120 7 | -77.78668975830078 34.21331024169922 ! 8658163 8 | -75.70419311523438 35.20861053466797 ! 8654467 9 | -80.90303039550781 32.034690856933594 ! 8670870 10 | -79.70670318603516 32.8567008972168 ! 8664941 11 | -80.46499633789062 32.34000015258789 ! 8668498 12 | -78.50669860839844 33.8650016784668 ! 8659897 13 | -75.63500213623047 35.22330093383789 ! 8654400 14 | 12 ! NSTAV 15 | -78.91829681396484 33.654998779296875 ! 8661070 16 | -76.67066192626953 34.71733093261719 ! 8656483 17 | -79.9236068725586 32.78083038330078 ! 8665530 18 | -75.54818725585938 35.79568862915039 ! 8652587 19 | -77.95361328125 34.227500915527344 ! 8658120 20 | -77.78668975830078 34.21331024169922 ! 8658163 21 | -75.70419311523438 35.20861053466797 ! 8654467 22 | -80.90303039550781 32.034690856933594 ! 8670870 23 | -79.70670318603516 32.8567008972168 ! 8664941 24 | -80.46499633789062 32.34000015258789 ! 8668498 25 | -78.50669860839844 33.8650016784668 ! 8659897 26 | -75.63500213623047 35.22330093383789 ! 8654400 27 | 12 ! NSTAC 28 | -78.91829681396484 33.654998779296875 ! 8661070 29 | -76.67066192626953 34.71733093261719 ! 8656483 30 | -79.9236068725586 32.78083038330078 ! 8665530 31 | -75.54818725585938 35.79568862915039 ! 8652587 32 | -77.95361328125 34.227500915527344 ! 8658120 33 | -77.78668975830078 34.21331024169922 ! 8658163 34 | -75.70419311523438 35.20861053466797 ! 8654467 35 | -80.90303039550781 32.034690856933594 ! 8670870 36 | -79.70670318603516 32.8567008972168 ! 8664941 37 | -80.46499633789062 32.34000015258789 ! 8668498 38 | -78.50669860839844 33.8650016784668 ! 8659897 39 | -75.63500213623047 35.22330093383789 ! 8654400 40 | 12 ! NSTAM 41 | -78.91829681396484 33.654998779296875 ! 8661070 42 | -76.67066192626953 34.71733093261719 ! 8656483 43 | -79.9236068725586 32.78083038330078 ! 8665530 44 | -75.54818725585938 35.79568862915039 ! 8652587 45 | -77.95361328125 34.227500915527344 ! 8658120 46 | -77.78668975830078 34.21331024169922 ! 8658163 47 | -75.70419311523438 35.20861053466797 ! 
8654467 48 | -80.90303039550781 32.034690856933594 ! 8670870 49 | -79.70670318603516 32.8567008972168 ! 8664941 50 | -80.46499633789062 32.34000015258789 ! 8668498 51 | -78.50669860839844 33.8650016784668 ! 8659897 52 | -75.63500213623047 35.22330093383789 ! 8654400 -------------------------------------------------------------------------------- /tests/data/reference/test_Stations/stations_2.fort.15: -------------------------------------------------------------------------------- 1 | 12 ! NSTAE 2 | -78.91829681396484 33.654998779296875 ! 8661070 3 | -76.67066192626953 34.71733093261719 ! 8656483 4 | -79.9236068725586 32.78083038330078 ! 8665530 5 | -75.54818725585938 35.79568862915039 ! 8652587 6 | -77.95361328125 34.227500915527344 ! 8658120 7 | -77.78668975830078 34.21331024169922 ! 8658163 8 | -75.70419311523438 35.20861053466797 ! 8654467 9 | -80.90303039550781 32.034690856933594 ! 8670870 10 | -79.70670318603516 32.8567008972168 ! 8664941 11 | -80.46499633789062 32.34000015258789 ! 8668498 12 | -78.50669860839844 33.8650016784668 ! 8659897 13 | -75.63500213623047 35.22330093383789 ! 8654400 -------------------------------------------------------------------------------- /tests/data/reference/test_Stations/stations_3.fort.15: -------------------------------------------------------------------------------- 1 | 3 ! NSTAE 2 | -77.95361328125 34.227500915527344 ! 8658120 3 | -80.90303039550781 32.034690856933594 ! 8670870 4 | -75.54818725585938 35.79568862915039 ! 8652587 5 | 1 ! NSTAV 6 | -77.95361328125 34.227500915527344 ! 8658120 -------------------------------------------------------------------------------- /tests/data/reference/test_best_track_run/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=2 8 | 9 | main() { 10 | SECONDS=0 11 | run_coldstart_phase 12 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 13 | duration=$SECONDS 14 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 15 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 16 | exit -1 17 | else 18 | run_hotstart_phase 19 | duration=$SECONDS 20 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 21 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 22 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 23 | exit -1 24 | fi 25 | fi 26 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 27 | } 28 | 29 | run_coldstart_phase() { 30 | rm -rf coldstart 31 | mkdir coldstart 32 | cd coldstart 33 | ln -sf ../fort.14 34 | ln -sf ../fort.13 35 | ln -sf ../fort.15.coldstart ./fort.15 36 | adcprep --np 2 --partmesh 37 | adcprep --np 2 --prepall 38 | mpiexec -n 2 padcirc 2>&1 | tee ../padcirc.log 39 | clean_directory 40 | cd .. 41 | } 42 | 43 | run_hotstart_phase() { 44 | rm -rf hotstart 45 | mkdir hotstart 46 | cd hotstart 47 | ln -sf ../fort.14 48 | ln -sf ../fort.13 49 | ln -sf ../fort.15.hotstart ./fort.15 50 | ln -sf ../coldstart/fort.67.nc 51 | ln -sf ../fort.22 ./fort.22 52 | aswip 53 | mv NWS_20_fort.22 fort.22 54 | adcprep --np 2 --partmesh 55 | adcprep --np 2 --prepall 56 | mpiexec -n 2 padcirc 2>&1 | tee -a ../padcirc.log 57 | clean_directory 58 | cd .. 
59 | } 60 | 61 | clean_directory() { 62 | rm -rf PE* 63 | rm -rf partmesh.txt 64 | rm -rf metis_graph.txt 65 | rm -rf fort.13 66 | rm -rf fort.14 67 | rm -rf fort.15 68 | rm -rf fort.16 69 | rm -rf fort.80 70 | rm -rf fort.68.nc 71 | } 72 | 73 | main 74 | -------------------------------------------------------------------------------- /tests/data/reference/test_best_track_run/fort.22: -------------------------------------------------------------------------------- 1 | AL, 06, 2018091800, , BEST, 0, 395N, 805W, 25, 1008, EX, 0, , 0, 0, 0, 0, 1013, 360, 160, 0, 0, , 0, , 59, 7, FLORENCE, 1 2 | AL, 06, 2018091806, , BEST, 0, 413N, 768W, 25, 1007, EX, 0, , 0, 0, 0, 0, 1013, 360, 170, 0, 0, , 0, , 56, 17, FLORENCE, 2 3 | AL, 06, 2018091812, , BEST, 0, 422N, 733W, 25, 1006, EX, 34, NEQ, 0, 0, 0, 0, 1013, 360, 180, 30, 0, L, 0, , 70, 14, FLORENCE, 3 4 | -------------------------------------------------------------------------------- /tests/data/reference/test_configuration/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=2 8 | 9 | main() { 10 | SECONDS=0 11 | run_coldstart_phase 12 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." punswan.log; then 13 | duration=$SECONDS 14 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 15 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 16 | exit -1 17 | else 18 | run_hotstart_phase 19 | duration=$SECONDS 20 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." punswan.log; then 21 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 22 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 23 | exit -1 24 | fi 25 | fi 26 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 27 | } 28 | 29 | run_coldstart_phase() { 30 | rm -rf coldstart 31 | mkdir coldstart 32 | cd coldstart 33 | ln -sf ../fort.14 34 | ln -sf ../fort.13 35 | ln -sf ../fort.15.coldstart ./fort.15 36 | adcprep --np 2 --partmesh 37 | adcprep --np 2 --prepall 38 | mpiexec -n 2 punswan 2>&1 | tee ../punswan.log 39 | clean_directory 40 | cd .. 41 | } 42 | 43 | run_hotstart_phase() { 44 | rm -rf hotstart 45 | mkdir hotstart 46 | cd hotstart 47 | ln -sf ../fort.14 48 | ln -sf ../fort.13 49 | ln -sf ../fort.15.hotstart ./fort.15 50 | ln -sf ../coldstart/fort.67.nc 51 | ln -sf ../fort.22 ./fort.22 52 | aswip 53 | mv NWS_17_fort.22 fort.22 54 | adcprep --np 2 --partmesh 55 | adcprep --np 2 --prepall 56 | mpiexec -n 2 punswan 2>&1 | tee -a ../punswan.log 57 | clean_directory 58 | cd .. 59 | } 60 | 61 | clean_directory() { 62 | rm -rf PE* 63 | rm -rf partmesh.txt 64 | rm -rf metis_graph.txt 65 | rm -rf fort.13 66 | rm -rf fort.14 67 | rm -rf fort.15 68 | rm -rf fort.16 69 | rm -rf fort.80 70 | rm -rf fort.68.nc 71 | } 72 | 73 | main 74 | -------------------------------------------------------------------------------- /tests/data/reference/test_configuration/fort.15.coldstart: -------------------------------------------------------------------------------- 1 | created on 2023-09-01 11:11 ! RUNDES - 32 CHARACTER ALPHANUMERIC RUN DESCRIPTION 2 | Shinacock Inlet Coarse Grid ! RUNID - 24 CHARACTER ALPANUMERIC RUN IDENTIFICATION 3 | 1 ! NFOVER - NONFATAL ERROR OVERRIDE OPTION 4 | 1 ! NABOUT - ABREVIATED OUTPUT OPTION PARAMETER 5 | 100 ! NSCREEN - UNIT 6 OUTPUT OPTION PARAMETER 6 | 0 ! IHOT - HOT START PARAMETER 7 | 2 ! 
ICS - COORDINATE SYSTEM SELECTION PARAMETER 8 | 511113 ! IM - MODEL SELECTION PARAMETER 9 | 2 ! NOLIBF - BOTTOM FRICTION TERM SELECTION PARAM; before NWP==1, '2' was used 10 | 2 ! NOLIFA - FINITE AMPLITUDE TERM SELECTION PARAMETER 11 | 1 ! NOLICA - SPATIAL DERIVATIVE CONVECTIVE SELECTION PARAMETER 12 | 1 ! NOLICAT - TIME DERIVATIVE CONVECTIVE TERM SELECTION PARAMETER 13 | 0 ! NWP - VARIABLE BOTTOM FRICTION AND LATERAL VISCOSITY OPTION PARAMETER; default 0 14 | 1 ! NCOR - VARIABLE CORIOLIS IN SPACE OPTION PARAMETER 15 | 1 ! NTIP - TIDAL POTENTIAL OPTION PARAMETER 16 | 0 ! NWS - WIND STRESS AND BAROMETRIC PRESSURE OPTION PARAMETER 17 | 1 ! NRAMP - RAMP FUNCTION OPTION 18 | 9.81 ! G - ACCELERATION DUE TO GRAVITY - DETERMINES UNITS 19 | 0.005 ! TAU0 - WEIGHTING FACTOR IN GWCE; original, 0.005 20 | 2.613217 ! DTDP - TIME STEP (IN SECONDS) 21 | 0 ! STATIM - STARTING TIME (IN DAYS) 22 | 0 ! REFTIM - REFERENCE TIME (IN DAYS) 23 | 2 ! RNDAY - TOTAL LENGTH OF SIMULATION (IN DAYS) 24 | 2 ! DRAMP - DURATION OF RAMP FUNCTION (IN DAYS) 25 | 0.5 0.5 0 ! A00 B00 C00 - TIME WEIGHTING FACTORS FOR THE GWCE EQUATION 26 | 0.01 0 0 0.01 ! H0 NODEDRYMIN NODEWETRMP VELMIN 27 | -72.47588186015 40.7602221247 ! SLAM0 SFEA0 - CENTER OF CPP PROJECTION (NOT USED IF ICS=1, NTIP=0, NCOR=0) 28 | 0.0025 1 10 0.333333 ! CF HBREAK FTHETA FGAMMA 29 | -0.2 ! smagorinsky coefficient - LATERAL EDDY VISCOSITY COEFFICIENT; IGNORED IF NWP =1 30 | 0 ! CORI - CORIOLIS PARAMETER - IGNORED IF NCOR = 1 31 | 0 ! NTIF - NUMBER OF TIDAL POTENTIAL CONSTITUENTS BEING FORCED starting 2008082300 32 | 0 33 | 110 ! ANGINN - INNER ANGLE THRESHOLD 34 | 0 0 0 0 ! NOUTE TOUTSE TOUTFE NSPOOLE - ELEV STATION OUTPUT INFO (UNIT 61) 35 | 0 ! NSTAE - TOTAL NUMBER OF ELEVATION RECORDING STATIONS 36 | 0 0 0 0 ! NOUTV TOUTSV TOUTFV NSPOOLV - VELOCITY STATION OUTPUT INFO (UNIT 62) 37 | 0 ! NSTAV - TOTAL NUMBER OF VELOCITY RECORDING STATIONS 38 | 0 0.000000 0.000000 0 ! NOUTGE TOUTSGE TOUTFGE NSPOOLGE - GLOBAL ELEVATION OUTPUT INFO (UNIT 63) 39 | 0 0.000000 0.000000 0 ! NOUTGV TOUTSGV TOUTFGV NSPOOLGV - GLOBAL VELOCITY OUTPUT INFO (UNIT 64) 40 | 0 ! NFREQ 41 | 0 0 0 0 ! THAS THAF NHAINC FMV - HARMONIC ANALYSIS PARAMETERS 42 | 0 0 0 0 ! NHASE NHASV NHAGE NHAGV - CONTROL HARMONIC ANALYSIS AND OUTPUT TO UNITS 51,52,53,54 43 | 5 66125 ! NHSTAR NHSINC - HOT START FILE GENERATION PARAMETERS 44 | 1 0 1E-08 25 ! ITITER ISLDIA CONVCR ITMAX - ALGEBRAIC SOLUTION PARAMETERS 45 | ! NCPROJ - PROJECT TITLE 46 | ! NCINST - PROJECT INSTITUTION 47 | ! NCSOUR - PROJECT SOURCE 48 | ! NCHIST - PROJECT HISTORY 49 | ! NCREF - PROJECT REFERENCES 50 | ! NCCOM - PROJECT COMMENTS 51 | ! NCHOST - PROJECT HOST 52 | ! NCONV - CONVENTIONS 53 | ! NCCONT - CONTACT INFORMATION 54 | 2015-12-14 00:00 ! NCDATE - forcing start date 55 | &metControl WindDragLimit=0.0025, DragLawString=default, outputWindDrag=F, invertedBarometerOnElevationBoundary=T \ 56 | -------------------------------------------------------------------------------- /tests/data/reference/test_configuration/fort.15.hotstart: -------------------------------------------------------------------------------- 1 | created on 2023-09-01 11:12 ! RUNDES - 32 CHARACTER ALPHANUMERIC RUN DESCRIPTION 2 | Shinacock Inlet Coarse Grid ! RUNID - 24 CHARACTER ALPANUMERIC RUN IDENTIFICATION 3 | 1 ! NFOVER - NONFATAL ERROR OVERRIDE OPTION 4 | 1 ! NABOUT - ABREVIATED OUTPUT OPTION PARAMETER 5 | 100 ! NSCREEN - UNIT 6 OUTPUT OPTION PARAMETER 6 | 567 ! IHOT - HOT START PARAMETER 7 | 2 ! ICS - COORDINATE SYSTEM SELECTION PARAMETER 8 | 511113 ! 
IM - MODEL SELECTION PARAMETER 9 | 2 ! NOLIBF - BOTTOM FRICTION TERM SELECTION PARAM; before NWP==1, '2' was used 10 | 2 ! NOLIFA - FINITE AMPLITUDE TERM SELECTION PARAMETER 11 | 1 ! NOLICA - SPATIAL DERIVATIVE CONVECTIVE SELECTION PARAMETER 12 | 1 ! NOLICAT - TIME DERIVATIVE CONVECTIVE TERM SELECTION PARAMETER 13 | 0 ! NWP - VARIABLE BOTTOM FRICTION AND LATERAL VISCOSITY OPTION PARAMETER; default 0 14 | 1 ! NCOR - VARIABLE CORIOLIS IN SPACE OPTION PARAMETER 15 | 1 ! NTIP - TIDAL POTENTIAL OPTION PARAMETER 16 | 517 ! NWS - WIND STRESS AND BAROMETRIC PRESSURE OPTION PARAMETER 17 | 8 ! NRAMP - RAMP FUNCTION OPTION 18 | 9.81 ! G - ACCELERATION DUE TO GRAVITY - DETERMINES UNITS 19 | 0.005 ! TAU0 - WEIGHTING FACTOR IN GWCE; original, 0.005 20 | 2.613217 ! DTDP - TIME STEP (IN SECONDS) 21 | 0 ! STATIM - STARTING TIME (IN DAYS) 22 | 0 ! REFTIM - REFERENCE TIME (IN DAYS) 23 | 3600 3600 ! WTIMINC RSTIMINC - meteorological data time increment, wave forcing increment 24 | 5 ! RNDAY - TOTAL LENGTH OF SIMULATION (IN DAYS) 25 | 2.000 0.000 0.000 0.000 2.000 2.000 1.000 0.000 2.000 ! DRAMP - DURATION OF RAMP FUNCTION (IN DAYS) 26 | 0.5 0.5 0 ! A00 B00 C00 - TIME WEIGHTING FACTORS FOR THE GWCE EQUATION 27 | 0.01 0 0 0.01 ! H0 NODEDRYMIN NODEWETRMP VELMIN 28 | -72.47588186015 40.7602221247 ! SLAM0 SFEA0 - CENTER OF CPP PROJECTION (NOT USED IF ICS=1, NTIP=0, NCOR=0) 29 | 0.0025 1 10 0.333333 ! CF HBREAK FTHETA FGAMMA 30 | -0.2 ! smagorinsky coefficient - LATERAL EDDY VISCOSITY COEFFICIENT; IGNORED IF NWP =1 31 | 0 ! CORI - CORIOLIS PARAMETER - IGNORED IF NCOR = 1 32 | 0 ! NTIF - NUMBER OF TIDAL POTENTIAL CONSTITUENTS BEING FORCED starting 2008082300 33 | 0 34 | 110 ! ANGINN - INNER ANGLE THRESHOLD 35 | 0 0 0 0 ! NOUTE TOUTSE TOUTFE NSPOOLE - ELEV STATION OUTPUT INFO (UNIT 61) 36 | 0 ! NSTAE - TOTAL NUMBER OF ELEVATION RECORDING STATIONS 37 | 0 0 0 0 ! NOUTV TOUTSV TOUTFV NSPOOLV - VELOCITY STATION OUTPUT INFO (UNIT 62) 38 | 0 ! NSTAV - TOTAL NUMBER OF VELOCITY RECORDING STATIONS 39 | 0 0 0 0 ! NOUTM TOUTSM TOUTFM NSPOOLM - METEOROLOGICAL STATION OUTPUT INFO (UNITS 71/72) 40 | 0 ! NSTAM - TOTAL NUMBER OF METEOROLOGICAL RECORDING STATIONS 41 | 0 0.000000 0.000000 0 ! NOUTGE TOUTSGE TOUTFGE NSPOOLGE - GLOBAL ELEVATION OUTPUT INFO (UNIT 63) 42 | 0 0.000000 0.000000 0 ! NOUTGV TOUTSGV TOUTFGV NSPOOLGV - GLOBAL VELOCITY OUTPUT INFO (UNIT 64) 43 | 0 0.000000 0.000000 0 ! NOUTGM TOUTSGM TOUTFGM NSPOOLGM - GLOBAL METEOROLOGICAL OUTPUT INFO 44 | 0 ! NFREQ 45 | 0 0 0 0 ! THAS THAF NHAINC FMV - HARMONIC ANALYSIS PARAMETERS 46 | 0 0 0 0 ! NHASE NHASV NHAGE NHAGV - CONTROL HARMONIC ANALYSIS AND OUTPUT TO UNITS 51,52,53,54 47 | 0 0 ! NHSTAR NHSINC - HOT START FILE GENERATION PARAMETERS 48 | 1 0 1E-08 25 ! ITITER ISLDIA CONVCR ITMAX - ALGEBRAIC SOLUTION PARAMETERS 49 | ! NCPROJ - PROJECT TITLE 50 | ! NCINST - PROJECT INSTITUTION 51 | ! NCSOUR - PROJECT SOURCE 52 | ! NCHIST - PROJECT HISTORY 53 | ! NCREF - PROJECT REFERENCES 54 | ! NCCOM - PROJECT COMMENTS 55 | ! NCHOST - PROJECT HOST 56 | ! NCONV - CONVENTIONS 57 | ! NCCONT - CONTACT INFORMATION 58 | 2015-12-14 00:00 ! 
NCDATE - forcing start date 59 | &SWANOutputControl SWAN_OutputHS=False, SWAN_OutputDIR=False, SWAN_OutputTM01=False, SWAN_OutputTPS=False, SWAN_OutputWIND=False, SWAN_OutputTM02=False, SWAN_OutputTMM10=False \ 60 | &metControl WindDragLimit=0.0025, DragLawString=default, outputWindDrag=F, invertedBarometerOnElevationBoundary=T \ 61 | -------------------------------------------------------------------------------- /tests/data/reference/test_open/fort.14: -------------------------------------------------------------------------------- 1 | 2 | 10 11 3 | 1 0.0 0.0 -5.0 4 | 2 0.5 0.0 -4.0 5 | 3 1.0 0.0 -3.0 6 | 4 1.0 1.0 -2.0 7 | 5 0.0 1.0 -1.0 8 | 6 0.5 1.5 0.0 9 | 7 0.33 0.33 1.0 10 | 8 0.66 0.33 2.0 11 | 9 0.5 0.66 3.0 12 | 10 -1.0 1.0 4.0 13 | 11 -1.0 0.0 5.0 14 | 1 3 5 7 9 15 | 2 3 1 2 7 16 | 3 3 2 3 8 17 | 4 3 8 7 2 18 | 5 3 3 4 8 19 | 6 3 4 9 8 20 | 7 3 4 6 5 21 | 8 4 5 10 11 1 22 | 9 3 9 4 5 23 | 10 3 5 1 7 -------------------------------------------------------------------------------- /tests/data/reference/test_slurm_driver/slurm.job: -------------------------------------------------------------------------------- 1 | #!/bin/bash --login 2 | #SBATCH -D . 3 | #SBATCH -J adcircpy/tests/test_configuration.py 4 | #SBATCH -A account 5 | #SBATCH --mail-type=all 6 | #SBATCH --mail-user=example@email.gov 7 | #SBATCH --output=test_configuration.log 8 | #SBATCH -n 1000 9 | #SBATCH --time=08:00:00 10 | #SBATCH --partition=partition 11 | 12 | ulimit -s unlimited 13 | set -e 14 | 15 | module load intel/2020 impi/2020 netcdf/4.7.2-parallel 16 | 17 | PATH=$HOME/adcirc/build:$PATH 18 | 19 | main() { 20 | SECONDS=0 21 | run_coldstart_phase 22 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." test_configuration.log; then 23 | duration=$SECONDS 24 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 25 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 26 | exit -1 27 | else 28 | run_hotstart_phase 29 | duration=$SECONDS 30 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." test_configuration.log; then 31 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 32 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 33 | exit -1 34 | fi 35 | fi 36 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 37 | } 38 | 39 | run_coldstart_phase() { 40 | rm -rf coldstart 41 | mkdir coldstart 42 | cd coldstart 43 | ln -sf ../fort.14 44 | ln -sf ../fort.13 45 | ln -sf ../fort.15.coldstart ./fort.15 46 | adcprep --np $SLURM_NTASKS --partmesh 47 | adcprep --np $SLURM_NTASKS --prepall 48 | srun padcirc 49 | clean_directory 50 | cd .. 51 | } 52 | 53 | run_hotstart_phase() { 54 | rm -rf hotstart 55 | mkdir hotstart 56 | cd hotstart 57 | ln -sf ../fort.14 58 | ln -sf ../fort.13 59 | ln -sf ../fort.15.hotstart ./fort.15 60 | ln -sf ../coldstart/fort.67.nc 61 | adcprep --np $SLURM_NTASKS --partmesh 62 | adcprep --np $SLURM_NTASKS --prepall 63 | srun padcirc 64 | clean_directory 65 | cd .. 
66 | } 67 | 68 | clean_directory() { 69 | rm -rf PE* 70 | rm -rf partmesh.txt 71 | rm -rf metis_graph.txt 72 | rm -rf fort.13 73 | rm -rf fort.14 74 | rm -rf fort.15 75 | rm -rf fort.16 76 | rm -rf fort.80 77 | rm -rf fort.68.nc 78 | } 79 | 80 | main 81 | -------------------------------------------------------------------------------- /tests/data/reference/test_tidal_run/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=2 8 | 9 | main() { 10 | SECONDS=0 11 | run_coldstart_phase 12 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 13 | duration=$SECONDS 14 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 15 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 16 | exit -1 17 | else 18 | run_hotstart_phase 19 | duration=$SECONDS 20 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 21 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 22 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 23 | exit -1 24 | fi 25 | fi 26 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 27 | } 28 | 29 | run_coldstart_phase() { 30 | rm -rf coldstart 31 | mkdir coldstart 32 | cd coldstart 33 | ln -sf ../fort.14 34 | ln -sf ../fort.13 35 | ln -sf ../fort.15.coldstart ./fort.15 36 | adcprep --np 2 --partmesh 37 | adcprep --np 2 --prepall 38 | mpiexec -n 2 padcirc 2>&1 | tee ../padcirc.log 39 | clean_directory 40 | cd .. 41 | } 42 | 43 | run_hotstart_phase() { 44 | rm -rf hotstart 45 | mkdir hotstart 46 | cd hotstart 47 | ln -sf ../fort.14 48 | ln -sf ../fort.13 49 | ln -sf ../fort.15.hotstart ./fort.15 50 | ln -sf ../coldstart/fort.67.nc 51 | adcprep --np 2 --partmesh 52 | adcprep --np 2 --prepall 53 | mpiexec -n 2 padcirc 2>&1 | tee -a ../padcirc.log 54 | clean_directory 55 | cd .. 56 | } 57 | 58 | clean_directory() { 59 | rm -rf PE* 60 | rm -rf partmesh.txt 61 | rm -rf metis_graph.txt 62 | rm -rf fort.13 63 | rm -rf fort.14 64 | rm -rf fort.15 65 | rm -rf fort.16 66 | rm -rf fort.80 67 | rm -rf fort.68.nc 68 | } 69 | 70 | main 71 | -------------------------------------------------------------------------------- /tests/data/reference/test_tidal_run_cli/driver.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ulimit -s unlimited 4 | 5 | set -e 6 | 7 | NPROCS=2 8 | 9 | main() { 10 | SECONDS=0 11 | run_coldstart_phase 12 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 13 | duration=$SECONDS 14 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 15 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 16 | exit -1 17 | else 18 | run_hotstart_phase 19 | duration=$SECONDS 20 | if grep -Rq "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." padcirc.log; then 21 | echo "ERROR: Elevation.gt.ErrorElev, ADCIRC stopping." 22 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 23 | exit -1 24 | fi 25 | fi 26 | echo "Wallclock time: $(($duration / 60)) minutes and $(($duration % 60)) seconds." 
27 | } 28 | 29 | run_coldstart_phase() { 30 | rm -rf coldstart 31 | mkdir coldstart 32 | cd coldstart 33 | ln -sf ../fort.14 34 | ln -sf ../fort.13 35 | ln -sf ../fort.15.coldstart ./fort.15 36 | adcprep --np 2 --partmesh 37 | adcprep --np 2 --prepall 38 | mpiexec -n 2 padcirc 2>&1 | tee ../padcirc.log 39 | clean_directory 40 | cd .. 41 | } 42 | 43 | run_hotstart_phase() { 44 | rm -rf hotstart 45 | mkdir hotstart 46 | cd hotstart 47 | ln -sf ../fort.14 48 | ln -sf ../fort.13 49 | ln -sf ../fort.15.hotstart ./fort.15 50 | ln -sf ../coldstart/fort.67.nc 51 | adcprep --np 2 --partmesh 52 | adcprep --np 2 --prepall 53 | mpiexec -n 2 padcirc 2>&1 | tee -a ../padcirc.log 54 | clean_directory 55 | cd .. 56 | } 57 | 58 | clean_directory() { 59 | rm -rf PE* 60 | rm -rf partmesh.txt 61 | rm -rf metis_graph.txt 62 | rm -rf fort.13 63 | rm -rf fort.14 64 | rm -rf fort.15 65 | rm -rf fort.16 66 | rm -rf fort.80 67 | rm -rf fort.68.nc 68 | } 69 | 70 | main 71 | -------------------------------------------------------------------------------- /tests/data/reference/test_write/test_AdcircMesh.2dm: -------------------------------------------------------------------------------- 1 | MESH2D 2 | ND 1 0.0000000000000000E+00 0.0000000000000000E+00 5.0000000000000000E+00 3 | ND 2 5.0000000000000000E-01 0.0000000000000000E+00 4.0000000000000000E+00 4 | ND 3 1.0000000000000000E+00 0.0000000000000000E+00 3.0000000000000000E+00 5 | ND 4 1.0000000000000000E+00 1.0000000000000000E+00 2.0000000000000000E+00 6 | ND 5 0.0000000000000000E+00 1.0000000000000000E+00 1.0000000000000000E+00 7 | ND 6 5.0000000000000000E-01 1.5000000000000000E+00 -0.0000000000000000E+00 8 | ND 7 3.3000000000000002E-01 3.3000000000000002E-01 -1.0000000000000000E+00 9 | ND 8 6.6000000000000003E-01 3.3000000000000002E-01 -2.0000000000000000E+00 10 | ND 9 5.0000000000000000E-01 6.6000000000000003E-01 -3.0000000000000000E+00 11 | ND 10 -1.0000000000000000E+00 1.0000000000000000E+00 -4.0000000000000000E+00 12 | ND 11 -1.0000000000000000E+00 0.0000000000000000E+00 -5.0000000000000000E+00 13 | -------------------------------------------------------------------------------- /tests/data/reference/test_write/test_AdcircMesh.gr3: -------------------------------------------------------------------------------- 1 | 2 | 10 11 3 | 1 0.0000000000000000E+00 0.0000000000000000E+00 5.0000000000000000E+00 4 | 2 5.0000000000000000E-01 0.0000000000000000E+00 4.0000000000000000E+00 5 | 3 1.0000000000000000E+00 0.0000000000000000E+00 3.0000000000000000E+00 6 | 4 1.0000000000000000E+00 1.0000000000000000E+00 2.0000000000000000E+00 7 | 5 0.0000000000000000E+00 1.0000000000000000E+00 1.0000000000000000E+00 8 | 6 5.0000000000000000E-01 1.5000000000000000E+00 -0.0000000000000000E+00 9 | 7 3.3000000000000002E-01 3.3000000000000002E-01 -1.0000000000000000E+00 10 | 8 6.6000000000000003E-01 3.3000000000000002E-01 -2.0000000000000000E+00 11 | 9 5.0000000000000000E-01 6.6000000000000003E-01 -3.0000000000000000E+00 12 | 10 -1.0000000000000000E+00 1.0000000000000000E+00 -4.0000000000000000E+00 13 | 11 -1.0000000000000000E+00 0.0000000000000000E+00 -5.0000000000000000E+00 14 | 1 5 7 9 None 15 | 2 1 2 7 None 16 | 3 2 3 8 None 17 | 4 8 7 2 None 18 | 5 3 4 8 None 19 | 6 4 9 8 None 20 | 7 4 6 5 None 21 | 8 5 10 11 1 22 | 9 9 4 5 None 23 | 10 5 1 7 None 24 | 0 ! total number of ocean boundaries 25 | 0 ! total number of ocean boundary nodes 26 | 0 ! total number of non-ocean boundaries 27 | 0 ! 
Total number of non-ocean boundary nodes -------------------------------------------------------------------------------- /tests/test_adcirc_mesh.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | from copy import copy 3 | 4 | import pytest 5 | 6 | from adcircpy import AdcircMesh 7 | from tests import OUTPUT_DIRECTORY 8 | 9 | 10 | @pytest.fixture 11 | def nodes() -> {int: ((float, float), float)}: 12 | return { 13 | '1': (0.0, 0.0, -5.0), 14 | '2': (0.5, 0.0, -4.0), 15 | '3': (1.0, 0.0, -3.0), 16 | '4': (1.0, 1.0, -2.0), 17 | '5': (0.0, 1.0, -1.0), 18 | '6': (0.5, 1.5, 0.0), 19 | '7': (0.33, 0.33, 1.0), 20 | '8': (0.66, 0.33, 2.0), 21 | '9': (0.5, 0.66, 3.0), 22 | '10': (-1.0, 1.0, 4.0), 23 | '11': (-1.0, 0.0, 5.0), 24 | } 25 | 26 | 27 | @pytest.fixture 28 | def elements() -> {int: [int]}: 29 | return { 30 | '1': ['5', '7', '9'], 31 | '2': ['1', '2', '7'], 32 | '3': ['2', '3', '8'], 33 | '4': ['8', '7', '2'], 34 | '5': ['3', '4', '8'], 35 | '6': ['4', '9', '8'], 36 | '7': ['4', '6', '5'], 37 | '8': ['5', '10', '11', '1'], 38 | '9': ['9', '4', '5'], 39 | '10': ['5', '1', '7'], 40 | } 41 | 42 | 43 | @pytest.fixture 44 | def boundaries() -> {int: {int: {str: [int]}}}: 45 | return { 46 | # "open" boundaries 47 | None: {0: {'indexes': ['10', '11', '1', '2']}, 1: {'indexes': ['2', '3', '4']}}, 48 | # "land" boundaries 49 | 0: {0: {'indexes': ['4', '6']}, 1: {'indexes': ['6', '5', '10']}}, 50 | # "interior" boundary 51 | 1: {0: {'indexes': ['7', '8', '9', '7']}}, 52 | } 53 | 54 | 55 | @pytest.fixture 56 | def fort14(elements, nodes) -> str: 57 | lines = [ 58 | f'\n{len(elements):d} {len(nodes):d}', 59 | *(f'{id} {x} {y} {z}' for id, (x, y, z) in nodes.items()), 60 | *( 61 | f'{id} {len(geometry)} {" ".join(idx for idx in geometry)}' 62 | for id, geometry in elements.items() 63 | ), 64 | ] 65 | return '\n'.join(lines) 66 | 67 | 68 | @pytest.fixture 69 | def wet_nodes() -> {int: ((float, float), float)}: 70 | return { 71 | 0: (0.0, 0.0, 0.0), 72 | 1: (1.0, 0.0, -1.0), 73 | 2: (1.0, 1.0, -2.0), 74 | 3: (0.0, 1.0, -3.0), 75 | 4: (0.5, 1.5, -4.0), 76 | } 77 | 78 | 79 | @pytest.fixture 80 | def wet_elements() -> {int: [int]}: 81 | return { 82 | 0: [2, 4, 3], 83 | 1: [0, 1, 2, 3], 84 | } 85 | 86 | 87 | def test_triangles_only(nodes, elements): 88 | mesh = AdcircMesh(nodes, {id: geom for geom in elements.values() if len(geom) == 3},) 89 | 90 | assert isinstance(mesh, AdcircMesh) 91 | 92 | 93 | def test_quads_only(nodes, elements): 94 | mesh = AdcircMesh(nodes, {id: geom for geom in elements.values() if len(geom) == 4},) 95 | 96 | assert isinstance(mesh, AdcircMesh) 97 | 98 | 99 | def test_hybrid(nodes, elements): 100 | mesh = AdcircMesh(nodes, elements) 101 | 102 | assert isinstance(mesh, AdcircMesh) 103 | 104 | 105 | def test_open(fort14): 106 | output_directory = OUTPUT_DIRECTORY / 'test_open' 107 | 108 | if not output_directory.exists(): 109 | output_directory.mkdir(parents=True, exist_ok=True) 110 | 111 | mesh_filename = output_directory / 'fort.14' 112 | 113 | with open(mesh_filename, 'w') as temporary_file: 114 | temporary_file.write(fort14) 115 | 116 | mesh = AdcircMesh.open(mesh_filename) 117 | 118 | assert isinstance(mesh, AdcircMesh) 119 | 120 | 121 | def test_make_plot(nodes, elements, mocker): 122 | mocker.patch('matplotlib.pyplot.show') 123 | 124 | mesh = AdcircMesh(nodes, elements) 125 | mesh.make_plot( 126 | show=True, extent=[0, 1, 0, 1], title='test', cbar_label='elevation [m]', vmax=0.0, 127 | ) 128 | 129 | assert 
isinstance(mesh, AdcircMesh) 130 | 131 | 132 | def test_make_plot_wet_only(wet_nodes, wet_elements, mocker): 133 | mesh = AdcircMesh(wet_nodes, wet_elements) 134 | 135 | mocker.patch('matplotlib.pyplot.show') 136 | mesh.make_plot() 137 | 138 | assert isinstance(mesh, AdcircMesh) 139 | 140 | 141 | def test_write(nodes, elements): 142 | output_directory = OUTPUT_DIRECTORY / 'test_write' 143 | 144 | if not output_directory.exists(): 145 | output_directory.mkdir(parents=True, exist_ok=True) 146 | 147 | mesh = AdcircMesh(nodes, elements) 148 | 149 | mesh.write(output_directory / 'test_AdcircMesh.gr3', overwrite=True) 150 | mesh.write(output_directory / 'test_AdcircMesh.2dm', format='2dm', overwrite=True) 151 | 152 | with pytest.raises(ValueError): 153 | mesh.write(output_directory / 'test_AdcircMesh.txt', format='txt', overwrite=True) 154 | 155 | 156 | def test_triplot(nodes, elements, boundaries, mocker): 157 | mesh = AdcircMesh(nodes, elements, boundaries=boundaries) 158 | 159 | mocker.patch('matplotlib.pyplot.show') 160 | mesh.triplot() 161 | 162 | 163 | def test_make_plot_flat_domain(nodes, elements, boundaries, mocker): 164 | nodes = {id: (*coord[:2], 0.0) for id, coord in nodes.items()} 165 | mesh = AdcircMesh(nodes, elements, boundaries=boundaries) 166 | 167 | mocker.patch('matplotlib.pyplot.show') 168 | mesh.make_plot() 169 | 170 | 171 | def test_mesh_equality(nodes, elements, boundaries, wet_nodes, wet_elements): 172 | mesh_1 = AdcircMesh(nodes, elements, boundaries=boundaries) 173 | mesh_2 = AdcircMesh(nodes, elements, boundaries=boundaries) 174 | mesh_3 = mesh_1 175 | mesh_4 = copy(mesh_1) 176 | mesh_5 = AdcircMesh(wet_nodes, wet_elements) 177 | mesh_6 = AdcircMesh(nodes, elements) 178 | 179 | assert mesh_1 is not mesh_2 180 | assert mesh_1 is mesh_3 181 | assert mesh_1 is not mesh_4 182 | 183 | assert mesh_1 == mesh_2 184 | assert mesh_1 == mesh_3 185 | assert mesh_1 == mesh_4 186 | assert mesh_1 != mesh_5 187 | assert mesh_1 != mesh_6 188 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import pytest 4 | 5 | from adcircpy.mesh import AdcircMesh 6 | 7 | # noinspection PyUnresolvedReferences 8 | from tests import shinnecock_mesh_directory 9 | 10 | 11 | def test_mesh_make_plot(shinnecock_mesh_directory): 12 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 13 | mesh.make_plot() 14 | 15 | 16 | def test_mesh_get_land_boundaries(shinnecock_mesh_directory): 17 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 18 | mesh.land_boundaries.gdf 19 | 20 | 21 | def test_mesh_get_ocean_boundaries(shinnecock_mesh_directory): 22 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 23 | mesh.ocean_boundaries.gdf 24 | -------------------------------------------------------------------------------- /tests/test_best_track_run.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import shutil 4 | import sys 5 | 6 | import pytest 7 | 8 | from adcircpy.cmd import best_track_run 9 | 10 | # noinspection PyUnresolvedReferences 11 | from tests import ( 12 | check_reference_directory, 13 | INPUT_DIRECTORY, 14 | OUTPUT_DIRECTORY, 15 | REFERENCE_DIRECTORY, 16 | shinnecock_mesh_directory, 17 | ) 18 | 19 | 20 | @pytest.mark.skipif(sys.version_info < (3, 7), reason='test fails on Python < 3.7') 21 | def test_best_track_run(shinnecock_mesh_directory, mocker): 22 | input_directory = INPUT_DIRECTORY / 'test_best_track_run' 23 | output_directory = OUTPUT_DIRECTORY / 'test_best_track_run' 24 | reference_directory = REFERENCE_DIRECTORY / 'test_best_track_run' 25 | 26 | if not output_directory.exists(): 27 | output_directory.mkdir(parents=True, exist_ok=True) 28 | 29 | cmd = [ 30 | 'best_track_run', 31 | f'{shinnecock_mesh_directory / "fort.14"}', 32 | 'florence2018', 33 | '--spinup-days=0.5', 34 | '--crs=EPSG:4326', 35 | f'--output-directory={str(output_directory)}', 36 | '--constituents=all', 37 | '--overwrite', 38 | '--timestep=10.', 39 | '--tau0-gen', 40 | f'--stations-file={input_directory / "stations.txt"}', 41 | '--elev-stat=6.', 42 | '--run-days=0.5', 43 | '--nproc=2', 44 | ] 45 | if shutil.which('padcirc') is None: 46 | if shutil.which('adcirc') is not None: 47 | cmd.append('--nproc=1') 48 | else: 49 | cmd.append('--skip-run') 50 | mocker.patch('sys.argv', cmd) 51 | 52 | best_track_run.main() 53 | 54 | check_reference_directory( 55 | output_directory, reference_directory, skip_lines={'fort.15': [0, -1]} 56 | ) 57 | -------------------------------------------------------------------------------- /tests/test_besttrack.py: -------------------------------------------------------------------------------- 1 | # ! /usr/bin/env python 2 | 3 | from adcircpy.forcing.winds.best_track import BestTrackForcing 4 | from tests import INPUT_DIRECTORY 5 | 6 | 7 | def test_plot_besttrack(mocker): 8 | input_directory = INPUT_DIRECTORY / 'test_plot_besttrack' 9 | 10 | best_track = BestTrackForcing.from_fort22( 11 | input_directory / 'florence2018_atcf.trk', nws=8, 12 | ) 13 | 14 | mocker.patch('matplotlib.pyplot.show') 15 | best_track.plot_track(coastline=False) 16 | -------------------------------------------------------------------------------- /tests/test_configuration.py: -------------------------------------------------------------------------------- 1 | # ! 
/usr/bin/env python 2 | from copy import copy 3 | from datetime import datetime, timedelta 4 | import sys 5 | 6 | import pytest 7 | 8 | from adcircpy import AdcircMesh, AdcircRun 9 | from adcircpy.forcing.waves.ww3 import WaveWatch3DataForcing 10 | from adcircpy.forcing.winds.atmesh import AtmosphericMeshForcing 11 | from adcircpy.fort15 import StationType 12 | from adcircpy.server import SlurmConfig 13 | from adcircpy.server.driver_file import DriverFile 14 | 15 | # noinspection PyUnresolvedReferences 16 | from tests import ( 17 | check_reference_directory, 18 | INPUT_DIRECTORY, 19 | OUTPUT_DIRECTORY, 20 | REFERENCE_DIRECTORY, 21 | shinnecock_mesh_directory, 22 | ) 23 | 24 | 25 | def test_slurm_driver(shinnecock_mesh_directory): 26 | output_directory = OUTPUT_DIRECTORY / 'test_slurm_driver' 27 | reference_directory = REFERENCE_DIRECTORY / 'test_slurm_driver' 28 | 29 | if not output_directory.exists(): 30 | output_directory.mkdir(parents=True, exist_ok=True) 31 | 32 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 33 | 34 | slurm = SlurmConfig( 35 | account='account', 36 | ntasks=1000, 37 | run_name='adcircpy/tests/test_configuration.py', 38 | partition='partition', 39 | walltime=timedelta(hours=8), 40 | mail_type='all', 41 | mail_user='example@email.gov', 42 | log_filename='test_configuration.log', 43 | modules=['intel/2020', 'impi/2020', 'netcdf/4.7.2-parallel'], 44 | path_prefix='$HOME/adcirc/build', 45 | ) 46 | driver = AdcircRun( 47 | mesh=mesh, 48 | start_date=datetime.now(), 49 | end_date=timedelta(days=7), 50 | spinup_time=timedelta(days=5), 51 | server_config=slurm, 52 | ) 53 | DriverFile(driver).write(output_directory / 'slurm.job', overwrite=True) 54 | 55 | check_reference_directory(output_directory, reference_directory) 56 | 57 | 58 | @pytest.mark.skipif(sys.version_info < (3, 7), reason='test fails on Python < 3.7') 59 | def test_configuration(shinnecock_mesh_directory): 60 | output_directory = OUTPUT_DIRECTORY / 'test_configuration' 61 | reference_directory = REFERENCE_DIRECTORY / 'test_configuration' 62 | 63 | if not output_directory.exists(): 64 | output_directory.mkdir(parents=True, exist_ok=True) 65 | 66 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 67 | 68 | spinup_time = timedelta(days=2) 69 | start_date = datetime(2015, 12, 14) + spinup_time 70 | end_date = start_date + timedelta(days=3) 71 | 72 | wind_forcing = AtmosphericMeshForcing( 73 | filename='Wind_HWRF_SANDY_Nov2018_ExtendedSmoothT.nc', nws=17, interval_seconds=3600, 74 | ) 75 | wave_forcing = WaveWatch3DataForcing( 76 | filename='ww3.HWRF.NOV2018.2012_sxy.nc', nrs=5, interval_seconds=3600, 77 | ) 78 | 79 | mesh.add_forcing(wind_forcing) 80 | mesh.add_forcing(wave_forcing) 81 | 82 | driver = AdcircRun(mesh, start_date, end_date, spinup_time,) 83 | 84 | driver.write(output_directory, overwrite=True, nproc=2) 85 | 86 | check_reference_directory( 87 | output_directory, reference_directory, skip_lines={'fort.15': [0]}, 88 | ) 89 | 90 | 91 | def test_import_stations(shinnecock_mesh_directory): 92 | input_directory = INPUT_DIRECTORY / 'test_import_stations' 93 | 94 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 95 | 96 | spinup_time = timedelta(days=2) 97 | start_date = datetime(2015, 12, 14) + spinup_time 98 | end_date = start_date + timedelta(days=3) 99 | 100 | driver_1 = AdcircRun(copy(mesh), start_date, end_date, spinup_time) 101 | driver_1.import_stations(input_directory / 'stations_1.txt', only_within=True) 102 | 103 | driver_2 = 
AdcircRun(copy(mesh), start_date, end_date, spinup_time) 104 | driver_2.import_stations(input_directory / 'stations_2.txt', only_within=True) 105 | 106 | driver_3 = AdcircRun(copy(mesh), start_date, end_date, spinup_time) 107 | driver_3.import_stations(input_directory / 'stations_3.txt', only_within=True) 108 | 109 | driver_4 = AdcircRun(copy(mesh), start_date, end_date, spinup_time) 110 | driver_4.import_stations( 111 | input_directory / 'stations_3.txt', 112 | station_types=['elevation', 'NSTAC', StationType.METEOROLOGICAL], 113 | ) 114 | 115 | assert driver_1.elevation_stations == {'8512769': (-72.5772, 40.823)} 116 | assert driver_1.velocity_stations == {} 117 | assert driver_1.concentration_stations == {} 118 | assert driver_1.meteorological_stations == {} 119 | 120 | assert driver_2.elevation_stations == {'8512769': (-72.5772, 40.823)} 121 | assert driver_2.velocity_stations == {'8512769': (-72.5772, 40.823)} 122 | assert driver_2.concentration_stations == {} 123 | assert driver_2.meteorological_stations == {} 124 | 125 | assert driver_3.elevation_stations == {'8512769': (-72.5772, 40.823)} 126 | assert driver_3.velocity_stations == {'8512769': (-72.5772, 40.823)} 127 | assert driver_3.concentration_stations == {} 128 | assert driver_3.meteorological_stations == {} 129 | 130 | assert driver_4.elevation_stations == {'8512769': (-72.5772, 40.823)} 131 | assert driver_4.velocity_stations == {} 132 | assert driver_4.concentration_stations == {'8512769': (-72.5772, 40.823)} 133 | assert driver_4.meteorological_stations == {} 134 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import sys 3 | 4 | import pytest 5 | 6 | from tests import ( 7 | check_reference_directory, 8 | DATA_DIRECTORY, 9 | OUTPUT_DIRECTORY, 10 | REFERENCE_DIRECTORY, 11 | ) 12 | 13 | EXAMPLES_DIRECTORY = DATA_DIRECTORY.parent.parent / 'examples' 14 | 15 | 16 | @pytest.mark.skip 17 | def test_example_1(): 18 | reference_directory = REFERENCE_DIRECTORY / 'example_1' 19 | output_directory = OUTPUT_DIRECTORY / 'example_1' 20 | 21 | if output_directory.exists(): 22 | shutil.rmtree(output_directory) 23 | output_directory.mkdir(exist_ok=True, parents=True) 24 | 25 | exec(open(EXAMPLES_DIRECTORY / 'example_1.py').read()) 26 | 27 | check_reference_directory( 28 | output_directory, reference_directory, skip_lines={'fort.15': [0, -2]} 29 | ) 30 | 31 | 32 | @pytest.mark.skip 33 | def test_example_2(): 34 | reference_directory = REFERENCE_DIRECTORY / 'example_2' 35 | output_directory = OUTPUT_DIRECTORY / 'example_2' 36 | 37 | if output_directory.exists(): 38 | shutil.rmtree(output_directory) 39 | output_directory.mkdir(exist_ok=True, parents=True) 40 | 41 | exec(open(EXAMPLES_DIRECTORY / 'example_2.py').read()) 42 | 43 | check_reference_directory( 44 | output_directory, reference_directory, skip_lines={'fort.15': [0, -2]} 45 | ) 46 | 47 | 48 | @pytest.mark.skipif(sys.version_info < (3, 7), reason='test fails on Python < 3.7') 49 | def test_example_3(): 50 | reference_directory = REFERENCE_DIRECTORY / 'example_3' 51 | output_directory = OUTPUT_DIRECTORY / 'example_3' 52 | 53 | if output_directory.exists(): 54 | shutil.rmtree(output_directory) 55 | output_directory.mkdir(exist_ok=True, parents=True) 56 | 57 | exec(open(EXAMPLES_DIRECTORY / 'example_3.py').read()) 58 | 59 | check_reference_directory( 60 | output_directory, reference_directory, skip_lines={'fort.15': [0, -2]} 61 
| ) 62 | 63 | 64 | @pytest.mark.skip 65 | def test_example_4(): 66 | reference_directory = REFERENCE_DIRECTORY / 'example_4' 67 | output_directory = OUTPUT_DIRECTORY / 'example_4' 68 | 69 | if output_directory.exists(): 70 | shutil.rmtree(output_directory) 71 | output_directory.mkdir(exist_ok=True, parents=True) 72 | 73 | exec(open(EXAMPLES_DIRECTORY / 'example_4.py').read()) 74 | 75 | check_reference_directory( 76 | output_directory, 77 | reference_directory, 78 | skip_lines={'fort.15.coldstart': [0, -2], 'fort.15.hotstart': [0, -3]}, 79 | ) 80 | -------------------------------------------------------------------------------- /tests/test_stations.py: -------------------------------------------------------------------------------- 1 | from stormevents.nhc import VortexTrack 2 | 3 | from adcircpy.fort15 import Stations 4 | from tests import check_reference_directory, OUTPUT_DIRECTORY, REFERENCE_DIRECTORY 5 | 6 | 7 | def test_Stations(): 8 | reference_directory = REFERENCE_DIRECTORY / 'test_Stations' 9 | output_directory = OUTPUT_DIRECTORY / 'test_Stations' 10 | 11 | if not output_directory.exists(): 12 | output_directory.mkdir(parents=True, exist_ok=True) 13 | 14 | stations_1 = Stations.within_wind_swath(track=VortexTrack('florence2018')) 15 | stations_2 = Stations.within_wind_swath( 16 | track=VortexTrack('florence2018'), station_types=['ELEVATION'] 17 | ) 18 | stations_3 = Stations.within_wind_swath( 19 | track=VortexTrack('florence2018'), 20 | station_types={'ELEVATION': [8658120, 8670870, 8652587], 'VELOCITY': [8658120]}, 21 | ) 22 | 23 | stations_1.write(output_directory / 'stations_1.fort.15', overwrite=True) 24 | stations_2.write(output_directory / 'stations_2.fort.15', overwrite=True) 25 | stations_3.write(output_directory / 'stations_3.fort.15', overwrite=True) 26 | 27 | check_reference_directory(output_directory, reference_directory) 28 | -------------------------------------------------------------------------------- /tests/test_tidal_run.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | from datetime import datetime, timedelta 4 | import shutil 5 | import sys 6 | 7 | import pytest 8 | 9 | from adcircpy.cmd import tidal_run 10 | from adcircpy.driver import AdcircRun 11 | from adcircpy.forcing import Tides 12 | from adcircpy.mesh import AdcircMesh 13 | 14 | # noinspection PyUnresolvedReferences 15 | from tests import ( 16 | check_reference_directory, 17 | OUTPUT_DIRECTORY, 18 | REFERENCE_DIRECTORY, 19 | shinnecock_mesh_directory, 20 | ) 21 | 22 | 23 | @pytest.mark.skipif(sys.version_info < (3, 7), reason='test fails on Python < 3.7') 24 | def test_tidal_run(shinnecock_mesh_directory): 25 | output_directory = OUTPUT_DIRECTORY / 'test_tidal_run' 26 | reference_directory = REFERENCE_DIRECTORY / 'test_tidal_run' 27 | 28 | if not output_directory.exists(): 29 | output_directory.mkdir(parents=True, exist_ok=True) 30 | 31 | mesh = AdcircMesh.open(shinnecock_mesh_directory / 'fort.14', crs=4326) 32 | 33 | tidal_forcing = Tides() 34 | tidal_forcing.use_all() 35 | 36 | mesh.add_forcing(tidal_forcing) 37 | now = datetime.utcnow() 38 | driver = AdcircRun( 39 | mesh, 40 | start_date=now, 41 | end_date=now + timedelta(days=0.5), 42 | spinup_time=timedelta(days=0.5), 43 | ) 44 | driver.timestep = 10.0 45 | 46 | if shutil.which('padcirc') is not None: 47 | driver.run(output_directory, nproc=2, overwrite=True) 48 | else: 49 | driver.write(output_directory, nproc=2, overwrite=True) 50 | 51 | check_reference_directory( 52 | output_directory, 53 | reference_directory, 54 | skip_lines={'fort.15': [0, *range(32, 47, 2), *range(49, 64, 2), -2]}, 55 | ) 56 | 57 | 58 | @pytest.mark.skipif(sys.version_info < (3, 7), reason='test fails on Python < 3.7') 59 | def test_tidal_run_cli(shinnecock_mesh_directory, mocker): 60 | output_directory = OUTPUT_DIRECTORY / 'test_tidal_run_cli' 61 | reference_directory = REFERENCE_DIRECTORY / 'test_tidal_run_cli' 62 | 63 | if not output_directory.exists(): 64 | output_directory.mkdir(parents=True, exist_ok=True) 65 | 66 | cmd = [ 67 | 'tidal_run', 68 | f'{shinnecock_mesh_directory / "fort.14"}', 69 | f"{datetime.strftime(datetime.utcnow(), '%Y-%m-%dT%H:%M:%S')}", 70 | '0.5', 71 | '--spinup-days=0.5', 72 | '--crs=EPSG:4326', 73 | f'--output-directory={output_directory}', 74 | '--constituents=all', 75 | '--overwrite', 76 | '--timestep=10.', 77 | '--nproc=2', 78 | ] 79 | if shutil.which('padcirc') is None: 80 | if shutil.which('adcirc') is not None: 81 | cmd.append('--nproc=1') 82 | else: 83 | cmd.append('--skip-run') 84 | mocker.patch('sys.argv', cmd) 85 | 86 | tidal_run.main() 87 | 88 | check_reference_directory( 89 | output_directory, 90 | reference_directory, 91 | skip_lines={'fort.15': [0, *range(32, 47, 2), *range(49, 64, 2), -2]}, 92 | ) 93 | -------------------------------------------------------------------------------- /tests/test_tide_gen.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | from adcircpy.cmd import tide_gen 4 | 5 | # noinspection PyUnresolvedReferences 6 | from tests import ( 7 | check_reference_directory, 8 | OUTPUT_DIRECTORY, 9 | REFERENCE_DIRECTORY, 10 | shinnecock_mesh_directory, 11 | ) 12 | 13 | 14 | def test_tide_gen(shinnecock_mesh_directory, mocker): 15 | output_directory = OUTPUT_DIRECTORY / 'test_tide_gen' 16 | reference_directory = REFERENCE_DIRECTORY / 'test_tide_gen' 17 | 18 | if not output_directory.exists(): 19 | output_directory.mkdir(parents=True, exist_ok=True) 20 | 21 | cmd = [ 22 | 'tide_gen', 23 | f'{shinnecock_mesh_directory / "fort.14"}', 24 | '2021-02-26T00:00:00', 25 | '15', 26 | '--mesh-crs=epsg:4326', 27 | f'--output-file={output_directory / "fort.15"}', 28 | ] 29 | mocker.patch('sys.argv', cmd) 30 | 31 | tide_gen.main() 32 | 33 | check_reference_directory( 34 | output_directory, reference_directory, skip_lines={'fort.15': [0, -1]} 35 | ) 36 | --------------------------------------------------------------------------------
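Taken together, the pytest modules above double as the most compact usage examples of the adcircpy API in this repository. The sketch below is an editor's condensation of the tidal-run workflow exercised by tests/test_tidal_run.py, not a file from the repository: the fort.14 path and the output directory are placeholder assumptions, while every call it makes (AdcircMesh.open, Tides().use_all(), mesh.add_forcing, AdcircRun, driver.timestep, driver.write) appears verbatim in that test.

    #!/usr/bin/env python
    # minimal sketch of the workflow in tests/test_tidal_run.py;
    # 'fort.14' and 'tidal_run_output' are placeholder paths, not repository files
    from datetime import datetime, timedelta
    from pathlib import Path

    from adcircpy import AdcircMesh, AdcircRun
    from adcircpy.forcing import Tides

    mesh_path = Path('fort.14')                 # assumed location of an ADCIRC mesh
    output_directory = Path('tidal_run_output') # assumed writable output directory

    # open the mesh in geographic coordinates, as the tests do (crs=4326)
    mesh = AdcircMesh.open(mesh_path, crs=4326)

    # request all available tidal constituents and attach the forcing to the mesh
    tidal_forcing = Tides()
    tidal_forcing.use_all()
    mesh.add_forcing(tidal_forcing)

    # half-day spinup followed by a half-day run with a 10 s timestep
    now = datetime.utcnow()
    driver = AdcircRun(
        mesh,
        start_date=now,
        end_date=now + timedelta(days=0.5),
        spinup_time=timedelta(days=0.5),
    )
    driver.timestep = 10.0

    # write fort.15.coldstart, fort.15.hotstart, and a driver script for 2 processes
    driver.write(output_directory, nproc=2, overwrite=True)

As in the test itself, `driver.run(output_directory, nproc=2, overwrite=True)` can be used instead of `driver.write(...)` when a parallel ADCIRC executable (`padcirc`) is available on the PATH, so that the generated configuration is executed immediately after it is written.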