├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── nos-s100_conda_env.txt
├── ofs-ops.py
└── ofs.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .idea
3 | *.shp
4 | *.prj
5 | *.shx
6 | *.dbf
7 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7-alpine
2 |
3 | ENV USER_PY=pyuser
4 | ENV PATH_S100=/opt/s100
5 | ENV PATH_S100_OFS=$PATH_S100/ofs
6 | ENV PATH_S100_S100PY=$PATH_S100/s100py
7 | ENV PATH_S100_THYME=$PATH_S100/thyme
8 | ENV PATH_S100_RUN=$PATH_S100/run
9 | ENV PATH_S100_RUN_SHP=$PATH_S100_RUN/shapefiles
10 | ENV PATH_S100_RUN_NETCDF=$PATH_S100_RUN/netcdf
11 | ENV PATH_S100_RUN_HDF5=$PATH_S100_RUN/hdf5
12 | ENV HDF5_VERSION="1.10.4"
13 | ENV HDF5_PREFIX="/usr/local"
14 | ENV HDF5_DIR=$HDF5_PREFIX
15 | ENV NETCDF_VERSION="4.6.2"
16 | ENV NETCDF_PREFIX="/usr/local"
17 | ENV GDAL_VERSION="2.4.1"
18 | ENV GDAL_PREFIX="/usr/local"
19 |
20 | WORKDIR /tmp/setup
21 |
22 | RUN set -ex; \
23 | echo "Updating Alpine packages" \
24 | && apk --no-cache upgrade; \
25 | \
26 | echo "Adding edge main/testing repositories (required for installing hdf5)" \
27 | && echo "http://dl-cdn.alpinelinux.org/alpine/edge/main" >> /etc/apk/repositories \
28 | && echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing" >> /etc/apk/repositories;\
29 | \
30 | echo "Installing required system packages" \
31 | && apk --no-cache add --virtual=hdf5_deps zlib; \
32 | \
33 | echo "Installing build dependency packages" \
34 | && apk --no-cache add --virtual=build_deps gcc g++ make m4 musl-dev zlib-dev; \
35 | \
36 | echo "Downloading HDF5 source code" \
37 | && hdf5_archive=hdf5-${HDF5_VERSION}.tar.bz2 \
38 | && hdf5_src_dir=hdf5-${HDF5_VERSION} \
39 | && wget -q -O ${hdf5_archive} "https://www.hdfgroup.org/package/source-bzip-2/?wpdmdl=13047" \
40 | && tar xf ${hdf5_archive} \
41 | && echo "Compiling/installing HDF5 native library" \
42 | && cd ${hdf5_src_dir} \
43 | # --enable-unsupported required when building high-level libs with thread safety config
44 | && ./configure --prefix=$HDF5_PREFIX --with-zlib=/usr/include,/lib --enable-threadsafe --enable-unsupported --with-pthread=/usr/include,/usr/lib \
45 | && make \
46 | && make install \
47 | && cd .. \
48 | && rm ${hdf5_archive} \
49 | && rm -r ${hdf5_src_dir}; \
50 | \
51 | echo "Downloading NetCDF source code" \
52 | && netcdf_archive=netcdf-v${NETCDF_VERSION}.tar.gz \
53 | && netcdf_src_dir=netcdf-c-${NETCDF_VERSION} \
54 | && wget -q -O ${netcdf_archive} "https://github.com/Unidata/netcdf-c/archive/v${NETCDF_VERSION}.tar.gz" \
55 | && tar xf ${netcdf_archive} \
56 | && echo "Compiling/installing NetCDF native library" \
57 | && cd ${netcdf_src_dir} \
58 | && CPPFLAGS='-I/usr/local/include -I/usr/include' LDFLAGS='-L/usr/local/lib -L/lib' ./configure --prefix=${NETCDF_PREFIX} --disable-dap \
59 | && make \
60 | && make install \
61 | && cd .. \
62 | && rm ${netcdf_archive} \
63 | && rm -r ${netcdf_src_dir}; \
64 | \
65 | echo "Removing build dependency packages" \
66 | && apk --no-cache del --purge -r build_deps;
67 |
68 | # GDAL & Python prerequisites
69 | RUN set -ex; \
70 | echo "Installing required system packages" \
71 | # geos required by shapely & gdal
72 | # lapack required for numpy/scipy
73 | && apk --no-cache add --virtual=gdal_deps geos proj4 zlib \
74 | && apk --no-cache add --virtual=py_deps lapack; \
75 | \
76 | echo "Installing build dependency packages" \
77 | && apk --no-cache add --virtual=build_deps \
78 | gcc g++ make m4 musl-dev cmake linux-headers \
79 | zlib-dev minizip-dev expat-dev uriparser-dev \
80 | gfortran lapack-dev geos-dev proj4-dev \
81 | openssl ca-certificates; \
82 | \
83 | echo "Upgrading pip" \
84 | && pip install --no-cache-dir -U pip; \
85 | \
86 | echo "Installing python dependencies" \
87 | # numpy must be installed first (separately) or scipy install won't work.
88 | && pip install --no-cache-dir numpy \
89 | && pip install --no-cache-dir shapely scipy netCDF4 h5py; \
90 | \
91 | echo "Downloading GDAL source code" \
92 | && gdal_archive=gdal-${GDAL_VERSION}.tar.gz \
93 | && gdal_src_dir=gdal-${GDAL_VERSION} \
94 | && wget -q -O ${gdal_archive} "https://github.com/OSGeo/gdal/archive/v${GDAL_VERSION}.tar.gz" \
95 | && tar xf ${gdal_archive} \
96 | && echo "Compiling/installing GDAL" \
97 | && cd ${gdal_src_dir}/gdal \
98 | && ./configure --prefix=$GDAL_PREFIX \
99 | #--with-libkml \
100 | --with-geos=/usr/bin/geos-config \
101 | --with-proj=/usr \
102 | --with-netcdf=$NETCDF_PREFIX \
103 | --with-hdf5=$HDF5_PREFIX \
104 | --with-python=yes \
105 | && make \
106 | && make install \
107 | && cd ../.. \
108 | && rm ${gdal_archive} \
109 | && rm -r ${gdal_src_dir}; \
110 | \
111 | echo "Removing build dependency packages" \
112 | && apk --no-cache del --purge -r build_deps;
113 |
114 |
115 | # Assuming 's100py.tar.gz' is an archive of the s100py repository contents
116 | COPY s100py.tar.gz /tmp/setup
117 |
118 | # Assuming 'thyme.tar.gz' is an archive of the thyme repository contents
119 | COPY thyme.tar.gz /tmp/setup
120 |
121 | # Assuming 'ofs.tar.gz' is an archive of the ofs_s100_hdf5 repository contents
122 | COPY ofs.tar.gz /tmp/setup
123 |
124 | # Assuming 'nos80k.zip' is a zip of the nos80k shapefile
125 | COPY nos80k.zip /tmp/setup
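126 | 
127 | # Example build/run, assuming the four archives above are present in the
128 | # build context (the image tag is illustrative):
129 | #   docker build -t nos-s100 .
130 | #   docker run -it --rm nos-s100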
126 |
127 |
128 | RUN set -ex; \
129 | echo "Installing build dependency packages" \
130 | && apk --no-cache add --virtual=build_deps \
131 | gcc g++ make m4 musl-dev; \
132 | \
133 | echo "Setting up run directories" \
134 | && mkdir -p $PATH_S100_RUN_SHP $PATH_S100_RUN_NETCDF $PATH_S100_RUN_HDF5 $PATH_S100_S100PY $PATH_S100_THYME $PATH_S100_OFS; \
135 | echo "Extracting packages/data" \
136 | && cd $PATH_S100_S100PY && tar xzvf /tmp/setup/s100py.tar.gz \
137 | && cd $PATH_S100_THYME && tar xzvf /tmp/setup/thyme.tar.gz \
138 | && cd $PATH_S100_OFS && tar xzvf /tmp/setup/ofs.tar.gz \
139 | && cd $PATH_S100_RUN_SHP && unzip /tmp/setup/nos80k.zip; \
140 | echo "Installing thyme/s100py python packages" \
141 | && pip install --no-cache-dir -e $PATH_S100_THYME \
142 | && pip install --no-cache-dir -e $PATH_S100_S100PY; \
143 | echo "Adding group/user [$USER_PY] & updating path ownership" \
144 | && addgroup -S $USER_PY && adduser -S $USER_PY -G $USER_PY \
145 | && chown -R $USER_PY:$USER_PY $PATH_S100; \
146 | echo "Patching /sbin/ldconfig to fix shapely (see https://serverfault.com/a/964693/399264)" \
147 | && sed -i '1 a \
148 | if [ "$1" = "-p" ]; then\n\
149 | # Hack to mimic GNU ldconfig s -p option, needed by ctypes, used by shapely\n\
150 | echo " libc.musl-x86_64.so.1 (libc6,x86-64) => /lib/libc.musl-x86_64.so.1"\n\
151 | exit 0\n\
152 | fi\' /sbin/ldconfig; \
153 | echo "Cleaning up packages" \
154 | && rm -f /tmp/setup/*.tar.gz; \
155 | \
156 | echo "Removing build dependency packages" \
157 | && apk --no-cache del --purge -r build_deps;
158 |
159 |
160 | WORKDIR $PATH_S100_RUN
161 | USER $USER_PY
162 | CMD ["/bin/sh"]
163 |
164 |
165 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | CC0 1.0 Universal
2 |
3 | Statement of Purpose
4 |
5 | The laws of most jurisdictions throughout the world automatically confer
6 | exclusive Copyright and Related Rights (defined below) upon the creator and
7 | subsequent owner(s) (each and all, an "owner") of an original work of
8 | authorship and/or a database (each, a "Work").
9 |
10 | Certain owners wish to permanently relinquish those rights to a Work for the
11 | purpose of contributing to a commons of creative, cultural and scientific
12 | works ("Commons") that the public can reliably and without fear of later
13 | claims of infringement build upon, modify, incorporate in other works, reuse
14 | and redistribute as freely as possible in any form whatsoever and for any
15 | purposes, including without limitation commercial purposes. These owners may
16 | contribute to the Commons to promote the ideal of a free culture and the
17 | further production of creative, cultural and scientific works, or to gain
18 | reputation or greater distribution for their Work in part through the use and
19 | efforts of others.
20 |
21 | For these and/or other purposes and motivations, and without any expectation
22 | of additional consideration or compensation, the person associating CC0 with a
23 | Work (the "Affirmer"), to the extent that he or she is an owner of Copyright
24 | and Related Rights in the Work, voluntarily elects to apply CC0 to the Work
25 | and publicly distribute the Work under its terms, with knowledge of his or her
26 | Copyright and Related Rights in the Work and the meaning and intended legal
27 | effect of CC0 on those rights.
28 |
29 | 1. Copyright and Related Rights. A Work made available under CC0 may be
30 | protected by copyright and related or neighboring rights ("Copyright and
31 | Related Rights"). Copyright and Related Rights include, but are not limited
32 | to, the following:
33 |
34 | i. the right to reproduce, adapt, distribute, perform, display, communicate,
35 | and translate a Work;
36 |
37 | ii. moral rights retained by the original author(s) and/or performer(s);
38 |
39 | iii. publicity and privacy rights pertaining to a person's image or likeness
40 | depicted in a Work;
41 |
42 | iv. rights protecting against unfair competition in regards to a Work,
43 | subject to the limitations in paragraph 4(a), below;
44 |
45 | v. rights protecting the extraction, dissemination, use and reuse of data in
46 | a Work;
47 |
48 | vi. database rights (such as those arising under Directive 96/9/EC of the
49 | European Parliament and of the Council of 11 March 1996 on the legal
50 | protection of databases, and under any national implementation thereof,
51 | including any amended or successor version of such directive); and
52 |
53 | vii. other similar, equivalent or corresponding rights throughout the world
54 | based on applicable law or treaty, and any national implementations thereof.
55 |
56 | 2. Waiver. To the greatest extent permitted by, but not in contravention of,
57 | applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and
58 | unconditionally waives, abandons, and surrenders all of Affirmer's Copyright
59 | and Related Rights and associated claims and causes of action, whether now
60 | known or unknown (including existing as well as future claims and causes of
61 | action), in the Work (i) in all territories worldwide, (ii) for the maximum
62 | duration provided by applicable law or treaty (including future time
63 | extensions), (iii) in any current or future medium and for any number of
64 | copies, and (iv) for any purpose whatsoever, including without limitation
65 | commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes
66 | the Waiver for the benefit of each member of the public at large and to the
67 | detriment of Affirmer's heirs and successors, fully intending that such Waiver
68 | shall not be subject to revocation, rescission, cancellation, termination, or
69 | any other legal or equitable action to disrupt the quiet enjoyment of the Work
70 | by the public as contemplated by Affirmer's express Statement of Purpose.
71 |
72 | 3. Public License Fallback. Should any part of the Waiver for any reason be
73 | judged legally invalid or ineffective under applicable law, then the Waiver
74 | shall be preserved to the maximum extent permitted taking into account
75 | Affirmer's express Statement of Purpose. In addition, to the extent the Waiver
76 | is so judged Affirmer hereby grants to each affected person a royalty-free,
77 | non transferable, non sublicensable, non exclusive, irrevocable and
78 | unconditional license to exercise Affirmer's Copyright and Related Rights in
79 | the Work (i) in all territories worldwide, (ii) for the maximum duration
80 | provided by applicable law or treaty (including future time extensions), (iii)
81 | in any current or future medium and for any number of copies, and (iv) for any
82 | purpose whatsoever, including without limitation commercial, advertising or
83 | promotional purposes (the "License"). The License shall be deemed effective as
84 | of the date CC0 was applied by Affirmer to the Work. Should any part of the
85 | License for any reason be judged legally invalid or ineffective under
86 | applicable law, such partial invalidity or ineffectiveness shall not
87 | invalidate the remainder of the License, and in such case Affirmer hereby
88 | affirms that he or she will not (i) exercise any of his or her remaining
89 | Copyright and Related Rights in the Work or (ii) assert any associated claims
90 | and causes of action with respect to the Work, in either case contrary to
91 | Affirmer's express Statement of Purpose.
92 |
93 | 4. Limitations and Disclaimers.
94 |
95 | a. No trademark or patent rights held by Affirmer are waived, abandoned,
96 | surrendered, licensed or otherwise affected by this document.
97 |
98 | b. Affirmer offers the Work as-is and makes no representations or warranties
99 | of any kind concerning the Work, express, implied, statutory or otherwise,
100 | including without limitation warranties of title, merchantability, fitness
101 | for a particular purpose, non infringement, or the absence of latent or
102 | other defects, accuracy, or the present or absence of errors, whether or not
103 | discoverable, all to the greatest extent permissible under applicable law.
104 |
105 | c. Affirmer disclaims responsibility for clearing rights of other persons
106 | that may apply to the Work or any use thereof, including without limitation
107 | any person's Copyright and Related Rights in the Work. Further, Affirmer
108 | disclaims responsibility for obtaining any necessary consents, permissions
109 | or other rights required for any use of the Work.
110 |
111 | d. Affirmer understands and acknowledges that Creative Commons is not a
112 | party to this document and has no duty or obligation with respect to this
113 | CC0 or use of the Work.
114 |
115 | For more information, please see
116 | <http://creativecommons.org/publicdomain/zero/1.0/>
117 |
118 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | NOS S-100 Toolkit
2 | =================
3 |
4 | A Python toolkit for downloading National Oceanic and Atmospheric
5 | Administration (NOAA)/National Ocean Service (NOS) datasets and encoding
6 | in International Hydrographic Organization (IHO)
7 | [S-100](http://s100.iho.int/S100/) data formats.
8 |
9 | Overview
10 | --------
11 |
12 | These scripts download NOS Ocean Model NetCDF Forecast files hosted on
13 | the NOAA Operational Model Archive and Distribution System
14 | [NOMADS](ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/nos/prod/) and on
15 | the NOAA CO-OPS
16 | [THREDDS](https://opendap.co-ops.nos.noaa.gov/thredds/catalog.html) Data
17 | Server and use the [thyme](https://github.com/noaa-ocs-modeling/thyme)
18 | and [s100py](https://github.com/noaa-ocs-s100/s100py) libraries to
19 | interpolate the models' surface current forecasts to a regular grid and encode
20 | the result in S-100/S-111 compliant HDF5 files.
21 |
22 | S-111 is an IHO standard that defines formats for storing and
23 | transmitting surface water current data and metadata, and is designed
24 | for interoperability with Electronic Navigational Charts.
25 |
26 | Features
27 | --------
28 |
29 | - Download NOAA/NOS Operational Forecast System (OFS) NetCDF files
30 | - Given a target resolution and optionally a subgrid shapefile,
31 | generate a regular grid definition and save it to a thyme Model Index
32 | NetCDF file
33 | - Process a full OFS forecast run (0 to 48+ hours) and encode the
34 | result into IHO S-111 format (a single HDF5 file containing all
35 | forecast projections) on a regular grid
36 | - Chop S-111 output into smaller sub-grids to reduce file sizes
37 | - Calculate water currents at any depth-below-surface
38 |
39 | Index File Data/Functionality
40 | -----------------------------
41 |
42 | For information about the model index files used by these scripts, see
43 | the [thyme](https://github.com/noaa-ocs-modeling/thyme) documentation.
44 |
45 | Supported Models
46 | ----------------
47 |
48 | These scripts presently support the following hydrodynamic modeling
49 | systems:
50 |
51 | - NCEP Real-Time Ocean Forecast System (RTOFS)
52 | - NOS Chesapeake Bay Operational Forecast System (CBOFS)
53 | - NOS Delaware Bay Operational Forecast System (DBOFS)
54 | - NOS Gulf of Maine Operational Forecast System (GoMOFS)
55 | - NOS Lake Erie Operational Forecast System (LEOFS)
56 | - NOS Lake Huron Operational Forecast System (LHOFS)
57 | - NOS Lake Michigan & Huron Operational Forecast System (LMHOFS)
58 | - NOS Lake Michigan Operational Forecast System (LMOFS)
59 | - NOS Lake Ontario Operational Forecast System (LOOFS)
60 | - NOS Lake Superior Operational Forecast System (LSOFS)
61 | - NOS New York/New Jersey Harbor Operational Forecast System (NYOFS)
62 | - NOS Northeast Gulf of Mexico Operational Forecast System (NEGOFS)
63 | - NOS Northern Gulf of Mexico Operational Forecast System (NGOFS)
64 | - NOS Northwest Gulf of Mexico Operational Forecast System (NWGOFS)
65 | - NOS San Francisco Bay Operational Forecast System (SFBOFS)
66 | - NOS Tampa Bay Operational Forecast System (TBOFS)
67 | - NOS West Coast Operational Forecast System (WCOFS)
68 |
69 | Visit [NOS Operational Forecast
70 | Systems](https://tidesandcurrents.noaa.gov/models.html) for more
71 | information.
72 |
73 | Requirements
74 | ------------
75 |
76 | This codebase relies on the following Python packages:
77 |
78 | - [s100py](https://github.com/noaa-ocs-s100/s100py)
79 | - [thyme](https://github.com/noaa-ocs-modeling/thyme)
80 |
81 | However, the above packages require the GDAL Python bindings to be
82 | present, and GDAL usually can't be installed with a plain `pip install gdal`.
83 | We recommend installing GDAL either through a package manager (e.g.
84 | `conda`, `apt`, `yum`, `pacman`) or by using the preconfigured conda
85 | environment file provided in this repository, which installs all
86 | dependencies quickly. To go this route, you must first install Miniconda.
87 |
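88 | For example, on a conda-based setup, GDAL and its Python bindings can
89 | typically be installed from the conda-forge channel (a minimal sketch;
90 | exact channel and version resolution is left to conda):
91 | 
92 | ```bash
93 | conda install -c conda-forge gdal
94 | python -c "from osgeo import gdal; print(gdal.__version__)"
95 | ```
96 | 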
88 | For instructions on downloading/installing Miniconda, please see the
89 | [official documentation](https://docs.conda.io/en/latest/miniconda.html).
90 |
91 | Instructions for configuring your conda environment can be found in the
92 | following sections.
93 |
94 | Configuration
95 | -------------
96 |
97 | - Create a parent directory that will contain the code and all other
98 | files that will be used or generated by this package. Ensure that
99 | the directory has appropriate user/group ownership and
100 | read/write/execute permissions settings that will allow the code to
101 | be checked out and run. For simplicity, this example assumes that
102 | directory is under the user's home directory.
103 |
104 | ```bash
105 | mkdir ~/nos-s100-toolkit
106 | cd ~/nos-s100-toolkit
107 | ```
108 |
109 | - Create a subdirectory that will store the NetCDF index files
110 |
111 | ```bash
112 | mkdir ~/nos-s100-toolkit/indexes
113 | ```
114 |
115 | - Copy existing index files, if any, to this directory
116 |
117 | - Create a subdirectory that will store downloaded model NetCDF files
118 |
119 | ```bash
120 | mkdir ~/nos-s100-toolkit/netcdf
121 | ```
122 |
123 | - Create a subdirectory that will store generated S-111 HDF5 output
124 | files
125 |
126 | ```bash
127 | mkdir ~/nos-s100-toolkit/hdf5
128 | ```
129 |
130 | - Create a subdirectory that will store shoreline and/or subgrid
131 | shapefiles. This is only required when generating new NetCDF index
132 | files. Make sure that any shapefiles being used have coverage for
133 | the model domain(s) you will be working with.
134 |
135 | ```bash
136 | mkdir ~/nos-s100-toolkit/shp
137 | ```
138 |
139 | - Copy the shapefiles, if any, to this directory.
140 |
141 | - Clone the repository to a new `src` subdirectory:
142 |
143 | ```bash
144 | git clone https://github.com/noaa-ocs-s100/nos-s100-toolkit ~/nos-s100-toolkit/src
145 | ```
146 |
147 | - Ensure that `ofs.py` is executable. If not, run:
148 | 
149 | ```bash
150 | chmod a+x ~/nos-s100-toolkit/src/ofs.py
151 | ```
152 |
153 | - Ensure the new `src` directory is in your `$PATH` environment
154 | variable:
155 |
156 | ```bash
157 | export PATH=$PATH:~/nos-s100-toolkit/src
158 | ```
159 |
160 | - Create and configure a new conda environment from the conda
161 | environment file supplied with the code (this will download and
162 | install all required packages):
163 |
164 | ```bash
165 | conda create --name nos-s100 --file ~/nos-s100-toolkit/src/nos-s100_conda_env.txt
166 | ```
167 |
168 | Execution
169 | ---------
170 |
171 | - Activate your new conda environment (once activated, conda prepends
172 | the environment name `nos-s100` onto your system command prompt), then install `s100py` into it:
173 | ```bash
174 | conda activate nos-s100
175 | pip install s100py
176 | ```
177 | - To print detailed usage information:
178 | ```bash
179 | cd ~/nos-s100-toolkit/src
180 | python ofs.py -h
181 | ```
182 | - The following examples describe the steps to create different S-111
183 | data coding formats. For more information about S-111 data coding
184 | formats, see the [s100py](https://github.com/noaa-ocs-s100/s100py)
185 | documentation.
186 |
187 | **To create regular-grid S-111 files (Coding Format 2)**
188 |
189 | - Generate an index (grid definition) file for a particular model at a
190 | particular resolution:
191 |
192 | > **WARNING:** Keep in mind that index generation takes longer for larger model domains and higher target resolutions.
193 |
194 | - Download a model output NetCDF file and place it in the `netcdf` subdirectory, modifying the model abbreviation, timestamp, and forecast hour as necessary:
195 |
196 | ```bash
197 | cd ~/nos-s100-toolkit/netcdf
198 | wget https://opendap.co-ops.nos.noaa.gov/thredds/fileServer/NOAA/CBOFS/MODELS/201907/nos.cbofs.fields.f001.20190701.t00z.nc
199 | ```
200 |
201 | - Download a land shapefile and/or subgrid shapefile, then unzip it into the `shp` subdirectory:
202 |
203 | ```bash
204 | cd ~/nos-s100-toolkit/shp
205 | wget https://www.weather.gov/source/gis/Shapefiles/County/s_11au16.zip # U.S. States and Territories
206 | unzip s_11au16.zip
207 | ```
207 |
208 | - Using the downloaded NetCDF file and the shoreline shapefile, generate a "default grid" index file.
209 | The extent (envelope) of the resulting grid definition will match the model's native domain.
210 | Specifying `-t 500` requests a target cell size of roughly 500 meters.
211 |
212 | ```bash
213 | python ofs.py -i ~/nos-s100-toolkit/indexes/cbofs_index_default_500m.nc -b -l ~/nos-s100-toolkit/shp/s_11au16.shp -m ~/nos-s100-toolkit/netcdf/nos.cbofs.fields.f001.20190701.t00z.nc -o cbofs -t 500 -code 2
214 | ```
215 |
216 | - Alternatively, create a "subgrid" index file instead. This requires a
217 | shapefile containing orthogonal grid polygons describing areas for which
218 | distinct S-111 files will be generated (for all grid polygons that
219 | intersect the native model domain). Specifying `-f GridCellName`
220 | tells the script to use the values of the supplied shapefile's
221 | "GridCellName" attribute as the filenames of any generated S-111 files.
222 | If not specified, the primary key identifier (e.g. `fid`) is used
223 | instead to distinguish the S-111 files from each other.
224 |
225 | ```bash
226 | python ofs.py -i ~/nos-s100-toolkit/indexes/cbofs_index_subset_500m.nc -b -l ~/nos-s100-toolkit/shp/land.shp -g ~/nos-s100-toolkit/shp/grid.shp -f GridCellName -m ~/nos-s100-toolkit/netcdf/nos.cbofs.fields.f001.20190701.t00z.nc -o cbofs -t 500 -code 2
227 | ```
228 |
229 | - Download the latest full OFS forecast run and convert to S-111
230 | format (requires specifying a NetCDF index [grid definition]
231 | file):
232 |
233 | ```bash
234 | python ofs.py -i ~/nos-s100-toolkit/indexes/cbofs_index_default_500m.nc -s ~/nos-s100-toolkit/hdf5 -d ~/nos-s100-toolkit/netcdf -o cbofs -code 2
235 | ```
236 |
237 | - Skip the download step and convert an existing OFS forecast file to
238 | S-111 format:
239 |
240 | ```bash
241 | python ofs.py -i ~/nos-s100-toolkit/indexes/cbofs_index_default_500m.nc -s ~/nos-s100-toolkit/hdf5 -m ~/nos-s100-toolkit/netcdf/nos.cbofs.fields.f001.20190701.t00z.nc -o cbofs -c 2019070100 -code 2
242 | ```
243 |
244 | **To create "ungeorectified gridded array" S-111 files (Coding Format 3)**
245 |
246 | - Download the latest full OFS forecast run and convert to S-111
247 | format:
248 |
249 | ```bash
250 | python ofs.py -s ~/nos-s100-toolkit/hdf5 -d ~/nos-s100-toolkit/netcdf -o cbofs -code 3
251 | ```
252 |
253 | - Skip the download step and convert an existing OFS forecast file to
254 | S-111 format:
255 |
256 | ```bash
257 | python ofs.py -s ~/nos-s100-toolkit/hdf5 -m ~/nos-s100-toolkit/netcdf/nos.cbofs.fields.f001.20190701.t00z.nc -o cbofs -c 2019070100 -code 3
258 | ```
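259 | 
260 | - Generated S-111 output can be sanity-checked with standard HDF5
261 | tools, if installed; for example (the filename below is a placeholder,
262 | as actual names are derived from the model, grid, and cycle time):
263 | 
264 | ```bash
265 | h5ls -r ~/nos-s100-toolkit/hdf5/<generated_file>.h5
266 | ```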
259 |
260 | Authors
261 | -------
262 |
263 | - Erin Nagel (UCAR)
264 | - Jason Greenlaw (ERT)
265 |
266 | License
267 | -------
268 |
269 | This project is licensed under the [Creative Commons Zero
270 | 1.0](https://creativecommons.org/publicdomain/zero/1.0/) public domain
271 | dedication. See [LICENSE](LICENSE) for more information.
272 |
273 | Disclaimer
274 | ----------
275 |
276 | This repository is a scientific product and is not official
277 | communication of the National Oceanic and Atmospheric Administration, or
278 | the United States Department of Commerce. All NOAA GitHub project code
279 | is provided on an 'as is' basis and the user assumes responsibility for
280 | its use. Any claims against the Department of Commerce or Department of
281 | Commerce bureaus stemming from the use of this GitHub project will be
282 | governed by all applicable Federal law. Any reference to specific
283 | commercial products, processes, or services by service mark, trademark,
284 | manufacturer, or otherwise, does not constitute or imply their
285 | endorsement, recommendation or favoring by the Department of Commerce.
286 | The Department of Commerce seal and logo, or the seal and logo of a DOC
287 | bureau, shall not be used in any manner to imply endorsement of any
288 | commercial product or activity by DOC or the United States Government.
289 |
290 | Acknowledgments
291 | ---------------
292 |
293 | This software has been developed by the National Oceanic and Atmospheric
294 | Administration (NOAA)/National Ocean Service (NOS)/Office of Coast
295 | Survey (OCS)/Coast Survey Development Lab (CSDL) for use by the
296 | scientific and oceanographic communities.
297 |
298 | CSDL wishes to thank the following entities for their assistance:
299 |
300 | - NOAA/NOS/Center for Operational Oceanographic Products and Services
301 | (CO-OPS)
302 |
--------------------------------------------------------------------------------
/nos-s100_conda_env.txt:
--------------------------------------------------------------------------------
1 | # This file may be used to create an environment using:
2 | # $ conda create --name <env> --file <this file>
3 | # platform: linux-64
4 | @EXPLICIT
5 | https://repo.anaconda.com/pkgs/main/linux-64/_libgcc_mutex-0.1-main.tar.bz2
6 | https://repo.anaconda.com/pkgs/main/linux-64/blas-1.0-mkl.tar.bz2
7 | https://repo.anaconda.com/pkgs/main/linux-64/ca-certificates-2019.5.15-0.tar.bz2
8 | https://repo.anaconda.com/pkgs/main/linux-64/intel-openmp-2019.4-243.tar.bz2
9 | https://repo.anaconda.com/pkgs/main/linux-64/libgfortran-ng-7.3.0-hdf63c60_0.tar.bz2
10 | https://repo.anaconda.com/pkgs/main/linux-64/libstdcxx-ng-9.1.0-hdf63c60_0.tar.bz2
11 | https://repo.anaconda.com/pkgs/main/linux-64/poppler-data-0.4.9-0.tar.bz2
12 | https://repo.anaconda.com/pkgs/main/linux-64/libgcc-ng-9.1.0-hdf63c60_0.tar.bz2
13 | https://repo.anaconda.com/pkgs/main/linux-64/mkl-2019.4-243.tar.bz2
14 | https://repo.anaconda.com/pkgs/main/linux-64/bzip2-1.0.7-h7b6447c_0.tar.bz2
15 | https://repo.anaconda.com/pkgs/main/linux-64/expat-2.2.6-he6710b0_0.tar.bz2
16 | https://repo.anaconda.com/pkgs/main/linux-64/freexl-1.0.5-h14c3975_0.tar.bz2
17 | https://repo.anaconda.com/pkgs/main/linux-64/geos-3.7.1-he6710b0_0.tar.bz2
18 | https://repo.anaconda.com/pkgs/main/linux-64/giflib-5.1.4-h14c3975_1.tar.bz2
19 | https://repo.anaconda.com/pkgs/main/linux-64/icu-58.2-h9c2bf20_1.tar.bz2
20 | https://repo.anaconda.com/pkgs/main/linux-64/jpeg-9b-h024ee3a_2.tar.bz2
21 | https://repo.anaconda.com/pkgs/main/linux-64/json-c-0.13.1-h1bed415_0.tar.bz2
22 | https://repo.anaconda.com/pkgs/main/linux-64/libffi-3.2.1-hd88cf55_4.tar.bz2
23 | https://repo.anaconda.com/pkgs/main/linux-64/libuuid-1.0.3-h1bed415_2.tar.bz2
24 | https://repo.anaconda.com/pkgs/main/linux-64/libxcb-1.13-h1bed415_1.tar.bz2
25 | https://repo.anaconda.com/pkgs/main/linux-64/ncurses-6.1-he6710b0_1.tar.bz2
26 | https://repo.anaconda.com/pkgs/main/linux-64/openssl-1.1.1c-h7b6447c_1.tar.bz2
27 | https://repo.anaconda.com/pkgs/main/linux-64/pcre-8.43-he6710b0_0.tar.bz2
28 | https://repo.anaconda.com/pkgs/main/linux-64/pixman-0.38.0-h7b6447c_0.tar.bz2
29 | https://repo.anaconda.com/pkgs/main/linux-64/proj4-5.2.0-he6710b0_1.tar.bz2
30 | https://repo.anaconda.com/pkgs/main/linux-64/xz-5.2.4-h14c3975_4.tar.bz2
31 | https://repo.anaconda.com/pkgs/main/linux-64/zlib-1.2.11-h7b6447c_3.tar.bz2
32 | https://repo.anaconda.com/pkgs/main/linux-64/glib-2.56.2-hd408876_0.tar.bz2
33 | https://repo.anaconda.com/pkgs/main/linux-64/hdf4-4.2.13-h3ca952b_2.tar.bz2
34 | https://repo.anaconda.com/pkgs/main/linux-64/hdf5-1.10.4-hb1b8bf9_0.tar.bz2
35 | https://repo.anaconda.com/pkgs/main/linux-64/libboost-1.67.0-h46d08c1_4.tar.bz2
36 | https://repo.anaconda.com/pkgs/main/linux-64/libedit-3.1.20181209-hc058e9b_0.tar.bz2
37 | https://repo.anaconda.com/pkgs/main/linux-64/libpng-1.6.37-hbc83047_0.tar.bz2
38 | https://repo.anaconda.com/pkgs/main/linux-64/libssh2-1.8.2-h1ba5d50_0.tar.bz2
39 | https://repo.anaconda.com/pkgs/main/linux-64/libxml2-2.9.9-hea5a465_1.tar.bz2
40 | https://repo.anaconda.com/pkgs/main/linux-64/readline-7.0-h7b6447c_5.tar.bz2
41 | https://repo.anaconda.com/pkgs/main/linux-64/tk-8.6.8-hbc83047_0.tar.bz2
42 | https://repo.anaconda.com/pkgs/main/linux-64/xerces-c-3.2.2-h780794e_0.tar.bz2
43 | https://repo.anaconda.com/pkgs/main/linux-64/zstd-1.3.7-h0b5b093_0.tar.bz2
44 | https://repo.anaconda.com/pkgs/main/linux-64/freetype-2.9.1-h8a8886c_1.tar.bz2
45 | https://repo.anaconda.com/pkgs/main/linux-64/kealib-1.4.7-hd0c454d_6.tar.bz2
46 | https://repo.anaconda.com/pkgs/main/linux-64/krb5-1.16.1-h173b8e3_7.tar.bz2
47 | https://repo.anaconda.com/pkgs/main/linux-64/libkml-1.3.0-h590aaf7_4.tar.bz2
48 | https://repo.anaconda.com/pkgs/main/linux-64/libtiff-4.0.10-h2733197_2.tar.bz2
49 | https://repo.anaconda.com/pkgs/main/linux-64/sqlite-3.28.0-h7b6447c_0.tar.bz2
50 | https://repo.anaconda.com/pkgs/main/linux-64/fontconfig-2.13.0-h9420a91_0.tar.bz2
51 | https://repo.anaconda.com/pkgs/main/linux-64/libcurl-7.64.1-h20c2e04_0.tar.bz2
52 | https://repo.anaconda.com/pkgs/main/linux-64/libpq-11.2-h20c2e04_0.tar.bz2
53 | https://repo.anaconda.com/pkgs/main/linux-64/libspatialite-4.3.0a-hb08deb6_19.tar.bz2
54 | https://repo.anaconda.com/pkgs/main/linux-64/openjpeg-2.3.0-h05c96fa_1.tar.bz2
55 | https://repo.anaconda.com/pkgs/main/linux-64/python-3.7.3-h0371630_0.tar.bz2
56 | https://repo.anaconda.com/pkgs/main/linux-64/cairo-1.14.12-h8948797_3.tar.bz2
57 | https://repo.anaconda.com/pkgs/main/linux-64/certifi-2019.6.16-py37_0.tar.bz2
58 | https://repo.anaconda.com/pkgs/main/linux-64/curl-7.64.1-hbc83047_0.tar.bz2
59 | https://repo.anaconda.com/pkgs/main/linux-64/numpy-base-1.16.4-py37hde5b4d6_0.tar.bz2
60 | https://repo.anaconda.com/pkgs/main/linux-64/six-1.12.0-py37_0.tar.bz2
61 | https://repo.anaconda.com/pkgs/main/linux-64/libdap4-3.19.1-h6ec2957_0.tar.bz2
62 | https://repo.anaconda.com/pkgs/main/linux-64/libnetcdf-4.6.1-h11d0813_2.tar.bz2
63 | https://repo.anaconda.com/pkgs/main/linux-64/mkl_random-1.0.2-py37hd81dba3_0.tar.bz2
64 | https://repo.anaconda.com/pkgs/main/linux-64/poppler-0.65.0-h581218d_1.tar.bz2
65 | https://repo.anaconda.com/pkgs/main/linux-64/setuptools-41.0.1-py37_0.tar.bz2
66 | https://repo.anaconda.com/pkgs/main/linux-64/libgdal-2.3.3-h2e7e64b_0.tar.bz2
67 | https://repo.anaconda.com/pkgs/main/linux-64/wheel-0.33.4-py37_0.tar.bz2
68 | https://repo.anaconda.com/pkgs/main/linux-64/pip-19.1.1-py37_0.tar.bz2
69 | https://repo.anaconda.com/pkgs/main/linux-64/cftime-1.0.3.4-py37hdd07704_1.tar.bz2
70 | https://repo.anaconda.com/pkgs/main/linux-64/gdal-2.3.3-py37hbb2a789_0.tar.bz2
71 | https://repo.anaconda.com/pkgs/main/linux-64/h5py-2.9.0-py37h7918eee_0.tar.bz2
72 | https://repo.anaconda.com/pkgs/main/linux-64/mkl_fft-1.0.12-py37ha843d7b_0.tar.bz2
73 | https://repo.anaconda.com/pkgs/main/linux-64/numpy-1.16.4-py37h7e9f1db_0.tar.bz2
74 | https://repo.anaconda.com/pkgs/main/linux-64/netcdf4-1.4.2-py37h808af73_0.tar.bz2
75 | https://repo.anaconda.com/pkgs/main/linux-64/scipy-1.2.1-py37h7c811a0_0.tar.bz2
76 | https://repo.anaconda.com/pkgs/main/linux-64/shapely-1.6.4-py37h86c5351_0.tar.bz2
--------------------------------------------------------------------------------
/ofs-ops.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Download and convert OFS output to S-111 format using an operational configuration.
4 | The latest OFS output model run is downloaded from the FTP server, then is converted
5 | to both default-grid and subset-grid S-111 files in parallel. The resulting S-111
6 | files are then moved to an operational storage volume for FTP dissemination.
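7 | 
8 | Example invocation (the model identifier must be a key of MODEL_INDEX_FILE):
9 | python ofs-ops.py -o cbofs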
7 | """
8 |
9 | from multiprocessing import Pool
10 | import shutil
11 | import os
12 | from glob import glob
13 | import argparse
14 |
15 | from s100py import s111
16 | import ofs
17 |
18 | # Max number of subprocess workers to spin up
19 | MAXWORKERS = 2
20 |
21 | SOURCE_PATH = '/opt/s100/'
22 | DEST_PATH = '/win/ofsdata/%Y%m%d/HDF5/S111_1.0.0/'
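23 | # Note: DEST_PATH embeds strftime tokens (%Y%m%d); run_ofs expands it with
24 | # cycletime.strftime() before copying S-111 files into the dated directory.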
23 |
24 | MODEL_INDEX_FILE = {
25 | 'cbofs': {
26 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/cbofs_index_default_500m.nc',
27 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/cbofs_index_band4v5_500m.nc'
28 | },
29 | 'dbofs': {
30 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/dbofs_index_default_500m.nc',
31 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/dbofs_index_band4v5_500m.nc'
32 | },
33 | 'nyofs': {
34 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/nyofs_index_default_500m.nc',
35 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/nyofs_index_band4v5_500m.nc'
36 | },
37 | 'rtofs_east': {
38 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/rtofs_east_index_default_8500m.nc',
39 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/rtofs_east_index_band2v5_8500m.nc'
40 | },
41 | 'rtofs_west': {
42 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/rtofs_west_index_default_8500m.nc',
43 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/rtofs_west_index_band2v5_8500m.nc'
44 | },
45 | 'ngofs': {
46 | 'index_default_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/ngofs_index_default_1000m.nc',
47 | 'index_subset_path': f'{SOURCE_PATH}indexes/thyme_v0.4.0/ngofs_index_band2v5_1000m.nc'
48 | }
49 | }
50 |
51 | # Process Regular Grids
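52 | # Data coding format 2 produces regularly-gridded S-111 output (see the
53 | # README); TARGET_DEPTH is the depth below the surface, in meters, at
54 | # which currents are interpolated.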
52 | DATA_CODING_FORMAT = 2
53 | TARGET_DEPTH = 4.5
54 |
55 |
56 | def run_ofs(ofs_model):
57 | cycletime = ofs.get_latest_cycletime(ofs_model)
58 | if cycletime is None:
59 | print('Error: Latest model cycle time cannot be determined. Verify that system time is correct and review model cycle configuration.')
60 | return 1
61 | local_files = ofs.download(ofs_model, cycletime, '{}netcdf/'.format(SOURCE_PATH))
62 |
63 | workerPool = Pool(processes=max(1, MAXWORKERS))
64 | workers = []
65 |
66 | index_default_path = MODEL_INDEX_FILE[ofs_model]['index_default_path']
67 | index_subset_path = MODEL_INDEX_FILE[ofs_model]['index_subset_path']
68 | s111_dir = '{}hdf5/{}/'.format(SOURCE_PATH, ofs_model)
69 | model_dir = '{}{}/'.format(DEST_PATH, ofs_model.upper())
70 |
71 | model_output_files = []
72 |
73 | index_file_default = ofs.MODEL_INDEX_CLASS[ofs.MODELS[ofs_model]['model_type']](index_default_path)
74 | index_file_subset = ofs.MODEL_INDEX_CLASS[ofs.MODELS[ofs_model]['model_type']](index_subset_path)
75 |
76 | for local_file in local_files:
77 | model_output_files.append(ofs.MODEL_FILE_CLASS[ofs.MODELS[ofs_model]['model_type']](local_file,
78 | datetime_rounding=
79 | ofs.MODELS[ofs_model][
80 | 'datetime_rounding']))
81 |
82 | file_metadata = s111.S111Metadata(ofs.MODELS[ofs_model]['region'], ofs.MODELS[ofs_model]['product'],
83 | ofs.CURRENT_DATATYPE, ofs.PRODUCERCODE_US, None, ofs_model)
84 |
85 | # Call default grid processing
86 | workers.append(workerPool.apply_async(s111.model_to_s111, (index_file_default, model_output_files, s111_dir, cycletime, file_metadata, DATA_CODING_FORMAT, TARGET_DEPTH)))
87 |
88 | # Call subgrid processing
89 | workers.append(workerPool.apply_async(s111.model_to_s111, (index_file_subset, model_output_files, s111_dir, cycletime, file_metadata, DATA_CODING_FORMAT, TARGET_DEPTH)))
90 |
91 | s111_file_paths = []
92 |
93 | for w in workers:
94 | s111_file_paths.extend(w.get())
95 |
96 | # Copy s111 files to OCS FTP /win/ofsdata/{forecast_date}/HDF5/S111_1.0.0
97 | for s111_file_path in s111_file_paths:
98 | split_path = os.path.split(s111_file_path)
99 | dst = os.path.join(cycletime.strftime(model_dir), split_path[1])
100 | shutil.copyfile(s111_file_path, dst)
101 |
102 | # Remove s111 files from hdf5 directory
103 | delete_files = glob('{}/*.h5'.format(s111_dir))
104 | for delete_file in delete_files:
105 | os.remove(delete_file)
106 |
107 |
108 | def main():
109 | parser = argparse.ArgumentParser(description='Convert surface currents model output to S-111 HDF5 format operationally')
110 | parser.add_argument('-o', '--ofs_model', help='Identifier of target Operational Forecast System (OFS) to be processed (e.g. cbofs, dbofs)', required=True)
111 | args = parser.parse_args()
112 |
113 | ofs_model = args.ofs_model
114 | if not ofs_model or ofs_model.lower() not in MODEL_INDEX_FILE:
115 | parser.error(
116 | 'A valid -o/--ofs_model must be specified. Possible values: {}'.format(', '.join(list(MODEL_INDEX_FILE.keys()))))
117 | return 1
118 |
119 | ofs_model = ofs_model.lower()
120 |
121 | run_ofs(ofs_model)
122 |
123 |
124 | if __name__ == '__main__':
125 | main()
126 |
--------------------------------------------------------------------------------
/ofs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Download and convert OFS NetCDF model output to S-111 compliant HDF5."""
3 | import argparse
4 | import datetime
5 | import shutil
6 | import urllib.request
7 | import os
8 | from glob import glob
9 | import sys
10 | import time
10 |
11 | from s100py import s111
12 | from thyme.model import roms
13 | from thyme.model import fvcom
14 | from thyme.model import pom
15 | from thyme.model import hycom
16 | from thyme.util import dateutil
18 |
19 | start_time = time.time()
20 |
21 | # Base URL of NCEP NOMADS HTTP for accessing CO-OPS OFS NetCDF files
22 | HTTP_SERVER_NOMADS = 'https://nomads.ncep.noaa.gov'
23 | # Base URL of CO-OPS THREDDS HTTP for accessing CO-OPS OFS NetCDF files
24 | HTTP_SERVER_THREDDS = 'https://opendap.co-ops.nos.noaa.gov'
25 |
26 | # Path format of NetCDF files. Forecast initialization (reference) time will be
27 | # injected using datetime.strftime() and zero-padded forecast designation (e.g.
28 | # 'f012') will be injected by using str.format().
29 | # Example: reftime.strftime(HTTP_NETCDF_PATH_FORMAT).format(forecast_str='f012')
30 | HTTP_NETCDF_NOMADS_PATH_FORMAT = '/pub/data/nccf/com/nos/prod/{model_str_lc}.%Y%m%d/nos.{model_str_lc}.fields.{forecast_str}.%Y%m%d.t%Hz.nc'
31 | HTTP_NETCDF_NOMADS_RTOFS_EAST_PATH_FORMAT = '/pub/data/nccf/com/{model_str_lc}/prod/{model_str_lc}.%Y%m%d/{model_str_lc}_glo_3dz_{forecast_str}_6hrly_hvr_US_east.nc'
32 | HTTP_NETCDF_NOMADS_RTOFS_WEST_PATH_FORMAT = '/pub/data/nccf/com/{model_str_lc}/prod/{model_str_lc}.%Y%m%d/{model_str_lc}_glo_3dz_{forecast_str}_6hrly_hvr_US_west.nc'
33 | HTTP_NETCDF_THREDDS_PATH_FORMAT = '/thredds/fileServer/NOAA/{model_str_uc}/MODELS/%Y%m/nos.{model_str_lc}.fields.{forecast_str}.%Y%m%d.t%Hz.nc'
34 | HTTP_NETCDF_THREDDS_NYOFS_PATH_FORMAT = '/thredds/fileServer/NOAA/{model_str_uc}/MODELS/%Y%m/nos.{model_str_lc}.fields.forecast.%Y%m%d.t%Hz.nc'
35 | HTTP_NETCDF_THREDDS_GLOFS_PATH_FORMAT = '/thredds/fileServer/NOAA/{model_str_uc}/MODELS/%Y%m/glofs.{model_str_lc}.fields.forecast.%Y%m%d.t%Hz.nc'
36 |
37 | # Folder path of downloaded NetCDF files.
38 | LOCAL_NETCDF_NOMADS_FILENAME_FORMAT = 'nos.{model_str_lc}.fields.{forecast_str}.%Y%m%d.t%Hz.nc'
39 | LOCAL_NETCDF_THREDDS_FILENAME_FORMAT = 'nos.{model_str_lc}.fields.{forecast_str}.%Y%m%d.t%Hz.nc'
40 | LOCAL_NETCDF_THREDDS_NYOFS_FILENAME_FORMAT = 'nos.{model_str_lc}.fields.%Y%m%d.t%Hz.nc'
41 | LOCAL_NETCDF_THREDDS_GLOFS_FILENAME_FORMAT = 'glofs.{model_str_lc}.fields.%Y%m%d.t%Hz.nc'
42 | LOCAL_NETCDF_OCS_RTOFS_WEST_FILENAME_FORMAT = '{model_str_lc}_glo_3dz_{forecast_str}_6hrly_hvr_US_west.nc'
43 | LOCAL_NETCDF_OCS_RTOFS_EAST_FILENAME_FORMAT = '{model_str_lc}_glo_3dz_{forecast_str}_6hrly_hvr_US_east.nc'
44 |
45 | MODELTYPE_FVCOM = 'fvcom'
46 | MODELTYPE_HYCOM = 'hycom'
47 | MODELTYPE_POM = 'pom'
48 | MODELTYPE_ROMS = 'roms'
49 |
50 | # Model File classes associated with each model type
51 | MODEL_FILE_CLASS = {
52 | MODELTYPE_FVCOM: fvcom.FVCOMFile,
53 | MODELTYPE_HYCOM: hycom.HYCOMFile,
54 | MODELTYPE_POM: pom.POMFile,
55 | MODELTYPE_ROMS: roms.ROMSFile
56 | }
57 |
58 | # Index File classes associated with each model type
59 | MODEL_INDEX_CLASS = {
60 | MODELTYPE_FVCOM: fvcom.FVCOMIndexFile,
61 | MODELTYPE_HYCOM: hycom.HYCOMIndexFile,
62 | MODELTYPE_POM: pom.POMIndexFile,
63 | MODELTYPE_ROMS: roms.ROMSIndexFile
64 | }
65 |
66 | PRODUCT_DESCRIPTION_FVCOM = 'FVCOM_Hydrodynamic_Model_Forecasts'
67 | PRODUCT_DESCRIPTION_HYCOM = 'HYCOM_Hydrodynamic_Model_Forecasts'
68 | PRODUCT_DESCRIPTION_POM = 'POM_Hydrodynamic_Model_Forecasts'
69 | PRODUCT_DESCRIPTION_ROMS = 'ROMS_Hydrodynamic_Model_Forecasts'
70 |
71 | PRODUCERCODE_US = 'US'
72 | CURRENT_DATATYPE = 6
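73 | # S-111 "type of current data" code passed through to s100py
74 | # (6 = hydrodynamic model forecast).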
73 |
74 | """
75 | Model configuration dictionary, where key is the lower-case model identifier
76 | and value is another dictionary with the following properties:
77 | file_server: NCEP NOMADS or CO-OPS THREDDS
78 | file_path: NCEP NOMADS or CO-OPS THREDDS file path
79 | forecast_hours: List of forecast projections (hours from cycle time at
80 | which each forecast is valid).
81 | cycles: List of hour-of-day values corresponding with daily model cycles
82 | For example, for a model produced four times per day at 0000, 0600,
83 | 1200, and 1800 UTC, specify (0,6,12,18).
84 | server_file_delay: `datetime.timedelta` representing the delay (time
85 | since model cycle time) before files become available on the HTTP
86 | server; schedule any cron job at or after this delay.
87 | region: OFS region
88 | product: Description of product type
89 | model_type: Type of underlying modeling framework.
89 | """
90 | MODELS = {
91 | 'cbofs': {
92 | # Hourly output from +1 to +48
93 | 'file_server': HTTP_SERVER_NOMADS,
94 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
95 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
96 | 'forecast_hours': list(range(1, 49)),
97 | 'cycles': (0, 6, 12, 18),
98 | 'server_file_delay': datetime.timedelta(minutes=85),
99 | 'region': 'Chesapeake_Bay',
100 | 'product': PRODUCT_DESCRIPTION_ROMS,
101 | 'model_type': MODELTYPE_ROMS,
102 | 'datetime_rounding': None
103 |
104 | },
105 | 'gomofs': {
106 | # 3-hourly output from +3 to +72
107 | 'file_server': HTTP_SERVER_NOMADS,
108 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
109 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
110 | 'forecast_hours': list(range(3, 73, 3)),
111 | 'cycles': (0, 6, 12, 18),
112 | 'server_file_delay': datetime.timedelta(minutes=134),
113 | 'region': 'Gulf_of_Maine',
114 | 'product': PRODUCT_DESCRIPTION_ROMS,
115 | 'model_type': MODELTYPE_ROMS,
116 | 'datetime_rounding': None
117 |
118 | },
119 | 'dbofs': {
120 | # Hourly output from +1 to +48
121 | 'file_server': HTTP_SERVER_NOMADS,
122 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
123 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
124 | 'forecast_hours': list(range(1, 49)),
125 | 'cycles': (0, 6, 12, 18),
126 | 'server_file_delay': datetime.timedelta(minutes=80),
127 | 'region': 'Delaware_Bay',
128 | 'product': PRODUCT_DESCRIPTION_ROMS,
129 | 'model_type': MODELTYPE_ROMS,
130 | 'datetime_rounding': None
131 |
132 | },
133 | 'tbofs': {
134 | # Hourly output from +1 to +48
135 | 'file_server': HTTP_SERVER_NOMADS,
136 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
137 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
138 | 'forecast_hours': list(range(1, 49)),
139 | 'cycles': (0, 6, 12, 18),
140 | 'server_file_delay': datetime.timedelta(minutes=74),
141 | 'region': 'Tampa_Bay',
142 | 'product': PRODUCT_DESCRIPTION_ROMS,
143 | 'model_type': MODELTYPE_ROMS,
144 | 'datetime_rounding': None
145 |
146 | },
147 | 'negofs': {
148 | # Hourly output from +1 to +48
149 | 'file_server': HTTP_SERVER_NOMADS,
150 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
151 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
152 | 'forecast_hours': list(range(1, 49)),
153 | 'cycles': (3, 9, 15, 21),
154 | 'server_file_delay': datetime.timedelta(minutes=95),
155 | 'region': 'Northeast_Gulf_of_Mexico',
156 | 'product': PRODUCT_DESCRIPTION_FVCOM,
157 | 'model_type': MODELTYPE_FVCOM,
158 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
159 |
160 | },
161 | 'nwgofs': {
162 | # Hourly output from +1 to +48
163 | 'file_server': HTTP_SERVER_NOMADS,
164 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
165 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
166 | 'forecast_hours': list(range(1, 49)),
167 | 'cycles': (3, 9, 15, 21),
168 | 'server_file_delay': datetime.timedelta(minutes=90),
169 | 'region': 'Northwest_Gulf_of_Mexico',
170 | 'product': PRODUCT_DESCRIPTION_FVCOM,
171 | 'model_type': MODELTYPE_FVCOM,
172 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
173 |
174 | },
175 | 'ngofs': {
176 | # Hourly output from +1 to +48
177 | 'file_server': HTTP_SERVER_NOMADS,
178 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
179 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
180 | 'forecast_hours': list(range(1, 49)),
181 | 'cycles': (3, 9, 15, 21),
182 | 'server_file_delay': datetime.timedelta(minutes=50),
183 | 'region': 'Northern_Gulf_of_Mexico',
184 | 'product': PRODUCT_DESCRIPTION_FVCOM,
185 | 'model_type': MODELTYPE_FVCOM,
186 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
187 |
188 | },
189 | 'sfbofs': {
190 | # Hourly output from +1 to +48
191 | 'file_server': HTTP_SERVER_NOMADS,
192 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
193 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
194 | 'forecast_hours': list(range(1, 49)),
195 | 'cycles': (3, 9, 15, 21),
196 | 'server_file_delay': datetime.timedelta(minutes=55),
197 | 'region': 'San_Francisco_Bay',
198 | 'product': PRODUCT_DESCRIPTION_FVCOM,
199 | 'model_type': MODELTYPE_FVCOM,
200 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
201 |
202 | },
203 | 'leofs': {
204 | # Hourly output from +1 to +48
205 | 'file_server': HTTP_SERVER_NOMADS,
206 | 'file_path': HTTP_NETCDF_NOMADS_PATH_FORMAT,
207 | 'file_name': LOCAL_NETCDF_NOMADS_FILENAME_FORMAT,
208 | 'forecast_hours': list(range(1, 49)),
209 | 'cycles': (0, 6, 12, 18),
210 | 'server_file_delay': datetime.timedelta(minutes=100),
211 | 'region': 'Lake_Erie',
212 | 'product': PRODUCT_DESCRIPTION_FVCOM,
213 | 'model_type': MODELTYPE_FVCOM,
214 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
215 |
216 | },
217 | 'nyofs': {
218 | # Hourly output from +1 to +53
219 | 'file_server': HTTP_SERVER_THREDDS,
220 | 'file_path': HTTP_NETCDF_THREDDS_NYOFS_PATH_FORMAT,
221 | 'file_name': LOCAL_NETCDF_THREDDS_NYOFS_FILENAME_FORMAT,
222 | 'forecast_hours': list(range(0, 53)),
223 | 'cycles': (5, 11, 17, 23),
224 | 'server_file_delay': datetime.timedelta(minutes=48),
225 | 'region': 'Port_of_New_York_and_New_Jersey',
226 | 'product': PRODUCT_DESCRIPTION_POM,
227 | 'model_type': MODELTYPE_POM,
228 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
229 |
230 | },
231 | 'nyofs_fg': {
232 | # Hourly output from +1 to +53
233 | 'file_server': HTTP_SERVER_THREDDS,
234 | 'file_path': HTTP_NETCDF_THREDDS_NYOFS_PATH_FORMAT,
235 | 'file_name': LOCAL_NETCDF_THREDDS_NYOFS_FILENAME_FORMAT,
236 | 'forecast_hours': list(range(0, 53)),
237 | 'cycles': (5, 11, 17, 23),
238 | 'server_file_delay': datetime.timedelta(minutes=48),
239 | 'region': 'Port_of_New_York_and_New_Jersey',
240 | 'product': PRODUCT_DESCRIPTION_POM,
241 | 'model_type': MODELTYPE_POM,
242 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
243 |
244 | },
245 | 'lmofs': {
246 | # Hourly output from +1 to +59
247 | 'file_server': HTTP_SERVER_THREDDS,
248 | 'file_path': HTTP_NETCDF_THREDDS_GLOFS_PATH_FORMAT,
249 | 'file_name': LOCAL_NETCDF_THREDDS_GLOFS_FILENAME_FORMAT,
250 | 'forecast_hours': list(range(0, 1)),
251 | 'cycles': (0, 6, 12, 18),
252 | 'server_file_delay': datetime.timedelta(minutes=100),
253 | 'region': 'Lake_Michigan',
254 | 'product': PRODUCT_DESCRIPTION_POM,
255 | 'model_type': MODELTYPE_POM,
256 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
257 |
258 | },
259 | 'lhofs': {
260 | # Hourly output from +1 to +59
261 | 'file_server': HTTP_SERVER_THREDDS,
262 | 'file_path': HTTP_NETCDF_THREDDS_GLOFS_PATH_FORMAT,
263 | 'file_name': LOCAL_NETCDF_THREDDS_GLOFS_FILENAME_FORMAT,
264 | 'forecast_hours': list(range(0, 1)),
265 | 'cycles': (0, 6, 12, 18),
266 | 'server_file_delay': datetime.timedelta(minutes=100),
267 | 'region': 'Lake_Huron',
268 | 'product': PRODUCT_DESCRIPTION_POM,
269 | 'model_type': MODELTYPE_POM,
270 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
271 |
272 | },
273 | 'loofs': {
274 | # Hourly output from +1 to +59
275 | 'file_server': HTTP_SERVER_THREDDS,
276 | 'file_path': HTTP_NETCDF_THREDDS_GLOFS_PATH_FORMAT,
277 | 'file_name': LOCAL_NETCDF_THREDDS_GLOFS_FILENAME_FORMAT,
278 | 'forecast_hours': list(range(0, 1)),
279 | 'cycles': (0, 6, 12, 18),
280 | 'server_file_delay': datetime.timedelta(minutes=100),
281 | 'region': 'Lake_Ontario',
282 | 'product': PRODUCT_DESCRIPTION_POM,
283 | 'model_type': MODELTYPE_POM,
284 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
285 |
286 | },
287 | 'lsofs': {
288 | # Hourly output from +1 to +59
289 | 'file_server': HTTP_SERVER_THREDDS,
290 | 'file_path': HTTP_NETCDF_THREDDS_GLOFS_PATH_FORMAT,
291 | 'file_name': LOCAL_NETCDF_THREDDS_GLOFS_FILENAME_FORMAT,
292 | 'forecast_hours': list(range(0, 1)),
293 | 'cycles': (0, 6, 12, 18),
294 | 'server_file_delay': datetime.timedelta(minutes=100),
295 | 'region': 'Lake_Superior',
296 | 'product': PRODUCT_DESCRIPTION_POM,
297 | 'model_type': MODELTYPE_POM,
298 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
299 |
300 | },
301 | 'rtofs_east': {
302 | # 6-hourly output from +6 to +72
303 | 'file_server': HTTP_SERVER_NOMADS,
304 | 'file_path': HTTP_NETCDF_NOMADS_RTOFS_EAST_PATH_FORMAT,
305 | 'file_name': LOCAL_NETCDF_OCS_RTOFS_EAST_FILENAME_FORMAT,
306 | 'forecast_hours': list(range(6, 78, 6)),
307 | 'cycles': (0,),
308 | 'server_file_delay': datetime.timedelta(minutes=40),
309 | 'region': 'US East',
310 | 'product': PRODUCT_DESCRIPTION_HYCOM,
311 | 'model_type': MODELTYPE_HYCOM,
312 | 'datetime_rounding': None
313 |
314 | },
315 | 'rtofs_west': {
316 | # 6-hourly output from +6 to +72
317 | 'file_server': HTTP_SERVER_NOMADS,
318 | 'file_path': HTTP_NETCDF_NOMADS_RTOFS_WEST_PATH_FORMAT,
319 | 'file_name': LOCAL_NETCDF_OCS_RTOFS_WEST_FILENAME_FORMAT,
320 | 'forecast_hours': list(range(6, 78, 6)),
321 | 'cycles': (0,),
322 | 'server_file_delay': datetime.timedelta(minutes=40),
323 | 'region': 'US West',
324 | 'product': PRODUCT_DESCRIPTION_HYCOM,
325 | 'model_type': MODELTYPE_HYCOM,
326 | 'datetime_rounding': None
327 |
328 | },
329 | 'wcofs': {
330 | # 3-hourly output from +1 to +19
331 | 'file_server': HTTP_SERVER_THREDDS,
332 | 'file_path': HTTP_NETCDF_THREDDS_PATH_FORMAT,
333 | 'file_name': LOCAL_NETCDF_THREDDS_FILENAME_FORMAT,
334 | 'forecast_hours': list(range(1, 21, 3)),
335 | 'cycles': (3,),
336 | 'server_file_delay': datetime.timedelta(minutes=100),
337 | 'region': 'West_Coast',
338 | 'product': PRODUCT_DESCRIPTION_ROMS,
339 | 'model_type': MODELTYPE_ROMS,
340 | 'datetime_rounding': None
341 |
342 | },
343 | 'lmhofs': {
344 | # Hourly output from +1 to +120
345 | 'file_server': HTTP_SERVER_THREDDS,
346 | 'file_path': HTTP_NETCDF_THREDDS_PATH_FORMAT,
347 | 'file_name': LOCAL_NETCDF_THREDDS_FILENAME_FORMAT,
348 | 'forecast_hours': list(range(1, 121)),
349 | 'cycles': (0, 6, 12, 18),
350 | 'server_file_delay': datetime.timedelta(minutes=100),
351 | 'region': 'Lake Michigan & Lake Huron',
352 | 'product': PRODUCT_DESCRIPTION_FVCOM,
353 | 'model_type': MODELTYPE_FVCOM,
354 | 'datetime_rounding': dateutil.DatetimeRounding.NEAREST_HOUR
355 |
356 | }
357 | # Disable CIOFS support until wetting/drying handled properly by ROMS module
358 | # 'ciofs': {
359 | # # Hourly output from +1 to +49
360 | # 'file_server': HTTP_SERVER_THREDDS,
361 | # 'file_path': HTTP_NETCDF_THREDDS_PATH_FORMAT,
362 | # 'file_name': LOCAL_NETCDF_THREDDS_FILENAME_FORMAT,
363 | # 'forecast_hours': list(range(1, 49)),
364 | # 'cycles': (0, 6, 12, 18),
365 | # 'server_file_delay': datetime.timedelta(minutes=100),
366 | # 'region': 'Cook_Inlet',
367 | # 'product': PRODUCT_DESCRIPTION_ROMS,
368 | # 'model_type': MODELTYPE_ROMS,
369 | # 'datetime_rounding': None
370 | # },
371 | }
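372 | 
373 | # Illustrative expansion of a MODELS entry into a download URL (this mirrors
374 | # the logic in download() below):
375 | #   reftime = datetime.datetime(2019, 7, 1, 0)
376 | #   url = reftime.strftime(HTTP_SERVER_NOMADS + MODELS['cbofs']['file_path']).format(model_str_lc='cbofs', forecast_str='f001')
377 | #   # -> https://nomads.ncep.noaa.gov/pub/data/nccf/com/nos/prod/cbofs.20190701/nos.cbofs.fields.f001.20190701.t00z.nc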
372 |
373 |
374 | def get_latest_cycletime(ofs_model):
375 | """Calculate and return the latest cycletime for specified model.
376 |
377 | Args:
378 | ofs_model: The target model identifier.
379 |
380 | Returns: A `datetime.datetime` instance representing the calculated
381 | cycletime, or None if one could not be determined using system time and
382 | configured thresholds.
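383 | 
384 | Example: with cycles (0, 6, 12, 18) and a server_file_delay of 100
385 | minutes, a system time of 07:00 UTC yields the 00z cycle, since the
386 | 06z files are not expected on the server until 07:40 UTC.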
383 | """
384 | ofs_model = ofs_model.lower()
385 |
386 | now = datetime.datetime.utcnow()
387 | print('Current system time (UTC): {}'.format(now))
388 |
389 | cycletime = None
390 |
391 | # Calculate most recent cycle using configured thresholds
392 | cycles = MODELS[ofs_model]['cycles']
393 | server_file_delay = MODELS[ofs_model]['server_file_delay']
394 |
395 | # Start by listing all cycle times for today, chronologically
396 | today_cycletimes = sorted([datetime.datetime(now.year, now.month, now.day, cycle) for cycle in cycles])
397 |
398 | # Include yesterday's cycles as potential cycles to check
399 | potential_cycletimes = [today_cycletime - datetime.timedelta(days=1) for today_cycletime in
400 | today_cycletimes] + today_cycletimes
401 |
402 | # Now search through potential cycle times in reverse chronological
403 | # order until we find one that should be available
404 | for potential_cycletime in reversed(potential_cycletimes):
405 | if now >= potential_cycletime + server_file_delay:
406 | cycletime = potential_cycletime
407 | break
408 |
409 | return cycletime
410 |
411 |
412 | def download(ofs_model, cycletime, download_dir):
413 | """Download latest model run
414 |
415 | Args:
416 | ofs_model: The target model identifier.
417 | cycletime: `datetime.datetime` representing model initialization
418 | (reference/cycle) time.
419 | download_dir: Path to a parent directory where model output files will
420 | be downloaded. Must exist. Files will be downloaded to a
421 | subdirectory named according to the model identifier (will be
422 | created if it does not yet exist; if it does exist, existing files
423 | will be removed before downloading new files).
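424 | 
425 | Returns:
426 | A list of local paths to the downloaded files.
427 | 
428 | Example (files are placed in <download_dir>/cbofs/):
429 | cycletime = get_latest_cycletime('cbofs')
430 | local_files = download('cbofs', cycletime, '/opt/s100/netcdf/')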
424 | """
425 | if not download_dir.endswith('/'):
426 | download_dir += '/'
427 | download_dir += ofs_model.lower()
428 |
429 | if not os.path.isdir(download_dir):
430 | os.makedirs(download_dir)
431 | else:
432 | # Clean the directory of all data files before downloading and
433 | # processing the new data:
434 | delete_files = glob('{}/*.nc'.format(download_dir))
435 | for delete_file in delete_files:
436 | sys.stderr.write('Removing {0}\n'.format(delete_file))
437 | os.remove(delete_file)
438 |
439 | local_files = []
440 |
441 | def fetch(url, local_file):
442 | """Download a single file, streaming the response to disk."""
443 | print('Downloading {} to {}...'.format(url, local_file))
444 | with urllib.request.urlopen(url) as response, open(local_file, 'wb') as out_file:
445 | shutil.copyfileobj(response, out_file)
446 | print('Download successful.')
447 | local_files.append(local_file)
448 | 
449 | if MODELS[ofs_model]['file_server'] == HTTP_SERVER_NOMADS:
450 | # RTOFS east/west domains share the 'rtofs' file naming on the server
451 | model_str_lc = 'rtofs' if ofs_model in ('rtofs_east', 'rtofs_west') else ofs_model.lower()
452 | for forecast in MODELS[ofs_model]['forecast_hours']:
453 | forecast_str = 'f{0:03d}'.format(forecast)
454 | url = cycletime.strftime('{}{}'.format(MODELS[ofs_model]['file_server'], MODELS[ofs_model]['file_path'])).format(model_str_lc=model_str_lc, forecast_str=forecast_str)
455 | local_file = '{}/{}'.format(download_dir, cycletime.strftime(MODELS[ofs_model]['file_name']).format(model_str_lc=model_str_lc, forecast_str=forecast_str))
456 | fetch(url, local_file)
457 | 
458 | elif MODELS[ofs_model]['file_server'] == HTTP_SERVER_THREDDS:
459 | # Both NYOFS variants are published under the uppercase 'NYOFS' name
460 | if ofs_model in ('nyofs', 'nyofs_fg'):
461 | url = cycletime.strftime('{}{}'.format(MODELS[ofs_model]['file_server'], MODELS[ofs_model]['file_path'])).format(model_str_uc='NYOFS', model_str_lc=ofs_model.lower())
462 | local_file = '{}/{}'.format(download_dir, cycletime.strftime(MODELS[ofs_model]['file_name']).format(model_str_lc=ofs_model.lower()))
463 | fetch(url, local_file)
464 | 
465 | else:
466 | for forecast in MODELS[ofs_model]['forecast_hours']:
467 | forecast_str = 'f{0:03d}'.format(forecast)
468 | url = cycletime.strftime('{}{}'.format(MODELS[ofs_model]['file_server'], MODELS[ofs_model]['file_path'])).format(model_str_uc=ofs_model.upper(), model_str_lc=ofs_model.lower(), forecast_str=forecast_str)
469 | local_file = '{}/{}'.format(download_dir, cycletime.strftime(MODELS[ofs_model]['file_name']).format(model_str_lc=ofs_model.lower(), forecast_str=forecast_str))
470 | fetch(url, local_file)
501 | return local_files
502 |
503 |
504 | def download_and_process(download_dir, s111_dir, cycletime, ofs_model, file_metadata, data_coding_format, target_depth, index_file=None):
505 | """Download latest model run and convert to S-111 format.
506 |
507 | Args:
508 | s111_dir: Path to a parent directory where output S111 HDF5 file(s)
509 | will be generated. Must exist.
510 | cycletime: `datetime.datetime` representing model initialization
511 | (reference/cycle) time.
512 | download_dir: Path to a parent directory where model output files will
513 | be downloaded. Must exist. Files will be downloaded to a
514 | subdirectory named according to the model identifier (will be
515 | created if it does not yet exist; if it does exist, existing files
516 | will be removed before downloading new files).
517 | ofs_model: Ocean forecast system model identifier.
518 | file_metadata: `S111Metadata` instance describing metadata for geographic
519 | identifier and description of current meter type, forecast method,
520 | or model.
521 | data_coding_format: 1:Time series at fixed stations, 2:Regularly gridded arrays,
522 | 3:Ungeorectified gridded arrays, 4:Time series for one moving platform.
523 | index_file (`ModelIndexFile`, optional): The model index file to be
524 | used for interpolation (if required), or `None` (default) if this
525 | model requires no index file for processing.
526 | target_depth: The water current at a specified target depth below the sea
527 | surface in meters.
528 | """
529 | local_files = download(ofs_model, cycletime, download_dir)
530 | print('Files downloaded to {}'.format(download_dir))
531 | print('Converting files to S111 format...')
532 |
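    | # Wrap each downloaded NetCDF file in the model file class matching this
    | # model's type (e.g. ROMS or FVCOM) so it can be read by the converter.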
533 | model_output_files = []
534 | for local_file in local_files:
535 | model_output_files.append(MODEL_FILE_CLASS[MODELS[ofs_model]['model_type']](local_file, datetime_rounding=MODELS[ofs_model]['datetime_rounding']))
536 |
537 | s111.model_to_s111(index_file, model_output_files, s111_dir, cycletime, file_metadata, data_coding_format, target_depth)
538 |
539 |
540 | def create_index_file(index_file_path, model_file_path, model_type, model_name, target_cellsize_meters, grid_shp, grid_field_name, land_shp):
541 | """
542 | Create a model index file.
543 |
544 | Args:
545 | index_file_path: Path to index file to be created.
546 | model_file_path: Path to model file required by index creation.
547 | model_type: Model type designation (e.g. 'roms').
548 | model_name: Name/abbreviation of modeling system (e.g. 'CBOFS').
549 | target_cellsize_meters: Target cellsize (in meters) of grid definition.
550 | grid_shp: Path to subgrid shapefile, if any (None otherwise).
551 | grid_field_name: Name of grid field used for grid identifier, if
552 | desired (None otherwise).
553 | land_shp: Path to land/shoreline shapefile used during mask creation.
554 |
555 | Returns:
556 | True if successful; False if an exception was encountered.
557 | """
558 | index_file = MODEL_INDEX_CLASS[model_type](index_file_path)
559 | model_output_file = MODEL_FILE_CLASS[model_type](model_file_path)
560 |
561 | try:
562 | index_file.open()
563 | model_output_file.open()
564 | index_file.init_nc(model_output_file, int(target_cellsize_meters), model_name, shoreline_shp=land_shp,
565 | subset_grid_shp=grid_shp, subset_grid_field_name=grid_field_name)
566 | return True
567 | except Exception:
568 | return False
569 | finally:
570 | index_file.close()
571 | model_output_file.close()
572 |
573 |
574 | def main():
575 | """Parse command line arguments and execute target functions."""
576 | parser = argparse.ArgumentParser(description='Convert surface currents model output to S-111 HDF5 format')
577 | parser.add_argument('-i', '--index_file_path', help='Path to existing or to-be-created index NetCDF file. This file contains the regular grid definition and interpolation parameters, and is required to convert model output to S111 HDF5 format. If -b/--build_index is specified, any existing file at the specified path will be overwritten.')
578 | parser.add_argument('-s', '--s111_dir', help='Path to a directory where S-111 HDF5 file(s) will be generated. Generated files will be placed in a subdirectory named to match the model identifier (e.g. \'cbofs\') and will be auto-named using identifying information (e.g. model reference/cycle time, subgrid id) with the .h5 file extension. Ignored if -b/--build_index is specified.')
579 | parser.add_argument('-b', '--build_index', action='store_true', help='Build a new index NetCDF file at the path specified by -i/--index_file_path. This file must be generated before processing any model output, as it will contain the regular grid definition and interpolation parameters. Once created, it can be used indefinitely for the target model unless changes to the regular grid extent/resolution are required or if the underlying model grid changes.')
580 | parser.add_argument('-g', '--grid_shp', help='Path to a polygon grid shapefile that will be used to generate matching subgrids when generating an index file. If not specified, the model extent will be used to generate the index file and no subsetting will occur. Only used when -b/--build_index is specified.')
581 | parser.add_argument('-f', '--grid_field_name', help='Name of the field in the grid shapefile whose values identify each subgrid. Only used when -b/--build_index and -g/--grid_shp are specified.')
582 | parser.add_argument('-l', '--land_shp', help='Path to a land/shoreline polygon shapefile that will be used to apply a detailed land mask when generating an index file. If not specified, the grid mask will be determined by the model\'s own mask, which may be less detailed. Only used when -b/--build_index is specified.')
583 | parser.add_argument('-m', '--model_file_path', nargs='+', help='Path to one or more NetCDF files containing raw/native model output to be converted to S111 format (when -s/--s111_dir is specified) or used to build an index file (when -b/--build_index is specified). If not specified, the latest model run will be automatically downloaded and processed. Required when --build_index is specified.', required=False)
584 | parser.add_argument('-d', '--download_dir', help='Path to a directory where downloaded model output files can be placed. Files will be downloaded into a subdirectory named to match the model identifier (e.g. \'cbofs\') (if it does not yet exist, it will be created). Prior to downloading, any existing NetCDF files in the model\'s subdirectory will be deleted to prevent file accumulation. Required when -m/--model_file_path is not specified.')
585 | parser.add_argument('-o', '--ofs_model', help='Identifier of target Operational Forecast System (OFS) to be processed (e.g. cbofs, dbofs, gomofs, or tbofs)', required=True)
586 | parser.add_argument('-c', '--cycletime', help='Model cycle time (i.e. initialization/reference time) to process, in the format YYYYMMDDHH. If not specified, the most recent cycle will be calculated using configured thresholds and present system time.')
587 | parser.add_argument('-t', '--target_cellsize_meters', help='Target cellsize of regular grid cells in meters. Actual size of x/y grid cells will vary slightly, since the regular grid uses lat/lon coordinates (thus a cell\'s width/height in meters will vary by latitude), and since it will be adjusted in order to fit a whole number of grid cells in the x and y directions within the calculated grid extent.')
588 | parser.add_argument('-z', '--target_depth', help='Target depth below the sea surface, in meters, at which water currents will be interpolated. Default is 4.5 meters. Must be greater than or equal to 0.')
589 | parser.add_argument('-code', '--data_coding_format', help='Data format type, 1 - Time series fixed station, 2 - Regular grid, 3 - Ungeorectified grid, 4 - Time series moving platform.', required=True)
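    | # Example invocations (hypothetical paths):
    | #   Build an index file for regular-grid processing:
    | #     python ofs.py -o cbofs -code 2 -b -i cbofs_index.nc -m cbofs_output.nc -t 500
    | #   Convert the latest model run using the existing index file:
    | #     python ofs.py -o cbofs -code 2 -i cbofs_index.nc -s ./s111 -d ./downloads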
590 | args = parser.parse_args()
591 |
592 | ofs_model = args.ofs_model
593 | if not ofs_model or ofs_model.lower() not in MODELS:
594 | parser.error('A valid -o/--ofs_model must be specified. Possible values: {}'.format(', '.join(list(MODELS.keys()))))
595 | return 1
596 | ofs_model = ofs_model.lower()
597 |
598 | target_depth = args.target_depth
599 | if target_depth is not None:
600 | target_depth = float(target_depth)
601 | if target_depth < 0:
602 | parser.error('Invalid -z/--target_depth specified; target depth must be greater than or equal to 0.')
603 | return 1
604 |
605 | data_coding_format = int(args.data_coding_format)
606 |
607 | if data_coding_format not in (1, 2, 3, 4):
608 | parser.error('Invalid data coding format type specified. Supported values: 1 - Time series fixed station, 2 - Regular grid, 3 - Ungeorectified grid, 4 - Time series moving platform')
609 | return 1
610 |
611 | cycletime = args.cycletime
612 | if cycletime is not None:
613 | try:
614 | cycletime = datetime.datetime.strptime(cycletime, '%Y%m%d%H')
615 | except ValueError:
616 | parser.error('Invalid -c/--cycletime specified [{}]. Format must be YYYYMMDDHH.'.format(args.cycletime))
617 | return 1
618 | else:
619 | cycletime = get_latest_cycletime(ofs_model)
620 | if cycletime is None:
621 | print('Error: Latest model cycle time cannot be determined. Verify that system time is correct and review model cycle configuration.')
622 | return 1
623 |
624 | print('Processing forecast cycle with reference time (UTC): {}'.format(cycletime))
625 |
626 | if args.build_index:
    | if args.index_file_path is None:
    | parser.error('An index file path (-i/--index_file_path) must be specified when --build_index is specified.')
    | return 1
627 | if args.target_cellsize_meters is None:
628 | parser.error('Target cellsize in meters must be specified when --build_index is specified.')
630 | return 1
631 | if args.model_file_path is None:
632 | parser.error('At least one model output file must be specified with --model_file_path when --build_index is specified.')
634 | return 1
635 | if args.grid_shp is not None and not os.path.isfile(args.grid_shp):
636 | parser.error('Specified grid shapefile does not exist [{}]'.format(args.grid_shp))
637 | return 1
638 | if args.land_shp is not None and not os.path.isfile(args.land_shp):
639 | parser.error('Specified land/shoreline shapefile does not exist [{}]'.format(args.land_shp))
640 | return 1
641 |
642 | return 0 if create_index_file(args.index_file_path,
643 | args.model_file_path[0],
644 | MODELS[ofs_model]['model_type'],
645 | args.ofs_model,
646 | args.target_cellsize_meters,
647 | args.grid_shp,
648 | args.grid_field_name,
649 | args.land_shp) else 1
650 |
651 | if not args.s111_dir or not os.path.isdir(args.s111_dir):
652 | parser.error('Invalid/missing S-111 output directory (-s/--s111_dir) specified.')
653 | return 1
654 |
655 | # Generated S-111 files are placed in a per-model subdirectory
656 | s111_dir = os.path.join(args.s111_dir, ofs_model.lower())
659 | if not os.path.isdir(s111_dir):
660 | os.makedirs(s111_dir)
661 |
    | # An index file is required only for regular-grid output (data coding format 2)
    | index_file = None
662 | if data_coding_format == 2:
663 | if args.index_file_path is None or not os.path.exists(args.index_file_path):
664 | parser.error('A valid, existing index file (-i/--index_file_path) must be specified [{}]'.format(args.index_file_path))
665 | return 1
666 | 
667 | index_file = MODEL_INDEX_CLASS[MODELS[ofs_model]['model_type']](args.index_file_path)
671 |
672 | if args.model_file_path is not None:
673 | if args.cycletime is None:
674 | parser.error('A valid -c/--cycletime matching the input model forecast must be specified. Format must be YYYYMMDDHH.')
675 | return 1
676 |
677 | model_output_file = MODEL_FILE_CLASS[MODELS[ofs_model]['model_type']](args.model_file_path[0],
678 | datetime_rounding=MODELS[ofs_model][
679 | 'datetime_rounding'])
680 |
681 | file_metadata = s111.S111Metadata(MODELS[ofs_model]['region'], MODELS[ofs_model]['product'], CURRENT_DATATYPE,
682 | PRODUCERCODE_US, None, ofs_model)
683 |
684 | s111.model_to_s111(index_file, [model_output_file], s111_dir, cycletime, file_metadata, data_coding_format, target_depth)
685 |
686 | else:
687 | if not args.download_dir or not os.path.isdir(args.download_dir):
688 | parser.error('Invalid/missing download directory (-d/--download_dir) specified.')
689 | return 1
690 |
691 | file_metadata = s111.S111Metadata(MODELS[ofs_model]['region'], MODELS[ofs_model]['product'], CURRENT_DATATYPE,
692 | PRODUCERCODE_US, None, ofs_model)
693 |
694 | download_and_process(args.download_dir, s111_dir, cycletime, ofs_model, file_metadata, data_coding_format, target_depth, index_file)
695 |
696 | return 0
697 |
698 |
699 | if __name__ == '__main__':
700 | exit_code = main()
701 | print(f"-- Run Time -- {time.time() - start_time:.1f} seconds")
    | sys.exit(exit_code)
702 |
--------------------------------------------------------------------------------