├── tests ├── __init__.py ├── end_to_end_test.py └── test_config.py ├── wrfpy ├── __init__.py ├── cylc │ ├── run_real.py │ ├── run_wrf.py │ ├── wps_run.py │ ├── wrfda_obs.py │ ├── wrfda_obsproc_run.py │ ├── wrfda_obsproc_init.py │ ├── wrf_init.py │ ├── wps_init.py │ ├── upp.py │ ├── combine_synop.py │ ├── prepare_synop.py │ ├── wps_post.py │ ├── copy_synop.py │ ├── wrfda_run.py │ ├── retry_wrf.py │ └── archive.py ├── examples │ └── namelist.wps ├── scripts │ └── wrfpy ├── scale.py ├── readObsTemperature.py ├── split_namelist.py ├── wrf.py ├── utils.py ├── config.py ├── upp.py ├── wps.py ├── configuration.py ├── bumpskin.py └── wrfda.py ├── requirements-docs.txt ├── MANIFEST.in ├── requirements-rtd.txt ├── requirements-test.txt ├── requirements.txt ├── CHANGELOG.md ├── .travis.yml ├── CITATION.cff ├── .zenodo.json ├── setup.py ├── .gitignore ├── README.rst └── LICENSE /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /wrfpy/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements-docs.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | sphinx-autobuild 3 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # Include the license file 2 | include LICENSE 3 | -------------------------------------------------------------------------------- /requirements-rtd.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | sphinx 3 | sphinx-autobuild 4 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | coverage 2 | pytest 3 | pytest-cov<2.6.0 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Jinja2==2.8 2 | MarkupSafe==0.23 3 | PyYAML>=4.2b1 4 | f90nml 5 | python-dateutil==2.7.3 6 | astropy==2.0.9 7 | pathos==0.2.2.1 8 | netCDF4 9 | pyOpenSSL 10 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ### 0.2.1 4 | 5 | * Check for geo_em files in wps workdir instead of wps install directory 6 | 7 | ### 0.2.0 8 | 9 | * Second Beta release 10 | * Cleanup creation of CYLC suite configuration 11 | * General cleanup 12 | 13 | 14 | ### 0.1.0 15 | 16 | * Beta release 17 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | dist: xenial 3 | python: 4 | - "3.5" 5 | - "3.6" 6 | - "3.7" 7 | install: 8 | - pip install . 
9 |   - pip install -r requirements.txt
10 |   - pip install -r requirements-test.txt
11 |   - pip install -r requirements-docs.txt
12 | script:
13 |   - py.test --cov=wrfpy --cov-report term --cov-report xml:cov.xml tests/
14 | after_success:
15 |   - bash <(curl -s https://codecov.io/bash)
16 |
--------------------------------------------------------------------------------
/wrfpy/cylc/run_real.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy.wrf import run_wrf
7 | from wrfpy import utils
8 |
9 |
10 | def main():
11 |     '''
12 |     Main function to run real.exe:
13 |     - instantiates the run_wrf helper class
14 |     - calls run_wrf.run_real()
15 |     '''
16 |     WRF = run_wrf()
17 |     WRF.run_real()
18 |
19 | if __name__=="__main__":
20 |     main()
21 |
--------------------------------------------------------------------------------
/wrfpy/cylc/run_wrf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy.wrf import run_wrf
7 | from wrfpy import utils
8 |
9 |
10 | def main():
11 |     '''
12 |     Main function to run wrf.exe:
13 |     - instantiates the run_wrf helper class
14 |     - calls run_wrf.run_wrf()
15 |     '''
16 |     WRF = run_wrf()
17 |     WRF.run_wrf()
18 |
19 | if __name__=="__main__":
20 |     main()
21 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wps_run.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy.wps import wps
7 | from wrfpy import utils
8 | from dateutil.relativedelta import relativedelta
9 |
10 | def wps_run():
11 |     '''
12 |     Run the WPS steps (geogrid, ungrib and metgrid)
13 |     '''
14 |     WPS = wps()  # initialize object
15 |     WPS._run_geogrid()
16 |     WPS._run_ungrib()
17 |     WPS._run_metgrid()
18 |
19 |
20 | def main():
21 |     '''
22 |     Main function to run wps
23 |     '''
24 |     wps_run()
25 |
26 |
27 | if __name__=="__main__":
28 |     main()
29 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | # YAML 1.2
2 | ---
3 | authors:
4 |   -
5 |     affiliation: "Netherlands eScience Center"
6 |     family-names: Haren
7 |     given-names: Ronald
8 |     name-particle: van
9 | cff-version: "1.0.3"
10 | commit:
11 | date-released: 2018-09-18
12 | doi: 10.5281/zenodo.1420918
13 | keywords:
14 |   - "WRF"
15 |   - "WRFDA"
16 |   - "urban nudging"
17 |   - "workflow"
18 |   - "cylc"
19 | license: Apache-2.0
20 | message: "If you use this software, please cite it using these metadata."
21 | repository-code: "https://github.com/ERA-URBAN/wrfpy"
22 | title: WRFpy
23 | version: "0.2.1"
24 |
--------------------------------------------------------------------------------
/.zenodo.json:
--------------------------------------------------------------------------------
1 | {
2 |     "description": "Python application to facilitate setting-up and running (long) Weather Research and Forecasting (WRF) simulations with (optionally) data assimilation",
3 |     "license": "Apache-2.0",
4 |     "title": "WRFpy",
5 |     "upload_type": "software",
6 |     "creators": [
7 |         {
8 |             "affiliation": "Netherlands eScience Center",
9 |             "name": "van Haren, Ronald"
10 |         }
11 |     ],
12 |     "access_right": "open",
13 |     "keywords": [
14 |         "WRF",
15 |         "WRFDA",
16 |         "radar",
17 |         "data assimilation",
18 |         "ascii"
19 |     ]
20 | }
21 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wrfda_obs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | import shutil
7 | from wrfpy.readObsTemperature import readObsTemperature
8 | from wrfpy import utils
9 |
10 | def main(datestring):
11 |     dt = utils.convert_cylc_time(datestring)
12 |     readObsTemperature(dt, dstationtypes=['davis', 'vp2', 'vantage'])
13 |
14 |
15 | if __name__=="__main__":
16 |     parser = argparse.ArgumentParser(description='Read observed temperatures.')
17 |     parser.add_argument('datestring', metavar='N', type=str,
18 |                         help='Date-time string from cylc suite')
19 |     # parse arguments
20 |     args = parser.parse_args()
21 |     # call main
22 |     main(args.datestring)
23 |
--------------------------------------------------------------------------------
/wrfpy/examples/namelist.wps:
--------------------------------------------------------------------------------
1 | &share
2 |  wrf_core = 'ARW'
3 |  max_dom = 2
4 |  start_date = '2014-07-16_00:00:00', '2014-07-16_00:00:00'
5 |  end_date = '2014-07-20_00:00:00', '2014-07-20_00:00:00'
6 |  interval_seconds = 3600
7 |  io_form_geogrid = 2
8 | /
9 | &geogrid
10 |  parent_id = 0, 1, 2, 3
11 |  parent_grid_ratio = 1, 5, 5, 5
12 |  i_parent_start = 1, 44, 51, 45
13 |  j_parent_start = 1, 52, 48, 48
14 |  e_we = 120, 121, 121, 176
15 |  e_sn = 120, 121, 121, 136
16 |  geog_data_res = '30s', '30s', 'wur-landuse+30s', 'wur-landuse+30s'
17 |  dx = 12500
18 |  dy = 12500
19 |  map_proj = 'lambert'
20 |  ref_lat = 51.964716
21 |  ref_lon = 5.663308
22 |  truelat1 = 30.0
23 |  truelat2 = 60.0
24 |  stand_lon = 4.55
25 |  geog_data_path = '/data/WPS_GEOG/'
26 | /
27 | &ungrib
28 |  out_format = 'WPS'
29 |  prefix = 'FILE'
30 | /
31 | &metgrid
32 |  fg_name = 'FILE'
33 |  io_form_metgrid = 2
34 | /
35 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wrfda_obsproc_run.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from wrfpy.wrfda import wrfda
8 |
9 | def obsproc_run(datestart):
10 |     '''
11 |     Run obsproc for the current timestep
12 |     '''
13 |     WRFDA = wrfda(datestart)  # initialize object
14 |     WRFDA.obsproc_run()
15 |
16 |
17 | def main(datestring):
18 |     '''
19 |     Main function to run obsproc:
20 |     - converts cylc timestring to datetime object
21 |     - calls obsproc_run()
22 |     '''
23 |     dt = utils.convert_cylc_time(datestring)
24 |     obsproc_run(dt)
25 |
26 |
27 | if __name__=="__main__":
28 |     parser = argparse.ArgumentParser(description='Run obsproc.')
29 |     parser.add_argument('datestring', metavar='N', type=str,
30 |                         help='Date-time string from cylc suite')
31 |     # parse arguments
32 |     args = parser.parse_args()
33 |     # call main
34 |     main(args.datestring)
35 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wrfda_obsproc_init.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from wrfpy.wrfda import wrfda
8 |
9 | def obsproc_init(datestart):
10 |     '''
11 |     Initialize obsproc for the current timestep
12 |     '''
13 |     WRFDA = wrfda(datestart)  # initialize object
14 |     WRFDA.obsproc_init(datestart)
15 |
16 |
17 | def main(datestring):
18 |     '''
19 |     Main function to initialize obsproc:
20 |     - converts cylc timestring to datetime object
21 |     - calls obsproc_init()
22 |     '''
23 |     dt = utils.convert_cylc_time(datestring)
24 |     obsproc_init(dt)
25 |
26 |
27 | if __name__=="__main__":
28 |     parser = argparse.ArgumentParser(description='Initialize obsproc.')
29 |     parser.add_argument('datestring', metavar='N', type=str,
30 |                         help='Date-time string from cylc suite')
31 |     # parse arguments
32 |     args = parser.parse_args()
33 |     # call main
34 |     main(args.datestring)
35 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wrf_init.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy.wrf import run_wrf
7 | from wrfpy import utils
8 |
9 |
10 | def main(datestring, interval):
11 |     '''
12 |     Main function to initialize a WRF timestep:
13 |     - converts cylc timestring to datetime object
14 |     - calls run_wrf.initialize()
15 |     '''
16 |     dt = utils.convert_cylc_time(datestring)
17 |     WRF = run_wrf()
18 |     WRF.initialize(dt, dt + datetime.timedelta(hours=interval))
19 |
20 | if __name__=="__main__":
21 |     parser = argparse.ArgumentParser(description='Initialize WRF step.')
22 |     parser.add_argument('datestring', metavar='N', type=str,
23 |                         help='Date-time string from cylc suite')
24 |     parser.add_argument('interval', metavar='I', type=int,
25 |                         help='Time interval in hours')
26 |     # parse arguments
27 |     args = parser.parse_args()
28 |     # call main
29 |     main(args.datestring, args.interval)
30 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wps_init.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy.wps import wps
7 | from wrfpy import utils
8 |
9 |
10 | def wps_init(datestart, dateend):
11 |     '''
12 |     Initialize WPS timestep
13 |     '''
14 |     WPS = wps()  # initialize object
15 |     WPS._initialize(datestart, dateend)
16 |
17 |
18 | def main(datestring, interval):
19 |     '''
20 |     Main function to initialize WPS timestep:
21 |     - converts cylc timestring to datetime object
22 |     - calls wps_init()
23 |     '''
24 |     dt = utils.convert_cylc_time(datestring)
25 |     wps_init(dt, dt + datetime.timedelta(hours=interval))
26 |
27 |
28 | if __name__=="__main__":
29 |     parser = argparse.ArgumentParser(description='Initialize WPS step.')
30 |     parser.add_argument('datestring', metavar='N', type=str,
31 |                         help='Date-time string from cylc suite')
32 |     parser.add_argument('interval', metavar='I', type=int,
33 |                         help='Time interval in hours')
34 |     # parse arguments
35 |     args = parser.parse_args()
36 |     # call main
37 |     main(args.datestring, args.interval)
38 |
--------------------------------------------------------------------------------
/tests/end_to_end_test.py:
--------------------------------------------------------------------------------
1 | import os
2 | from os.path import dirname, abspath
3 | import unittest
4 | import tempfile
5 | from wrfpy.configuration import configuration
6 |
7 |
8 | class end2endtest(unittest.TestCase):
9 |     def setUp(self):
10 |         '''
11 |         setup test environment
12 |         '''
13 |         # define test_data location
14 |         self.test_data = os.path.join(dirname(abspath(__file__)), '..',
15 |                                       'test_data')
16 |
17 |     def test_01(self):
18 |         '''
19 |         Test that initializing a suite creates a config.json file
20 |         '''
21 |         with tempfile.TemporaryDirectory() as temp_dir:
22 |             results = {}
23 |             results['suitename'] = 'test'
24 |             results['basedir'] = temp_dir
25 |             results['init'] = True
26 |             configuration(results)
27 |             # test if config.json exists
28 |             outfile = os.path.join(results['basedir'],
29 |                                    results['suitename'], 'config.json')
30 |             self.assertTrue(os.path.exists(outfile))
31 |
32 |
33 | if __name__ == "__main__":
34 |     unittest.main()
35 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | from setuptools import setup
3 |
4 | def read(fname):
5 |     return open(os.path.join(os.path.dirname(__file__), fname)).read()
6 |
7 | setup(
8 |     name = "WRFpy",
9 |     version = "0.2.1",
10 |     author = "Ronald van Haren",
11 |     author_email = "r.vanharen@esciencecenter.nl",
12 |     description = ("A Python application that provides an easy way to set up,"
13 |                    " run, and monitor (long) Weather Research and Forecasting"
14 |                    " (WRF) simulations."),
15 |     license = "Apache 2.0",
16 |     keywords = "WRF cylc workflow WRFDA",
17 |     url = "https://github.com/ERA-URBAN/wrfpy",
18 |     packages=['wrfpy'],
19 |     include_package_data = True,  # include everything in source control
20 |     package_data={'wrfpy': ['cylc/*.py', 'examples/*']},
21 |     scripts=['wrfpy/scripts/wrfpy'],
22 |     long_description=read('README.rst'),
23 |     classifiers=[
24 |         "Development Status :: 4 - Beta",
25 |         "Topic :: Software Development :: Libraries :: Python Modules",
26 |         "License :: OSI Approved :: Apache Software License",
27 |     ],
28 |     install_requires=['numpy', 'Jinja2', 'MarkupSafe', 'PyYAML', 'f90nml',
29 |                       'python-dateutil', 'astropy', 'pathos', 'netCDF4',
30 |                       'pyOpenSSL'],
31 | )
32 |
33 |
--------------------------------------------------------------------------------
/wrfpy/cylc/upp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from wrfpy.upp import upp
8 | from wrfpy.config import config
9 | import os
10 |
11 | class run_upp(config):
12 |     ''' Run UPP postprocessing on the wrfout file of a single timestep
13 |     '''
14 |     def __init__(self, datestring):
15 |         config.__init__(self)
16 |         dt = utils.convert_cylc_time(datestring)
17 |         postprocess = upp()
18 |         # construct wrfout name for domain 1
19 |         dt_str = dt.strftime('%Y-%m-%d_%H:%M:%S')
20 |         wrfout_name = 'wrfout_d01_' + dt_str
21 |         wrfout_file = os.path.join(self.config['filesystem']['wrf_run_dir'], wrfout_name)
22 |         start_date = utils.return_validate(postprocess.config['options_general']['date_start'])
23 |         upp_interval = postprocess.config['options_upp']['upp_interval']
24 |         if (start_date == dt):  # very first timestep
25 |             postprocess.run_unipost_file(wrfout_file, frequency=upp_interval, use_t0=True)
26 |         else:
27 |             postprocess.run_unipost_file(wrfout_file, frequency=upp_interval, use_t0=False)
28 |
29 |
30 | if __name__=="__main__":
31 |     parser = argparse.ArgumentParser(description='Run UPP postprocessing step.')
32 |     parser.add_argument('datestring', metavar='N', type=str,
33 |                         help='Date-time string from cylc suite')
34 |     # parse arguments
35 |     args = parser.parse_args()
36 |     # call main
37 |     run_upp(args.datestring)
38 |
--------------------------------------------------------------------------------
/wrfpy/scripts/wrfpy:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | description: Configuration part of wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | import os
10 | import argparse
11 | from wrfpy.configuration import configuration
12 |
13 |
14 | def cli_parser():
15 |     '''
16 |     parse command line arguments
17 |     '''
18 |     parser = argparse.ArgumentParser(
19 |         description='WRFpy',
20 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
21 |     parser.add_argument('--init', action='store_true',
22 |                         help='Initialize suite')
23 |     parser.add_argument('--create', action='store_true',
24 |                         help='Create suite config')
25 |     parser.add_argument('--basedir', type=str,
26 |                         default=os.path.join(os.path.expanduser("~"),
27 |                                              'cylc-suites'),
28 |                         help="basedir in which suites are installed")
29 |     parser.add_argument('suitename',
30 |                         type=str, help='name of suite')
31 |     results = vars(parser.parse_args())
32 |     # either initialize or create a suite, not both
33 |     if (results['init'] ^ results['create']):
34 |         configuration(results)
35 |     else:
36 |         # print error message to the user, combination of --init and --create
37 |         # is not allowed
38 |         print("Only one of '--init' and '--create' is allowed.")
39 |         exit()
40 |
41 |
42 | if __name__ == "__main__":
43 |     cli_parser()
44 |
--------------------------------------------------------------------------------
/wrfpy/cylc/combine_synop.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from pynetcdf2littler.wrapper_littler import wrapper_littler
8 | from dateutil.relativedelta import relativedelta
9 | import os
10 | import glob
11 | import fileinput
12 |
13 | def main(args):
14 |     '''
15 |     Example script to combine the different output files
16 |     from the prepare_synop.py script
17 |     '''
18 |     dt = utils.convert_cylc_time(args.datestring)
19 |     # startdate
20 |     dt1 = datetime.datetime(dt.year, dt.month, 1)
21 |     dt1s = dt1.strftime('%Y%m%d')  # convert to string
22 |     outputdir = os.path.join(args.outputdir, dt1s)
23 |     filenames = glob.glob(os.path.join(outputdir, '*'))
24 |     outputfile = args.outputfile
25 |     if filenames:
26 |         with open(os.path.join
27 |                   (outputdir, outputfile), 'w') as fout:
28 |             for line in fileinput.input(filenames):
29 |                 fout.write(line)
30 |     else:
31 |         with open(outputfile, 'a'):
32 |             os.utime(outputfile, None)
33 |
34 |
35 | if __name__=="__main__":
36 |     parser = argparse.ArgumentParser(description='Combine synop output files.')
37 |     parser.add_argument('datestring', metavar='N', type=str,
38 |                         help='Date-time string from cylc suite')
39 |     parser.add_argument('-d', '--outputdir', help='outputdir',
40 |                         required=False, default=os.getcwd())
41 |     parser.add_argument('-o', '--outputfile', help='name of outputfile',
42 |                         required=True)
43 |     # parse arguments
44 |     args = parser.parse_args()
45 |     # call main
46 |     main(args)
47 |
--------------------------------------------------------------------------------
/wrfpy/cylc/prepare_synop.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from pynetcdf2littler.wrapper_littler import wrapper_littler
8 | from dateutil.relativedelta import relativedelta
9 | import os
10 |
11 | def main(args):
12 |     '''
13 |     Example script to integrate pynetcdf2littler into CYLC
14 |     '''
15 |     dt = utils.convert_cylc_time(args.datestring)
16 |     # startdate
17 |     dt1 = datetime.datetime(dt.year, dt.month, 1)
18 |     dt1s = dt1.strftime('%Y%m%d')  # convert to string
19 |     dt2 = dt1 + relativedelta(months=1)
20 |     dt2s = dt2.strftime('%Y%m%d')  # convert to string
21 |     outputdir = os.path.join(args.outputdir, dt1s)
22 |     wrapper_littler(args.filelist, args.namelist, outputdir,
23 |                     args.outputfile, dt1s, dt2s)
24 |
25 |
26 | if __name__=="__main__":
27 |     parser = argparse.ArgumentParser(description='Prepare synop observations.')
28 |     parser.add_argument('datestring', metavar='N', type=str,
29 |                         help='Date-time string from cylc suite')
30 |     parser.add_argument('-f', '--filelist',
31 |                         help='filelist containing netcdf files',
32 |                         default='wrapper.filelist', required=False)
33 |     parser.add_argument('-n', '--namelist', help='netcdf2littler namelist',
34 |                         required=True)
35 |     parser.add_argument('-d', '--outputdir', help='outputdir',
36 |                         required=False, default=os.getcwd())
37 |     parser.add_argument('-o', '--outputfile', help='name of outputfile',
38 |                         required=False, default='pynetcdf2littler.output')
39 |     # parse arguments
40 |     args = parser.parse_args()
41 |     # call main
42 |     main(args)
43 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wps_post.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from wrfpy.config import config
8 | import os
9 | #from urb import urb
10 | import shutil
11 | import glob
12 |
13 | class wps_post(config):
14 |     '''
15 |     Copy the met_em files produced by WPS to the WRF run directory
16 |     and afterwards clean up the intermediate files (met_em, FILE,
17 |     PFILE, GRIBFILE) from the WPS work directory
18 |     '''
19 |     def __init__(self):
20 |         config.__init__(self)
21 |         rundir = self.config['filesystem']['wrf_run_dir']
22 |         wpsdir = os.path.join(self.config['filesystem']['work_dir'], 'wps')
23 |         ## wrf run dir
24 |         # cleanup old met_em files
25 |         # create list of files to remove
26 |         #files = [glob.glob(os.path.join(rundir, ext))
27 |         #         for ext in ['met_em*']]
28 |         # flatten list
29 |         #files_flat = [item for sublist in files for item in sublist]
30 |         # remove files silently
31 |         #[ utils.silentremove(filename) for filename in files_flat ]
32 |         # copy new met_em files
33 |         # create list of files to copy
34 |         files = [glob.glob(os.path.join(wpsdir, ext))
35 |                  for ext in ['met_em*']]
36 |         # flatten list
37 |         files_flat = [item for sublist in files for item in sublist]
38 |         [ shutil.copyfile(filename, os.path.join(rundir, os.path.basename(filename))) for filename in files_flat ]
39 |         ## wps workdir
40 |         # create list of files to remove
41 |         files = [glob.glob(os.path.join(wpsdir, ext))
42 |                  for ext in ['met_em*', 'FILE*', 'PFILE*', 'GRIBFILE*']]
43 |         # flatten list
44 |         files_flat = [item for sublist in files for item in sublist]
45 |         # remove files silently
46 |         [ utils.silentremove(filename) for filename in files_flat ]
47 |
48 | if __name__=="__main__":
49 |     wps_post()
50 |
--------------------------------------------------------------------------------
/wrfpy/cylc/copy_synop.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import time
6 | from wrfpy import utils
7 | from wrfpy.config import config
8 | from pynetcdf2littler.wrapper_littler import wrapper_littler
9 | from dateutil.relativedelta import relativedelta
10 | import os
11 | import glob
12 | import shutil
13 |
14 |
15 | class copySynop(config):
16 |     '''
17 |     Example script showing how to copy output files from e.g.
18 |     prepare_synop.py or combine_synop.py when there are
19 |     different synop input files for different days/months
20 |     '''
21 |     def __init__(self, args):
22 |         config.__init__(self)
23 |         obsDir = self.config['filesystem']['obs_dir']
24 |         obsFilename = self.config['filesystem']['obs_filename']
25 |         outputFile = os.path.join(obsDir, obsFilename)
26 |         dt = utils.convert_cylc_time(args.datestring)
27 |         # startdate
28 |         dt1 = datetime.datetime(dt.year, dt.month, 1)
29 |         dt1s = dt1.strftime('%Y%m%d')  # convert to string
30 |         inputdir = os.path.join(args.inputdir, dt1s)
31 |         inputFile = os.path.join(inputdir, args.inputfile)
32 |         # remove existing file
33 |         utils.silentremove(outputFile)
34 |         # copy inputfile to location specified in config.json
35 |         shutil.copyfile(inputFile, outputFile)
36 |
37 |
38 | if __name__=="__main__":
39 |     parser = argparse.ArgumentParser(description='Copy synop observations.')
40 |     parser.add_argument('datestring', metavar='N', type=str,
41 |                         help='Date-time string from cylc suite')
42 |     parser.add_argument('-d', '--inputdir', help='inputdir',
43 |                         required=False, default=os.getcwd())
44 |     parser.add_argument('-i', '--inputfile', help='name of inputfile',
45 |                         required=True)
46 |     # parse arguments
47 |     args = parser.parse_args()
48 |     # call main
49 |     copySynop(args)
50 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # IPython
79 | profile_default/
80 | ipython_config.py
81 |
82 | # pyenv
83 | .python-version
84 |
85 | # celery beat schedule file
86 | celerybeat-schedule
87 |
88 | # SageMath parsed files
89 | *.sage.py
90 |
91 | # Environments
92 | .env
93 | .venv
94 | env/
95 | venv/
96 | ENV/
97 | env.bak/
98 | venv.bak/
99 |
100 | # Spyder project settings
101 | .spyderproject
102 | .spyproject
103 |
104 | # Rope project settings
105 | .ropeproject
106 |
107 | # mkdocs documentation
108 | /site
109 |
110 | # mypy
111 | .mypy_cache/
112 | .dmypy.json
113 | dmypy.json
114 |
115 | # Pyre type checker
116 | .pyre/
117 |
118 |
--------------------------------------------------------------------------------
/wrfpy/cylc/wrfda_run.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import datetime
5 | import os
6 | from wrfpy import utils
7 | from wrfpy.wrfda import wrfda
8 | from wrfpy.bumpskin import *
9 | from wrfpy.scale import wrfda_interpolate
10 | from wrfpy.config import config
11 | import shutil
12 |
13 | class dataAssimilation(config):
14 |     '''
15 |     Data assimilation helper class
16 |     '''
17 |     def __init__(self, datestring):
18 |         config.__init__(self)
19 |         datestart = utils.convert_cylc_time(datestring)
20 |         # initialize WRFDA object
21 |         WRFDA = wrfda(datestart)
22 |         WRFDA.prepare_updatebc(datestart)
23 |         # update lower boundary conditions
24 |         for domain in range(1, WRFDA.max_dom+1):
25 |             WRFDA.updatebc_run(domain)  # run da_updatebc.exe
26 |         # copy radar data into WRFDA workdir if available
27 |         try:
28 |             radarFile = self.config['filesystem']['radar_filepath']
29 |             radarTarget = os.path.join(self.config['filesystem']['work_dir'],
30 |                                        'wrfda', 'd01', 'ob.radar')
31 |             shutil.copyfile(radarFile, radarTarget)
32 |         except (KeyError, IOError):
33 |             pass
34 |         # prepare for running da_wrfvar.exe
35 |         WRFDA.prepare_wrfda()
36 |         # run da_wrfvar.exe
37 |         WRFDA.wrfvar_run(1)
38 |         # interpolate rural variables from wrfda
39 |         wrfda_interpolate(itype='rural')
40 |         try:
41 |             urbanData = self.config['options_urbantemps']['urban_stations']
42 |         except KeyError:
43 |             urbanData = False
44 |         if urbanData:
45 |             bskin = bumpskin(urbanData, dstationtypes=['davis', 'vp2', 'vantage'])
46 |         # update URBPARM.TBL with anthropogenic heat factors
47 |         try:
48 |             urbparmFile = self.config['options_wrf']['urbparm.tbl']
49 |         except KeyError:
50 |             urbparmFile = False
51 |         if urbparmFile:
52 |             urbparm(datestart, urbparmFile)
53 |         # update lateral boundary conditions
54 |         WRFDA.prepare_updatebc_type('lateral', datestart, 1)
55 |         WRFDA.updatebc_run(1)
56 |         # copy files over to WRF run_dir
57 |         WRFDA.wrfda_post(datestart)
58 |
59 |
60 | if __name__=="__main__":
61 |     parser = argparse.ArgumentParser(description='Run data assimilation step.')
62 |     parser.add_argument('datestring',
metavar='N', type=str, 63 | help='Date-time string from cylc suite') 64 | # parse arguments 65 | args = parser.parse_args() 66 | # call dataAssimilation class 67 | dataAssimilation(args.datestring) 68 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg 2 | :target: https://opensource.org/licenses/Apache-2.0 3 | .. image:: https://travis-ci.org/ERA-URBAN/wrfpy.svg?branch=master 4 | :target: https://travis-ci.org/ERA-URBAN/wrfpy 5 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.1420918.svg 6 | :target: https://doi.org/10.5281/zenodo.1420918 7 | .. image:: https://badge.fury.io/py/WRFpy.svg 8 | :target: https://badge.fury.io/py/WRFpy 9 | 10 | WRFpy 11 | ===== 12 | 13 | What is WRFpy: 14 | ~~~~~~~~~~~~~~ 15 | 16 | WRFpy is a python application that provides an easy way to set up, run, 17 | and monitor (long) Weather Research and Forecasting (WRF) simulations. 18 | It provides a simple user-editable JSON configuration file and 19 | integrates with Cylc to access distributed computing and storage 20 | resources as well as monitoring. Optionally, WRFpy allows for data 21 | assimilation using WRF data assimilation system (WRFDA) and 22 | postprocessing of wrfinput files using the NCEP Unified Post Processing 23 | System (UPP). 24 | 25 | Installation 26 | ~~~~~~~~~~~~ 27 | 28 | WRFpy is installable via pip: 29 | 30 | :: 31 | 32 | pip install wrfpy 33 | 34 | 35 | Usage 36 | ~~~~~ 37 | 38 | WRFpy provides functionality depending on the used command-line 39 | switches: 40 | 41 | :: 42 | 43 | usage: wrfpy [-h] [--init] [--create] [--basedir BASEDIR] suitename 44 | 45 | WRFpy 46 | 47 | positional arguments: 48 | suitename name of suite 49 | 50 | optional arguments: 51 | -h, --help show this help message and exit 52 | --init Initialize suite (default: False) 53 | --create Create suite config (default: False) 54 | --basedir BASEDIR basedir in which suites are installed (default: 55 | ${HOME}/cylc-suites) 56 | 57 | 58 | In order to set up a new cylc suite, we first need to initialize one. 59 | This is done using the following command: 60 | 61 | :: 62 | 63 | wrfpy --init testsuite 64 | 65 | This creates a configuration file (config.json) that needs to be filled 66 | in by the user before continuing. WRFpy points the user to the location 67 | of this file. 68 | 69 | After the configuration file has been filled, it is time to create the 70 | actual configuration that will be used by the CYLC workflow engine. To 71 | create the CYLC suite, use the following command: 72 | 73 | :: 74 | 75 | wrfpy --create testsuite 76 | 77 | The final configuration lives in a file called suite.rc. If you want to 78 | make further (specialized) changes to the workflow by adding/tweaking 79 | steps, you can directly edit the suite.rc file with your favorite 80 | editor. 81 | 82 | Now it is time to register the suite with CYLC. CYLC is available at 83 | 84 | :: 85 | 86 | https://cylc.github.io/cylc/ 87 | 88 | and has great documentation. From now on you are using CYLC to control 89 | your WRF runs. Please consult the CYLC documentation for the relevant 90 | commands. 
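
Configuration example
~~~~~~~~~~~~~~~~~~~~~

The config.json file itself is generated by WRFpy; the fragment below is
only an illustrative sketch (all paths and dates are placeholders, and
the real file contains more sections) of the kind of entries the
workflow tasks read, such as filesystem locations and the simulation
start and end dates:

::

    {
      "filesystem": {
        "work_dir": "/home/user/wrfpy-work",
        "wrf_dir": "/home/user/WRF",
        "wrf_run_dir": "/home/user/wrfpy-work/wrf/run"
      },
      "options_general": {
        "date_start": "2019-01-01_00",
        "date_end": "2019-01-02_00",
        "run_hours": "1",
        "boundary_interval": 1
      },
      "options_wps": {
        "namelist.wps": "/home/user/namelist.wps",
        "run_hours": "1"
      },
      "options_wrf": {
        "namelist.input": "/home/user/namelist.input"
      }
    }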
91 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | description: Configuration part of wrfpy 5 | license: APACHE 2.0 6 | """ 7 | 8 | import os 9 | import tempfile 10 | import unittest 11 | 12 | import pkg_resources 13 | 14 | from wrfpy.config import config 15 | 16 | 17 | class TestConfig(unittest.TestCase): 18 | """Tests for the config module.""" 19 | 20 | def test_load_valid_config(self): 21 | """Test validation for run_hours fields in general.""" 22 | with tempfile.TemporaryDirectory() as temp_dir: 23 | config_file = os.path.join(temp_dir, "config.json") 24 | cfg = self._create_basic_config(config_file) 25 | cfg._check_config() 26 | self.assertEqual(1, cfg.config["options_general"]["boundary_interval"]) 27 | 28 | def test_general_run_hours(self): 29 | """Test validation for run_hours fields in wps.""" 30 | with tempfile.TemporaryDirectory() as temp_dir: 31 | config_file = os.path.join(temp_dir, "config.json") 32 | cfg = self._create_basic_config(config_file) 33 | 34 | # fail to validate if wps run_hours is not present 35 | cfg.config["options_general"]["run_hours"] = None 36 | with self.assertRaises(AssertionError): 37 | cfg._check_general() 38 | 39 | def test_wps_run_hours(self): 40 | """Test validation for run_hours fields in general and wps.""" 41 | with tempfile.TemporaryDirectory() as temp_dir: 42 | config_file = os.path.join(temp_dir, "config.json") 43 | cfg = self._create_basic_config(config_file) 44 | 45 | # fail to validate if general run_hours is not present 46 | cfg.config["options_wps"]["run_hours"] = None 47 | with self.assertRaises(AssertionError): 48 | cfg._check_wps() 49 | 50 | def test_start_date_before_end_date_validation(self): 51 | """Test validation for start date coming before end date.""" 52 | with tempfile.TemporaryDirectory() as temp_dir: 53 | config_file = os.path.join(temp_dir, "config.json") 54 | cfg = self._create_basic_config(config_file) 55 | 56 | # fail to validate if general run_hours is not present 57 | cfg.config["options_general"]["date_start"], \ 58 | cfg.config["options_general"]["date_end"] = cfg.config["options_general"]["date_end"], \ 59 | cfg.config["options_general"]["date_start"] 60 | 61 | with self.assertRaises(IOError): 62 | cfg._check_general() 63 | 64 | @classmethod 65 | def _create_basic_config(cls, config_file: str) -> config: 66 | """Create minimal configuration file for unit config unit tests.""" 67 | cfg = config(wrfpy_config=config_file) 68 | cfg._read_json() 69 | # general 70 | cfg.config["options_general"]["boundary_interval"] = 1 71 | cfg.config["options_general"]["date_end"] = "2019-01-01_01" 72 | cfg.config["options_general"]["date_start"] = "2019-01-01_00" 73 | cfg.config["options_general"]["run_hours"] = "1" 74 | # wps 75 | cfg.config["options_wps"]["namelist.wps"] = cls._get_example_namelist() 76 | cfg.config["options_wps"]["run_hours"] = "1" 77 | # wrf 78 | cfg.config["options_wrf"]["namelist.input"] = cls._get_example_namelist() 79 | return cfg 80 | 81 | @classmethod 82 | def _get_example_namelist(cls) -> str: 83 | resource_package = __name__ 84 | resource_path = '/'.join(('..', 'wrfpy', 'examples', 'namelist.wps')) 85 | filename = pkg_resources.resource_filename(resource_package, resource_path) 86 | return os.path.realpath(filename) 87 | 88 | 89 | if __name__ == '__main__': 90 | unittest.main() 91 | 
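
The unit tests above double as a usage example for the config class. The
snippet below is a minimal standalone sketch based purely on the calls
exercised in TestConfig; the private _read_json and _check_config methods
are assumed from that usage and are not a documented public API.

import os
import tempfile

from wrfpy.config import config

with tempfile.TemporaryDirectory() as temp_dir:
    # point the config class at a json file in a scratch directory,
    # mirroring the setup in TestConfig._create_basic_config
    cfg = config(wrfpy_config=os.path.join(temp_dir, "config.json"))
    cfg._read_json()
    # fill in (part of) the configuration, then re-run the validators
    cfg.config["options_general"]["boundary_interval"] = 1
    cfg._check_config()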
-------------------------------------------------------------------------------- /wrfpy/cylc/retry_wrf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | description: WRF part of wrfpy 5 | license: APACHE 2.0 6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl) 7 | ''' 8 | 9 | from wrfpy.config import config 10 | from wrfpy import utils 11 | import f90nml 12 | import os 13 | import shutil 14 | import argparse 15 | import collections 16 | import subprocess 17 | import time 18 | 19 | class retry_wrf(config): 20 | ''' 21 | change namelist timestep in rundir 22 | ''' 23 | def __init__(self): 24 | config.__init__(self) # load config 25 | self.wrf_run_dir = self.config['filesystem']['wrf_run_dir'] 26 | self._cli_parser() 27 | self.define_retry_values() 28 | self.load_nml() 29 | self.change_namelist() 30 | self.write_namelist() 31 | self.run_wrf() 32 | 33 | def _cli_parser(self): 34 | ''' 35 | parse command line arguments 36 | ''' 37 | parser = argparse.ArgumentParser( 38 | description='WRF retry script', 39 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 40 | parser.add_argument('datestring', metavar='N', type=str, 41 | help='Date-time string from cylc suite') 42 | parser.add_argument('retrynumber', metavar='M', type=int, 43 | help='cylc retry number') 44 | args = parser.parse_args() 45 | self.retry_number = args.retrynumber 46 | self.dt = utils.convert_cylc_time(args.datestring) 47 | 48 | def load_nml(self): 49 | ''' 50 | load namelist in wrf rundir 51 | ''' 52 | self.wrf_nml = f90nml.read(os.path.join(self.wrf_run_dir, 53 | 'namelist.input')) 54 | 55 | def define_retry_values(self): 56 | ''' 57 | define retry values 58 | ''' 59 | # empty nested dictionary 60 | self.retry_values = collections.defaultdict(dict) 61 | # define retry steps 62 | self.retry_values[1]['time_step'] = 10 63 | self.retry_values[1]['parent_time_step_ratio'] = [1, 5, 5] 64 | self.retry_values[2]['time_step'] = 8 65 | self.retry_values[2]['parent_time_step_ratio'] = [1, 5, 5] 66 | self.retry_values[3]['time_step'] = 12 67 | self.retry_values[3]['parent_time_step_ratio'] = [1, 6, 6] 68 | self.retry_values[4]['time_step'] = 6 69 | self.retry_values[4]['parent_time_step_ratio'] = [1, 5, 5] 70 | 71 | 72 | def change_namelist(self): 73 | if self.retry_number in [1,2,3,4]: 74 | self.wrf_nml['domains']['parent_time_step_ratio' 75 | ] = self.retry_values[self.retry_number]['parent_time_step_ratio'] 76 | self.wrf_nml['domains']['time_step' 77 | ] = self.retry_values[self.retry_number]['time_step'] 78 | elif self.retry_number > 4: 79 | print('falling back to no data assimilation') 80 | for dom in [1, 2, 3]: 81 | # construct wrfout name for domain 1 82 | dt_str = self.dt.strftime('%Y-%m-%d_%H:%M:%S') 83 | wrfvar_input = 'wrfvar_input_d0' + str(dom) + '_' + dt_str 84 | # remove wrfinput file with data assimilation 85 | os.remove(os.path.join(self.wrf_run_dir, 'wrfinput_d0' + str(dom))) 86 | # copy wrfinput file without data assimilation as fallback 87 | shutil.copyfile(os.path.join(self.wrf_run_dir, wrfvar_input), 88 | os.path.join(self.wrf_run_dir, 'wrfinput_d0' + str(dom))) 89 | 90 | def write_namelist(self): 91 | ''' 92 | write changed namelist to disk 93 | ''' 94 | # remove backup file if exists 95 | try: 96 | os.remove(os.path.join(self.wrf_run_dir, 'namelist.input.bak')) 97 | except OSError: 98 | pass 99 | # copy file to backup file 100 | shutil.copyfile(os.path.join(self.wrf_run_dir, 'namelist.input'), 101 | 
os.path.join(self.wrf_run_dir, 'namelist.input.bak')) 102 | # remove original namelist 103 | os.remove(os.path.join(self.wrf_run_dir, 'namelist.input')) 104 | # write new namelist 105 | self.wrf_nml.write(os.path.join(self.wrf_run_dir, 106 | 'namelist.input')) 107 | 108 | def run_wrf(self): 109 | ''' 110 | run wrf 111 | ''' 112 | j_id = None 113 | if len(self.config['options_slurm']['slurm_wrf.exe']): 114 | # run using slurm 115 | if j_id: 116 | mid = "--dependency=afterok:%d" %j_id 117 | wrf_command = ['sbatch', mid, self.config['options_slurm']['slurm_wrf.exe']] 118 | else: 119 | wrf_command = ['sbatch', self.config['options_slurm']['slurm_wrf.exe']] 120 | utils.check_file_exists(wrf_command[-1]) 121 | try: 122 | res = subprocess.check_output(wrf_command, cwd=self.wrf_run_dir, 123 | stderr=utils.devnull()) 124 | j_id = int(res.split()[-1]) # slurm job-id 125 | except subprocess.CalledProcessError: 126 | #logger.error('WRF failed %s:' %wrf_command) 127 | raise # re-raise exception 128 | utils.waitJobToFinish(j_id) 129 | else: 130 | # run locally 131 | subprocess.check_call(os.path.join(self.wrf_run_dir, 'wrf.exe'), cwd=self.wrf_run_dir, 132 | stdout=utils.devnull(), stderr=utils.devnull()) 133 | 134 | if __name__=="__main__": 135 | retry_wrf() 136 | -------------------------------------------------------------------------------- /wrfpy/scale.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from scipy import interpolate 4 | from netCDF4 import Dataset 5 | import os 6 | import numpy as np 7 | import shutil 8 | import f90nml 9 | from wrfpy.config import config 10 | from wrfpy import utils 11 | from datetime import datetime 12 | 13 | 14 | class wrfda_interpolate(config): 15 | def __init__(self, itype='rural'): 16 | if itype not in ['rural', 'urban', 'both']: 17 | raise Exception('Unknown itype, should be one of rural, urban, both') 18 | config.__init__(self) 19 | # read WRF namelist in WRF work_dir 20 | wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input']) 21 | self.wrfda_workdir = os.path.join(self.config['filesystem']['work_dir'], 22 | 'wrfda') 23 | self.wrf_rundir = self.config['filesystem']['work_dir'] 24 | # get number of domains 25 | ndoms = wrf_nml['domains']['max_dom'] 26 | # check if ndoms is an integer and >0 27 | if not (isinstance(ndoms, int) and ndoms>0): 28 | raise ValueError("'domains_max_dom' namelist variable should be an " \ 29 | "integer>0") 30 | doms = range(2, ndoms+1) 31 | for dom in doms: 32 | pdomain = 1 33 | self.read_init(dom, pdomain) 34 | if ((itype=='rural') or (itype=='both')): 35 | self.fix_2d_field('ALBBCK', 'CANWAT', 'MU', 'PSFC', 'SST', 'TMN', 'TSK', 'T2') 36 | self.fix_3d_field('P', 'PH', 'SH2O', 'SMOIS', 'T', 'W', 'QVAPOR') 37 | self.fix_3d_field_uv(self.XLAT_U_p, self.XLONG_U_p, self.XLAT_U_c, self.XLONG_U_c, 'U') 38 | self.fix_3d_field_uv(self.XLAT_V_p, self.XLONG_V_p, self.XLAT_V_c, self.XLONG_V_c, 'V') 39 | if ndoms > 1: 40 | self.cleanup(dom) 41 | 42 | def read_init(self, cdom, pdom): 43 | c_wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(cdom)) 44 | p_wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(pdom)) 45 | self.fg_p = Dataset(os.path.join(p_wrfda_workdir, 'fg'), 'r') 46 | self.wrfinput_p = Dataset(os.path.join(p_wrfda_workdir, 'wrfvar_output'), 'r') 47 | shutil.copyfile(os.path.join(c_wrfda_workdir, 'fg'), os.path.join(c_wrfda_workdir, 'wrfvar_output')) 48 | self.wrfinput_c = Dataset(os.path.join(c_wrfda_workdir, 'wrfvar_output'), 'r+') 49 | 
# get time information from wrfinput file
50 |         dtobj, datestr = self.get_time(os.path.join(c_wrfda_workdir, 'wrfvar_output'))
51 |         # get file connection to wrfvar_input file for child domain in wrf run directory
52 |         start_date = utils.return_validate(
53 |             self.config['options_general']['date_start'])
54 |         if (dtobj == start_date):  # very first timestep
55 |             self.wrfinput_c_nolsm = Dataset(os.path.join(self.wrf_rundir, ('wrfinput_d0' + str(cdom))), 'r')
56 |         else:
57 |             self.wrfinput_c_nolsm = Dataset(os.path.join(self.wrf_rundir, ('wrfvar_input_d0' + str(cdom) + '_' + datestr)), 'r')
58 |         # lon/lat information parent domain
59 |         self.XLONG_p = self.wrfinput_p.variables['XLONG'][0,:]
60 |         self.XLAT_p = self.wrfinput_p.variables['XLAT'][0,:]
61 |         # lon/lat information child domain
62 |         self.XLONG_c = self.wrfinput_c.variables['XLONG'][0,:]
63 |         self.XLAT_c = self.wrfinput_c.variables['XLAT'][0,:]
64 |         # lon/lat information parent domain
65 |         self.XLONG_U_p = self.wrfinput_p.variables['XLONG_U'][0,:]
66 |         self.XLAT_U_p = self.wrfinput_p.variables['XLAT_U'][0,:]
67 |         # lon/lat information child domain
68 |         self.XLONG_U_c = self.wrfinput_c.variables['XLONG_U'][0,:]
69 |         self.XLAT_U_c = self.wrfinput_c.variables['XLAT_U'][0,:]
70 |         # V
71 |         # lon/lat information parent domain
72 |         self.XLONG_V_p = self.wrfinput_p.variables['XLONG_V'][0,:]
73 |         self.XLAT_V_p = self.wrfinput_p.variables['XLAT_V'][0,:]
74 |         # lon/lat information child domain
75 |         self.XLONG_V_c = self.wrfinput_c.variables['XLONG_V'][0,:]
76 |         self.XLAT_V_c = self.wrfinput_c.variables['XLAT_V'][0,:]
77 |
78 |     def get_time(self, wrfinput):
79 |         '''
80 |         get time from wrfinput file
81 |         '''
82 |         wrfinput = Dataset(wrfinput, 'r')  # open netcdf file
83 |         # get datetime string from wrfinput file (netCDF4 returns the characters as bytes)
84 |         datestr = ''.join([c.decode() for c in wrfinput.variables['Times'][0]])
85 |         # convert to datetime object
86 |         dtobj = datetime.strptime(datestr, '%Y-%m-%d_%H:%M:%S')
87 |         wrfinput.close()  # close netcdf file
88 |         return dtobj, datestr
89 |
90 |     def fix_2d_field(self, *variables):
91 |         #XLONG_p_i = self.XLONG_p[self.wrfinput_p.variables['LU_INDEX'][0,:]==1].reshape(-1)
92 |         #XLAT_p_i = self.XLAT_p[self.wrfinput_p.variables['LU_INDEX'][0,:]==1].reshape(-1)
93 |         XLONG_p_i = self.XLONG_p.reshape(-1)
94 |         XLAT_p_i = self.XLAT_p.reshape(-1)
95 |         for variable in variables:
96 |             var = self.wrfinput_p.variables[variable][0,:] - self.fg_p.variables[variable][0,:]
97 |             #var_i = var[self.wrfinput_p.variables['LU_INDEX'][0,:]==1].reshape(-1)
98 |             var_i = var.reshape(-1)
99 |             # interpolate regular wrfda variables with nearest neighbor interpolation
100 |             intp_var = interpolate.griddata((XLONG_p_i,XLAT_p_i), var_i, (self.XLONG_c.reshape(-1),self.XLAT_c.reshape(-1)), method='nearest').reshape(np.shape(self.XLONG_c))
101 |             self.wrfinput_c.variables[variable][:] += intp_var
102 |
103 |     def fix_3d_field(self, *variables):
104 |         XLONG_p_i = self.XLONG_p.reshape(-1)
105 |         XLAT_p_i = self.XLAT_p.reshape(-1)
106 |         for variable in variables:
107 |             var = self.wrfinput_p.variables[variable][0,:] - self.fg_p.variables[variable][0,:]
108 |             # interpolate regular wrfda variables with nearest neighbor interpolation
109 |             intp_var = [interpolate.griddata((XLONG_p_i,XLAT_p_i), var[lev,:].reshape(-1), (self.XLONG_c.reshape(-1),self.XLAT_c.reshape(-1)), method='nearest').reshape(np.shape(self.XLONG_c)) for lev in range(0,len(var))]
110 |             self.wrfinput_c.variables[variable][:] += intp_var
111 |
112 |     def fix_3d_field_uv(self, XLAT_p, XLONG_p, XLAT_c, XLONG_c, *variables):
113 |         for variable in
variables: 114 | var = self.wrfinput_p.variables[variable][0,:] - self.fg_p.variables[variable][0,:] 115 | intp_var = [interpolate.griddata((XLONG_p.reshape(-1),XLAT_p.reshape(-1)), var[lev,:].reshape(-1), (XLONG_c.reshape(-1),XLAT_c.reshape(-1)), method='nearest').reshape(np.shape(XLONG_c)) for lev in range(0,len(var))] 116 | self.wrfinput_c.variables[variable][:] += intp_var 117 | 118 | def cleanup(self, cdom): 119 | ''' 120 | close netcdf files and write changes 121 | ''' 122 | self.wrfinput_p.close() 123 | self.wrfinput_c.close() 124 | self.wrfinput_c_nolsm.close() 125 | self.fg_p.close() 126 | # copy results back to original file 127 | c_wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(cdom)) 128 | shutil.copyfile(os.path.join(c_wrfda_workdir, 'wrfvar_output'), 129 | os.path.join(c_wrfda_workdir, 'fg')) 130 | 131 | 132 | if __name__=="__main__": 133 | wrfda_interpolate() 134 | 135 | -------------------------------------------------------------------------------- /wrfpy/readObsTemperature.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ## 4 | from wrfpy.config import config 5 | import csv 6 | import os 7 | import astral 8 | from netCDF4 import Dataset 9 | from netCDF4 import date2num 10 | import numpy as np 11 | import bisect 12 | from datetime import datetime 13 | import glob 14 | from pathos.multiprocessing import ProcessPool as Pool 15 | 16 | 17 | class readObsTemperature(config): 18 | def __init__(self, dtobj, nstationtypes=None, dstationtypes=None): 19 | config.__init__(self) 20 | # optional define station types to be used 21 | self.nstationtypes = nstationtypes # stationtypes at night 22 | self.dstationtypes = dstationtypes # stationtypes during daytime 23 | # define datestr 24 | datestr = datetime.strftime(dtobj, '%Y-%m-%d_%H:%M:%S') 25 | # define name of csv file 26 | self.wrf_rundir = self.config['filesystem']['work_dir'] 27 | fname = 'obs_stations_' + datestr + '.csv' 28 | self.csvfile = os.path.join(self.wrf_rundir, fname) 29 | try: 30 | # try to read an existing csv file 31 | self.read_csv(datestr) 32 | except IOError: 33 | if self.config['options_urbantemps']['urban_stations']: 34 | # reading existing csv file failed, start from scratch 35 | self.urbStations = self.config['options_urbantemps']['urban_stations'] 36 | self.verify_input() 37 | self.obs_temp_p(dtobj) 38 | self.write_csv(datestr) 39 | else: 40 | raise 41 | 42 | def verify_input(self): 43 | ''' 44 | verify input and create list of files 45 | ''' 46 | try: 47 | f = Dataset(self.urbStations, 'r') 48 | f.close() 49 | self.filelist = [self.urbStations] 50 | except IOError: 51 | # file is not a netcdf file, assuming a txt file containing a 52 | # list of netcdf files 53 | if os.path.isdir(self.urbStations): 54 | # path is actually a directory, not a file 55 | self.filelist = glob.glob(os.path.join(self.urbStations, '*nc')) 56 | else: 57 | # re-raise error 58 | raise 59 | 60 | def obs_temp_p(self, dtobj): 61 | ''' 62 | get observed temperature in amsterdam parallel 63 | ''' 64 | self.dtobjP = dtobj 65 | pool = Pool() 66 | obs = pool.map(self.obs_temp, self.filelist) 67 | self.obs = [ob for ob in obs if ob is not None] 68 | 69 | def obs_temp(self, f): 70 | ''' 71 | get observed temperature in amsterdam per station 72 | ''' 73 | try: 74 | obs = Dataset(f, 'r') 75 | obs_lon = obs.variables['longitude'][0] 76 | obs_lat = obs.variables['latitude'][0] 77 | elevation = 0 78 | try: 79 | stationtype = obs.stationtype 80 | except AttributeError: 
81 |                 stationtype = None
82 |             stobs = (obs_lat, obs_lon, elevation, stationtype)
83 |             use_station = self.filter_stationtype(stobs, self.dtobjP)
84 |             if use_station:
85 |                 dt = obs.variables['time']
86 |                 # convert datetime object to dt.units units
87 |                 dtobj_num = date2num(self.dtobjP, units=dt.units,
88 |                                      calendar=dt.calendar)
89 |                 # make use of the property that the array is already
90 |                 # sorted to find the closest date
91 |                 try:
92 |                     ind = bisect.bisect_left(dt[:], dtobj_num)
93 |                 except RuntimeError:
94 |                     return None
95 |                 if ((ind == 0) or (ind == len(dt))):
96 |                     return None
97 |                 else:
98 |                     am = np.argmin([abs(dt[ind]-dtobj_num),
99 |                                     abs(dt[ind-1]-dtobj_num)])
100 |                     if (am == 0):
101 |                         idx = ind
102 |                     else:
103 |                         idx = ind - 1
104 |                     if abs((dt[:]-dtobj_num)[idx]) > 900:
105 |                         # ignore observation if time difference
106 |                         # between model and observation is > 15 minutes
107 |                         return None
108 |                     temp = obs.variables['temperature'][idx]
109 |                     sname = f[:]  # stationname
110 |                     obs.close()
111 |                     # append results to lists
112 |                     obs_temp = temp
113 |                     obs_stype = stationtype
114 |                     obs_sname = sname
115 |         except IOError:
116 |             return None
117 |         except AttributeError:
118 |             return None
119 |         try:
120 |             return (obs_lat, obs_lon, obs_temp, obs_stype, obs_sname)
121 |         except UnboundLocalError:
122 |             return None
123 |
124 |     def filter_stationtype(self, stobs, dtobj):
125 |         '''
126 |         check if it is day or night based on the solar angle
127 |         and filter the station on the allowed station types
128 |         '''
129 |         lat = stobs[0]
130 |         lon = stobs[1]
131 |         elevation = 0  # placeholder
132 |         loc = astral.Location(info=('name', 'region', lat, lon, 'UTC',
133 |                                     elevation))
134 |         solar_elevation = loc.solar_elevation(dtobj)
135 |         # set stime according to day/night based on solar angle
136 |         if (solar_elevation > 0):
137 |             stime = 'day'
138 |         else:
139 |             stime = 'night'
140 |         if ((stime == 'day') and self.dstationtypes):
141 |             try:
142 |                 mask = any([x.lower() in stobs[3].lower() for
143 |                             x in self.dstationtypes])
144 |             except AttributeError:
145 |                 mask = False
146 |         elif ((stime == 'night') and self.nstationtypes):
147 |             try:
148 |                 mask = any([x.lower() in stobs[3].lower() for
149 |                             x in self.nstationtypes])
150 |             except AttributeError:
151 |                 mask = False
152 |         else:
153 |             mask = True
154 |         return mask
155 |
156 |     def write_csv(self, datestr):
157 |         '''
158 |         write output of stations used to csv file
159 |         '''
160 |         with open(self.csvfile, 'w', newline='') as out:  # text mode for csv in python3
161 |             csv_out = csv.writer(out)
162 |             csv_out.writerow(['lat', 'lon', 'temperature', 'stationtype',
163 |                               'stationname'])
164 |             for row in self.obs:
165 |                 csv_out.writerow(row)
166 |
167 |     def read_csv(self, datestr):
168 |         '''
169 |         read station temperatures from csv file
170 |         '''
171 |         # initialize variables in csv file
172 |         obs_lat = []
173 |         obs_lon = []
174 |         obs_temp = []
175 |         obs_stype = []
176 |         obs_sname = []
177 |         # start reading csv file
178 |         with open(self.csvfile, 'r') as inp:
179 |             reader = csv.reader(inp)
180 |             next(reader)  # skip header
181 |             for row in reader:
182 |                 # append variables
183 |                 obs_lat.append(float(row[0]))
184 |                 obs_lon.append(float(row[1]))
185 |                 obs_temp.append(float(row[2]))
186 |                 obs_stype.append(str(row[3]))
187 |                 obs_sname.append(str(row[4]))
188 |         # zip variables into a list of station tuples
189 |         self.obs = list(zip(obs_lat, obs_lon, obs_temp, obs_stype, obs_sname))
190 |
--------------------------------------------------------------------------------
/wrfpy/split_namelist.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 |
description: split WRF namelist into first domain / rest 5 | license: APACHE 2.0 6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl) 7 | ''' 8 | 9 | import f90nml 10 | import copy 11 | import mpl_toolkits.basemap.pyproj as pyproj 12 | from wrfpy.config import config 13 | import os 14 | from wrfpy import utils 15 | 16 | class split_nml_shared(config): 17 | ''' 18 | shared functionality for split_nml_wrf and split_nml_wps 19 | ''' 20 | def __init__(self, namelist): 21 | config.__init__(self) 22 | self.namelist = namelist 23 | self.read_namelist() 24 | self.create_namelist_copies() 25 | self.modify_coarse_namelist() 26 | self.modify_fine_namelist() 27 | 28 | def read_namelist(self): 29 | ''' 30 | read user supplied namelist 31 | ''' 32 | self.nml = f90nml.read(self.namelist) 33 | # get list of namelist keys 34 | self.keys = self.nml.keys() 35 | 36 | def create_namelist_copies(self): 37 | ''' 38 | create two (shallow) copies of the variable containing the namelist 39 | which will be used to create the output namelists 40 | ''' 41 | self.nml_coarse = copy.copy(self.nml) 42 | self.nml_fine = copy.copy(self.nml) 43 | 44 | 45 | def modify_coarse_namelist(self): 46 | ''' 47 | modify coarse namelist (resulting namelist contains outer domain only) 48 | ''' 49 | for section in self.nml.keys(): 50 | for key in self.nml[section].keys(): 51 | if isinstance(self.nml[section][key], list): 52 | if key not in ['eta_levels']: # don't modify these keys 53 | # use only first item from list 54 | self.nml_coarse[section][key] = self.nml[section][key][0] 55 | elif key == 'max_dom': 56 | self.nml_coarse[section][key] = 1 # only outer domain 57 | # else don't modify the key 58 | 59 | 60 | def modify_fine_namelist(self): 61 | ''' 62 | modify fine namelist (resulting namelist contains all but outer domain) 63 | ''' 64 | special_cases1 = ['parent_grid_ratio', 'i_parent_start', 'j_parent_start', 65 | 'parent_time_step_ratio'] 66 | special_cases2 = ['grid_id', 'parent_id'] 67 | for section in self.nml.keys(): 68 | for key in self.nml[section].keys(): 69 | if isinstance(self.nml[section][key], list): 70 | if key in special_cases1: 71 | if len(self.nml[section][key]) > 2: 72 | self.nml_fine[section][key] = [1] + self.nml[ 73 | section][key][2:] 74 | else: 75 | self.nml_fine[section][key] = 1 76 | elif key in special_cases2: 77 | self.nml_fine[section][key] = self.nml[section][key][:-1] 78 | elif key not in ['eta_levels']: # don't modify these keys 79 | # start from second item in list 80 | self.nml_fine[section][key] = self.nml[section][key][1:] 81 | elif key=='time_step': 82 | self.nml_fine[section][key] = int( 83 | float(self.nml[section][key]) / self.nml['domains'][ 84 | 'parent_grid_ratio'][1]) 85 | elif key=='max_dom': 86 | self.nml_fine[section][key] = self.nml[section][key] - 1 87 | 88 | 89 | 90 | 91 | class split_nml_wrf(split_nml_shared, config): 92 | def __init__(self): 93 | config.__init__(self) # load config 94 | wrf_namelist = self.config['options_wrf']['namelist.input'] 95 | split_nml_shared.__init__(self, wrf_namelist) 96 | self._save_namelists() 97 | 98 | 99 | def _save_namelists(self): 100 | ''' 101 | write coarse and fine WRF namelist.input to the respective run directories 102 | as namelist.forecast 103 | ''' 104 | # define namelist directories 105 | coarse_namelist_dir = os.path.join(self.config['filesystem']['work_dir'], 106 | 'wrf_coarse') 107 | fine_namelist_dir = os.path.join(self.config['filesystem']['work_dir'], 108 | 'wrf_fine') 109 | # create directories 110 | 
91 | class split_nml_wrf(split_nml_shared, config):
92 | def __init__(self):
93 | config.__init__(self) # load config
94 | wrf_namelist = self.config['options_wrf']['namelist.input']
95 | split_nml_shared.__init__(self, wrf_namelist)
96 | self._save_namelists()
97 |
98 |
99 | def _save_namelists(self):
100 | '''
101 | write coarse and fine WRF namelist.input to the respective run directories
102 | as namelist.forecast
103 | '''
104 | # define namelist directories
105 | coarse_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
106 | 'wrf_coarse')
107 | fine_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
108 | 'wrf_fine')
109 | # create directories
110 | [utils._create_directory(directory) for directory in [coarse_namelist_dir,
111 | fine_namelist_dir]]
112 | # remove old files if needed
113 | [utils.silentremove(filename) for filename in [
114 | os.path.join(dn, 'namelist.forecast') for dn in [coarse_namelist_dir,
115 | fine_namelist_dir]]]
116 | # write namelists
117 | self.nml_coarse.write(os.path.join(coarse_namelist_dir,
118 | 'namelist.forecast'))
119 | self.nml_fine.write(os.path.join(fine_namelist_dir,
120 | 'namelist.forecast'))
121 |
122 |
123 |
124 |
125 | class split_nml_wps(split_nml_shared, config):
126 | def __init__(self):
127 | config.__init__(self) # load config
128 | wps_namelist = self.config['options_wps']['namelist.wps']
129 | split_nml_shared.__init__(self, wps_namelist)
130 | self._modify_fine_namelist_wps()
131 | self._save_namelists()
132 |
133 |
134 | def _modify_fine_namelist_wps(self):
135 | '''
136 | wps specific fine namelist changes
137 | '''
138 | # calculate new dx, dy, ref_lon, ref_lat for second domain
139 | self._calculate_center_second_domain()
140 | # modify dx, dy, ref_lon, ref_lat for second domain
141 | self.nml_fine['geogrid']['dx'] = self.dx
142 | self.nml_fine['geogrid']['dy'] = self.dy
143 | self.nml_fine['geogrid']['ref_lon'] = self.ref_lon
144 | self.nml_fine['geogrid']['ref_lat'] = self.ref_lat
145 |
146 |
147 | def _calculate_center_second_domain(self):
148 | '''
149 | Calculate the center of the second domain for running in UPP mode
150 | '''
151 | grid_ratio = self.nml['geogrid']['parent_grid_ratio'][1]
152 | i_start = self.nml['geogrid']['i_parent_start']
153 | j_start = self.nml['geogrid']['j_parent_start']
154 | e_we = self.nml['geogrid']['e_we']
155 | e_sn = self.nml['geogrid']['e_sn']
156 | ref_lat = self.nml['geogrid']['ref_lat']
157 | ref_lon = self.nml['geogrid']['ref_lon']
158 | truelat1 = self.nml['geogrid']['truelat1']
159 | truelat2 = self.nml['geogrid']['truelat2']
160 | # new dx and dy
161 | self.dx = float(self.nml['geogrid']['dx']) / grid_ratio
162 | self.dy = float(self.nml['geogrid']['dy']) / grid_ratio
163 | # define Lambert conformal projection string (WGS84 ellipsoid/datum)
164 | projstring = ("+proj=lcc +lat_1=%s +lat_2=%s +lat_0=%s "
165 | "+lon_0=%s +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 "
166 | "+units=m +no_defs"
167 | %(str(truelat1), str(truelat2), str(ref_lat), str(ref_lon)))
168 | projection = pyproj.Proj( projstring )
169 | # calculate east/west/south/north
170 | west = (-self.nml['geogrid']['dx'] * (e_we[0] - 1) * 0.5) + (
171 | (i_start[1] - 1) * self.nml['geogrid']['dx'])
172 | south = (-self.nml['geogrid']['dy'] * (e_sn[0] - 1) * 0.5) + (
173 | (j_start[1] - 1) * self.nml['geogrid']['dy'])
174 | east = west + ((e_we[1] - 1) * self.dx)
175 | north = south + ((e_sn[1] - 1) * self.dy)
176 | # new ref_lat and ref_lon
177 | self.ref_lon, self.ref_lat = projection((west + east) * 0.5,
178 | (north + south) * 0.5,
179 | inverse=True)
180 |
181 |
182 | def _save_namelists(self):
183 | '''
184 | write coarse and fine WPS namelist.wps to the respective
185 | run directories
186 | '''
187 | # define namelist directories
188 | coarse_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
189 | 'wps_coarse')
190 | fine_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
191 | 'wps_fine')
192 | # create directories
193 | [utils._create_directory(directory) for directory in [coarse_namelist_dir,
194 | fine_namelist_dir]]
195 | # remove old files if needed
196 | [utils.silentremove(filename) for filename in [
197 | os.path.join(dn, 'namelist.wps') for dn in [coarse_namelist_dir,
198 | fine_namelist_dir]]]
199 | # write namelists
200 | self.nml_coarse.write(os.path.join(coarse_namelist_dir,
201 | 'namelist.wps'))
202 | self.nml_fine.write(os.path.join(fine_namelist_dir,
203 | 'namelist.wps'))
204 |
205 |
206 | if __name__ == "__main__":
207 | split_nml_wps()
208 | split_nml_wrf()
209 |
--------------------------------------------------------------------------------
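The effect of modify_coarse_namelist and modify_fine_namelist is easiest to see on a toy example. A sketch for a three-domain 'domains' section (values invented; this mirrors the rules in split_nml_shared above):

# toy 3-domain 'domains' section, as f90nml would parse it
domains = {'max_dom': 3,
           'e_we': [100, 151, 181],
           'parent_id': [1, 1, 2],
           'parent_grid_ratio': [1, 3, 3]}

# coarse namelist: first element of every per-domain list, max_dom = 1
coarse = {'max_dom': 1, 'e_we': 100, 'parent_id': 1, 'parent_grid_ratio': 1}

# fine namelist: outer domain dropped and parents re-anchored to domain 1
fine = {'max_dom': 2,                 # max_dom - 1
        'e_we': [151, 181],           # list[1:]
        'parent_id': [1, 1],          # list[:-1]
        'parent_grid_ratio': [1, 3]}  # [1] + list[2:]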
/wrfpy/wrf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | description: WRF part of wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | from wrfpy.config import config
10 | from datetime import datetime
11 | import glob
12 | import os
13 | import f90nml
14 | from wrfpy import utils
15 | import subprocess
16 | import shutil
17 |
18 |
19 | class run_wrf(config):
20 | '''
21 | prepare the WRF namelist and run real.exe / wrf.exe (subclass of config)
22 | '''
23 | def __init__(self):
24 | config.__init__(self)
25 | self.wrf_rundir = self.config['filesystem']['wrf_run_dir']
26 |
27 | def initialize(self, datestart, dateend):
28 | '''
29 | initialize new WRF run
30 | '''
31 | self.check_wrf_rundir()
32 | self.cleanup_previous_wrf_run()
33 | self.prepare_wrf_config(datestart,
34 | dateend)
35 |
36 | def check_wrf_rundir(self):
37 | '''
38 | check if rundir exists
39 | if rundir doesn't exist, copy over content
40 | of self.config['filesystem']['wrf_dir']/run
41 | '''
42 | utils._create_directory(self.wrf_rundir)
43 | # create list of files in self.config['filesystem']['wrf_dir']/run
44 | files = glob.glob(os.path.join(self.config['filesystem']['wrf_dir'],
45 | 'run', '*'))
46 | for fl in files:
47 | fname = os.path.basename(fl)
48 | if (os.path.splitext(fname)[1] == '.exe'):
49 | # don't copy over the executables
50 | continue
51 | shutil.copyfile(fl, os.path.join(self.wrf_rundir, fname))
52 |
53 | def cleanup_previous_wrf_run(self):
54 | '''
55 | cleanup initial/boundary conditions and namelist from previous WRF run
56 | '''
57 | from wrfpy.utils import silentremove
58 | # remove initial conditions (wrfinput files)
59 | for filename in glob.glob(os.path.join(
60 | self.config['filesystem']['wrf_run_dir'], 'wrfinput_d*')):
61 | silentremove(filename)
62 | # remove lateral boundary conditions (wrfbdy_d01)
63 | silentremove(os.path.join(self.config['filesystem']['wrf_run_dir'],
64 | 'wrfbdy_d01'))
65 | silentremove(os.path.join(self.config['filesystem']['wrf_run_dir'],
66 | 'namelist.input'))
67 |
68 |
69 | def prepare_wrf_config(self, datestart, dateend):
70 | '''
71 | Copy over default WRF namelist and modify time_control variables
72 | '''
73 | from datetime import datetime
74 | # check if both datestart and dateend are a datetime instance
75 | if not all([ isinstance(dt, datetime) for dt in [datestart, dateend] ]):
76 | raise TypeError("datestart and dateend must be an instance of datetime")
77 | # read WRF namelist in WRF work_dir
78 | wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input'])
79 | # get number of domains
80 | ndoms = wrf_nml['domains']['max_dom']
81 | # check if ndoms is an integer and >0
82 | if not (isinstance(ndoms, int) and ndoms>0):
83 | raise ValueError("'max_dom' in the 'domains' section should be an "
84 | "integer > 0")
85 | # define dictionary with time control values
86 | time_ctrl = {'time_control:start_year': datestart.year,
87 | 'time_control:start_month': datestart.month,
88 | 'time_control:start_day': datestart.day,
89 | 'time_control:start_hour': datestart.hour,
90 | 'time_control:end_year': dateend.year,
91 | 'time_control:end_month': dateend.month,
92 | 'time_control:end_day': dateend.day,
93 | 'time_control:end_hour': dateend.hour,
94 | }
95 | # loop over dictionary and set start/end date parameters
96 | for el in time_ctrl.keys():
97 | if not isinstance(time_ctrl[el], list):
98 | wrf_nml[el.split(':')[0]][el.split(':')[1]] = [time_ctrl[el]] * ndoms
99 | else:
100 | wrf_nml[el.split(':')[0]][el.split(':')[1]] = time_ctrl[el] * ndoms
101 | # set interval_seconds to the boundary interval from the config
102 | wrf_nml['time_control']['interval_seconds'] = int(self.config[
103 | 'options_general']['boundary_interval'])
104 | # calculate datetime.timedelta between datestart and dateend
105 | td = dateend - datestart
106 | # set run_days, run_hours, run_minutes, run_seconds
107 | td_days, td_hours, td_minutes, td_seconds = utils.days_hours_minutes_seconds(td)
108 | wrf_nml['time_control']['run_days'] = td_days
109 | wrf_nml['time_control']['run_hours'] = td_hours
110 | wrf_nml['time_control']['run_minutes'] = td_minutes
111 | wrf_nml['time_control']['run_seconds'] = td_seconds
112 | # check if WUR urban config is to be used
113 | if 'sf_urban_use_wur_config' in wrf_nml['physics']:
114 | # get start_date from config.json
115 | start_date = utils.return_validate(
116 | self.config['options_general']['date_start'])
117 | # if very first timestep, don't initialize urban parameters from file
118 | if (wrf_nml['physics']['sf_urban_use_wur_config'] and
119 | start_date == datestart):
120 | wrf_nml['physics']['sf_urban_init_from_file'] = False
121 | else:
122 | wrf_nml['physics']['sf_urban_init_from_file'] = True
123 | # write namelist.input
124 | wrf_nml.write(os.path.join(
125 | self.config['filesystem']['wrf_run_dir'], 'namelist.input'))
126 |
127 |
128 | def run_real(self, j_id=None):
129 | '''
130 | run wrf real.exe
131 | '''
132 | # check if slurm_real.exe is defined
133 | if len(self.config['options_slurm']['slurm_real.exe']):
134 | if j_id:
135 | mid = "--dependency=afterok:%d" %j_id
136 | real_command = ['sbatch', mid, self.config['options_slurm']['slurm_real.exe']]
137 | else:
138 | real_command = ['sbatch', self.config['options_slurm']['slurm_real.exe']]
139 | utils.check_file_exists(real_command[-1])
140 | utils.silentremove(os.path.join(self.wrf_rundir, 'real.exe'))
141 | os.symlink(os.path.join(self.config['filesystem']['wrf_dir'],'main','real.exe'),
142 | os.path.join(self.wrf_rundir, 'real.exe'))
143 | try:
144 | res = subprocess.check_output(real_command, cwd=self.wrf_rundir,
145 | stderr=utils.devnull())
146 | j_id = int(res.split()[-1]) # slurm job-id
147 | except subprocess.CalledProcessError:
148 | utils.logger.error('real.exe failed: %s' % real_command)
149 | raise # re-raise exception
150 | utils.waitJobToFinish(j_id)
151 | else: # run locally
152 | real_command = os.path.join(self.config['filesystem']['wrf_dir'],
153 | 'main', 'real.exe')
154 | utils.check_file_exists(real_command)
155 | try:
156 | subprocess.check_call(real_command, cwd=self.wrf_rundir,
157 | stdout=utils.devnull(), stderr=utils.devnull())
158 | except subprocess.CalledProcessError:
159 | utils.logger.error('real.exe failed: %s' % real_command)
160 | raise # re-raise exception
161 |
162 |
163 | def run_wrf(self, j_id=None):
164 | '''
165 | run wrf.exe
166 | '''
167 | # check if slurm_wrf.exe is defined
168 | if len(self.config['options_slurm']['slurm_wrf.exe']):
169 | if j_id:
170 | mid = "--dependency=afterok:%d" %j_id
171 | wrf_command = ['sbatch', mid, self.config['options_slurm']['slurm_wrf.exe']]
172 | else:
173 | wrf_command = ['sbatch', self.config['options_slurm']['slurm_wrf.exe']]
174 | utils.check_file_exists(wrf_command[-1])
175 | utils.silentremove(os.path.join(self.wrf_rundir, 'wrf.exe'))
176 | os.symlink(os.path.join(self.config['filesystem']['wrf_dir'],'main','wrf.exe'),
177 | os.path.join(self.wrf_rundir, 'wrf.exe'))
178 | try:
179 | res = subprocess.check_output(wrf_command, cwd=self.wrf_rundir,
180 | stderr=utils.devnull())
181 | j_id = int(res.split()[-1]) # slurm job-id
182 | except subprocess.CalledProcessError:
183 | utils.logger.error('wrf.exe failed: %s' % wrf_command)
184 | raise # re-raise exception
185 | utils.waitJobToFinish(j_id)
186 | else: # run locally
187 | wrf_command = os.path.join(self.config['filesystem']['wrf_dir'],
188 | 'main', 'wrf.exe')
189 | utils.check_file_exists(wrf_command)
190 | try:
191 | subprocess.check_call(wrf_command, cwd=self.wrf_rundir,
192 | stdout=utils.devnull(), stderr=utils.devnull())
193 | except subprocess.CalledProcessError:
194 | utils.logger.error('wrf.exe failed: %s' % wrf_command)
195 | raise # re-raise exception
196 |
197 |
--------------------------------------------------------------------------------
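A minimal sketch of driving the class above from a script, mirroring what wrfpy/cylc/run_real.py and run_wrf.py do (the dates are invented; a valid config.json is assumed):

from datetime import datetime

from wrfpy.wrf import run_wrf

WRF = run_wrf()  # reads config.json through the config base class
WRF.initialize(datetime(2018, 9, 18, 0), datetime(2018, 9, 19, 0))
WRF.run_real()   # real.exe, via slurm or locally
WRF.run_wrf()    # wrf.exe; pass j_id to chain slurm jobs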
/wrfpy/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | '''
4 | description: Utilities used in wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | import logging
10 | import sys
11 | import os
12 |
13 | # define global LOG variables
14 | DEFAULT_LOG_LEVEL = 'debug'
15 | LOG_LEVELS = {'debug': logging.DEBUG,
16 | 'info': logging.INFO,
17 | 'warning': logging.WARNING,
18 | 'error': logging.ERROR,
19 | 'critical': logging.CRITICAL}
20 | LOG_LEVELS_LIST = LOG_LEVELS.keys()
21 | # LOG_FORMAT = '%(asctime)-15s %(message)s'
22 | LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
23 | DATE_FORMAT = "%Y/%m/%d/%H:%M:%S"
24 | logger = None
25 |
26 |
27 | def devnull():
28 | '''
29 | define devnull based on python version
30 | '''
31 | if sys.version_info >= (3, 3):
32 | from subprocess import DEVNULL as devnull
33 | elif sys.version_info >= (2, 4):
34 | devnull = open(os.devnull, 'wb')
35 | else:
36 | raise RuntimeError('devnull requires Python >= 2.4')
37 | return devnull
38 |
39 |
40 | def silentremove(filename):
41 | '''
42 | Remove a file or directory without raising an error if the file or
43 | directory does not exist
44 | '''
45 | import errno
46 | import shutil
47 | try:
48 | os.remove(filename)
49 | except OSError as e:
50 | if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory
51 | if e.errno == errno.EISDIR:
52 | shutil.rmtree(filename)
53 | else:
54 | raise # re-raise exception if a different error occurred
55 |
56 |
57 | def return_validate(date_text, format='%Y-%m-%d_%H'):
58 | '''
59 | validate date_text and return datetime.datetime object
60 | '''
61 | from datetime import datetime
62 | try:
63 | date_time = datetime.strptime(date_text, format).replace(tzinfo=None)
64 | except ValueError:
65 | logger.error('Incorrect date format, should be %s' % format)
66 | raise ValueError('Incorrect date format, should be %s' % format)
67 | return date_time
68 |
69 |
70 | def check_file_exists(filename, boolean=False):
71 | '''
72 | check if file exists and is readable, else raise IOError
73 | '''
74 | try:
75 | with open(filename):
76 | if boolean:
77 | return True
78 | else:
79 | pass # file exists and is readable, nothing else to do
80 | except IOError:
81 | if boolean:
82 | return False
83 | else:
84 | # file does not exist OR no read permissions
85 | logger.error('Unable to open file: %s' % filename)
86 | raise # re-raise exception
87 |
88 |
89 | def validate_time_wrfout(wrfout, current_time):
90 | '''
91 | Validate if current_time is in wrfout file
92 | '''
93 | # get list of timesteps in wrfout file (list of datetime objects)
94 | time_steps = timesteps_wrfout(wrfout)
95 | # convert current_time to datetime object
96 | ctime = return_validate(current_time)
97 | if ctime not in time_steps:
98 | message = ('Time ' + current_time + ' not found in wrfout file: ' +
99 | wrfout)
100 | logger.error(message)
101 | raise ValueError(message)
102 |
103 |
104 | def timesteps_wrfout(wrfout):
105 | '''
106 | return a list of timesteps (as datetime objects) in a wrfout file
107 | Input variables:
108 | - wrfout: path to a wrfout file
109 | '''
110 | from netCDF4 import Dataset as ncdf
111 | from datetime import timedelta
112 | check_file_exists(wrfout) # check if wrfout file exists
113 | # read time information from wrfout file
114 | ncfile = ncdf(wrfout, format='NETCDF4')
115 | # minutes since start of simulation, rounded to whole minutes
116 | tvar = [round(nc, 0) for nc in ncfile.variables['XTIME'][:]]
117 | ncfile.close()
118 | # get start date from wrfout filename
119 | time_string = wrfout[-19:-6]
120 | start_time = return_validate(time_string)
121 | # times in netcdf file
122 | time_steps = [start_time + timedelta(minutes=step) for step in tvar]
123 | return time_steps
124 |
125 |
126 | def datetime_to_string(dtime, format='%Y-%m-%d_%H'):
127 | '''
128 | convert datetime object to string. Standard format is 'YYYY-MM-DD_HH'
129 | Input variables:
130 | - dtime: datetime object
131 | - (optional) format: string format to return
132 | '''
133 | from datetime import datetime
134 | # check if dtime is of instance datetime
135 | if not isinstance(dtime, datetime):
136 | message = 'input variable dtime is not of type datetime'
137 | logger.error(message)
138 | raise TypeError(message)
139 | # return datetime as a string
140 | return dtime.strftime(format)
141 |
142 |
143 | def start_logging(filename, level=DEFAULT_LOG_LEVEL):
144 | '''
145 | Start logging with given filename and level.
146 | '''
147 | global logger
148 | if logger is None:
149 | logger = logging.getLogger()
150 | else: # wish there was a logger.close()
151 | for handler in logger.handlers[:]: # make a copy of the list
152 | logger.removeHandler(handler)
153 | logger.setLevel(LOG_LEVELS[level])
154 | formatter = logging.Formatter(LOG_FORMAT, datefmt=DATE_FORMAT)
155 | fh = logging.FileHandler(filename)
156 | fh.setFormatter(formatter)
157 | logger.addHandler(fh)
158 | return logger
159 |
160 |
161 | def get_logger():
162 | return logger
163 |
164 |
165 | def datetime_range(start, end, delta):
166 | '''
167 | Return a generator of all timesteps between two datetime.date(time)
168 | objects.
169 | Time between timesteps is provided by the argument delta.
170 | '''
171 | import datetime
172 | current = start
173 | if not isinstance(delta, datetime.timedelta):
174 | try:
175 | delta = datetime.timedelta(**delta)
176 | except TypeError:
177 | message = ('delta argument in utils.datetime_range should be '
178 | 'a datetime.timedelta or a mapping that can initialize one')
179 | logger.error(message)
180 | raise TypeError(message)
181 | while current < end:
182 | yield current
183 | current += delta
184 |
185 |
186 | def excepthook(*args):
187 | '''
188 | Replace sys.excepthook with custom handler so any uncaught exception
189 | gets logged
190 | '''
191 | logger.error('Uncaught exception:', exc_info=args)
192 |
193 |
194 | def _create_directory(path):
195 | '''
196 | Create a directory if it does not exist yet
197 | '''
198 | import errno
199 | try:
200 | os.makedirs(path)
201 | except OSError as e:
202 | if e.errno != errno.EEXIST: # EEXIST: directory already exists, no problem
203 | raise # re-raise exception if a different error occurred
204 |
205 |
206 | def get_wrfpy_path():
207 | '''
208 | get the path of wrfpy installation
209 | '''
210 | import wrfpy
211 | return os.path.dirname(os.path.realpath(wrfpy.__file__))
212 |
213 |
214 | def testjob(j_id):
215 | import subprocess
216 | command = "squeue -j %s" % j_id
217 | output = subprocess.check_output(command.split()).decode()
218 | try:
219 | if j_id == int(output.split()[-8]): # job-id field of the squeue output
220 | return True
221 | else:
222 | return False
223 | except ValueError:
224 | return False
225 |
226 |
227 | def testjobsucces(j_id):
228 | import subprocess
229 | command = "sacct -j %s --format=state" % j_id
230 | output = subprocess.check_output(command.split()).decode()
231 | if any(x in ['CANCELLED', 'FAILED', 'TIMEOUT'] for x in output.split()):
232 | raise IOError('slurm job %s failed' % j_id)
233 | else:
234 | return True
235 |
236 | def waitJobToFinish(j_id):
237 | import time
238 | while True:
239 | time.sleep(1)
240 | if not testjob(j_id):
241 | testjobsucces(j_id)
242 | break
243 |
244 | def convert_cylc_time(string):
245 | import datetime
246 | import dateutil.parser
247 | try:
248 | return datetime.datetime.strptime(
249 | string, '%Y%m%dT%H00+01').replace(tzinfo=None)
250 | except ValueError:
251 | return dateutil.parser.parse(string).replace(tzinfo=None)
252 |
253 |
254 | def get_max_dom(namelist):
255 | '''
256 | get maximum domain number from WRF namelist.input
257 | '''
258 | import f90nml
259 | wrf_nml = f90nml.read(namelist)
260 | # maximum domain number
261 | return wrf_nml['domains']['max_dom']
262 |
263 |
264 | def days_hours_minutes_seconds(td):
265 | ''' return days, hours, minutes, seconds
266 | input: datetime.timedelta
267 | '''
268 | return (td.days, td.seconds//3600, (td.seconds//60) % 60,
269 | td.seconds % 60)
270 |
--------------------------------------------------------------------------------
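A quick sketch of the two time helpers above (dates invented):

from datetime import datetime

from wrfpy import utils

start = datetime(2018, 9, 18, 0)
end = datetime(2018, 9, 19, 6)

# generator of 6-hourly steps; a mapping is converted to a timedelta
steps = list(utils.datetime_range(start, end, {'hours': 6}))
# -> 18th 00:00, 06:00, 12:00, 18:00 and 19th 00:00 (end itself excluded)

print(utils.days_hours_minutes_seconds(end - start))  # -> (1, 6, 0, 0)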
/wrfpy/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | description: Configuration part of wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | import json
10 | import os
11 | from wrfpy import utils
12 | import f90nml
13 | import yaml
14 |
15 |
16 | class config:
17 | '''
18 | load (or create) and validate the wrfpy configuration file (config.json)
19 | '''
20 | def __init__(self, wrfpy_config=False):
21 | global logger
22 | wrfpy_dir = os.environ['HOME']
23 | logger = utils.start_logging(os.path.join(wrfpy_dir, 'wrfpy.log'))
24 | if not wrfpy_config:
25 | try:
26 | # get CYLC_SUITE_DEF_PATH environment variable
27 | wrfpy_dir = os.environ['CYLC_SUITE_DEF_PATH']
28 | except KeyError:
29 | # default back to user home dir in case CYLC is not used
30 | wrfpy_dir = os.environ['HOME']
31 | # config.json needs to be in base of wrfpy_dir
32 | self.configfile = os.path.join(wrfpy_dir, 'config.json')
33 | else:
34 | self.configfile = wrfpy_config
35 | try:
36 | logger.debug('Checking if configuration file exists: %s' % self.configfile)
37 | utils.check_file_exists(self.configfile, boolean=False)
38 | # read json config file
39 | self._read_json()
40 |
41 | except IOError:
42 | # create config file
43 | self._create_empty_config()
44 | else:
45 | # check config file for consistency and errors
46 | self._check_config()
47 |
48 | def _create_empty_config(self):
49 | '''
50 | create empty json config file
51 | '''
52 | # define keys
53 | keys_dir = ['wrf_dir', 'wrf_run_dir', 'wrfda_dir',
54 | 'upp_dir', 'wps_dir',
55 | 'archive_dir', 'boundary_dir', 'upp_archive_dir', 'work_dir', 'obs_dir', 'obs_filename', 'radar_filepath']
56 | keys_wrf = ['namelist.input', 'urbparm.tbl']
57 | keys_upp = ['upp', 'upp_interval']
58 | keys_wrfda = ['namelist.wrfda', 'wrfda', 'wrfda_type', 'cv_type', 'be.dat']
59 | keys_general = ['date_start', 'date_end',
60 | 'boundary_interval', 'ref_lon',
61 | 'ref_lat', 'run_hours',
62 | 'fix_urban_temps']
63 | keys_wps = ['namelist.wps', 'run_hours', 'vtable', 'geogrid.tbl', 'metgrid.tbl']
64 | keys_slurm = ['slurm_real.exe', 'slurm_wrf.exe',
65 | 'slurm_ungrib.exe',
66 | 'slurm_metgrid.exe', 'slurm_geogrid.exe',
67 | 'slurm_obsproc.exe', 'slurm_updatebc.exe',
68 | 'slurm_da_wrfvar.exe']
69 | keys_urbantemps = ['TBL_URB', 'TGL_URB', 'TSLB',
70 | 'ah.csv', 'urban_stations']
71 | # create dictionaries
72 | config_dir = {key: '' for key in keys_dir}
73 | options_general = {key: '' for key in keys_general}
74 | options_wrfda = {key: '' for key in keys_wrfda}
75 | options_wrf = {key: '' for key in keys_wrf}
76 | options_upp = {key: '' for key in keys_upp}
77 | options_wps = {key: '' for key in keys_wps}
78 | options_slurm = {key: '' for key in keys_slurm}
79 | options_urbantemps = {key: '' for key in keys_urbantemps}
80 | # combine dictionaries
81 | config_out = {}
82 | config_out['filesystem'] = config_dir
83 | config_out['options_wrf'] = options_wrf
84 | config_out['options_wps'] = options_wps
85 | config_out['options_upp'] = options_upp
86 | config_out['options_slurm'] = options_slurm
87 | config_out['options_wrfda'] = options_wrfda
88 | config_out['options_general'] = options_general
89 | config_out['options_urbantemps'] = options_urbantemps
90 | # write json config file
91 | with open(self.configfile, 'w') as outfile:
92 | json.dump(config_out, outfile, sort_keys=True, indent=4)
93 | # print message pointing user to edit config file
94 | self._print_config_message()
95 |
96 | def _print_config_message(self):
97 | '''
98 | print message pointing the user to edit the configuration file
99 | '''
100 | message = '''>>> A configuration file has been created at %s
101 | >>> Please edit the configuration file before continuing.''' % self.configfile
102 | print(message)
103 | logger.info(message)
104 |
105 | def _read_json(self):
106 | '''
107 | read json config file
108 | '''
109 | with open(self.configfile, 'r') as infile:
110 | # parse with yaml.safe_load: valid json is also valid yaml
111 | self.config = yaml.safe_load(infile)
112 |
113 | def _check_config(self):
114 | '''
115 | check configuration file
116 | '''
117 | self._check_general() # check general options
118 | self._check_wrf() # check wrf options
119 | self._check_wps() # check wps options
120 | self._check_wrfda() # check wrfda
121 | self._check_upp() # check upp
122 |
123 | def _check_wrfda(self):
124 | '''
125 | check if wrfda option is set
126 | check if wrfda_type is supported
127 | check wrfda_dir for consistency
128 | '''
129 | if self.config['options_wrfda']['wrfda']:
130 | self._check_wrfda_type()
131 | self._check_wrfda_dir()
132 |
133 | def _check_wrfda_type(self):
134 | '''
135 | check if wrfda_type in json config file is either 3dvar or 4dvar
136 | '''
137 | if (not self.config['options_wrfda']['wrfda_type'].lower() in
138 | ['3dvar', '4dvar']):
139 | message = ("Only '3dvar' or '4dvar' supported in "
140 | "config['options_wrfda']['wrfda_type']")
141 | logger.error(message)
142 | raise IOError(message)
143 |
144 | def _check_wrfda_dir(self):
145 | '''
146 | check if the wrfda directory exist
147 | check if obsproc.exe and da_wrfvar.exe executables exist in the wrfda
148 | directory
149 | '''
150 | # TODO: find out if we can verify that WRFDA dir is 3dvar or 4dvar compiled
151 | assert os.path.isdir(self.config['filesystem']['wrfda_dir']), (
152 | 'wrfda directory %s not found' % self.config['filesystem']['wrfda_dir'])
153 | # create list of files to check
154 | files_to_check = [
155 | os.path.join(self.config['filesystem']['wrfda_dir'], filename) for
156 | filename in ['var/obsproc/obsproc.exe', 'var/da/da_wrfvar.exe']]
157 | # check if all files in the list exist and are readable
158 | [utils.check_file_exists(filename) for filename in files_to_check]
159 |
160 | def _check_upp(self):
161 | if self.config['options_upp']['upp']:
162 | # TODO: check UPP interval
163 | self._check_upp_dir()
164 |
165 | def _check_upp_dir(self):
166 | assert os.path.isdir(self.config['filesystem']['upp_dir']), (
167 | 'upp directory %s not found' % self.config['filesystem']['upp_dir'])
168 | # create list of files to check
169 | files_to_check = [
170 | os.path.join(self.config['filesystem']['upp_dir'], filename) for
171 | filename in ['bin/unipost.exe', 'parm/wrf_cntrl.parm']]
172 | # check if all files in the list exist and are readable
173 | [utils.check_file_exists(filename) for filename in files_to_check]
174 |
175 | def _check_general(self):
176 | '''
177 | check general options in json config file
178 | - date_start and date_end have a valid format
179 | - date_end is after date_start
180 | - boundary_interval is an integer
181 | '''
182 | # check if start_date and end_date are in valid format
183 | start_date = utils.return_validate(
184 | self.config['options_general']['date_start'])
185 | end_date = utils.return_validate(
186 | self.config['options_general']['date_end'])
187 | # end_date should be after start_date
188 | if (start_date >= end_date):
189 | message = 'date_start should be before date_end'
190 | logger.error(message)
191 | raise IOError(message)
192 | # check if run_hours is specified
193 | run_hours = self.config['options_general']['run_hours']
194 | assert run_hours, "No General run_hours specified in config file"
195 | # boundary interval should be an int number of seconds
196 | assert isinstance(self.config['options_general']['boundary_interval'],
197 | int), ('boundary_interval should be given as an '
198 | 'integer in %s' % self.configfile)
199 | # boundary interval should not be larger than time between start_date
200 | # and end_date
201 | assert ((self.config['options_general']['boundary_interval']) <= (
202 | end_date - start_date).total_seconds()), (
203 | 'boundary interval is larger than time between start_date and '
204 | 'end_date')
205 |
206 | def _check_wps(self):
207 | '''
208 | check wps options in json config file
209 | '''
210 | # verify that the config option is specified by the user
211 | assert (len(self.config['options_wps']['namelist.wps']) > 0), (
212 | 'No WPS namelist.wps specified in config file')
213 | # check if the specified namelist.wps exists and is readable
214 | utils.check_file_exists(self.config['options_wps']['namelist.wps'])
215 | # check if run_hours is specified
216 | run_hours = self.config['options_wps']['run_hours']
217 | assert run_hours, "No WPS run_hours specified in config file"
218 | # check if namelist.wps is in the required format and has all keys needed
219 | self._check_namelist_wps()
220 |
221 |
222 | def _check_namelist_wps(self):
223 | '''
224 | check if namelist.wps is in the required format and has all keys needed
225 | '''
226 | # verify that example namelist.wps exists and is not removed by user
227 | basepath = utils.get_wrfpy_path()
228 | self.example_file = os.path.join(basepath, 'examples', 'namelist.wps')
229 | utils.check_file_exists(self.example_file)
230 | # load specified namelist
231 | self.user_nml = f90nml.read(self.config['options_wps']['namelist.wps'])
232 | # verify that all keys in the example namelist are also in self.user_nml
233 | self._verify_namelist_wps_keys()
234 | # validate the key information specified
235 | self._validate_namelist_wps_keys()
236 |
237 | def _verify_namelist_wps_keys(self):
238 | '''
239 | verify that all keys in example_nml are also in user_nml
240 | '''
241 | # load example namelist.wps
242 | example_nml = f90nml.read(self.example_file)
243 | # check all example keys against the user-specified namelist
244 | for section in example_nml.keys():
245 | for key in example_nml[section].keys():
246 | assert key in self.user_nml[section], (
247 | 'Key %s not found in user specified namelist: %s'
248 | % (key, self.config['options_wps']['namelist.wps']))
249 |
250 | def _validate_namelist_wps_keys(self):
251 | '''
252 | verify that user specified namelist.wps contains valid information
253 | for all domains specified by the max_dom key
254 | '''
255 | pass
256 |
257 | def _check_wrf(self):
258 | '''
259 | check wrf options in json config file
260 | '''
261 | # verify that the config option is specified by the user
262 | assert (len(self.config['options_wrf']['namelist.input']) > 0), (
263 | 'No WRF namelist.input specified in config file')
264 | # check if the specified namelist.input exists and is readable
265 | utils.check_file_exists(self.config['options_wrf']['namelist.input'])
266 | # check if namelist.input is in the required format and has all keys needed
267 | self._check_namelist_wrf()
268 |
269 | def _check_namelist_wrf(self):
270 | '''
271 | check if namelist.input is in the required format and has all keys needed
272 | '''
273 | pass
274 |
--------------------------------------------------------------------------------
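From a user's perspective the class above gives a two-step workflow: the first construction writes an empty config.json template and asks the user to edit it; once it is filled in, later constructions load and validate it. A sketch (the config keys shown are the ones defined in _create_empty_config):

from wrfpy.config import config

# first call: no config.json yet, so an empty template is written to
# $CYLC_SUITE_DEF_PATH/config.json (or $HOME/config.json outside cylc)
cfg = config()

# after the user has edited config.json: load and validate it
cfg = config()
print(cfg.config['filesystem']['work_dir'])
print(cfg.config['options_general']['boundary_interval'])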
/wrfpy/upp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | description: Unified Post Processor (UPP) part of wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | from wrfpy import utils
10 | import glob
11 | import subprocess
12 | import os
13 | import errno
14 | from wrfpy.config import config
15 |
16 | class upp(config):
17 | '''
18 | Runs the Unified Post Processor (UPP) for requested time steps in
19 | a wrfout file
20 | '''
21 | def __init__(self):
22 | config.__init__(self)
23 | self._set_variables()
24 | self._initialize()
25 | self._prepare_post_dir()
26 | self._set_environment_variables()
27 |
28 |
29 | def _set_variables(self):
30 | '''
31 | Define additional control variables for the unipost.exe tool,
32 | derived from the global config.
33 | '''
34 | self.crtm_dir = os.path.join(self.config['filesystem']['upp_dir'], 'src/lib/crtm2/src/fix')
35 | self.post_dir = os.path.join(self.config['filesystem']['upp_dir'], 'postprd')
36 |
37 | def _initialize(self):
38 | '''
39 | Check if archive dir exists, create if not.
40 | The archive dir is used to store the processed (GRIB) output.
41 | '''
42 | # create archive dir
43 | utils._create_directory(self.config['filesystem']['upp_archive_dir'])
44 | # create post_dir (remove old one if needed)
45 | utils.silentremove(self.post_dir)
46 | utils._create_directory(self.post_dir)
47 |
48 |
49 |
50 | def _prepare_post_dir(self):
51 | '''
52 | Create and prepare post_dir
53 | '''
54 | #logger.debug('Preparing postprd directory: %s' %config['post_dir'])
55 |
56 | # create self.post_dir if it does not exist yet
57 | utils._create_directory(self.post_dir)
58 |
59 | # Link all the relevant files needed to compute various diagnostics
60 | relpath_to_link = ['EmisCoeff/Big_Endian/EmisCoeff.bin',
61 | 'AerosolCoeff/Big_Endian/AerosolCoeff.bin',
62 | 'CloudCoeff/Big_Endian/CloudCoeff.bin',
63 | 'SpcCoeff/Big_Endian/imgr_g11.SpcCoeff.bin',
64 | 'TauCoeff/ODPS/Big_Endian/imgr_g11.TauCoeff.bin',
65 | 'SpcCoeff/Big_Endian/imgr_g12.SpcCoeff.bin',
66 | 'TauCoeff/ODPS/Big_Endian/imgr_g12.TauCoeff.bin',
67 | 'SpcCoeff/Big_Endian/imgr_g13.SpcCoeff.bin',
68 | 'TauCoeff/ODPS/Big_Endian/imgr_g13.TauCoeff.bin',
69 | 'SpcCoeff/Big_Endian/imgr_g15.SpcCoeff.bin',
70 | 'TauCoeff/ODPS/Big_Endian/imgr_g15.TauCoeff.bin',
71 | 'SpcCoeff/Big_Endian/imgr_mt1r.SpcCoeff.bin',
72 | 'TauCoeff/ODPS/Big_Endian/imgr_mt1r.TauCoeff.bin',
73 | 'SpcCoeff/Big_Endian/imgr_mt2.SpcCoeff.bin',
74 | 'TauCoeff/ODPS/Big_Endian/imgr_mt2.TauCoeff.bin',
75 | 'SpcCoeff/Big_Endian/imgr_insat3d.SpcCoeff.bin',
76 | 'TauCoeff/ODPS/Big_Endian/imgr_insat3d.TauCoeff.bin',
77 | 'SpcCoeff/Big_Endian/amsre_aqua.SpcCoeff.bin',
78 | 'TauCoeff/ODPS/Big_Endian/amsre_aqua.TauCoeff.bin',
79 | 'SpcCoeff/Big_Endian/tmi_trmm.SpcCoeff.bin',
80 | 'TauCoeff/ODPS/Big_Endian/tmi_trmm.TauCoeff.bin',
81 | 'SpcCoeff/Big_Endian/ssmi_f13.SpcCoeff.bin',
82 | 'TauCoeff/ODPS/Big_Endian/ssmi_f13.TauCoeff.bin',
83 | 'SpcCoeff/Big_Endian/ssmi_f14.SpcCoeff.bin',
84 | 'TauCoeff/ODPS/Big_Endian/ssmi_f14.TauCoeff.bin',
85 | 'SpcCoeff/Big_Endian/ssmi_f15.SpcCoeff.bin',
86 | 'TauCoeff/ODPS/Big_Endian/ssmi_f15.TauCoeff.bin',
87 | 'SpcCoeff/Big_Endian/ssmis_f16.SpcCoeff.bin',
88 | 'TauCoeff/ODPS/Big_Endian/ssmis_f16.TauCoeff.bin',
89 | 'SpcCoeff/Big_Endian/ssmis_f17.SpcCoeff.bin',
90 | 'TauCoeff/ODPS/Big_Endian/ssmis_f17.TauCoeff.bin',
91 | 'SpcCoeff/Big_Endian/ssmis_f18.SpcCoeff.bin',
92 | 'TauCoeff/ODPS/Big_Endian/ssmis_f18.TauCoeff.bin',
93 | 'SpcCoeff/Big_Endian/ssmis_f19.SpcCoeff.bin',
94 | 'TauCoeff/ODPS/Big_Endian/ssmis_f19.TauCoeff.bin',
95 | 'SpcCoeff/Big_Endian/ssmis_f20.SpcCoeff.bin',
96 | 'TauCoeff/ODPS/Big_Endian/ssmis_f20.TauCoeff.bin',
97 | 'SpcCoeff/Big_Endian/seviri_m10.SpcCoeff.bin',
98 | 'TauCoeff/ODPS/Big_Endian/seviri_m10.TauCoeff.bin',
99 | 'SpcCoeff/Big_Endian/v.seviri_m10.SpcCoeff.bin']
100 |
101 | # abspath coefficients for crtm2 (simulated synthetic satellites)
102 | abspath_coeff = [os.path.join(self.crtm_dir, relpath) for relpath in
103 | relpath_to_link ]
104 | # abspath wrf_cntrl param file
105 | abspath_pf = os.path.join(self.config['filesystem']['upp_dir'], 'parm',
106 | 'wrf_cntrl.parm')
107 | # concatenate lists of paths
108 | abspath_to_link = abspath_coeff + [abspath_pf]
109 | # create a symlink for every file in abspath_to_link
110 | for fl in abspath_to_link:
111 | utils.check_file_exists(fl) # check if file exists and is readable
112 | os.symlink(fl, os.path.join(self.post_dir, os.path.basename(fl)))
113 | # symlink wrf_cntrl.parm to config['post_dir']/fort.14
114 | os.symlink(abspath_pf, os.path.join(self.post_dir, 'fort.14'))
115 | # symlink microphysic's tables - code used is based on mp_physics option
116 | # used in the wrfout file
117 | os.symlink(os.path.join(self.config['filesystem']['wrf_run_dir'], 'ETAMPNEW_DATA'),
118 | os.path.join(self.post_dir, 'nam_micro_lookup.dat'))
119 | os.symlink(os.path.join(self.config['filesystem']['wrf_run_dir'],
120 | 'ETAMPNEW_DATA.expanded_rain'
121 | ), os.path.join(self.post_dir,
122 | 'hires_micro_lookup.dat'))
123 |
124 |
125 | def _set_environment_variables(self):
126 | '''
127 | Set environment variables
128 | '''
129 | #logger.debug('Enter set_environment_variables')
130 | os.environ['MP_SHARED_MEMORY'] = 'yes'
131 | os.environ['MP_LABELIO'] = 'yes'
132 | os.environ['tmmark'] = 'tm00'
133 | #logger.debug('Leave set_environment_variables')
134 |
135 |
136 | def _cleanup_output_files(self):
137 | '''
138 | Clean up old output files in post_dir
139 | '''
140 | #logger.debug('Enter cleanup_output_files')
141 | file_ext = [ '*.out', '*.tm00', 'fort.110', 'itag']
142 | files_found = [ f for files in [
143 | glob.glob(os.path.join(self.post_dir, ext))
144 | for ext in file_ext ] for f in files]
145 | # try to remove files, raise exception if needed
146 | [ utils.silentremove(fl) for fl in files_found ]
147 | #logger.debug('Leave cleanup_output_files')
148 |
149 |
150 | def _write_itag(self, wrfout, current_time):
151 | '''
152 | Create input file for unipost
153 | --------content itag file ---------------------------------------
154 | First line is location of wrfout data
155 | Second line is required format
156 | Third line is the modeltime to process
157 | Fourth line is the model identifier (WRF, NMM)
158 | -----------------------------------------------------------------
159 | '''
160 | #logger.debug('Enter write_itag')
161 | #logger.debug('Time in itag file is: %s' %current_time)
162 | # set itag filename and cleanup
163 | filename = os.path.join(self.post_dir, 'itag')
164 | utils.silentremove(filename)
165 | # template of itag file
166 | template = """{wrfout}
167 | netcdf
168 | {current_time}:00:00
169 | NCAR
170 | """
171 | # context variables in template
172 | context = {
173 | "wrfout":wrfout,
174 | "current_time":current_time
175 | }
176 | # create the itag file and write content to it based on the template
177 | try:
178 | with open(filename, 'w') as itag:
179 | itag.write(template.format(**context))
180 | except IOError as e:
181 | #logger.error('Unable to write itag file: %s' %filename)
182 | print('Unable to write itag file: %s' %filename)
183 | raise e # re-raise exception
184 | #logger.debug('Leave write_itag')
185 |
186 |
187 | def _run_unipost_step(self, wrfout, current_time, thours):
188 | '''
189 | Input variables for the function are:
190 | - full path to a wrfout file (regular wrfout naming)
191 | - time to run unipost for in format YYYY-MM-DD_HH
192 | - thours: forecast hour (hours since simulation start) used in the output filename
193 | The following functionality is provided by the function:
194 | - validate input parameters
195 | - write itag file
196 | - run unipost.exe command
197 | - rename output
198 | - archive output
199 | - cleanup output
200 | '''
201 | # see if current_time is in wrfout AND validate time format
202 | utils.validate_time_wrfout(wrfout, current_time)
203 | # extract domain information from wrfout filename
204 | domain = int(wrfout[-22:-20])
205 | # write itag file
206 | self._write_itag(wrfout, current_time)
207 | # run unipost.exe
208 | subprocess.check_call(os.path.join(self.config['filesystem']['upp_dir'], 'bin', 'unipost.exe'),
209 | cwd=self.post_dir, stdout=utils.devnull(),
210 | stderr=utils.devnull())
211 | # rename and archive output
212 | self._archive_output(current_time, thours, domain)
213 | # cleanup output files
214 | self._cleanup_output_files()
215 |
216 |
217 | def run_unipost_file(self, wrfout, frequency=2, use_t0=False):
218 | '''
219 | Input variables for the function are:
220 | - wrfout: full path to a wrfout file (regular wrfout naming)
221 | - (optional) frequency: time interval in hours at which processing should
222 | take place
223 | - (optional) use_t0: boolean, process time step 0
224 | The function runs unipost for every timestep in the wrfout file that
225 | matches the processing frequency (optionally including timestep 0)
226 | '''
227 | time_steps = utils.timesteps_wrfout(wrfout)
228 | # convert to hours since timestep 0
229 | td = [int((t - time_steps[0]).total_seconds()/3600) for t in time_steps]
230 | # modulo should be zero
231 | if not td[-1]%frequency==0:
232 | message = ('time range in wrfout file %s is not a multiple of '
233 | 'the processing frequency (%s hours)' % (wrfout, frequency))
234 | #logger.error(message)
235 | raise IOError(message)
236 | else:
237 | # create list of booleans where modulo is 0
238 | modulo = [tdi%frequency==0 for tdi in td]
239 | for idx, tstep in enumerate(time_steps):
240 | if (not use_t0 and idx==0) or (not modulo[idx]):
241 | continue
242 | # convert time step to time string
243 | current_time = utils.datetime_to_string(tstep)
244 | # run unipost step
245 | self._run_unipost_step(wrfout, current_time, td[idx])
246 |
247 |
248 | def _archive_output(self, current_time, thours, domain):
249 | '''
250 | rename unipost.exe output to wrfpost_d0${domain}_time.grb and archive
251 | '''
252 | import shutil
253 | # verify that domain is an int
254 | if not isinstance(domain, int):
255 | message = 'domain id should be an integer'
256 | #logger.error(message)
257 | raise TypeError(message)
258 | # define original and destination filename
259 | origname = 'WRFPRS%02d.tm00' %thours
260 | outname = 'wrfpost_d%02d_%s.grb' %(domain, current_time)
261 | # rename file and move to archive dir
262 | shutil.move(os.path.join(self.post_dir, origname),
263 | os.path.join(self.config['filesystem']['upp_archive_dir'], outname))
264 | # check if file is indeed archived
265 | utils.check_file_exists(os.path.join(self.config['filesystem']['upp_archive_dir'], outname))
266 |
--------------------------------------------------------------------------------
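A sketch of post-processing a single wrfout file with the class above (the wrfout path is invented; constructing upp() prepares the postprd directory and symlinks the CRTM coefficients):

from wrfpy.upp import upp

postprocessor = upp()
# process every 2nd hour, skipping timestep 0; each step is archived as
# wrfpost_d01_<time>.grb in the configured upp_archive_dir
postprocessor.run_unipost_file('/path/to/wrfout_d01_2018-09-18_00:00:00',
                               frequency=2, use_t0=False)

/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.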
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/wrfpy/wps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | '''
4 | description: WRF Preprocessing System (WPS) part of wrfpy
5 | license: APACHE 2.0
6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl)
7 | '''
8 |
9 | from wrfpy import utils
10 | import glob
11 | import subprocess
12 | import os
13 | import errno
14 | import f90nml
15 | from wrfpy.config import config
16 | from datetime import datetime
17 | import shutil
18 | from netCDF4 import Dataset
19 |
20 |
21 | class wps(config):
22 | '''
23 | WRF Preprocessing System (WPS): geogrid, ungrib and metgrid steps
24 | '''
25 | def __init__(self):
26 | config.__init__(self) # load config
27 | # define and create wps working directory
28 | self.wps_workdir = os.path.join(self.config['filesystem']['work_dir'],
29 | 'wps')
30 | utils._create_directory(self.wps_workdir)
31 |
32 |
33 | def _initialize(self, datestart, dateend, boundarydir=False):
34 | '''
35 | Initialize WPS working directory / namelist
36 | '''
37 | if not boundarydir:
38 | self.boundarydir = self.config['filesystem']['boundary_dir']
39 | else:
40 | self.boundarydir = boundarydir
41 | self._clean_boundaries_wps() # clean leftover boundaries
42 | self._prepare_namelist(datestart, dateend)
43 | self._link_boundary_files()
44 | self._link_vtable()
45 | self._link_tbl_files()
46 |
47 |
48 | def _clean_boundaries_wps(self):
49 | '''
50 | clean old leftover boundary files in WPS directory
51 | '''
52 | # create list of files to remove (ungrib output is named FILE:<date>, hence the wildcards)
53 | files = [glob.glob(os.path.join(self.wps_workdir, ext))
54 | for ext in ['GRIBFILE.*', 'FILE:*', 'PFILE:*', 'PRES:*']]
55 | # flatten list
56 | files_flat = [item for sublist in files for item in sublist]
57 | # remove files silently
58 | [ utils.silentremove(filename) for filename in files_flat ]
59 |
60 |
61 | def _prepare_namelist(self, datestart, dateend):
62 | '''
63 | prepare wps namelist
64 | '''
65 | # read WPS namelist in WPS work_dir
66 | wps_nml = f90nml.read(self.config['options_wps']['namelist.wps'])
67 | # get number of domains
68 | ndoms = wps_nml['share']['max_dom']
69 | # check if ndoms is an integer and >0
70 | if not (isinstance(ndoms, int) and ndoms>0):
71 | raise ValueError("'max_dom' in the 'share' section should be an "
72 | "integer > 0")
73 | # check if both datestart and dateend are a datetime instance
74 | if not all([ isinstance(dt, datetime) for dt in [datestart, dateend] ]):
75 | raise TypeError("datestart and dateend must be an instance of datetime")
76 | # set new datestart and dateend
77 | wps_nml['share']['start_date'] = [datetime.strftime(datestart,
78 | '%Y-%m-%d_%H:%M:%S')] * ndoms
79 | wps_nml['share']['end_date'] = [datetime.strftime(dateend,
80 | '%Y-%m-%d_%H:%M:%S')] * ndoms
81 | # write namelist in wps work_dir
82 | utils.silentremove(os.path.join(
83 | self.config['filesystem']['work_dir'], 'wps', 'namelist.wps'))
84 | wps_nml.write(os.path.join(
85 | self.config['filesystem']['work_dir'], 'wps', 'namelist.wps'))
86 |
87 |
88 | def _link_boundary_files(self):
89 | '''
90 | link boundary grib files to wps work directory with the required naming
91 | '''
92 | # get list of files to link
93 | filelist = glob.glob(os.path.join(self.boundarydir, '*'))
94 | # make sure we only have files
95 | filelist = [fl for fl in filelist if os.path.isfile(fl)]
96 | if len(filelist) == 0:
97 | message = 'linking boundary files failed, no files found to link'
98 | #logger.error(message)
99 | raise IOError(message)
100 | # get list of filename extensions to use for destination link
101 | linkext = self._get_ext_list(len(filelist))
102 | # link grib files
103 | [os.symlink(filelist[idx], os.path.join(
104 | self.wps_workdir, 'GRIBFILE.' + linkext[idx])) for idx in range(len(filelist))]
105 |
106 |
107 | def _get_ext_list(self, num):
108 | '''
109 | create list of filename extensions for num number of files
110 | Extensions have the form: AAA, AAB, AAC... ABA, ABB... BAA, BAB...
111 | '''
112 | from string import ascii_uppercase
113 | # create list of uppercase letters used in the linkname extension
114 | ext = [ascii_uppercase[idx] for idx in range(0,len(ascii_uppercase))]
115 | i1, i2, i3 = 0, 0, 0
116 | for filenum in range(num): # loop over number of files
117 | # append extension to list (or create list for first iteration)
118 | try:
119 | list_ext.append(ext[i3] + ext[i2] + ext[i1])
120 | except NameError:
121 | list_ext = [ext[i3] + ext[i2] + ext[i1]]
122 | i1 += 1 # increment i1
123 | if i1 >= len(ascii_uppercase):
124 | i1 = 0
125 | i2 += 1 # increment i2
126 | if i2 >= len(ascii_uppercase):
127 | i2 = 0
128 | i3 += 1 # increment i3
129 | if i3 >= len(ascii_uppercase):
130 | message = 'Too many files to link'
131 | #logger.error(message)
132 | raise IOError(message)
133 | return list_ext
134 |
135 |
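# The AAA, AAB, ... extensions produced above match the GRIBFILE.XXX link
# names that ungrib.exe expects. A quick check of the generator, assuming
# a valid config.json so wps() can be constructed:
#
#   from wrfpy.wps import wps
#   exts = wps()._get_ext_list(28)
#   print(exts[:3])   # ['AAA', 'AAB', 'AAC']
#   print(exts[25:])  # ['AAZ', 'ABA', 'ABB'] -- rollover after 26 files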
179 |         wps_nml = f90nml.read(self.config['options_wps']['namelist.wps'])
180 |         ndoms = wps_nml['share']['max_dom']
181 |         # check if geo_em files already exist for all domains
182 |         try:
183 |             for dom in range(1, ndoms + 1):
184 |                 fname = "geo_em.d{}.nc".format(str(dom).zfill(2))
185 |                 ncfile = Dataset(os.path.join(self.wps_workdir, fname))
186 |                 ncfile.close()
187 |         except IOError:
188 |             # create geo_em nc files
189 |             if len(self.config['options_slurm']['slurm_geogrid.exe']):
190 |                 # run using slurm
191 |                 if j_id:
192 |                     mid = "--dependency=afterok:%d" % j_id
193 |                     geogrid_command = ['sbatch', mid, self.config['options_slurm']['slurm_geogrid.exe']]
194 |                 else:
195 |                     geogrid_command = ['sbatch', self.config['options_slurm']['slurm_geogrid.exe']]
196 |                 utils.check_file_exists(geogrid_command[-1])
197 |                 utils.silentremove(os.path.join(self.wps_workdir, 'geogrid', 'geogrid.exe'))
198 |                 os.symlink(os.path.join(self.config['filesystem']['wps_dir'], 'geogrid', 'geogrid.exe'),
199 |                            os.path.join(self.wps_workdir, 'geogrid', 'geogrid.exe'))
200 |                 try:
201 |                     res = subprocess.check_output(geogrid_command, cwd=self.wps_workdir,
202 |                                                   stderr=utils.devnull())
203 |                     j_id = int(res.split()[-1])  # slurm job-id
204 |                 except subprocess.CalledProcessError:
205 |                     # logger.error('Geogrid failed %s:' % geogrid_command)
206 |                     raise  # re-raise exception
207 |                 utils.waitJobToFinish(j_id)
208 |             else:
209 |                 geogrid_command = os.path.join(self.config['filesystem']['wps_dir'],
210 |                                                'geogrid', 'geogrid.exe')
211 |                 utils.check_file_exists(geogrid_command)
212 |                 try:
213 |                     subprocess.check_call(geogrid_command, cwd=self.wps_workdir,
214 |                                           stdout=utils.devnull(), stderr=utils.devnull())
215 |                 except subprocess.CalledProcessError:
216 |                     # logger.error('Geogrid failed %s:' % geogrid_command)
217 |                     raise  # re-raise exception
218 | 
219 | 
220 |     def _run_ungrib(self, j_id=None):
221 |         '''
222 |         run ungrib.exe (locally or using slurm script defined in config.json)
223 |         '''
224 |         if len(self.config['options_slurm']['slurm_ungrib.exe']):
225 |             # run using slurm
226 |             if j_id:
227 |                 mid = "--dependency=afterok:%d" % j_id
228 |                 ungrib_command = ['sbatch', mid, self.config['options_slurm']['slurm_ungrib.exe']]
229 |             else:
230 |                 ungrib_command = ['sbatch', self.config['options_slurm']['slurm_ungrib.exe']]
231 |             utils.check_file_exists(ungrib_command[-1])
232 |             utils.silentremove(os.path.join(self.wps_workdir, 'ungrib', 'ungrib.exe'))
233 |             if not os.path.isdir(os.path.join(self.wps_workdir, 'ungrib')):
234 |                 utils._create_directory(os.path.join(self.wps_workdir, 'ungrib'))
235 |             os.symlink(os.path.join(self.config['filesystem']['wps_dir'], 'ungrib', 'ungrib.exe'),
236 |                        os.path.join(self.wps_workdir, 'ungrib', 'ungrib.exe'))
237 |             try:
238 |                 res = subprocess.check_output(ungrib_command, cwd=self.wps_workdir,
239 |                                               stderr=utils.devnull())
240 |                 j_id = int(res.split()[-1])  # slurm job-id
241 |             except subprocess.CalledProcessError:
242 |                 # logger.error('Ungrib failed %s:' % ungrib_command)
243 |                 raise  # re-raise exception
244 |             utils.waitJobToFinish(j_id)
245 |         else:
246 |             ungrib_command = os.path.join(self.config['filesystem']['wps_dir'],
247 |                                           'ungrib', 'ungrib.exe')
248 |             utils.check_file_exists(ungrib_command)
249 |             try:
250 |                 subprocess.check_call(ungrib_command, cwd=self.wps_workdir,
251 |                                       stdout=utils.devnull(), stderr=utils.devnull())
252 |             except subprocess.CalledProcessError:
253 |                 # logger.error('Ungrib failed %s:' % ungrib_command)
254 |                 raise  # re-raise exception
255 | 
256 | 
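
Both slurm branches above follow the same submit-and-wait idiom: sbatch prints 'Submitted batch job <id>', the job id is parsed from the last token of that output, an optional --dependency=afterok:<id> chains the job onto a previous one, and utils.waitJobToFinish blocks until the job has left the queue. A sketch of the idiom in isolation (the script names and cwd are hypothetical; waitJobToFinish and devnull are the wrfpy helpers used above):

import subprocess

from wrfpy import utils


def submit_and_wait(script, cwd, after=None):
    '''submit a slurm batch script, optionally chained, and block until done'''
    command = ['sbatch']
    if after is not None:
        # only start once job <after> has finished successfully
        command.append('--dependency=afterok:%d' % after)
    command.append(script)
    # sbatch replies 'Submitted batch job <id>'; the id is the last token
    res = subprocess.check_output(command, cwd=cwd, stderr=utils.devnull())
    j_id = int(res.split()[-1])
    utils.waitJobToFinish(j_id)
    return j_id


# hypothetical usage: run ungrib after geogrid
# gid = submit_and_wait('slurm_geogrid.sh', cwd='/scratch/wps')
# submit_and_wait('slurm_ungrib.sh', cwd='/scratch/wps', after=gid)

Since each call blocks until completion, the afterok dependency only matters when a job id from a still-running submission is passed in; that is the role of the j_id arguments of the _run_* methods.

257 |     def _run_metgrid(self, j_id=None):
258 |         '''
259 |         run metgrid.exe (locally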
or using slurm script defined in config.json) 260 | ''' 261 | if len(self.config['options_slurm']['slurm_metgrid.exe']): 262 | # run using slurm 263 | if j_id: 264 | mid = "--dependency=afterok:%d" %j_id 265 | metgrid_command = ['sbatch', mid, self.config['options_slurm']['slurm_metgrid.exe']] 266 | else: 267 | metgrid_command = ['sbatch', self.config['options_slurm']['slurm_metgrid.exe']] 268 | utils.check_file_exists(metgrid_command[-1]) 269 | utils.silentremove(os.path.join(self.wps_workdir, 'metgrid', 'metgrid.exe')) 270 | os.symlink(os.path.join(self.config['filesystem']['wps_dir'],'metgrid','metgrid.exe'), 271 | os.path.join(self.wps_workdir, 'metgrid', 'metgrid.exe')) 272 | try: 273 | res = subprocess.check_output(metgrid_command, cwd=self.wps_workdir, 274 | stderr=utils.devnull()) 275 | j_id = int(res.split()[-1]) # slurm job-id 276 | except subprocess.CalledProcessError: 277 | #logger.error('Metgrid failed %s:' %metgrid_command) 278 | raise # re-raise exception 279 | utils.waitJobToFinish(j_id) 280 | else: 281 | metgrid_command = os.path.join(self.config['filesystem']['wps_dir'], 282 | 'metgrid', 'metgrid.exe') 283 | utils.check_file_exists(metgrid_command) 284 | try: 285 | subprocess.check_call(metgrid_command, cwd=self.wps_workdir, 286 | stdout=utils.devnull(), stderr=utils.devnull()) 287 | except subprocess.CalledProcessError: 288 | #logger.error('Metgrid failed %s:' %metgrid_command) 289 | raise # re-raise exception 290 | 291 | -------------------------------------------------------------------------------- /wrfpy/configuration.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | description: Configuration part of wrfpy 5 | license: APACHE 2.0 6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl) 7 | ''' 8 | 9 | from wrfpy.config import config 10 | from wrfpy import utils 11 | import os 12 | from distutils.dir_util import copy_tree 13 | import pkg_resources 14 | 15 | 16 | class configuration(config): 17 | def __init__(self, results): 18 | global logger 19 | logger = utils.start_logging(os.path.join(os.path.expanduser("~"), 20 | 'wrfpy.log')) 21 | if results['init']: 22 | self._create_directory_structure(results['suitename'], 23 | results['basedir']) 24 | elif results['create']: 25 | self._create_cylc_config(results['suitename'], 26 | results['basedir']) 27 | 28 | def _create_directory_structure(self, suitename, basedir=None): 29 | ''' 30 | Create directory structure for the Cylc configuration 31 | ''' 32 | # set basedir to users home directory if not supplied 33 | if not basedir: 34 | basedir = os.path.join(os.path.expanduser("~"), 'cylc-suites') 35 | # subdirectories to create 36 | subdirs = ['bin', 'control', 'doc', 'inc'] 37 | # create subdirectories 38 | [utils._create_directory( 39 | os.path.join(basedir, suitename, subdir)) 40 | for subdir in subdirs] 41 | # copy over helper scripts for cylc 42 | cylcDir = pkg_resources.resource_filename('wrfpy', 'cylc/') 43 | targetDir = os.path.join(basedir, suitename, 'bin') 44 | copy_tree(cylcDir, targetDir) 45 | # create empty json config file in suite directory 46 | # this does not overwrite an existing config file 47 | config.__init__(self, os.path.join( 48 | basedir, suitename, 'config.json')) 49 | 50 | def _create_cylc_config(self, suitename, basedir): 51 | ''' 52 | Create cylc suite.rc configuration file based on config.json 53 | ''' 54 | config.__init__(self, os.path.join( 55 | basedir, suitename, 'config.json')) 56 | self.incr_hour = 
self.config['options_general']['run_hours']
57 |         self.wps_interval_hours = self.config['options_wps']['run_hours']
58 |         suiterc = self._header()
59 |         suiterc += self._scheduling()
60 |         suiterc += self._runtime()
61 |         suiterc += self._visualization()
62 |         self._write(suiterc, os.path.join(basedir, suitename, 'suite.rc'))
63 | 
64 |     def _header(self):
65 |         '''
66 |         define suite.rc header information
67 |         '''
68 |         start_time = utils.datetime_to_string(
69 |             utils.return_validate(self.config[
70 |                 'options_general']['date_start']),
71 |             format='%Y%m%dT%H')
72 |         end_time = utils.datetime_to_string(
73 |             utils.return_validate(self.config['options_general']['date_end']),
74 |             format='%Y%m%dT%H')
75 |         # define template
76 |         template = """#!Jinja2
77 | 
78 | {{% set START = "{start_time}" %}}
79 | {{% set STOP = "{end_time}" %}}
80 | 
81 | """
82 |         # context variables in template
83 |         context = {
84 |             "start_time": start_time,
85 |             "end_time": end_time
86 |         }
87 |         return template.format(**context)
88 | 
89 |     def _scheduling(self):
90 |         '''
91 |         define suite.rc scheduling information
92 |         '''
93 |         # get start_hour and increment time from config.json
94 |         start_hour = str(
95 |             utils.return_validate(
96 |                 self.config['options_general']['date_start']).hour).zfill(2)
97 |         # check if we need to add upp
98 |         try:
99 |             if self.config['options_upp']['upp']:
100 |                 uppBlock = "=> upp"
101 |             else:
102 |                 uppBlock = ""
103 |         except KeyError:
104 |             uppBlock = ""
105 |         # define template
106 |         template = """[scheduling]
107 |     initial cycle point = {{{{ START }}}}
108 |     final cycle point = {{{{ STOP }}}}
109 |     [[dependencies]]
110 |         # Initial cycle point
111 |         [[[R1]]]
112 |             graph = \"\"\"
113 |             wrf_init => wps => wrf_real => wrfda => wrf_run {upp}
114 |             obsproc_init => obsproc_run => wrfda
115 |             \"\"\"
116 |         # Repeat every {incr_hour} hours, starting {incr_hour} hours
117 |         # after the initial cycle point
118 |         [[[+PT{incr_hour}H/PT{incr_hour}H]]]
119 |             graph = \"\"\"
120 |             wrf_run[-PT{incr_hour}H] => wrf_init => wrf_real => wrfda => wrf_run {upp}
121 |             wrfda[-PT{incr_hour}H] => obsproc_init => obsproc_run => wrfda
122 |             \"\"\"
123 |         # Repeat every {wps_incr_hour} hours, starting {wps_incr_hour} hours
124 |         # after the initial cycle point
125 |         [[[+PT{wps_incr_hour}H/PT{wps_incr_hour}H]]]
126 |             graph = \"\"\"
127 |             wps[-PT{wps_incr_hour}H] => wps => wrf_init
128 |             \"\"\"
129 | """
130 |         # context variables in template
131 |         context = {
132 |             "start_hour": start_hour,
133 |             "incr_hour": self.incr_hour,
134 |             "wps_incr_hour": self.wps_interval_hours,
135 |             "upp": uppBlock
136 |         }
137 |         return template.format(**context)
138 | 
139 |     def _runtime(self):
140 |         '''
141 |         define suite.rc runtime information
142 |         '''
143 |         return (self._runtime_base() + self._runtime_init_wrf() +
144 |                 self._runtime_init_obsproc() + self._runtime_real() +
145 |                 self._runtime_wrf() + self._runtime_obsproc() +
146 |                 self._runtime_wrfda() + self._runtime_upp() +
147 |                 self._runtime_wps())
148 | 
149 |     def _runtime_base(self):
150 |         '''
151 |         define suite.rc runtime information: base
152 |         '''
153 |         # define template
154 |         template = """[runtime]
155 |     [[root]] # suite defaults
156 |         [[[job submission]]]
157 |             method = background
158 | """
159 |         # context variables in template
160 |         context = {}
161 |         return template.format(**context)
162 | 
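All builders in this class render a str.format template against a context dict; braces meant for cylc's Jinja2 pass are doubled so they survive the Python formatting stage. A standalone illustration of the two-stage rendering (the incr_hour value is a made-up example):

# stage 1: Python str.format fills {incr_hour} and collapses '{{{{'/'}}}}'
# to literal '{{'/'}}'; stage 2 happens later, when cylc expands {{ START }}
template = """[scheduling]
    initial cycle point = {{{{ START }}}}
    [[[+PT{incr_hour}H/PT{incr_hour}H]]]
"""
print(template.format(incr_hour=1))
# [scheduling]
#     initial cycle point = {{ START }}
#     [[[+PT1H/PT1H]]]

163 |     def _runtime_init_wrf(self):
164 |         '''
165 |         define suite.rc runtime information: init
166 |         '''
167 |         init_command = "wrf_init.py $CYLC_TASK_CYCLE_POINT {incr_hour}"
168 |         init_context = {
169 |             "incr_hour":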
self.incr_hour 170 | } 171 | init = init_command.format(**init_context) 172 | # define template 173 | template = """ 174 | [[wrf_init]] 175 | script = \"\"\" 176 | {wrf_init} 177 | \"\"\" 178 | [[[job submission]]] 179 | method = {method} 180 | [[[directives]]] 181 | {directives}""" 182 | # context variables in template 183 | context = { 184 | "wrf_init": init, 185 | "method": "background", 186 | "directives": "" 187 | } 188 | return template.format(**context) 189 | 190 | def _runtime_init_obsproc(self): 191 | ''' 192 | define suite.rc runtime information: init 193 | ''' 194 | init = "wrfda_obsproc_init.py $CYLC_TASK_CYCLE_POINT" 195 | # define template 196 | template = """ 197 | [[obsproc_init]] 198 | script = \"\"\" 199 | {obsproc_init} 200 | \"\"\" 201 | [[[job submission]]] 202 | method = {method} 203 | [[[directives]]] 204 | {directives}""" 205 | # context variables in template 206 | context = { 207 | "obsproc_init": init, 208 | "method": "background", 209 | "directives": "" 210 | } 211 | return template.format(**context) 212 | 213 | def _runtime_real(self): 214 | ''' 215 | define suite.rc runtime information: real.exe 216 | ''' 217 | wrf_real = "run_real.py" 218 | # define template 219 | template = """ 220 | [[wrf_real]] 221 | script = \"\"\" 222 | {wrf_real} 223 | \"\"\" 224 | [[[job submission]]] 225 | method = {method} 226 | [[[directives]]] 227 | {directives}""" 228 | # context variables in template 229 | context = { 230 | "wrf_real": wrf_real, 231 | "method": "background", 232 | "directives": "" 233 | } 234 | return template.format(**context) 235 | 236 | 237 | def _runtime_wrf(self): 238 | ''' 239 | define suite.rc runtime information: wrf.exe 240 | ''' 241 | wrf_run = "run_wrf.py" 242 | # define template 243 | template = """ 244 | [[wrf_run]] 245 | script = \"\"\" 246 | {wrf_run} 247 | \"\"\" 248 | [[[job submission]]] 249 | method = {method} 250 | [[[directives]]] 251 | {directives}""" 252 | # context variables in template 253 | context = { 254 | "wrf_run": wrf_run, 255 | "method": "background", 256 | "directives": "" 257 | } 258 | return template.format(**context) 259 | 260 | def _runtime_obsproc(self): 261 | ''' 262 | define suite.rc runtime information: obsproc.exe 263 | ''' 264 | obsproc_run = "wrfda_obsproc_run.py $CYLC_TASK_CYCLE_POINT" 265 | # define template 266 | template = """ 267 | [[obsproc_run]] 268 | script = \"\"\" 269 | {obsproc_run} 270 | \"\"\" 271 | [[[job submission]]] 272 | method = {method} 273 | [[[directives]]] 274 | {directives}""" 275 | # context variables in template 276 | context = { 277 | "obsproc_run": obsproc_run, 278 | "method": "background", 279 | "directives": "" 280 | } 281 | return template.format(**context) 282 | 283 | def _runtime_wrfda(self): 284 | ''' 285 | define suite.rc runtime information: wrfda 286 | ''' 287 | wrfda_run = "wrfda_run.py $CYLC_TASK_CYCLE_POINT" 288 | # define template 289 | template = """ 290 | [[wrfda]] 291 | script = \"\"\" 292 | {wrfda_run} 293 | \"\"\" 294 | [[[job submission]]] 295 | method = {method} 296 | [[[directives]]] 297 | {directives}""" 298 | # context variables in template 299 | context = { 300 | "wrfda_run": wrfda_run, 301 | "method": "background", 302 | "directives": "" 303 | } 304 | return template.format(**context) 305 | 306 | def _runtime_upp(self): 307 | ''' 308 | define suite.rc runtime information: wrfda 309 | ''' 310 | # define template 311 | template = """ 312 | [[upp]] 313 | script = \"\"\" 314 | {command} 315 | \"\"\" 316 | [[[job submission]]] 317 | method = {method} 318 | [[[directives]]] 
319 | {directives} 320 | """ 321 | command = "upp.py $CYLC_TASK_CYCLE_POINT" 322 | context = { 323 | "command": command, 324 | "method": "background", 325 | "directives": "" 326 | } 327 | return template.format(**context) 328 | 329 | def _runtime_wps(self): 330 | ''' 331 | define suite.rc runtime information: wrfda 332 | ''' 333 | # define template 334 | template = """ 335 | [[wps]] 336 | pre-script = \"\"\" 337 | {pre_command} 338 | \"\"\" 339 | script = \"\"\" 340 | {command} 341 | \"\"\" 342 | post-script = \"\"\" 343 | {post_command} 344 | \"\"\" 345 | [[[environment]]] 346 | WORKDIR = {wps_workdir} 347 | CYLC_TASK_WORK_DIR = $WORKDIR 348 | [[[job submission]]] 349 | method = {method} 350 | [[[directives]]] 351 | {directives} 352 | """ 353 | pre_command = "wps_init.py $CYLC_TASK_CYCLE_POINT {wps_run_hours}" 354 | pre_command_context = { 355 | "wps_run_hours": self.wps_interval_hours, 356 | } 357 | command = "wps_run.py" 358 | command_context = { 359 | "wps_dir": self.config['filesystem']['wps_dir'] 360 | } 361 | post_command = "wps_post.py" 362 | context = { 363 | "wps_workdir": os.path.join(self.config['filesystem']['work_dir'], 364 | 'wps'), 365 | "pre_command": pre_command.format(**pre_command_context), 366 | "command": command.format(**command_context), 367 | "post_command": post_command, 368 | "method": "background", 369 | "directives": "" 370 | } 371 | return template.format(**context) 372 | 373 | def _visualization(self): 374 | ''' 375 | define suite.rc visualization information 376 | ''' 377 | # define template 378 | template = """ 379 | [visualization] 380 | initial cycle point = {{ START }} 381 | final cycle point = {{ STOP }} 382 | default node attributes = "style=filled", "fillcolor=grey" 383 | """ 384 | return template 385 | 386 | def _write(self, suiterc, filename): 387 | ''' 388 | write cylc suite.rc config to file 389 | ''' 390 | # create the itag file and write content to it based on the template 391 | try: 392 | with open(filename, 'w') as itag: 393 | itag.write(suiterc) 394 | except IOError: 395 | raise # re-raise exception 396 | -------------------------------------------------------------------------------- /wrfpy/cylc/archive.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from netCDF4 import Dataset as ncdf 4 | from netCDF4 import date2num 5 | import pandas 6 | import time 7 | import os 8 | from dateutil import relativedelta 9 | import argparse 10 | import f90nml 11 | import shutil 12 | from wrfpy.config import config 13 | from wrfpy import utils 14 | from astropy.convolution import convolve 15 | import numpy as np 16 | 17 | 18 | class postprocess(config): 19 | def __init__(self, datestart, dateend): 20 | config.__init__(self) 21 | self.startdate = datestart 22 | self.enddate = dateend 23 | # read WRF namelist in WRF work_dir 24 | wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input']) 25 | # get number of domains 26 | self.ndoms = wrf_nml['domains']['max_dom'] 27 | self.rundir = self.config['filesystem']['wrf_run_dir'] 28 | # archive in subdir per year 29 | self.archivedir = os.path.join( 30 | self.config['filesystem']['archive_dir'], 31 | str(self.startdate.year)) 32 | utils._create_directory(self.archivedir) 33 | # define static variables 34 | self.define_vars_static() 35 | # define variables that need to be stored hourly 36 | self.define_vars_hourly() 37 | # define variables that need to be stored every minute for the inner 38 | # domain, hourly for the other domains 39 | 
self.define_vars_minute() 40 | self.define_vars_deac() # define variables to be deaccumulated 41 | self.archive() # archive "normal" variables 42 | self.archive_wrfvar_input() # archive wrfvar_input files 43 | # get start_date from config.json 44 | start_date = utils.return_validate( 45 | self.config['options_general']['date_start']) 46 | if (start_date == datestart): # very first timestep 47 | self.archive_static() # archive static variables 48 | self.cleanup() 49 | 50 | def define_vars_hourly(self): 51 | ''' 52 | create dict of outputstream:variable for output that has to be saved 53 | every hour 54 | ''' 55 | self.hour_var = [ 56 | 'VEGFRA', 57 | 'MU', 58 | 'MUB', 59 | 'Q2', 60 | 'T2', 61 | 'TH2', 62 | 'PSFC', 63 | 'U10', 64 | 'V10', 65 | 'GRDFLX', 66 | 'ACSNOM', 67 | 'SNOW', 68 | 'SNOWH', 69 | 'CANWAT', 70 | 'TC2M_URB', 71 | 'TP2M_URB', 72 | 'LAI', 73 | 'VAR', 74 | 'F', 75 | 'E', 76 | 'TSK', 77 | 'SWDOWN', 78 | 'GLW', 79 | 'SWNORM', 80 | 'ALBEDO', 81 | 'ALBBCK', 82 | 'EMISS', 83 | 'NOAHRES', 84 | 'UST', 85 | 'PBLH', 86 | 'HFX', 87 | 'LH', 88 | 'SNOWC', 89 | 'OLR', 90 | 'SFCEXC', 91 | 'Z0', 92 | 'SST', 93 | 'U', 94 | 'V', 95 | 'W', 96 | 'PH', 97 | 'PHB', 98 | 'T', 99 | 'P', 100 | 'PB', 101 | 'P_HYD', 102 | 'QVAPOR', 103 | 'QCLOUD', 104 | 'QRAIN', 105 | 'QICE', 106 | 'QSNOW', 107 | 'QGRAUP', 108 | 'CLDFRA', 109 | 'TSLB', 110 | 'SMOIS', 111 | 'SMCREL'] 112 | 113 | def define_vars_static(self): 114 | ''' 115 | Static variables. 116 | Run only once on the very first timestep 117 | ''' 118 | self.static_var = [ 119 | 'ZS', 120 | 'DZS', 121 | 'XLAND', 122 | 'TMN'] 123 | 124 | def define_vars_minute(self): 125 | ''' 126 | create dict of outputstream:variable for output that has to be saved 127 | every minute for the inner domain 128 | ''' 129 | self.minute_var = [ 130 | 'SFROFF', 131 | 'UDROFF', 132 | 'QFX', 133 | 'SR', 134 | 'RAINNC', 135 | 'SNOWNC', 136 | 'GRAUPELNC', 137 | 'HAILNC'] 138 | 139 | def define_vars_deac(self): 140 | self.deac_var = [ 141 | 'SNOWNC', 142 | 'RAINNC', 143 | 'SFROFF', 144 | 'UDROFF', 145 | 'SNOWC', 146 | 'GRAUPELNC', 147 | 'HAILNC'] 148 | 149 | def write_netcdf(self, var, inpdata, lat, lon, dt, dim, outfile): 150 | ''' 151 | Write netcdf output file 152 | ''' 153 | # open output file 154 | ncfile = ncdf(outfile, 'w') 155 | # create dimensions and variables 156 | if dim == 3: 157 | ncfile.createDimension('time', len(dt)) 158 | ncfile.createDimension('south_north', np.shape(inpdata)[1]) 159 | ncfile.createDimension('west_east', np.shape(inpdata)[2]) 160 | data = ncfile.createVariable(var, 'f4', 161 | ('time', 'south_north', 'west_east',), 162 | zlib=True, fill_value=-999) 163 | elif dim == 4: 164 | ncfile.createDimension('time', len(dt)) 165 | ncfile.createDimension('bottom_top', np.shape(inpdata)[1]) 166 | ncfile.createDimension('south_north', np.shape(inpdata)[2]) 167 | ncfile.createDimension('west_east', np.shape(inpdata)[3]) 168 | data = ncfile.createVariable(var, 'f4', 169 | ('time', 'bottom_top', 170 | 'south_north', 'west_east',), 171 | zlib=True, fill_value=-999) 172 | data1 = ncfile.createVariable('latitude', 'f4', 173 | ('south_north', 'west_east',), zlib=True) 174 | data2 = ncfile.createVariable('longitude', 'f4', 175 | ('south_north', 'west_east',), zlib=True) 176 | timevar = ncfile.createVariable('time', 'f4', ('time',), zlib=True) 177 | # time axis UTC 178 | dt = [date2num(d.to_datetime(), 179 | units='minutes since 2010-01-01 00:00:00', 180 | calendar='gregorian') for d in dt] 181 | # define attributes 182 | timevar.units = 'minutes since 2010-01-01 
00:00:00'
183 |         timevar.calendar = 'gregorian'
184 |         timevar.standard_name = 'time'
185 |         timevar.long_name = 'time in UTC'
186 |         data1.units = 'degree_north'
187 |         data1.standard_name = 'latitude'
188 |         data1.FieldType = 104
189 |         data1.description = 'latitude, south is negative'
190 |         data1.MemoryOrder = "XY"
191 |         data1.coordinates = "lon lat"
192 |         data2.units = 'degree_east'
193 |         data2.standard_name = 'longitude'
194 |         data2.description = "longitude, west is negative"
195 |         data2.FieldType = 104
196 |         data2.MemoryOrder = "XY"
197 |         data2.coordinates = "lon lat"
198 |         try:
199 |             data[:] = inpdata[:]
200 |         except IndexError:
201 |             raise
202 |         # lat/lon should be a static field
203 |         try:
204 |             data1[:] = lat[0, :]
205 |             data2[:] = lon[0, :]
206 |         except IndexError:
207 |             raise
208 |         timevar[:] = dt
209 |         # Add global attributes
210 |         ncfile.history = 'Created ' + time.ctime(time.time())
211 |         ncfile.close()
212 | 
213 |     def getvar(self, var, domain, datestr):
214 |         '''
215 |         Read variable from netCDF file and return array
216 |         '''
217 |         # define and load input file
218 |         input_fn = var + '_d0' + str(domain) + '_' + datestr
219 |         input_file = os.path.join(self.rundir, input_fn)
220 |         ncfile = ncdf(input_file, 'r')
221 |         # read variable and close netCDF file
222 |         tmp = ncfile.variables[var][:]
223 |         ncfile.close()
224 |         return tmp
225 | 
226 |     @staticmethod
227 |     def spatial_filter(data):
228 |         '''
229 |         Apply spatial convolution filter to input data
230 |         '''
231 |         kernel = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]])
232 |         if data.ndim == 2:
233 |             dataF = convolve(data[:], kernel,
234 |                              nan_treatment='interpolate',
235 |                              preserve_nan=True)
236 |             return dataF
237 |         elif data.ndim == 3:
238 |             dataF = np.zeros(np.shape(data))
239 |             for i in range(0, len(data)):
240 |                 dataF[i, :] = convolve(data[i, :], kernel,
241 |                                        nan_treatment='interpolate',
242 |                                        preserve_nan=True)
243 |             return dataF
244 |         else:
245 |             return data
246 | 
247 |     def archive(self):
248 |         '''
249 |         archive standard output files
250 |         '''
251 |         # loop over all domains
252 |         for domain in range(1, self.ndoms + 1):
253 |             # get lat/lon information from wrfout
254 |             datestr_fn = self.startdate.strftime('%Y-%m-%d_%H:%M:%S')
255 |             wrfout_n = 'wrfout_d0' + str(domain) + '_' + datestr_fn
256 |             wrfout = ncdf(os.path.join(self.rundir, wrfout_n), 'r')
257 |             lat = wrfout.variables['XLAT'][:]
258 |             lon = wrfout.variables['XLONG'][:]
259 |             lat_u = wrfout.variables['XLAT_U'][:]
260 |             lon_u = wrfout.variables['XLONG_U'][:]
261 |             lat_v = wrfout.variables['XLAT_V'][:]
262 |             lon_v = wrfout.variables['XLONG_V'][:]
263 |             frc_urb = wrfout.variables['FRC_URB2D'][:]
264 |             wrfout.close()
265 |             # iterate over all variables that need to be archived
266 |             for var in (self.hour_var + self.minute_var):
267 |                 print(var)
268 |                 output_fn = (var + '_d0' + str(domain) +
269 |                              '_' + datestr_fn + '.nc')
270 |                 output_file = os.path.join(self.archivedir, output_fn)
271 |                 for cdate in pandas.date_range(self.startdate, self.enddate,
272 |                                                freq='2H')[:-1]:
273 |                     datestr_in = cdate.strftime('%Y-%m-%d_%H:%M:%S')
274 |                     if not var == 'TC2M_URB':
275 |                         tmp = self.getvar(var, domain, datestr_in)
276 |                     else:
277 |                         # compute TC2M_URB from T2, TP2M_URB and FRC_URB2D
278 |                         # load required variables
279 |                         tp2m_urb = self.getvar('TP2M_URB', domain, datestr_in)
280 |                         # set non-urban points to NaN instead of 0
281 |                         tp2m_urb[tp2m_urb == 0] = np.nan
282 |                         t2 = self.getvar('T2', domain, datestr_in)
283 |                         # compute tc2m_urb
284 |                         tmp = (t2 - (1 - frc_urb) * tp2m_urb) / frc_urb
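                        # the line above inverts the grid-box average
                        # T2 = frc_urb * TC2M_URB + (1 - frc_urb) * TP2M_URB,
                        # recovering the 2m temperature over the urban fraction
                        # from the cell-mean T2 and the 2m temperature over
                        # the remaining (non-urban) fraction, TP2M_URB
285 |                         #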
compute spatial filtered variant 286 | tmpF = (self.spatial_filter(t2) - 287 | (1 - frc_urb) * 288 | self.spatial_filter(tp2m_urb)) / frc_urb 289 | # overwrite outer edges of domain with original data 290 | tmpF[:, 0, :] = tmp[:, 0, :] 291 | tmpF[:, -1, :] = tmp[:, -1, :] 292 | tmpF[:, :, 0] = tmp[:, :, 0] 293 | tmpF[:, :, -1] = tmp[:, :, -1] 294 | # difference between filtered/unfiltered 295 | diff = np.abs(tmp - tmpF) 296 | # replace points in tmp where diff>1 with tmpF 297 | tmp[diff > 1] = tmpF[diff > 1] 298 | # set NaN to 0 in tc2m_urb 299 | tmp[np.isnan(tmp)] = 0 300 | # combine steps from input files 301 | if var in self.deac_var: 302 | # need to deaccumulate this variable 303 | try: 304 | output = np.vstack((output, 305 | np.diff(tmp, axis=0))) 306 | except NameError: 307 | output = np.vstack((tmp[0, :][np.newaxis, :], 308 | np.diff(tmp, axis=0))) 309 | else: 310 | # variable only needs appending 311 | try: 312 | output = np.vstack((output, tmp[1:])) 313 | except NameError: 314 | output = tmp 315 | # find number of dimensions (3d/4d variable) 316 | dim = np.ndim(tmp) 317 | del tmp # cleanup 318 | # define time variable in output file 319 | if (var in self.minute_var) and (domain == self.ndoms): 320 | # minute variable in inner domain => minute output 321 | dt = pandas.date_range(self.startdate, self.enddate, 322 | freq='1min')[:] 323 | else: 324 | # else hourly output 325 | dt = pandas.date_range(self.startdate, self.enddate, 326 | freq='1H')[:] 327 | # write netcdf outfile 328 | if var == 'U': 329 | self.write_netcdf(var, output, lat_u, lon_u, dt, dim, 330 | output_file) 331 | elif var == 'V': 332 | self.write_netcdf(var, output, lat_v, lon_v, dt, dim, 333 | output_file) 334 | else: 335 | self.write_netcdf(var, output, lat, lon, dt, dim, 336 | output_file) 337 | del output 338 | 339 | def archive_wrfvar_input(self): 340 | ''' 341 | archive wrfvar_input files 342 | ''' 343 | # loop over all domains 344 | wrfvar_archivedir = os.path.join(self.archivedir, 'wrfvar') 345 | utils._create_directory(wrfvar_archivedir) 346 | start_date = utils.return_validate( 347 | self.config['options_general']['date_start']) 348 | for domain in range(1, self.ndoms + 1): 349 | # iterate over all variables that need to be archived 350 | for cdate in pandas.date_range(self.startdate, self.enddate, 351 | freq='2H')[:-1]: 352 | if (cdate != start_date): 353 | datestr_in = cdate.strftime('%Y-%m-%d_%H:%M:%S') 354 | # define and load input file 355 | input_fn = ('wrfvar_input' + '_d0' + str(domain) + 356 | '_' + datestr_in) 357 | input_file = os.path.join(self.rundir, input_fn) 358 | output_file = os.path.join(wrfvar_archivedir, input_fn) 359 | # copy wrfvar_input to archive dir 360 | shutil.copyfile(input_file, output_file) 361 | 362 | def archive_static(self): 363 | ''' 364 | archive non-changing files 365 | ''' 366 | # loop over all domains 367 | static_archivedir = os.path.join(self.archivedir, 'static') 368 | utils._create_directory(static_archivedir) 369 | for domain in range(1, self.ndoms + 1): 370 | # iterate over all variables that need to be archived 371 | for var in self.static_var: 372 | datestr_in = self.startdate.strftime('%Y-%m-%d_%H:%M:%S') 373 | # define and load input file 374 | input_fn = var + '_d0' + str(domain) + '_' + datestr_in 375 | input_file = os.path.join(self.rundir, input_fn) 376 | output_file = os.path.join(static_archivedir, input_fn) 377 | # copy wrfvar_input to archive dir 378 | shutil.copyfile(input_file, output_file) 379 | 380 | def cleanup(self): 381 | ''' 382 | cleanup 
files in WRF run directory
383 |         '''
384 |         # loop over all domains
385 |         for domain in range(1, self.ndoms + 1):
386 |             # iterate over all variables that need to be archived
387 |             for var in (self.hour_var + self.minute_var +
388 |                         ['wrfout', 'wrfvar_input']):
389 |                 for cdate in pandas.date_range(self.startdate, self.enddate,
390 |                                                freq='2H')[:-1]:
391 |                     datestr_in = cdate.strftime('%Y-%m-%d_%H:%M:%S')
392 |                     # define input file to remove
393 |                     input_fn = var + '_d0' + str(domain) + '_' + datestr_in
394 |                     input_file = os.path.join(self.rundir, input_fn)
395 |                     utils.silentremove(input_file)
396 | 
397 | 
398 | def main(datestring):
399 |     '''
400 |     Main function to call archive class
401 |     '''
402 |     dt = utils.convert_cylc_time(datestring)
403 |     postprocess(dt - relativedelta.relativedelta(days=1), dt)
404 | 
405 | 
406 | if __name__ == "__main__":
407 |     parser = argparse.ArgumentParser(description='Archive WRF output.')
408 |     parser.add_argument('datestring', metavar='N', type=str,
409 |                         help='Date-time string from cylc suite')
410 |     # parse arguments
411 |     args = parser.parse_args()
412 |     # call main
413 |     main(args.datestring)
414 | 
--------------------------------------------------------------------------------
/wrfpy/bumpskin.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | from netCDF4 import Dataset
4 | import numpy
5 | from geopy.distance import vincenty
6 | from wrfpy.config import config
7 | from wrfpy import utils
8 | from wrfpy.readObsTemperature import readObsTemperature
9 | import os
10 | from datetime import datetime
11 | import operator
12 | from numpy import unravel_index
13 | from numpy import shape as npshape
14 | import glob
15 | import statsmodels.api as sm
16 | import csv
17 | import numpy as np
18 | import f90nml
19 | from scipy import interpolate
20 | from astropy.convolution import convolve
21 | 
22 | 
23 | def return_float_int(value):
24 |     try:
25 |         return int(value.strip(','))
26 |     except ValueError:
27 |         return float(value.strip(','))
28 | 
29 | 
30 | def convert_to_number(values):
31 |     if len(values) == 0:
32 |         return values
33 |     elif len(values) == 1:
34 |         return return_float_int(values[0])
35 |     elif len(values) > 1:
36 |         return [return_float_int(value) for value in values]
37 |     else:
38 |         return values
39 | 
40 | 
41 | def reg_m(y, x):
42 |     ones = numpy.ones(len(x[0]))
43 |     X = sm.add_constant(numpy.column_stack((x[0], ones)))
44 |     for ele in x[1:]:
45 |         X = sm.add_constant(numpy.column_stack((ele, X)))
46 |     results = sm.OLS(y, X).fit()
47 |     return results
48 | 
49 | 
50 | def find_gridpoint(lat_in, lon_in, lat, lon):
51 |     '''
52 |     lat_in, lon_in: lat/lon coordinate of point of interest
53 |     lat, lon: grid of lat/lon to find closest index of gridpoint
54 |     '''
55 |     # extract window surrounding point
56 |     lon_window = lon[(lon >= lon_in - 0.10) &
57 |                      (lon <= lon_in + 0.10) &
58 |                      (lat >= lat_in - 0.10) &
59 |                      (lat <= lat_in + 0.10)]
60 |     lat_window = lat[(lon >= lon_in - 0.10) &
61 |                      (lon <= lon_in + 0.10) &
62 |                      (lat >= lat_in - 0.10) &
63 |                      (lat <= lat_in + 0.10)]
64 | 
65 |     lonx = lon_window
66 |     latx = lat_window
67 |     # calculate distance to each point in the surrounding window
68 |     distance = [vincenty((lat_in, lon_in), (latx[idx], lonx[idx])).km
69 |                 for idx in range(0, len(lonx))]
70 |     # find index of the grid point closest to the station
71 |     try:
72 |         min_index, min_value = min(enumerate(distance),
73 |                                    key=operator.itemgetter(1))
74 |         lat_sel = latx[min_index]
75 |         # indices of gridpoint
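        # vincenty gives the geodesic distance in km; the 0.10 degree window
        # above keeps the brute-force nearest-gridpoint search cheap, and the
        # winning flat index is mapped back to 2-D (row, col) just below
76 |         latidx =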
lat.reshape(-1).tolist().index(lat_sel) 77 | (lat_idx, lon_idx) = unravel_index(latidx, npshape(lon)) 78 | return lat_idx, lon_idx 79 | except ValueError: 80 | return None, None 81 | 82 | 83 | class urbparm(config): 84 | def __init__(self, dtobj, infile): 85 | config.__init__(self) 86 | if self.config['options_urbantemps']['ah.csv']: 87 | ahcsv = self.config['options_urbantemps']['ah.csv'] 88 | self.read_ah_csv(ahcsv, dtobj) 89 | self.options = self.read_tbl(infile) 90 | self.change_AH() 91 | self.write_tbl() 92 | 93 | def read_ah_csv(self, ahcsv, dtobj): 94 | ''' 95 | read anthropogenic heat from csv file 96 | columns are: yr, month, ah, alh 97 | alh column is optional 98 | ''' 99 | # initialize variables in csv file 100 | yr = [] 101 | mnth = [] 102 | ah = [] 103 | alh = [] # optional 104 | # start reading csv file 105 | with open(ahcsv, 'r') as inp: 106 | reader = csv.reader(inp) 107 | next(reader) # skip header 108 | for row in reader: 109 | # append variables 110 | yr.append(int(row[0])) 111 | mnth.append(int(row[1])) 112 | ah.append(float(row[2])) 113 | try: 114 | alh.append(float(row[3])) 115 | except IndexError: 116 | alh.append(None) 117 | yr = np.array(yr) 118 | mnth = np.array(mnth) 119 | ah = np.array(ah) 120 | alh = np.array(alh) 121 | self.ah = ah[(yr == dtobj.year) & (mnth == dtobj.month)][0] 122 | if not float(self.ah) > 0: 123 | self.ah = None 124 | self.alh = alh[(yr == dtobj.year) & (mnth == dtobj.month)][0] 125 | if not float(self.alh) > 0: 126 | self.alh = None 127 | 128 | @staticmethod 129 | def read_tbl(tblfile): 130 | ''' 131 | Read URBPARM.TBL 132 | ''' 133 | COMMENT_CHAR = '#' 134 | OPTION_CHAR = ':' 135 | # process GEOGRID.TBL 136 | options = {} 137 | with open(tblfile) as openfileobject: 138 | for line in openfileobject: 139 | # First, remove comments: 140 | if COMMENT_CHAR in line: 141 | # split on comment char, keep only the part before 142 | line, comment = line.split(COMMENT_CHAR, 1) 143 | # Second, find lines with an option=value: 144 | if OPTION_CHAR in line: 145 | # split on option char: 146 | option, value = line.split(OPTION_CHAR, 1) 147 | # strip spaces: 148 | option = option.strip() 149 | value = convert_to_number(value.strip().split()) 150 | # store in dictionary: 151 | options[option] = value 152 | return options 153 | 154 | def write_tbl(self): 155 | ''' 156 | Write URBPARM.TBL to wrf run directory 157 | ''' 158 | outfile = os.path.join(self.config['filesystem']['wrf_run_dir'], 159 | 'URBPARM.TBL') 160 | # remove outfile if exists 161 | utils.silentremove(outfile) 162 | # write new outfile 163 | file = open(outfile, 'w') 164 | space_sep = ['HSEQUIP', 'AHDIUPRF', 'ALHDIUPRF'] 165 | for key in self.options.keys(): 166 | if key not in ['STREET PARAMETERS', 'BUILDING HEIGHTS']: 167 | try: 168 | if key not in space_sep: 169 | file.write("{0} : {1}\n".format 170 | (key, ", ".join(str(x) 171 | for x in self.options.get(key)))) 172 | else: 173 | file.write("{0} : {1}\n".format 174 | (key, " ".join(str(x) 175 | for x in self.options.get(key)))) 176 | except TypeError: 177 | file.write("{0} : {1}\n".format 178 | (key, self.options.get(key))) 179 | file.close() 180 | 181 | def change_AH(self): 182 | ''' 183 | Modify anthropogenic heat with ones in csv file 184 | ''' 185 | if self.ah: 186 | self.options['AH'][-1] = self.ah 187 | if self.alh: 188 | self.options['ALH'][-1] = self.alh 189 | 190 | 191 | class bumpskin(config): 192 | def __init__(self, filename, nstationtypes=None, dstationtypes=None): 193 | config.__init__(self) 194 | # optional define station 
types to be used 195 | self.nstationtypes = nstationtypes # stationtypes at night 196 | self.dstationtypes = dstationtypes # stationtypes during daytime 197 | self.wrfda_workdir = os.path.join(self.config 198 | ['filesystem']['work_dir'], 'wrfda') 199 | self.wrf_rundir = self.config['filesystem']['work_dir'] 200 | # verify input 201 | self.verify_input(filename) 202 | # get number of domains 203 | wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input']) 204 | ndoms = wrf_nml['domains']['max_dom'] 205 | # check if ndoms is an integer and >0 206 | if not (isinstance(ndoms, int) and ndoms > 0): 207 | raise ValueError("'domains_max_dom' namelist variable should be an" 208 | " integer>0") 209 | try: 210 | (lat, lon, diffT) = self.findDiffT(1) 211 | for domain in range(1, ndoms+1): 212 | self.applyToGrid(lat, lon, diffT, domain) 213 | except TypeError: 214 | pass 215 | 216 | def verify_input(self, filename): 217 | ''' 218 | verify input and create list of files 219 | ''' 220 | try: 221 | f = Dataset(filename, 'r') 222 | f.close() 223 | self.filelist = [filename] 224 | except IOError: 225 | # file is not a netcdf file, assuming a txt file containing a 226 | # list of netcdf files 227 | if os.path.isdir(filename): 228 | # path is actually a directory, not a file 229 | self.filelist = glob.glob(os.path.join(filename, '*nc')) 230 | else: 231 | # re-raise error 232 | raise 233 | 234 | def get_time(self, wrfinput): 235 | ''' 236 | get time from wrfinput file 237 | ''' 238 | wrfinput = Dataset(wrfinput, 'r') # open netcdf file 239 | # get datetime string from wrfinput file 240 | datestr = ''.join(wrfinput.variables['Times'][0]) 241 | # convert to datetime object 242 | dtobj = datetime.strptime(datestr, '%Y-%m-%d_%H:%M:%S') 243 | wrfinput.close() # close netcdf file 244 | return dtobj, datestr 245 | 246 | @staticmethod 247 | def getCoords(wrfinput): 248 | ''' 249 | Return XLAT,XLONG coordinates from wrfinput file 250 | ''' 251 | wrfinput = Dataset(wrfinput, 'r') # open netcdf file 252 | lat = wrfinput.variables['XLAT'][0, :] 253 | lon = wrfinput.variables['XLONG'][0, :] 254 | lu_ind = wrfinput.variables['LU_INDEX'][0, :] 255 | wrfinput.close() 256 | return (lat, lon, lu_ind) 257 | 258 | @staticmethod 259 | def clean_2m_temp(T2, LU_INDEX, iswater, filter=True): 260 | ''' 261 | Cleanup large spatial 2m temperature fluctuations 262 | ''' 263 | if filter: 264 | # set water points to NaN 265 | t2 = T2 266 | t2[LU_INDEX[0, :] == iswater] = np.nan 267 | # convolution kernel 268 | kernel = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]]) 269 | # apply convolution kernel 270 | T2_filtered = convolve(t2[:], kernel, 271 | nan_treatment='interpolate', 272 | preserve_nan=True) 273 | # handle domain edges 274 | T2_filtered[0, :] = T2[0, :] 275 | T2_filtered[-1, :] = T2[-1, :] 276 | T2_filtered[:, 0] = T2[:, 0] 277 | T2_filtered[:, -1] = T2[:, -1] 278 | # difference between filtered and original 279 | diff = np.abs(T2_filtered - T2) 280 | # replace points with large difference 281 | # compared to neighboring points 282 | T2[diff > 3] = T2_filtered[diff > 3] 283 | print('Total points changed in T2 field: ' + 284 | str(len(T2[diff > 3]))) 285 | print('Average increment: ' + 286 | str(np.sum(diff[diff > 3])/len(T2[diff > 3]))) 287 | return T2 288 | 289 | def get_urban_temp(self, wrfinput, ams): 290 | ''' 291 | get urban temperature 292 | ''' 293 | wrfinput = Dataset(wrfinput, 'r') # open netcdf file 294 | # get datetime string from wrfinput file 295 | LU_IND = wrfinput.variables['LU_INDEX'][0, :] 296 | iswater = 
wrfinput.getncattr('ISWATER') 297 | GLW_IND = wrfinput.variables['GLW'][0, :] 298 | U10_IND = wrfinput.variables['U10'][0, :] 299 | V10_IND = wrfinput.variables['V10'][0, :] 300 | UV10_IND = numpy.sqrt(U10_IND**2 + V10_IND**2) 301 | lat = wrfinput.variables['XLAT'][0, :] 302 | lon = wrfinput.variables['XLONG'][0, :] 303 | T2_IND = wrfinput.variables['T2'][0, :] 304 | T2_IND = self.clean_2m_temp(T2_IND, LU_IND, 305 | iswater, filter=True) 306 | T2 = [] 307 | U10 = [] 308 | V10 = [] 309 | GLW = [] 310 | LU = [] 311 | for point in ams: 312 | i_idx, j_idx = find_gridpoint(point[0], point[1], lat, lon) 313 | if (i_idx and j_idx): 314 | T2.append(T2_IND[i_idx, j_idx]) 315 | U10.append(wrfinput.variables['U10'][0, i_idx, j_idx]) 316 | V10.append(wrfinput.variables['V10'][0, i_idx, j_idx]) 317 | GLW.append(wrfinput.variables['GLW'][0, i_idx, j_idx]) 318 | LU.append(wrfinput.variables['LU_INDEX'][0, i_idx, j_idx]) 319 | else: 320 | T2.append(numpy.nan) 321 | U10.append(numpy.nan) 322 | V10.append(numpy.nan) 323 | GLW.append(numpy.nan) 324 | LU.append(numpy.nan) 325 | wrfinput.close() 326 | UV10 = numpy.sqrt(numpy.array(U10)**2 + numpy.array(V10)**2) 327 | return (T2, numpy.array(GLW), UV10, numpy.array(LU), LU_IND, 328 | GLW_IND, UV10_IND) 329 | 330 | def findDiffT(self, domain): 331 | ''' 332 | calculate increment of urban temperatures and apply increment 333 | to wrfinput file in wrfda directory 334 | ''' 335 | # load netcdf files 336 | wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain)) 337 | wrfinput = os.path.join(wrfda_workdir, 'wrfvar_output') 338 | # get datetime from wrfinput file 339 | dtobj, datestr = self.get_time(wrfinput) 340 | # get observed temperatures 341 | obs = readObsTemperature(dtobj, nstationtypes=None, 342 | dstationtypes=None).obs 343 | obs_temp = [obs[idx][2] for idx in range(0, len(obs))] 344 | # get modeled temperatures at location of observation stations 345 | t_urb, glw, uv10, lu, LU_IND, glw_IND, uv10_IND = self.get_urban_temp( 346 | wrfinput, obs) 347 | lat, lon, lu_ind = self.getCoords(wrfinput) # get coordinates 348 | diffT_station = numpy.array(obs_temp) - numpy.array(t_urb) 349 | # calculate median and standard deviation, ignore outliers > 10K 350 | # only consider landuse class 1 351 | nanmask = ((~numpy.isnan(diffT_station)) & (lu == 1) & 352 | (abs(diffT_station) < 5)) 353 | obs = numpy.array(obs) 354 | obs = obs[nanmask] 355 | diffT_station = diffT_station[nanmask] 356 | lu = lu[nanmask] 357 | glw = glw[nanmask] 358 | uv10 = uv10[nanmask] 359 | median = numpy.nanmedian(diffT_station[(abs(diffT_station) < 5)]) 360 | std = numpy.nanstd(diffT_station[(abs(diffT_station) < 5)]) 361 | print('print diffT station') 362 | print(diffT_station[(abs(diffT_station) < 5)]) 363 | print('end print diffT station') 364 | # depending on the number of observations, calculate the temperature 365 | # increment differently 366 | if (len(lu) < 3): 367 | # no temperature increment for <3 observations 368 | print('No temperature increment applied, not enough data.') 369 | diffT = numpy.zeros(numpy.shape(glw_IND)) 370 | elif ((len(lu) >= 3) & (len(lu) < 5)): 371 | # use median if between 3 and 5 observations 372 | print('Median temperature increment applied: ' + str(median)) 373 | diffT = median * numpy.ones(numpy.shape(glw_IND)) 374 | diffT[LU_IND != 1] = 0 375 | else: 376 | # fit statistical model 377 | # define mask 378 | mask = ((diffT_station > median - 2*std) & 379 | (diffT_station < median + 2*std) & 380 | (lu == 1) & (abs(diffT_station) < 5)) 381 | # filter 
obs
382 |             obs = obs[mask]
383 |             # recalculate median
384 |             median = numpy.nanmedian(diffT_station[mask])
385 |             fit = reg_m(diffT_station[mask], [(glw)[mask], uv10[mask]])
386 |             # calculate diffT for every gridpoint
387 |             if fit.f_pvalue <= 0.1:  # use fit if significant
388 |                 print('Temperature increment applied from statistical '
389 |                       'fit with values: ' + str(fit.params))
390 |                 diffT = (fit.params[1] * glw_IND +
391 |                          fit.params[0] * uv10_IND + fit.params[2])
392 |             else:  # use median
393 |                 print('Median temperature increment applied: ' + str(median))
394 |                 diffT = median * numpy.ones(numpy.shape(glw_IND))
395 |             diffT[LU_IND != 1] = 0  # set to 0 if LU_IND!=1
396 |         return (lat, lon, diffT)
397 | 
398 |     def applyToGrid(self, lat, lon, diffT, domain):
399 |         # load netcdf files
400 |         wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
401 |         wrfinputFile = os.path.join(wrfda_workdir, 'wrfvar_output')
402 |         lat2, lon2, lu_ind2 = self.getCoords(wrfinputFile)
403 |         # get datetime from wrfinput file
404 |         dtobj, datestr = self.get_time(wrfinputFile)
405 |         # if not ((lat==lat2) and (lon==lon2)) we need to interpolate
406 |         if not (np.array_equal(lat, lat2) and np.array_equal(lon, lon2)):
407 |             # do interpolation to get new diffT
408 |             diffT = interpolate.griddata(
409 |                 (lon.reshape(-1), lat.reshape(-1)), diffT.reshape(-1),
410 |                 (lon2.reshape(-1), lat2.reshape(-1)),
411 |                 method='cubic').reshape(np.shape(lon2))
412 |             diffT[lu_ind2 != 1] = 0  # set to 0 if LU_IND!=1
413 |         # open wrfvar_output (output after data assimilation)
414 |         self.wrfinput2 = Dataset(os.path.join(wrfda_workdir, 'wrfvar_output'),
415 |                                  'r+')
416 |         # open wrfvar_input (input before DA, last step of previous run)
417 |         start_date = utils.return_validate(
418 |             self.config['options_general']['date_start'])
419 |         if (dtobj == start_date):  # very first timestep
420 |             self.wrfinput3 = Dataset(os.path.join
421 |                                      (self.wrf_rundir,
422 |                                       ('wrfinput_d0' + str(domain))), 'r')
423 |             return
424 |         else:
425 |             self.wrfinput3 = Dataset(os.path.join
426 |                                      (self.wrf_rundir,
427 |                                       ('wrfvar_input_d0' + str(domain) +
428 |                                        '_' + datestr)), 'r')
429 |         # define variables to increment
430 |         # variables_2d = ['TC_URB', 'TR_URB', 'TB_URB', 'TG_URB', 'TS_URB']
431 |         # variables_3d = ['TRL_URB', 'TBL_URB', 'TGL_URB', 'TSLB']
432 |         # begin determining multiplying factor
433 |         rhocp = 1231
434 |         uc_urb = self.wrfinput2.variables['UC_URB'][:]
435 |         lp_urb = self.wrfinput2.variables['BUILD_AREA_FRACTION'][:]
436 |         hgt_urb = self.wrfinput2.variables['BUILD_HEIGHT'][:]
437 |         lb_urb = self.wrfinput2.variables['BUILD_SURF_RATIO'][:]
438 |         frc_urb = self.wrfinput2.variables['FRC_URB2D'][:]
439 |         chc_urb = self.wrfinput2.variables['CHC_SFCDIF'][:]
440 |         R = numpy.maximum(numpy.minimum(lp_urb/frc_urb, 0.9), 0.1)
441 |         RW = 1.0 - R
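        # SLUCM canyon geometry, reconstructed from the gridded morphology:
        # R is the building plan-area fraction within the urban tile (clipped
        # to [0.1, 0.9]) and RW the complementary road fraction; HNORM, h and
        # W below derive a normalised building height, canyon aspect ratio
        # and road width used to scale the skin-temperature increment
442 |         HNORM = 2.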
* hgt_urb * frc_urb / (lb_urb - lp_urb) 443 | HNORM[lb_urb <= lp_urb] = 10.0 444 | ZR = numpy.maximum(numpy.minimum(hgt_urb, 100.0), 3.0) 445 | h = ZR / HNORM 446 | W = 2 * h 447 | # set safety margin on W/RW >=8 or else SLUCM could misbehave 448 | # make sure to use the same safety margin in module_sf_urban.F 449 | W[(W / RW) > 8.0] = ((8.0 / (W / RW)) * W)[(W / RW) > 8.0] 450 | CW = numpy.zeros(numpy.shape(uc_urb)) 451 | CW[uc_urb > 5] = 7.51 * uc_urb[uc_urb > 5]**0.78 452 | CW[uc_urb <= 5] = 6.15 + 4.18 * uc_urb[uc_urb <= 5] 453 | DTW = diffT * (1 + ((RW * rhocp) / (W + RW)) * (chc_urb/CW)) 454 | 455 | diffT = DTW # change 09/01/2018 456 | diffT = numpy.nan_to_num(diffT) # replace nan by 0 457 | # apply temperature changes 458 | TSK = self.wrfinput2.variables['TSK'] 459 | TSK[:] = TSK[:] + diffT 460 | TB_URB = self.wrfinput2.variables['TB_URB'] 461 | TB_URB[:] = TB_URB[:] + diffT 462 | TG_URB = self.wrfinput2.variables['TG_URB'] 463 | TG_URB[:] = TG_URB[:] + diffT 464 | TS_URB = self.wrfinput2.variables['TS_URB'] 465 | TS_URB[:] = TS_URB[:] + diffT 466 | TGR_URB = self.wrfinput2.variables['TGR_URB'] 467 | TGR_URB[:] = TGR_URB[:] + diffT 468 | 469 | # wall layer temperature 470 | try: 471 | TBL_URB_factors = self.config['options_urbantemps']['TBL_URB'] 472 | except KeyError: 473 | # fallback values if none are defined in config 474 | # these may not work correctly for other cities than Amsterdam 475 | TBL_URB_factors = [0.823, 0.558, 0.379, 0.257] 476 | if not (isinstance(TBL_URB_factors, list) and 477 | len(TBL_URB_factors) > 1): 478 | TBL_URB_factors = [0.823, 0.558, 0.379, 0.257] 479 | TBL_URB = self.wrfinput2.variables['TBL_URB'] 480 | levs = numpy.shape(self.wrfinput2.variables['TBL_URB'][:])[1] 481 | TBL_URB = self.wrfinput2.variables['TBL_URB'] 482 | for lev in range(0, levs): 483 | try: 484 | TBL_URB[0, lev, :] = (TBL_URB[0, lev, :] + 485 | diffT * float(TBL_URB_factors[lev])) 486 | except IndexError: 487 | # no factor for this layer => no increment 488 | pass 489 | 490 | # road layer temperature 491 | try: 492 | TGL_URB_factors = self.config['options_urbantemps']['TGL_URB'] 493 | except KeyError: 494 | # fallback values if none are defined in config 495 | # these may not work correctly for other cities than Amsterdam 496 | TGL_URB_factors = [0.776, 0.170, 0.004] 497 | if not (isinstance(TGL_URB_factors, list) and 498 | len(TGL_URB_factors) > 1): 499 | TGL_URB_factors = [0.776, 0.170, 0.004] 500 | TGL_URB = self.wrfinput2.variables['TGL_URB'] 501 | levs = numpy.shape(self.wrfinput2.variables['TGL_URB'][:])[1] 502 | TGL_URB = self.wrfinput2.variables['TGL_URB'] 503 | for lev in range(0, levs): 504 | try: 505 | TGL_URB[0, lev, :] = (TGL_URB[0, lev, :] + 506 | diffT * float(TGL_URB_factors[lev])) 507 | except IndexError: 508 | # no factor for this layer => no increment 509 | pass 510 | 511 | # adjustment soil for vegetation fraction urban cell 512 | try: 513 | TSLB_factors = self.config['options_urbantemps']['TSLB'] 514 | except KeyError: 515 | # fallback values if none are defined in config 516 | # these may not work correctly for other cities than Amsterdam 517 | TSLB_factors = [0.507, 0.009] 518 | if not (isinstance(TSLB_factors, list) and 519 | len(TSLB_factors) > 1): 520 | TSLB_factors = [0.507, 0.009] 521 | TSLB = self.wrfinput2.variables['TSLB'] # after update_lsm 522 | TSLB_in = self.wrfinput3.variables['TSLB'] # before update_lsm 523 | levs = numpy.shape(self.wrfinput2.variables['TSLB'][:])[1] 524 | for lev in range(0, levs): 525 | # reset TSLB for urban cells to value 
before update_lsm 526 | TSLB[0, lev, :][lu_ind2 == 1] = TSLB_in[0, lev, :][lu_ind2 == 1] 527 | try: 528 | TSLB[0, lev, :] = (TSLB[0, lev, :] + 529 | diffT * float(TSLB_factors[lev])) 530 | except IndexError: 531 | pass 532 | 533 | # close netcdf file 534 | self.wrfinput2.close() 535 | self.wrfinput3.close() 536 | -------------------------------------------------------------------------------- /wrfpy/wrfda.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | description: WRFDA part of wrfpy 5 | license: APACHE 2.0 6 | author: Ronald van Haren, NLeSC (r.vanharen@esciencecenter.nl) 7 | ''' 8 | 9 | import os 10 | import f90nml 11 | import subprocess 12 | import shutil 13 | from wrfpy import utils 14 | from wrfpy.config import config 15 | from datetime import datetime 16 | import time 17 | 18 | 19 | class wrfda(config): 20 | ''' 21 | description 22 | ''' 23 | def __init__(self, datestart, low_only=False): 24 | config.__init__(self) # load config 25 | self.low_only = low_only 26 | self.datestart = datestart 27 | self.rundir = self.config['filesystem']['wrf_run_dir'] 28 | self.wrfda_workdir = os.path.join(self.config['filesystem']['work_dir'], 29 | 'wrfda') 30 | self.max_dom = utils.get_max_dom(self.config['options_wrf']['namelist.input']) 31 | # copy default 3dvar obsproc namelist to namelist.obsproc 32 | self.obsproc_dir = os.path.join(self.config['filesystem']['wrfda_dir'], 33 | 'var/obsproc') 34 | # get dictionary with workdir/obs filename per domain 35 | self.obs = self.get_obsproc_dirs() 36 | 37 | def run(self, datestart): 38 | ''' 39 | Run all WRFDA steps 40 | ''' 41 | self.datestart = datestart 42 | self.obsproc_init(datestart) # initialize obsrproc work directory 43 | self.obsproc_run() # run obsproc.exe 44 | self.prepare_updatebc(datestart) # prepares for updating low bc 45 | for domain in range(1, self.max_dom+1): 46 | self.updatebc_run(domain) # run da_updatebc.exe 47 | self.prepare_wrfda() # prepare for running da_wrfvar.exe 48 | for domain in range(1, self.max_dom+1): 49 | self.wrfvar_run(domain) # run da_wrfvar.exe 50 | # prepare for updating lateral bc 51 | self.prepare_updatebc_type('lateral', datestart, 1) 52 | self.updatebc_run(1) # run da_updatebc.exe 53 | self.wrfda_post(datestart) # copy files over to WRF run_dir 54 | 55 | def obsproc_init(self, datestart): 56 | ''' 57 | Sync obsproc namelist with WRF namelist.input 58 | ''' 59 | from datetime import timedelta 60 | from datetime import datetime 61 | # convert to unique list 62 | obslist = list(set(self.obs.values())) 63 | # read WRF namelist in WRF work_dir 64 | wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input']) 65 | for obs in obslist: 66 | # read obsproc namelist 67 | obsproc_nml = f90nml.read(os.path.join 68 | (self.obsproc_dir, 69 | 'namelist.obsproc.3dvar.wrfvar-tut')) 70 | # create obsproc workdir 71 | self.create_obsproc_dir(obs[0]) 72 | # copy observation in LITTLE_R format to obsproc_dir 73 | shutil.copyfile(os.path.join( 74 | self.config['filesystem']['obs_dir'], obs[1]), 75 | os.path.join(obs[0], obs[1])) 76 | # sync obsproc namelist variables with wrf namelist.input 77 | obsproc_nml['record1']['obs_gts_filename'] = obs[1] 78 | obsproc_nml['record8']['nesti'] = (wrf_nml['domains'][ 79 | 'i_parent_start']) 80 | obsproc_nml['record8']['nestj'] = (wrf_nml['domains'][ 81 | 'j_parent_start']) 82 | obsproc_nml['record8']['nestix'] = wrf_nml['domains']['e_we'] 83 | obsproc_nml['record8']['nestjx'] = wrf_nml['domains']['e_sn'] 84 | 
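            # record8 of namelist.obsproc mirrors the WRF nesting layout
            # (parent ids, grid spacing, number of domains, grid sizes) so
            # obsproc distributes the observations over the same domains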
obsproc_nml['record8']['numc'] = wrf_nml['domains']['parent_id'] 85 | obsproc_nml['record8']['dis'] = wrf_nml['domains']['dx'] 86 | obsproc_nml['record8']['maxnes'] = wrf_nml['domains']['max_dom'] 87 | # set time_analysis, time_window_min, time_window_max 88 | # check if both datestart and dateend are a datetime instance 89 | if not isinstance(datestart, datetime): 90 | raise TypeError("datestart must be an instance of datetime") 91 | obsproc_nml['record2'][ 92 | 'time_analysis'] = datetime.strftime(datestart, 93 | '%Y-%m-%d_%H:%M:%S') 94 | obsproc_nml['record2']['time_window_min'] = datetime.strftime( 95 | datestart - timedelta(minutes=15), '%Y-%m-%d_%H:%M:%S') 96 | obsproc_nml['record2']['time_window_max'] = datetime.strftime( 97 | datestart + timedelta(minutes=15), '%Y-%m-%d_%H:%M:%S') 98 | # save obsproc_nml 99 | utils.silentremove(os.path.join(obs[0], 'namelist.obsproc')) 100 | obsproc_nml.write(os.path.join(obs[0], 'namelist.obsproc')) 101 | 102 | def get_obsproc_dirs(self): 103 | ''' 104 | get list of observation names and workdirs for obsproc 105 | ''' 106 | # initialize variables 107 | obsnames, obsproc_workdirs = [], [] 108 | for dom in range(1, self.max_dom + 1): 109 | try: 110 | obsname = self.config['filesystem']['obs_filename_d' 111 | + str(dom)] 112 | obsnames.append(obsname) 113 | obsproc_workdirs.append(os.path.join( 114 | self.config['filesystem']['work_dir'], 115 | 'obsproc', obsname)) 116 | except KeyError: 117 | obsname = self.config['filesystem']['obs_filename'] 118 | obsnames.append(obsname) 119 | obsproc_workdirs.append(os.path.join( 120 | self.config['filesystem']['work_dir'], 121 | 'obsproc', obsname)) 122 | # merge everything into a dict 123 | # domain: (workdir, obsname) 124 | obs = dict(zip(range(1, self.max_dom + 1), 125 | zip(obsproc_workdirs, obsnames))) 126 | return obs 127 | 128 | def create_obsproc_dir(self, workdir): 129 | ''' 130 | symlink all files required to run obsproc.exe into obsproc workdir 131 | ''' 132 | # cleanup 133 | utils.silentremove(workdir) 134 | # create work directory 135 | utils._create_directory(workdir) 136 | # symlink error files 137 | files = ['DIR.txt', 'HEIGHT.txt', 'PRES.txt', 'RH.txt', 'TEMP.txt', 138 | 'UV.txt', 'obserr.txt'] 139 | for fl in files: 140 | os.symlink(os.path.join(self.obsproc_dir, fl), 141 | os.path.join(workdir, fl)) 142 | # symlink obsproc.exe 143 | os.symlink(os.path.join(self.obsproc_dir, 'src', 'obsproc.exe'), 144 | os.path.join(workdir, 'obsproc.exe')) 145 | 146 | def obsproc_run(self): 147 | ''' 148 | run obsproc.exe 149 | ''' 150 | obslist = list(set(self.obs.values())) 151 | obsproc_dir = obslist[0][0] 152 | # TODO: check if output is file is created and no errors have occurred 153 | j_id = None 154 | if len(self.config['options_slurm']['slurm_obsproc.exe']): 155 | # run using slurm 156 | if j_id: 157 | mid = "--dependency=afterok:%d" % j_id 158 | obsproc_command = ['sbatch', mid, 159 | self.config['options_slurm']['slurm_obsproc.exe']] 160 | else: 161 | obsproc_command = ['sbatch', 162 | self.config['options_slurm']['slurm_obsproc.exe']] 163 | utils.check_file_exists(obsproc_command[-1]) 164 | try: 165 | res = subprocess.check_output(obsproc_command, cwd=obsproc_dir, 166 | stderr=utils.devnull()) 167 | j_id = int(res.split()[-1]) # slurm job-id 168 | except subprocess.CalledProcessError: 169 | #logger.error('Obsproc failed %s:' % obsproc_command) 170 | raise # re-raise exception 171 | utils.waitJobToFinish(j_id) 172 | else: 173 | # run locally 174 | subprocess.check_call(os.path.join(obsproc_dir, 
'obsproc.exe'), 175 | cwd=obsproc_dir, 176 | stdout=utils.devnull(), 177 | stderr=utils.devnull()) 178 | 179 | return None 180 | 181 | def prepare_symlink_files(self, domain): 182 | ''' 183 | prepare WRFDA directory 184 | ''' 185 | # set domain specific workdir 186 | wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain)) 187 | # read obsproc namelist 188 | obsproc_nml = f90nml.read(os.path.join(self.obs[domain][0], 189 | 'namelist.obsproc')) 190 | # symlink da_wrfvar.exe, LANDUSE.TBL, be.dat.cv3 191 | os.symlink(os.path.join( 192 | self.config['filesystem']['wrfda_dir'], 'var/da/da_wrfvar.exe' 193 | ), os.path.join(wrfda_workdir, 'da_wrfvar.exe')) 194 | if self.check_cv5_cv7(): 195 | # symlink the correct be.dat from the list 196 | os.symlink(self.wrfda_be_dat, 197 | os.path.join(wrfda_workdir, 'be.dat')) 198 | else: 199 | # cv3 200 | os.symlink(os.path.join( 201 | self.config['filesystem']['wrfda_dir'], 'var/run/be.dat.cv3' 202 | ), os.path.join(wrfda_workdir, 'be.dat')) 203 | os.symlink(os.path.join( 204 | self.config['filesystem']['wrfda_dir'], 'run/LANDUSE.TBL' 205 | ), os.path.join(wrfda_workdir, 'LANDUSE.TBL')) 206 | # symlink output of obsproc 207 | os.symlink(os.path.join 208 | (self.obs[domain][0], 209 | 'obs_gts_' + obsproc_nml['record2']['time_analysis'] + 210 | '.3DVAR' 211 | ), os.path.join(wrfda_workdir, 'ob.ascii')) 212 | 213 | def create_parame(self, parame_type, domain): 214 | # set domain specific workdir 215 | wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain)) 216 | filename = os.path.join(wrfda_workdir, 'parame.in') 217 | utils.silentremove(filename) 218 | # add configuration to parame.in file 219 | parame = open(filename, 'w') # open file 220 | if parame_type == 'lower': 221 | # start config file lower boundary conditions 222 | parame.write("""&control_param 223 | da_file = './fg' 224 | wrf_input = './wrfinput_d01' 225 | domain_id = 1 226 | cycling = .true. 227 | debug = .true. 228 | update_low_bdy = .true. 229 | update_lsm = .true. 230 | var4d_lbc = .false. 231 | iswater = 16 232 | / 233 | """) 234 | # end config file lower boundary conditions 235 | else: 236 | # start config file lateral boundary conditions 237 | parame.write("""&control_param 238 | da_file = '/home/haren/model/WRFV3/run2/wrfinput_d01' 239 | wrf_bdy_file = './wrfbdy_d01' 240 | domain_id = 1 241 | cycling = .true. 242 | debug = .true. 243 | update_low_bdy = .false. 244 | update_lateral_bdy = .true. 245 | update_lsm = .false. 246 | var4d_lbc = .false. 
    def prepare_wrfda_namelist(self, domain):
        # set domain specific workdir
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        # read WRFDA namelist, use namelist.wrfda as supplied in config.json
        # if not supplied, fall back to default from WRFDA
        if utils.check_file_exists(
                self.config['options_wrfda']['namelist.wrfda'],
                boolean=True):
            wrfda_namelist = self.config['options_wrfda']['namelist.wrfda']
        else:
            wrfda_namelist = os.path.join(
                self.config['filesystem']['wrfda_dir'],
                'var/test/tutorial/namelist.input')
        wrfda_nml = f90nml.read(wrfda_namelist)
        # read WRF namelist in WRF work_dir
        wrf_nml = f90nml.read(os.path.join(
            self.config['filesystem']['wrf_run_dir'], 'namelist.input'))
        # set domain specific information in namelist
        for var in ['e_we', 'e_sn', 'e_vert', 'dx', 'dy']:
            # get variable from ${RUNDIR}/namelist.input
            var_value = wrf_nml['domains'][var]
            # set domain specific variable in WRFDA_WORKDIR/namelist.input
            wrfda_nml['domains'][var] = var_value[domain - 1]
        for var in ['mp_physics', 'ra_lw_physics', 'ra_sw_physics', 'radt',
                    'sf_sfclay_physics', 'sf_surface_physics',
                    'bl_pbl_physics', 'cu_physics', 'cudt',
                    'num_soil_layers']:
            # get variable from ${RUNDIR}/namelist.input
            var_value = wrf_nml['physics'][var]
            # set domain specific variable in WRFDA_WORKDIR/namelist.input;
            # scalar options apply to all domains
            try:
                wrfda_nml['physics'][var] = var_value[domain - 1]
            except TypeError:
                wrfda_nml['physics'][var] = var_value
        obsproc_nml = f90nml.read(os.path.join(self.obs[domain][0],
                                               'namelist.obsproc'))
        # sync wrfda namelist with obsproc namelist
        wrfda_nml['wrfvar18']['analysis_date'] = (
            obsproc_nml['record2']['time_analysis'])
        wrfda_nml['wrfvar21']['time_window_min'] = (
            obsproc_nml['record2']['time_window_min'])
        wrfda_nml['wrfvar22']['time_window_max'] = (
            obsproc_nml['record2']['time_window_max'])
        if self.check_cv5_cv7():
            wrfda_nml['wrfvar7']['cv_options'] = int(
                self.config['options_wrfda']['cv_type'])
            wrfda_nml['wrfvar6']['max_ext_its'] = 2
            wrfda_nml['wrfvar5']['check_max_iv'] = True
        else:
            wrfda_nml['wrfvar7']['cv_options'] = 3
        tana = utils.return_validate(
            obsproc_nml['record2']['time_analysis'][:-6])
        wrfda_nml['time_control']['start_year'] = tana.year
        wrfda_nml['time_control']['start_month'] = tana.month
        wrfda_nml['time_control']['start_day'] = tana.day
        wrfda_nml['time_control']['start_hour'] = tana.hour
        wrfda_nml['time_control']['end_year'] = tana.year
        wrfda_nml['time_control']['end_month'] = tana.month
        wrfda_nml['time_control']['end_day'] = tana.day
        wrfda_nml['time_control']['end_hour'] = tana.hour
        # save changes to wrfda_nml
        utils.silentremove(os.path.join(wrfda_workdir, 'namelist.input'))
        wrfda_nml.write(os.path.join(wrfda_workdir, 'namelist.input'))

    def check_cv5_cv7(self):
        '''
        return True if cv_type=5 or cv_type=7 is set and
        be.dat is defined (and exists on the filesystem)
        for the outer domain in config.json
        '''
        if int(self.config['options_wrfda']['cv_type']) in [5, 7]:
            # check if be.dat is a filepath or a list of filepaths
            if isinstance(self.config['options_wrfda']['be.dat'], str):
                # option is a filepath
                self.wrfda_be_dat = self.config['options_wrfda']['be.dat']
            elif isinstance(self.config['options_wrfda']['be.dat'], list):
                if len(self.config['options_wrfda']['be.dat']) == 1:
                    # length == 1, so treat the first element as the str case
                    month_idx = 0
                elif len(self.config['options_wrfda']['be.dat']) == 12:
                    # there is one be.dat matrix for each month
                    # find month number from self.datestart
                    month_idx = self.datestart.month - 1
                else:
                    # list but not of length 1 or 12
                    raise IOError(
                        "config['options_wrfda']['be.dat'] should be a "
                        "string or a list of length 1 or 12. Found a "
                        "list of length " +
                        str(len(self.config['options_wrfda']['be.dat'])))
                self.wrfda_be_dat = self.config[
                    'options_wrfda']['be.dat'][month_idx]
            else:
                # not a list or str
                raise TypeError(
                    "unknown type for be.dat configuration: " +
                    str(type(self.config['options_wrfda']['be.dat'])))
            return utils.check_file_exists(self.wrfda_be_dat, boolean=True)
        # cv_type is not 5 or 7: fall back to the default cv3 background
        # errors shipped with WRFDA
        return False
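    # Illustrative config.json fragment (hypothetical paths) accepted by
    # check_cv5_cv7() above; 'be.dat' may be a single path, a list with one
    # entry, or a list with twelve monthly entries:
    #
    #   "options_wrfda": {
    #     "cv_type": "5",
    #     "be.dat": ["/data/be/be.dat.01", ..., "/data/be/be.dat.12"]
    #   }
    #
    # With twelve entries the month of self.datestart selects the matrix
    # (month_idx = self.datestart.month - 1).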
    def prepare_wrfda(self):
        '''
        prepare WRFDA
        '''
        # prepare a WRFDA workdirectory for each domain
        for domain in range(1, self.max_dom + 1):
            self.prepare_symlink_files(domain)
            self.prepare_wrfda_namelist(domain)

    def wrfvar_run(self, domain):
        '''
        run da_wrfvar.exe
        '''
        # set domain specific workdir
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        logfile = os.path.join(wrfda_workdir, 'log.wrfda_d' + str(domain))
        j_id = None
        if len(self.config['options_slurm']['slurm_wrfvar.exe']):
            if j_id:
                mid = "--dependency=afterok:%d" % j_id
                wrfvar_command = ['sbatch', mid, self.config[
                    'options_slurm']['slurm_wrfvar.exe']]
            else:
                wrfvar_command = ['sbatch', self.config[
                    'options_slurm']['slurm_wrfvar.exe']]
            utils.check_file_exists(wrfvar_command[-1])
            try:
                res = subprocess.check_output(wrfvar_command,
                                              cwd=wrfda_workdir,
                                              stderr=utils.devnull())
                j_id = int(res.split()[-1])  # slurm job-id
            except subprocess.CalledProcessError:
                # logger.error('Wrfvar failed %s:' % wrfvar_command)
                raise  # re-raise exception
            utils.waitJobToFinish(j_id)
        else:
            # run locally; capture stdout/stderr of da_wrfvar.exe in the
            # logfile (shell-style redirection does not work through
            # subprocess without a shell)
            with open(logfile, 'w') as logf:
                subprocess.check_call(os.path.join(wrfda_workdir,
                                                   'da_wrfvar.exe'),
                                      cwd=wrfda_workdir, stdout=logf,
                                      stderr=subprocess.STDOUT)

    def prepare_updatebc(self, datestart):
        # prepare a WRFDA workdirectory for each domain
        for domain in range(1, self.max_dom + 1):
            # TODO: add check that domain is an int
            # define domain specific workdir
            wrfda_workdir = os.path.join(self.wrfda_workdir,
                                         "d0" + str(domain))
            # general functionality independent of boundary type in parame.in
            if os.path.exists(wrfda_workdir):
                shutil.rmtree(wrfda_workdir)  # remove wrfda_workdir
            utils._create_directory(os.path.join(wrfda_workdir, 'var', 'da'))
            # define parame.in file
            self.create_parame('lower', domain)
            # symlink da_update_bc.exe
            os.symlink(os.path.join(
                self.config['filesystem']['wrfda_dir'],
                'var/da/da_update_bc.exe'
            ), os.path.join(wrfda_workdir, 'da_update_bc.exe'))
            # copy wrfbdy_d01 file (lateral boundaries) to WRFDA_WORKDIR
            shutil.copyfile(os.path.join(self.rundir, 'wrfbdy_d01'),
                            os.path.join(wrfda_workdir, 'wrfbdy_d01'))
            # set parame.in file for updating lower boundary first
            self.prepare_updatebc_type('lower', datestart, domain)
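    # Note (added commentary): the slurm branches above rely on sbatch
    # printing 'Submitted batch job <id>' on success, which is why
    # int(res.split()[-1]) yields the job id. Since j_id is always None on
    # entry, the --dependency=afterok branch never triggers here; it is
    # kept only as a hook for possible job chaining.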
    def prepare_updatebc_type(self, boundary_type, datestart, domain):
        # set domain specific workdir
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        if boundary_type == 'lower':
            # define parame.in file
            self.create_parame(boundary_type, domain)
            # copy first guess (wrfout in wrfinput format) for WRFDA
            first_guess = os.path.join(
                self.rundir, ('wrfvar_input_d0' + str(domain) + '_' +
                              datetime.strftime(datestart,
                                                '%Y-%m-%d_%H:%M:%S')))
            try:
                shutil.copyfile(first_guess,
                                os.path.join(wrfda_workdir, 'fg'))
            except IOError:
                # no wrfvar_input available: fall back to the wrfinput
                # file produced by real.exe
                shutil.copyfile(
                    os.path.join(self.rundir, 'wrfinput_d0' + str(domain)),
                    os.path.join(wrfda_workdir, 'fg'))
            # read parame.in file
            parame = f90nml.read(os.path.join(wrfda_workdir, 'parame.in'))
            # set domain in parame.in
            parame['control_param']['domain_id'] = domain
            # set wrf_input (IC from WPS and WRF real)
            parame['control_param']['wrf_input'] = str(os.path.join(
                self.rundir, 'wrfinput_d0' + str(domain)))
            # save changes to parame.in file
            utils.silentremove(os.path.join(wrfda_workdir, 'parame.in'))
            parame.write(os.path.join(wrfda_workdir, 'parame.in'))
        elif boundary_type == 'lateral':
            # define parame.in file
            self.create_parame(boundary_type, domain)
            # read parame.in file
            parame = f90nml.read(os.path.join(wrfda_workdir, 'parame.in'))
            # set output from WRFDA
            parame['control_param']['da_file'] = os.path.join(
                wrfda_workdir, 'wrfvar_output')
            # save changes to parame.in file
            utils.silentremove(os.path.join(wrfda_workdir, 'parame.in'))
            parame.write(os.path.join(wrfda_workdir, 'parame.in'))
        else:
            raise ValueError('unknown boundary type: ' + str(boundary_type))

    def updatebc_run(self, domain):
        # set domain specific workdir
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        # run da_update_bc.exe
        j_id = None
        if len(self.config['options_slurm']['slurm_updatebc.exe']):
            if j_id:
                mid = "--dependency=afterok:%d" % j_id
                updatebc_command = ['sbatch', mid, self.config[
                    'options_slurm']['slurm_updatebc.exe']]
            else:
                updatebc_command = ['sbatch', self.config[
                    'options_slurm']['slurm_updatebc.exe']]
            try:
                res = subprocess.check_output(updatebc_command,
                                              cwd=wrfda_workdir,
                                              stderr=utils.devnull())
                j_id = int(res.split()[-1])  # slurm job-id
            except subprocess.CalledProcessError:
                # logger.error('Updatebc failed %s:' % updatebc_command)
                raise  # re-raise exception
            utils.waitJobToFinish(j_id)
        else:
            # run locally
            subprocess.check_call(
                os.path.join(wrfda_workdir, 'da_update_bc.exe'),
                cwd=wrfda_workdir,
                stdout=utils.devnull(),
                stderr=utils.devnull())
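    # Plausible call order for a full assimilation cycle (a sketch inferred
    # from the methods above, not a verbatim copy of the cylc suite):
    #
    #   wrfda.prepare_updatebc(datestart)        # lower-boundary parame.in
    #   for dom in range(1, wrfda.max_dom + 1):
    #       wrfda.updatebc_run(dom)              # da_update_bc.exe (lower)
    #   wrfda.prepare_wrfda()
    #   for dom in range(1, wrfda.max_dom + 1):
    #       wrfda.wrfvar_run(dom)                # da_wrfvar.exe (3DVAR)
    #   wrfda.prepare_updatebc_type('lateral', datestart, 1)
    #   wrfda.updatebc_run(1)                    # da_update_bc.exe (lateral)
    #   wrfda.wrfda_post(datestart)              # move results into RUNDIR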
    def wrfda_post(self, datestart):
        '''
        Move files into the WRF run dir
        after all data assimilation steps have completed
        '''
        # post-process the WRFDA workdirectory of each domain
        for domain in range(1, self.max_dom + 1):
            # set domain specific workdir
            wrfda_workdir = os.path.join(self.wrfda_workdir,
                                         "d0" + str(domain))
            if domain == 1:
                # copy over updated lateral boundary conditions to RUNDIR,
                # only for the outer domain
                utils.silentremove(os.path.join(self.rundir, 'wrfbdy_d01'))
                shutil.copyfile(os.path.join(wrfda_workdir, 'wrfbdy_d01'),
                                os.path.join(self.rundir, 'wrfbdy_d01'))
            # copy log files
            datestr = datetime.strftime(datestart, '%Y-%m-%d_%H:%M:%S')
            rsl_out_name = 'wrfda_rsl_out_' + datestr
            statistics_out_name = 'wrfda_statistics_' + datestr
            try:
                shutil.copyfile(os.path.join(wrfda_workdir, 'rsl.out.0000'),
                                os.path.join(self.rundir, rsl_out_name))
            except IOError:
                pass  # log file was not produced; nothing to copy
            try:
                shutil.copyfile(os.path.join(wrfda_workdir, 'statistics'),
                                os.path.join(self.rundir,
                                             statistics_out_name))
            except IOError:
                pass  # statistics file was not produced; nothing to copy
            # copy wrfvar_output_d0${domain} to ${RUNDIR}/wrfinput_d0${domain}
            utils.silentremove(os.path.join(self.rundir,
                                            'wrfinput_d0' + str(domain)))
            if not self.low_only:
                shutil.copyfile(os.path.join(wrfda_workdir, 'wrfvar_output'),
                                os.path.join(self.rundir,
                                             'wrfinput_d0' + str(domain)))
            else:
                # low_only: keep the (lower-boundary updated) first guess
                shutil.copyfile(os.path.join(wrfda_workdir, 'fg'),
                                os.path.join(self.rundir,
                                             'wrfinput_d0' + str(domain)))
--------------------------------------------------------------------------------