├── Diagnostics_Package ├── logos │ ├── arm_logo.png │ ├── e3sm_logo.png │ ├── thread_logo.png │ └── asr_logo_final.png ├── convert_OBS_LES_output │ ├── LES_to_DPxx_format │ │ ├── RICO_intercomparison_1dvars_to_DPxx.py │ │ ├── RICO_intercomparison_profiles_to_DPxx.py │ │ └── SAM_LES_to_Dpxx.py │ ├── OBS_to_DPxx_format │ │ ├── MAGIC_to_DPxx_kazr.py │ │ ├── MAGIC_to_DPxx_1d.py │ │ ├── CASS_to_DPxx_cloudfrac.py │ │ ├── GOAMAZON_double_to_DPxx_precip.py │ │ ├── GOAMAZON_single_to_DPxx_precip.py │ │ ├── LAFE_to_DPxx_dopplerlidar.w2.py │ │ ├── GATEIII_to_DPxx.py │ │ ├── ARM97_to_DPxx.py │ │ ├── MAGIC_to_DPxx_sounding.py │ │ ├── CASS_to_DPxx_1dvars.py │ │ ├── GOAMAZON_double_to_DPxx_ARMCMBE.py │ │ ├── GOAMAZON_single_to_DPxx_ARMCMBE.py │ │ ├── DYNAMO_to_DPxx.py │ │ ├── SGPcont_to_DPxx.py │ │ ├── COMBLE_to_DPxx_maclwp.py │ │ ├── GOAMAZON_to_DPxx.py │ │ └── LAFE_to_DPxx_sounding.py │ └── DPxx_to-from_E3SM_format │ │ ├── DPxx_to_E3SM.py │ │ └── E3SM_to_DPxx.py └── diagnostics_user_driver.py ├── DPxx_SCREAM_SCRIPTS ├── yaml_file_example │ ├── legacy_files_April9_2025 │ │ ├── README.txt │ │ ├── scream_output_avg_1hour.yaml │ │ └── scream_horiz_avg_output_15min.yaml │ ├── scream_output_avg_1hour.yaml │ └── scream_horiz_avg_output_15min.yaml ├── regrid_utilities │ ├── generate_dpxx_horiz_avg_weights.py │ └── regrid_dpxx_output.py ├── run_dpxx_scream_TRACER.csh ├── run_dpxx_scream_ATEX.csh ├── run_dpxx_scream_GABLS.csh ├── run_dpxx_scream_GATEIII.csh ├── run_dpxx_scream_TOGAII.csh ├── run_dpxx_scream_ARM95.csh ├── run_dpxx_scream_ARM97.csh └── run_dpxx_scream_BOMEX.csh ├── README.md ├── LICENSE └── E3SM_SCM_scripts ├── E3SM_REPLAY_rename_limitedarea.csh └── replay_postprocess_global_onecolumn.py /Diagnostics_Package/logos/arm_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/E3SM-Project/scmlib/HEAD/Diagnostics_Package/logos/arm_logo.png -------------------------------------------------------------------------------- /Diagnostics_Package/logos/e3sm_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/E3SM-Project/scmlib/HEAD/Diagnostics_Package/logos/e3sm_logo.png -------------------------------------------------------------------------------- /Diagnostics_Package/logos/thread_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/E3SM-Project/scmlib/HEAD/Diagnostics_Package/logos/thread_logo.png -------------------------------------------------------------------------------- /Diagnostics_Package/logos/asr_logo_final.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/E3SM-Project/scmlib/HEAD/Diagnostics_Package/logos/asr_logo_final.png -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/yaml_file_example/legacy_files_April9_2025/README.txt: -------------------------------------------------------------------------------- 1 | On April 9, 2025 PR 7197 went into the EAMxx code which renames many namelist and YAML related parameters. While users transition to the code, we provide the pre PR 7197 YAML files here. 
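For reference, the renames visible between the legacy files in this directory and the current files one directory up move the capitalized keys to lowercase snake_case: Averaging Type -> averaging_type (with the value Average -> average), Max Snapshots Per File -> max_snapshots_per_file, Fields -> fields, Physics PG2 -> physics_pg2, Field Names -> field_names, and Frequency -> frequency; filename_prefix and frequency_units are unchanged.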
2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # scmlib 2 | Library of IOP cases for the Single Column Model and Doubly Periodic cloud resolving model. 3 | 4 | Scripts to run the E3SM Single Column Model (SCM) and Doubly Periodic configuration of SCREAM (DP-SCREAM). 5 | Description of each case can be found at https://github.com/E3SM-Project/scmlib/wiki/E3SM-Intensive-Observation-Period-Case-Library 6 | 7 | Documentation for the [E3SM SCM and DP-EAMxx diagnostics package can be found here](https://github.com/E3SM-Project/scmlib/blob/master/Diagnostics_Package/README.md). 8 | 9 | Scripts are maintained on a regular basis. Should you encounter problems, open a GitHub issue on this repo. 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, Energy Exascale Earth System Model Project 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/LES_to_DPxx_format/RICO_intercomparison_1dvars_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Output path 6 | outpath = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/" 7 | 8 | # Define all cases 9 | cases = [ 10 | { 11 | "input_file": "/global/homes/b/bogensch/dp_scream_paper/first_submission_scripts/rico_data/RICO_les_MF.nc", 12 | "output_file": outpath + "RICO.les.intercomparison_ensavg.1dvars.dpxx_format.nc", 13 | "time_offset": 86400.0, 14 | }, 15 | ] 16 | 17 | # Define 2D variable mappings 18 | two_d_vars = [ 19 | ("lwp", "LiqWaterPath_horiz_avg", 1.0 / 1000.0), 20 | ("rwp", "RainWaterPath_horiz_avg", 1.0 / 1000.0), 21 | ("prec_srf", "precip_total_surf_mass_flux_horiz_avg", 1.0 / 1000.0), 22 | ] 23 | 24 | # Process each case 25 | for case in cases: 26 | input_file = case["input_file"] 27 | output_file = case["output_file"] 28 | time_offset = case["time_offset"] 29 | 30 | # Ensure output directory exists 31 | output_dir = os.path.dirname(output_file) 32 | os.makedirs(output_dir, exist_ok=True) 33 | 34 | # Open the input file 35 | ds_in = xr.open_dataset(input_file) 36 | ds_out = xr.Dataset() 37 | 38 | # Process time 39 | time_data = ds_in["time_series"].values 40 | ds_out["time"] = xr.DataArray(time_data / time_offset, dims=["time"]) 41 | 42 | # Process z 43 | z_data_flipped = ds_in["height"].values[::-1] 44 | z_mid_les = np.tile(z_data_flipped, (len(time_data), 1)) 45 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid_les, dims=["time", "lev"]) 46 | 47 | # Process 2D variables: take nanmean over les 48 | for var_in, var_out, factor in two_d_vars: 49 | if var_in in ds_in: 50 | var_data = ds_in[var_in].mean(dim="les", skipna=True).values * factor 51 | ds_out[var_out] = xr.DataArray(var_data, dims=["time"]) 52 | print(f"{var_out}: shape={var_data.shape}, valid={np.sum(np.isfinite(var_data))}") 53 | else: 54 | print(f"Warning: Variable '{var_in}' not found in {input_file}. Skipping.") 55 | 56 | # Copy global attributes 57 | ds_out.attrs = ds_in.attrs 58 | 59 | # Write output 60 | ds_out.to_netcdf(output_file) 61 | print(f"Output file created at: {output_file}") 62 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/MAGIC_to_DPxx_kazr.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/MAGIC_analysis/15A_Obs_diag_v2.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/MAGIC.obs.kazr.dpxx_format.nc" 8 | time_offset = 1.25 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. 
Transfer and adjust the "time" variable 25 | time_data = ds_in["time_kazr"].values 26 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 27 | 28 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 29 | 30 | z_data = ds_in["z_kazr"].values[::-1] # just a dummy variable 31 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 32 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 33 | 34 | # Define lists for variables 35 | three_d_vars = [ 36 | ("cf_kazr", "cldfrac_tot_for_analysis_horiz_avg", 1.0), 37 | ] 38 | 39 | # Process 3D variables 40 | 41 | for var_in, var_out, factor in three_d_vars: 42 | if var_in in ds_in: 43 | data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 44 | print(np.shape(data_val)) 45 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 46 | else: 47 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 48 | 49 | # Clip all variables in ds_out to ensure no values are below zero 50 | for var_name in ds_out.data_vars: 51 | da = ds_out[var_name] 52 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 53 | ds_out[var_name] = da.clip(min=0) 54 | 55 | # Copy attributes from the input dataset to the output dataset 56 | ds_out.attrs = ds_in.attrs 57 | 58 | # Add the units attribute 59 | ds_out["time"].attrs["units"] = "days since 2013-07-21 05:27:00" 60 | 61 | # Save the new dataset to a NetCDF file 62 | ds_out.to_netcdf(output_file) 63 | 64 | print(f"Output file created at: {output_file}") 65 | 66 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/MAGIC_to_DPxx_1d.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/MAGIC_analysis/15A_Obs_diag_v2.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/MAGIC.obs.1dvars.dpxx_format.nc" 8 | time_offset = 1.25 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["time_obs"].values 26 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 27 | 28 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 
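# (Explanatory note, inferred from the converters in this directory: the
# diagnostics package appears to require a z_mid_horiz_avg field in every
# file, even ones carrying only time series, so any monotonic profile such
# as the reversed z_kazr below works as a placeholder; it is never compared
# against model output.)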
29 | 30 | z_data = ds_in["z_kazr"].values[::-1] # just a dummy variable 31 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 32 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 33 | 34 | # Define lists for variables 35 | two_d_vars = [ 36 | ("lwp_iop_obs", "LiqWaterPath_horiz_avg", 1.0/1000.0), 37 | ("pwv_iop_obs", "VapWaterPath_horiz_avg",1.0), 38 | ("lhf_obs", "surface_upward_latent_heat_flux_horiz_avg",1.0), 39 | ("shf_obs", "surf_sens_flux_horiz_avg",1.0), 40 | ("rain_obs", "precip_total_surf_mass_flux_horiz_avg",1.15741e-5/1000.), 41 | ("fsds_obs", "SW_flux_dn_at_model_bot_horiz_avg",1.0), 42 | ("flds_obs", "LW_flux_dn_at_model_bot_horiz_avg",1.0), 43 | ] 44 | 45 | # Process 2D variables 46 | for var_in, var_out, factor in two_d_vars: 47 | if var_in in ds_in: 48 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 49 | else: 50 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 51 | 52 | # Copy attributes from the input dataset to the output dataset 53 | ds_out.attrs = ds_in.attrs 54 | 55 | # Add the units attribute 56 | ds_out["time"].attrs["units"] = "days since 2013-07-21 05:27:00" 57 | 58 | # Save the new dataset to a NetCDF file 59 | ds_out.to_netcdf(output_file) 60 | 61 | print(f"Output file created at: {output_file}") 62 | 63 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/CASS_to_DPxx_cloudfrac.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/CASS_obs/cass_obs_cloud_fraction.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/CASS.obs.ARSCL.dpxx_format.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["hour"].values 26 | ds_out["time"] = xr.DataArray((time_data - 6.5)/24., dims=["time"]) 27 | 28 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 29 | 30 | z_data = ds_in["height"].values[::-1] # just a dummy variable 31 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 32 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 33 | 34 | # Define lists for variables 35 | three_d_vars = [ 36 | ("cf", "cldfrac_tot_for_analysis_horiz_avg", 1.0), 37 | ] 38 | 39 | # Process 3D variables 40 | 41 | for var_in, var_out, factor in three_d_vars: 42 | if var_in in ds_in: 43 | # data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 44 | data_val = np.squeeze(ds_in[var_in].values).T[:, ::-1] 45 | print(np.shape(data_val)) 46 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 47 | else: 48 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 49 | 50 | # Clip all variables in ds_out to ensure no values are below zero 51 | for var_name in ds_out.data_vars: 52 | da = ds_out[var_name] 53 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 54 | ds_out[var_name] = da.clip(min=0) 55 | 56 | # Copy attributes from the input dataset to the output dataset 57 | ds_out.attrs = ds_in.attrs 58 | 59 | # Add the units attribute 60 | ds_out["time"].attrs["units"] = "days since 2000-07-24 12:00:00" 61 | 62 | # Save the new dataset to a NetCDF file 63 | ds_out.to_netcdf(output_file) 64 | 65 | print(f"Output file created at: {output_file}") 66 | 67 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GOAMAZON_double_to_DPxx_precip.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | import csv 5 | 6 | def csv_read_in(filename): 7 | 8 | file=open(filename) 9 | csvreader=csv.reader(file) 10 | 11 | data=[] 12 | for row in csvreader: 13 | data.append(row) 14 | 15 | floatdata=np.float_(data) 16 | return floatdata 17 | 18 | # Define input and output file paths 19 | input_file = "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/603_obs/GOAMAZON_day603.csv" 20 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON_doublepulse.obs.radar_precip.dpxx_format.nc" 21 | time_offset = 168.5 22 | 23 | # Ensure the directory for the output file exists 24 | output_dir = os.path.dirname(output_file) 25 | if not os.path.exists(output_dir): 26 | os.makedirs(output_dir) 27 | print(f"Directory '{output_dir}' created.") 28 | else: 29 | print(f"Directory '{output_dir}' already exists.") 30 | 31 | # Read in dataset 32 | prect_in=csv_read_in(input_file) 33 | 34 | # time stored in prect_in[:,0] 35 | # precip values stored in prect_in[:,1] 36 | 37 | prect_in[:,0]=(prect_in[:,0]-8.0)/24. 38 | 39 | # Create a new dataset for the output 40 | ds_out = xr.Dataset() 41 | 42 | # Assign filtered time to output dataset 43 | ds_out["time"] = xr.DataArray(prect_in[:,0], dims=["time"]) 44 | 45 | 46 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 
47 | 48 | z_data = [10.0,20.0] 49 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 50 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 51 | 52 | # Define lists for variables 53 | two_d_vars = [ 54 | ("cld_frac", "precip_total_surf_mass_flux_horiz_avg", 0.001/3600.0), 55 | ] 56 | 57 | # Process 2D variables (var_in is unused here; the values come straight from the CSV's second column) 58 | 59 | 60 | for var_in, var_out, factor in two_d_vars: 61 | ds_out[var_out] = xr.DataArray(np.squeeze(prect_in[:,1]), dims=["time"]) * factor 62 | 63 | # Clip all variables in ds_out to ensure no values are below zero 64 | for var_name in ds_out.data_vars: 65 | da = ds_out[var_name] 66 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 67 | ds_out[var_name] = da.clip(min=0) 68 | 69 | # Add the units attribute 70 | ds_out["precip_total_surf_mass_flux_horiz_avg"].attrs["units"]="m/s" 71 | ds_out["time"].attrs["units"] = "days since 2015-08-26 12:00:00" 72 | 73 | # Save the new dataset to a NetCDF file 74 | ds_out.to_netcdf(output_file) 75 | 76 | print(f"Output file created at: {output_file}") 77 | 78 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GOAMAZON_single_to_DPxx_precip.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | import csv 5 | 6 | def csv_read_in(filename): 7 | 8 | file=open(filename) 9 | csvreader=csv.reader(file) 10 | 11 | data=[] 12 | for row in csvreader: 13 | data.append(row) 14 | 15 | floatdata=np.asarray(data, dtype=float) # np.float_ was removed in NumPy 2.0; use an explicit float conversion 16 | return floatdata 17 | 18 | # Define input and output file paths 19 | input_file = "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/278_obs/GOAMAZON_day278.csv" 20 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON_singlepulse.obs.radar_precip.dpxx_format.nc" 21 | time_offset = 278.5 22 | 23 | # Ensure the directory for the output file exists 24 | output_dir = os.path.dirname(output_file) 25 | if not os.path.exists(output_dir): 26 | os.makedirs(output_dir) 27 | print(f"Directory '{output_dir}' created.") 28 | else: 29 | print(f"Directory '{output_dir}' already exists.") 30 | 31 | # Read in dataset 32 | prect_in=csv_read_in(input_file) 33 | 34 | # time stored in prect_in[:,0] 35 | # precip values stored in prect_in[:,1] 36 | 37 | prect_in[:,0]=(prect_in[:,0]-8.0)/24. 38 | 39 | # Create a new dataset for the output 40 | ds_out = xr.Dataset() 41 | 42 | # Assign filtered time to output dataset 43 | ds_out["time"] = xr.DataArray(prect_in[:,0], dims=["time"]) 44 | 45 | 46 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements.
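# (Explanatory note on the 0.001/3600.0 factor defined below: it implies the
# radar precipitation in the CSV is stored in mm/hour, since
# mm/hr * 0.001 m/mm / 3600 s/hr ~= 2.78e-7 m/s, matching the "m/s" units
# attribute set on precip_total_surf_mass_flux_horiz_avg near the end of the
# script.)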
47 | 48 | z_data = [10.0,20.0] 49 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 50 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 51 | 52 | # Define lists for variables 53 | two_d_vars = [ 54 | ("cld_frac", "precip_total_surf_mass_flux_horiz_avg", 0.001/3600.0), 55 | ] 56 | 57 | # Process 2D variables (var_in is unused here; the values come straight from the CSV's second column) 58 | 59 | 60 | for var_in, var_out, factor in two_d_vars: 61 | ds_out[var_out] = xr.DataArray(np.squeeze(prect_in[:,1]), dims=["time"]) * factor 62 | 63 | # Clip all variables in ds_out to ensure no values are below zero 64 | for var_name in ds_out.data_vars: 65 | da = ds_out[var_name] 66 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 67 | ds_out[var_name] = da.clip(min=0) 68 | 69 | # Add the units attribute 70 | ds_out["precip_total_surf_mass_flux_horiz_avg"].attrs["units"]="m/s" 71 | ds_out["time"].attrs["units"] = "days since 2014-10-05 12:00:00" 72 | 73 | # Save the new dataset to a NetCDF file 74 | ds_out.to_netcdf(output_file) 75 | 76 | print(f"Output file created at: {output_file}") 77 | 78 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/LAFE_to_DPxx_dopplerlidar.w2.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/DP_paper_figures/ARM_data/LAFE/sgpdlprofwstats4newsC1.c1.20170823_24.000000.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/LAFE.obs.w2.dpxx_format.nc" 8 | 9 | # Ensure the directory for the output file exists 10 | output_dir = os.path.dirname(output_file) 11 | if not os.path.exists(output_dir): 12 | os.makedirs(output_dir) 13 | print(f"Directory '{output_dir}' created.") 14 | else: 15 | print(f"Directory '{output_dir}' already exists.") 16 | 17 | # Open the input file 18 | ds_in = xr.open_dataset(input_file,decode_times=False) 19 | 20 | # Create a new dataset for the output 21 | ds_out = xr.Dataset() 22 | 23 | # 1. Transfer and adjust the "time" variable 24 | time_data = ds_in["time"].values 25 | ds_out["time"] = xr.DataArray((time_data - 41400.)/86400., dims=["time"]) 26 | 27 | z_data = ds_in["height"].values[::-1] # just a dummy variable 28 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 29 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 30 | 31 | # Define lists for variables 32 | three_d_vars = [ 33 | ("w_variance", "w_variance_horiz_avg", 1.0), 34 | ] 35 | 36 | # Process 3D variables 37 | 38 | for var_in, var_out, factor in three_d_vars: 39 | if var_in in ds_in: 40 | data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 41 | print(np.shape(data_val)) 42 | 43 | if var_in == "w_variance": 44 | # Read and flip the 'noise' variable to match w_variance shape 45 | noise = np.squeeze(ds_in["noise"].values)[:, ::-1] 46 | data_val[noise > 5] = np.nan # Apply the filtering 47 | 48 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 49 | else: 50 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 51 | 52 | # Clip all variables in ds_out to ensure no values are below zero 53 | for var_name in ds_out.data_vars: 54 | da = ds_out[var_name] 55 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 56 | ds_out[var_name] = da.clip(min=0) 57 | 58 | # Copy attributes from the input dataset to the output dataset 59 | ds_out.attrs = ds_in.attrs 60 | 61 | # Add the units attribute 62 | ds_out["time"].attrs["units"] = "days since 2017-08-23 11:30:00" 63 | 64 | # Save the new dataset to a NetCDF file 65 | ds_out.to_netcdf(output_file) 66 | 67 | print(f"Output file created at: {output_file}") 68 | 69 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GATEIII_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/cfs/cdirs/e3sm/inputdata/atm/cam/scam/iop/GATEIII_iopfile_4scam.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GATEIII.varanal.various.dpxx_format.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["tsec"].values 26 | time_data = (time_data-time_data[0])/86400. 27 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 28 | 29 | p_data = ds_in["lev"].values/100. 30 | p_mid_obs = np.tile(p_data, (len(ds_out["time"]), 1)) 31 | ds_out["p_mid_obs"] = xr.DataArray(p_mid_obs, dims=["time", "lev"]) 32 | 33 | # 3. Define lists for 3D and 2D variables 34 | three_d_vars = [ 35 | ("q", "qv_horiz_avg", 1.0), 36 | ("u", "U_horiz_avg", 1.0), 37 | ("v", "V_horiz_avg", 1.0), 38 | ("T", "T_mid_horiz_avg", 1.0), 39 | ("relhum", "RelativeHumidity_horiz_avg", 1.0/100.0) 40 | ] 41 | 42 | two_d_vars = [ 43 | ("Prec", "precip_total_surf_mass_flux_horiz_avg", 1.0) 44 | ] 45 | 46 | # ("", "", ), 47 | 48 | # Process 3D variables 49 | 50 | for var_in, var_out, factor in three_d_vars: 51 | if var_in in ds_in: 52 | data_val = np.squeeze(ds_in[var_in].values) 53 | print(np.shape(data_val)) 54 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 55 | else: 56 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 57 | 58 | # Process 2D variables 59 | for var_in, var_out, factor in two_d_vars: 60 | if var_in in ds_in: 61 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 62 | else: 63 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 64 | 65 | # Copy attributes from the input dataset to the output dataset 66 | ds_out.attrs = ds_in.attrs 67 | 68 | # Add the units attribute 69 | ds_out["time"].attrs["units"] = "days since 1974-08-30 00:00:00" 70 | 71 | # Save the new dataset to a NetCDF file 72 | ds_out.to_netcdf(output_file) 73 | 74 | print(f"Output file created at: {output_file}") 75 | 76 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/ARM97_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/cfs/cdirs/e3sm/inputdata/atm/cam/scam/iop/ARM97_iopfile_4scam.nc" 7 | output_file = "/pscratch/sd/b/bogensch/dp_screamxx_conv/obs_data/OBS_ARM97_full.dpxx.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["tsec"].values 26 | time_data = (time_data-time_data[0])/86400. 27 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 28 | 29 | p_data = ds_in["lev"].values/100. 30 | p_mid_obs = np.tile(p_data, (len(ds_out["time"]), 1)) 31 | ds_out["p_mid_obs"] = xr.DataArray(p_mid_obs, dims=["time", "lev"]) 32 | 33 | # 3. Define lists for 3D and 2D variables 34 | three_d_vars = [ 35 | ("q", "qv_horiz_avg", 1.0), 36 | ("u", "U_horiz_avg", 1.0), 37 | ("v", "V_horiz_avg", 1.0), 38 | ("T", "T_mid_horiz_avg", 1.0), 39 | ("rh", "RelativeHumidity_horiz_avg", 1.0/100.0) 40 | ] 41 | 42 | two_d_vars = [ 43 | ("Prec", "precip_total_surf_mass_flux_horiz_avg", 1.0/1000.0), 44 | ("prew", "VapWaterPath_horiz_avg",10.0) 45 | ] 46 | 47 | # ("", "", ), 48 | 49 | # Process 3D variables 50 | 51 | for var_in, var_out, factor in three_d_vars: 52 | if var_in in ds_in: 53 | data_val = np.squeeze(ds_in[var_in].values) 54 | print(np.shape(data_val)) 55 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 56 | else: 57 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 58 | 59 | # Process 2D variables 60 | for var_in, var_out, factor in two_d_vars: 61 | if var_in in ds_in: 62 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 63 | else: 64 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 65 | 66 | # Copy attributes from the input dataset to the output dataset 67 | ds_out.attrs = ds_in.attrs 68 | 69 | # Add the units attribute 70 | ds_out["time"].attrs["units"] = "days since 1997-06-18 23:29:45" 71 | 72 | # Save the new dataset to a NetCDF file 73 | ds_out.to_netcdf(output_file) 74 | 75 | print(f"Output file created at: {output_file}") 76 | 77 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/MAGIC_to_DPxx_sounding.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/MAGIC_analysis/15A_Obs_diag_v2.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/MAGIC.obs.sounding.dpxx_format.nc" 8 | time_offset = 1.25 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["time_snd"].values 26 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 27 | 28 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 29 | 30 | z_data = ds_in["z_snd"].values[::-1] # just a dummy variable 31 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 32 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 33 | 34 | # Define lists for variables 35 | two_d_vars = [ 36 | ("zi_pbl_snd", "pbl_height_horiz_avg", 1.0), 37 | ] 38 | 39 | # Define lists for variables 40 | three_d_vars = [ 41 | ("rh_snd", "RelativeHumidity_horiz_avg", 1.0), 42 | ("q_snd", "qv_horiz_avg",1.0/1000.), 43 | ("t_snd", "T_mid_horiz_avg",1.0), 44 | ("p_snd", "P_mid_horiz_avg",1.0), 45 | ("theta_snd", "PotentialTemperature_horiz_avg",1.0), 46 | ] 47 | 48 | # Process 2D variables 49 | for var_in, var_out, factor in two_d_vars: 50 | if var_in in ds_in: 51 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 52 | else: 53 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 54 | 55 | # Process 3D variables 56 | for var_in, var_out, factor in three_d_vars: 57 | if var_in in ds_in: 58 | data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 59 | print(np.shape(data_val)) 60 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 61 | else: 62 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 63 | 64 | # Copy attributes from the input dataset to the output dataset 65 | ds_out.attrs = ds_in.attrs 66 | 67 | # Add the units attribute 68 | ds_out["time"].attrs["units"] = "days since 2013-07-21 05:27:00" 69 | 70 | # Save the new dataset to a NetCDF file 71 | ds_out.to_netcdf(output_file) 72 | 73 | print(f"Output file created at: {output_file}") 74 | 75 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/CASS_to_DPxx_1dvars.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file1 = "/global/homes/b/bogensch/THREAD/CASS_obs/cass_obs_lwp_m2.nc" 7 | input_file2 = "/global/homes/b/bogensch/THREAD/CASS_obs/cass_obs_shortwave_down.nc" 8 | input_file3 = "/global/homes/b/bogensch/THREAD/CASS_obs/cass_obs_longwave_down.nc" 9 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/CASS.obs.1dvars.dpxx_format.nc" 10 | time_offset = 0.0 11 | 12 | # Ensure the directory for the output file exists 13 | output_dir = os.path.dirname(output_file) 14 | if not os.path.exists(output_dir): 15 | os.makedirs(output_dir) 16 | print(f"Directory '{output_dir}' created.") 17 | else: 18 | print(f"Directory '{output_dir}' already exists.") 19 | 20 | # Open the input file 21 | ds_in1 = xr.open_dataset(input_file1,decode_times=False) 22 | ds_in2 = xr.open_dataset(input_file2,decode_times=False) 23 | ds_in3 = xr.open_dataset(input_file3,decode_times=False) 24 | 25 | # Create a new dataset for the output 26 | ds_out = xr.Dataset() 27 | 28 | # 1. Transfer and adjust the "time" variable 29 | time_data = ds_in1["hour"].values - 6.0 30 | ds_out["time"] = xr.DataArray((time_data - 0.5)/24.0, dims=["time"]) 31 | 32 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 
33 | 34 | z_data = [10.0,20.0] 35 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 36 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 37 | 38 | # Define lists for variables 39 | two_d_vars = [ 40 | ("lwp_m3", "LiqWaterPath_horiz_avg", 1.0/1000.0), 41 | ("swdn_m0", "SW_flux_dn_at_model_bot_horiz_avg", 1.0), 42 | ("lwdn_m0", "LW_flux_dn_at_model_bot_horiz_avg", 1.0), 43 | ] 44 | 45 | # Process 2D variables 46 | for var_in, var_out, factor in two_d_vars: 47 | 48 | if var_in == "lwp_m3": 49 | data_in = np.squeeze(ds_in1["lwp_m3"].values) 50 | 51 | if var_in == "swdn_m0": 52 | data_in = np.squeeze(ds_in2["swdn_m0"].values) 53 | 54 | if var_in == "lwdn_m0": 55 | data_in = np.squeeze(ds_in3["lwdn_m0"].values) 56 | 57 | ds_out[var_out] = xr.DataArray(np.squeeze(data_in), dims=["time"]) * factor 58 | 59 | # Clip all variables in ds_out to ensure no values are below zero 60 | for var_name in ds_out.data_vars: 61 | da = ds_out[var_name] 62 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 63 | ds_out[var_name] = da.clip(min=0) 64 | 65 | ds_out["LiqWaterPath_horiz_avg"].attrs["units"]="kg/m2" 66 | ds_out["SW_flux_dn_at_model_bot_horiz_avg"].attrs["units"]="W/m2" 67 | ds_out["LW_flux_dn_at_model_bot_horiz_avg"].attrs["units"]="W/m2" 68 | 69 | # Add the units attribute 70 | ds_out["time"].attrs["units"] = "days since 2000-07-24 12:00:00" 71 | 72 | # Save the new dataset to a NetCDF file 73 | ds_out.to_netcdf(output_file) 74 | 75 | print(f"Output file created at: {output_file}") 76 | 77 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GOAMAZON_double_to_DPxx_ARMCMBE.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/ARM_obs/maoarmbecldradM1.c1.20150101.003000.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON_doublepulse.obs.ARMCMBE.dpxx_format.nc" 8 | time_offset = 168.5 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["time"].values 26 | # convert time from seconds to days 27 | #ds_out["time"] = xr.DataArray(time_data/86400. - time_offset, dims=["time"]) 28 | 29 | # Convert time from seconds to days 30 | time_days = time_data / 86400.0 - time_offset 31 | 32 | # Filter for time values between day 0 and 0.5 33 | time_filtered = (time_days >= 0.0) & (time_days <= 0.5) 34 | 35 | # Apply filtering to input dataset 36 | ds_in = ds_in.isel(time=time_filtered) 37 | 38 | # Assign filtered time to output dataset 39 | ds_out["time"] = xr.DataArray(time_days[time_filtered], dims=["time"]) 40 | 41 | 42 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 
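# (Explanatory note: the boolean time mask built above is applied with
# ds_in.isel(time=...) so that every data variable is trimmed consistently
# with the filtered time axis; only the first half day after time_offset is
# retained.)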
43 | 44 | z_data = ds_in["height"].values[::-1] # just a dummy variable 45 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 46 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 47 | 48 | # Define lists for variables 49 | three_d_vars = [ 50 | ("cld_frac", "cldfrac_tot_for_analysis_horiz_avg", 1.0/100.0), 51 | ] 52 | 53 | # Process 3D variables 54 | 55 | for var_in, var_out, factor in three_d_vars: 56 | if var_in in ds_in: 57 | data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 58 | print(np.shape(data_val)) 59 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 60 | else: 61 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 62 | 63 | # Clip all variables in ds_out to ensure no values are below zero 64 | for var_name in ds_out.data_vars: 65 | da = ds_out[var_name] 66 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 67 | ds_out[var_name] = da.clip(min=0) 68 | 69 | # Copy attributes from the input dataset to the output dataset 70 | ds_out.attrs = ds_in.attrs 71 | 72 | # Add the units attribute 73 | ds_out["time"].attrs["units"] = "days since 2015-08-26 12:00:00" 74 | 75 | # Save the new dataset to a NetCDF file 76 | ds_out.to_netcdf(output_file) 77 | 78 | print(f"Output file created at: {output_file}") 79 | 80 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GOAMAZON_single_to_DPxx_ARMCMBE.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/ARM_obs/maoarmbecldradM1.c1.20140101.003000.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON_singlepulse.obs.ARMCMBE.dpxx_format.nc" 8 | time_offset = 278.5 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["time"].values 26 | # convert time from seconds to days 27 | #ds_out["time"] = xr.DataArray(time_data/86400. - time_offset, dims=["time"]) 28 | 29 | # Convert time from seconds to days 30 | time_days = time_data / 86400.0 - time_offset 31 | 32 | # Filter for time values between day 0 and 0.5 33 | time_filtered = (time_days >= 0.0) & (time_days <= 0.5) 34 | 35 | # Apply filtering to input dataset 36 | ds_in = ds_in.isel(time=time_filtered) 37 | 38 | # Assign filtered time to output dataset 39 | ds_out["time"] = xr.DataArray(time_days[time_filtered], dims=["time"]) 40 | 41 | 42 | # This file will only have 1d data, make up vertical coordinates to satisfy diagnostics package requirements. 
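# (Explanatory note: the 1.0/100. factor in three_d_vars below converts
# cld_frac from percent, as stored in the ARMBE file, to the 0-1 fraction
# used for cldfrac_tot_for_analysis_horiz_avg by the other converters in
# this directory.)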
43 | 44 | z_data = ds_in["height"].values[::-1] # just a dummy variable 45 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 46 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 47 | 48 | # Define lists for variables 49 | three_d_vars = [ 50 | ("cld_frac", "cldfrac_tot_for_analysis_horiz_avg", 1.0/100.), 51 | ] 52 | 53 | # Process 3D variables 54 | 55 | for var_in, var_out, factor in three_d_vars: 56 | if var_in in ds_in: 57 | data_val = np.squeeze(ds_in[var_in].values)[:,::-1] 58 | print(np.shape(data_val)) 59 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 60 | else: 61 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 62 | 63 | # Clip all variables in ds_out to ensure no values are below zero 64 | for var_name in ds_out.data_vars: 65 | da = ds_out[var_name] 66 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 67 | ds_out[var_name] = da.clip(min=0) 68 | 69 | # Copy attributes from the input dataset to the output dataset 70 | ds_out.attrs = ds_in.attrs 71 | 72 | # Add the units attribute 73 | ds_out["time"].attrs["units"] = "days since 2014-10-05 12:00:00" 74 | 75 | # Save the new dataset to a NetCDF file 76 | ds_out.to_netcdf(output_file) 77 | 78 | print(f"Output file created at: {output_file}") 79 | 80 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/DYNAMO_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/cfs/cdirs/e3sm/inputdata/atm/cam/scam/iop/DYNAMO_amie_iopfile_4scam.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/DYNAMO_amie.varanal.various.dpxx_format.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["tsec"].values 26 | time_data = (time_data-time_data[0])/86400. 27 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 28 | 29 | p_data = ds_in["lev"].values/100. 30 | lev_mask = p_data < 1000 31 | 32 | lev_selected = p_data[lev_mask] 33 | p_mid_obs = np.tile(lev_selected, (len(ds_out["time"]), 1)) 34 | ds_out["p_mid_obs"] = xr.DataArray(p_mid_obs, dims=["time", "lev"]) 35 | 36 | # 3. 
Define lists for 3D and 2D variables 37 | three_d_vars = [ 38 | ("q", "qv_horiz_avg", 1.0), 39 | ("u", "U_horiz_avg", 1.0), 40 | ("v", "V_horiz_avg", 1.0), 41 | ("T", "T_mid_horiz_avg", 1.0), 42 | ] 43 | 44 | two_d_vars = [ 45 | ("shflx", "surf_sens_flux_horiz_avg", 1.0), 46 | ("lhflx", "surf_evap_horiz_avg", 4e-7), 47 | ("lhflx", "surface_upward_latent_heat_flux_horiz_avg", 1.0), 48 | ("LWP", "LiqWaterPath_horiz_avg", 0.01*1000.0), 49 | ("prec_srf", "precip_total_surf_mass_flux_horiz_avg", 2.77778e-7), 50 | ("lw_net_toa", "LW_flux_up_at_model_top_horiz_avg", 1.0), 51 | ("wspd_srf","wind_speed_10m_horiz_avg", 1.0), 52 | ] 53 | 54 | # ("", "", ), 55 | 56 | # Process 3D variables 57 | 58 | for var_in, var_out, factor in three_d_vars: 59 | if var_in in ds_in: 60 | data_val = np.squeeze(ds_in[var_in].values) 61 | data_val = data_val[:, lev_mask] # keep only levels below 1000 hPa 62 | print(np.shape(data_val)) 63 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 64 | else: 65 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 66 | 67 | # Process 2D variables 68 | for var_in, var_out, factor in two_d_vars: 69 | if var_in in ds_in: 70 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 71 | else: 72 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 73 | 74 | # Copy attributes from the input dataset to the output dataset 75 | ds_out.attrs = ds_in.attrs 76 | 77 | # Add the units attribute 78 | ds_out["time"].attrs["units"] = "days since 2011-10-02 00:00:00" 79 | 80 | # Save the new dataset to a NetCDF file 81 | ds_out.to_netcdf(output_file) 82 | 83 | print(f"Output file created at: {output_file}") 84 | 85 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/yaml_file_example/scream_output_avg_1hour.yaml: -------------------------------------------------------------------------------- 1 | %YAML 1.1 2 | --- 3 | averaging_type: average 4 | max_snapshots_per_file: 100000 5 | filename_prefix: ${CASE}.fullfield 6 | fields: 7 | physics_pg2: 8 | field_names: 9 | - z_mid 10 | - p_mid 11 | # HOMME 12 | - ps 13 | - omega 14 | # SHOC 15 | - cldfrac_liq 16 | - eddy_diff_mom 17 | - sgs_buoy_flux 18 | - tke 19 | - inv_qc_relvar 20 | - pbl_height 21 | # CLD 22 | - cldfrac_ice 23 | - cldfrac_tot 24 | - cldfrac_tot_for_analysis 25 | # P3 26 | - bm 27 | - nc 28 | - ni 29 | - nr 30 | - qi 31 | - qm 32 | - qr 33 | - T_prev_micro_step 34 | - qv_prev_micro_step 35 | - eff_radius_qc 36 | - eff_radius_qi 37 | - eff_radius_qr 38 | - micro_liq_ice_exchange 39 | - micro_vap_ice_exchange 40 | - micro_vap_liq_exchange 41 | - precip_liq_surf_mass_flux 42 | - precip_ice_surf_mass_flux 43 | - precip_total_surf_mass_flux 44 | # SHOC + HOMME 45 | - U 46 | - V 47 | # SHOC + P3 48 | - qc 49 | - qv 50 | # SHOC + P3 + RRTMGP + HOMME 51 | - T_mid 52 | # RRTMGP 53 | - rad_heating_pdel 54 | - sfc_flux_lw_dn 55 | - sfc_flux_sw_net 56 | - ShortwaveCloudForcing 57 | - LongwaveCloudForcing 58 | - LiqWaterPath 59 | - IceWaterPath 60 | - RainWaterPath 61 | - RimeWaterPath 62 | - VapWaterPath 63 | - ZonalVapFlux 64 | - MeridionalVapFlux 65 | - SW_flux_up_at_model_top 66 | - SW_flux_dn_at_model_top 67 | - LW_flux_up_at_model_top 68 | - SW_flux_dn_at_model_bot 69 | - SW_flux_up_at_model_bot 70 | - LW_flux_dn_at_model_bot 71 | - LW_flux_up_at_model_bot 72 | - SW_clrsky_flux_up_at_model_top 73 | - LW_clrsky_flux_up_at_model_top 74 | - SW_clrsky_flux_dn_at_model_bot 75 | - 
SW_clrsky_flux_up_at_model_bot 76 | - LW_clrsky_flux_dn_at_model_bot 77 | - LW_clrsky_flux_up_at_model_bot 78 | # diag 79 | - PotentialTemperature 80 | - LiqPotentialTemperature 81 | - DryStaticEnergy 82 | - RelativeHumidity 83 | - surface_upward_latent_heat_flux 84 | # coupler 85 | - surf_radiative_T 86 | - T_2m 87 | - qv_2m 88 | - wind_speed_10m 89 | - surf_mom_flux 90 | - surf_sens_flux 91 | - surf_evap 92 | # Process rates 93 | - shoc_T_mid_tend 94 | - p3_T_mid_tend 95 | - rrtmgp_T_mid_tend 96 | - shoc_qv_tend 97 | - p3_qv_tend 98 | - homme_T_mid_tend 99 | - homme_qv_tend 100 | # Select level output 101 | - PotentialTemperature_at_700hPa 102 | - PotentialTemperature_at_1000hPa 103 | - omega_at_500hPa 104 | - RelativeHumidity_at_700hPa 105 | - SeaLevelPressure 106 | # ADD SHOC DIAGS (must set ./atmchange extra_shoc_diags=true) 107 | # - shoc_mix_horiz_avg 108 | # - wthl_sec_horiz_avg 109 | # - wqw_sec_horiz_avg 110 | # - qw_sec_horiz_avg 111 | # - thl_sec_horiz_avg 112 | # - uw_sec_horiz_avg 113 | # - vw_sec_horiz_avg 114 | # - w3_horiz_avg 115 | # - w_variance_horiz_avg 116 | output_control: 117 | frequency: 1 118 | frequency_units: nhours 119 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/yaml_file_example/legacy_files_April9_2025/scream_output_avg_1hour.yaml: -------------------------------------------------------------------------------- 1 | %YAML 1.1 2 | --- 3 | Averaging Type: Average 4 | Max Snapshots Per File: 100000 5 | filename_prefix: ${CASE}.fullfield 6 | Fields: 7 | Physics PG2: 8 | Field Names: 9 | - z_mid 10 | - p_mid 11 | # HOMME 12 | - ps 13 | - omega 14 | # SHOC 15 | - cldfrac_liq 16 | - eddy_diff_mom 17 | - sgs_buoy_flux 18 | - tke 19 | - inv_qc_relvar 20 | - pbl_height 21 | # CLD 22 | - cldfrac_ice 23 | - cldfrac_tot 24 | - cldfrac_tot_for_analysis 25 | # P3 26 | - bm 27 | - nc 28 | - ni 29 | - nr 30 | - qi 31 | - qm 32 | - qr 33 | - T_prev_micro_step 34 | - qv_prev_micro_step 35 | - eff_radius_qc 36 | - eff_radius_qi 37 | - eff_radius_qr 38 | - micro_liq_ice_exchange 39 | - micro_vap_ice_exchange 40 | - micro_vap_liq_exchange 41 | - precip_liq_surf_mass_flux 42 | - precip_ice_surf_mass_flux 43 | - precip_total_surf_mass_flux 44 | # SHOC + HOMME 45 | - U 46 | - V 47 | # SHOC + P3 48 | - qc 49 | - qv 50 | # SHOC + P3 + RRTMGP + HOMME 51 | - T_mid 52 | # RRTMGP 53 | - rad_heating_pdel 54 | - sfc_flux_lw_dn 55 | - sfc_flux_sw_net 56 | - ShortwaveCloudForcing 57 | - LongwaveCloudForcing 58 | - LiqWaterPath 59 | - IceWaterPath 60 | - RainWaterPath 61 | - RimeWaterPath 62 | - VapWaterPath 63 | - ZonalVapFlux 64 | - MeridionalVapFlux 65 | - SW_flux_up_at_model_top 66 | - SW_flux_dn_at_model_top 67 | - LW_flux_up_at_model_top 68 | - SW_flux_dn_at_model_bot 69 | - SW_flux_up_at_model_bot 70 | - LW_flux_dn_at_model_bot 71 | - LW_flux_up_at_model_bot 72 | - SW_clrsky_flux_up_at_model_top 73 | - LW_clrsky_flux_up_at_model_top 74 | - SW_clrsky_flux_dn_at_model_bot 75 | - SW_clrsky_flux_up_at_model_bot 76 | - LW_clrsky_flux_dn_at_model_bot 77 | - LW_clrsky_flux_up_at_model_bot 78 | # diag 79 | - PotentialTemperature 80 | - LiqPotentialTemperature 81 | - DryStaticEnergy 82 | - RelativeHumidity 83 | - surface_upward_latent_heat_flux 84 | # coupler 85 | - surf_radiative_T 86 | - T_2m 87 | - qv_2m 88 | - wind_speed_10m 89 | - surf_mom_flux 90 | - surf_sens_flux 91 | - surf_evap 92 | # Process rates 93 | - shoc_T_mid_tend 94 | - p3_T_mid_tend 95 | - rrtmgp_T_mid_tend 96 | - shoc_qv_tend 97 | - p3_qv_tend 98 | - homme_T_mid_tend 
99 | - homme_qv_tend 100 | # Select level output 101 | - PotentialTemperature_at_700hPa 102 | - PotentialTemperature_at_1000hPa 103 | - omega_at_500hPa 104 | - RelativeHumidity_at_700hPa 105 | - SeaLevelPressure 106 | # ADD SHOC DIAGS (must set ./atmchange extra_shoc_diags=true) 107 | # - shoc_mix_horiz_avg 108 | # - wthl_sec_horiz_avg 109 | # - wqw_sec_horiz_avg 110 | # - qw_sec_horiz_avg 111 | # - thl_sec_horiz_avg 112 | # - uw_sec_horiz_avg 113 | # - vw_sec_horiz_avg 114 | # - w3_horiz_avg 115 | # - w_variance_horiz_avg 116 | output_control: 117 | Frequency: 1 118 | frequency_units: nhours 119 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/LES_to_DPxx_format/RICO_intercomparison_profiles_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Output path 6 | outpath = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/" 7 | 8 | # Define all cases in a list of dictionaries 9 | cases = [ 10 | { 11 | "input_file": "/global/homes/b/bogensch/dp_scream_paper/first_submission_scripts/rico_data/RICO_les_MF_ensavg.nc", 12 | "output_file": outpath + "RICO.les.intercomparison_ensavg.profiles.dpxx_format.nc", 13 | "time_offset": 86400.0, 14 | }, 15 | ] 16 | 17 | # Define variable mappings 18 | three_d_vars = [ 19 | ("cld_frac", "cldfrac_tot_for_analysis_horiz_avg", 1.0), 20 | ("ql", "qc_horiz_avg", 0.001), 21 | ("qr", "qr_horiz_avg", 0.001), 22 | ("thetal", "LiqPotentialTemperature_horiz_avg", 1.0), 23 | ("qt", "qv_horiz_avg", 1.0 / 1000.0), 24 | ("tot_wqt", "wqw_sec_horiz_avg", 1.0), 25 | ] 26 | 27 | #two_d_vars = [ 28 | # ("lwp", "LiqWaterPath_horiz_avg", 1.0 / 1000.0), 29 | # ("rwp", "RainWaterPath_horiz_avg", 1.0 / 1000.0), 30 | # ("prec_srf", "precip_total_surf_mass_flux_horiz_avg", 1.15741e-8), 31 | #] 32 | 33 | # Process each case 34 | for case in cases: 35 | input_file = case["input_file"] 36 | output_file = case["output_file"] 37 | time_offset = case["time_offset"] 38 | 39 | # Ensure output directory exists 40 | output_dir = os.path.dirname(output_file) 41 | os.makedirs(output_dir, exist_ok=True) 42 | 43 | # Open the input file 44 | ds_in = xr.open_dataset(input_file) 45 | ds_out = xr.Dataset() 46 | 47 | # Process time 48 | time_data = ds_in["time"].values 49 | ds_out["time"] = xr.DataArray(time_data/time_offset, dims=["time"]) 50 | 51 | # Process z and p 52 | z_data_flipped = ds_in["height"].values[::-1] 53 | z_mid_les = np.tile(z_data_flipped, (len(ds_out["time"]), 1)) 54 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid_les, dims=["time", "lev"]) 55 | 56 | # Process 3D variables 57 | for var_in, var_out, factor in three_d_vars: 58 | if var_in in ds_in: 59 | if var_in == 'qt': 60 | data_flipped1 = ds_in['qt'].values[:, ::-1] 61 | data_flipped2 = ds_in['ql'].values[:, ::-1] 62 | ds_out[var_out] = (xr.DataArray(data_flipped1, dims=["time", "lev"]) - 63 | xr.DataArray(data_flipped2, dims=["time", "lev"])) * factor 64 | else: 65 | data_flipped = ds_in[var_in].values[:, ::-1] 66 | ds_out[var_out] = xr.DataArray(data_flipped, dims=["time", "lev"]) * factor 67 | else: 68 | print(f"Warning: Variable '{var_in}' not found in {input_file}. 
Skipping.") 69 | 70 | # # Process 2D variables 71 | # for var_in, var_out, factor in two_d_vars: 72 | # if var_in in ds_in: 73 | # ds_out[var_out] = xr.DataArray(ds_in[var_in].values, dims=["time"]) * factor 74 | # else: 75 | # print(f"Warning: Variable '{var_in}' not found in {input_file}. Skipping.") 76 | 77 | # Copy attributes and save 78 | ds_out.attrs = ds_in.attrs 79 | ds_out.to_netcdf(output_file) 80 | print(f"Output file created at: {output_file}") 81 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/SGPcont_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/cfs/cdirs/e3sm/inputdata/atm/cam/scam/iop/SGP_continuous_iopfile_4scam.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/SGP_continous.varanal.various.dpxx_format.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["tsec"].values 26 | time_data = (time_data-time_data[0])/86400. 27 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 28 | 29 | lev_vals = ds_in["lev"].values / 100.0 # convert to hPa 30 | lev_mask = lev_vals < 1000 31 | lev_selected = lev_vals[lev_mask] 32 | 33 | p_mid_obs = np.tile(lev_selected, (len(ds_out["time"]), 1)) 34 | ds_out["p_mid_obs"] = xr.DataArray(p_mid_obs, dims=["time", "lev"]) 35 | 36 | # 3. Define lists for 3D and 2D variables 37 | three_d_vars = [ 38 | ("q", "qv_horiz_avg", 1.0), 39 | ("u", "U_horiz_avg", 1.0), 40 | ("v", "V_horiz_avg", 1.0), 41 | ("T", "T_mid_horiz_avg", 1.0), 42 | ] 43 | 44 | two_d_vars = [ 45 | ("prec_srf", "precip_total_surf_mass_flux_horiz_avg",2.77778e-7), 46 | # ("PW", "VapWaterPath_horiz_avg",0.01*1000.0), 47 | ("shflx", "surf_sens_flux_horiz_avg", 1.0), 48 | ("lhflx", "surf_evap_horiz_avg", 4e-7), 49 | ("lhflx", "surf_upward_latent_heat_flux_horiz_avg", 1.0), 50 | # ("LWP", "LiqWaterPath_horiz_avg", 0.01*1000.0), 51 | ("Ps", "ps_horiz_avg", 1.0), 52 | ("Tg", "surf_radiative_T_horiz_avg", 1.0), 53 | ("Tsair", "T_2m_horiz_avg", 1.0), 54 | ] 55 | 56 | # ("", "", ), 57 | 58 | # Process 3D variables 59 | 60 | for var_in, var_out, factor in three_d_vars: 61 | if var_in in ds_in: 62 | data_val = np.squeeze(ds_in[var_in].values) 63 | data_val = data_val[:, lev_mask] # apply the lev > 1000 filter 64 | print(np.shape(data_val)) 65 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 66 | else: 67 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 68 | 69 | # Process 2D variables 70 | for var_in, var_out, factor in two_d_vars: 71 | if var_in in ds_in: 72 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 73 | else: 74 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 75 | 76 | # Copy attributes from the input dataset to the output dataset 77 | ds_out.attrs = ds_in.attrs 78 | 79 | # Add the units attribute 80 | ds_out["time"].attrs["units"] = "days since 2004-01-01 00:00:00" 81 | 82 | # Save the new dataset to a NetCDF file 83 | ds_out.to_netcdf(output_file) 84 | 85 | print(f"Output file created at: {output_file}") 86 | 87 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/COMBLE_to_DPxx_maclwp.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import pandas as pd 4 | import os 5 | import csv 6 | 7 | # Define input and output file paths 8 | input_file = "/global/homes/b/bogensch/THREAD/COMBLE_obs/maclwp_dat.csv" 9 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/COMBLE.obs.maclwp.dpxx_format.nc" 10 | 11 | # Ensure the directory for the output file exists 12 | output_dir = os.path.dirname(output_file) 13 | if not os.path.exists(output_dir): 14 | os.makedirs(output_dir) 15 | print(f"Directory '{output_dir}' created.") 16 | else: 17 | print(f"Directory '{output_dir}' already exists.") 18 | 19 | # ---------------------------------------------------------------------- 20 | # Load and prepare input data 21 | # ---------------------------------------------------------------------- 22 | data = pd.read_csv(input_file) 23 | 24 | maclwp_time = data['time'].values / 3600.0 + 2.0 # hours 25 | maclwp = data['lwp_bu'].values 26 | 27 | # Sort by time 28 | sorted_indices = np.argsort(maclwp_time) 29 | maclwp_time = maclwp_time[sorted_indices] 30 | maclwp = maclwp[sorted_indices] 31 | 32 | # Convert hours -> days 33 | maclwp_time = maclwp_time / 24.0 34 | 35 | print(np.shape(maclwp)) 36 | 37 | # ---------------------------------------------------------------------- 38 | # Build output dataset 39 | # ---------------------------------------------------------------------- 40 | ds_out = xr.Dataset() 41 | 42 | # 1) Prepend a new time=0 without overwriting existing first value 43 | # (shift all original times to the next index) 44 | time_with_leading_zero = np.concatenate(([0.0], maclwp_time.astype(float))) 45 | ds_out["time"] = xr.DataArray(time_with_leading_zero, dims=["time"]) 46 | ds_out["time"].attrs["units"] = "days since 2020-03-12 22:00:00" 47 | 48 | # 2) Minimal vertical info to satisfy diagnostics (kept numeric) 49 | z_data = [10.0, 20.0] 50 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 51 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) 52 | 53 | # 3) Define and populate 1-D time series variables 54 | two_d_vars = [ 55 | ("dummy", "LiqWaterPath_horiz_avg", 1.0), 56 | ] 57 | 58 | for var_in, var_out, factor in two_d_vars: 59 | # Allocate with the NEW time length, set first value NaN, then copy original data 60 | arr = np.empty(len(ds_out["time"]), dtype=float) 61 | arr[:] = np.nan # initialize to NaN 62 | arr[1:] = np.squeeze(maclwp) * factor # shift original data by one index 63 | ds_out[var_out] = xr.DataArray(arr, dims=["time"]) 64 | 65 | # 4) Clip numeric variables to be >= 0 (NaNs are preserved) 66 | for var_name in ds_out.data_vars: 67 | da = ds_out[var_name] 68 | if np.issubdtype(da.dtype, np.number): 69 | ds_out[var_name] = da.clip(min=0) 70 | 71 | # 5) Units/attrs 72 | ds_out["LiqWaterPath_horiz_avg"].attrs["units"] = "kg/m2" 73 | 74 | # 6) (Safety) Ensure all 1-D time series variables have NaN at 
the first (new) time 75 | # This covers any future additions like more *_horiz_avg time series. 76 | for var_name, da in ds_out.data_vars.items(): 77 | if ("time",) == da.dims: # only 1-D time series 78 | ds_out[var_name][0] = np.nan 79 | 80 | # Save the new dataset to a NetCDF file 81 | ds_out.to_netcdf(output_file) 82 | 83 | print(f"Output file created at: {output_file}") 84 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/GOAMAZON_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/global/cfs/cdirs/e3sm/inputdata/atm/cam/scam/iop/GOAMAZON_iopfile_4scam.nc" 7 | output_file = "/pscratch/sd/b/bogensch/dp_screamxx_conv/obs_data/OBS_GOAMAZON_full.dpxx.nc" 8 | time_offset = 0.0 9 | 10 | # Ensure the directory for the output file exists 11 | output_dir = os.path.dirname(output_file) 12 | if not os.path.exists(output_dir): 13 | os.makedirs(output_dir) 14 | print(f"Directory '{output_dir}' created.") 15 | else: 16 | print(f"Directory '{output_dir}' already exists.") 17 | 18 | # Open the input file 19 | ds_in = xr.open_dataset(input_file,decode_times=False) 20 | 21 | # Create a new dataset for the output 22 | ds_out = xr.Dataset() 23 | 24 | # 1. Transfer and adjust the "time" variable 25 | time_data = ds_in["tsec"].values 26 | time_data = (time_data-time_data[0])/86400. 27 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 28 | 29 | # 2. Keep only levels with p < 1000 hPa 30 | lev_vals = ds_in["lev"].values / 100.0 # convert to hPa 31 | lev_mask = lev_vals < 1000 32 | lev_selected = lev_vals[lev_mask] 33 | 34 | # Apply the filtered lev to p_mid_obs 35 | p_mid_obs = np.tile(lev_selected, (len(ds_out["time"]), 1)) 36 | ds_out["p_mid_obs"] = xr.DataArray(p_mid_obs, dims=["time", "lev"]) 37 | 38 | # Update the lev coordinate in the output 39 | ds_out = ds_out.assign_coords(lev=("lev", lev_selected)) 40 | 41 | # 3. Define lists for 3D and 2D variables 42 | three_d_vars = [ 43 | ("q", "qv_horiz_avg", 1.0), 44 | ("u", "U_horiz_avg", 1.0), 45 | ("v", "V_horiz_avg", 1.0), 46 | ("T", "T_mid_horiz_avg", 1.0), 47 | ] 48 | 49 | two_d_vars = [ 50 | ("prec_srf", "precip_total_surf_mass_flux_horiz_avg",2.77778e-7), 51 | ("PW", "VapWaterPath_horiz_avg",0.01*1000.0), 52 | ("shflx", "surf_sens_flux_horiz_avg", 1.0), 53 | ("lhflx", "surf_evap_horiz_avg", 4e-7), 54 | ("lhflx", "surface_upward_latent_heat_flux_horiz_avg", 1.0), 55 | ("LWP", "LiqWaterPath_horiz_avg", 0.01*1000.0), 56 | ("Ps", "ps_horiz_avg", 1.0), 57 | ("Tg", "surf_radiative_T_horiz_avg", 1.0), 58 | ("Tsair", "T_2m_horiz_avg", 1.0), 59 | ] 60 | 61 | # ("", "", ), 62 | 63 | # Process 3D variables (apply lev filter) 64 | for var_in, var_out, factor in three_d_vars: 65 | if var_in in ds_in: 66 | data_val = np.squeeze(ds_in[var_in].values) # shape: [time, lev] 67 | data_val = data_val[:, lev_mask] # keep only levels with p < 1000 hPa 68 | ds_out[var_out] = xr.DataArray(data_val, dims=["time", "lev"]) * factor 69 | else: 70 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.") 71 | 72 | # Process 2D variables 73 | for var_in, var_out, factor in two_d_vars: 74 | if var_in in ds_in: 75 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 76 | else: 77 | print(f"Warning: Variable '{var_in}' not found in input dataset. Skipping.")
Skipping.") 78 | 79 | # Copy attributes from the input dataset to the output dataset 80 | ds_out.attrs = ds_in.attrs 81 | 82 | # Add the units attribute 83 | ds_out["time"].attrs["units"] = "days since 2014-01-01 00:00:00" 84 | 85 | # Save the new dataset to a NetCDF file 86 | ds_out.to_netcdf(output_file) 87 | 88 | print(f"Output file created at: {output_file}") 89 | 90 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/OBS_to_DPxx_format/LAFE_to_DPxx_sounding.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Define input and output file paths 6 | input_file = "/pscratch/sd/b/bogensch/E3SM_simulations/ARMdata/sgpinterpolatedsondeC1.c1.20170823_24.000030.nc" 7 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/LAFE.obs.sounding.dpxx_format.nc" 8 | 9 | # Ensure the directory for the output file exists 10 | output_dir = os.path.dirname(output_file) 11 | if not os.path.exists(output_dir): 12 | os.makedirs(output_dir) 13 | print(f"Directory '{output_dir}' created.") 14 | else: 15 | print(f"Directory '{output_dir}' already exists.") 16 | 17 | # Open the input file 18 | ds_in = xr.open_dataset(input_file,decode_times=False) 19 | 20 | # Create a new dataset for the output 21 | ds_out = xr.Dataset() 22 | 23 | # 1. Transfer and adjust the "time" variable 24 | time_data = ds_in["time"].values 25 | ds_out["time"] = xr.DataArray((time_data - 41400.)/86400., dims=["time"]) 26 | 27 | z_data = ds_in["height"].values[::-1] # just a dummy variable 28 | z_mid = np.tile(z_data, (len(ds_out["time"]), 1)) 29 | print(np.shape(z_mid)) 30 | print(z_mid[:,-1]) 31 | z_mid = z_mid - z_mid[:,-1][:, None] 32 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid, dims=["time", "lev"]) * 1000. # convert to m 33 | 34 | # Define lists for variables 35 | three_d_vars = [ 36 | ("temp", "T_mid_horiz_avg", 1.0), 37 | ("vap_pres", "qv_horiz_avg", 1.0), 38 | ("temp", "PotentialTemperature_horiz_avg", 1.0) 39 | ] 40 | 41 | # Process 3D variables 42 | 43 | # Constants for theta calculation 44 | Rd = 287.05 # J/kg/K 45 | Cp = 1004.6 # J/kg/K 46 | 47 | for var_in, var_out, factor in three_d_vars: 48 | if var_in in ds_in: 49 | data_val = np.squeeze(ds_in[var_in].values)[:, ::-1] # reverse vertical 50 | 51 | if var_out == "T_mid_horiz_avg": 52 | temp_K = data_val + 273.15 53 | ds_out[var_out] = xr.DataArray(temp_K, dims=["time", "lev"]) 54 | 55 | elif var_out == "qv_horiz_avg": 56 | # Read vap_pres and pressure (reverse vertical to match) 57 | vap_pres = np.squeeze(ds_in["vap_pres"].values)[:, ::-1] * 10.0 # hPa to Pa 58 | pressure = np.squeeze(ds_in["bar_pres"].values)[:, ::-1] * 10.0 # hPa to Pa 59 | qv = 621.97 * (vap_pres / (pressure - vap_pres)) 60 | ds_out[var_out] = xr.DataArray(qv, dims=["time", "lev"])/1000. 61 | 62 | elif var_out == "PotentialTemperature_horiz_avg": 63 | # Reuse temp_K and pressure from earlier 64 | temp_K = np.squeeze(ds_in["temp"].values)[:, ::-1] + 273.15 65 | pressure = np.squeeze(ds_in["bar_pres"].values)[:, ::-1] * 10.0 # hPa to Pa 66 | theta = temp_K * (1000.0 / pressure) ** (Rd / Cp) 67 | ds_out[var_out] = xr.DataArray(theta, dims=["time", "lev"]) 68 | 69 | else: 70 | print(f"Warning: Variable '{var_in}' not found in input dataset. 
Skipping.") 71 | 72 | 73 | # Clip all variables in ds_out to ensure no values are below zero 74 | for var_name in ds_out.data_vars: 75 | da = ds_out[var_name] 76 | if np.issubdtype(da.dtype, np.number): # Only apply to numeric types 77 | ds_out[var_name] = da.clip(min=0) 78 | 79 | # Copy attributes from the input dataset to the output dataset 80 | ds_out.attrs = ds_in.attrs 81 | 82 | # Add the units attribute 83 | ds_out["time"].attrs["units"] = "days since 2017-08-23 11:30:00" 84 | 85 | # Save the new dataset to a NetCDF file 86 | ds_out.to_netcdf(output_file) 87 | 88 | print(f"Output file created at: {output_file}") 89 | 90 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/regrid_utilities/generate_dpxx_horiz_avg_weights.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 4 | NOTE: This script is now obsolete as there is a much EASIER way to get horizontally averaged 5 | output in DPxx. That is to append any variable name in your YAML file with "_horiz_avg". 6 | Will retain this script for legacy purposes and because there could be some edge use cases 7 | where this script is still handy. 8 | 9 | Will generate a mapping file to produce horizontally averaged output 10 | during runtime for DPxx simulations. NOTE that these simple mapping files 11 | will ONLY work for online DPxx remapping and not offline. The eamxx online 12 | horizontal mapping only needs S (sparse matrix for weights), col, row, which is 13 | what the script does. The map file used for offline mapping (.e.g., nco) requires 14 | other "nominal" variables (frac_a/b, area_a/b, yc_a/b, xc_a/b, yv_a/b, xv_a/b, 15 | src_grid_dims, dst_grid_dims). These nominal variables require other procedures 16 | to generate (e.g., by a complex art of ESMF-NCL), but some field values are meaningless 17 | in DPxx cases and are indeed not required by online mapping. 18 | 19 | User only needs to supply basic geometry data for their DPxx simulation. This 20 | needs to be exactly the same as you've specified in your run script for 21 | num_ne_x, num_ne_y, domain_size_x, domain_size_y. 
22 | 23 | Once you have generated your mapping file simply point to it in your YAML file like so: 24 | horiz_remap_file: /path/to/your/file/mapping_dpxx_x200000m_y200000m_nex20_ney20_to_1x1.20241024.nc 25 | 26 | Script authors: Peter Bogenschutz (bogenschutz1@llnl.gov) 27 | Jishi Zhang (zhang73@llnl.gov) 28 | """ 29 | 30 | import netCDF4 as nc4 31 | import numpy as np 32 | import os 33 | from datetime import datetime 34 | 35 | ####################################################################### 36 | ###### Start user input 37 | 38 | # These geometry parameters should match what you plan to use in your simulation 39 | num_ne_x=166 40 | num_ne_y=12 41 | domain_size_x=6000000 42 | domain_size_y=432000 43 | 44 | # Supply path where mapping file will be placed 45 | outputpath="/global/homes/b/bogensch/dp_scream_scripts_xx/remap_files/" 46 | 47 | ###### End user input 48 | ####################################################################### 49 | 50 | # Figure out number of physics columns 51 | phys_col=num_ne_x*num_ne_y*4 52 | 53 | # Compute the physics resolution 54 | dx=float(domain_size_x)/(num_ne_x*2.0) 55 | dy=float(domain_size_y)/(num_ne_y*2.0) 56 | 57 | # Compute the area of each column 58 | area_col=dx*dy 59 | area_dom=float(domain_size_x)*float(domain_size_y) 60 | 61 | S_in=np.float64(area_col/area_dom) 62 | col_in=np.arange(phys_col)+1 63 | row_in=np.ones(phys_col) 64 | 65 | ### Now make the output file 66 | 67 | # what is the current date? 68 | current_date = datetime.now() 69 | formatted_date = current_date.strftime("%Y%m%d") 70 | 71 | # Make output string 72 | filename="mapping_dpxx_x"+str(domain_size_x)+"m_y"+str(domain_size_y)+\ 73 | "m_nex"+str(num_ne_x)+"_ney"+str(num_ne_y)+"_to_1x1."+formatted_date+".nc" 74 | 75 | fullfile=outputpath+filename 76 | 77 | # check to see if outputfile already exists, if so overwrite 78 | ishere=os.path.isfile(fullfile) 79 | if ishere: 80 | os.system('rm '+fullfile) 81 | 82 | f=nc4.Dataset(fullfile,'w',format='NETCDF4') 83 | f.createDimension('n_s',phys_col) 84 | f.createDimension('n_a',phys_col) 85 | f.createDimension('n_b',1) 86 | 87 | S=f.createVariable('S','f8','n_s') 88 | col=f.createVariable('col','i4','n_s') 89 | row=f.createVariable('row','i4','n_s') 90 | 91 | S[:]=np.full(phys_col, S_in, dtype='float64') 92 | col[:]=col_in 93 | row[:]=row_in 94 | 95 | f.close() 96 | 97 | print("Generated file: ",fullfile) 98 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/yaml_file_example/legacy_files_April9_2025/scream_horiz_avg_output_15min.yaml: -------------------------------------------------------------------------------- 1 | %YAML 1.1 2 | --- 3 | Averaging Type: Average 4 | Max Snapshots Per File: 100000 5 | filename_prefix: ${CASE}.horiz_avg 6 | Fields: 7 | Physics PG2: 8 | Field Names: 9 | - z_mid_horiz_avg 10 | - p_mid_horiz_avg 11 | # HOMME 12 | - ps_horiz_avg 13 | - omega_horiz_avg 14 | # SHOC 15 | - cldfrac_liq_horiz_avg 16 | - eddy_diff_mom_horiz_avg 17 | - sgs_buoy_flux_horiz_avg 18 | - tke_horiz_avg 19 | - inv_qc_relvar_horiz_avg 20 | - pbl_height_horiz_avg 21 | # CLD 22 | - cldfrac_ice_horiz_avg 23 | - cldfrac_tot_horiz_avg 24 | - cldfrac_tot_for_analysis_horiz_avg 25 | # P3 26 | - bm_horiz_avg 27 | - nc_horiz_avg 28 | - ni_horiz_avg 29 | - nr_horiz_avg 30 | - qi_horiz_avg 31 | - qm_horiz_avg 32 | - qr_horiz_avg 33 | - T_prev_micro_step_horiz_avg 34 | - qv_prev_micro_step_horiz_avg 35 | - eff_radius_qc_horiz_avg 36 | - eff_radius_qi_horiz_avg 37 | - eff_radius_qr_horiz_avg 38 | 
- micro_liq_ice_exchange_horiz_avg 39 | - micro_vap_ice_exchange_horiz_avg 40 | - micro_vap_liq_exchange_horiz_avg 41 | - precip_liq_surf_mass_flux_horiz_avg 42 | - precip_ice_surf_mass_flux_horiz_avg 43 | - precip_total_surf_mass_flux_horiz_avg 44 | # SHOC + HOMME 45 | - U_horiz_avg 46 | - V_horiz_avg 47 | # SHOC + P3 48 | - qc_horiz_avg 49 | - qv_horiz_avg 50 | # SHOC + P3 + RRTMGP + HOMME 51 | - T_mid_horiz_avg 52 | # RRTMGP 53 | - rad_heating_pdel_horiz_avg 54 | - sfc_flux_lw_dn_horiz_avg 55 | - sfc_flux_sw_net_horiz_avg 56 | - ShortwaveCloudForcing_horiz_avg 57 | - LongwaveCloudForcing_horiz_avg 58 | - LiqWaterPath_horiz_avg 59 | - IceWaterPath_horiz_avg 60 | - RainWaterPath_horiz_avg 61 | - RimeWaterPath_horiz_avg 62 | - VapWaterPath_horiz_avg 63 | - ZonalVapFlux_horiz_avg 64 | - MeridionalVapFlux_horiz_avg 65 | - SW_flux_up_at_model_top_horiz_avg 66 | - SW_flux_dn_at_model_top_horiz_avg 67 | - LW_flux_up_at_model_top_horiz_avg 68 | - SW_flux_dn_at_model_bot_horiz_avg 69 | - SW_flux_up_at_model_bot_horiz_avg 70 | - LW_flux_dn_at_model_bot_horiz_avg 71 | - LW_flux_up_at_model_bot_horiz_avg 72 | - SW_clrsky_flux_up_at_model_top_horiz_avg 73 | - LW_clrsky_flux_up_at_model_top_horiz_avg 74 | - SW_clrsky_flux_dn_at_model_bot_horiz_avg 75 | - SW_clrsky_flux_up_at_model_bot_horiz_avg 76 | - LW_clrsky_flux_dn_at_model_bot_horiz_avg 77 | - LW_clrsky_flux_up_at_model_bot_horiz_avg 78 | # diag 79 | - PotentialTemperature_horiz_avg 80 | - LiqPotentialTemperature_horiz_avg 81 | - DryStaticEnergy_horiz_avg 82 | - RelativeHumidity_horiz_avg 83 | - surface_upward_latent_heat_flux_horiz_avg 84 | # coupler 85 | - surf_radiative_T_horiz_avg 86 | - T_2m_horiz_avg 87 | - qv_2m_horiz_avg 88 | - wind_speed_10m_horiz_avg 89 | - surf_mom_flux_horiz_avg 90 | - surf_sens_flux_horiz_avg 91 | - surf_evap_horiz_avg 92 | # Process rates 93 | - shoc_T_mid_tend_horiz_avg 94 | - p3_T_mid_tend_horiz_avg 95 | - rrtmgp_T_mid_tend_horiz_avg 96 | - shoc_qv_tend_horiz_avg 97 | - p3_qv_tend_horiz_avg 98 | - homme_T_mid_tend_horiz_avg 99 | - homme_qv_tend_horiz_avg 100 | # Select level output 101 | - PotentialTemperature_at_700hPa_horiz_avg 102 | - PotentialTemperature_at_1000hPa_horiz_avg 103 | - omega_at_500hPa_horiz_avg 104 | - RelativeHumidity_at_700hPa_horiz_avg 105 | - SeaLevelPressure_horiz_avg 106 | # ADD SHOC DIAGS (must set ./atmchange extra_shoc_diags=true) 107 | # - shoc_mix_horiz_avg 108 | # - wthl_sec_horiz_avg 109 | # - wqw_sec_horiz_avg 110 | # - qw_sec_horiz_avg 111 | # - thl_sec_horiz_avg 112 | # - uw_sec_horiz_avg 113 | # - vw_sec_horiz_avg 114 | # - w3_horiz_avg 115 | # - w_variance_horiz_avg 116 | output_control: 117 | Frequency: 15 118 | frequency_units: nmins 119 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/yaml_file_example/scream_horiz_avg_output_15min.yaml: -------------------------------------------------------------------------------- 1 | %YAML 1.1 2 | --- 3 | averaging_type: average 4 | max_snapshots_per_file: 100000 5 | filename_prefix: ${CASE}.horiz_avg 6 | fields: 7 | physics_pg2: 8 | field_names: 9 | - z_mid_horiz_avg 10 | - p_mid_horiz_avg 11 | # HOMME 12 | - ps_horiz_avg 13 | - omega_horiz_avg 14 | # SHOC 15 | - cldfrac_liq_horiz_avg 16 | - eddy_diff_mom_horiz_avg 17 | - sgs_buoy_flux_horiz_avg 18 | - tke_horiz_avg 19 | - inv_qc_relvar_horiz_avg 20 | - pbl_height_horiz_avg 21 | # CLD 22 | - cldfrac_ice_horiz_avg 23 | - cldfrac_tot_horiz_avg 24 | - cldfrac_tot_for_analysis_horiz_avg 25 | # P3 26 | - bm_horiz_avg 27 | - nc_horiz_avg 
28 | - ni_horiz_avg 29 | - nr_horiz_avg 30 | - qi_horiz_avg 31 | - qm_horiz_avg 32 | - qr_horiz_avg 33 | - T_prev_micro_step_horiz_avg 34 | - qv_prev_micro_step_horiz_avg 35 | - eff_radius_qc_horiz_avg 36 | - eff_radius_qi_horiz_avg 37 | - eff_radius_qr_horiz_avg 38 | - micro_liq_ice_exchange_horiz_avg 39 | - micro_vap_ice_exchange_horiz_avg 40 | - micro_vap_liq_exchange_horiz_avg 41 | - precip_liq_surf_mass_flux_horiz_avg 42 | - precip_ice_surf_mass_flux_horiz_avg 43 | - precip_total_surf_mass_flux_horiz_avg 44 | # SHOC + HOMME 45 | - U_horiz_avg 46 | - V_horiz_avg 47 | # SHOC + P3 48 | - qc_horiz_avg 49 | - qv_horiz_avg 50 | # SHOC + P3 + RRTMGP + HOMME 51 | - T_mid_horiz_avg 52 | # RRTMGP 53 | - rad_heating_pdel_horiz_avg 54 | - sfc_flux_lw_dn_horiz_avg 55 | - sfc_flux_sw_net_horiz_avg 56 | - ShortwaveCloudForcing_horiz_avg 57 | - LongwaveCloudForcing_horiz_avg 58 | - LiqWaterPath_horiz_avg 59 | - IceWaterPath_horiz_avg 60 | - RainWaterPath_horiz_avg 61 | - RimeWaterPath_horiz_avg 62 | - VapWaterPath_horiz_avg 63 | - ZonalVapFlux_horiz_avg 64 | - MeridionalVapFlux_horiz_avg 65 | - SW_flux_up_at_model_top_horiz_avg 66 | - SW_flux_dn_at_model_top_horiz_avg 67 | - LW_flux_up_at_model_top_horiz_avg 68 | - SW_flux_dn_at_model_bot_horiz_avg 69 | - SW_flux_up_at_model_bot_horiz_avg 70 | - LW_flux_dn_at_model_bot_horiz_avg 71 | - LW_flux_up_at_model_bot_horiz_avg 72 | - SW_clrsky_flux_up_at_model_top_horiz_avg 73 | - LW_clrsky_flux_up_at_model_top_horiz_avg 74 | - SW_clrsky_flux_dn_at_model_bot_horiz_avg 75 | - SW_clrsky_flux_up_at_model_bot_horiz_avg 76 | - LW_clrsky_flux_dn_at_model_bot_horiz_avg 77 | - LW_clrsky_flux_up_at_model_bot_horiz_avg 78 | # diag 79 | - PotentialTemperature_horiz_avg 80 | - LiqPotentialTemperature_horiz_avg 81 | - DryStaticEnergy_horiz_avg 82 | - RelativeHumidity_horiz_avg 83 | - surface_upward_latent_heat_flux_horiz_avg 84 | # coupler 85 | - surf_radiative_T_horiz_avg 86 | - T_2m_horiz_avg 87 | - qv_2m_horiz_avg 88 | - wind_speed_10m_horiz_avg 89 | - surf_mom_flux_horiz_avg 90 | - surf_sens_flux_horiz_avg 91 | - surf_evap_horiz_avg 92 | # Process rates 93 | - shoc_T_mid_tend_horiz_avg 94 | - p3_T_mid_tend_horiz_avg 95 | - rrtmgp_T_mid_tend_horiz_avg 96 | - shoc_qv_tend_horiz_avg 97 | - p3_qv_tend_horiz_avg 98 | - homme_T_mid_tend_horiz_avg 99 | - homme_qv_tend_horiz_avg 100 | - iop_forcing_T_mid_tend_horiz_avg 101 | - iop_forcing_qv_tend_horiz_avg 102 | # Select level output 103 | - PotentialTemperature_at_700hPa_horiz_avg 104 | - PotentialTemperature_at_1000hPa_horiz_avg 105 | - omega_at_500hPa_horiz_avg 106 | - RelativeHumidity_at_700hPa_horiz_avg 107 | - T_mid_at_model_bot_horiz_avg 108 | - SeaLevelPressure_horiz_avg 109 | # ADD SHOC DIAGS (must set ./atmchange extra_shoc_diags=true) 110 | - shoc_mix_horiz_avg 111 | - wthl_sec_horiz_avg 112 | - wqw_sec_horiz_avg 113 | - qw_sec_horiz_avg 114 | - thl_sec_horiz_avg 115 | - uw_sec_horiz_avg 116 | - vw_sec_horiz_avg 117 | - w3_horiz_avg 118 | - w_variance_horiz_avg 119 | output_control: 120 | frequency: 15 121 | frequency_units: nmins 122 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/LES_to_DPxx_format/SAM_LES_to_Dpxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | 5 | # Output path 6 | outpath = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/" 7 | 8 | # Define all cases in a list of dictionaries 9 | cases = 
[ 10 | { 11 | "input_file": "/global/homes/b/bogensch/mk_iop_forcing/make_lafe/lafe_0823/thick_512x512x156_50m_50m_1s.nc", 12 | "output_file": outpath + "LAFE.les.SAM.dpxx_format.nc", 13 | "time_offset": 235.4807, 14 | }, 15 | { 16 | "input_file": "/global/homes/b/bogensch/THREAD/CASS_LES/thick_512x512x156_50m_50m_1s.nc", 17 | "output_file": outpath + "CASS.les.SAM.dpxx_format.nc", 18 | "time_offset": 205.5017, 19 | }, 20 | { 21 | "input_file": "/global/homes/b/bogensch/THREAD/SAM_LES/GATE_IDEAL_S_2048x2048x256_100m_2s.nc", 22 | "output_file": outpath + "GATEIDEAL.les.SAM.dpxx_format.nc", 23 | "time_offset": 0.0, 24 | }, 25 | { 26 | "input_file": "/global/homes/b/bogensch/THREAD/SAM_LES/MAG3D.15A.20130720.1729_105h_128x128x460_LES.nc", 27 | "output_file": outpath + "MAGIC.les.SAM.dpxx_format.nc", 28 | "time_offset": 201.25, 29 | }, 30 | { 31 | "input_file": "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/LESfiles/GOAMAZON_goamazon_278_test1.nc", 32 | "output_file": outpath + "GOAMAZON_singlepulse.les.SAM.dpxx_format.nc", 33 | "time_offset": 278.5042, 34 | }, 35 | { 36 | "input_file": "/global/homes/b/bogensch/THREAD/GOAMAZON_analysis/LESfiles/GOAMAZON_goamazon_603_test3.nc", 37 | "output_file": outpath + "GOAMAZON_doublepulse.les.SAM.dpxx_format.nc", 38 | "time_offset": 603.5021, 39 | }, 40 | { 41 | "input_file": "/global/homes/b/bogensch/THREAD/SAM_LES/COMBLE_MIP_V2.4_with_ice_vm.nc", 42 | "output_file": outpath + "COMBLE.les.SAM.dpxx_format.nc", 43 | "time_offset": 71.92007, 44 | }, 45 | ] 46 | 47 | # Define variable mappings 48 | three_d_vars = [ 49 | ("CLD", "cldfrac_tot_for_analysis_horiz_avg", 1.0), 50 | ("QCL", "qc_horiz_avg", 0.001), 51 | ("QPL", "qr_horiz_avg", 0.001), 52 | ("THETAL", "LiqPotentialTemperature_horiz_avg", 1.0), 53 | ("THETA", "PotentialTemperature_horiz_avg", 1.0), 54 | ("RELH", "RelativeHumidity_horiz_avg", 1.0 / 100.0), 55 | ("TABS", "T_mid_horiz_avg", 1.0), 56 | ("U", "U_horiz_avg", 1.0), 57 | ("V", "V_horiz_avg", 1.0), 58 | ("QV", "qv_horiz_avg", 1.0 / 1000.0), 59 | ("QCI", "qi_horiz_avg", 1.0 / 1000.0), 60 | ("TKES", "tke_horiz_avg", 1.0), 61 | ("TK", "eddy_diff_mom_horiz_avg", 1.0), 62 | ("QPEVP", "micro_vap_liq_exchange_horiz_avg", 1.0 / 86400.0 / 1000.0), 63 | ] 64 | 65 | two_d_vars = [ 66 | ("SHF", "surf_sens_flux_horiz_avg", 1.0), 67 | ("LHF", "surf_evap_horiz_avg", 4e-7), 68 | ("LHF", "surface_upward_latent_heat_flux_horiz_avg", 1.0), 69 | ("CWP", "LiqWaterPath_horiz_avg", 1.0 / 1000.0), 70 | ("IWP", "IceWaterPath_horiz_avg", 1.0 / 1000.0), 71 | ("PW", "VapWaterPath_horiz_avg", 1.0 / 1.0), 72 | ("RWP", "RainWaterPath_horiz_avg", 1.0 / 1000.0), 73 | ("PREC", "precip_total_surf_mass_flux_horiz_avg", 1.15741e-8), 74 | ] 75 | 76 | # Process each case 77 | for case in cases: 78 | input_file = case["input_file"] 79 | output_file = case["output_file"] 80 | time_offset = case["time_offset"] 81 | 82 | # Ensure output directory exists 83 | output_dir = os.path.dirname(output_file) 84 | os.makedirs(output_dir, exist_ok=True) 85 | 86 | # Open the input file 87 | ds_in = xr.open_dataset(input_file) 88 | ds_out = xr.Dataset() 89 | 90 | # Process time 91 | time_data = ds_in["time"].values 92 | ds_out["time"] = xr.DataArray(time_data - time_offset, dims=["time"]) 93 | 94 | # Process z and p 95 | z_data_flipped = ds_in["z"].values[::-1] 96 | z_mid_les = np.tile(z_data_flipped, (len(ds_out["time"]), 1)) 97 | ds_out["z_mid_horiz_avg"] = xr.DataArray(z_mid_les, dims=["time", "lev"]) 98 | 99 | p_data_flipped = ds_in["p"].values[::-1] 100 | p_mid_les = 
np.tile(p_data_flipped, (len(ds_out["time"]), 1)) 101 | ds_out["p_mid_les"] = xr.DataArray(p_mid_les, dims=["time", "lev"]) 102 | 103 | # Process 3D variables 104 | for var_in, var_out, factor in three_d_vars: 105 | if var_in in ds_in: 106 | data_flipped = ds_in[var_in].values[:, ::-1] 107 | ds_out[var_out] = xr.DataArray(data_flipped, dims=["time", "lev"]) * factor 108 | else: 109 | print(f"Warning: Variable '{var_in}' not found in {input_file}. Skipping.") 110 | 111 | # Process 2D variables 112 | for var_in, var_out, factor in two_d_vars: 113 | if var_in in ds_in: 114 | ds_out[var_out] = xr.DataArray(ds_in[var_in].values, dims=["time"]) * factor 115 | else: 116 | print(f"Warning: Variable '{var_in}' not found in {input_file}. Skipping.") 117 | 118 | # Copy attributes and save 119 | ds_out.attrs = ds_in.attrs 120 | ds_out.to_netcdf(output_file) 121 | print(f"Output file created at: {output_file}") 122 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/DPxx_to-from_E3SM_format/DPxx_to_E3SM.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | import glob 5 | 6 | def convert_file(input_file, output_file): 7 | print(f"Processing file: {input_file}") 8 | ds_in = xr.open_dataset(input_file, decode_times=False) 9 | ds_out = xr.Dataset() 10 | 11 | # Transfer and adjust the "time" variable 12 | time_data = ds_in["time"].values 13 | ds_out["time"] = xr.DataArray(time_data, dims=["time"]) 14 | if "units" in ds_in["time"].attrs: 15 | ds_out["time"].attrs["units"] = ds_in["time"].attrs["units"] 16 | ds_out["lev"] = ds_in["lev"].values 17 | ds_out["lev"].attrs = ds_in["lev"].attrs 18 | 19 | if "ilev" in ds_in: 20 | ds_out["ilev"] = ds_in["ilev"].values 21 | ds_out["ilev"].attrs = ds_in["ilev"].attrs 22 | 23 | if "hyam" in ds_in: 24 | for name in ["hyam", "hybm", "hyai", "hybi"]: 25 | ds_out[name] = ds_in[name].values 26 | ds_out[name].attrs = ds_in[name].attrs 27 | 28 | if "p_mid_obs" in ds_in: 29 | ds_out["p_mid_obs"] = ds_in["p_mid_obs"] 30 | ds_out["p_mid_obs"].attrs = ds_in["p_mid_obs"].attrs 31 | 32 | three_d_vars = [ 33 | ("qv_horiz_avg", "Q", 1.0), 34 | ("U_horiz_avg", "U", 1.0), 35 | ("V_horiz_avg", "V", 1.0), 36 | ("T_mid_horiz_avg", "T", 1.0), 37 | ("RelativeHumidity_horiz_avg", "RELHUM", 100.), 38 | ("z_mid_horiz_avg", "Z3", 1.0), 39 | ("qc_horiz_avg", "CLDLIQ", 1.0), 40 | ("qi_horiz_avg", "CLDICE", 1.0), 41 | ("qr_horiz_avg", "RAINQM", 1.0), 42 | ("cldfrac_tot_for_analysis_horiz_avg", "CLOUD", 1.0), 43 | ("omega_horiz_avg", "OMEGA", 1.0), 44 | ("wthl_sec_horiz_avg", "WPTHLP_CLUBB", 1004.*1.2), 45 | ("wqw_sec_horiz_avg", "WPRTP_CLUBB", (2.5*10**6)*1.2), 46 | ("w_variance_horiz_avg", "WP2_CLUBB", 1.0), 47 | ("w3_horiz_avg", "WP3_CLUBB", 1.0), 48 | ("sgs_buoy_flux_horiz_avg", "WPTHVP_CLUBB", 1004.*1.2), 49 | ("thl_sec_horiz_avg", "THLP2_CLUBB", 1.0), 50 | ("qw_sec_horiz_avg", "RTP2_CLUBB", 1000.*1000.) 
51 | ] 52 | 53 | two_d_vars = [ 54 | ("surf_sens_flux_horiz_avg", "SHFLX", 1.0), 55 | ("surface_upward_latent_heat_flux_horiz_avg", "LHFLX", 1.0), 56 | ("ps_horiz_avg", "PS", 1.0), 57 | ("VapWaterPath_horiz_avg", "TMQ", 1.0), 58 | ("LiqWaterPath_horiz_avg", "TGCLDLWP", 1.0), 59 | ("IceWaterPath_horiz_avg", "TGCLDIWP", 1.0), 60 | ("ShortwaveCloudForcing_horiz_avg", "SWCF", 1.0), 61 | ("LongwaveCloudForcing_horiz_avg", "LWCF", 1.0), 62 | ("surf_radiative_T_horiz_avg", "TS", 1.0), 63 | ("T_2m_horiz_avg", "TREFHT", 1.0), 64 | ("qv_2m_horiz_avg", "QREFHT", 1.0), 65 | ("precip_total_surf_mass_flux_horiz_avg", "PRECT", 1.0), 66 | ("cldhgh_int_horiz_avg", "CLDHGH", 1.0), 67 | ("cldmed_int_horiz_avg", "CLDMED", 1.0), 68 | ("cldlow_int_horiz_avg", "CLDLOW", 1.0), 69 | ("cldtot_int_horiz_avg", "CLDTOT", 1.0), 70 | ("omega_at_500hPa_horiz_avg", "OMEGA500", 1.0), 71 | ("LW_flux_dn_at_model_bot_horiz_avg", "FLDS", 1.0), 72 | ("SW_flux_dn_at_model_bot_horiz_avg", "FSDS", 1.0), 73 | ("sfc_flux_sw_net_horiz_avg", "FSNS", 1.0), 74 | ("sfc_flux_lw_dn_horiz_avg", "FLNS", 1.0), 75 | ("model_top_flux_sw_net_horiz_avg", "FSNTOA", 1.0), 76 | ("model_top_flux_lw_net_horiz_avg", "FLNT", 1.0) 77 | ] 78 | 79 | for var_in, var_out, factor in three_d_vars: 80 | if var_in in ds_in: 81 | data_val = np.squeeze(ds_in[var_in].values) 82 | dims_in = ds_in[var_in].dims 83 | dims_out = ["time", "ilev"] if len(dims_in) >= 2 and dims_in[1] == 'ilev' else ["time", "lev"] 84 | 85 | ds_out[var_out] = xr.DataArray(data_val, dims=dims_out) * factor 86 | ds_out[var_out].attrs = ds_in[var_in].attrs 87 | else: 88 | print(f"Warning: Variable '{var_in}' not found.") 89 | 90 | for var_in, var_out, factor in two_d_vars: 91 | if var_in in ds_in: 92 | ds_out[var_out] = xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 93 | ds_out[var_out].attrs = ds_in[var_in].attrs 94 | else: 95 | print(f"Warning: Variable '{var_in}' not found.") 96 | 97 | ds_out.attrs = ds_in.attrs 98 | ds_out.to_netcdf(output_file) 99 | print(f"File saved to {output_file}\n") 100 | 101 | # === MAIN BLOCK === 102 | use_batch_mode = True # Set to False to run manually 103 | 104 | if use_batch_mode: 105 | input_dir = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/" 106 | output_dir = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/E3SM_SCM/" 107 | os.makedirs(output_dir, exist_ok=True) 108 | 109 | input_files = sorted(glob.glob(os.path.join(input_dir, "*dpxx_format.nc"))) 110 | for input_file in input_files: 111 | filename = os.path.basename(input_file).replace("dpxx_format", "e3sm_format") 112 | output_file = os.path.join(output_dir, filename) 113 | convert_file(input_file, output_file) 114 | else: 115 | # Manual mode 116 | input_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON.obs.ARMBE.dpxx_format.nc" 117 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/E3SM_SCM/GOAMAZON.obs.ARMBE.e3sm_format.nc" 118 | os.makedirs(os.path.dirname(output_file), exist_ok=True) 119 | convert_file(input_file, output_file) 120 | -------------------------------------------------------------------------------- /Diagnostics_Package/convert_OBS_LES_output/DPxx_to-from_E3SM_format/E3SM_to_DPxx.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | import glob 5 | 6 | def convert_file(input_file, output_file): 7 | print(f"Processing file: {input_file}") 8 
| ds_in = xr.open_dataset(input_file, decode_times=False) 9 | ds_out = xr.Dataset() 10 | 11 | # Transfer and adjust the "time" variable 12 | time_data = ds_in["time"].values 13 | ds_out["time"] = xr.DataArray(time_data, dims=["time"]) 14 | ds_out["time"].attrs["units"] = ds_in["time"].attrs["units"] 15 | ds_out["lev"] = ds_in["lev"].values 16 | ds_out["lev"].attrs = ds_in["lev"].attrs 17 | 18 | if "ilev" in ds_in: 19 | ds_out["ilev"] = ds_in["ilev"].values 20 | ds_out["ilev"].attrs = ds_in["ilev"].attrs 21 | 22 | if "hyam" in ds_in: 23 | for name in ["hyam", "hybm", "hyai", "hybi"]: 24 | ds_out[name] = ds_in[name].values 25 | ds_out[name].attrs = ds_in[name].attrs 26 | 27 | if "p_mid_obs" in ds_in: 28 | ds_out["p_mid_obs"] = ds_in["p_mid_obs"] 29 | ds_out["p_mid_obs"].attrs = ds_in["p_mid_obs"].attrs 30 | 31 | three_d_vars = [ 32 | ("Q", "qv_horiz_avg", 1.0), 33 | ("U", "U_horiz_avg", 1.0), 34 | ("V", "V_horiz_avg", 1.0), 35 | ("T", "T_mid_horiz_avg", 1.0), 36 | ("RELHUM", "RelativeHumidity_horiz_avg", 1.0/100.), 37 | ("Z3", "z_mid_horiz_avg", 1.0), 38 | ("CLDLIQ", "qc_horiz_avg", 1.0), 39 | ("CLDICE", "qi_horiz_avg", 1.0), 40 | ("RAINQM", "qr_horiz_avg", 1.0), 41 | ("CLOUD", "cldfrac_tot_for_analysis_horiz_avg", 1.0), 42 | ("OMEGA", "omega_horiz_avg", 1.0), 43 | ("WPTHLP_CLUBB", "wthl_sec_horiz_avg", 1./1004./1.2), 44 | ("WPRTP_CLUBB", "wqw_sec_horiz_avg", 1./(2.5*10**6)/1.2), 45 | ("WP2_CLUBB", "w_variance_horiz_avg", 1.0), 46 | ("WP3_CLUBB", "w3_horiz_avg", 1.0), 47 | ("WPTHVP_CLUBB", "sgs_buoy_flux_horiz_avg", 1./1004./1.2), 48 | ("THLP2_CLUBB", "thl_sec_horiz_avg", 1.0), 49 | ("RTP2_CLUBB", "qw_sec_horiz_avg", 1.0/1000./1000.) 50 | ] 51 | 52 | two_d_vars = [ 53 | ("SHFLX", "surf_sens_flux_horiz_avg", 1.0), 54 | ("LHFLX", "surface_upward_latent_heat_flux_horiz_avg",1.0), 55 | ("PS", "ps_horiz_avg",1.0), 56 | ("TMQ", "VapWaterPath_horiz_avg", 1.0), 57 | ("TGCLDLWP", "LiqWaterPath_horiz_avg", 1.0), 58 | ("TGCLDIWP", "IceWaterPath_horiz_avg", 1.0), 59 | ("SWCF", "ShortwaveCloudForcing_horiz_avg", 1.0), 60 | ("LWCF", "LongwaveCloudForcing_horiz_avg", 1.0), 61 | ("TS", "surf_radiative_T_horiz_avg", 1.0), 62 | ("TREFHT", "T_2m_horiz_avg", 1.0), 63 | ("QREFHT", "qv_2m_horiz_avg", 1.0), 64 | ("PRECT", "precip_total_surf_mass_flux_horiz_avg", 1.0), 65 | ("CLDHGH", "cldhgh_int_horiz_avg", 1.0), 66 | ("CLDMED", "cldmed_int_horiz_avg", 1.0), 67 | ("CLDLOW", "cldlow_int_horiz_avg", 1.0), 68 | ("CLDTOT", "cldtot_int_horiz_avg", 1.0), 69 | ("OMEGA500", "omega_at_500hPa_horiz_avg", 1.0), 70 | ("FLDS", "LW_flux_dn_at_model_bot_horiz_avg", 1.0), 71 | ("FSDS", "SW_flux_dn_at_model_bot_horiz_avg", 1.0), 72 | ("FSNS", "sfc_flux_sw_net_horiz_avg", 1.0), 73 | ("FLNS", "sfc_flux_lw_dn_horiz_avg", 1.0), 74 | ("FSNTOA", "model_top_flux_sw_net_horiz_avg", 1.0), 75 | ("FLNT", "model_top_flux_lw_net_horiz_avg", 1.0) 76 | ] 77 | 78 | for var_in, var_out, factor in three_d_vars: 79 | if var_in in ds_in: 80 | data_val = np.squeeze(ds_in[var_in].values) 81 | dims_in = ds_in[var_in].dims 82 | dims_out = ["time", "ilev"] if len(dims_in) >= 2 and dims_in[1] == 'ilev' else ["time", "lev"] 83 | if var_in == "Z3": 84 | surface_elev = data_val[:, -1][:, np.newaxis] - 10 85 | data_val = data_val - surface_elev 86 | ds_out[var_out] = xr.DataArray(data_val, dims=dims_out) * factor 87 | ds_out[var_out].attrs = ds_in[var_in].attrs 88 | else: 89 | print(f"Warning: Variable '{var_in}' not found.") 90 | 91 | for var_in, var_out, factor in two_d_vars: 92 | if var_in in ds_in: 93 | ds_out[var_out] = 
xr.DataArray(np.squeeze(ds_in[var_in].values), dims=["time"]) * factor 94 | ds_out[var_out].attrs = ds_in[var_in].attrs 95 | else: 96 | print(f"Warning: Variable '{var_in}' not found.") 97 | 98 | if "PRECL" in ds_in and "PRECC" in ds_in: 99 | ds_out['precip_total_surf_mass_flux_horiz_avg'] = xr.DataArray( 100 | np.squeeze(ds_in['PRECL'].values + ds_in['PRECC'].values), dims=["time"] 101 | ) 102 | ds_out['precip_large_surf_mass_flux_horiz_avg'] = xr.DataArray( 103 | np.squeeze(ds_in['PRECL'].values), dims=["time"] 104 | ) 105 | ds_out['precip_conv_surf_mass_flux_horiz_avg'] = xr.DataArray( 106 | np.squeeze(ds_in['PRECC'].values), dims=["time"] 107 | ) 108 | ds_out['precip_large_surf_mass_flux_horiz_avg'].attrs = ds_in['PRECL'].attrs 109 | ds_out['precip_conv_surf_mass_flux_horiz_avg'].attrs = ds_in['PRECC'].attrs 110 | 111 | ds_out.attrs = ds_in.attrs 112 | ds_out.to_netcdf(output_file) 113 | print(f"File saved to {output_file}\n") 114 | 115 | # === MAIN BLOCK === 116 | use_batch_mode = False # Set to False to run manually 117 | 118 | if use_batch_mode: 119 | input_dir = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/pre_files/" 120 | output_dir = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/" 121 | os.makedirs(output_dir, exist_ok=True) 122 | 123 | input_files = sorted(glob.glob(os.path.join(input_dir, "*e3sm_format.nc"))) 124 | for input_file in input_files: 125 | filename = os.path.basename(input_file).replace("e3sm_format", "dpxx_format") 126 | output_file = os.path.join(output_dir, filename) 127 | convert_file(input_file, output_file) 128 | else: 129 | # Manual mode 130 | input_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/pre_files/maoarmbe_iopfile_4scam_arm_diags_2014to2015.nc" 131 | output_file = "/pscratch/sd/b/bogensch/E3SM_simulations/iopdiags_OBS_and_LES_files/DP_EAMxx/GOAMAZON.obs.ARMBE.dpxx_format.nc" 132 | os.makedirs(os.path.dirname(output_file), exist_ok=True) 133 | convert_file(input_file, output_file) 134 | -------------------------------------------------------------------------------- /E3SM_SCM_scripts/E3SM_REPLAY_rename_limitedarea.csh: -------------------------------------------------------------------------------- 1 | #!/bin/csh 2 | ## ==================================================================== 3 | # Purpose is to modify the (limited area) IOP file that is generated from E3SM 4 | # to remove the lat lon appendices on the variables. In addition, the 5 | # dimension variables need to be renamed to "ncol", which is what E3SM 6 | # expects. This is a temporary solution until this can be fixed in the E3SM code. 7 | 8 | # This script is configured to deal with the minimum number of variables required 9 | # by the E3SM REPLAY option. Any additional variables will have to be added to the 10 | # dynvars and physvars arrays. 11 | 12 | # This script is obviously very hardwired. Want to submit something better? Please! 
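#
# For example, with latlon set to 40e_to_50e_45s_to_55s (as below), a field that
# shows up in the E3SM-generated file as (hypothetical ncdump -h excerpt):
#     double t_40e_to_50e_45s_to_55s(time, lev, ncol_GLL_40e_to_50e_45s_to_55s) ;
# comes out of this script renamed to:
#     double t(time, lev, ncol) ;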
13 | # Original (ashamed) author: Peter Bogenschutz (bogenschutz1@llnl.gov) 14 | 15 | ############################################################ 16 | # BEGIN USER INPUT 17 | 18 | # simulation id 19 | setenv simulation E3SM.ne30_ne30.anvil 20 | # file path where replay data is located 21 | setenv filepath /lcrc/group/acme/bogensch/ACME_simulations/{$simulation}/run 22 | # the file containing the E3SM replay data 23 | setenv replayfile {$simulation}.cam.h1.0001-01-01-00000.nc 24 | 25 | # Provide name for the IOP file generated by E3SM 26 | setenv file REPLAY.009.ne30_ne30.cam.h1.0001-01-01-00000.nc 27 | 28 | # Provide the latlon string that was affixed to all your output 29 | # variables in this file. HINT: to determine, perform a ncdump -h 30 | # and look at the variable names 31 | setenv latlon 40e_to_50e_45s_to_55s 32 | 33 | # Full input file name (do not change) 34 | setenv filename {$filepath}/{$replayfile} 35 | 36 | # Select a name for the output file that will be generated 37 | setenv finalfile E3SMreplay_{$replayfile} 38 | 39 | # Variables that have dimensions of ncol_GLL_$latlon 40 | set dynvars = (lat_GLL lon_GLL CLAT CLDICE_dten CLDLIQ_dten DMS_dten H2O2_dten H2SO4_dten NUMICE_dten NUMLIQ_dten NUMRAI_dten NUMSNO_dten O3_dten Ps Q_dten RAINQM_dten SNOWQM_dten SO2_dten SOAG_dten bc_a1_dten bc_a3_dten bc_a4_dten divT3d dst_a1_dten dst_a3_dten mom_a1_dten mom_a2_dten mom_a3_dten mom_a4_dten ncl_a1_dten ncl_a2_dten ncl_a3_dten num_a1_dten num_a2_dten num_a3_dten num_a4_dten pom_a1_dten pom_a3_dten pom_a4_dten q shflx so4_a1_dten so4_a2_dten so4_a3_dten soa_a1_dten soa_a2_dten soa_a3_dten t u v) 41 | 42 | # Variables that have dimension of ncol_physgrid_$latlon 43 | set physvars = (lat_physgrid lon_physgrid Prec Tg Tsair heat_glob lhflx omega phis shflx trefht) 44 | 45 | # END USER INPUT 46 | ############################################################ 47 | 48 | # clean up temporary files if they happen to still be lying around 49 | if (-e temp_physics.nc) rm temp_physics.nc 50 | if (-e temp_dynamics.nc) rm temp_dynamics.nc 51 | 52 | # determine number of physics variables 53 | set numphysics = ${#physvars} 54 | 55 | # Note that the limited area file generated by E3SM has two dimensions, 56 | # ncol_GLL_$latlon and ncol_physgrid_$latlon. These are the same! 57 | # Their names need to be changed to "ncol", which is easy enough to do, 58 | # but I couldn't figure out how to change both when they exist in the same file 59 | # without getting an error. Thus, this script is more complicated than I would 60 | # like, as variables need to be separated into different files, then 61 | # merged back into one file at the end. 
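#
# Sketch of the workaround (commands illustrative only): a single call such as
#     ncrename -d ncol_GLL_$latlon,ncol -d ncol_physgrid_$latlon,ncol $replayfile
# would try to create two dimensions named "ncol" in one file, which netCDF does
# not allow. So instead this script (1) ncks's the dynamics and physics variables
# into separate temp files, (2) renames the single ncol_* dimension in each temp
# file to "ncol", and (3) merges the temp files back together with ncks -A.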
62 | 63 | # loop through physics variables and put them into a separate file 64 | set phys_ind = 1 65 | while($phys_ind <= $numphysics) 66 | 67 | ncks -h -A -v {$physvars[$phys_ind]}_$latlon $replayfile temp_physics.nc 68 | @ phys_ind++ 69 | 70 | end # variable loop 71 | 72 | # determine number of dynamics variables 73 | set numdynamics = ${#dynvars} 74 | 75 | # loop through dynamics variables and put them into a separate file 76 | set dyn_ind = 1 77 | while($dyn_ind <= $numdynamics) 78 | 79 | ncks -h -A -v {$dynvars[$dyn_ind]}_$latlon $replayfile temp_dynamics.nc 80 | @ dyn_ind++ 81 | 82 | end # variable loop 83 | 84 | # bring all other necessary variables to the dynamics file 85 | ncks -h -A -v hyai $replayfile temp_dynamics.nc 86 | ncks -h -A -v hybi $replayfile temp_dynamics.nc 87 | ncks -h -A -v time $replayfile temp_dynamics.nc 88 | ncks -h -A -v date $replayfile temp_dynamics.nc 89 | ncks -h -A -v datesec $replayfile temp_dynamics.nc 90 | ncks -h -A -v time_bnds $replayfile temp_dynamics.nc 91 | ncks -h -A -v time_written $replayfile temp_dynamics.nc 92 | ncks -h -A -v ndbase $replayfile temp_dynamics.nc 93 | ncks -h -A -v nsbase $replayfile temp_dynamics.nc 94 | ncks -h -A -v nbdate $replayfile temp_dynamics.nc 95 | ncks -h -A -v bdate $replayfile temp_dynamics.nc 96 | ncks -h -A -v mdt $replayfile temp_dynamics.nc 97 | ncks -h -A -v ndcur $replayfile temp_dynamics.nc 98 | ncks -h -A -v nscur $replayfile temp_dynamics.nc 99 | ncks -h -A -v co2vmr $replayfile temp_dynamics.nc 100 | ncks -h -A -v ch4vmr $replayfile temp_dynamics.nc 101 | ncks -h -A -v n2ovmr $replayfile temp_dynamics.nc 102 | ncks -h -A -v f11vmr $replayfile temp_dynamics.nc 103 | ncks -h -A -v f12vmr $replayfile temp_dynamics.nc 104 | ncks -h -A -v sol_tsi $replayfile temp_dynamics.nc 105 | ncks -h -A -v tsec $replayfile temp_dynamics.nc 106 | ncks -h -A -v nsteph $replayfile temp_dynamics.nc 107 | 108 | 109 | ############################################################## 110 | #### Rename stuff 111 | 112 | # Rename the dimension variable for the dynamics variables to 113 | # simply be ncol (what E3SM expects) 114 | ncrename -h -O -d ncol_GLL_$latlon,ncol temp_dynamics.nc 115 | 116 | # Rename all dynamics variables to remove the affixed latlon string 117 | set dyn_ind = 1 118 | while($dyn_ind <= $numdynamics) 119 | 120 | ncrename -h -O -v {$dynvars[$dyn_ind]}_$latlon,{$dynvars[$dyn_ind]} temp_dynamics.nc 121 | @ dyn_ind++ 122 | 123 | end # variable loop 124 | 125 | # Rename the dimension variable for the physics variables to 126 | # simply be ncol (what E3SM expects) 127 | ncrename -h -O -d ncol_physgrid_$latlon,ncol temp_physics.nc 128 | 129 | # Rename all physics variables to remove the affixed latlon string 130 | set phys_ind = 1 131 | while($phys_ind <= $numphysics) 132 | 133 | ncrename -h -O -v {$physvars[$phys_ind]}_$latlon,{$physvars[$phys_ind]} temp_physics.nc 134 | @ phys_ind++ 135 | 136 | end # variable loop 137 | 138 | # Rename the lat and lon variables to simply be "lat" and "lon" 139 | ncrename -h -O -v lat_physgrid,lat temp_physics.nc 140 | ncrename -h -O -v lon_physgrid,lon temp_physics.nc 141 | 142 | # Merge the dynamics file into the physics file 143 | ncks -h -A temp_dynamics.nc temp_physics.nc 144 | 145 | # Remove the dynamics GLL lat/lon variables, put all info into the final file 146 | ncks -h -C -O -x -v lat_GLL,lon_GLL temp_physics.nc $finalfile 147 | 148 | # Add this attribute, which is needed for E3SM REPLAY runs 149 | ncatted -h -a CAM_GENERATED_FORCING,global,o,c,"create SCAM IOP dataset" $finalfile 150 |
151 | # remove temporary files 152 | rm temp_physics.nc 153 | rm temp_dynamics.nc 154 | 155 | 156 | 157 | -------------------------------------------------------------------------------- /E3SM_SCM_scripts/replay_postprocess_global_onecolumn.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Fri Feb 9 16:11:48 2024 5 | 6 | @author: bogenschutz1 7 | """ 8 | 9 | """ 10 | Post process output generated to "replay" an E3SM column in SCM mode 11 | 12 | This script assumes that all replay data is in one file. This script extracts one 13 | column from the file to generate forcing data to run in E3SM SCM. 14 | This script is provided as an example and can be modified to meet 15 | the user's individual needs. 16 | """ 17 | 18 | import matplotlib.pyplot as plt 19 | import matplotlib 20 | from scipy import interpolate 21 | import netCDF4 as nc4 22 | import numpy as np 23 | import scipy as sp 24 | import pylab 25 | import os 26 | 27 | ################################### 28 | ##### Begin user input 29 | 30 | # Directory where output lives 31 | datadir='/pscratch/sd/b/bogensch/SCM_sims/E3SM_SCMv3.F2010.replay.001a.ne30pg2_oECv3.pm-cpu/run/' 32 | 33 | # Name of the case 34 | casename='E3SM_SCMv3.F2010.replay.001a' 35 | 36 | # File append information for the file to process 37 | fileappend='.ne30pg2_oECv3.pm-cpu.eam.h1.0001-01-01-00000.nc' 38 | 39 | # select lat and lon column to extract 40 | lat_ex=0.0 41 | lon_ex=0.0 42 | 43 | # the lon and lat bounds of subset replay data; for global data keep blank 44 | lonlat='' 45 | 46 | # Specify desired location of processed output file 47 | # by default set to the input directory 48 | outdir='/global/homes/b/bogensch/acme_scripts_scm001/' 49 | 50 | ###### End user input 51 | #################################### 52 | 53 | # define input file 54 | inputfile=datadir+casename+fileappend 55 | 56 | # define output file 57 | outputfile=outdir+casename+'.lon'+str(lon_ex)+'_lat'+str(lat_ex)+'.replaydata_for_SCM.nc' 58 | 59 | # does the output file already exist? If so, delete it and start fresh
60 | if os.path.isfile(outputfile): 61 | os.remove(outputfile) 62 | 63 | # Open input file 64 | f_in=nc4.Dataset(inputfile,mode='r') 65 | 66 | # Open output file 67 | f_out=nc4.Dataset(outputfile,mode='w',format='NETCDF4') 68 | 69 | ############################ INPUT FILE READ INS 70 | # Read in dimension of input file 71 | ncol_dyn=f_in.dimensions['ncol_d'+lonlat] 72 | ncol_phys=f_in.dimensions['ncol'+lonlat] 73 | nlev=f_in.dimensions['lev'] 74 | nilev=f_in.dimensions['ilev'] 75 | ntime=f_in.dimensions['time'] 76 | nbnd=f_in.dimensions['nbnd'] 77 | 78 | # Read in coordinates of input file 79 | lat_dyn=f_in.variables['lat_d'+lonlat] 80 | lon_dyn=f_in.variables['lon_d'+lonlat] 81 | lat_phys=f_in.variables['lat'+lonlat] 82 | lon_phys=f_in.variables['lon'+lonlat] 83 | lev_in=f_in.variables['lev'] 84 | ilev_in=f_in.variables['ilev'] 85 | time_in=f_in.variables['time'] 86 | tsec_in=f_in.variables['tsec'] 87 | 88 | ############################ OUTPUT FILE CREATES 89 | # Define dimensions for output file 90 | f_out.createDimension('ncol',1) 91 | f_out.createDimension('time',ntime.size) 92 | f_out.createDimension('lev',nlev.size) 93 | f_out.createDimension('ilev',nilev.size) 94 | f_out.createDimension('nbnd',nbnd.size) 95 | 96 | # Define coordinates for outputfile 97 | lat=f_out.createVariable('lat','f4','ncol') 98 | lon=f_out.createVariable('lon','f4','ncol') 99 | lev=f_out.createVariable('lev','f4','lev') 100 | ilev=f_out.createVariable('ilev','f4','ilev') 101 | time=f_out.createVariable('time','f4','time') 102 | tsec=f_out.createVariable('tsec','f4','time') 103 | bdate=f_out.createVariable('bdate','i4') 104 | 105 | ############################ Copy coordinate information 106 | lat[:]=lat_ex 107 | lat.units=lat_dyn.units 108 | lat.long_name=lat_dyn.long_name 109 | 110 | lon[:]=lon_ex 111 | lon.units=lon_dyn.units 112 | lon.long_name=lon_dyn.long_name 113 | 114 | time[:]=time_in[:] 115 | time.units=time_in.units 116 | time.long_name=time_in.long_name 117 | 118 | tsec[:]=tsec_in[:] 119 | tsec.units='s' 120 | tsec.long_name=tsec_in.long_name 121 | 122 | lev[:]=lev_in[:] 123 | lev.units=lev_in.units 124 | lev.long_name=lev_in.long_name 125 | 126 | ilev[:]=ilev_in[:] 127 | ilev.units=ilev_in.units 128 | ilev.long_name=ilev_in.long_name 129 | 130 | bdate[0]=20000101 131 | bdate.units='yyyymmdd' 132 | bdate.long_name='base date' 133 | 134 | ############################ Find column to extract for both physics and dynamics 135 | # nearest-neighbor search: minimize |dlat| + |dlon| 136 | testval=abs(lat_dyn[:] - lat_ex)+abs(lon_dyn[:] - lon_ex) 137 | dy_col=np.where(testval == np.min(testval)) 138 | 139 | testval=abs(lat_phys[:] - lat_ex)+abs(lon_phys[:] - lon_ex) 140 | phys_col=np.where(testval == np.min(testval)) 141 | # report the selected column for sanity checking 142 | print(lat_dyn[dy_col]) 143 | print(lon_dyn[dy_col]) 144 | 145 | print(dy_col) 146 | print(np.size(testval)) 147 | 148 | 149 | ############################## Process relevant variables 150 | 151 | # Loop over inputdata to search for variables with specific dimensions 152 | indata = nc4.Dataset(inputfile) 153 | # extract the selected column from each variable, dispatching on its dimensions 154 | 155 | 156 | # loop thru all the variables 157 | for v in indata.variables: 158 | # set filling flag to false by default 159 | fillvar=False 160 | 161 | print(indata.variables[v].dimensions) 162 | 163 | 164 | # process variables that are 3D and on dynamics grid 165 | # These variables do NOT have to be remapped 166 | if indata.variables[v].dimensions == ('time','lev','ncol_d'+lonlat):
167 | fillvar=True 168 | current_name=indata.variables[v].name 169 | current_var=f_in.variables[current_name] 170 | current_var_out=f_out.createVariable(current_name,'f4',('time','lev','ncol')) 171 | print(np.shape(current_var),dy_col[0]) 172 | current_var_out[:]=current_var[:,:,dy_col[0]] 173 | 174 | # process 2D variables on dynamics grid 175 | if indata.variables[v].dimensions == ('time','ncol_d'+lonlat): 176 | fillvar=True 177 | current_name=indata.variables[v].name 178 | current_var=f_in.variables[current_name] 179 | current_var_out=f_out.createVariable(current_name,'f4',('time','ncol')) 180 | current_var_out[:]=current_var[:,dy_col[0]] 181 | 182 | # process 2D variables on physics grid - Needs to be remapped 183 | if indata.variables[v].dimensions == ('time','ncol'+lonlat): 184 | fillvar=True 185 | current_name=indata.variables[v].name 186 | current_var=f_in.variables[current_name] 187 | current_var_out=f_out.createVariable(current_name,'f4',('time','ncol')) 188 | current_var_out[:]=current_var[:,phys_col[0]] 189 | 190 | # process 3D variables on physics grid - Needs to be remapped 191 | if indata.variables[v].dimensions == ('time','lev','ncol'+lonlat): 192 | fillvar=True 193 | current_name=indata.variables[v].name 194 | current_var=f_in.variables[current_name] 195 | current_var_out=f_out.createVariable(current_name,'f4',('time','lev','ncol')) 196 | current_var_out[:]=current_var[:,:,phys_col[0]] 197 | 198 | if (fillvar): 199 | print('Processing variable: ',current_name) 200 | # Fill the information for this variable 201 | current_var_out.units=current_var.units 202 | current_var_out.long_name=current_var.long_name 203 | 204 | # Close both the input and output files 205 | f_in.close() 206 | f_out.close() 207 | 208 | # Add this attribute, which is needed for E3SM REPLAY runs 209 | thecmd ='ncatted -h -a E3SM_GENERATED_FORCING,global,o,c,"create SCM IOP dataset" '+outputfile 210 | os.system(thecmd) 211 | -------------------------------------------------------------------------------- /Diagnostics_Package/diagnostics_user_driver.py: -------------------------------------------------------------------------------- 1 | from diagnostics import run_diagnostics 2 | import os 3 | 4 | ########################################################## 5 | # ARM/ASR diagnostics package for E3SM Single Column Model (SCM) 6 | # or doubly-periodic EAMxx (DP-EAMxx). 7 | 8 | # Please make a copy of this driver file and modify it for your case/needs. 9 | 10 | # This driver file should provide sufficient documentation on user 11 | # defined settings, but more details exist in the README.md file. 12 | 13 | # For detailed documentation on input data requirements and how to 14 | # add supported observational/LES data sets please see the documentation 15 | # (to be released very soon). 16 | 17 | ########################################################## 18 | ########################################################## 19 | # BEGIN: MANDATORY USER DEFINED SETTINGS 20 | 21 | # Where do you want output diagnostics to be placed? Provide path. 22 | output_dir = "/global/cfs/cdirs/e3sm/www/bogensch/Official_Example_Diags" 23 | 24 | # User-specified general ID for this diagnostic set 25 | general_id = "MAGIC_dpxx_bug_fix" # Change as needed 26 | 27 | ######## Begin manage input datasets 28 | 29 | datasets=[] 30 | 31 | # Define each dataset and its associated metadata. 32 | # - REQUIRED Input: 33 | # 1) filename = the path and filename of the output dataset to be considered.
34 | # 2) short_id = ID used in the diagnostics package for legends etc. 35 | # 3) line_color and line_style: used for profile and 1D time series plots. 36 | 37 | # The settings below are convenient to define when they are recycled by many datasets, but they 38 | # are not required, since they are only used to build the filename metadata for E3SM/DP-SCREAM 39 | # output (i.e., you can explicitly declare the path and file for each filename when adding each case). 40 | simulation_dir = "/pscratch/sd/b/bogensch/dp_screamxx" # directory for model simulations 41 | caseappend = ".horiz_avg.AVERAGE.nmins_x15.2013-07-21-19620.nc" # file suffix for model simulations 42 | 43 | # Path to observational datasets 44 | obs_dir = "/global/cfs/cdirs/e3sm/diagnostics/observations/Atm/scm_dpxx_datasets/DP_EAMxx" 45 | 46 | # Add datasets (can have as many as you want, minimum of one) 47 | # Please list model (E3SM/SCREAM) datasets first, before LES/OBS. 48 | 49 | # SCREAM control simulation 50 | casename="scream_dpxx_MAGIC.prefix.001a" 51 | datasets.append({ 52 | "filename": os.path.join(simulation_dir, casename, "run", f"{casename}{caseappend}"), 53 | "short_id": "EAMxx 3 km Control", 54 | "line_color": "blue", 55 | "line_style": "-" 56 | }) 57 | 58 | # SCREAM simulation with a bug fix 59 | casename="scream_dpxx_MAGIC.fix.001a" 60 | datasets.append({ 61 | "filename": os.path.join(simulation_dir, casename, "run", f"{casename}{caseappend}"), 62 | "short_id": "EAMxx 3 km Bug Fix", 63 | "line_color": "green", 64 | "line_style": "--" 65 | }) 66 | 67 | # SAM LES 68 | datasets.append({ 69 | "filename": os.path.join(obs_dir,"MAGIC.les.SAM.dpxx_format.nc"), 70 | "short_id": "SAM-LES", 71 | "line_color": "black", 72 | "line_style": "-" 73 | }) 74 | 75 | # 1D observation dataset 76 | datasets.append({ 77 | "filename": os.path.join(obs_dir,"MAGIC.obs.1dvars.dpxx_format.nc"), 78 | "short_id": "OBS", 79 | "line_color": "gray", 80 | "line_style": "--" 81 | }) 82 | 83 | # Sounding observation dataset 84 | datasets.append({ 85 | "filename": os.path.join(obs_dir,"MAGIC.obs.sounding.dpxx_format.nc"), 86 | "short_id": "OBS", 87 | "line_color": "gray", 88 | "line_style": "--" 89 | }) 90 | 91 | # KAZR observation dataset 92 | datasets.append({ 93 | "filename": os.path.join(obs_dir,"MAGIC.obs.kazr.dpxx_format.nc"), 94 | "short_id": "OBS-Kazr", 95 | "line_color": "gray", 96 | "line_style": "--" 97 | }) 98 | 99 | # End add datasets. 100 | 101 | ######## End manage input datasets 102 | 103 | # PROFILE PLOT AVERAGING WINDOWS: 104 | # Define averaging windows for profile plots as numerical values in days. You can have 105 | # as many averaging windows as you would like. Each index for these arrays corresponds 106 | # to an averaging window. This example does daily averaging for three days. 107 | profile_time_s = [0.0,1.0,2.0] # Starting times for averaging 108 | profile_time_e = [1.0,2.0,3.0] # Ending times for averaging 109 | 110 | # Note that time series and time-height plots will by default plot the entire range 111 | # in your simulation. If you want to modify this, then please see the optional 112 | # user defined settings below. 113 | 114 | # END: MANDATORY USER DEFINED SETTINGS 115 | ########################################################## 116 | ########################################################## 117 | # BEGIN: OPTIONAL user defined settings 118 | 119 | # Do time-height plots? These can take a bit longer to make. 120 | do_timeheight=True 121 | 122 | # Choose vertical plotting coordinate; can be pressure or height.
123 | # -If height then the variable Z3 (E3SM) or z_mid (EAMxx) needs to be in your output file.
124 | # -If pressure then PS (E3SM) or ps (EAMxx) should be in your output file. If it is not then
125 | #  the package will use hybrid levels to plot, which may not be accurate compared to observations.
126 | height_cord = "z" # p = pressure; z = height
127 | 
128 | # Optional: Maximum y-axis height for profile plots (in meters or mb, depending on vertical coordinate)
129 | max_height_profile = 3000 # Set to desired height in meters or mb, or None for automatic scaling
130 | 
131 | # Optional: Maximum y-axis height for time-height plots (in meters or mb, depending on vertical coordinate)
132 | max_height_timeheight = 3000 # Set to desired height in meters or mb, or None for automatic scaling
133 | 
134 | # linewidth for curves
135 | linewidth = 4
136 | 
137 | # Optional: Time range for time series plots in days
138 | time_series_time_s = 0 # Starting time for time series, None for default (entire range)
139 | time_series_time_e = 3.25 # Ending time for time series, None for default (entire range)
140 | 
141 | # Optional: Time range for time-height plots in days
142 | time_height_time_s = 0 # Starting time for time-height plots, None for default (entire range)
143 | time_height_time_e = 3.25 # Ending time for time-height plots, None for default (entire range)
144 | 
145 | # Do Diurnal Composite Analysis? Must have at least three days worth of data and each
146 | # day must have at least 4 output time slices for this analysis to be considered.
147 | do_diurnal_composites = True
148 | diurnal_start_day = 0 # Starting day for diurnal composite stats, None for default (entire range)
149 | diurnal_end_day = 3.0 # Ending day for diurnal composite stats, None for default (entire range)
150 | 
151 | # Define the colormap for time-height contourf plots. Default is "viridis_r".
152 | time_height_cmap = "viridis_r"
153 | 
154 | # Optional arguments to define tick size and label size for plots. Default is 14.
155 | ticksize=14
156 | labelsize=14
157 | 
158 | # END: OPTIONAL user defined settings
159 | ##########################################################
160 | ##########################################################
161 | 
162 | # Call the diagnostics function with user-defined settings
163 | run_diagnostics(
164 |     output_dir,
165 |     general_id,
166 |     datasets,
167 |     profile_time_s,
168 |     profile_time_e,
169 |     do_timeheight,
170 |     height_cord,
171 |     max_height_profile,
172 |     max_height_timeheight,
173 |     linewidth,
174 |     time_series_time_s,
175 |     time_series_time_e,
176 |     time_height_time_s,
177 |     time_height_time_e,
178 |     do_diurnal_composites=do_diurnal_composites,
179 |     diurnal_start_day=diurnal_start_day,
180 |     diurnal_end_day=diurnal_end_day,
181 |     usercmap=time_height_cmap,
182 |     ticksize=ticksize,
183 |     labelsize=labelsize
184 | )
185 | 
186 | 
--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/regrid_utilities/regrid_dpxx_output.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Process 2D & 3D fields and put them on an x & y grid.
4 | Data will be placed in a folder called post_processed_output
5 | in your case directory.
6 | 
7 | 3D currently only works with data on the lev grid (as opposed to ilev).
8 | This is because ilev currently doesn't appear to be written to
9 | output files for some reason.
10 | 
11 | This comes with no warranty and is provided as a convenience.
12 | """ 13 | 14 | import netCDF4 as nc4 15 | import numpy as np 16 | import scipy as sp 17 | import pylab 18 | import os 19 | import glob 20 | import matplotlib.pyplot as plt 21 | 22 | ###### Start user input ################################################ 23 | # What variables do you want to process? You have two options, you 24 | # can either list specific variables or simply put "all" to regrid every 25 | # 2D & 3D variable in your output stream. Example below of selected vars. 26 | #vartodo=["T_mid","IceWaterPath"] 27 | vartodo=["all"] 28 | 29 | # Supply the run directory, casename, and prefix of the output stream to process 30 | casedir='/pscratch/sd/b/bogensch/dp_screamxx/' 31 | casename='scream_dpxx_GATEIDEAL.3.2km.003a' 32 | outstream='scream_dpxx_GATEIDEAL.3.2km.003a.scream.hourly.avg.AVERAGE.nhours_x1' 33 | 34 | ###### End user input 35 | ####################################################################### 36 | 37 | def regrid_array(data, x_coords, y_coords): 38 | # Create dictionaries to map the coordinates to their indices 39 | unique_x = sorted(set(x_coords)) 40 | unique_y = sorted(set(y_coords)) 41 | 42 | x_index = {value: idx for idx, value in enumerate(unique_x)} 43 | y_index = {value: idx for idx, value in enumerate(unique_y)} 44 | 45 | # Determine the shape of the 2D array 46 | max_x = len(unique_x) 47 | max_y = len(unique_y) 48 | 49 | # Convert x_coords and y_coords to indices 50 | x_indices = np.array([x_index[x] for x in x_coords]) 51 | y_indices = np.array([y_index[y] for y in y_coords]) 52 | 53 | # Determine the number of slices (time or vertical levels) 54 | num_slices = data.shape[0] 55 | 56 | if (data.ndim == 2): 57 | 58 | # Create an empty 3D array with the determined shape 59 | arranged_array = np.empty((num_slices, max_y, max_x)) 60 | 61 | # Use advanced indexing to place the data in the correct locations in one go 62 | arranged_array[:, y_indices, x_indices] = data 63 | 64 | if (data.ndim == 3): 65 | num_levs = data.shape[2] # Assuming the 3rd dimension is the number of levels 66 | 67 | # Create an empty 4D array with the determined shape 68 | arranged_array = np.empty((num_slices, num_levs, max_y, max_x)) 69 | 70 | # Vectorized operation to place data into 4D array 71 | # Use broadcasting to index array positions efficiently 72 | arranged_array[:, :, y_indices, x_indices] = data.transpose(0, 2, 1) 73 | 74 | # Create the appropriately arranged x and y coordinate arrays 75 | arranged_x_coords = np.array(unique_x) 76 | arranged_y_coords = np.array(unique_y) 77 | 78 | return arranged_array, arranged_x_coords, arranged_y_coords 79 | 80 | ################################ 81 | 82 | def SCREAM_get_cords(initfile): 83 | 84 | f=nc4.Dataset(initfile) 85 | time=f.variables['time'][:] 86 | lev=f.variables['lev'][:] 87 | crm_grid_x=f.variables['lon'][:] 88 | crm_grid_y=f.variables['lat'][:] 89 | 90 | # conversion_fac=(3.14/180.) 91 | conversion_fac=1. 
92 | 
93 |     crm_grid_x=crm_grid_x*conversion_fac
94 |     crm_grid_y=crm_grid_y*conversion_fac
95 | 
96 |     f.close()
97 | 
98 |     return time, lev, crm_grid_x, crm_grid_y
99 | 
100 | ################################
101 | 
102 | def determine_var_dim(var,var_name,matching_vars,dimarr):
103 | 
104 |     # Check if the dimensions are ('time', 'ncol')
105 |     if var.dimensions == ('time', 'ncol'):
106 |         matching_vars.append(var_name)
107 |         dimarr.append('2D')
108 | 
109 |     # Check if the dimensions are ('time', 'ncol', 'lev')
110 |     if var.dimensions == ('time', 'ncol','lev'):
111 |         matching_vars.append(var_name)
112 |         dimarr.append('3D')
113 | 
114 | ################################
115 | 
116 | def find_variables(filename,varlist_in):
117 |     # Open the NetCDF file
118 |     dataset = nc4.Dataset(filename)
119 | 
120 |     # Initialize a list to store variable names that match the criteria
121 |     matching_vars = []
122 |     dimarr = []
123 | 
124 |     for var_entry in varlist_in:
125 | 
126 |         # If "all" was requested, search the file for matching variables
127 |         if (var_entry == "all"):
128 | 
129 |             # Iterate over all the variables in the NetCDF file
130 |             for var_name in dataset.variables:
131 |                 var = dataset.variables[var_name]
132 |                 determine_var_dim(var,var_name,matching_vars,dimarr)
133 |         else:
134 | 
135 |             # user specified output
136 |             var = dataset.variables[var_entry]
137 |             determine_var_dim(var,var_entry,matching_vars,dimarr)
138 | 
139 |     # Close the dataset
140 |     dataset.close()
141 | 
142 |     return matching_vars, dimarr
143 | 
144 | ################################
145 | 
146 | def check_path(dim):
147 | 
148 |     # Check to see if the post process directory exists for this output type; if not create it
149 |     postpath_dim=casedir+casename+'/post_processed_output/'+dim+'/'
150 |     ishere=os.path.isdir(postpath_dim)
151 | 
152 |     # Make directory for post processing
153 |     if not ishere:
154 |         os.system('mkdir '+postpath_dim)
155 | 
156 |     return postpath_dim
157 | 
158 | ##############################################################################
159 | ##############################################################################
160 | ##############################################################################
161 | # Begin main function
162 | 
163 | # Check to see if post process directory exists; if not create it
164 | postpath=casedir+casename+'/post_processed_output/'
165 | ishere=os.path.isdir(postpath)
166 | 
167 | # Make directory for post processing
168 | if not ishere:
169 |     os.system('mkdir '+postpath)
170 | 
171 | ############################################################
172 | #
173 | 
174 | # Make a list of files to process
175 | filedir=casedir+casename+'/run/'+outstream
176 | filelist=sorted(glob.glob(filedir+'*.nc'))
177 | print(filedir)
178 | # Get coordinates and timing information
179 | # open first file
180 | time_in, lev_in, crm_grid_x, crm_grid_y = SCREAM_get_cords(filelist[0])
181 | 
182 | # arrange the coordinates
183 | unique_x = sorted(set(crm_grid_x))
184 | unique_y = sorted(set(crm_grid_y))
185 | 
186 | # Create the appropriately arranged x and y coordinate arrays
187 | arranged_x_coords = np.array(unique_x)
188 | arranged_y_coords = np.array(unique_y)
189 | 
190 | # figure out number of times in files
191 | numfiles=len(filelist)
192 | numtimes=len(time_in)
193 | 
194 | # figure out number of times for output file
195 | ntimes=numtimes*(numfiles-1)
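# The count above assumes every file except possibly the last contains the
# same number of time samples (numtimes); with a single input file it is 0,
# which netCDF4 treats as an unlimited time dimension when the file is created.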
196 | 197 | # figure out number of times in last file 198 | if (numfiles > 1): 199 | time_in, lev_in, crm_grid_x, crm_grid_y = SCREAM_get_cords(filelist[len(filelist)-1]) 200 | ntimes=ntimes+len(time_in) 201 | 202 | # Are we doing all variables or selected variables? 203 | vartodo,dimarr=find_variables(filelist[0],vartodo) 204 | 205 | numvars=len(vartodo) 206 | 207 | ############################################################# 208 | # Process each variable one at a time 209 | for v in range(0,numvars): 210 | 211 | # Make sure output directory has been created 212 | postpath_dim=check_path(dimarr[v]) 213 | 214 | print('PROCESSING VARIABLE: ',vartodo[v]) 215 | outputfile=postpath_dim+casename+'_'+dimarr[v]+'_'+vartodo[v]+'.nc' 216 | 217 | ishere=os.path.isfile(outputfile) 218 | print('Making output file ',outputfile) 219 | if ishere: 220 | os.system('rm '+outputfile) 221 | f=nc4.Dataset(outputfile,'w',format='NETCDF4') 222 | f.createDimension('x',len(arranged_x_coords)) 223 | f.createDimension('y',len(arranged_y_coords)) 224 | f.createDimension('time',ntimes) 225 | if (dimarr[v] == '3D'): 226 | f.createDimension('lev',len(lev_in)) 227 | 228 | x=f.createVariable('x','f4','x') 229 | y=f.createVariable('y','f4','y') 230 | time=f.createVariable('time','f4','time') 231 | if (dimarr[v] == '2D'): 232 | out_var=f.createVariable(vartodo[v],'f4',('time','y','x')) 233 | if (dimarr[v] == '3D'): 234 | lev=f.createVariable('lev','f4','lev') 235 | out_var=f.createVariable(vartodo[v],'f4',('time','lev','y','x')) 236 | 237 | x[:]=arranged_x_coords 238 | x.units='m' 239 | x.long_name='x coordinate' 240 | 241 | y[:]=arranged_y_coords 242 | y.units='m' 243 | y.long_name='y coordinate' 244 | 245 | if (dimarr[v] == '3D'): 246 | lev[:]=lev_in 247 | lev.units='mb' 248 | lev.long_name='hybrid level at midpoints' 249 | 250 | time.units='days' 251 | time.long_name='time' 252 | 253 | out_var.long_name=vartodo[v] 254 | 255 | # Now loop over each file 256 | ts=0 257 | te=numtimes 258 | for thefile in filelist: 259 | 260 | print("Processing file: ", thefile) 261 | 262 | fi=nc4.Dataset(thefile,mode='r') 263 | time_in=fi.variables['time'][:] 264 | var=fi.variables[vartodo[v]][:] 265 | 266 | var_arranged,dummyx,dummyy=regrid_array(var, crm_grid_x, crm_grid_y) 267 | 268 | te=ts+len(time_in) 269 | time[ts:te]=time_in 270 | if (dimarr[v] == '2D'): 271 | out_var[ts:te,:,:]=var_arranged 272 | if (dimarr[v] == '3D'): 273 | out_var[ts:te,:,:,:]=var_arranged 274 | ts=te 275 | 276 | del(var) 277 | del(var_arranged) 278 | 279 | fi.close() 280 | 281 | f.close() 282 | -------------------------------------------------------------------------------- /DPxx_SCREAM_SCRIPTS/run_dpxx_scream_TRACER.csh: -------------------------------------------------------------------------------- 1 | #!/bin/csh -fe 2 | 3 | ####################################################################### 4 | ####################################################################### 5 | ####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx) 6 | ####### TRACER 7 | ####### Convection during ARM TRACER field campaign 8 | ####### 9 | ####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov) 10 | ####### Forcing provided by: Raymond Oware and Youtong Zheng 11 | ####### 12 | ####### IMPORTANT: 13 | ####### - You should now be using E3SM master. The SCREAM and E3SM repos 14 | ####### have merged and here-on-out all SCREAM development will take place 15 | ####### on the E3SM master. 
16 | ####### 17 | 18 | ####################################################### 19 | ####### BEGIN USER DEFINED SETTINGS 20 | ####### NOTE: beyond this section you will need to configure your 21 | ####### ouput yaml file(s). Please do a search for "yamlpath" and you will 22 | ####### be brought to the correct locations. 23 | ####### See the example yaml file in the DPxx_SCREAM_SCRIPTS/yaml_file_example 24 | ####### of the scmlib repo to get you started. 25 | 26 | # Set the name of your case here 27 | setenv casename scream_dpxx_TRACER 28 | 29 | # Set the case directory here 30 | setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx 31 | 32 | # Directory where code lives 33 | setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes 34 | 35 | # Code tag name 36 | setenv code_tag E3SM_master 37 | 38 | # Name of machine you are running on (i.e. pm-cpu, anvil, etc) 39 | setenv machine pm-cpu 40 | 41 | # Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel"; 42 | # frontier should use "craycray-mphipcc") 43 | # more machine compiler defaults will be added as they are tested/validated. 44 | setenv compiler gnu 45 | 46 | # Name of project to run on, if submitting to queue 47 | setenv projectname e3sm 48 | 49 | # Path where output YAML files are located (i.e. where you specify your output streams) 50 | # See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started. 51 | # NOTE, you will likely need to edit the section of the script where the yaml files 52 | # are appended to your case. Do a search for "yamlpath" to find this location. 53 | setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example 54 | 55 | 56 | # Set to debug queue? 57 | # - Some cases are small enough to run on debug queues 58 | # - Setting to true only supported for NERSC and Livermore Computing, 59 | # else user will need to modify script to submit to debug queue 60 | setenv debug_queue false 61 | 62 | # Set number of processors to use, should be less than or equal 63 | # to the total number of elements in your domain. Note that if you are running 64 | # on pm-gpu you will want to set this to either "4" or "8" if running the standard 65 | # domain size and resolution (RCE excluded). 66 | set num_procs = 384 67 | 68 | # set walltime 69 | set walltime = '05:00:00' 70 | 71 | ## SET DOMAIN SIZE AND DYNAMICS RESOLUTION: 72 | # - Note that these scripts are set to run with dx=dy=3.33 km 73 | # which is the default SCREAM resolution. 74 | 75 | # To estimate dx (analogous for dy): 76 | # dx = domain_size_x / (num_ne_x * 3) 77 | # (there are 3x3 unique dynamics columns per element, hence the "3" factor) 78 | 79 | # Set number of elements in the x&y directions 80 | set num_ne_x = 20 81 | set num_ne_y = 20 82 | 83 | # Set domain length [m] in x&y direction 84 | set domain_size_x = 200000 85 | set domain_size_y = 200000 86 | 87 | # BELOW SETS RESOLUTION DEPENDENT SETTINGS 88 | # (Note that all default values below are appropriate for dx=dy=3.33 km and do not 89 | # need to be modified if you are not changing the resolution) 90 | 91 | # SET MODEL TIME STEPS 92 | # -NOTE that if you change the model resolution, 93 | # it is likely the physics and dynamics time steps will need to be adjusted. 94 | # See below for guidance on how to adjust both. 95 | 96 | # model/physics time step [s]: 97 | # As a rule, a factor of 2 increase in resolution should equate to a factor of 2 98 | # decrease of the model/physics step. This needs to be an integer number. 
99 | set model_dtime = 100 100 | 101 | # dynamics time step [s]: 102 | # should divide evenly into model_dtime. As a general rule of thumb, divide 103 | # model_dtime by 12 to get your dynamics time step. 104 | set dyn_dtime = 8.3333333333333 105 | 106 | # SET SECOND ORDER VISCOSITY NEAR MODEL TOP 107 | # NOTE that if you decrease resolution you will also need to reduce 108 | # the value of "nu_top" (second-order viscosity applied only near model top). 109 | # Rule of thumb is that a factor of 2 increase in resolution should equate to a 110 | # factor of 2 decrease for this value 111 | 112 | # second order visocosity near model top [m2/s] 113 | set nu_top_dyn = 1e4 114 | 115 | ####### END (mandatory) USER DEFINED SETTINGS, but see above about output 116 | ########################################################################### 117 | ########################################################################### 118 | ########################################################################### 119 | 120 | # Case specific information kept here 121 | set lat = 29.75 # latitude 122 | set lon = 264.550 # longitude 123 | set do_iop_srf_prop = true # Use surface fluxes in IOP file? 124 | set do_iop_nudge_tq = false # Relax T&Q to observations? 125 | set do_iop_nudge_uv = true # Relax U&V to observations? 126 | set do_iop_nudge_coriolis = false # Nudge to geostrophic winds? 127 | set do_iop_subsidence = false # compute LS vertical transport? 128 | set startdate = 2022-07-01 # Start date in IOP file 129 | set start_in_sec = 00000 # start time in seconds in IOP file 130 | set stop_option = ndays 131 | set stop_n = 91 132 | set iop_file = TRACER_iopfile_4scam.nc #IOP file name 133 | set do_turnoff_swrad = false # Turn off SW calculation (if false, keep false) 134 | # End Case specific stuff here 135 | 136 | # Location of IOP file 137 | set iop_path = atm/cam/scam/iop 138 | 139 | set PROJECT=$projectname 140 | set E3SMROOT=${code_dir}/${code_tag} 141 | 142 | cd $E3SMROOT/cime/scripts 143 | 144 | set compset=FIOP-SCREAMv1-DP 145 | 146 | # Note that in DP-SCREAM the grid is set ONLY to initialize 147 | # the model from these files 148 | set grid=ne30pg2_ne30pg2 149 | 150 | set CASEID=$casename 151 | 152 | set CASEDIR=${casedirectory}/$CASEID 153 | 154 | set run_root_dir = $CASEDIR 155 | set temp_case_scripts_dir = $run_root_dir/case_scripts 156 | 157 | set case_scripts_dir = $run_root_dir/case_scripts 158 | set case_build_dir = $run_root_dir/build 159 | set case_run_dir = $run_root_dir/run 160 | 161 | # Create new case 162 | ./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler 163 | cd $temp_case_scripts_dir 164 | 165 | ./xmlchange JOB_WALLCLOCK_TIME=$walltime 166 | 167 | ./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`" 168 | 169 | # Define executable and run directories 170 | ./xmlchange --id EXEROOT --val "${case_build_dir}" 171 | ./xmlchange --id RUNDIR --val "${case_run_dir}" 172 | 173 | # Set to debug, only on certain machines 174 | if ($debug_queue == 'true') then 175 | if ($machine =~ 'pm*') then 176 | ./xmlchange --id JOB_QUEUE --val 'debug' 177 | endif 178 | 179 | if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then 180 | ./xmlchange --id JOB_QUEUE --val 'pdebug' 181 | endif 182 | endif 183 | 184 | # need to use single thread 185 | set npes = $num_procs 186 | foreach 
component ( ATM LND ICE OCN CPL GLC ROF WAV ) 187 | ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0 188 | end 189 | 190 | # Compute maximum allowable number for processes (number of elements) 191 | set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y` 192 | 193 | set ELM_CONFIG_OPTS="-phys elm" 194 | ./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS" 195 | 196 | # Modify the run start and duration parameters for the desired case 197 | ./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n" 198 | 199 | # Compute number of columns needed for component model initialization 200 | set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4` 201 | 202 | # Modify the latitude and longitude for the particular case 203 | ./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon" 204 | ./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1 205 | ./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1 206 | ./xmlchange CALENDAR="GREGORIAN" 207 | 208 | 209 | # Set model timesteps 210 | 211 | @ ncpl = 86400 / $model_dtime 212 | ./xmlchange ATM_NCPL=$ncpl 213 | ./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime" 214 | 215 | ./case.setup 216 | 217 | # Get local input data directory path 218 | set input_data_dir = `./xmlquery DIN_LOC_ROOT -value` 219 | 220 | 221 | # Set relevant namelist modifications 222 | ./atmchange se_ne_x=$num_ne_x 223 | ./atmchange se_ne_y=$num_ne_y 224 | ./atmchange se_lx=$domain_size_x 225 | ./atmchange se_ly=$domain_size_y 226 | ./atmchange dt_remap_factor=2 227 | ./atmchange cubed_sphere_map=2 228 | ./atmchange target_latitude=$lat 229 | ./atmchange target_longitude=$lon 230 | ./atmchange iop_file=$input_data_dir/$iop_path/$iop_file 231 | ./atmchange nu=0.216784 232 | ./atmchange nu_top=$nu_top_dyn 233 | ./atmchange se_ftype=2 234 | ./atmchange se_tstep=$dyn_dtime 235 | ./atmchange rad_frequency=3 236 | ./atmchange iop_srf_prop=$do_iop_srf_prop 237 | ./atmchange iop_dosubsidence=$do_iop_subsidence 238 | ./atmchange iop_coriolis=$do_iop_nudge_coriolis 239 | ./atmchange extra_shoc_diags=true 240 | ./atmchange set_cld_frac_r_to_one=true 241 | ./atmchange iop_nudge_uv=$do_iop_nudge_uv 242 | ./atmchange iop_nudge_tq=$do_iop_nudge_tq 243 | 244 | # Allow for the computation of tendencies for output purposes 245 | ./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv 246 | ./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv 247 | ./atmchange physics::rrtmgp::compute_tendencies=T_mid 248 | ./atmchange homme::compute_tendencies=T_mid,qv 249 | ./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv 250 | 251 | # configure yaml output 252 | # See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example 253 | # Note that you can have as many output streams (yaml files) as you want! 254 | cp ${yamlpath}/scream_output_avg_1hour.yaml . 255 | cp ${yamlpath}/scream_horiz_avg_output_15min.yaml . 
256 | ./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
257 | ./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"
258 | 
259 | # avoid the monthly cice file from writing as this
260 | # appears to be currently broken for SCM
261 | cat <<EOF>> user_nl_cice
262 | histfreq='y','x','x','x','x'
263 | EOF
264 | 
265 | # Turn on UofA surface flux scheme
266 | cat <<EOF>> user_nl_cpl
267 | ocn_surface_flux_scheme = 2
268 | EOF
269 | 
270 | if ($do_turnoff_swrad == 'true') then
271 |   set solar_angle = 180 # turns off incoming solar radiation
272 | else
273 |   set solar_angle = -1 # Interactive SW radiation
274 | endif
275 | 
276 | # Note that this call will be disabled for RCE
277 | cat <<EOF>> user_nl_cpl
278 | constant_zenith_deg = $solar_angle
279 | EOF
280 | 
281 | ./case.setup
282 | 
283 | # Write restart files at the end of model simulation
284 | ./xmlchange PIO_TYPENAME="pnetcdf"
285 | 
286 | # Build the case
287 | ./case.build
288 | 
289 | # Submit the case
290 | ./case.submit
291 | 
292 | exit
293 | 
--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_ATEX.csh:
--------------------------------------------------------------------------------
1 | #!/bin/csh -fe
2 | 
3 | #######################################################################
4 | #######################################################################
5 | ####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
6 | #######  ATEX
7 | #######  Cumulus under stratocumulus
8 | #######
9 | ####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
10 | #######
11 | ####### IMPORTANT:
12 | ####### - You should now be using E3SM master. The SCREAM and E3SM repos
13 | #######   have merged and from here on out all SCREAM development will take place
14 | #######   on the E3SM master.
15 | #######
16 | 
17 | #######################################################
18 | ####### BEGIN USER DEFINED SETTINGS
19 | ####### NOTE: beyond this section you will need to configure your
20 | ####### output yaml file(s). Please do a search for "yamlpath" and you will
21 | ####### be brought to the correct locations.
22 | ####### See the example yaml file in the DPxx_SCREAM_SCRIPTS/yaml_file_example
23 | ####### of the scmlib repo to get you started.
24 | 
25 | # Set the name of your case here
26 | setenv casename scream_dpxx_ATEX
27 | 
28 | # Set the case directory here
29 | setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx
30 | 
31 | # Directory where code lives
32 | setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes
33 | 
34 | # Code tag name
35 | setenv code_tag E3SM_master
36 | 
37 | # Name of machine you are running on (i.e. pm-cpu, anvil, etc)
38 | setenv machine pm-cpu
39 | 
40 | # Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
41 | # frontier should use "craycray-mphipcc")
42 | # more machine compiler defaults will be added as they are tested/validated.
43 | setenv compiler gnu
44 | 
45 | # Name of project to run on, if submitting to queue
46 | setenv projectname e3sm
47 | 
48 | # Path where output YAML files are located (i.e. where you specify your output streams)
49 | # See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
50 | # NOTE, you will likely need to edit the section of the script where the yaml files
51 | # are appended to your case. Do a search for "yamlpath" to find this location.
52 | setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example 53 | 54 | 55 | # Set to debug queue? 56 | # - Some cases are small enough to run on debug queues 57 | # - Setting to true only supported for NERSC and Livermore Computing, 58 | # else user will need to modify script to submit to debug queue 59 | setenv debug_queue true 60 | 61 | # Set number of processors to use, should be less than or equal 62 | # to the total number of elements in your domain. Note that if you are running 63 | # on pm-gpu you will want to set this to either "4" or "8" if running the standard 64 | # domain size and resolution (RCE excluded). 65 | set num_procs = 24 66 | 67 | # set walltime 68 | set walltime = '00:30:00' 69 | 70 | ## SET DOMAIN SIZE AND DYNAMICS RESOLUTION: 71 | # - Note that these scripts are set to run with dx=dy=3.33 km 72 | # which is the default SCREAM resolution. 73 | 74 | # To estimate dx (analogous for dy): 75 | # dx = domain_size_x / (num_ne_x * 3) 76 | # (there are 3x3 unique dynamics columns per element, hence the "3" factor) 77 | 78 | # Set number of elements in the x&y directions 79 | set num_ne_x = 5 80 | set num_ne_y = 5 81 | 82 | # Set domain length [m] in x&y direction 83 | set domain_size_x = 50000 84 | set domain_size_y = 50000 85 | 86 | # BELOW SETS RESOLUTION DEPENDENT SETTINGS 87 | # (Note that all default values below are appropriate for dx=dy=3.33 km and do not 88 | # need to be modified if you are not changing the resolution) 89 | 90 | # SET MODEL TIME STEPS 91 | # -NOTE that if you change the model resolution, 92 | # it is likely the physics and dynamics time steps will need to be adjusted. 93 | # See below for guidance on how to adjust both. 94 | 95 | # model/physics time step [s]: 96 | # As a rule, a factor of 2 increase in resolution should equate to a factor of 2 97 | # decrease of the model/physics step. This needs to be an integer number. 98 | set model_dtime = 100 99 | 100 | # dynamics time step [s]: 101 | # should divide evenly into model_dtime. As a general rule of thumb, divide 102 | # model_dtime by 12 to get your dynamics time step. 103 | set dyn_dtime = 8.3333333333333 104 | 105 | # SET SECOND ORDER VISCOSITY NEAR MODEL TOP 106 | # NOTE that if you decrease resolution you will also need to reduce 107 | # the value of "nu_top" (second-order viscosity applied only near model top). 108 | # Rule of thumb is that a factor of 2 increase in resolution should equate to a 109 | # factor of 2 decrease for this value 110 | 111 | # second order visocosity near model top [m2/s] 112 | set nu_top_dyn = 1e4 113 | 114 | ####### END (mandatory) USER DEFINED SETTINGS, but see above about output 115 | ########################################################################### 116 | ########################################################################### 117 | ########################################################################### 118 | 119 | # Case specific information kept here 120 | set lat = 15.0 # latitude 121 | set lon = 325.0 # longitude 122 | set do_iop_srf_prop = true # Use surface fluxes in IOP file? 123 | set do_iop_nudge_tq = false # Relax T&Q to observations? 124 | set do_iop_nudge_uv = false # Relax U&V to observations? 125 | set do_iop_nudge_coriolis = false # Nudge to geostrophic winds? 126 | set do_iop_subsidence = true # compute LS vertical transport? 
127 | set startdate = 1969-02-15 # Start date in IOP file 128 | set start_in_sec = 0 # start time in seconds in IOP file 129 | set stop_option = nhours 130 | set stop_n = 8 131 | set iop_file = ATEX_iopfile_4scam.nc #IOP file name 132 | set do_turnoff_swrad = true # Turn off SW calculation (if false, keep false) 133 | # End Case specific stuff here 134 | 135 | # Location of IOP file 136 | set iop_path = atm/cam/scam/iop 137 | 138 | set PROJECT=$projectname 139 | set E3SMROOT=${code_dir}/${code_tag} 140 | 141 | cd $E3SMROOT/cime/scripts 142 | 143 | set compset=FIOP-SCREAMv1-DP 144 | 145 | # Note that in DP-SCREAM the grid is set ONLY to initialize 146 | # the model from these files 147 | set grid=ne30pg2_ne30pg2 148 | 149 | set CASEID=$casename 150 | 151 | set CASEDIR=${casedirectory}/$CASEID 152 | 153 | set run_root_dir = $CASEDIR 154 | set temp_case_scripts_dir = $run_root_dir/case_scripts 155 | 156 | set case_scripts_dir = $run_root_dir/case_scripts 157 | set case_build_dir = $run_root_dir/build 158 | set case_run_dir = $run_root_dir/run 159 | 160 | # Create new case 161 | ./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler 162 | cd $temp_case_scripts_dir 163 | 164 | ./xmlchange JOB_WALLCLOCK_TIME=$walltime 165 | 166 | ./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`" 167 | 168 | # Define executable and run directories 169 | ./xmlchange --id EXEROOT --val "${case_build_dir}" 170 | ./xmlchange --id RUNDIR --val "${case_run_dir}" 171 | 172 | # Set to debug, only on certain machines 173 | if ($debug_queue == 'true') then 174 | if ($machine =~ 'pm*') then 175 | ./xmlchange --id JOB_QUEUE --val 'debug' 176 | endif 177 | 178 | if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then 179 | ./xmlchange --id JOB_QUEUE --val 'pdebug' 180 | endif 181 | endif 182 | 183 | # need to use single thread 184 | set npes = $num_procs 185 | foreach component ( ATM LND ICE OCN CPL GLC ROF WAV ) 186 | ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0 187 | end 188 | 189 | # Compute maximum allowable number for processes (number of elements) 190 | set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y` 191 | 192 | set ELM_CONFIG_OPTS="-phys elm" 193 | ./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS" 194 | 195 | # Modify the run start and duration parameters for the desired case 196 | ./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n" 197 | 198 | # Compute number of columns needed for component model initialization 199 | set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4` 200 | 201 | # Modify the latitude and longitude for the particular case 202 | ./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon" 203 | ./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1 204 | ./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1 205 | ./xmlchange CALENDAR="GREGORIAN" 206 | 207 | 208 | # Set model timesteps 209 | 210 | @ ncpl = 86400 / $model_dtime 211 | ./xmlchange ATM_NCPL=$ncpl 212 | ./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime" 213 | 214 | ./case.setup 215 | 216 | # Get local input data directory path 217 | set input_data_dir = `./xmlquery DIN_LOC_ROOT -value` 218 | 219 | 220 | # Set relevant namelist modifications 221 | ./atmchange se_ne_x=$num_ne_x 222 | ./atmchange se_ne_y=$num_ne_y 223 | 
./atmchange se_lx=$domain_size_x
224 | ./atmchange se_ly=$domain_size_y
225 | ./atmchange dt_remap_factor=2
226 | ./atmchange cubed_sphere_map=2
227 | ./atmchange target_latitude=$lat
228 | ./atmchange target_longitude=$lon
229 | ./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
230 | ./atmchange nu=0.216784
231 | ./atmchange nu_top=$nu_top_dyn
232 | ./atmchange se_ftype=2
233 | ./atmchange se_tstep=$dyn_dtime
234 | ./atmchange rad_frequency=3
235 | ./atmchange iop_srf_prop=$do_iop_srf_prop
236 | ./atmchange iop_dosubsidence=$do_iop_subsidence
237 | ./atmchange iop_coriolis=$do_iop_nudge_coriolis
238 | ./atmchange extra_shoc_diags=true
239 | ./atmchange set_cld_frac_r_to_one=true
240 | ./atmchange iop_nudge_uv=$do_iop_nudge_uv
241 | ./atmchange iop_nudge_tq=$do_iop_nudge_tq
242 | 
243 | # Set default physics process ordering (NOTE: if you add/remove a process or change ordering,
244 | # all DPxx runs NEED to have "iop_forcing" process in the list somewhere).
245 | ./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp
246 | 
247 | # Allow for the computation of tendencies for output purposes
248 | ./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
249 | ./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
250 | ./atmchange physics::rrtmgp::compute_tendencies=T_mid
251 | ./atmchange homme::compute_tendencies=T_mid,qv
252 | ./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv
253 | 
254 | # configure yaml output
255 | # See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example
256 | # Note that you can have as many output streams (yaml files) as you want!
257 | cp ${yamlpath}/scream_output_avg_1hour.yaml .
258 | cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
259 | ./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
260 | ./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"
261 | 
262 | # avoid the monthly cice file from writing as this
263 | # appears to be currently broken for SCM
264 | cat <<EOF>> user_nl_cice
265 | histfreq='y','x','x','x','x'
266 | EOF
267 | 
268 | # Turn on UofA surface flux scheme
269 | cat <<EOF>> user_nl_cpl
270 | ocn_surface_flux_scheme = 2
271 | EOF
272 | 
273 | if ($do_turnoff_swrad == 'true') then
274 |   set solar_angle = 180 # turns off incoming solar radiation
275 | else
276 |   set solar_angle = -1 # Interactive SW radiation
277 | endif
278 | 
279 | # Note that this call will be disabled for RCE
280 | cat <<EOF>> user_nl_cpl
281 | constant_zenith_deg = $solar_angle
282 | EOF
283 | 
284 | ./case.setup
285 | 
286 | # Write restart files at the end of model simulation
287 | ./xmlchange PIO_TYPENAME="pnetcdf"
288 | 
289 | # Build the case
290 | ./case.build
291 | 
292 | # Submit the case
293 | ./case.submit
294 | 
295 | exit
296 | 
--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_GABLS.csh:
--------------------------------------------------------------------------------
1 | #!/bin/csh -fe
2 | 
3 | #######################################################################
4 | #######################################################################
5 | ####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
6 | #######  GABLS
7 | #######  Stable boundary layer
8 | #######
9 | ####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
10 | #######
11 | ####### IMPORTANT:
12 | ####### - You should now be using E3SM master.
The SCREAM and E3SM repos 13 | ####### have merged and here-on-out all SCREAM development will take place 14 | ####### on the E3SM master. 15 | ####### 16 | 17 | ####################################################### 18 | ####### BEGIN USER DEFINED SETTINGS 19 | ####### NOTE: beyond this section you will need to configure your 20 | ####### ouput yaml file(s). Please do a search for "yamlpath" and you will 21 | ####### be brought to the correct locations. 22 | ####### See the example yaml file in the DPxx_SCREAM_SCRIPTS/yaml_file_example 23 | ####### of the scmlib repo to get you started. 24 | 25 | # Set the name of your case here 26 | setenv casename scream_dpxx_GABLS 27 | 28 | # Set the case directory here 29 | setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx 30 | 31 | # Directory where code lives 32 | setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes 33 | 34 | # Code tag name 35 | setenv code_tag E3SM_master 36 | 37 | # Name of machine you are running on (i.e. pm-cpu, anvil, etc) 38 | setenv machine pm-cpu 39 | 40 | # Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel"; 41 | # frontier should use "craycray-mphipcc") 42 | # more machine compiler defaults will be added as they are tested/validated. 43 | setenv compiler gnu 44 | 45 | # Name of project to run on, if submitting to queue 46 | setenv projectname e3sm 47 | 48 | # Path where output YAML files are located (i.e. where you specify your output streams) 49 | # See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started. 50 | # NOTE, you will likely need to edit the section of the script where the yaml files 51 | # are appended to your case. Do a search for "yamlpath" to find this location. 52 | setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example 53 | 54 | 55 | # Set to debug queue? 56 | # - Some cases are small enough to run on debug queues 57 | # - Setting to true only supported for NERSC and Livermore Computing, 58 | # else user will need to modify script to submit to debug queue 59 | setenv debug_queue true 60 | 61 | # Set number of processors to use, should be less than or equal 62 | # to the total number of elements in your domain. Note that if you are running 63 | # on pm-gpu you will want to set this to either "4" or "8" if running the standard 64 | # domain size and resolution (RCE excluded). 65 | set num_procs = 24 66 | 67 | # set walltime 68 | set walltime = '00:30:00' 69 | 70 | ## SET DOMAIN SIZE AND DYNAMICS RESOLUTION: 71 | # - Note that these scripts are set to run with dx=dy=3.33 km 72 | # which is the default SCREAM resolution. 73 | 74 | # To estimate dx (analogous for dy): 75 | # dx = domain_size_x / (num_ne_x * 3) 76 | # (there are 3x3 unique dynamics columns per element, hence the "3" factor) 77 | 78 | # Set number of elements in the x&y directions 79 | set num_ne_x = 5 80 | set num_ne_y = 5 81 | 82 | # Set domain length [m] in x&y direction 83 | set domain_size_x = 50000 84 | set domain_size_y = 50000 85 | 86 | # BELOW SETS RESOLUTION DEPENDENT SETTINGS 87 | # (Note that all default values below are appropriate for dx=dy=3.33 km and do not 88 | # need to be modified if you are not changing the resolution) 89 | 90 | # SET MODEL TIME STEPS 91 | # -NOTE that if you change the model resolution, 92 | # it is likely the physics and dynamics time steps will need to be adjusted. 93 | # See below for guidance on how to adjust both. 
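As a rough illustration of the scaling guidance above, the short Python helper below (a hypothetical sketch, not part of these scripts; the function name and defaults are assumptions) applies the stated rules of thumb: the physics step scales linearly with grid spacing from the 3.33 km default of 100 s, the dynamics step is roughly model_dtime/12, and nu_top halves for each factor-of-2 refinement.

```python
# Hypothetical helper illustrating the resolution-scaling rules of thumb
# described in the script comments; not part of the run scripts themselves.
def scale_for_resolution(dx_m, ref_dx_m=3333.0, ref_dtime_s=100, ref_nu_top=1.0e4):
    factor = dx_m / ref_dx_m
    model_dtime = int(round(ref_dtime_s * factor))   # physics step, integer seconds
    # The scripts set ATM_NCPL = 86400 / model_dtime, so the step should divide a day
    assert model_dtime > 0 and 86400 % model_dtime == 0, "pick a step that divides 86400 s"
    dyn_dtime = model_dtime / 12.0                   # ~12 dynamics substeps per physics step
    nu_top = ref_nu_top * factor                     # 2x finer grid -> nu_top / 2
    return model_dtime, dyn_dtime, nu_top

# Doubling resolution (dx ~ 1.67 km) suggests dtime = 50 s, se_tstep ~ 4.17 s, nu_top = 5e3
print(scale_for_resolution(3333.0 / 2))
```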
94 | 95 | # model/physics time step [s]: 96 | # As a rule, a factor of 2 increase in resolution should equate to a factor of 2 97 | # decrease of the model/physics step. This needs to be an integer number. 98 | set model_dtime = 100 99 | 100 | # dynamics time step [s]: 101 | # should divide evenly into model_dtime. As a general rule of thumb, divide 102 | # model_dtime by 12 to get your dynamics time step. 103 | set dyn_dtime = 8.3333333333333 104 | 105 | # SET SECOND ORDER VISCOSITY NEAR MODEL TOP 106 | # NOTE that if you decrease resolution you will also need to reduce 107 | # the value of "nu_top" (second-order viscosity applied only near model top). 108 | # Rule of thumb is that a factor of 2 increase in resolution should equate to a 109 | # factor of 2 decrease for this value 110 | 111 | # second order visocosity near model top [m2/s] 112 | set nu_top_dyn = 1e4 113 | 114 | ####### END (mandatory) USER DEFINED SETTINGS, but see above about output 115 | ########################################################################### 116 | ########################################################################### 117 | ########################################################################### 118 | 119 | # Case specific information kept here 120 | set lat = 73.0 # latitude 121 | set lon = 180.0 # longitude 122 | set do_iop_srf_prop = true # Use surface fluxes in IOP file? 123 | set do_iop_nudge_tq = false # Relax T&Q to observations? 124 | set do_iop_nudge_uv = false # Relax U&V to observations? 125 | set do_iop_nudge_coriolis = false # Nudge to geostrophic winds? 126 | set do_iop_subsidence = false # compute LS vertical transport? 127 | set startdate = 1999-07-01 # Start date in IOP file 128 | set start_in_sec = 0 # start time in seconds in IOP file 129 | set stop_option = nhours 130 | set stop_n = 9 131 | set iop_file = GABLS_iopfile_4scam.nc #IOP file name 132 | set do_turnoff_swrad = false # Turn off SW calculation (if false, keep false) 133 | # End Case specific stuff here 134 | 135 | # Location of IOP file 136 | set iop_path = atm/cam/scam/iop 137 | 138 | set PROJECT=$projectname 139 | set E3SMROOT=${code_dir}/${code_tag} 140 | 141 | cd $E3SMROOT/cime/scripts 142 | 143 | set compset=FIOP-SCREAMv1-DP 144 | 145 | # Note that in DP-SCREAM the grid is set ONLY to initialize 146 | # the model from these files 147 | set grid=ne30pg2_ne30pg2 148 | 149 | set CASEID=$casename 150 | 151 | set CASEDIR=${casedirectory}/$CASEID 152 | 153 | set run_root_dir = $CASEDIR 154 | set temp_case_scripts_dir = $run_root_dir/case_scripts 155 | 156 | set case_scripts_dir = $run_root_dir/case_scripts 157 | set case_build_dir = $run_root_dir/build 158 | set case_run_dir = $run_root_dir/run 159 | 160 | # Create new case 161 | ./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler 162 | cd $temp_case_scripts_dir 163 | 164 | ./xmlchange JOB_WALLCLOCK_TIME=$walltime 165 | 166 | ./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`" 167 | 168 | # Define executable and run directories 169 | ./xmlchange --id EXEROOT --val "${case_build_dir}" 170 | ./xmlchange --id RUNDIR --val "${case_run_dir}" 171 | 172 | # Set to debug, only on certain machines 173 | if ($debug_queue == 'true') then 174 | if ($machine =~ 'pm*') then 175 | ./xmlchange --id JOB_QUEUE --val 'debug' 176 | endif 177 | 178 | if ($machine == 'quartz' || $machine == 
'syrah' || $machine == 'ruby') then 179 | ./xmlchange --id JOB_QUEUE --val 'pdebug' 180 | endif 181 | endif 182 | 183 | # need to use single thread 184 | set npes = $num_procs 185 | foreach component ( ATM LND ICE OCN CPL GLC ROF WAV ) 186 | ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0 187 | end 188 | 189 | # Compute maximum allowable number for processes (number of elements) 190 | set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y` 191 | 192 | set ELM_CONFIG_OPTS="-phys elm" 193 | ./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS" 194 | 195 | # Modify the run start and duration parameters for the desired case 196 | ./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n" 197 | 198 | # Compute number of columns needed for component model initialization 199 | set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4` 200 | 201 | # Modify the latitude and longitude for the particular case 202 | ./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon" 203 | ./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1 204 | ./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1 205 | ./xmlchange CALENDAR="GREGORIAN" 206 | 207 | 208 | # Set model timesteps 209 | 210 | @ ncpl = 86400 / $model_dtime 211 | ./xmlchange ATM_NCPL=$ncpl 212 | ./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime" 213 | 214 | ./case.setup 215 | 216 | # Get local input data directory path 217 | set input_data_dir = `./xmlquery DIN_LOC_ROOT -value` 218 | 219 | 220 | # Set relevant namelist modifications 221 | ./atmchange se_ne_x=$num_ne_x 222 | ./atmchange se_ne_y=$num_ne_y 223 | ./atmchange se_lx=$domain_size_x 224 | ./atmchange se_ly=$domain_size_y 225 | ./atmchange dt_remap_factor=2 226 | ./atmchange cubed_sphere_map=2 227 | ./atmchange target_latitude=$lat 228 | ./atmchange target_longitude=$lon 229 | ./atmchange iop_file=$input_data_dir/$iop_path/$iop_file 230 | ./atmchange nu=0.216784 231 | ./atmchange nu_top=$nu_top_dyn 232 | ./atmchange se_ftype=2 233 | ./atmchange se_tstep=$dyn_dtime 234 | ./atmchange rad_frequency=3 235 | ./atmchange iop_srf_prop=$do_iop_srf_prop 236 | ./atmchange iop_dosubsidence=$do_iop_subsidence 237 | ./atmchange iop_coriolis=$do_iop_nudge_coriolis 238 | ./atmchange extra_shoc_diags=true 239 | ./atmchange set_cld_frac_r_to_one=true 240 | ./atmchange iop_nudge_uv=$do_iop_nudge_uv 241 | ./atmchange iop_nudge_tq=$do_iop_nudge_tq 242 | 243 | # Set default physics process ordering (NOTE: if you add/remove a process or change ordering, 244 | # all DPxx runs NEED to have "iop_forcing" process in the list somewhere). 245 | ./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp 246 | 247 | # Allow for the computation of tendencies for output purposes 248 | ./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv 249 | ./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv 250 | ./atmchange physics::rrtmgp::compute_tendencies=T_mid 251 | ./atmchange homme::compute_tendencies=T_mid,qv 252 | ./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv 253 | 254 | # configure yaml output 255 | # See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example 256 | # Note that you can have as many output streams (yaml files) as you want! 257 | cp ${yamlpath}/scream_output_avg_1hour.yaml . 258 | cp ${yamlpath}/scream_horiz_avg_output_15min.yaml . 
259 | ./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
260 | ./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"
261 | 
262 | # avoid the monthly cice file from writing as this
263 | # appears to be currently broken for SCM
264 | cat <<EOF>> user_nl_cice
265 | histfreq='y','x','x','x','x'
266 | EOF
267 | 
268 | # Turn on UofA surface flux scheme
269 | cat <<EOF>> user_nl_cpl
270 | ocn_surface_flux_scheme = 2
271 | EOF
272 | 
273 | if ($do_turnoff_swrad == 'true') then
274 |   set solar_angle = 180 # turns off incoming solar radiation
275 | else
276 |   set solar_angle = -1 # Interactive SW radiation
277 | endif
278 | 
279 | # Note that this call will be disabled for RCE
280 | cat <<EOF>> user_nl_cpl
281 | constant_zenith_deg = $solar_angle
282 | EOF
283 | 
284 | ./case.setup
285 | 
286 | # Write restart files at the end of model simulation
287 | ./xmlchange PIO_TYPENAME="pnetcdf"
288 | 
289 | # Build the case
290 | ./case.build
291 | 
292 | # Submit the case
293 | ./case.submit
294 | 
295 | exit
296 | 
--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_GATEIII.csh:
--------------------------------------------------------------------------------
1 | #!/bin/csh -fe
2 | 
3 | #######################################################################
4 | #######################################################################
5 | ####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
6 | #######  GATEIII
7 | #######  Maritime deep convection
8 | #######
9 | ####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
10 | #######
11 | ####### IMPORTANT:
12 | ####### - You should now be using E3SM master. The SCREAM and E3SM repos
13 | #######   have merged and from here on out all SCREAM development will take place
14 | #######   on the E3SM master.
15 | #######
16 | 
17 | #######################################################
18 | ####### BEGIN USER DEFINED SETTINGS
19 | ####### NOTE: beyond this section you will need to configure your
20 | ####### output yaml file(s). Please do a search for "yamlpath" and you will
21 | ####### be brought to the correct locations.
22 | ####### See the example yaml file in the DPxx_SCREAM_SCRIPTS/yaml_file_example
23 | ####### of the scmlib repo to get you started.
24 | 
25 | # Set the name of your case here
26 | setenv casename scream_dpxx_GATEIII
27 | 
28 | # Set the case directory here
29 | setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx
30 | 
31 | # Directory where code lives
32 | setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes
33 | 
34 | # Code tag name
35 | setenv code_tag E3SM_master
36 | 
37 | # Name of machine you are running on (i.e. pm-cpu, anvil, etc)
38 | setenv machine pm-cpu
39 | 
40 | # Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
41 | # frontier should use "craycray-mphipcc")
42 | # more machine compiler defaults will be added as they are tested/validated.
43 | setenv compiler gnu
44 | 
45 | # Name of project to run on, if submitting to queue
46 | setenv projectname e3sm
47 | 
48 | # Path where output YAML files are located (i.e. where you specify your output streams)
49 | # See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
50 | # NOTE, you will likely need to edit the section of the script where the yaml files
51 | # are appended to your case. Do a search for "yamlpath" to find this location.
52 | setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example 53 | 54 | 55 | # Set to debug queue? 56 | # - Some cases are small enough to run on debug queues 57 | # - Setting to true only supported for NERSC and Livermore Computing, 58 | # else user will need to modify script to submit to debug queue 59 | setenv debug_queue false 60 | 61 | # Set number of processors to use, should be less than or equal 62 | # to the total number of elements in your domain. Note that if you are running 63 | # on pm-gpu you will want to set this to either "4" or "8" if running the standard 64 | # domain size and resolution (RCE excluded). 65 | set num_procs = 384 66 | 67 | # set walltime 68 | set walltime = '04:00:00' 69 | 70 | ## SET DOMAIN SIZE AND DYNAMICS RESOLUTION: 71 | # - Note that these scripts are set to run with dx=dy=3.33 km 72 | # which is the default SCREAM resolution. 73 | 74 | # To estimate dx (analogous for dy): 75 | # dx = domain_size_x / (num_ne_x * 3) 76 | # (there are 3x3 unique dynamics columns per element, hence the "3" factor) 77 | 78 | # Set number of elements in the x&y directions 79 | set num_ne_x = 20 80 | set num_ne_y = 20 81 | 82 | # Set domain length [m] in x&y direction 83 | set domain_size_x = 200000 84 | set domain_size_y = 200000 85 | 86 | # BELOW SETS RESOLUTION DEPENDENT SETTINGS 87 | # (Note that all default values below are appropriate for dx=dy=3.33 km and do not 88 | # need to be modified if you are not changing the resolution) 89 | 90 | # SET MODEL TIME STEPS 91 | # -NOTE that if you change the model resolution, 92 | # it is likely the physics and dynamics time steps will need to be adjusted. 93 | # See below for guidance on how to adjust both. 94 | 95 | # model/physics time step [s]: 96 | # As a rule, a factor of 2 increase in resolution should equate to a factor of 2 97 | # decrease of the model/physics step. This needs to be an integer number. 98 | set model_dtime = 100 99 | 100 | # dynamics time step [s]: 101 | # should divide evenly into model_dtime. As a general rule of thumb, divide 102 | # model_dtime by 12 to get your dynamics time step. 103 | set dyn_dtime = 8.3333333333333 104 | 105 | # SET SECOND ORDER VISCOSITY NEAR MODEL TOP 106 | # NOTE that if you decrease resolution you will also need to reduce 107 | # the value of "nu_top" (second-order viscosity applied only near model top). 108 | # Rule of thumb is that a factor of 2 increase in resolution should equate to a 109 | # factor of 2 decrease for this value 110 | 111 | # second order visocosity near model top [m2/s] 112 | set nu_top_dyn = 1e4 113 | 114 | ####### END (mandatory) USER DEFINED SETTINGS, but see above about output 115 | ########################################################################### 116 | ########################################################################### 117 | ########################################################################### 118 | 119 | # Case specific information kept here 120 | set lat = 9.00 # latitude 121 | set lon = 336.0 # longitude 122 | set do_iop_srf_prop = false # Use surface fluxes in IOP file? 123 | set do_iop_nudge_tq = false # Relax T&Q to observations? 124 | set do_iop_nudge_uv = true # Relax U&V to observations? 125 | set do_iop_nudge_coriolis = false # Nudge to geostrophic winds? 126 | set do_iop_subsidence = true # compute LS vertical transport? 
127 | set startdate = 1974-08-30 # Start date in IOP file 128 | set start_in_sec = 0 # start time in seconds in IOP file 129 | set stop_option = ndays 130 | set stop_n = 20 131 | set iop_file = GATEIII_iopfile_4scam.nc #IOP file name 132 | set do_turnoff_swrad = false # Turn off SW calculation (if false, keep false) 133 | # End Case specific stuff here 134 | 135 | # Location of IOP file 136 | set iop_path = atm/cam/scam/iop 137 | 138 | set PROJECT=$projectname 139 | set E3SMROOT=${code_dir}/${code_tag} 140 | 141 | cd $E3SMROOT/cime/scripts 142 | 143 | set compset=FIOP-SCREAMv1-DP 144 | 145 | # Note that in DP-SCREAM the grid is set ONLY to initialize 146 | # the model from these files 147 | set grid=ne30pg2_ne30pg2 148 | 149 | set CASEID=$casename 150 | 151 | set CASEDIR=${casedirectory}/$CASEID 152 | 153 | set run_root_dir = $CASEDIR 154 | set temp_case_scripts_dir = $run_root_dir/case_scripts 155 | 156 | set case_scripts_dir = $run_root_dir/case_scripts 157 | set case_build_dir = $run_root_dir/build 158 | set case_run_dir = $run_root_dir/run 159 | 160 | # Create new case 161 | ./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler 162 | cd $temp_case_scripts_dir 163 | 164 | ./xmlchange JOB_WALLCLOCK_TIME=$walltime 165 | 166 | ./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`" 167 | 168 | # Define executable and run directories 169 | ./xmlchange --id EXEROOT --val "${case_build_dir}" 170 | ./xmlchange --id RUNDIR --val "${case_run_dir}" 171 | 172 | # Set to debug, only on certain machines 173 | if ($debug_queue == 'true') then 174 | if ($machine =~ 'pm*') then 175 | ./xmlchange --id JOB_QUEUE --val 'debug' 176 | endif 177 | 178 | if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then 179 | ./xmlchange --id JOB_QUEUE --val 'pdebug' 180 | endif 181 | endif 182 | 183 | # need to use single thread 184 | set npes = $num_procs 185 | foreach component ( ATM LND ICE OCN CPL GLC ROF WAV ) 186 | ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0 187 | end 188 | 189 | # Compute maximum allowable number for processes (number of elements) 190 | set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y` 191 | 192 | set ELM_CONFIG_OPTS="-phys elm" 193 | ./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS" 194 | 195 | # Modify the run start and duration parameters for the desired case 196 | ./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n" 197 | 198 | # Compute number of columns needed for component model initialization 199 | set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4` 200 | 201 | # Modify the latitude and longitude for the particular case 202 | ./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon" 203 | ./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1 204 | ./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1 205 | ./xmlchange CALENDAR="GREGORIAN" 206 | 207 | 208 | # Set model timesteps 209 | 210 | @ ncpl = 86400 / $model_dtime 211 | ./xmlchange ATM_NCPL=$ncpl 212 | ./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime" 213 | 214 | ./case.setup 215 | 216 | # Get local input data directory path 217 | set input_data_dir = `./xmlquery DIN_LOC_ROOT -value` 218 | 219 | 220 | # Set relevant namelist modifications 221 | ./atmchange se_ne_x=$num_ne_x 222 | ./atmchange se_ne_y=$num_ne_y 
./atmchange se_lx=$domain_size_x
./atmchange se_ly=$domain_size_y
./atmchange dt_remap_factor=2
./atmchange cubed_sphere_map=2
./atmchange target_latitude=$lat
./atmchange target_longitude=$lon
./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
./atmchange nu=0.216784
./atmchange nu_top=$nu_top_dyn
./atmchange se_ftype=2
./atmchange se_tstep=$dyn_dtime
./atmchange rad_frequency=3
./atmchange iop_srf_prop=$do_iop_srf_prop
./atmchange iop_dosubsidence=$do_iop_subsidence
./atmchange iop_coriolis=$do_iop_nudge_coriolis
./atmchange extra_shoc_diags=true
./atmchange set_cld_frac_r_to_one=true
./atmchange iop_nudge_uv=$do_iop_nudge_uv
./atmchange iop_nudge_tq=$do_iop_nudge_tq

# Set default physics process ordering (NOTE: if you add/remove a process or change
# the ordering, all DPxx runs NEED to have the "iop_forcing" process in the list somewhere).
./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp

# Allow for the computation of tendencies for output purposes
./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
./atmchange physics::rrtmgp::compute_tendencies=T_mid
./atmchange homme::compute_tendencies=T_mid,qv
./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv

# Configure yaml output
# See the example yaml files in DPxx_SCREAM_SCRIPTS/yaml_file_example.
# Note that you can have as many output streams (yaml files) as you want!
cp ${yamlpath}/scream_output_avg_1hour.yaml .
cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"

# Prevent the monthly cice history file from being written, as this
# appears to be currently broken for SCM
cat <<EOF >> user_nl_cice
histfreq='y','x','x','x','x'
EOF

# Turn on UofA surface flux scheme
cat <<EOF >> user_nl_cpl
ocn_surface_flux_scheme = 2
EOF

if ($do_turnoff_swrad == 'true') then
  set solar_angle = 180  # turns off incoming solar radiation
else
  set solar_angle = -1   # Interactive SW radiation
endif

# Note that this setting will be disabled for RCE
cat <<EOF >> user_nl_cpl
constant_zenith_deg = $solar_angle
EOF

./case.setup

# Use pnetcdf for model I/O
./xmlchange PIO_TYPENAME="pnetcdf"

# Build the case
./case.build

# Submit the case
./case.submit

exit

--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_TOGAII.csh:
--------------------------------------------------------------------------------
#!/bin/csh -fe

#######################################################################
#######################################################################
####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
####### TOGAII
####### TOGA-COARE deep convection
#######
####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
#######
####### IMPORTANT:
####### - You should now be using E3SM master. The SCREAM and E3SM repos
#######   have merged, and from here on out all SCREAM development will take place
#######   on the E3SM master branch.
#######

#######################################################
####### BEGIN USER DEFINED SETTINGS
####### NOTE: beyond this section you will need to configure your
####### output yaml file(s). Please do a search for "yamlpath" and you will
####### be brought to the correct locations.
####### See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example
####### directory of the scmlib repo to get you started.

# Set the name of your case here
setenv casename scream_dpxx_TOGAII

# Set the case directory here
setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx

# Directory where code lives
setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes

# Code tag name
setenv code_tag E3SM_master

# Name of machine you are running on (e.g. pm-cpu, anvil, etc.)
setenv machine pm-cpu

# Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
# frontier should use "craycray-mphipcc")
# More machine compiler defaults will be added as they are tested/validated.
setenv compiler gnu

# Name of project to run on, if submitting to queue
setenv projectname e3sm

# Path where output YAML files are located (i.e. where you specify your output streams)
# See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
# NOTE: you will likely need to edit the section of the script where the yaml files
# are appended to your case. Do a search for "yamlpath" to find this location.
setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example


# Set to debug queue?
# - Some cases are small enough to run on debug queues
# - Setting to true is only supported for NERSC and Livermore Computing;
#   otherwise the user will need to modify this script to submit to a debug queue
setenv debug_queue false

# Set the number of processors to use; this should be less than or equal
# to the total number of elements in your domain. Note that if you are running
# on pm-gpu you will want to set this to either "4" or "8" if running the standard
# domain size and resolution (RCE excluded).
set num_procs = 384

# Set walltime
set walltime = '05:00:00'

## SET DOMAIN SIZE AND DYNAMICS RESOLUTION:
# - Note that these scripts are set up to run with dx=dy=3.33 km,
#   which is the default SCREAM resolution.

# To estimate dx (analogous for dy):
#   dx = domain_size_x / (num_ne_x * 3)
# (there are 3x3 unique dynamics columns per element, hence the "3" factor)

# Set number of elements in the x&y directions
set num_ne_x = 20
set num_ne_y = 20

# Set domain length [m] in x&y direction
set domain_size_x = 200000
set domain_size_y = 200000

# BELOW SETS RESOLUTION DEPENDENT SETTINGS
# (Note that all default values below are appropriate for dx=dy=3.33 km and do not
# need to be modified if you are not changing the resolution)

# SET MODEL TIME STEPS
# - NOTE that if you change the model resolution,
#   it is likely the physics and dynamics time steps will need to be adjusted.
#   See below for guidance, and the worked example that follows, on how to adjust both.
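# Worked example of the rules of thumb below (a sketch that simply follows the
# stated scaling rules, not a tested configuration): refining to dx=dy=1.67 km,
# i.e. doubling num_ne_x and num_ne_y for the same domain size, would suggest:
#   set model_dtime = 50               # halve the 100 s default; must stay an integer
#   set dyn_dtime   = 4.1666666666667  # model_dtime / 12
#   set nu_top_dyn  = 5e3              # halve nu_top (see the note further below)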

# model/physics time step [s]:
# As a rule, a factor of 2 increase in resolution should equate to a factor of 2
# decrease of the model/physics step. This needs to be an integer.
set model_dtime = 100

# dynamics time step [s]:
# Should divide evenly into model_dtime. As a general rule of thumb, divide
# model_dtime by 12 to get your dynamics time step.
set dyn_dtime = 8.3333333333333

# SET SECOND ORDER VISCOSITY NEAR MODEL TOP
# NOTE that if you decrease the resolution you will also need to reduce
# the value of "nu_top" (second-order viscosity applied only near the model top).
# Rule of thumb is that a factor of 2 increase in resolution should equate to a
# factor of 2 decrease for this value.

# second-order viscosity near model top [m2/s]
set nu_top_dyn = 1e4

####### END (mandatory) USER DEFINED SETTINGS, but see above about output
###########################################################################
###########################################################################
###########################################################################

# Case specific information kept here
set lat = -2.10                    # latitude
set lon = 154.69                   # longitude
set do_iop_srf_prop = true         # Use surface fluxes in IOP file?
set do_iop_nudge_tq = false        # Relax T&Q to observations?
set do_iop_nudge_uv = true         # Relax U&V to observations?
set do_iop_nudge_coriolis = false  # Nudge to geostrophic winds?
set do_iop_subsidence = false      # Compute LS vertical transport?
set startdate = 1992-12-18    # Start date in IOP file
set start_in_sec = 64800      # Start time in seconds in IOP file
set stop_option = ndays
set stop_n = 20
set iop_file = TOGAII_iopfile_4scam.nc  # IOP file name
set do_turnoff_swrad = false  # Turn off SW calculation? (where false, keep it false)
# End case specific information

# Location of IOP file
set iop_path = atm/cam/scam/iop

set PROJECT=$projectname
set E3SMROOT=${code_dir}/${code_tag}

cd $E3SMROOT/cime/scripts

set compset=FIOP-SCREAMv1-DP

# Note that in DP-SCREAM the grid is used ONLY to initialize
# the model from these files
set grid=ne30pg2_ne30pg2

set CASEID=$casename

set CASEDIR=${casedirectory}/$CASEID

set run_root_dir = $CASEDIR
set temp_case_scripts_dir = $run_root_dir/case_scripts

set case_scripts_dir = $run_root_dir/case_scripts
set case_build_dir = $run_root_dir/build
set case_run_dir = $run_root_dir/run

# Create new case
./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler
cd $temp_case_scripts_dir

./xmlchange JOB_WALLCLOCK_TIME=$walltime

./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`"

# Define executable and run directories
./xmlchange --id EXEROOT --val "${case_build_dir}"
./xmlchange --id RUNDIR --val "${case_run_dir}"

# Submit to the debug queue, only supported on certain machines
if ($debug_queue == 'true') then
  if ($machine =~ 'pm*') then
    ./xmlchange --id JOB_QUEUE --val 'debug'
  endif

  if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then
    ./xmlchange --id JOB_QUEUE --val 'pdebug'
  endif
endif

# Need to use a single thread per task
set npes = $num_procs
foreach component ( ATM LND ICE OCN CPL GLC ROF WAV )
  ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0
end

# Compute the maximum allowable number of processes (the number of elements)
set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y`

set ELM_CONFIG_OPTS="-phys elm"
./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS"

# Modify the run start and duration parameters for the desired case
./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n"

# Compute number of columns needed for component model initialization
set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4`

# Modify the latitude and longitude for the particular case
./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon"
./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1
./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1
./xmlchange CALENDAR="GREGORIAN"


# Set model timesteps

@ ncpl = 86400 / $model_dtime
./xmlchange ATM_NCPL=$ncpl
./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime"

./case.setup

# Get local input data directory path
set input_data_dir = `./xmlquery DIN_LOC_ROOT -value`


# Set relevant namelist modifications
./atmchange se_ne_x=$num_ne_x
./atmchange se_ne_y=$num_ne_y
./atmchange se_lx=$domain_size_x
./atmchange se_ly=$domain_size_y
./atmchange dt_remap_factor=2
./atmchange cubed_sphere_map=2
./atmchange target_latitude=$lat
./atmchange target_longitude=$lon
./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
./atmchange nu=0.216784
./atmchange nu_top=$nu_top_dyn
./atmchange se_ftype=2
./atmchange se_tstep=$dyn_dtime
./atmchange rad_frequency=3
./atmchange iop_srf_prop=$do_iop_srf_prop
./atmchange iop_dosubsidence=$do_iop_subsidence
./atmchange iop_coriolis=$do_iop_nudge_coriolis
./atmchange extra_shoc_diags=true
./atmchange set_cld_frac_r_to_one=true
./atmchange iop_nudge_uv=$do_iop_nudge_uv
./atmchange iop_nudge_tq=$do_iop_nudge_tq

# Set default physics process ordering (NOTE: if you add/remove a process or change
# the ordering, all DPxx runs NEED to have the "iop_forcing" process in the list somewhere).
./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp

# Allow for the computation of tendencies for output purposes
./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
./atmchange physics::rrtmgp::compute_tendencies=T_mid
./atmchange homme::compute_tendencies=T_mid,qv
./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv

# Configure yaml output
# See the example yaml files in DPxx_SCREAM_SCRIPTS/yaml_file_example.
# Note that you can have as many output streams (yaml files) as you want!
cp ${yamlpath}/scream_output_avg_1hour.yaml .
cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"

# Prevent the monthly cice history file from being written, as this
# appears to be currently broken for SCM
cat <<EOF >> user_nl_cice
histfreq='y','x','x','x','x'
EOF

# Turn on UofA surface flux scheme
cat <<EOF >> user_nl_cpl
ocn_surface_flux_scheme = 2
EOF

if ($do_turnoff_swrad == 'true') then
  set solar_angle = 180  # turns off incoming solar radiation
else
  set solar_angle = -1   # Interactive SW radiation
endif

# Note that this setting will be disabled for RCE
cat <<EOF >> user_nl_cpl
constant_zenith_deg = $solar_angle
EOF

./case.setup

# Use pnetcdf for model I/O
./xmlchange PIO_TYPENAME="pnetcdf"

# Build the case
./case.build

# Submit the case
./case.submit

exit

--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_ARM95.csh:
--------------------------------------------------------------------------------
#!/bin/csh -fe

#######################################################################
#######################################################################
####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
####### ARM95
####### Deep convection over the ARM SGP site
#######
####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
#######
####### IMPORTANT:
####### - You should now be using E3SM master. The SCREAM and E3SM repos
#######   have merged, and from here on out all SCREAM development will take place
#######   on the E3SM master branch.
#######

#######################################################
####### BEGIN USER DEFINED SETTINGS
####### NOTE: beyond this section you will need to configure your
####### output yaml file(s). Please do a search for "yamlpath" and you will
####### be brought to the correct locations.
####### See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example
####### directory of the scmlib repo to get you started.

# Set the name of your case here
setenv casename scream_dpxx_ARM95

# Set the case directory here
setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx

# Directory where code lives
setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes

# Code tag name
setenv code_tag E3SM_master

# Name of machine you are running on (e.g. pm-cpu, anvil, etc.)
setenv machine pm-cpu

# Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
# frontier should use "craycray-mphipcc")
# More machine compiler defaults will be added as they are tested/validated.
setenv compiler gnu

# Name of project to run on, if submitting to queue
setenv projectname e3sm

# Path where output YAML files are located (i.e. where you specify your output streams)
# See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
# NOTE: you will likely need to edit the section of the script where the yaml files
# are appended to your case. Do a search for "yamlpath" to find this location.
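# For example, if you keep your own clone of scmlib under your home directory,
# a hypothetical setting would be:
#   setenv yamlpath $HOME/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example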
setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example


# Set to debug queue?
# - Some cases are small enough to run on debug queues
# - Setting to true is only supported for NERSC and Livermore Computing;
#   otherwise the user will need to modify this script to submit to a debug queue
setenv debug_queue false

# Set the number of processors to use; this should be less than or equal
# to the total number of elements in your domain. Note that if you are running
# on pm-gpu you will want to set this to either "4" or "8" if running the standard
# domain size and resolution (RCE excluded).
set num_procs = 384

# Set walltime
set walltime = '05:00:00'

## SET DOMAIN SIZE AND DYNAMICS RESOLUTION:
# - Note that these scripts are set up to run with dx=dy=3.33 km,
#   which is the default SCREAM resolution.

# To estimate dx (analogous for dy):
#   dx = domain_size_x / (num_ne_x * 3)
# (there are 3x3 unique dynamics columns per element, hence the "3" factor)

# Set number of elements in the x&y directions
set num_ne_x = 20
set num_ne_y = 20

# Set domain length [m] in x&y direction
set domain_size_x = 200000
set domain_size_y = 200000

# BELOW SETS RESOLUTION DEPENDENT SETTINGS
# (Note that all default values below are appropriate for dx=dy=3.33 km and do not
# need to be modified if you are not changing the resolution)

# SET MODEL TIME STEPS
# - NOTE that if you change the model resolution,
#   it is likely the physics and dynamics time steps will need to be adjusted.
#   See below for guidance on how to adjust both.

# model/physics time step [s]:
# As a rule, a factor of 2 increase in resolution should equate to a factor of 2
# decrease of the model/physics step. This needs to be an integer.
set model_dtime = 100

# dynamics time step [s]:
# Should divide evenly into model_dtime. As a general rule of thumb, divide
# model_dtime by 12 to get your dynamics time step.
set dyn_dtime = 8.3333333333333

# SET SECOND ORDER VISCOSITY NEAR MODEL TOP
# NOTE that if you decrease the resolution you will also need to reduce
# the value of "nu_top" (second-order viscosity applied only near the model top).
# Rule of thumb is that a factor of 2 increase in resolution should equate to a
# factor of 2 decrease for this value.

# second-order viscosity near model top [m2/s]
set nu_top_dyn = 1e4

####### END (mandatory) USER DEFINED SETTINGS, but see above about output
###########################################################################
###########################################################################
###########################################################################

# Case specific information kept here
set lat = 36.605                   # latitude
set lon = 262.515                  # longitude
set do_iop_srf_prop = true         # Use surface fluxes in IOP file?
set do_iop_nudge_tq = false        # Relax T&Q to observations?
set do_iop_nudge_uv = true         # Relax U&V to observations?
set do_iop_nudge_coriolis = false  # Nudge to geostrophic winds?
set do_iop_subsidence = false      # Compute LS vertical transport?
set startdate = 1995-07-18    # Start date in IOP file
set start_in_sec = 19800      # Start time in seconds in IOP file
set stop_option = ndays
set stop_n = 17
set iop_file = ARM95_iopfile_4scam.nc  # IOP file name
set do_turnoff_swrad = false  # Turn off SW calculation? (where false, keep it false)
# End case specific information

# Location of IOP file
set iop_path = atm/cam/scam/iop

set PROJECT=$projectname
set E3SMROOT=${code_dir}/${code_tag}

cd $E3SMROOT/cime/scripts

set compset=FIOP-SCREAMv1-DP

# Note that in DP-SCREAM the grid is used ONLY to initialize
# the model from these files
set grid=ne30pg2_ne30pg2

set CASEID=$casename

set CASEDIR=${casedirectory}/$CASEID

set run_root_dir = $CASEDIR
set temp_case_scripts_dir = $run_root_dir/case_scripts

set case_scripts_dir = $run_root_dir/case_scripts
set case_build_dir = $run_root_dir/build
set case_run_dir = $run_root_dir/run

# Create new case
./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler
cd $temp_case_scripts_dir

./xmlchange JOB_WALLCLOCK_TIME=$walltime

./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`"

# Define executable and run directories
./xmlchange --id EXEROOT --val "${case_build_dir}"
./xmlchange --id RUNDIR --val "${case_run_dir}"

# Submit to the debug queue, only supported on certain machines
if ($debug_queue == 'true') then
  if ($machine =~ 'pm*') then
    ./xmlchange --id JOB_QUEUE --val 'debug'
  endif

  if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then
    ./xmlchange --id JOB_QUEUE --val 'pdebug'
  endif
endif

# Need to use a single thread per task
set npes = $num_procs
foreach component ( ATM LND ICE OCN CPL GLC ROF WAV )
  ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0
end

# Compute the maximum allowable number of processes (the number of elements)
set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y`

set ELM_CONFIG_OPTS="-phys elm"
./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS"

# Modify the run start and duration parameters for the desired case
./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n"

# Compute number of columns needed for component model initialization
set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4`

# Modify the latitude and longitude for the particular case
./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon"
./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1
./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1
./xmlchange CALENDAR="GREGORIAN"


# Set model timesteps

@ ncpl = 86400 / $model_dtime
./xmlchange ATM_NCPL=$ncpl
./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime"

./case.setup

# Get local input data directory path
set input_data_dir = `./xmlquery DIN_LOC_ROOT -value`


# Set relevant namelist modifications
./atmchange se_ne_x=$num_ne_x
./atmchange se_ne_y=$num_ne_y
./atmchange se_lx=$domain_size_x
./atmchange se_ly=$domain_size_y
./atmchange dt_remap_factor=2
./atmchange cubed_sphere_map=2
./atmchange target_latitude=$lat
./atmchange target_longitude=$lon
./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
./atmchange nu=0.216784
./atmchange nu_top=$nu_top_dyn
./atmchange se_ftype=2
./atmchange se_tstep=$dyn_dtime
./atmchange rad_frequency=3
./atmchange iop_srf_prop=$do_iop_srf_prop
./atmchange iop_dosubsidence=$do_iop_subsidence
./atmchange iop_coriolis=$do_iop_nudge_coriolis
./atmchange extra_shoc_diags=true
./atmchange set_cld_frac_r_to_one=true
./atmchange iop_nudge_uv=$do_iop_nudge_uv
./atmchange iop_nudge_tq=$do_iop_nudge_tq

# Set default physics process ordering (NOTE: if you add/remove a process or change
# the ordering, all DPxx runs NEED to have the "iop_forcing" process in the list somewhere).
./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp

# Allow for the computation of tendencies for output purposes
./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
./atmchange physics::rrtmgp::compute_tendencies=T_mid
./atmchange homme::compute_tendencies=T_mid,qv
./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv

# Configure yaml output
# See the example yaml files in DPxx_SCREAM_SCRIPTS/yaml_file_example.
# Note that you can have as many output streams (yaml files) as you want!
cp ${yamlpath}/scream_output_avg_1hour.yaml .
cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"

# Prevent the monthly cice history file from being written, as this
# appears to be currently broken for SCM
cat <<EOF >> user_nl_cice
histfreq='y','x','x','x','x'
EOF

# Turn on UofA surface flux scheme
cat <<EOF >> user_nl_cpl
ocn_surface_flux_scheme = 2
EOF

if ($do_turnoff_swrad == 'true') then
  set solar_angle = 180  # turns off incoming solar radiation
else
  set solar_angle = -1   # Interactive SW radiation
endif

# Note that this setting will be disabled for RCE
cat <<EOF >> user_nl_cpl
constant_zenith_deg = $solar_angle
EOF

./case.setup

# Use pnetcdf for model I/O
./xmlchange PIO_TYPENAME="pnetcdf"

# Build the case
./case.build

# Submit the case
./case.submit

exit

--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_ARM97.csh:
--------------------------------------------------------------------------------
#!/bin/csh -fe

#######################################################################
#######################################################################
####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
####### ARM97
####### Deep convection over the ARM SGP site
#######
####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
#######
####### IMPORTANT:
####### - You should now be using E3SM master. The SCREAM and E3SM repos
#######   have merged, and from here on out all SCREAM development will take place
#######   on the E3SM master branch.
#######

#######################################################
####### BEGIN USER DEFINED SETTINGS
####### NOTE: beyond this section you will need to configure your
####### output yaml file(s). Please do a search for "yamlpath" and you will
####### be brought to the correct locations.
####### See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example
####### directory of the scmlib repo to get you started.

# Set the name of your case here
setenv casename scream_dpxx_ARM97

# Set the case directory here
setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx

# Directory where code lives
setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes

# Code tag name
setenv code_tag E3SM_master

# Name of machine you are running on (e.g. pm-cpu, anvil, etc.)
setenv machine pm-cpu

# Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
# frontier should use "craycray-mphipcc")
# More machine compiler defaults will be added as they are tested/validated.
setenv compiler gnu

# Name of project to run on, if submitting to queue
setenv projectname e3sm

# Path where output YAML files are located (i.e. where you specify your output streams)
# See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
# NOTE: you will likely need to edit the section of the script where the yaml files
# are appended to your case. Do a search for "yamlpath" to find this location.
setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example


# Set to debug queue?
# - Some cases are small enough to run on debug queues
# - Setting to true is only supported for NERSC and Livermore Computing;
#   otherwise the user will need to modify this script to submit to a debug queue
setenv debug_queue false

# Set the number of processors to use; this should be less than or equal
# to the total number of elements in your domain. Note that if you are running
# on pm-gpu you will want to set this to either "4" or "8" if running the standard
# domain size and resolution (RCE excluded).
set num_procs = 384

# Set walltime
set walltime = '05:00:00'

## SET DOMAIN SIZE AND DYNAMICS RESOLUTION:
# - Note that these scripts are set up to run with dx=dy=3.33 km,
#   which is the default SCREAM resolution.

# To estimate dx (analogous for dy):
#   dx = domain_size_x / (num_ne_x * 3)
# (there are 3x3 unique dynamics columns per element, hence the "3" factor)

# Set number of elements in the x&y directions
set num_ne_x = 20
set num_ne_y = 20

# Set domain length [m] in x&y direction
set domain_size_x = 200000
set domain_size_y = 200000

# BELOW SETS RESOLUTION DEPENDENT SETTINGS
# (Note that all default values below are appropriate for dx=dy=3.33 km and do not
# need to be modified if you are not changing the resolution)

# SET MODEL TIME STEPS
# - NOTE that if you change the model resolution,
#   it is likely the physics and dynamics time steps will need to be adjusted.
#   See below for guidance, and the worked example that follows, on how to adjust both.
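# Worked example of the rules of thumb below (a sketch that simply follows the
# stated scaling rules, not a tested configuration): coarsening to dx=dy=6.67 km,
# i.e. halving num_ne_x and num_ne_y for the same domain size, would suggest:
#   set model_dtime = 200              # double the 100 s default
#   set dyn_dtime   = 16.666666666667  # model_dtime / 12
#   set nu_top_dyn  = 2e4              # double nu_top (see the note further below)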

# model/physics time step [s]:
# As a rule, a factor of 2 increase in resolution should equate to a factor of 2
# decrease of the model/physics step. This needs to be an integer.
set model_dtime = 100

# dynamics time step [s]:
# Should divide evenly into model_dtime. As a general rule of thumb, divide
# model_dtime by 12 to get your dynamics time step.
set dyn_dtime = 8.3333333333333

# SET SECOND ORDER VISCOSITY NEAR MODEL TOP
# NOTE that if you decrease the resolution you will also need to reduce
# the value of "nu_top" (second-order viscosity applied only near the model top).
# Rule of thumb is that a factor of 2 increase in resolution should equate to a
# factor of 2 decrease for this value.

# second-order viscosity near model top [m2/s]
set nu_top_dyn = 1e4

####### END (mandatory) USER DEFINED SETTINGS, but see above about output
###########################################################################
###########################################################################
###########################################################################

# Case specific information kept here
set lat = 36.605                   # latitude
set lon = 262.515                  # longitude
set do_iop_srf_prop = true         # Use surface fluxes in IOP file?
set do_iop_nudge_tq = false        # Relax T&Q to observations?
set do_iop_nudge_uv = true         # Relax U&V to observations?
set do_iop_nudge_coriolis = false  # Nudge to geostrophic winds?
set do_iop_subsidence = false      # Compute LS vertical transport?
set startdate = 1997-06-19    # Start date in IOP file
set start_in_sec = 84585      # Start time in seconds in IOP file
set stop_option = ndays
set stop_n = 26
set iop_file = ARM97_iopfile_4scam.nc  # IOP file name
set do_turnoff_swrad = false  # Turn off SW calculation? (where false, keep it false)
# End case specific information

# Location of IOP file
set iop_path = atm/cam/scam/iop

set PROJECT=$projectname
set E3SMROOT=${code_dir}/${code_tag}

cd $E3SMROOT/cime/scripts

set compset=FIOP-SCREAMv1-DP

# Note that in DP-SCREAM the grid is used ONLY to initialize
# the model from these files
set grid=ne30pg2_ne30pg2

set CASEID=$casename

set CASEDIR=${casedirectory}/$CASEID

set run_root_dir = $CASEDIR
set temp_case_scripts_dir = $run_root_dir/case_scripts

set case_scripts_dir = $run_root_dir/case_scripts
set case_build_dir = $run_root_dir/build
set case_run_dir = $run_root_dir/run

# Create new case
./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler
cd $temp_case_scripts_dir

./xmlchange JOB_WALLCLOCK_TIME=$walltime

./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`"

# Define executable and run directories
./xmlchange --id EXEROOT --val "${case_build_dir}"
./xmlchange --id RUNDIR --val "${case_run_dir}"

# Submit to the debug queue, only supported on certain machines
if ($debug_queue == 'true') then
  if ($machine =~ 'pm*') then
    ./xmlchange --id JOB_QUEUE --val 'debug'
  endif

  if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then
    ./xmlchange --id JOB_QUEUE --val 'pdebug'
  endif
endif

# Need to use a single thread per task
set npes = $num_procs
foreach component ( ATM LND ICE OCN CPL GLC ROF WAV )
  ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0
end

# Compute the maximum allowable number of processes (the number of elements)
set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y`

set ELM_CONFIG_OPTS="-phys elm"
./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS"

# Modify the run start and duration parameters for the desired case
./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n"

# Compute number of columns needed for component model initialization
set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4`

# Modify the latitude and longitude for the particular case
./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon"
./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1
./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1
./xmlchange CALENDAR="GREGORIAN"


# Set model timesteps

@ ncpl = 86400 / $model_dtime
./xmlchange ATM_NCPL=$ncpl
./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime"

./case.setup

# Get local input data directory path
set input_data_dir = `./xmlquery DIN_LOC_ROOT -value`


# Set relevant namelist modifications
./atmchange se_ne_x=$num_ne_x
./atmchange se_ne_y=$num_ne_y
./atmchange se_lx=$domain_size_x
./atmchange se_ly=$domain_size_y
./atmchange dt_remap_factor=2
./atmchange cubed_sphere_map=2
./atmchange target_latitude=$lat
./atmchange target_longitude=$lon
./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
./atmchange nu=0.216784
./atmchange nu_top=$nu_top_dyn
./atmchange se_ftype=2
./atmchange se_tstep=$dyn_dtime
./atmchange rad_frequency=3
./atmchange iop_srf_prop=$do_iop_srf_prop
./atmchange iop_dosubsidence=$do_iop_subsidence
./atmchange iop_coriolis=$do_iop_nudge_coriolis
./atmchange extra_shoc_diags=true
./atmchange set_cld_frac_r_to_one=true
./atmchange iop_nudge_uv=$do_iop_nudge_uv
./atmchange iop_nudge_tq=$do_iop_nudge_tq

# Set default physics process ordering (NOTE: if you add/remove a process or change
# the ordering, all DPxx runs NEED to have the "iop_forcing" process in the list somewhere).
./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp

# Allow for the computation of tendencies for output purposes
./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
./atmchange physics::rrtmgp::compute_tendencies=T_mid
./atmchange homme::compute_tendencies=T_mid,qv
./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv

# Configure yaml output
# See the example yaml files in DPxx_SCREAM_SCRIPTS/yaml_file_example.
# Note that you can have as many output streams (yaml files) as you want!
cp ${yamlpath}/scream_output_avg_1hour.yaml .
cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"

# Prevent the monthly cice history file from being written, as this
# appears to be currently broken for SCM
cat <<EOF >> user_nl_cice
histfreq='y','x','x','x','x'
EOF

# Turn on UofA surface flux scheme
cat <<EOF >> user_nl_cpl
ocn_surface_flux_scheme = 2
EOF

if ($do_turnoff_swrad == 'true') then
  set solar_angle = 180  # turns off incoming solar radiation
else
  set solar_angle = -1   # Interactive SW radiation
endif

# Note that this setting will be disabled for RCE
cat <<EOF >> user_nl_cpl
constant_zenith_deg = $solar_angle
EOF

./case.setup

# Use pnetcdf for model I/O
./xmlchange PIO_TYPENAME="pnetcdf"

# Build the case
./case.build

# Submit the case
./case.submit

exit

--------------------------------------------------------------------------------
/DPxx_SCREAM_SCRIPTS/run_dpxx_scream_BOMEX.csh:
--------------------------------------------------------------------------------
#!/bin/csh -fe

#######################################################################
#######################################################################
####### Script to run SCREAMv1 in doubly periodic (DP) mode (DP-EAMxx)
####### BOMEX
####### Non-precipitating trade-wind cumulus convection
#######
####### Script Author: P. Bogenschutz (bogenschutz1@llnl.gov)
#######
####### IMPORTANT:
####### - You should now be using E3SM master. The SCREAM and E3SM repos
#######   have merged, and from here on out all SCREAM development will take place
#######   on the E3SM master branch.
#######

#######################################################
####### BEGIN USER DEFINED SETTINGS
####### NOTE: beyond this section you will need to configure your
####### output yaml file(s). Please do a search for "yamlpath" and you will
####### be brought to the correct locations.
####### See the example yaml files in the DPxx_SCREAM_SCRIPTS/yaml_file_example
####### directory of the scmlib repo to get you started.

# Set the name of your case here
setenv casename scream_dpxx_BOMEX

# Set the case directory here
setenv casedirectory /pscratch/sd/b/bogensch/dp_screamxx

# Directory where code lives
setenv code_dir /pscratch/sd/b/bogensch/dp_scream/codes

# Code tag name
setenv code_tag E3SM_master

# Name of machine you are running on (e.g. pm-cpu, anvil, etc.)
setenv machine pm-cpu

# Compiler (pm-cpu should use "gnu"; pm-gpu should use "gnugpu"; LC should use "intel";
# frontier should use "craycray-mphipcc")
# More machine compiler defaults will be added as they are tested/validated.
setenv compiler gnu

# Name of project to run on, if submitting to queue
setenv projectname e3sm

# Path where output YAML files are located (i.e. where you specify your output streams)
# See example files in DPxx_SCREAM_SCRIPTS/yaml_file_example to get you started.
# NOTE: you will likely need to edit the section of the script where the yaml files
# are appended to your case. Do a search for "yamlpath" to find this location.
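# In the yaml section further down, each additional stream is appended with a
# line of the form (hypothetical file name):
#   ./atmchange output_yaml_files+="./my_extra_stream.yaml"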
setenv yamlpath /pscratch/sd/b/bogensch/dp_scream/codes/scmlib/DPxx_SCREAM_SCRIPTS/yaml_file_example


# Set to debug queue?
# - Some cases are small enough to run on debug queues
# - Setting to true is only supported for NERSC and Livermore Computing;
#   otherwise the user will need to modify this script to submit to a debug queue
setenv debug_queue true

# Set the number of processors to use; this should be less than or equal
# to the total number of elements in your domain. Note that if you are running
# on pm-gpu you will want to set this to either "4" or "8" if running the standard
# domain size and resolution (RCE excluded).
set num_procs = 24

# Set walltime
set walltime = '00:30:00'

## SET DOMAIN SIZE AND DYNAMICS RESOLUTION:
# - Note that these scripts are set up to run with dx=dy=3.33 km,
#   which is the default SCREAM resolution.

# To estimate dx (analogous for dy):
#   dx = domain_size_x / (num_ne_x * 3)
# (there are 3x3 unique dynamics columns per element, hence the "3" factor)

# Set number of elements in the x&y directions
set num_ne_x = 5
set num_ne_y = 5

# Set domain length [m] in x&y direction
set domain_size_x = 50000
set domain_size_y = 50000

# BELOW SETS RESOLUTION DEPENDENT SETTINGS
# (Note that all default values below are appropriate for dx=dy=3.33 km and do not
# need to be modified if you are not changing the resolution)

# SET MODEL TIME STEPS
# - NOTE that if you change the model resolution,
#   it is likely the physics and dynamics time steps will need to be adjusted.
#   See below for guidance on how to adjust both.

# model/physics time step [s]:
# As a rule, a factor of 2 increase in resolution should equate to a factor of 2
# decrease of the model/physics step. This needs to be an integer.
set model_dtime = 100

# dynamics time step [s]:
# Should divide evenly into model_dtime. As a general rule of thumb, divide
# model_dtime by 12 to get your dynamics time step.
set dyn_dtime = 8.3333333333333

# SET SECOND ORDER VISCOSITY NEAR MODEL TOP
# NOTE that if you decrease the resolution you will also need to reduce
# the value of "nu_top" (second-order viscosity applied only near the model top).
# Rule of thumb is that a factor of 2 increase in resolution should equate to a
# factor of 2 decrease for this value.

# second-order viscosity near model top [m2/s]
set nu_top_dyn = 1e4

####### END (mandatory) USER DEFINED SETTINGS, but see above about output
###########################################################################
###########################################################################
###########################################################################

# Case specific information kept here
set lat = 15.0                     # latitude
set lon = 300.0                    # longitude
set do_iop_srf_prop = true         # Use surface fluxes in IOP file?
set do_iop_nudge_tq = false        # Relax T&Q to observations?
set do_iop_nudge_uv = false        # Relax U&V to observations?
set do_iop_nudge_coriolis = false  # Nudge to geostrophic winds?
set do_iop_subsidence = true       # Compute LS vertical transport?
set startdate = 1969-06-25    # Start date in IOP file
set start_in_sec = 0          # Start time in seconds in IOP file
set stop_option = nhours
set stop_n = 6
set iop_file = BOMEX_iopfile_4scam.nc  # IOP file name
set do_turnoff_swrad = true   # Turn off SW calculation? (where false, keep it false)
# End case specific information

# Location of IOP file
set iop_path = atm/cam/scam/iop

set PROJECT=$projectname
set E3SMROOT=${code_dir}/${code_tag}

cd $E3SMROOT/cime/scripts

set compset=FIOP-SCREAMv1-DP

# Note that in DP-SCREAM the grid is used ONLY to initialize
# the model from these files
set grid=ne30pg2_ne30pg2

set CASEID=$casename

set CASEDIR=${casedirectory}/$CASEID

set run_root_dir = $CASEDIR
set temp_case_scripts_dir = $run_root_dir/case_scripts

set case_scripts_dir = $run_root_dir/case_scripts
set case_build_dir = $run_root_dir/build
set case_run_dir = $run_root_dir/run

# Create new case
./create_newcase -case $casename --script-root $temp_case_scripts_dir -mach $machine -project $PROJECT -compset $compset -res $grid --compiler $compiler
cd $temp_case_scripts_dir

./xmlchange JOB_WALLCLOCK_TIME=$walltime

./xmlchange SCREAM_CMAKE_OPTIONS="`./xmlquery -value SCREAM_CMAKE_OPTIONS | sed 's/SCREAM_NUM_VERTICAL_LEV [0-9][0-9]*/SCREAM_NUM_VERTICAL_LEV 128/'`"

# Define executable and run directories
./xmlchange --id EXEROOT --val "${case_build_dir}"
./xmlchange --id RUNDIR --val "${case_run_dir}"

# Submit to the debug queue, only supported on certain machines
if ($debug_queue == 'true') then
  if ($machine =~ 'pm*') then
    ./xmlchange --id JOB_QUEUE --val 'debug'
  endif

  if ($machine == 'quartz' || $machine == 'syrah' || $machine == 'ruby') then
    ./xmlchange --id JOB_QUEUE --val 'pdebug'
  endif
endif

# Need to use a single thread per task
set npes = $num_procs
foreach component ( ATM LND ICE OCN CPL GLC ROF WAV )
  ./xmlchange NTASKS_$component=$npes,NTHRDS_$component=1,ROOTPE_$component=0
end

# Compute the maximum allowable number of processes (the number of elements)
set dyn_pes_nxny = `expr $num_ne_x \* $num_ne_y`

set ELM_CONFIG_OPTS="-phys elm"
./xmlchange ELM_CONFIG_OPTS="$ELM_CONFIG_OPTS"

# Modify the run start and duration parameters for the desired case
./xmlchange RUN_STARTDATE="$startdate",START_TOD="$start_in_sec",STOP_OPTION="$stop_option",STOP_N="$stop_n"

# Compute number of columns needed for component model initialization
set comp_mods_nx = `expr $num_ne_x \* $num_ne_y \* 4`

# Modify the latitude and longitude for the particular case
./xmlchange PTS_MULTCOLS_MODE="TRUE",PTS_MODE="TRUE",PTS_LAT="$lat",PTS_LON="$lon"
./xmlchange MASK_GRID="USGS",PTS_NX="${comp_mods_nx}",PTS_NY=1
./xmlchange ICE_NX="${comp_mods_nx}",ICE_NY=1
./xmlchange CALENDAR="GREGORIAN"


# Set model timesteps

@ ncpl = 86400 / $model_dtime
./xmlchange ATM_NCPL=$ncpl
./xmlchange ELM_NAMELIST_OPTS="dtime=$model_dtime"

./case.setup

# Get local input data directory path
set input_data_dir = `./xmlquery DIN_LOC_ROOT -value`


# Set relevant namelist modifications
./atmchange se_ne_x=$num_ne_x
./atmchange se_ne_y=$num_ne_y
./atmchange se_lx=$domain_size_x
./atmchange se_ly=$domain_size_y
./atmchange dt_remap_factor=2
./atmchange cubed_sphere_map=2
./atmchange target_latitude=$lat
./atmchange target_longitude=$lon
./atmchange iop_file=$input_data_dir/$iop_path/$iop_file
./atmchange nu=0.216784
./atmchange nu_top=$nu_top_dyn
./atmchange se_ftype=2
./atmchange se_tstep=$dyn_dtime
./atmchange rad_frequency=3
./atmchange iop_srf_prop=$do_iop_srf_prop
./atmchange iop_dosubsidence=$do_iop_subsidence
./atmchange iop_coriolis=$do_iop_nudge_coriolis
./atmchange extra_shoc_diags=true
./atmchange set_cld_frac_r_to_one=true
./atmchange iop_nudge_uv=$do_iop_nudge_uv
./atmchange iop_nudge_tq=$do_iop_nudge_tq

# Set default physics process ordering (NOTE: if you add/remove a process or change
# the ordering, all DPxx runs NEED to have the "iop_forcing" process in the list somewhere).
./atmchange physics::atm_procs_list=iop_forcing,mac_aero_mic,rrtmgp

# Allow for the computation of tendencies for output purposes
./atmchange physics::mac_aero_mic::shoc::compute_tendencies=T_mid,qv
./atmchange physics::mac_aero_mic::p3::compute_tendencies=T_mid,qv
./atmchange physics::rrtmgp::compute_tendencies=T_mid
./atmchange homme::compute_tendencies=T_mid,qv
./atmchange physics::iop_forcing::compute_tendencies=T_mid,qv

# Configure yaml output
# See the example yaml files in DPxx_SCREAM_SCRIPTS/yaml_file_example.
# Note that you can have as many output streams (yaml files) as you want!
cp ${yamlpath}/scream_output_avg_1hour.yaml .
cp ${yamlpath}/scream_horiz_avg_output_15min.yaml .
./atmchange output_yaml_files="./scream_output_avg_1hour.yaml"
./atmchange output_yaml_files+="./scream_horiz_avg_output_15min.yaml"

# Prevent the monthly cice history file from being written, as this
# appears to be currently broken for SCM
cat <<EOF >> user_nl_cice
histfreq='y','x','x','x','x'
EOF

# Turn on UofA surface flux scheme
cat <<EOF >> user_nl_cpl
ocn_surface_flux_scheme = 2
EOF

if ($do_turnoff_swrad == 'true') then
  set solar_angle = 180  # turns off incoming solar radiation
else
  set solar_angle = -1   # Interactive SW radiation
endif

# Note that this setting will be disabled for RCE
cat <<EOF >> user_nl_cpl
constant_zenith_deg = $solar_angle
EOF

./case.setup

# Use pnetcdf for model I/O
./xmlchange PIO_TYPENAME="pnetcdf"

# Build the case
./case.build

# Submit the case
./case.submit

exit

--------------------------------------------------------------------------------