├── .gitattributes ├── .gitignore ├── LICENSE.txt ├── README.md ├── VERSION.txt ├── code ├── README.md ├── ini_templates │ ├── landsat_interpolate.ini │ ├── landsat_metric_inputs.ini │ ├── landsat_metric_monte_carlo.ini │ ├── landsat_metric_pixel_rating.ini │ └── landsat_project.ini ├── interp_functions │ ├── README.md │ ├── interpolate_rasters_func.py │ ├── interpolate_support.py │ └── interpolate_tables_func.py ├── local │ ├── README.md │ ├── landsat_interpolate.py │ ├── landsat_prep_ini.py │ ├── landsat_prep_path_row.py │ ├── landsat_prep_scene.py │ ├── metric_model1.py │ ├── metric_model2.py │ ├── metric_monte_carlo.py │ ├── metric_pixel_points.py │ └── metric_pixel_rating.py ├── metric_functions │ ├── README.md │ ├── auto_calibration_func.py │ ├── metric_model1_func.py │ ├── metric_model2_func.py │ ├── monte_carlo_func.py │ ├── pixel_points_func.py │ └── pixel_rating_func.py ├── prep_functions │ ├── README.md │ └── landsat_prep_scene_func.py └── support │ ├── et_common.py │ ├── et_image.py │ ├── et_numpy.py │ └── python_common.py ├── docs ├── EXAMPLE_DATA.md ├── EXAMPLE_METRIC.md ├── EXAMPLE_SETUP.md ├── INSTALL.md ├── README.md └── images │ └── harney_shapefile.png ├── example ├── .gitignore ├── README.md ├── cloud_masks │ ├── LE07_L1TP_043030_20150423_20160902_01_T1_mask.dbf │ ├── LE07_L1TP_043030_20150423_20160902_01_T1_mask.prj │ ├── LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbn │ ├── LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbx │ ├── LE07_L1TP_043030_20150423_20160902_01_T1_mask.shp │ └── LE07_L1TP_043030_20150423_20160902_01_T1_mask.shx ├── example_directory_structure.txt ├── fields │ ├── fields_2016_wgs84z11.dbf │ ├── fields_2016_wgs84z11.prj │ ├── fields_2016_wgs84z11.sbn │ ├── fields_2016_wgs84z11.sbx │ ├── fields_2016_wgs84z11.shp │ └── fields_2016_wgs84z11.shx ├── landsat │ ├── clear_scenes.txt │ └── cloudy_scenes.txt ├── landsat_2015.ini └── study_area │ ├── harney_wgs84z11.dbf │ ├── harney_wgs84z11.prj │ ├── harney_wgs84z11.sbn │ ├── harney_wgs84z11.sbx │ ├── harney_wgs84z11.shp │ ├── harney_wgs84z11.shx │ ├── wrs2_p043r030.dbf │ ├── wrs2_p043r030.prj │ ├── wrs2_p043r030.sbn │ ├── wrs2_p043r030.sbx │ ├── wrs2_p043r030.shp │ └── wrs2_p043r030.shx ├── landsat ├── .gitignore ├── README.md └── footprints │ ├── .gitignore │ ├── README.md │ └── wrs2_tile_utm_zones.json ├── misc ├── README.md ├── etrf_doy_adj.csv ├── etrf_training_original.csv ├── etrf_training_plots_20120918.py ├── etrf_training_test.csv ├── zom_nlcd_default.json └── zom_nlcd_metric.json ├── requirements.txt └── tools ├── README.md ├── cimis ├── README.md ├── _utils.py ├── cimis_ancillary.py ├── cimis_daily_refet.py ├── cimis_download.py ├── cimis_extract_convert.py └── cimis_gridmet_fill.py ├── daymet ├── README.md ├── _utils.py ├── daymet_ancillary.py ├── daymet_climatologies.py ├── daymet_daily_ea.py ├── daymet_daily_ppt.py ├── daymet_daily_temp.py ├── daymet_daily_variables.py └── daymet_download.py ├── download ├── README.md ├── _utils.py ├── download_cdl.py ├── download_footprints.py ├── download_landfire.py ├── download_landsat.py ├── download_ned.py ├── download_nlcd.py └── download_soils.py ├── gridmet ├── README.md ├── _utils.py ├── gridmet_ancillary.py ├── gridmet_daily_ea.py ├── gridmet_daily_ppt.py ├── gridmet_daily_refet.py ├── gridmet_daily_temp.py ├── gridmet_daily_variables.py └── gridmet_download.py ├── nldas ├── README.md ├── _utils.py ├── nldas_ancillary.py ├── nldas_download.py ├── nldas_hourly_ea.py ├── nldas_hourly_refet.py ├── nldas_hourly_variable.py └── nldas_hourly_wind.py 
└── plotting └── summary_histograms.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # PyCharm 7 | .idea 8 | 9 | # Temporary files 10 | *.bak 11 | *.lock 12 | 13 | # Documents 14 | *.mxd 15 | *.docx 16 | 17 | # Data folders 18 | /cdl 19 | /cimis/*/ 20 | /daymet 21 | /dem/tiles 22 | /dem/p*r*/ 23 | /fields 24 | /gridmet 25 | /landfire 26 | /landsat/*/ 27 | /nlcd 28 | /nldas/*/ 29 | /refet 30 | /soils 31 | /ssurgo 32 | /statsgo 33 | /summary_histograms 34 | 35 | # Projects 36 | /bear_river_basin 37 | /central_valley 38 | /malheur_lake_basin 39 | /western_nevada 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pyMETRIC 2 | 3 | pyMETRIC is a set of Python-based tools developed for estimating and mapping evapotranspiration (ET) for large areas, utilizing the Landsat image archive. This framework currently computes ET estimates using the [METRIC](http://www.uidaho.edu/cals/kimberly-research-and-extension-center/research/water-resources) surface energy balance model, developed at the University of Idaho. 4 | 5 | In order to produce ET estimates, pyMETRIC produces ancillary rasters from Landsat data products. These products are stored within the pyMETRIC data structure, and may be useful for tasks tangentially related to ET mapping. The raster datasets produced during typical processing include the following: 6 | - Albedo 7 | - LAI (Leaf Area Index) 8 | - NDVI (Normalized Difference Vegetation Index) 9 | - NDWI (Normalized Difference Water Index) 10 | - Top of Atmosphere Reflectance 11 | 12 | In addition to creating ET maps from Landsat images, pyMETRIC includes functionality to interpolate annual/seasonal/monthly ET maps from individually processed ET maps. 13 | 14 | ## Install 15 | 16 | Details on installing pyMETRIC, Python, and necessary modules can be found in the [installation instructions](docs/INSTALL.md). 17 | 18 | ## Example 19 | 20 | A detailed walk-through on the setup and operation of pyMETRIC has been assembled in the following series of documentation. These examples are set up to process a portion of the Harney Basin, located in eastern Oregon. The documentation is contained in the following links: 21 | 1. [Data Preparation](docs/EXAMPLE_DATA.md) 22 | 2. [Project Setup](docs/EXAMPLE_SETUP.md) 23 | 3. 
[Running METRIC](docs/EXAMPLE_METRIC.md) 24 | 25 | ## References 26 | 27 | * [Satellite-Based Energy Balance for Mapping Evapotranspiration with Internalized Calibration (METRIC)-Model](https://ascelibrary.org/doi/abs/10.1061/(ASCE)0733-9437(2007)133:4(380)) 28 | * [Satellite-Based Energy Balance for Mapping Evapotranspiration with Internalized Calibration (METRIC)-Applications](https://ascelibrary.org/doi/abs/10.1061/(ASCE)0733-9437(2007)133:4(395)) 29 | * [Assessing calibration uncertainty and automation for estimating evapotranspiration from agricultural areas using METRIC](https://www.dri.edu/images/stories/divisions/dhs/dhsfaculty/Justin-Huntington/Morton_et_al._2013.pdf) 30 | 31 | ## Limitations 32 | 33 | METRIC requires an assemblage of several datasets in order to produce accurate estimates of evapotranspiration. The pyMETRIC framework serves to download and process the required data. Please note that this code is written for the data as it is currently provided; however, the data and its formatting are controlled by the data providers and by third-party hosts. The maintainers of pyMETRIC will attempt to keep the package functional; however, changes in the data and data availability may impact the functionality of pyMETRIC. 34 | 35 | ## Directory Structure 36 | 37 | When initially downloading or cloning pyMETRIC, this directory does not contain data necessary for estimating ET. As Python scripts are run as prescribed in ["Data Preparation"](docs/EXAMPLE_DATA.md) and ["Project Setup"](docs/EXAMPLE_SETUP.md), the top-level directory will be populated with additional directories containing support data. These folders will be assigned names according to the directory contents (e.g. "cdl", "dem", "gridmet", etc.). Ideally these data directories will be populated with project-agnostic data (e.g. "dem" may contain a digital elevation model (DEM) for the entire continental United States). The support data will be processed by pyMETRIC, which will isolate and subset the relevant data for processing. 38 | 39 | To serve as an example, the "example" directory is included in the top-level directory. The "example" directory is an example of a "project directory", which should contain information specific to your chosen study area or area of interest. As pyMETRIC is run according to ["Running METRIC"](docs/EXAMPLE_METRIC.md), the processed data will be stored within the project directory. 40 | -------------------------------------------------------------------------------- /VERSION.txt: -------------------------------------------------------------------------------- 1 | 0.2.3 -------------------------------------------------------------------------------- /code/README.md: -------------------------------------------------------------------------------- 1 | ### ini_templates 2 | ------------- 3 | Contains several .ini (input) templates to be used with pyMETRIC. 4 | __NOTE: As of 12/20/17 these templates may not be fully compatible with the most recent updates to pyMETRIC. 
For the most current .ini example, please use [\pymetric\example\landsat_2015.ini](../example/landsat_2015.ini)__ 5 | 6 | ### interp_functions 7 | ------------- 8 | Contains Python functions that are used by the pyMETRIC software. 9 | __NOTE: The Python files contained in this directory are not intended to be used directly by the pyMETRIC operator__ 10 | 11 | ### prep_functions 12 | ------------- 13 | Contains Python functions that are used by the pyMETRIC software. 14 | __NOTE: The Python files contained in this directory are not intended to be used directly by the pyMETRIC operator__ 15 | 16 | ### support 17 | ------------- 18 | Contains Python functions that are used by the pyMETRIC software. 19 | __NOTE: The Python files contained in this directory are not intended to be used directly by the pyMETRIC operator__ 20 | -------------------------------------------------------------------------------- /code/ini_templates/landsat_interpolate.ini: -------------------------------------------------------------------------------- 1 | ## Interpolator Input File 2 | [INPUTS] 3 | folder_name = ET 4 | year = 2011 5 | 6 | ## Study area (for raster interpolator) 7 | ## Output projection will be inherited from 1) zones_proj or 2) study_area_path 8 | ## If study_area_mask is True, convert to raster and mask values outside study area features 9 | ## If study_area_mask is False, get output extent and spat. ref from study_area_path 10 | ## Buffering the study area is not fully supported 11 | ## Buffer will only be applied to the output extent 12 | study_area_path = C:\pymetric\example\study_area\harney_wgs84z11.shp 13 | study_area_mask = True 14 | study_area_snap = 0, 0 15 | study_area_cellsize = 30 16 | study_area_buffer = 0 17 | study_area_proj = None 18 | 19 | ## Zones (for table interpolator) 20 | ## Output projection will be inherited from zones_path 21 | ## projection of zones_mask will be assumed to be the same as zones_path 22 | ## Overriding the output projection is not currently supported 23 | ## Output raster properties will be inherited from 1) zones_mask or 2) set separately 24 | ## If set, zones_mask values of 0 or nodata will be excluded from zonal statistics 25 | ## Buffering the zones is not currently supported 26 | zones_path = C:\pymetric\example\study_area\harney_wgs84z11.shp 27 | zones_name_field = Name 28 | zones_snap = 0, 0 29 | zones_cellsize = 30 30 | zones_mask = None 31 | # zones_proj = None 32 | # zones_buffer = 0 33 | 34 | ## Set path/rows the interpolator should try to use 35 | tile_list = p43r30 36 | 37 | ## ETrF (optional, if not set will default to current project/year) 38 | # etrf_input_folder = C:\pymetric\example\2015 39 | 40 | ## ETr 41 | etr_input_folder = C:\pymetric\gridmet\etr 42 | etr_input_re = (?Petr)_(?P\d{4})_daily_(?P\w+).img$ 43 | 44 | ## PPT 45 | ppt_input_folder = C:\pymetric\gridmet\ppt 46 | ppt_input_re = (?Pppt)_(?P\d{4})_daily_(?P\w+).img$ 47 | 48 | ## Available water capacity (for SWB) 49 | awc_input_path = C:\pymetric\soils\AWC_WTA_0to10cm_composite.img 50 | 51 | ## Landsat footprint shapefile 52 | footprint_path = C:\pymetric\landsat\footprint\wrs2_descending.shp 53 | 54 | ## List Landsat scene days that should be ignored 55 | # doy_start = 1 56 | # doy_end = 365 57 | use_landsat4_flag = False 58 | use_landsat5_flag = True 59 | use_landsat7_flag = True 60 | use_landsat8_flag = True 61 | 62 | ## Raster 63 | etrf_raster = ETRF/et_rf.img 64 | ndvi_raster = INDICES/ndvi_toa.img 65 | 66 | ## Calculation blocksize 67 | blocksize = 256 68 | 69 | ## 
NEAREST, LINEAR, or CUBICSPLINE 70 | ## Eventually add SPATIAL 71 | fill_method = LINEAR 72 | 73 | ## NEAREST, LINEAR, or CUBICSPLINE 74 | interp_method = LINEAR 75 | 76 | ## Only MEAN is currently supported 77 | mosaic_method = MEAN 78 | 79 | ## Soil Water Balance Adjust 80 | swb_adjust_flag = False 81 | swb_spinup_days = 30 82 | swb_min_spinup_days = 5 83 | swb_bare_soil_ndvi = 0.15 84 | swb_full_cover_ndvi = 0.7 85 | 86 | ## Use NDVI as surrogate for ETrF 87 | ## Setting the doy or month will force NDVI to be used 88 | ## Otherwise NDVI will only be used if ETrF doesn't exist 89 | etrf_ndvi_flag = False 90 | # etrf_ndvi_doy_list = 91 | # etrf_ndvi_month_list = 1, 2, 3, 11, 12 92 | # etrf_ndvi_slope = 1.25 93 | # etrf_ndvi_offset = 0 94 | 95 | ## Set to None or remove to not limit ETrF 96 | # low_etrf_limit = 0.0 97 | # high_etrf_limit = 1.5 98 | 99 | ## Zonal stats table flags 100 | calc_daily_zones_table_flag = True 101 | calc_monthly_zones_table_flag = True 102 | calc_annual_zones_table_flag = True 103 | 104 | ## Zonal stats plot flags 105 | calc_daily_ndvi_plots_flag = False 106 | calc_daily_etrf_plots_flag = False 107 | calc_daily_etr_plots_flag = False 108 | calc_daily_et_plots_flag = False 109 | calc_daily_ppt_plots_flag = False 110 | 111 | ## Daily raster flags 112 | calc_daily_ndvi_rasters_flag = False 113 | calc_daily_etrf_rasters_flag = False 114 | calc_daily_etr_rasters_flag = False 115 | calc_daily_et_rasters_flag = False 116 | calc_daily_ppt_rasters_flag = False 117 | 118 | ## Monthly raster flags 119 | calc_monthly_ndvi_rasters_flag = False 120 | calc_monthly_etrf_rasters_flag = True 121 | calc_monthly_etr_rasters_flag = True 122 | calc_monthly_et_rasters_flag = True 123 | calc_monthly_ppt_rasters_flag = True 124 | calc_monthly_count_rasters_flag = True 125 | 126 | ## Annual raster flags 127 | calc_annual_ndvi_rasters_flag = False 128 | calc_annual_etrf_rasters_flag = True 129 | calc_annual_etr_rasters_flag = True 130 | calc_annual_et_rasters_flag = True 131 | calc_annual_ppt_rasters_flag = True 132 | calc_annual_count_rasters_flag = True 133 | 134 | ## Seasonal raster flags 135 | #calc_seasonal_ndvi_rasters_flag = False 136 | #calc_seasonal_etrf_rasters_flag = False 137 | #calc_seasonal_etr_rasters_flag = False 138 | #calc_seasonal_et_rasters_flag = False 139 | #calc_seasonal_ppt_rasters_flag = False 140 | #calc_seasonal_count_rasters_flag = False 141 | 142 | ## Output folder/file names 143 | ndvi_name = NDVI 144 | etrf_name = ETrF 145 | etr_name = ETr 146 | et_name = ET 147 | 148 | pyramids_flag = False 149 | statistics_flag = True 150 | blend_overlap_flag = True 151 | 152 | ## Buffer path/row extents by N degrees for selecting Landsat path/rows 153 | tile_buffer = 0.25 154 | 155 | ## Plot parameters 156 | plots_zone_area_flag = True 157 | plots_ndvi_ylim = 0.0, 1.0 158 | plots_etrf_ylim = 0.0, 1.2 159 | plots_etr_ylim = 0, 12 160 | plots_et_ylim = 0, 12 161 | plots_ppt_ylim = 0, 100 162 | -------------------------------------------------------------------------------- /code/ini_templates/landsat_metric_inputs.ini: -------------------------------------------------------------------------------- 1 | ## METRIC Input File 2 | [INPUTS] 3 | 4 | kc_cold_pixel = 1.05 5 | kc_hot_pixel = 0.10 6 | 7 | dem_raster = C:\pymetric\dem\p043r030\dem.img 8 | landuse_raster = C:\pymetric\nlcd\p043r030\nlcd_2011.img 9 | landuse_type = NLCD 10 | 11 | zom_remap_path = C:\pymetric\misc\zom_nlcd_default.json 12 | 13 | # block_size = 1024 14 | block_size = 1536 15 | # block_size = 2048 16 | # 
block_size = 4096 17 | # block_size = 8192 18 | 19 | pyramids_flag = False 20 | statistics_flag = False 21 | 22 | ## Remove reflectance rasters after calculating Model 1 23 | remove_refl_toa_flag = False 24 | remove_refl_sur_flag = False 25 | remove_ts_bt_flag = False 26 | 27 | ## Datum elevation [m] 28 | datum = 86.868 29 | ## Lapse rate [K/km] 30 | lapse_rate = 6.5 31 | 32 | ## Weather 33 | weather_data_source = NLDAS 34 | # weather_data_source = REFET 35 | # gmt_offset = -8 36 | # refet_file = C:\pymetric\example\refet\FivePoints2_hourly_2011_formatted_datasub_Rs_corrected.out 37 | 38 | ## Height of wind speed measurement at weather station [m] 39 | wind_speed_height = 2.0 40 | ## Surface roughness at weather station [m] 41 | station_roughness = 0.015 42 | ## Add additional windspeed if unstable [m/s] 43 | additional_wind_speed = 0.0 44 | 45 | ## DEM or DATUM model (Pair calculated from DEM or datum elevation) 46 | pair_model = DEM 47 | ## SOLAR, CENTROID*, SPATIAL, or MOUNTAIN 48 | cos_theta_model = CENTROID 49 | ## FLAT or MOUNTAIN model 50 | terrain_model = FLAT 51 | 52 | ## Trim DEM and landuse rasters to common area 53 | ## Setting these True will overwrite the existing rasters if they exist 54 | save_dem_raster_flag = False 55 | save_mountain_rasters_flag = False 56 | save_landuse_raster_flag = False 57 | 58 | ## MODEL 1 INPUTS 59 | save_cos_theta_raster_flag = True 60 | 61 | ## At-Surface Reflectance, Albedo, and Broadband Emissivity 62 | save_refl_sur_raster_flag = False 63 | ## TASUMI or LEDAPS 64 | refl_sur_model_type = TASUMI 65 | save_albedo_sur_raster_flag = True 66 | save_tau_raster_flag = False 67 | 68 | ## Vegetation Indices: Top-of-atmosphere reflectance 69 | save_ndvi_toa_raster_flag = True 70 | save_savi_toa_raster_flag = False 71 | save_lai_toa_raster_flag = True 72 | ## SAVI or NDVI 73 | lai_toa_veg_index_type = SAVI 74 | save_ndwi_toa_raster_flag = True 75 | 76 | ## Vegetation Indices: At-surface reflectance 77 | save_ndvi_raster_flag = False 78 | save_savi_raster_flag = False 79 | save_lai_raster_flag = False 80 | ## SAVI or NDVI 81 | lai_index_type = SAVI 82 | save_ndwi_raster_flag = False 83 | 84 | ## Emissivity (for Rc/Ts and Rn) 85 | save_em_nb_raster_flag = False 86 | save_em_0_raster_flag = False 87 | ## TOA or SUR 88 | em_refl_type = TOA 89 | ## NDVI or NDWI 90 | em_water_index_type = NDVI 91 | 92 | ## Surface Temperature 93 | save_rc_raster_flag = False 94 | save_ts_raster_flag = True 95 | save_ts_dem_raster_flag = False 96 | 97 | 98 | ## MODEL 2 INPUTS 99 | ## Pixels_folder is the name of the folder with the hot/cold pixels shapefiles 100 | ## X_pixel needs to be the name of a point shapefile with a single point 101 | pixels_folder = PIXELS 102 | cold_pixel = cold.shp 103 | hot_pixel = hot.shp 104 | 105 | ## Net Radiation 106 | save_rn_raster_flag = False 107 | save_rn_24_raster_flag = False 108 | 109 | ## G 110 | save_g_raster_flag = False 111 | ## METRIC or SEBAL 112 | g_model_type = METRIC 113 | ## TOA or SUR 114 | g_refl_type = TOA 115 | save_g_landuse_rasters_flag = False 116 | use_g_water_flag = True 117 | use_g_snow_flag = False 118 | use_g_wetland_flag = False 119 | 120 | ## Zom 121 | save_zom_raster_flag = False 122 | zom_lai_refl_type = TOA 123 | 124 | ## Sensible Heat Flux 125 | save_h_raster_flag = False 126 | 127 | ## Sensible Heat Flux Iteration Controls 128 | ## Modes can be AUTO or MANUAL 129 | ## AUTO determines optimal number of iterations 130 | ## MANUAL uses user supplied number of iterations 131 | ## If MANUAL, add 
"Stability_xxx_iters = 10" 132 | stability_pixel_mode = AUTO2 133 | # stability_pixel_mode = AUTO 134 | stability_raster_mode = MANUAL 135 | stability_raster_iters = 6 136 | 137 | ## Sensible Heat Flux Component Rasters 138 | save_dt_raster_flag = False 139 | save_psi_raster_flag = False 140 | save_l_stabil_raster_flag = False 141 | save_rah_raster_flag = False 142 | save_u_star_raster_flag = False 143 | 144 | ## ET 145 | save_le_raster_flag = False 146 | save_et_inst_raster_flag = False 147 | save_etrf_raster_flag = True 148 | save_et_24_raster_flag = False 149 | 150 | ## Evaporative Fraction 151 | # save_ef_raster_flag = False 152 | # use_ef_flag = False 153 | # ef_landuses = 21, 52, 71 154 | -------------------------------------------------------------------------------- /code/ini_templates/landsat_metric_monte_carlo.ini: -------------------------------------------------------------------------------- 1 | ## METRIC Monte Carlo Input File 2 | [INPUTS] 3 | 4 | ## Maximum number of calibrations if using Morton calibration 5 | max_cal_iterations = 5 6 | max_point_iterations = 10 7 | ts_diff_threshold = 4 8 | 9 | save_etrf_temp_plots = False 10 | save_etrf_final_plots = True 11 | save_ndvi_plots = True 12 | etrf_training_path = C:\pymetric\misc\etrf_training_test.csv 13 | 14 | ## Linear interpolate target Kc values based on day of year 15 | kc_cold_doy_dict = 1:0.8, 60:0.8, 91:1.05, 274:1.05, 305:0.8, 366:0.8 16 | # kc_cold_doy_dict = 1:1.05, 366:1.05 17 | kc_hot_doy_dict = 1:0.1, 366:0.1 18 | 19 | ## ETr and PPT for daily soil water balance 20 | etr_ws = C:\pymetric\gridmet\etr 21 | ppt_ws = C:\pymetric\gridmet\ppt 22 | etr_re = (?Petr)_(?P\d{4})_daily_(?P\w+).img$ 23 | ppt_re = (?Pppt)_(?P\d{4})_daily_(?P\w+).img$ 24 | 25 | ## Available water capacity for daily soil water balance 26 | awc_path = C:\pymetric\soils\AWC_WTA_0to10cm_composite.shp 27 | 28 | ## Soil water balance 29 | swb_spinup_days = 30 30 | swb_min_spinup_days = 5 31 | 32 | ## Pixel database name/location 33 | ## Not currently implemented 34 | # cal_folder = CALIBRATION_DATABASE 35 | # cal_name = monte_carlo_pixels.shp -------------------------------------------------------------------------------- /code/ini_templates/landsat_metric_pixel_rating.ini: -------------------------------------------------------------------------------- 1 | ## Pixel Rating Input File 2 | [INPUTS] 3 | block_size = 1024 4 | # block_size = 2048 5 | # block_size = 4096 6 | 7 | landuse_raster = C:\pymetric\nlcd\p043r030\nlcd_2011.img 8 | cdl_ag_raster = C:\pymetric\cdl\p043r030\cdl_ag_2011.img 9 | fields_raster = C:\pymetric\fields\p043r030\fields_2011.img 10 | 11 | ## Filter pixel regions 12 | ## Use the CDL ag mask mainly when you don't have field boundaries 13 | apply_field_mask = True 14 | apply_cdl_ag_mask = False 15 | apply_ndwi_mask = True 16 | apply_ndvi_mask = True 17 | ## The following options are not currently supported 18 | # apply_nlcd_mask = False 19 | # apply_study_area_mask = True 20 | 21 | ## Rating algorithms 22 | albedo_rating_flag = True 23 | nlcd_rating_flag = True 24 | ndvi_rating_flag = True 25 | ts_rating_flag = True 26 | ke_rating_flag = False 27 | 28 | ## Select pixels in top X percentile of ratings 29 | cold_percentile = 99 30 | hot_percentile = 99 31 | 32 | overwrite_flag = True 33 | pyramids_flag = False 34 | statistics_flag = False 35 | 36 | ## Raster flags 37 | save_region_mask_flag = True 38 | save_rating_rasters_flag = True 39 | save_suggestion_rasters_flag = True 40 | 41 | cdl_buffer_cells = 2 42 | 
-------------------------------------------------------------------------------- /code/ini_templates/landsat_project.ini: -------------------------------------------------------------------------------- 1 | ## Landsat Harney Basin Input File 2 | [INPUTS] 3 | 4 | year = 2015 5 | tile_list = p043r030 6 | # tile_list = p042r030, p043r030, p044r030 7 | project_folder = C:\pymetric\example 8 | 9 | ## Monte Carlo 10 | monte_carlo_flag = True 11 | etrf_training_path = C:\pymetric\misc\etrf_training_test.csv 12 | 13 | ## Interpolator 14 | interpolate_folder = ET 15 | interpolate_rasters_flag = True 16 | interpolate_tables_flag = True 17 | 18 | ## ETrF (optional, if not set will default to current project/year) 19 | # etrf_input_folder = C:\pymetric\example\2015 20 | 21 | ## Interpolator study area (for rasters interpolator) 22 | ## Output projection will be inherited from study_area_path 23 | ## If study_area_mask_flag, convert to raster and mask values outside study area features 24 | ## If not study_area_mask_flag, set raster output extent and spat. ref from study_area_path 25 | ## Overriding the output projection with study_area_proj is not currently supported 26 | ## The study_area_buffer is currently only applied to the extent, not the features 27 | ## The buffer can be positive or negative (in or out) with units in the output projection 28 | study_area_path = C:\pymetric\example\study_area\harney_wgs84z11.shp 29 | study_area_mask_flag = True 30 | study_area_snap = 0, 0 31 | study_area_cellsize = 30 32 | # study_area_buffer = 0 33 | # study_area_proj = None 34 | 35 | ## Interpolator zones (for tables interpolator) 36 | ## Output raster properties will be inherited from 1) zones_mask or 2) set separately 37 | ## Output projection will be inherited from 1) zones_mask, 2) zones_proj, or 3) zones_path 38 | ## zones_proj can be EPSG, PROJ4, or WKT 39 | ## If set, zones_mask values of 0 or nodata will be excluded from zonal statistics 40 | ## Zones can be buffered in or out, units are in the output projection 41 | zones_path = C:\pymetric\example\fields\fields_2016_wgs84z11.shp 42 | zones_name_field = FID 43 | zones_snap = 0, 0 44 | zones_cellsize = 30 45 | # zones_buffer = 0 46 | # zones_proj = None 47 | # zones_mask = C:\pymetric\example\ag_mask\ca_NorCen_30m_reclass_poly_0b_2012.tif 48 | 49 | ## INI flags 50 | metric_flag = True 51 | 52 | ## Path/row control flags 53 | landsat_flag = True 54 | dem_flag = True 55 | nlcd_flag = True 56 | cdl_flag = True 57 | landfire_flag = False 58 | field_flag = True 59 | 60 | ## Scene control flags 61 | calc_refl_toa_flag = True 62 | calc_refl_toa_qa_flag = True 63 | calc_ts_bt_flag = True 64 | ## Use QA band to set common area 65 | ## Fmask cloud, shadow, & snow pixels will be removed from common area 66 | calc_fmask_common_flag = True 67 | fmask_smooth_flag = True 68 | fmask_smooth_cells = 1 69 | fmask_erode_flag = False 70 | fmask_erode_cells = 0 71 | fmask_buffer_flag = False 72 | fmask_buffer_cells = 0 73 | ## Extract separate Fmask rasters 74 | calc_fmask_flag = True 75 | calc_fmask_cloud_flag = False 76 | calc_fmask_snow_flag = False 77 | calc_fmask_water_flag = False 78 | ## Apply user cloud masks 79 | cloud_mask_flag = False 80 | # cloud_mask_ws = C:\pymetric\landsat\cloud_masks 81 | ## Keep raw Landsat DN, LEDAPS, and Fmask rasters 82 | keep_dn_flag = True 83 | keep_qa_flag = True 84 | ## Hourly/daily weather data for METRIC 85 | calc_metric_ea_flag = True 86 | calc_metric_wind_flag = True 87 | calc_metric_etr_flag = True 88 | 
calc_metric_tair_flag = False 89 | ## Daily soil water balance Ke 90 | calc_swb_ke_flag = False 91 | ## Remove edge (fringe) cells 92 | edge_smooth_flag = True 93 | 94 | 95 | ## Path/row inputs 96 | footprint_path = C:\pymetric\landsat\footprints\WRS2_descending.shp 97 | keep_list_path = C:\pymetric\example\landsat\clear_scenes.txt 98 | # skip_list_path = C:\pymetric\example\landsat\cloudy_scenes.txt 99 | 100 | ## NLDAS or REFET 101 | metric_hourly_weather = NLDAS 102 | 103 | ## METRIC hourly weather data 104 | metric_ea_input_folder = C:\pymetric\nldas\ea 105 | metric_wind_input_folder = C:\pymetric\nldas\wind 106 | metric_etr_input_folder = C:\pymetric\nldas\etr 107 | metric_tair_input_folder = C:\pymetric\nldas\tair 108 | metric_hourly_re = (?P\w+)_(?P\d{8})_hourly_(?P\w+).img$ 109 | metric_daily_re = (?P\w+)_(?P\d{8})_(?!hourly_)(?P\w+).img$ 110 | ## RefET station data per path/row 111 | # refet_params_path = C:\pymetric\refet\refet_path_row_inputs_windows.csv 112 | 113 | ## Round weather arrays to N digits to save space 114 | rounding_digits = 3 115 | 116 | ## Interpolator and soil water balance weather inputs 117 | etr_input_folder = C:\pymetric\gridmet\etr 118 | etr_input_re = (?Petr)_(?P\d{4})_daily_(?P\w+).img$ 119 | 120 | ## Soil water balance weather inputs 121 | ppt_input_folder = C:\pymetric\gridmet\ppt 122 | ppt_input_re = (?Pppt)_(?P\d{4})_daily_(?P\w+).img$ 123 | 124 | ## Soil water balance soil input 125 | awc_input_path = C:\pymetric\soils\AWC_WTA_0to10cm_composite.img 126 | 127 | ## Soil water balance 128 | swb_spinup_days = 30 129 | swb_min_spinup_days = 5 130 | 131 | ## Input folder paths 132 | landsat_input_folder = C:\pymetric\landsat 133 | 134 | ## DEM 135 | dem_input_folder = C:\pymetric\dem\tiles 136 | ## First item is latitude, second is longitude (i.e. n39w120.img) 137 | ## Don't change unless using custom DEM tiles 138 | dem_tile_fmt = {}{}.img 139 | 140 | ## NLCD 141 | nlcd_input_path = C:\pymetric\nlcd\nlcd_2011_landcover_2011_edition_2014_10_10\nlcd_2011_landcover_2011_edition_2014_10_10.img 142 | 143 | ## Buffer path/row extents by N degrees for selecting DEM tiles and clipping NLCD 144 | tile_buffer = 0.25 145 | 146 | ## CDL 147 | cdl_input_path = C:\pymetric\cdl\2015_30m_cdls.img 148 | cdl_ag_list = 1-61, 66-77, 204-254 149 | # non_ag_list = 0, 62-65, 81-199 150 | 151 | ## Landfire 152 | landfire_input_path = C:\pymetric\landfire\US_140_EVT\Grid\us_140evt 153 | landfire_ag_list = 3960-3999 154 | 155 | ## Fields (for pixel rating only) 156 | ## This could be set path/row specific 157 | ## It could be read from the refet_params_path or calculated as a buffer 158 | ## around the RefET site (add refet lat/lon to refet_params?) 
159 | field_input_path = C:\pymetric\example\fields\fields_2016_wgs84z11.shp 160 | 161 | ## Zom remap 162 | zom_remap_path = C:\pymetric\misc\zom_nlcd_default.json 163 | 164 | ## Output folder paths 165 | dem_output_folder = C:\pymetric\dem 166 | nlcd_output_folder = C:\pymetric\nlcd 167 | cdl_output_folder = C:\pymetric\cdl 168 | landfire_output_folder = C:\pymetric\landfire 169 | field_output_folder = C:\pymetric\fields 170 | 171 | ## Output file name formats 172 | dem_output_name = dem.img 173 | nlcd_output_fmt = nlcd_{:04d}.img 174 | cdl_output_fmt = cdl_{:04d}.img 175 | cdl_ag_output_fmt = cdl_ag_{:04d}.img 176 | landfire_output_fmt = landfire_{:04d}.img 177 | landfire_ag_output_fmt = landfire_ag_{:04d}.img 178 | field_output_fmt = fields_{:04d}.img 179 | 180 | ## Script paths 181 | prep_scene_func = C:\pymetric\code\prep_functions\landsat_prep_scene_func.py 182 | metric_model1_func = C:\pymetric\code\metric_functions\metric_model1_func.py 183 | metric_model2_func = C:\pymetric\code\metric_functions\metric_model2_func.py 184 | pixel_rating_func = C:\pymetric\code\metric_functions\pixel_rating_func.py 185 | pixel_points_func = C:\pymetric\code\metric_functions\pixel_points_func.py 186 | monte_carlo_func = C:\pymetric\code\metric_functions\monte_carlo_func.py 187 | interpolate_rasters_func = C:\pymetric\code\interp_functions\interpolate_rasters_func.py 188 | interpolate_tables_func = C:\pymetric\code\interp_functions\interpolate_tables_func.py 189 | 190 | ## Template input files for scripts 191 | metric_ini = C:\pymetric\code\ini_templates\landsat_metric_inputs.ini 192 | pixel_rating_ini = C:\pymetric\code\ini_templates\landsat_metric_pixel_rating.ini 193 | monte_carlo_ini = C:\pymetric\code\ini_templates\landsat_metric_monte_carlo.ini 194 | interpolate_ini = C:\pymetric\code\ini_templates\landsat_interpolate.ini 195 | -------------------------------------------------------------------------------- /code/interp_functions/README.md: -------------------------------------------------------------------------------- 1 | # Interpolation Functions 2 | Contains Python functions that are used by the pyMETRIC software. 3 | __NOTE: The Python files contained in this directory are not intended to be used directly by the pyMETRIC operator__ -------------------------------------------------------------------------------- /code/local/README.md: -------------------------------------------------------------------------------- 1 | pyMETRIC Workflow 2 | ================= 3 | 4 | >**Note:** 5 | >pyMETRIC makes use of the [argparse](https://docs.python.org/3/library/argparse.html) module, which enables the use of command-line options, arguments, and sub-commands. This functionality is only available when running pyMETRIC through the command line. Operating pyMETRIC through the graphical user interface (GUI) of your operating system is not recommended. 6 | 7 | Input File 8 | ------------- 9 | Components of pyMETRIC require the use of an input file (.ini), which sets the parameters to be used in processing. The path to the input file must be preceded by "-i" or "--ini". 
(Example: -i C:\pymetric\example\landsat_2015.ini) 10 | 11 | 12 | Common Flags 13 | ------------- 14 | > -h : triggers the built-in help, which lists the available arguments for the script being run 15 | > -o : indicates that output data from previous pyMETRIC runs will be overwritten by the execution of the code 16 | > -mp : allows for 'multiprocessing' of code, where multiple CPU cores will be used to run iterations of the process concurrently 17 | > -i : path to the input file (.ini), where inputs for code execution are stored (this flag is mandatory for most scripts within pyMETRIC) 18 | 19 | 20 | Data Preparation 21 | ------------- 22 | ### landsat_prep_path_row.py 23 | **-i** or **-\-ini (str)** 24 | : File path of the input parameters file 25 | 26 | **-o** or **-\-overwrite_flag (bool)**: 27 | : If True, overwrite existing files 28 | 29 | **-mp (int)** or **-\-multiprocessing (int)**: 30 | : Number of CPU cores to use for processing 31 | 32 | **-d** or **-\-debug (bool)**: 33 | : If True, enable debug level logging 34 | 35 | ### landsat_prep_ini.py 36 | **ini_path (str)** 37 | : File path of the input parameters file 38 | 39 | **-o** or **-\-overwrite_flag (bool)**: 40 | : If True, overwrite existing files 41 | 42 | **-d** or **-\-debug (bool)**: 43 | : If True, enable debug level logging 44 | 45 | **-mp (int)** or **-\-multiprocessing (int)**: 46 | : Number of CPU cores to use for processing 47 | 48 | **-\-no_smooth (int)**: 49 | : Don't dilate and erode image to remove fringe/edge pixels 50 | 51 | **-\-no_stats (bool)**: 52 | : If True, don't compute raster statistics 53 | 54 | **-o** or **-\-overwrite_flag (bool)**: 55 | : If True, overwrite existing files 56 | 57 | **-pr** or **-\-path_row (str)** 58 | : Landsat path/rows to process (pXXrYY) 59 | 60 | **-\-window (bool)**: 61 | : If True, each process will be opened in a new terminal 62 | 63 | ### landsat_prep_scene.py 64 | **-i** or **-\-ini (str)** 65 | : File path of the input parameters file 66 | 67 | **-bs** or **-\-blocksize (str)** 68 | : Block size 69 | 70 | **-\-delay (float)**: 71 | : Max random delay starting function in seconds 72 | 73 | 74 | METRIC 75 | ------------- 76 | ### metric_model1.py 77 | **-i** or **-\-ini (str)** 78 | : File path of the input parameters file 79 | 80 | **-d** or **-\-debug (bool)**: 81 | : If True, enable debug level logging 82 | 83 | **-\-delay (float)**: 84 | : Max random delay starting function in seconds 85 | 86 | **-mp (int)** or **-\-multiprocessing (int)**: 87 | : Number of CPU cores to use for processing 88 | 89 | **-\-no_stats (bool)**: 90 | : If True, don't compute raster statistics 91 | 92 | **-o** or **-\-overwrite_flag (bool)**: 93 | : If True, overwrite existing files 94 | 95 | **-pr** or **-\-path_row (str)** 96 | : Landsat path/rows to process (pXXrYY) 97 | 98 | **-\-window (bool)**: 99 | : If True, each process will be opened in a new terminal 100 | 101 | ### metric_pixel_rating.py 102 | **-i** or **-\-ini (str)** 103 | : File path of the input parameters file 104 | 105 | **-d** or **-\-debug (bool)**: 106 | : If True, enable debug level logging 107 | 108 | **-\-delay (float)**: 109 | : Max random delay starting function in seconds 110 | 111 | **-mp (int)** or **-\-multiprocessing (int)**: 112 | : Number of CPU cores to use for processing 113 | 114 | **-\-no_stats (bool)**: 115 | : If True, don't compute raster statistics 116 | 117 | **-o** or **-\-overwrite_flag (bool)**: 118 | : If True, overwrite existing files 119 | 120 | **-pr** or **-\-path_row (str)** 121 | : 
Landsat path/rows to process (pXXrYY) 122 | 123 | **-\-window (bool)**: 124 | : If True, each process will be opened in a new terminal 125 | 126 | ### metric_pixel_points.py 127 | **-i** or **-\-ini (str)** 128 | : File path of the input parameters file 129 | 130 | **-gs** or **-\-groupsize (str)** 131 | : Minimum group size for placing calibration points 132 | 133 | **-bs** or **-\-blocksize (str)** 134 | : Block size 135 | 136 | **-\-no_shapefile (str)** 137 | : Don't save calibration points to shapefile 138 | 139 | **-j** or **-\-geojson (str)** 140 | : Save calibration points to GeoJSON 141 | 142 | **-o** or **-\-overwrite_flag (bool)**: 143 | : If True, overwrite existing files 144 | 145 | **-mp (int)** or **-\-multiprocessing (int)**: 146 | : Number of CPU cores to use for processing 147 | 148 | **-\-window (bool)**: 149 | : If True, each process will be opened in a new terminal 150 | 151 | **-\-delay (float)**: 152 | : Max random delay starting function in seconds 153 | 154 | **-d** or **-\-debug (bool)**: 155 | : If True, enable debug level logging 156 | 157 | ### metric_model2.py 158 | **-i** or **-\-ini (str)** 159 | : File path of the input parameters file 160 | 161 | **-d** or **-\-debug (bool)**: 162 | : If True, enable debug level logging 163 | 164 | **-\-delay (float)**: 165 | : Max random delay starting function in seconds 166 | 167 | **-mp (int)** or **-\-multiprocessing (int)**: 168 | : Number of CPU cores to use for processing 169 | 170 | **-\-no_stats (bool)**: 171 | : If True, don't compute raster statistics 172 | 173 | **-o** or **-\-overwrite_flag (bool)**: 174 | : If True, overwrite existing files 175 | 176 | **-pr** or **-\-path_row (str)** 177 | : Landsat path/rows to process (pXXrYY) 178 | 179 | **-\-window (bool)**: 180 | : If True, each process will be opened in a new terminal 181 | 182 | ### landsat_interpolate.py 183 | **-i** or **-\-ini (str)** 184 | : File path of the input parameters file 185 | 186 | **-d** or **-\-debug (bool)**: 187 | : If True, enable debug level logging 188 | 189 | **-\-delay (float)**: 190 | : Max random delay starting function in seconds 191 | 192 | **-mp (int)** or **-\-multiprocessing (int)**: 193 | : Number of CPU cores to use for processing 194 | 195 | **-\-no_pyramids (bool)**: 196 | : If True, don't compute raster pyramids 197 | 198 | **-\-no_stats (bool)**: 199 | : If True, don't compute raster statistics 200 | 201 | **-\-no_file_logging (bool)**: 202 | : If True, don't write logging to file 203 | 204 | **-o** or **-\-overwrite_flag (bool)**: 205 | : If True, overwrite existing files 206 | 207 | **-pr** or **-\-path_row (str)** 208 | : Landsat path/rows to process (pXXrYY) 209 | 210 | **-\-rasters (bool)** 211 | : If True, override INI and interpolate rasters 212 | 213 | **-\-tables (bool)** 214 | : If True, override INI and interpolate zone tables 215 | 216 | ## Example sequence for running the Python scripts in this directory 217 | __Please note that these scripts will only run effectively if the required data is downloaded and structured as prescribed in the [Example Data readme](../../docs/EXAMPLE_DATA.md).__ 218 | ``` 219 | python C:\pymetric\code\local\landsat_prep_path_row.py -i C:\pymetric\example\landsat_2015.ini 220 | python C:\pymetric\code\local\landsat_prep_ini.py -i C:\pymetric\example\landsat_2015.ini 221 | python C:\pymetric\code\local\landsat_prep_scene.py -i C:\pymetric\example\landsat_2015.ini -mp 222 | python C:\pymetric\code\local\metric_model1.py -i C:\pymetric\example\landsat_2015.ini -mp 223 | python 
C:\pymetric\code\local\metric_pixel_rating.py -i C:\pymetric\example\landsat_2015.ini -mp 224 | python C:\pymetric\code\local\metric_pixel_points.py -i C:\pymetric\example\landsat_2015.ini -mp 225 | python C:\pymetric\code\local\metric_model2.py -i C:\pymetric\example\landsat_2015.ini -mp 226 | python C:\pymetric\code\local\landsat_interpolate.py -i C:\pymetric\example\landsat_2015.ini -mp --tables 227 | python C:\pymetric\code\local\landsat_interpolate.py -i C:\pymetric\example\landsat_2015.ini -mp --rasters 228 | ``` 229 | -------------------------------------------------------------------------------- /code/local/landsat_prep_scene.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | #-------------------------------- 3 | # Name: landsat_prep_scene.py 4 | # Purpose: Prepare Landsat Scenes 5 | #-------------------------------- 6 | 7 | import argparse 8 | from datetime import datetime 9 | import logging 10 | import multiprocessing as mp 11 | import os 12 | import re 13 | import subprocess 14 | import sys 15 | 16 | import python_common as dripy 17 | 18 | 19 | def main(ini_path, tile_list=None, blocksize=2048, stats_flag=True, 20 | overwrite_flag=False, mp_procs=1, delay=0, debug_flag=False, 21 | new_window_flag=False): 22 | """Prep Landsat scenes 23 | 24 | Parameters 25 | ---------- 26 | ini_path : str 27 | File path of the input parameters file. 28 | tile_list : list, optional 29 | Landsat path/rows to process (i.e. [p045r043, p045r033]). 30 | This will override the tile list in the INI file. 31 | blocksize : int, optional 32 | Processing block size (the default is 2048). 33 | stats_flag : bool, optional 34 | If True, compute raster statistics (the default is True). 35 | overwrite_flag : bool, optional 36 | If True, overwrite existing files (the default is False). 37 | mp_procs : int, optional 38 | Number of cores to use (the default is 1). 39 | delay : float, optional 40 | max random delay starting function in seconds (the default is 0). 41 | debug_flag : bool, optional 42 | If True, enable debug level logging (the default is False). 43 | new_window_flag : bool, optional 44 | If True, open each process in new terminal window (the default is False). 45 | Microsoft Windows only. 
46 | 47 | Returns 48 | ------- 49 | None 50 | 51 | """ 52 | logging.info('\nPreparing Landsat scenes') 53 | 54 | # Open config file 55 | config = dripy.open_ini(ini_path) 56 | 57 | logging.debug(' Reading Input File') 58 | year = config.getint('INPUTS', 'year') 59 | if tile_list is None: 60 | tile_list = dripy.read_param('tile_list', [], config, 'INPUTS') 61 | project_ws = config.get('INPUTS', 'project_folder') 62 | logging.debug(' Year: {}'.format(year)) 63 | logging.debug(' Path/rows: {}'.format(', '.join(tile_list))) 64 | logging.debug(' Project: {}'.format(project_ws)) 65 | 66 | func_path = config.get('INPUTS', 'prep_scene_func') 67 | keep_list_path = dripy.read_param('keep_list_path', '', config, 'INPUTS') 68 | # DEADBEEF - Remove if keep list works 69 | # skip_list_path = dripy.read_param('skip_list_path', '', config, 'INPUTS') 70 | 71 | # Only allow new terminal windows on Windows 72 | if os.name != 'nt': 73 | new_window_flag = False 74 | 75 | # Regular expressions 76 | # For now assume path/row are two digit numbers 77 | tile_re = re.compile('p\d{3}r\d{3}', re.IGNORECASE) 78 | image_id_re = re.compile( 79 | '^(LT04|LT05|LE07|LC08)_(?:\w{4})_(\d{3})(\d{3})_' 80 | '(\d{4})(\d{2})(\d{2})_(?:\d{8})_(?:\d{2})_(?:\w{2})$') 81 | 82 | # Check inputs folders/paths 83 | if not os.path.isdir(project_ws): 84 | logging.error('\nFolder {} does not exist'.format(project_ws)) 85 | sys.exit() 86 | 87 | # Setup command line argument 88 | call_args = [sys.executable, func_path, '-i', ini_path] 89 | if blocksize is not None: 90 | call_args.extend(['--blocksize', str(blocksize)]) 91 | if stats_flag: 92 | call_args.append('--stats') 93 | if overwrite_flag: 94 | call_args.append('--overwrite') 95 | if debug_flag: 96 | call_args.append('--debug') 97 | 98 | # Read keep/skip lists 99 | if keep_list_path: 100 | logging.debug('\nReading scene keep list') 101 | with open(keep_list_path) as keep_list_f: 102 | image_keep_list = keep_list_f.readlines() 103 | image_keep_list = [image_id.strip() for image_id in image_keep_list 104 | if image_id_re.match(image_id.strip())] 105 | else: 106 | logging.debug('\nScene keep list not set in INI') 107 | image_keep_list = [] 108 | # if skip_list_path: 109 | # logging.debug('\nReading scene skip list') 110 | # with open(skip_list_path) as skip_list_f: 111 | # image_skip_list = skip_list_f.readlines() 112 | # image_skip_list = [image_id.strip() for image_id in image_skip_list 113 | # if image_id_re.match(image_id.strip())] 114 | # else: 115 | # logging.debug('\nScene skip list not set in INI') 116 | # image_skip_list = [] 117 | 118 | # Process each image 119 | mp_list = [] 120 | for tile_name in sorted(tile_list): 121 | logging.debug('\nTile: {}'.format(tile_name)) 122 | tile_ws = os.path.join(project_ws, str(year), tile_name) 123 | if not os.path.isdir(tile_ws) or not tile_re.match(tile_name): 124 | logging.debug(' {} {} - invalid tile, skipping'.format( 125 | year, tile_name)) 126 | continue 127 | 128 | # Check that there are image folders (an empty keep list keeps all) 129 | image_id_list = [ 130 | image_id for image_id in sorted(os.listdir(tile_ws)) 131 | if (image_id_re.match(image_id) and 132 | os.path.isdir(os.path.join(tile_ws, image_id)) and 133 | (not image_keep_list or image_id in image_keep_list))] 134 | # (image_skip_list and image_id not in image_skip_list))] 135 | if not image_id_list: 136 | logging.debug(' {} {} - no available images, skipping'.format( 137 | year, tile_name)) 138 | continue 139 | else: 140 | logging.debug(' {} {}'.format(year, tile_name)) 141 | 142 | # Prep each Landsat 
scene 143 | for image_id in image_id_list: 144 | image_ws = os.path.join(tile_ws, image_id) 145 | if mp_procs > 1: 146 | mp_list.append([ 147 | call_args, image_ws, delay, new_window_flag]) 148 | else: 149 | logging.debug(' {}'.format(image_id)) 150 | subprocess.call(call_args, cwd=image_ws) 151 | 152 | if mp_list: 153 | pool = mp.Pool(mp_procs) 154 | results = pool.map(dripy.call_mp, mp_list, chunksize=1) 155 | pool.close() 156 | pool.join() 157 | del results, pool 158 | 159 | logging.debug('\nScript complete') 160 | 161 | 162 | def arg_parse(): 163 | """""" 164 | parser = argparse.ArgumentParser( 165 | description='Batch Landsat scenes prep', 166 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 167 | parser.add_argument( 168 | '-i', '--ini', required=True, type=dripy.arg_valid_file, 169 | help='Landsat project input file', metavar='FILE') 170 | parser.add_argument( 171 | '-bs', '--blocksize', default=2048, type=int, 172 | help='Processing block size') 173 | parser.add_argument( 174 | '--delay', default=0, type=int, metavar='N', 175 | help='Max random delay starting job in seconds') 176 | parser.add_argument( 177 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 178 | help='Debug level logging', action="store_const", dest="loglevel") 179 | parser.add_argument( 180 | '-mp', '--multiprocessing', default=1, type=int, 181 | metavar='N', nargs='?', const=mp.cpu_count(), 182 | help='Number of processors to use') 183 | # The "no_stats" parameter is negated below to become "stats". 184 | # By default, prep_scene will NOT compute raster statistics. 185 | # If a user runs this "local" script, they probably want statistics. 186 | # If not, user can "turn off" statistics. 187 | parser.add_argument( 188 | '--no_stats', default=False, action="store_true", 189 | help='Don\'t compute raster statistics') 190 | parser.add_argument( 191 | '-o', '--overwrite', default=False, action="store_true", 192 | help='Force overwrite of existing files') 193 | parser.add_argument( 194 | '-pr', '--path_row', nargs="+", 195 | help='Landsat path/rows to process (pXXrYY)') 196 | parser.add_argument( 197 | '--window', default=False, action="store_true", 198 | help='Open each process in a new terminal (windows only)') 199 | args = parser.parse_args() 200 | 201 | # Default is to build statistics (opposite of --no_stats default=False) 202 | args.stats = not args.no_stats 203 | 204 | # Convert relative paths to absolute paths 205 | if os.path.isfile(os.path.abspath(args.ini)): 206 | args.ini = os.path.abspath(args.ini) 207 | 208 | return args 209 | 210 | 211 | if __name__ == '__main__': 212 | args = arg_parse() 213 | 214 | logging.basicConfig(level=args.loglevel, format='%(message)s') 215 | logging.info('\n{}'.format('#' * 80)) 216 | log_f = '{:<20s} {}' 217 | logging.info(log_f.format('Run Time Stamp:', datetime.now().isoformat(' '))) 218 | logging.info(log_f.format('Current Directory:', os.getcwd())) 219 | logging.info(log_f.format('Script:', os.path.basename(sys.argv[0]))) 220 | 221 | main(ini_path=args.ini, tile_list=args.path_row, blocksize=args.blocksize, 222 | stats_flag=args.stats, overwrite_flag=args.overwrite, 223 | mp_procs=args.multiprocessing, delay=args.delay, 224 | debug_flag=args.loglevel==logging.DEBUG, new_window_flag=args.window) 225 | -------------------------------------------------------------------------------- /code/metric_functions/README.md: -------------------------------------------------------------------------------- 1 | # metric_functions 2 | 3 | These functions are (should be) 
independent of the image type or operating system. 4 | Each function should be applied to a single image. 5 | 6 | This directory contains Python scripts/functions that are used by the pyMETRIC software. 7 | __NOTE: The Python files contained in this directory are not intended to be used directly by the pyMETRIC operator__ -------------------------------------------------------------------------------- /code/prep_functions/README.md: -------------------------------------------------------------------------------- 1 | # prep_functions 2 | 3 | This directory contains a Python script/function that is used by the pyMETRIC software. 4 | __NOTE: The Python file contained in this directory is not intended to be used directly by the pyMETRIC operator__ -------------------------------------------------------------------------------- /docs/EXAMPLE_METRIC.md: -------------------------------------------------------------------------------- 1 | Quicklinks: [Data Preparation](EXAMPLE_DATA.md) --- [Project Setup](EXAMPLE_SETUP.md) --- [Running METRIC](EXAMPLE_METRIC.md) 2 | 3 | # Running METRIC 4 | The following scripts should be run in the following sequence to produce ET estimates. These scripts are all located in the ["code/local/"](/code/local) directory. 5 | 6 | ### landsat_prep_ini.py 7 | Prepares Landsat path/row data and populates input files to be used later in the pyMETRIC process. 8 | 9 | ### landsat_prep_scene.py 10 | Prepares Landsat scenes for processing. This step includes the creation of rasters which are a subset of the data downloaded during the Data Preparation step of pyMETRIC. 11 | 12 | ##### Expected Raster Output: 13 | * Common Area Raster 14 | * Vapor Pressure 15 | * Wind 16 | * fmask 17 | * Reference ETr 18 | * Modified Cropland Data Layer 19 | 20 | 21 | ### metric_model1.py 22 | Runs METRIC Model 1 for all images. 23 | 24 | ##### Expected Raster Output: 25 | * Cosine of Theta (cos_theta.img) 26 | * Albedo 27 | * Top of Atmosphere Reflectance 28 | * Delapsed Temperature 29 | * Ts Brightness ('ts_bt.img') 30 | * NDVI (Normalized Difference Vegetation Index) 31 | * NDWI (Normalized Difference Water Index) 32 | * SAVI (Soil Adjusted Vegetation Index) 33 | * LAI (Leaf Area Index) 34 | 35 | 36 | ### metric_pixel_rating.py 37 | Runs the METRIC pixel rating function for all images, identifying potential calibration points. 38 | 39 | ##### Expected Raster Output: 40 | * Region Mask 41 | * Cold Pixel Rating 42 | * Cold Pixel Suggestion 43 | * Hot Pixel Rating 44 | * Hot Pixel Suggestion 45 | 46 | 47 | 48 | ### metric_pixel_points.py 49 | Runs the METRIC pixel points function for all images, selecting initial calibration points for each Landsat image. 50 | 51 | ### metric_model2.py 52 | Runs METRIC Model 2 for all images. 53 | 54 | ##### Expected Raster Output: 55 | * Fraction of Reference ET 56 | 57 | ### landsat_interpolate.py 58 | Interpolates seasonal ET data from individual METRIC scenes. 59 | 60 | # Example workflow 61 | This workflow is set up to run with the example input file (C:\pymetric\example\landsat_2015.ini). Use this workflow as a starting point when using pyMETRIC for your data. 
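Before launching the sequence below, it can be worth confirming that the pyMETRIC support modules are importable from your environment (the PYTHONPATH setup is described in [INSTALL.md](INSTALL.md)). A minimal check (a sketch, assuming PYTHONPATH includes C:\pymetric\code\support and the module's own dependencies are installed) might look like:

```
# Confirm the pyMETRIC support module can be found on PYTHONPATH
import python_common

print(python_common.__file__)
```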
62 | 63 | ``` 64 | python C:\pymetric\code\local\landsat_prep_path_row.py -i C:\pymetric\example\landsat_2015.ini 65 | python C:\pymetric\code\local\landsat_prep_ini.py -i C:\pymetric\example\landsat_2015.ini 66 | python C:\pymetric\code\local\landsat_prep_scene.py -i C:\pymetric\example\landsat_2015.ini 67 | python C:\pymetric\code\local\metric_model1.py -i C:\pymetric\example\landsat_2015.ini 68 | python C:\pymetric\code\local\metric_pixel_rating.py -i C:\pymetric\example\landsat_2015.ini 69 | python C:\pymetric\code\local\metric_pixel_points.py -i C:\pymetric\example\landsat_2015.ini 70 | ``` 71 | 72 | __Prior to running METRIC Model 2, the calibration pixels must be adjusted manually with ArcGIS. At this point in the workflow, the software has automatically chosen sample pixels; however, the locations of the calibration pixels must be changed for best results. The METRIC Manual should be consulted during the calibration process in order to provide the best possible estimates of ET. If pixels are left unmodified, ETrF rasters will still be produced; however, the validity of the ETrF data will be significantly degraded.__ 73 | 74 | ``` 75 | python C:\pymetric\code\local\metric_model2.py -i C:\pymetric\example\landsat_2015.ini 76 | python C:\pymetric\code\local\landsat_interpolate.py -i C:\pymetric\example\landsat_2015.ini --tables 77 | python C:\pymetric\code\local\landsat_interpolate.py -i C:\pymetric\example\landsat_2015.ini --rasters 78 | ``` 79 | 80 | ## Running the Monte Carlo Tool 81 | 82 | The following will run one iteration of the Monte Carlo tool with fixed tail sizes of 1% (cold) and 4% (hot). The tail size is the percent of agricultural pixels with an ETrF greater than the calibration ETrF value (for the cold calibration point). 83 | ``` 84 | python C:\pymetric\code\local\metric_monte_carlo.py -i C:\pymetric\example\landsat_2015.ini -mc 0 --tails 1 4 85 | ``` 86 | 87 | The following will run ten different iterations of the Monte Carlo tool with varying tail sizes (developed from the training data in 'misc/etrf_training_test.csv'). The "mc" parameter specifies which iterations to run. 88 | ``` 89 | python C:\pymetric\code\local\metric_monte_carlo.py -i C:\pymetric\example\landsat_2015.ini -mc 1-10 90 | ``` -------------------------------------------------------------------------------- /docs/EXAMPLE_SETUP.md: -------------------------------------------------------------------------------- 1 | Quicklinks: [Data Preparation](EXAMPLE_DATA.md) --- [Project Setup](EXAMPLE_SETUP.md) --- [Running METRIC](EXAMPLE_METRIC.md) 2 | 3 | # pyMETRIC Project Setup Example 4 | 5 | This example will step through setting up and running pyMETRIC for a single Landsat path/row. The target study area for this example is the Harney Basin in central Oregon, located in Landsat path 43 row 30. Before going through this example, make sure that the Landsat images and ancillary data have been downloaded and prepared following the steps in the [Data Preparation example](EXAMPLE_DATA.md). 6 | 7 | ## Project Folder 8 | 9 | All of the example script calls listed below assume that the pyMETRIC repository was installed on a Windows computer to "C:\pymetric" and that the scripts are being called from within the "example" project folder (see the [Data Preparation example](EXAMPLE_DATA.md)). If you haven't already, change directory into the example project folder. 
10 | 11 | ``` 12 | C:\pymetric>cd example 13 | ``` 14 | 15 | ## Control / Config / Parameter INI Files 16 | 17 | The parameters for controlling pyMETRIC are primarily set using ".INI" configuration files. The project INI is the main INI the user will change when running pyMETRIC. The values in the project INI are passed to the script-specific INIs using the prep INI tool, but they can also be manually changed/overwritten by the user. 18 | 19 | ## Project INI 20 | 21 | Copy the template landsat_project.ini from the code\ini_templates folder to the example folder. 22 | 23 | Rename the template INI to "landsat_2015.ini" using the command line or file explorer. Typically, a separate INI file will be needed for each year that is processed. 24 | 25 | After renaming the INI, open the INI file in your favorite text editor. The default values for the template INI file have been set for the Harney Basin for 2015. To use the template INI in a different study area or folder structure, it will be necessary to change all of the folder paths from "C:\pymetric\example" to the new project folder. 26 | 27 | ### Snap Point 28 | 29 | The study_area_snap and zones_snap parameters should both be set to 15, 15 in the project INI. This ensures that the final ET maps and zonal statistics calculations will align with a Landsat image of the study area. The snap points will typically be set to 0, 0 if the final image is in a non-WGS84 Zone XX coordinate system (e.g. the NLCD/CDL CONUS Albers) and doesn't need to align with a Landsat image. 30 | 31 | ### Fields 32 | 33 | A field polygon dataset is not currently provided for this example. The cropland data layer (CDL) could be used to generate quasi-field boundaries if desired. For now, the user should ensure that the following flags in the project INI are all False: "cdl_flag", "landfire_flag", "field_flag". 34 | 35 | ## pyMETRIC Setup 36 | 37 | ### Prep path/row ancillary data 38 | 39 | This script will unpack the Landsat scenes and create the ancillary datasets for each Landsat path/row. 40 | 41 | ``` 42 | C:\pymetric\example>python ..\code\local\landsat_prep_path_row.py -i landsat_2015.ini 43 | ``` 44 | 45 | ### Prep INI files 46 | 47 | This script will generate the INI files for all of the subsequent processes. This script can also be used to update the INI files if the user makes a change to the main project INI. 48 | 49 | ``` 50 | C:\pymetric\example>python ..\code\local\landsat_prep_ini.py -i landsat_2015.ini 51 | ``` 52 | 53 | ### Prep Landsat scenes 54 | 55 | ``` 56 | C:\pymetric\example>python ..\code\local\landsat_prep_scene.py -i landsat_2015.ini 57 | ``` 58 | -------------------------------------------------------------------------------- /docs/INSTALL.md: -------------------------------------------------------------------------------- 1 | ## Installation 2 | 3 | pyMETRIC is most easily installed by cloning the [GitHub repository](https://github.com/WSWUP/pymetric). 4 | 5 | Most of the documentation and examples are written assuming you are running pyMETRIC on a Windows PC and that the pyMETRIC repository was cloned directly to the C: drive. If you are using a different operating system or cloned the repository to a different location, you will need to adjust commands, drive letters, and paths accordingly. 6 | 7 | ## Python 8 | 9 | pyMETRIC has only been tested using Python 2.7 and 3.6, but may work with other versions. 
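To confirm which interpreter a given environment will actually use, a quick check from within Python can help; this is a minimal sketch using only the standard library:

```
import sys

# pyMETRIC has been tested on Python 2.7 and 3.6; flag anything else
print('Running Python {}.{}.{}'.format(*sys.version_info[:3]))
if sys.version_info[:2] not in [(2, 7), (3, 6)]:
    print('Warning: this Python version has not been tested with pyMETRIC')
```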
10 | 11 | ## Dependencies 12 | 13 | The following external Python modules must be present to run pyMETRIC: 14 | * [fiona](http://toblerity.org/fiona/) (used to read and write multi-layered GIS file formats) 15 | * [future](https://pypi.python.org/pypi/future) (adds features from Python 3 to Python 2 installations) 16 | * [gdal](http://www.gdal.org/) (version >2.2 and <3.0.0) (the Geospatial Data Abstraction Library is used to interact with raster and vector geospatial data) 17 | * [matplotlib](https://matplotlib.org/) (necessary for creating plots of ET related data) 18 | * [netcdf4](https://www.unidata.ucar.edu/software/netcdf/) (for interacting with multi-dimensional scientific datasets, such as GRIDMET/DAYMET) 19 | * [numpy](http://www.numpy.org/) 20 | * [pandas](http://pandas.pydata.org) (used to perform data processing) 21 | * [requests](http://docs.python-requests.org/en/master/) (adds enhanced http functionality) 22 | * [scipy](https://www.scipy.org/) (provides numerous packages required for the processing of data) 23 | * [refet](https://github.com/WSWUP/RefET) (for computing reference ET) 24 | * [drigo](https://github.com/WSWUP/drigo) (GDAL/OGR helper functions) 25 | 26 | Please see the [requirements](../requirements.txt) file for details on the versioning requirements. The module version numbers listed in the file were tested and are known to work. Other combinations of versions may work but have not been tested. 27 | 28 | ### Python 2 29 | The following external Python module must also be present to run pyMETRIC on Python 2: 30 | * [configparser](https://pypi.python.org/pypi/configparser) (Python 2 backport of the Python 3 configparser module) 31 | 32 | ## Anaconda/Miniconda 33 | 34 | The easiest way of obtaining Python and all of the necessary external modules is to install [Miniconda](https://conda.io/miniconda.html) or [Anaconda](https://www.anaconda.com/download/). 35 | 36 | TODO: Add more explanation about where to install Miniconda (ideally to the root on the C drive) and what options need to be selected when installing. 37 | 38 | After installing Miniconda, make sure to add the [conda-forge](https://conda-forge.github.io/) channel by entering the following in the command prompt or terminal: 39 | ``` 40 | conda config --add channels conda-forge 41 | ``` 42 | 43 | ## Conda Environment 44 | 45 | The user is strongly encouraged to set up a dedicated conda environment for pyMETRIC: 46 | ``` 47 | conda create -n pymetric python=3.6 48 | ``` 49 | 50 | The environment must be "activated" before use: 51 | ``` 52 | conda activate pymetric 53 | ``` 54 | 55 | Most of the external modules can then be installed by calling: 56 | ``` 57 | conda install future matplotlib netcdf4 numpy pandas refet requests scipy requests-html 58 | conda install "gdal>2.2,<3.0" 59 | ``` 60 | 61 | The drigo module must be installed separately with pip: 62 | ``` 63 | pip install drigo --no-deps 64 | ``` 65 | 66 | ## Environment Variables 67 | 68 | ### Windows 69 | 70 | #### PYTHONPATH 71 | 72 | Many of the pyMETRIC scripts reference the "common" functions in the [pymetric/code/support](../code/support) folder. To be able to access these functions, you will need to add/append this path to the PYTHONPATH environment variable. 73 | 74 | The environment variable can be set at the command line.
First check if PYTHONPATH is already set by typing: 75 | ``` 76 | echo %PYTHONPATH% 77 | ``` 78 | If PYTHONPATH is not set, type the following in the command prompt: 79 | ``` 80 | setx PYTHONPATH "C:\pymetric\code\support" 81 | ``` 82 | To append to an existing PYTHONPATH, type: 83 | ``` 84 | setx PYTHONPATH "C:\pymetric\code\support;%PYTHONPATH%" 85 | ``` 86 | 87 | #### GDAL_DATA 88 | 89 | In order to execute pyMETRIC code, the GDAL_DATA environment variable may need to be set (*example*: GDAL_DATA = C:\Miniconda3\envs\pymetric\Library\share\gdal). **Depending on your specific installation of Python, your file path for GDAL_DATA may be different.** 90 | 91 | On a Windows PC, the user environment variables can be set through the Control Panel (System -> Advanced system settings -> Environment Variables). Assuming that Miniconda was installed directly to the C: drive and the "pymetric" conda environment is used, the GDAL_DATA environment variable may be set as: 92 | ``` 93 | C:\Miniconda3\envs\pymetric\Library\share\gdal 94 | ``` 95 | 96 | This environment variable can also be set at the command line. First check if GDAL_DATA is already set by typing: 97 | ``` 98 | echo %GDAL_DATA% 99 | ``` 100 | 101 | If GDAL_DATA is not set, type the following in the command prompt: 102 | ``` 103 | setx GDAL_DATA "C:\Miniconda3\envs\pymetric\Library\share\gdal" 104 | ``` 105 | 106 | ### Mac / Linux 107 | 108 | #### PYTHONPATH 109 | 110 | ``` 111 | echo $PYTHONPATH 112 | ``` 113 | 114 | ``` 115 | export PYTHONPATH=/Users/<username>/pymetric/code/support 116 | ``` 117 | 118 | #### GDAL_DATA 119 | 120 | ``` 121 | echo $GDAL_DATA 122 | ``` 123 | 124 | ``` 125 | export GDAL_DATA=/Users/<username>/miniconda3/envs/pymetric/share/gdal 126 | ``` 127 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # pyMETRIC Documentation 2 | 3 | ## Install 4 | 5 | Details on installing pyMETRIC, Python, and necessary modules can be found in the [installation instructions](INSTALL.md). 6 | 7 | ## Example 8 | 9 | A detailed walk-through on the setup and operation of pyMETRIC has been assembled in the following series of documentation. These examples are set up to process a portion of the Harney Basin, located in eastern Oregon. The documentation is contained in the following links: 10 | 1. [Data Preparation](EXAMPLE_DATA.md) 11 | 2. [Project Setup](EXAMPLE_SETUP.md) 12 | 3. [Running METRIC](EXAMPLE_METRIC.md) 13 | -------------------------------------------------------------------------------- /docs/images/harney_shapefile.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/docs/images/harney_shapefile.png -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | 2015 2 | *.cpg 3 | *.xml 4 | -------------------------------------------------------------------------------- /example/README.md: -------------------------------------------------------------------------------- 1 | # Harney Basin Example 2 | 3 | The "example" directory is included with pyMETRIC to serve as an example of a "project directory", which should contain information specific to your chosen study area or area of interest.
As pyMETRIC is run according to ["Running METRIC"](../docs/EXAMPLE_METRIC.md), processed data will be stored within this project directory. 4 | 5 | When downloading support data, processing intermediate products, and producing ET data, the pyMETRIC code will allow you to define the location of support data and products. It is highly recommended that you use the default paths for the pyMETRIC code. Deviating from this directory structure may result in conflicts between the various steps of the pyMETRIC workflow. An example of the directory structure for the Harney example can be found in ["example_directory_structure.txt"](example_directory_structure.txt). 6 | 7 | ## landsat_2015.ini 8 | This file serves as the project input file (INI). It contains the inputs needed to run the pyMETRIC processes, and most Python scripts within the METRIC workflow will reference this specific file. This file is already populated with the necessary inputs to run the pyMETRIC example. 9 | 10 | ## landsat 11 | When pyMETRIC is downloaded or cloned, the only contents of this directory will be "clear_scenes.txt" and "cloudy_scenes.txt". The cloudy scenes file stores Landsat scenes that were deemed by the user to not be useful in the pyMETRIC processes. A scene is most often skipped when it contains clouds that obscure the study area. 12 | 13 | ## study_area 14 | This directory contains a shapefile (an ESRI vector data storage format) that delineates the study area where ET estimates are to be calculated. This shapefile will be used to subset support data and limit processing to only the areas where data are desired. In this example, the shapefile was derived from the USGS National Hydrography Dataset (WBDHU8) for a hydrographic basin in the area of the Harney Basin, Oregon.
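To sanity-check the study area shapefile before processing, a minimal optional sketch using fiona (one of the pyMETRIC dependencies) is shown below; the path assumes the default example layout on a Windows install.
```
import fiona

# Optional check: print the coordinate system and feature count of the study area
with fiona.open(r"C:\pymetric\example\study_area\harney_wgs84z11.shp") as src:
    print(src.crs)   # should correspond to WGS 84 / UTM zone 11N
    print(len(src))  # number of study area polygons
```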
15 | 16 | ![Alt text](../docs/images/harney_shapefile.png?raw=true "Harney Basin, Oregon") 17 | 18 | 19 | -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.dbf: -------------------------------------------------------------------------------- 1 | vAIdN 0 0 -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.prj: -------------------------------------------------------------------------------- 1 | PROJCS["WGS_1984_UTM_Zone_11N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-117.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbn -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.sbx -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.shp -------------------------------------------------------------------------------- /example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/cloud_masks/LE07_L1TP_043030_20150423_20160902_01_T1_mask.shx -------------------------------------------------------------------------------- /example/example_directory_structure.txt: -------------------------------------------------------------------------------- 1 | C:\pymetric 2 | +---cdl 3 | | +---p043r030 4 | +---code 5 | | +---ini_templates 6 | | +---interp_functions 7 | | +---local 8 | | +---metric_functions 9 | | +---prep_functions 10 | | \---support 11 | +---dem 12 | | +---p043r030 13 | | \---tiles 14 | +---docs 15 | +---gridmet 16 | | +---ancillary 17 | | +---etr 18 | | +---netcdf 19 | | \---ppt 20 | +-example 21 | | +---2015 22 | | | +---ET 23 | | | | +---LINEAR_ANNUAL 24 | | | | +---LINEAR_MONTHLY 25 | | | | \---LINEAR_ZONES 26 | | | \---p043r030 27 | | | +---LC08_L1TP_043030_20150415_20170227_01_T1 28 | | | | +---ETRF 29 | | | | +---INDICES 30 | | | | +---ORIGINAL_DATA 31 | | | | +---PIXELS 32 | | | | +---PIXEL_REGIONS 33 | | | | +---REFLECTANCE_TOA 34 | | | | \---SUPPORT_RASTERS 35 | | | 
+---LC08_L1TP_043030_20150501_20170301_01_T1 36 | | | | +---ETRF 37 | | | | +---INDICES 38 | | | | +---ORIGINAL_DATA 39 | | | | +---PIXELS 40 | | | | +---PIXEL_REGIONS 41 | | | | +---REFLECTANCE_TOA 42 | | | | \---SUPPORT_RASTERS 43 | | | +---LC08_L1TP_043030_20150720_20170226_01_T1 44 | | | | +---ETRF 45 | | | | +---INDICES 46 | | | | +---ORIGINAL_DATA 47 | | | | +---PIXELS 48 | | | | +---PIXEL_REGIONS 49 | | | | +---REFLECTANCE_TOA 50 | | | | \---SUPPORT_RASTERS 51 | | | +---LC08_L1TP_043030_20150821_20170225_01_T1 52 | | | | +---ETRF 53 | | | | +---INDICES 54 | | | | +---ORIGINAL_DATA 55 | | | | +---PIXELS 56 | | | | +---PIXEL_REGIONS 57 | | | | +---REFLECTANCE_TOA 58 | | | | \---SUPPORT_RASTERS 59 | | | +---LC08_L1TP_043030_20150906_20170225_01_T1 60 | | | | +---ETRF 61 | | | | +---INDICES 62 | | | | +---ORIGINAL_DATA 63 | | | | +---PIXELS 64 | | | | +---PIXEL_REGIONS 65 | | | | +---REFLECTANCE_TOA 66 | | | | \---SUPPORT_RASTERS 67 | | | \---LC08_L1TP_043030_20150922_20170225_01_T1 68 | | | +---ETRF 69 | | | +---INDICES 70 | | | +---ORIGINAL_DATA 71 | | | +---PIXELS 72 | | | +---PIXEL_REGIONS 73 | | | +---REFLECTANCE_TOA 74 | | | \---SUPPORT_RASTERS 75 | | +---cloud_masks 76 | | +---fields 77 | | +---landsat 78 | | \---study_area 79 | +---fields 80 | | +---p043r030 81 | +---landsat 82 | | +---043 83 | | | \---030 84 | | | \---2015 85 | | \---footprints 86 | +---misc 87 | +---nldas 88 | | +---ancillary 89 | | +---ea 90 | | | \---2015 91 | | +---etr 92 | | | \---2015 93 | | +---grb 94 | | | \---2015 95 | | | +---049 96 | | | +---065 97 | | | +---105 98 | | | +---113 99 | | | +---121 100 | | | +---129 101 | | | +---161 102 | | | +---177 103 | | | +---201 104 | | | +---209 105 | | | +---225 106 | | | +---233 107 | | | +---249 108 | | | +---265 109 | | | +---289 110 | | +---wind 111 | | +---2015 112 | +---soils 113 | \---tools 114 | +---cimis 115 | +---daymet 116 | +---download 117 | +---gridmet 118 | +---nldas 119 | \---plotting -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/fields/fields_2016_wgs84z11.dbf -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.prj: -------------------------------------------------------------------------------- 1 | PROJCS["WGS_1984_UTM_Zone_11N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-117.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.sbn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/fields/fields_2016_wgs84z11.sbn -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.sbx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/fields/fields_2016_wgs84z11.sbx -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/fields/fields_2016_wgs84z11.shp -------------------------------------------------------------------------------- /example/fields/fields_2016_wgs84z11.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/fields/fields_2016_wgs84z11.shx -------------------------------------------------------------------------------- /example/landsat/clear_scenes.txt: -------------------------------------------------------------------------------- 1 | LO08_L1TP_043030_20150210_20170301_01_T1 2 | LE07_L1TP_043030_20150218_20160902_01_T1 3 | LE07_L1TP_043030_20150306_20160902_01_T1 4 | LC08_L1TP_043030_20150415_20170227_01_T1 5 | LE07_L1TP_043030_20150423_20160902_01_T1 6 | LC08_L1TP_043030_20150501_20170301_01_T1 7 | LE07_L1TP_043030_20150509_20160902_01_T1 8 | LE07_L1TP_043030_20150610_20160905_01_T1 9 | LE07_L1TP_043030_20150626_20160902_01_T1 10 | LC08_L1TP_043030_20150720_20170226_01_T1 11 | LE07_L1TP_043030_20150728_20160902_01_T1 12 | LE07_L1TP_043030_20150813_20160903_01_T1 13 | LC08_L1TP_043030_20150821_20170225_01_T1 14 | LC08_L1TP_043030_20150906_20170225_01_T1 15 | LC08_L1TP_043030_20150922_20170225_01_T1 16 | LE07_L1TP_043030_20151016_20160903_01_T1 17 | -------------------------------------------------------------------------------- /example/landsat/cloudy_scenes.txt: -------------------------------------------------------------------------------- 1 | LE07_L1TP_043030_20000108_20161002_01_T1 2 | LT05_L1TP_043030_20000116_20160919_01_T1 3 | LE07_L1GT_043030_20000124_20161002_01_T2 4 | LT05_L1GS_043030_20000201_20160918_01_T2 5 | LE07_L1TP_043030_20000209_20161003_01_T1 6 | LT05_L1TP_043030_20000217_20160918_01_T1 7 | LE07_L1TP_043030_20000225_20161002_01_T1 8 | LT05_L1TP_043030_20000304_20160918_01_T1 9 | LE07_L1TP_043030_20000312_20161002_01_T1 10 | LE07_L1TP_043030_20000328_20161003_01_T1 11 | LE07_L1TP_043030_20000413_20161001_01_T1 12 | LT05_L1GS_043030_20000507_20160918_01_T2 13 | LT05_L1TP_043030_20000523_20160918_01_T1 14 | LT05_L1GS_043030_20000608_20160918_01_T2 15 | LE07_L1TP_043030_20000803_20161002_01_T1 16 | LT05_L1TP_043030_20000928_20160918_01_T1 17 | LT05_L1TP_043030_20001014_20160922_01_T1 18 | LT05_L1TP_043030_20001030_20160918_01_T1 19 | LE07_L1TP_043030_20001107_20161001_01_T1 20 | LT05_L1GS_043030_20001115_20160918_01_T2 21 | LE07_L1TP_043030_20001123_20161002_01_T1 22 | LT05_L1TP_043030_20001201_20160918_01_T1 23 | LE07_L1GT_043030_20001209_20161002_01_T2 24 | LT05_L1TP_043030_20001217_20160918_01_T1 25 | LE07_L1TP_043030_20001225_20161001_01_T1 26 | LE07_L1TP_043030_20150101_20160905_01_T1 27 | LC08_L1TP_043030_20150109_20170302_01_T2 28 | LE07_L1GT_043030_20150117_20160903_01_T2 29 | LC08_L1TP_043030_20150125_20170302_01_T1 30 | LE07_L1GT_043030_20150202_20160903_01_T2 31 | LC08_L1TP_043030_20150226_20180201_01_T1 32 | LC08_L1GT_043030_20150314_20170228_01_T2 33 | LE07_L1TP_043030_20150322_20160906_01_T1 34 | LC08_L1TP_043030_20150330_20170228_01_T1 35 | LE07_L1TP_043030_20150407_20160904_01_T1 36 | 
LC08_L1TP_043030_20150517_20170301_01_T1 37 | LE07_L1TP_043030_20150525_20160902_01_T1 38 | LC08_L1TP_043030_20150602_20170226_01_T1 39 | LC08_L1TP_043030_20150618_20170226_01_T1 40 | LC08_L1TP_043030_20150704_20170226_01_T1 41 | LE07_L1TP_043030_20150712_20160904_01_T1 42 | LC08_L1TP_043030_20150805_20170311_01_T1 43 | LE07_L1TP_043030_20150829_20160905_01_T1 44 | LE07_L1GT_043030_20150914_20160902_01_T2 45 | LC08_L1TP_043030_20151008_20170225_01_T1 46 | LC08_L1TP_043030_20151024_20170225_01_T1 47 | LE07_L1TP_043030_20151101_20160903_01_T1 48 | LC08_L1TP_043030_20151109_20170225_01_T1 49 | LE07_L1GT_043030_20151117_20160903_01_T2 50 | LC08_L1GT_043030_20151125_20170225_01_T2 51 | LE07_L1TP_043030_20151203_20160903_01_T1 52 | LC08_L1TP_043030_20151211_20170224_01_T1 53 | LE07_L1GT_043030_20151219_20160902_01_T2 54 | LC08_L1TP_043030_20151227_20180201_01_T2 55 | -------------------------------------------------------------------------------- /example/landsat_2015.ini: -------------------------------------------------------------------------------- 1 | ## Landsat Harney Input File 2 | [INPUTS] 3 | 4 | year = 2015 5 | tile_list = p043r030 6 | project_folder = C:\pymetric\example 7 | 8 | ## Monte Carlo 9 | monte_carlo_flag = True 10 | etrf_training_path = C:\pymetric\misc\etrf_training_test.csv 11 | 12 | ## Interpolator 13 | interpolate_folder = ET 14 | interpolate_rasters_flag = True 15 | interpolate_tables_flag = True 16 | 17 | ## ETrF (optional, if not set will default to current project/year) 18 | # etrf_input_folder = C:\pymetric\example\2015 19 | 20 | ## Interpolator study area (for rasters interpolator) 21 | ## Output projection will be inherited from study_area_path 22 | ## If study_area_mask_flag, convert to raster and mask values outside study area features 23 | ## If not study_area_mask_flag, set raster output extent and spat. 
ref from study_area_path 24 | ## Overriding the output projection with study_area_proj is not currently supported 25 | ## The study_area_buffer is currently only applied to the extent, not the features 26 | ## The buffer can be positive or negative (in or out) with units are in the output projection 27 | study_area_path = C:\pymetric\example\study_area\harney_wgs84z11.shp 28 | study_area_mask_flag = True 29 | study_area_snap = 15, 15 30 | study_area_cellsize = 30 31 | # study_area_buffer = 0 32 | # study_area_proj = None 33 | 34 | ## Interpolator zones (for tables interpolator) 35 | ## Output raster properties will be be inherited from 1) zones_mask or 2) set separately 36 | ## Output projection will be inherited from 1) zones_mask, 2) zones_proj, or 3) zones_path 37 | ## zones_proj can be EPSG, PROJ4, or WKT 38 | ## If set, zones_mask values of 0 or nodata will be excluded from zonal statistics 39 | ## Zones can be buffered in or out, units are in the output projection 40 | zones_path = C:\pymetric\example\fields\fields_2016_wgs84z11.shp 41 | zones_name_field = FID 42 | zones_snap = 15, 15 43 | zones_cellsize = 30 44 | 45 | ## INI flags 46 | metric_flag = True 47 | 48 | ## Path/row control flags 49 | landsat_flag = True 50 | dem_flag = True 51 | nlcd_flag = True 52 | cdl_flag = True 53 | landfire_flag = True 54 | field_flag = True 55 | 56 | ## Scene control flags 57 | calc_refl_toa_flag = True 58 | calc_refl_toa_qa_flag = True 59 | calc_ts_bt_flag = True 60 | ## Use QA band to set common area 61 | ## Fmask cloud, shadow, & snow pixels will be removed from common area 62 | calc_fmask_common_flag = True 63 | fmask_smooth_flag = True 64 | fmask_smooth_cells = 1 65 | fmask_erode_flag = False 66 | fmask_erode_cells = 0 67 | fmask_buffer_flag = True 68 | fmask_buffer_cells = 2 69 | ## Extract separate Fmask rasters 70 | calc_fmask_flag = True 71 | calc_fmask_cloud_flag = False 72 | calc_fmask_snow_flag = False 73 | calc_fmask_water_flag = False 74 | ## Apply user cloud masks 75 | cloud_mask_flag = True 76 | cloud_mask_ws = C:\pymetric\example\cloud_masks 77 | ## Keep raw Landsat DN, LEDAPS, and Fmask rasters 78 | keep_dn_flag = True 79 | keep_qa_flag = True 80 | ## Hourly/daily weather data for METRIC 81 | calc_metric_ea_flag = True 82 | calc_metric_wind_flag = True 83 | calc_metric_etr_flag = True 84 | calc_metric_tair_flag = False 85 | ## Daily soil water balance Ke 86 | calc_swb_ke_flag = False 87 | ## Remove edge (fringe) cells 88 | edge_smooth_flag = True 89 | 90 | 91 | ## Path/row inputs 92 | footprint_path = C:\pymetric\landsat\footprints\WRS2_descending.shp 93 | keep_list_path = C:\pymetric\example\landsat\clear_scenes.txt 94 | # skip_list_path = C:\pymetric\example\landsat\cloudy_scenes.txt 95 | 96 | ## NLDAS or REFET 97 | metric_hourly_weather = NLDAS 98 | 99 | ## METRIC hourly weather data 100 | metric_ea_input_folder = C:\pymetric\nldas\ea 101 | metric_wind_input_folder = C:\pymetric\nldas\wind 102 | metric_etr_input_folder = C:\pymetric\nldas\etr 103 | metric_tair_input_folder = C:\pymetric\nldas\tair 104 | metric_hourly_re = (?P\w+)_(?P\d{8})_hourly_(?P\w+).img$ 105 | metric_daily_re = (?P\w+)_(?P\d{8})_(?!hourly_)(?P\w+).img$ 106 | ## RefET station data per path/row 107 | # refet_params_path = C:\pymetric\refet\refet_path_row_inputs_windows.csv 108 | 109 | ## Round weather arrays to N digits to save space 110 | rounding_digits = 3 111 | 112 | ## Interpolator and soil water balance weather inputs 113 | etr_input_folder = C:\pymetric\gridmet\etr 114 | etr_input_re = 
(?Petr)_(?P\d{4})_daily_(?P\w+).img$ 115 | 116 | ## Soil water balance weather inputs 117 | ppt_input_folder = C:\pymetric\gridmet\ppt 118 | ppt_input_re = (?Pppt)_(?P\d{4})_daily_(?P\w+).img$ 119 | 120 | ## Soil water balance soil input 121 | awc_input_path = C:\pymetric\soils\AWC_WTA_0to10cm_composite.tif 122 | 123 | ## Soil water balance 124 | swb_spinup_days = 30 125 | swb_min_spinup_days = 5 126 | 127 | ## Input folder paths 128 | landsat_input_folder = C:\pymetric\landsat 129 | 130 | ## DEM 131 | dem_input_folder = C:\pymetric\dem\tiles 132 | ## First item is latitude, second is longitude (i.e. n39w120.img) 133 | ## Don't change unless using custom DEM tiles 134 | dem_tile_fmt = {}{}.img 135 | 136 | ## NLCD 137 | nlcd_input_path = C:\pymetric\nlcd\nlcd_2011_landcover_2011_edition_2014_10_10\nlcd_2011_landcover_2011_edition_2014_10_10.img 138 | 139 | ## Buffer path/row extents by N degrees for selecting DEM tiles and clipping NLCD 140 | tile_buffer = 0.25 141 | 142 | ## CDL 143 | cdl_input_path = C:\pymetric\cdl\2015_30m_cdls.img 144 | cdl_ag_list = 1-61, 66-77, 204-254 145 | # non_ag_list = 0, 62-65, 81-199 146 | 147 | ## Landfire 148 | landfire_input_path = C:\pymetric\landfire\US_140_EVT\Grid\us_140evt 149 | landfire_ag_list = 3960-3999 150 | 151 | ## Fields (for pixel rating only) 152 | field_input_path = C:\pymetric\example\fields\fields_2016_wgs84z11.shp 153 | 154 | ## Zom remap 155 | zom_remap_path = C:\pymetric\misc\zom_nlcd_default.json 156 | 157 | ## Output folder paths 158 | dem_output_folder = C:\pymetric\dem 159 | nlcd_output_folder = C:\pymetric\nlcd 160 | cdl_output_folder = C:\pymetric\cdl 161 | landfire_output_folder = C:\pymetric\landfire 162 | field_output_folder = C:\pymetric\fields 163 | 164 | ## Output file name formats 165 | dem_output_name = dem.img 166 | nlcd_output_fmt = nlcd_{:04d}.img 167 | cdl_output_fmt = cdl_{:04d}.img 168 | cdl_ag_output_fmt = cdl_ag_{:04d}.img 169 | landfire_output_fmt = landfire_{:04d}.img 170 | landfire_ag_output_fmt = landfire_ag_{:04d}.img 171 | field_output_fmt = fields_{:04d}.img 172 | 173 | ## Script paths 174 | prep_scene_func = C:\pymetric\code\prep_functions\landsat_prep_scene_func.py 175 | metric_model1_func = C:\pymetric\code\metric_functions\metric_model1_func.py 176 | metric_model2_func = C:\pymetric\code\metric_functions\metric_model2_func.py 177 | pixel_rating_func = C:\pymetric\code\metric_functions\pixel_rating_func.py 178 | pixel_points_func = C:\pymetric\code\metric_functions\pixel_points_func.py 179 | monte_carlo_func = C:\pymetric\code\metric_functions\monte_carlo_func.py 180 | interpolate_rasters_func = C:\pymetric\code\interp_functions\interpolate_rasters_func.py 181 | interpolate_tables_func = C:\pymetric\code\interp_functions\interpolate_tables_func.py 182 | 183 | ## Template input files for scripts 184 | metric_ini = C:\pymetric\code\ini_templates\landsat_metric_inputs.ini 185 | pixel_rating_ini = C:\pymetric\code\ini_templates\landsat_metric_pixel_rating.ini 186 | monte_carlo_ini = C:\pymetric\code\ini_templates\landsat_metric_monte_carlo.ini 187 | interpolate_ini = C:\pymetric\code\ini_templates\landsat_interpolate.ini 188 | -------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/harney_wgs84z11.dbf 
-------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.prj: -------------------------------------------------------------------------------- 1 | PROJCS["WGS_1984_UTM_Zone_11N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-117.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.sbn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/harney_wgs84z11.sbn -------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.sbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/harney_wgs84z11.sbx -------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/harney_wgs84z11.shp -------------------------------------------------------------------------------- /example/study_area/harney_wgs84z11.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/harney_wgs84z11.shx -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/wrs2_p043r030.dbf -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.sbn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/wrs2_p043r030.sbn -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.sbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/wrs2_p043r030.sbx -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.shp: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/wrs2_p043r030.shp -------------------------------------------------------------------------------- /example/study_area/wrs2_p043r030.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WSWUP/pymetric/c3a47fb0d5f98dd8746f557038b6291350ea0916/example/study_area/wrs2_p043r030.shx -------------------------------------------------------------------------------- /landsat/.gitignore: -------------------------------------------------------------------------------- 1 | *.tgz 2 | *.tar.gz 3 | usgs.txt 4 | -------------------------------------------------------------------------------- /landsat/README.md: -------------------------------------------------------------------------------- 1 | # Landsat 2 | 3 | ## Landsat clear scene "keep" lists 4 | 5 | Before running pyMETRIC, it is important to identify Landsat images that should be processed and are free of excessive clouds, smoke, haze, snow, shadows, or general bad data in the study area. Many of the pyMETRIC tools expect or will honor a text file of Landsat scene IDs that should be processed. This file is typically referred to as a "keep list" in the documentation and INI files. 6 | 7 | One approach for generating this keep list is to use the [Cloud Free Scene Counts tools](https://github.com/WSWUP/cloud-free-scene-counts). The Landsat path/row used in the example for those tools is also 43/30. An [example keep list file](../example/landsat/clear_scenes.txt) can be found in the Harney example folder. 8 | 9 | ## Landsat Images 10 | 11 | The following command will download the Landsat scenes required for the pyMETRIC example. The start and end date parameters are only needed if the clear scene list includes scenes from other years. The Landsat images are being downloaded to the non-project landsat folder so that they can be used by other projects, but they could be downloaded directly to the project folder instead. 12 | ``` 13 | C:\pymetric>python tools\download\download_landsat.py example\landsat\clear_scenes.txt --start 2015-01-01 --end 2015-12-31 14 | ``` 15 | 16 | This will create the directory structure pyMETRIC is expecting, with the tar.gz files stored in separate nested folders by path, row, and year: 17 | 18 | ``` 19 | C:\pymetric\landsat\043\030\2015\LC70430302015101LGN01.tgz 20 | ``` 21 | -------------------------------------------------------------------------------- /landsat/footprints/.gitignore: -------------------------------------------------------------------------------- 1 | wrs2_descending.* -------------------------------------------------------------------------------- /landsat/footprints/README.md: -------------------------------------------------------------------------------- 1 | The Landsat WRS2 Descending footprint shapefile can be downloaded directly from the [Landsat website](https://landsat.usgs.gov/pathrow-shapefiles) or using the [download script](../../tools/download/download_footprints.py) with the following command: 2 | 3 | ``` 4 | C:\pymetric>python tools\download\download_footprints.py 5 | ``` 6 | 7 | The file "wrs2_tile_utm_zones.json" was generated from the bulk metadata CSV files. Path/rows that did not have any images in the metadata CSV files were removed.
Please refer to the [cloud-free-scene-counts repository](https://github.com/WSWUP/cloud-free-scene-counts) for additional details on acquiring the bulk metadata CSV files. 8 | -------------------------------------------------------------------------------- /misc/README.md: -------------------------------------------------------------------------------- 1 | # Miscellaneous Files 2 | 3 | This directory contains files that are used by pyMETRIC to support the initial calibration of ETrF (fraction of reference evapotranspiration) images. It is recommended that you do not modify the files contained within this directory. 4 | -------------------------------------------------------------------------------- /misc/etrf_doy_adj.csv: -------------------------------------------------------------------------------- 1 | DOY,ETRF_ADJ 2 | 1,-0.3 3 | 2,-0.3 4 | 3,-0.3 5 | 4,-0.3 6 | 5,-0.3 7 | 6,-0.3 8 | 7,-0.3 9 | 8,-0.3 10 | 9,-0.3 11 | 10,-0.3 12 | 11,-0.3 13 | 12,-0.3 14 | 13,-0.3 15 | 14,-0.3 16 | 15,-0.3 17 | 16,-0.3 18 | 17,-0.3 19 | 18,-0.3 20 | 19,-0.3 21 | 20,-0.3 22 | 21,-0.3 23 | 22,-0.3 24 | 23,-0.3 25 | 24,-0.3 26 | 25,-0.3 27 | 26,-0.3 28 | 27,-0.3 29 | 28,-0.3 30 | 29,-0.3 31 | 30,-0.3 32 | 31,-0.3 33 | 32,-0.2 34 | 33,-0.2 35 | 34,-0.2 36 | 35,-0.2 37 | 36,-0.2 38 | 37,-0.2 39 | 38,-0.2 40 | 39,-0.2 41 | 40,-0.2 42 | 41,-0.2 43 | 42,-0.2 44 | 43,-0.2 45 | 44,-0.2 46 | 45,-0.2 47 | 46,-0.2 48 | 47,-0.2 49 | 48,-0.2 50 | 49,-0.2 51 | 50,-0.2 52 | 51,-0.2 53 | 52,-0.2 54 | 53,-0.2 55 | 54,-0.2 56 | 55,-0.2 57 | 56,-0.2 58 | 57,-0.2 59 | 58,-0.2 60 | 59,-0.2 61 | 60,-0.1 62 | 61,-0.1 63 | 62,-0.1 64 | 63,-0.1 65 | 64,-0.1 66 | 65,-0.1 67 | 66,-0.1 68 | 67,-0.1 69 | 68,-0.1 70 | 69,-0.1 71 | 70,-0.1 72 | 71,-0.1 73 | 72,-0.1 74 | 73,-0.1 75 | 74,-0.1 76 | 75,-0.1 77 | 76,-0.1 78 | 77,-0.1 79 | 78,-0.1 80 | 79,-0.1 81 | 80,-0.1 82 | 81,-0.1 83 | 82,-0.1 84 | 83,-0.1 85 | 84,-0.1 86 | 85,-0.1 87 | 86,-0.1 88 | 87,-0.1 89 | 88,-0.1 90 | 89,-0.1 91 | 90,-0.1 92 | 91,0 93 | 92,0 94 | 93,0 95 | 94,0 96 | 95,0 97 | 96,0 98 | 97,0 99 | 98,0 100 | 99,0 101 | 100,0 102 | 101,0 103 | 102,0 104 | 103,0 105 | 104,0 106 | 105,0 107 | 106,0 108 | 107,0 109 | 108,0 110 | 109,0 111 | 110,0 112 | 111,0 113 | 112,0 114 | 113,0 115 | 114,0 116 | 115,0 117 | 116,0 118 | 117,0 119 | 118,0 120 | 119,0 121 | 120,0 122 | 121,0 123 | 122,0 124 | 123,0 125 | 124,0 126 | 125,0 127 | 126,0 128 | 127,0 129 | 128,0 130 | 129,0 131 | 130,0 132 | 131,0 133 | 132,0 134 | 133,0 135 | 134,0 136 | 135,0 137 | 136,0 138 | 137,0 139 | 138,0 140 | 139,0 141 | 140,0 142 | 141,0 143 | 142,0 144 | 143,0 145 | 144,0 146 | 145,0 147 | 146,0 148 | 147,0 149 | 148,0 150 | 149,0 151 | 150,0 152 | 151,0 153 | 152,0 154 | 153,0 155 | 154,0 156 | 155,0 157 | 156,0 158 | 157,0 159 | 158,0 160 | 159,0 161 | 160,0 162 | 161,0 163 | 162,0 164 | 163,0 165 | 164,0 166 | 165,0 167 | 166,0 168 | 167,0 169 | 168,0 170 | 169,0 171 | 170,0 172 | 171,0 173 | 172,0 174 | 173,0 175 | 174,0 176 | 175,0 177 | 176,0 178 | 177,0 179 | 178,0 180 | 179,0 181 | 180,0 182 | 181,0 183 | 182,0 184 | 183,0 185 | 184,0 186 | 185,0 187 | 186,0 188 | 187,0 189 | 188,0 190 | 189,0 191 | 190,0 192 | 191,0 193 | 192,0 194 | 193,0 195 | 194,0 196 | 195,0 197 | 196,0 198 | 197,0 199 | 198,0 200 | 199,0 201 | 200,0 202 | 201,0 203 | 202,0 204 | 203,0 205 | 204,0 206 | 205,0 207 | 206,0 208 | 207,0 209 | 208,0 210 | 209,0 211 | 210,0 212 | 211,0 213 | 212,0 214 | 213,0 215 | 214,0 216 | 215,0 217 | 216,0 218 | 217,0 219 | 218,0 220 | 219,0 221 | 220,0 222 | 221,0 223 | 222,0 224 
| 223,0 225 | 224,0 226 | 225,0 227 | 226,0 228 | 227,0 229 | 228,0 230 | 229,0 231 | 230,0 232 | 231,0 233 | 232,0 234 | 233,0 235 | 234,0 236 | 235,0 237 | 236,0 238 | 237,0 239 | 238,0 240 | 239,0 241 | 240,0 242 | 241,0 243 | 242,0 244 | 243,0 245 | 244,0 246 | 245,0 247 | 246,0 248 | 247,0 249 | 248,0 250 | 249,0 251 | 250,0 252 | 251,0 253 | 252,0 254 | 253,0 255 | 254,0 256 | 255,0 257 | 256,0 258 | 257,0 259 | 258,0 260 | 259,0 261 | 260,0 262 | 261,0 263 | 262,0 264 | 263,0 265 | 264,0 266 | 265,0 267 | 266,0 268 | 267,0 269 | 268,0 270 | 269,0 271 | 270,0 272 | 271,0 273 | 272,0 274 | 273,0 275 | 274,0 276 | 275,0 277 | 276,0 278 | 277,0 279 | 278,0 280 | 279,0 281 | 280,0 282 | 281,0 283 | 282,0 284 | 283,0 285 | 284,0 286 | 285,0 287 | 286,0 288 | 287,0 289 | 288,0 290 | 289,0 291 | 290,0 292 | 291,0 293 | 292,0 294 | 293,0 295 | 294,0 296 | 295,0 297 | 296,0 298 | 297,0 299 | 298,0 300 | 299,0 301 | 300,0 302 | 301,-0.1 303 | 302,-0.1 304 | 303,-0.1 305 | 304,-0.1 306 | 305,-0.1 307 | 306,-0.1 308 | 307,-0.1 309 | 308,-0.1 310 | 309,-0.1 311 | 310,-0.1 312 | 311,-0.1 313 | 312,-0.1 314 | 313,-0.1 315 | 314,-0.1 316 | 315,-0.1 317 | 316,-0.1 318 | 317,-0.1 319 | 318,-0.1 320 | 319,-0.1 321 | 320,-0.1 322 | 321,-0.1 323 | 322,-0.1 324 | 323,-0.1 325 | 324,-0.1 326 | 325,-0.1 327 | 326,-0.1 328 | 327,-0.1 329 | 328,-0.1 330 | 329,-0.1 331 | 330,-0.1 332 | 331,-0.2 333 | 332,-0.2 334 | 333,-0.2 335 | 334,-0.2 336 | 335,-0.2 337 | 336,-0.2 338 | 337,-0.2 339 | 338,-0.2 340 | 339,-0.2 341 | 340,-0.2 342 | 341,-0.2 343 | 342,-0.2 344 | 343,-0.2 345 | 344,-0.2 346 | 345,-0.2 347 | 346,-0.2 348 | 347,-0.2 349 | 348,-0.2 350 | 349,-0.2 351 | 350,-0.2 352 | 351,-0.2 353 | 352,-0.2 354 | 353,-0.2 355 | 354,-0.2 356 | 355,-0.2 357 | 356,-0.2 358 | 357,-0.2 359 | 358,-0.2 360 | 359,-0.2 361 | 360,-0.2 362 | 361,-0.2 363 | 362,-0.2 364 | 363,-0.2 365 | 364,-0.2 366 | 365,-0.2 367 | -------------------------------------------------------------------------------- /misc/zom_nlcd_default.json: -------------------------------------------------------------------------------- 1 | { 2 | "11": "0.0002", 3 | "12": "0.0004", 4 | "21": "0.1", 5 | "22": "0.55", 6 | "23": "0.55", 7 | "24": "1.1", 8 | "31": "0.00035", 9 | "32": "0.00035", 10 | "41": "perrier", 11 | "_comment": "1.2", 12 | "42": "perrier", 13 | "_comment": "1.2", 14 | "43": "perrier", 15 | "_comment": "1.2", 16 | "51": "0.4", 17 | "52": "0.4", 18 | "71": "0.019", 19 | "72": "0.04", 20 | "81": "LAI", 21 | "_comment": "0.065", 22 | "82": "LAI", 23 | "_comment": "0.15", 24 | "90": "0.1", 25 | "94": "0.1", 26 | "95": "0.03" 27 | } -------------------------------------------------------------------------------- /misc/zom_nlcd_metric.json: -------------------------------------------------------------------------------- 1 | { 2 | "11": "0.0005", 3 | "12": "0.005", 4 | "21": "0.05", 5 | "22": "0.08", 6 | "23": "0.1", 7 | "24": "0.2", 8 | "31": "0.1", 9 | "32": "0.005", 10 | "41": "perrier", 11 | "42": "perrier", 12 | "43": "perrier", 13 | "51": "0.2", 14 | "52": "0.2", 15 | "71": "0.05", 16 | "72": "0.03", 17 | "81": "LAI", 18 | "82": "LAI", 19 | "90": "0.4", 20 | "94": "0.2", 21 | "95": "0.01" 22 | } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | configparser 2 | drigo>=0.1.4 3 | future==0.16.0 4 | gdal>=2.2.3 5 | matplotlib>=2.1.1 6 | netCDF4>=1.3.1 7 | numpy>=1.14.0 8 | pandas>=0.22.0 9 | refet>=0.3.7 10 | 
requests>=2.2.0 11 | requests-html>=0.10.0 12 | scipy>=1.0.0 13 | -------------------------------------------------------------------------------- /tools/README.md: -------------------------------------------------------------------------------- 1 | ### CIMIS 2 | ------------- 3 | Scripts for downloading and preparing spatial CIMIS (California Irrigation Management Information System) data 4 | 5 | ### DAYMET 6 | ------------- 7 | Download and process the Daymet products 8 | 9 | ### Download 10 | ------------- 11 | Contains scripts to download the following products: 12 | - Cropland Data Layer 13 | - The Landsat Footprint shapefile (WRS2) 14 | - Landfire 15 | - The National Elevation Dataset (NED) 16 | - National Land Cover Database (NLCD) 17 | - Soils data derived from the USDA Digital General Soil Map of the United States or STATSGO2 18 | 19 | ### GRIDMET 20 | ------------- 21 | Download and calculate daily meteorological data from the GRIDMET dataset. Variables are saved as daily IMG rasters in separate folders 22 | 23 | ### NLDAS 24 | ------------- 25 | Contains Python scripts that download and process hourly meteorological variables from NLDAS (North American Land Data Assimilation System) 26 | -------------------------------------------------------------------------------- /tools/cimis/README.md: -------------------------------------------------------------------------------- 1 | # CIMIS 2 | 3 | Scripts for downloading and preparing CIMIS daily weather data 4 | 5 | cimis_ancillary.py - Download and process the CIMIS mask, elevation, 6 | latitude, and longitude rasters 7 | cimis_download.py - Download the ".asc.gz" files from the CIMIS website 8 | The default date range is 2017-01-01 to 2017-12-31 9 | cimis_extract_convert.py - Uncompress the ".asc.gz" files and convert to IMG 10 | cimis_daily_refet.py - Calculate daily ETo and ETr from the CIMIS inputs 11 | ETo and ETr are saved as daily IMG rasters in separate folders 12 | cimis_gridmet_fill.py - Get GRIDMET 4km daily ETo/ETr for dates with missing data 13 | 14 | CIMIS elevation data is from: 15 | Global Multi-resolution Terrain Elevation Data 2010 (GMTED2010) 16 | http://topotools.cr.usgs.gov/gmted_viewer/ 17 | http://topotools.cr.usgs.gov/gmted_viewer/data/Grid_ZipFiles/mn30_grd.zip 18 | http://topotools.cr.usgs.gov/gmted_viewer/data/Grid_ZipFiles/md30_grd.zip -------------------------------------------------------------------------------- /tools/cimis/_utils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import datetime as dt 3 | import logging 4 | 5 | import requests 6 | 7 | 8 | def date_range(start_date, end_date): 9 | """Yield datetimes within a date range""" 10 | for n in range(int((end_date - start_date).days)): 11 | yield start_date + dt.timedelta(n) 12 | 13 | 14 | def url_download(download_url, output_path, verify=True): 15 | """Download file from a URL using requests module""" 16 | response = requests.get(download_url, stream=True, verify=verify) 17 | if response.status_code != 200: 18 | logging.error(' HTTPError: {}'.format(response.status_code)) 19 | return False 20 | 21 | logging.debug(' Beginning download') 22 | with open(output_path, "wb") as output_f: 23 | for chunk in response.iter_content(chunk_size=1024 * 1024): 24 | if chunk: # filter out keep-alive new chunks 25 | output_f.write(chunk) 26 | logging.debug(' Download complete') 27 | return True 28 | 29 | 30 | def valid_date(input_date): 31 | """Check that a date string is ISO format (YYYY-MM-DD) 32 | 33 |
This function is used to check the format of dates entered as command 34 | line arguments. 35 | DEADBEEF - It would probably make more sense to have this function 36 | parse the date using dateutil parser (http://labix.org/python-dateutil) 37 | and return the ISO format string. 38 | 39 | Parameters 40 | ---------- 41 | input_date : str 42 | 43 | Returns 44 | ------- 45 | datetime 46 | 47 | Raises 48 | ------ 49 | ArgParse ArgumentTypeError 50 | 51 | """ 52 | try: 53 | return dt.datetime.strptime(input_date, "%Y-%m-%d") 54 | # return dt.datetime.strptime(input_date, "%Y-%m-%d").date().isoformat() 55 | except ValueError: 56 | msg = "Not a valid date: '{}'.".format(input_date) 57 | raise argparse.ArgumentTypeError(msg) -------------------------------------------------------------------------------- /tools/cimis/cimis_ancillary.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: cimis_ancillary.py 3 | # Purpose: Download CIMIS data 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import gzip 9 | import logging 10 | import os 11 | import subprocess 12 | import sys 13 | import zipfile 14 | 15 | import drigo 16 | import numpy as np 17 | 18 | import _utils 19 | 20 | 21 | def main(ancillary_ws, overwrite_flag=False): 22 | """Process CIMIS ancillary data 23 | 24 | Parameters 25 | ---------- 26 | ancillary_ws : str 27 | Folder of ancillary rasters. 28 | overwrite_flag : bool, optional 29 | If True, overwrite existing files (the default is False). 30 | 31 | Returns 32 | ------- 33 | None 34 | 35 | """ 36 | logging.info('\nProcess CIMIS ancillary data') 37 | 38 | # Site URL 39 | site_url = 'http://cimis.casil.ucdavis.edu/cimis/' 40 | 41 | # DEM for air pressure calculation 42 | # http://topotools.cr.usgs.gov/gmted_viewer/gmted2010_global_grids.php 43 | elev_full_url = 'http://edcintl.cr.usgs.gov/downloads/sciweb1/shared/topo/downloads/GMTED/Grid_ZipFiles/mn30_grd.zip' 44 | elev_full_zip = os.path.join(ancillary_ws, 'mn30_grd.zip') 45 | elev_full_raster = os.path.join(ancillary_ws, 'mn30_grd') 46 | 47 | # Get CIMIS grid properties from 2010/01/01 ETo raster 48 | # Grid of the spatial cimis input rasters 49 | # cimis_extent = drigo.Extent((-410000, -660000, 610000, 460000)) 50 | # cimis_cs = 2000 51 | # cimis_geo = drigo.extent_geo(cimis_extent, cimis_cs) 52 | 53 | # Spatial reference parameters 54 | cimis_proj4 = ( 55 | '+proj=aea +lat_1=34 +lat_2=40.5 +lat_0=0 +lon_0=-120 +x_0=0 ' 56 | '+y_0=-4000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs') 57 | cimis_osr = drigo.proj4_osr(cimis_proj4) 58 | # cimis_epsg = 3310 # NAD_1983_California_Teale_Albers 59 | # cimis_osr = drigo.epsg_osr(cimis_epsg) 60 | # Comment this line out if building GeoTIFF instead of IMG 61 | cimis_osr.MorphToESRI() 62 | cimis_proj = cimis_osr.ExportToWkt() 63 | 64 | # snap_xmin, snap_ymin = (0, 0) 65 | 66 | # Build output workspace if it doesn't exist 67 | if not os.path.isdir(ancillary_ws): 68 | os.makedirs(ancillary_ws) 69 | 70 | # File paths 71 | mask_url = site_url + '/2010/01/01/ETo.asc.gz' 72 | # mask_gz = os.path.join(ancillary_ws, 'cimis_mask.asc.gz') 73 | mask_ascii = os.path.join(ancillary_ws, 'cimis_mask.asc') 74 | mask_raster = os.path.join(ancillary_ws, 'cimis_mask.img') 75 | elev_raster = os.path.join(ancillary_ws, 'cimis_elev.img') 76 | lat_raster = os.path.join(ancillary_ws, 'cimis_lat.img') 77 | lon_raster = os.path.join(ancillary_ws, 'cimis_lon.img') 78 | 79 | # Download an ETo ASCII raster 
to generate the mask raster 80 | if overwrite_flag or not os.path.isfile(mask_raster): 81 | logging.info('\nCIMIS mask') 82 | logging.debug(' Downloading') 83 | logging.debug(" {}".format(mask_url)) 84 | logging.debug(" {}".format(mask_ascii)) 85 | _utils.url_download(mask_url, mask_ascii) 86 | 87 | # DEADBEEF - The files do not appear to be compressed even though 88 | # they have a .asc.gz file extension on the server. 89 | # logging.debug(" {}".format(mask_gz)) 90 | # _utils.url_download(mask_url, mask_gz) 91 | # 92 | # The commented code below would uncompress the files if they were actually gzipped. 93 | # # Uncompress '.gz' file to a new file 94 | # logging.debug(' Uncompressing') 95 | # logging.debug(' {}'.format(mask_ascii)) 96 | # try: 97 | # input_f = gzip.open(mask_gz, 'rb') 98 | # output_f = open(mask_ascii, 'wb') 99 | # output_f.write(input_f.read()) 100 | # output_f.close() 101 | # input_f.close() 102 | # del input_f, output_f 103 | # except: 104 | # logging.error(" ERROR EXTRACTING FILE") 105 | # os.remove(mask_gz) 106 | 107 | # # Set spatial reference of the ASCII files 108 | # if build_prj_flag: 109 | # prj_file = open(mask_asc.replace('.asc','.prj'), 'w') 110 | # prj_file.write(output_proj) 111 | # prj_file.close() 112 | 113 | # Convert the ASCII raster to an IMG raster 114 | logging.debug(' Computing mask') 115 | logging.debug(' {}'.format(mask_raster)) 116 | mask_array = drigo.raster_to_array(mask_ascii, return_nodata=False) 117 | cimis_geo = drigo.raster_path_geo(mask_ascii) 118 | # cimis_extent = drigo.raster_path_extent(mask_ascii) 119 | logging.debug(' {}'.format(cimis_geo)) 120 | mask_array = np.isfinite(mask_array).astype(np.uint8) 121 | drigo.array_to_raster( 122 | mask_array, mask_raster, 123 | output_geo=cimis_geo, output_proj=cimis_proj, output_nodata=0) 124 | # drigo.ascii_to_raster( 125 | # mask_ascii, mask_raster, np.float32, cimis_proj) 126 | os.remove(mask_ascii) 127 | 128 | # Compute latitude/longitude rasters 129 | if ((overwrite_flag or 130 | not os.path.isfile(lat_raster) or 131 | not os.path.isfile(lon_raster)) and 132 | os.path.isfile(mask_raster)): 133 | logging.info('\nCIMIS latitude/longitude') 134 | cimis_geo = drigo.raster_path_geo(mask_raster) 135 | lat_array, lon_array = drigo.raster_lat_lon_func(mask_raster) 136 | drigo.array_to_raster( 137 | lat_array, lat_raster, output_geo=cimis_geo, 138 | output_proj=cimis_proj) 139 | logging.debug(' {}'.format(lon_raster)) 140 | drigo.array_to_raster( 141 | lon_array, lon_raster, output_geo=cimis_geo, 142 | output_proj=cimis_proj) 143 | 144 | # Compute DEM raster 145 | if overwrite_flag or not os.path.isfile(elev_raster): 146 | logging.info('\nCIMIS DEM') 147 | logging.debug(' Downloading GMTED2010 DEM') 148 | logging.debug(" {}".format(elev_full_url)) 149 | logging.debug(" {}".format(elev_full_zip)) 150 | if overwrite_flag or not os.path.isfile(elev_full_zip): 151 | _utils.url_download(elev_full_url, elev_full_zip) 152 | 153 | # Uncompress the zip file 154 | logging.debug(' Uncompressing') 155 | logging.debug(' {}'.format(elev_full_raster)) 156 | if overwrite_flag or not os.path.isfile(elev_full_raster): 157 | try: 158 | with zipfile.ZipFile(elev_full_zip, "r") as z: 159 | z.extractall(ancillary_ws) 160 | except: 161 | logging.error(" ERROR EXTRACTING FILE") 162 | os.remove(elev_full_zip) 163 | 164 | # Get the extent and cellsize from the mask 165 | logging.debug(' Projecting to CIMIS grid') 166 | cimis_cs = drigo.raster_path_cellsize(mask_raster)[0] 167 | cimis_extent = drigo.raster_path_extent(mask_raster) 168 | logging.debug(' Extent:
{}'.format(cimis_extent)) 169 | logging.debug(' Cellsize: {}'.format(cimis_cs)) 170 | 171 | logging.info(' {}'.format(elev_raster)) 172 | if overwrite_flag and os.path.isfile(elev_raster): 173 | subprocess.call(['gdalmanage', 'delete', elev_raster]) 174 | if not os.path.isfile(elev_raster): 175 | subprocess.call( 176 | ['gdalwarp', '-r', 'average', '-t_srs', cimis_proj4, 177 | '-te', str(cimis_extent.xmin), str(cimis_extent.ymin), 178 | str(cimis_extent.xmax), str(cimis_extent.ymax), 179 | '-tr', str(cimis_cs), str(cimis_cs), 180 | '-of', 'HFA', '-co', 'COMPRESSED=TRUE', 181 | elev_full_raster, elev_raster], 182 | cwd=ancillary_ws) 183 | 184 | logging.debug('\nScript Complete') 185 | 186 | 187 | def arg_parse(): 188 | """Base all default folders from script location 189 | scripts: ./pymetric/tools/cimis 190 | tools: ./pymetric/tools 191 | output: ./pymetric/cimis 192 | """ 193 | script_folder = sys.path[0] 194 | code_folder = os.path.dirname(script_folder) 195 | project_folder = os.path.dirname(code_folder) 196 | cimis_folder = os.path.join(project_folder, 'cimis') 197 | ancillary_folder = os.path.join(cimis_folder, 'ancillary') 198 | 199 | parser = argparse.ArgumentParser( 200 | description='Download/prep CIMIS ancillary data', 201 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 202 | parser.add_argument( 203 | '--ancillary', default=ancillary_folder, metavar='PATH', 204 | help='Ancillary raster folder path') 205 | parser.add_argument( 206 | '-o', '--overwrite', default=False, action="store_true", 207 | help='Force overwrite of existing files') 208 | parser.add_argument( 209 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 210 | help='Debug level logging', action="store_const", dest="loglevel") 211 | args = parser.parse_args() 212 | 213 | # Convert relative paths to absolute paths 214 | if args.ancillary and os.path.isdir(os.path.abspath(args.ancillary)): 215 | args.ancillary = os.path.abspath(args.ancillary) 216 | 217 | return args 218 | 219 | 220 | if __name__ == '__main__': 221 | args = arg_parse() 222 | 223 | logging.basicConfig(level=args.loglevel, format='%(message)s') 224 | logging.info('\n{}'.format('#' * 80)) 225 | logging.info('{:<20s} {}'.format( 226 | 'Run Time Stamp:', dt.datetime.now().isoformat(' '))) 227 | logging.info('{:<20s} {}'.format( 228 | 'Script:', os.path.basename(sys.argv[0]))) 229 | 230 | main(ancillary_ws=args.ancillary, overwrite_flag=args.overwrite) 231 | -------------------------------------------------------------------------------- /tools/cimis/cimis_download.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: cimis_download.py 3 | # Purpose: Download CIMIS data 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import re 11 | import sys 12 | 13 | import requests 14 | 15 | import _utils 16 | 17 | 18 | def main(start_dt, end_dt, output_ws, variables=['all'], 19 | overwrite_flag=False): 20 | """Download CIMIS data 21 | 22 | Parameters 23 | ---------- 24 | start_dt : datetime 25 | Start date. 26 | end_dt : datetime 27 | End date. 28 | output_ws : str 29 | Folder path of the output ascii files. 30 | variables : list 31 | Choices: 'ETo', 'Rso', 'Rs', 'Tdew', 'Tn', 'Tx', 'U2', 'all' 32 | 'K' and 'Rnl' can be downloaded but are not needed. 33 | Set as ['all'] to download all variables. 34 | overwrite_flag : bool, optional 35 | If True, overwrite existing files (the default is False).
36 | 37 | Returns 38 | ------- 39 | None 40 | 41 | Notes 42 | ----- 43 | The files on the CIMIS server do not appear to be compressed even though 44 | they have a .asc.gz file extension. 45 | The files will be saved directly to ASCII type. 46 | 47 | """ 48 | logging.info('\nDownloading CIMIS data\n') 49 | logging.debug(' Start date: {}'.format(start_dt)) 50 | logging.debug(' End date: {}'.format(end_dt)) 51 | 52 | # Site URL 53 | site_url = 'http://cimis.casil.ucdavis.edu/cimis/' 54 | 55 | # CIMIS rasters to extract 56 | data_full_list = ['ETo', 'Rso', 'Rs', 'Tdew', 'Tn', 'Tx', 'U2'] 57 | if not variables: 58 | logging.error('\nERROR: variables parameter is empty\n') 59 | sys.exit() 60 | elif type(variables) is not list: 61 | logging.error('\nERROR: variables parameter must be a list\n') 62 | sys.exit() 63 | elif 'all' in variables: 64 | logging.info('Downloading all variables\n {}'.format( 65 | ','.join(data_full_list))) 66 | data_list = data_full_list[:] 67 | elif not set(variables).issubset(set(data_full_list)): 68 | logging.error('\nERROR: variables parameter is invalid\n {}'.format( 69 | variables)) 70 | sys.exit() 71 | else: 72 | data_list = variables 73 | 74 | # Build output workspace if it doesn't exist 75 | if not os.path.isdir(output_ws): 76 | os.makedirs(output_ws) 77 | 78 | # Set data types to lower case for comparison 79 | data_list = list(map(lambda x: x.lower(), data_list)) 80 | 81 | # Each sub folder in the main folder has all imagery for 1 day 82 | # The path for each subfolder is the /YYYY/MM/DD 83 | logging.info('') 84 | for input_date in _utils.date_range(start_dt, end_dt + dt.timedelta(1)): 85 | logging.info('{}'.format(input_date.date())) 86 | date_url = site_url + '/' + input_date.strftime("%Y/%m/%d") 87 | logging.debug(' {}'.format(date_url)) 88 | 89 | # Download a list of all files in the date sub folder 90 | try: 91 | date_html = requests.get(date_url + '/').text 92 | except: 93 | logging.error(" ERROR: {}".format(date_url)) 94 | continue 95 | file_list = sorted(list(set( 96 | re.findall(r'href=[\'"]?([^\'" >]+)', date_html)))) 97 | if not file_list: 98 | logging.debug(' Empty file list, skipping date') 99 | continue 100 | 101 | # Create a separate folder for each day 102 | year_ws = os.path.join(output_ws, input_date.strftime("%Y")) 103 | if not os.path.isdir(year_ws): 104 | os.mkdir(year_ws) 105 | date_ws = os.path.join(year_ws, input_date.strftime("%Y_%m_%d")) 106 | if not os.path.isdir(date_ws): 107 | os.mkdir(date_ws) 108 | 109 | # Process each file in sub folder 110 | for file_name in file_list: 111 | if not file_name.endswith('.asc.gz'): 112 | continue 113 | elif file_name.replace('.asc.gz', '').lower() not in data_list: 114 | continue 115 | 116 | file_url = '{}/{}'.format(date_url, file_name) 117 | 118 | # DEADBEEF - The files on the CIMIS server do not appear to be 119 | # compressed even though they have a .asc.gz file extension. 120 | # Saving the files directly to ASCII type.
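# e.g. a server file named 'ETo.asc.gz' is saved locally as 'ETo.asc'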
121 |             save_path = os.path.join(
122 |                 date_ws, file_name.replace('.asc.gz', '.asc'))
123 |             # save_path = os.path.join(date_ws, file_name)
124 | 
125 |             logging.info('  {}'.format(os.path.basename(save_path)))
126 |             logging.debug('    {}'.format(file_url))
127 |             logging.debug('    {}'.format(save_path))
128 |             if os.path.isfile(save_path):
129 |                 if not overwrite_flag:
130 |                     logging.debug('    File already exists, skipping')
131 |                     continue
132 |                 else:
133 |                     logging.debug('    File already exists, removing existing')
134 |                     os.remove(save_path)
135 | 
136 |             _utils.url_download(file_url, save_path)
137 | 
138 |     logging.debug('\nScript Complete')
139 | 
140 | 
141 | def arg_parse():
142 |     """Base all default folders from script location
143 |         scripts: ./pymetric/tools/cimis
144 |         tools:   ./pymetric/tools
145 |         output:  ./pymetric/cimis
146 |     """
147 |     script_folder = sys.path[0]
148 |     code_folder = os.path.dirname(script_folder)
149 |     project_folder = os.path.dirname(code_folder)
150 |     cimis_folder = os.path.join(project_folder, 'cimis')
151 |     ascii_folder = os.path.join(cimis_folder, 'input_asc')
152 | 
153 |     parser = argparse.ArgumentParser(
154 |         description='Download daily CIMIS data',
155 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
156 |     parser.add_argument(
157 |         '--start', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
158 |         help='Start date')
159 |     parser.add_argument(
160 |         '--end', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
161 |         help='End date')
162 |     parser.add_argument(
163 |         '--ascii', default=ascii_folder, metavar='PATH',
164 |         help='Output ascii root folder path')
165 |     parser.add_argument(
166 |         '-v', '--vars', default=['all'], nargs='+', metavar='ETo',
167 |         choices=['ETo', 'Rso', 'Rs', 'Tdew', 'Tn', 'Tx', 'U2', 'all'],
168 |         help='CIMIS variables to download')
169 |     parser.add_argument(
170 |         '-o', '--overwrite', default=False, action="store_true",
171 |         help='Force overwrite of existing files')
172 |     parser.add_argument(
173 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
174 |         help='Debug level logging', action="store_const", dest="loglevel")
175 |     args = parser.parse_args()
176 | 
177 |     # Convert relative paths to absolute paths
178 |     if args.ascii and os.path.isdir(os.path.abspath(args.ascii)):
179 |         args.ascii = os.path.abspath(args.ascii)
180 | 
181 |     return args
182 | 
183 | 
184 | if __name__ == '__main__':
185 |     args = arg_parse()
186 | 
187 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
188 |     logging.info('\n{}'.format('#' * 80))
189 |     logging.info('{:<20s} {}'.format(
190 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
191 |     logging.info('{:<20s} {}'.format(
192 |         'Script:', os.path.basename(sys.argv[0])))
193 | 
194 |     main(start_dt=args.start, end_dt=args.end, output_ws=args.ascii,
195 |          variables=args.vars, overwrite_flag=args.overwrite)
196 | 
-------------------------------------------------------------------------------- /tools/cimis/cimis_extract_convert.py: --------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: cimis_extract_convert.py
3 | # Purpose: Extract/convert CIMIS data from ASCII to IMG rasters
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import re
11 | import sys
12 | 
13 | import drigo
14 | import numpy as np
15 | 
16 | import _utils
17 | 
18 | 
19 | def main(start_dt, end_dt, input_ws, output_ws, remove_ascii_flag=False,
20 |          stats_flag=False, overwrite_flag=False):
21 |     """Extract/convert CIMIS data from ASCII to IMG rasters
22 | 
23 |     Parameters
24 |     ----------
25 |     start_dt : datetime
26 |         Start date.
27 |     end_dt : datetime
28 |         End date.
29 |     input_ws : str
30 |         Folder path of the input ascii files.
31 |     output_ws : str
32 |         Folder path of the output IMG rasters.
33 |     remove_ascii_flag : bool, optional
34 |         If True, remove extracted ascii files (the default is False).
35 |     stats_flag : bool, optional
36 |         If True, compute raster statistics (the default is False).
37 |     overwrite_flag : bool, optional
38 |         If True, overwrite existing files (the default is False).
39 | 
40 |     Returns
41 |     -------
42 |     None
43 | 
44 |     """
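    # A sketch of the folder layout this function assumes, inferred from the
    # loops below (variable names are the CIMIS raster names, e.g. ETo):
    #     {input_ws}/{YYYY}/{YYYY_MM_DD}/{var}.asc
    #         -> {output_ws}/{YYYY}/{YYYY_MM_DD}/{var}.img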
45 |     logging.info('\nExtracting CIMIS data')
46 |     logging.debug('  Start date: {}'.format(start_dt))
47 |     logging.debug('  End date:   {}'.format(end_dt))
48 | 
49 |     # CIMIS rasters to extract
50 |     data_list = ['ETo', 'Rso', 'Rs', 'Tdew', 'Tn', 'Tx', 'U2']
51 | 
52 |     # Spatial reference parameters
53 |     cimis_proj4 = (
54 |         '+proj=aea +lat_1=34 +lat_2=40.5 +lat_0=0 +lon_0=-120 '
55 |         '+x_0=0 +y_0=-4000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs')
56 |     cimis_osr = drigo.proj4_osr(cimis_proj4)
57 |     # cimis_epsg = 3310  # NAD_1983_California_Teale_Albers
58 |     # cimis_osr = drigo.epsg_osr(cimis_epsg)
59 |     cimis_osr.MorphToESRI()
60 |     cimis_proj = cimis_osr.ExportToWkt()
61 | 
62 |     # Set data types to lower case for comparison
63 |     data_list = list(map(lambda x: x.lower(), data_list))
64 | 
65 |     # Look for ascii files
66 |     for year_str in sorted(os.listdir(input_ws)):
67 |         logging.info('{}'.format(year_str))
68 |         if not re.match(r'^\d{4}$', year_str):
69 |             logging.debug('  Not a 4 digit year folder, skipping')
70 |             continue
71 |         year_ws = os.path.join(input_ws, year_str)
72 |         if start_dt is not None and int(year_str) < start_dt.year:
73 |             logging.debug('  Before start date, skipping')
74 |             continue
75 |         elif end_dt is not None and int(year_str) > end_dt.year:
76 |             logging.debug('  After end date, skipping')
77 |             continue
78 | 
79 |         for date_str in sorted(os.listdir(year_ws)):
80 |             date_ws = os.path.join(year_ws, date_str)
81 |             try:
82 |                 date_dt = dt.datetime.strptime(date_str, '%Y_%m_%d')
83 |             except ValueError:
84 |                 logging.debug(
85 |                     '  Invalid folder date format (YYYY_MM_DD), skipping')
86 |                 continue
87 |             if start_dt is not None and date_dt < start_dt:
88 |                 logging.debug('  Before start date, skipping')
89 |                 continue
90 |             elif end_dt is not None and date_dt > end_dt:
91 |                 logging.debug('  After end date, skipping')
92 |                 continue
93 |             logging.info(date_str)
94 | 
95 |             for file_item in sorted(os.listdir(date_ws)):
96 |                 logging.debug('  {}'.format(file_item))
97 |                 if not file_item.endswith('.asc'):
98 |                     logging.debug(
99 |                         '  Invalid file type (not .asc), skipping')
100 |                     continue
101 | 
102 |                 asc_path = os.path.join(date_ws, file_item)
103 |                 raster_path = asc_path.replace(input_ws, output_ws)\
104 |                     .replace('.asc', '.img')
105 | 
106 |                 # Only process selected raster types
107 |                 if file_item.replace('.asc', '').lower() not in data_list:
108 |                     logging.debug('  Unused file/variable, skipping')
109 |                     continue
110 | 
111 |                 if os.path.isfile(raster_path):
112 |                     logging.debug('  {}'.format(raster_path))
113 |                     if not overwrite_flag:
114 |                         logging.debug('  File already exists, skipping')
115 |                         continue
116 |                     else:
117 |                         logging.debug(
118 |                             '  File already exists, removing existing')
119 |                         os.remove(raster_path)
120 | 
121 |                 # Build the output folder if necessary
122 |                 if not os.path.isdir(os.path.dirname(raster_path)):
123 |                     os.makedirs(os.path.dirname(raster_path))
124 | 
125 |                 # # Set spatial reference of the ASCII files
126 |                 # if build_prj_flag:
127 |                 #     output_osr.MorphToESRI()
128 |                 #     cimis_proj = output_osr.ExportToWkt()
129 |                 #     prj_file = open(asc_path.replace('.asc', '.prj'), 'w')
130 |                 #     prj_file.write(cimis_proj)
131 |                 #     prj_file.close()
132 | 
133 |                 # Convert the ASCII raster to an IMG raster
134 |                 drigo.ascii_to_raster(
135 |                     asc_path, raster_path, input_type=np.float32,
136 |                     input_proj=cimis_proj, stats_flag=stats_flag)
137 |                 if remove_ascii_flag:
138 |                     os.remove(asc_path)
139 | 
140 |                 # Cleanup
141 |                 del asc_path, raster_path
142 | 
143 |     logging.debug('\nScript Complete')
144 | 
145 | 
146 | def arg_parse():
147 |     """Base all default folders from script location
148 |         scripts: ./pymetric/tools/cimis
149 |         tools:   ./pymetric/tools
150 |         output:  ./pymetric/cimis
151 |     """
152 |     script_folder = sys.path[0]
153 |     code_folder = os.path.dirname(script_folder)
154 |     project_folder = os.path.dirname(code_folder)
155 |     cimis_folder = os.path.join(project_folder, 'cimis')
156 |     asc_folder = os.path.join(cimis_folder, 'input_asc')
157 |     img_folder = os.path.join(cimis_folder, 'input_img')
158 | 
159 |     parser = argparse.ArgumentParser(
160 |         description='CIMIS extract/convert',
161 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
162 |     parser.add_argument(
163 |         '--start', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
164 |         help='Start date')
165 |     parser.add_argument(
166 |         '--end', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
167 |         help='End date')
168 |     parser.add_argument(
169 |         '--ascii', default=asc_folder, metavar='PATH',
170 |         help='Input ascii root folder path')
171 |     parser.add_argument(
172 |         '--img', default=img_folder, metavar='PATH',
173 |         help='Output IMG raster folder path')
174 |     parser.add_argument(
175 |         '--stats', default=False, action="store_true",
176 |         help='Compute raster statistics')
177 |     parser.add_argument(
178 |         '-o', '--overwrite', default=False, action="store_true",
179 |         help='Force overwrite of existing files')
180 |     parser.add_argument(
181 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
182 |         help='Debug level logging', action="store_const", dest="loglevel")
183 |     args = parser.parse_args()
184 | 
185 |     # Convert relative paths to absolute paths
186 |     if args.ascii and os.path.isdir(os.path.abspath(args.ascii)):
187 |         args.ascii = os.path.abspath(args.ascii)
188 |     if args.img and os.path.isdir(os.path.abspath(args.img)):
189 |         args.img = os.path.abspath(args.img)
190 | 
191 |     return args
192 | 
193 | 
194 | if __name__ == '__main__':
195 |     args = arg_parse()
196 | 
197 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
198 |     logging.info('\n{}'.format('#' * 80))
199 |     logging.info('{:<20s} {}'.format(
200 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
201 |     logging.info('{:<20s} {}'.format(
202 |         'Script:', os.path.basename(sys.argv[0])))
203 | 
204 |     main(start_dt=args.start, end_dt=args.end,
205 |          input_ws=args.ascii, output_ws=args.img,
206 |          stats_flag=args.stats, overwrite_flag=args.overwrite)
207 | 
-------------------------------------------------------------------------------- /tools/daymet/README.md: --------------------------------------------------------------------------------
1 | # DAYMET
2 | 
3 | Scripts for downloading and preparing DAYMET daily weather data
4 | 
5 | daymet_ancillary.py - Download and process the DAYMET elevation,
6 |     latitude, and longitude rasters
7 | daymet_download.py - Download
the ".nc" files from the DAYMET website 8 | The default date range is 2017-01-01 to 2017-12-31 9 | daymet_daily_ea.py - Calculate daily vapor pressure from the DAYMET inputs 10 | daymet_daily_ppt.py - Calculate daily precipitation from the DAYMET inputs 11 | daymet_daily_temp.py - Calculate daily min/max temperature from the DAYMET inputs 12 | daymet_daily_variables.py - Calculate daily variables from the DAYMET inputs 13 | -------------------------------------------------------------------------------- /tools/daymet/_utils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import datetime as dt 3 | import logging 4 | 5 | import requests 6 | 7 | 8 | def date_range(start_date, end_date): 9 | """Yield datetimes within a date range""" 10 | for n in range(int((end_date - start_date).days)): 11 | yield start_date + dt.timedelta(n) 12 | 13 | 14 | def url_download(download_url, output_path, verify=True): 15 | """Download file from a URL using requests module""" 16 | response = requests.get(download_url, stream=True, verify=verify) 17 | if response.status_code != 200: 18 | logging.error(' HTTPError: {}'.format(response.status_code)) 19 | return False 20 | 21 | logging.debug(' Beginning download') 22 | with (open(output_path, "wb")) as output_f: 23 | for chunk in response.iter_content(chunk_size=1024 * 1024): 24 | if chunk: # filter out keep-alive new chunks 25 | output_f.write(chunk) 26 | logging.debug(' Download complete') 27 | return True 28 | 29 | 30 | def valid_date(input_date): 31 | """Check that a date string is ISO format (YYYY-MM-DD) 32 | 33 | This function is used to check the format of dates entered as command 34 | line arguments. 35 | DEADBEEF - It would probably make more sense to have this function 36 | parse the date using dateutil parser (http://labix.org/python-dateutil) 37 | and return the ISO format string. 38 | 39 | Parameters 40 | ---------- 41 | input_date : str 42 | 43 | Returns 44 | ------- 45 | datetime 46 | 47 | Raises 48 | ------ 49 | ArgParse ArgumentTypeError 50 | 51 | """ 52 | try: 53 | return dt.datetime.strptime(input_date, "%Y-%m-%d") 54 | # return dt.datetime.strptime(input_date, "%Y-%m-%d").date().isoformat() 55 | except ValueError: 56 | msg = "Not a valid date: '{}'.".format(input_date) 57 | raise argparse.ArgumentTypeError(msg) 58 | -------------------------------------------------------------------------------- /tools/daymet/daymet_ancillary.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: daymet_ancillary.py 3 | # Purpose: Process DAYMET ancillary data 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import sys 11 | 12 | import drigo 13 | import numpy as np 14 | 15 | 16 | def main(ancillary_ws, zero_elev_nodata_flag=False, overwrite_flag=False): 17 | """Process DAYMET ancillary data 18 | 19 | Parameters 20 | ---------- 21 | ancillary_ws : str 22 | Folder of ancillary rasters. 23 | zero_elev_nodata_flag : bool, optional 24 | If True, set elevation nodata values to 0 (the default is False). 25 | overwrite_flag : bool, optional 26 | If True, overwrite existing files (the default is False). 
27 | 
28 |     Returns
29 |     -------
30 |     None
31 | 
32 |     """
33 |     logging.info('\nProcess DAYMET ancillary rasters')
34 | 
35 |     # Site URL
36 |     # ancillary_url = 'http://daymet.ornl.gov/files/ancillary_files.tgz'
37 | 
38 |     # Build output workspace if it doesn't exist
39 |     if not os.path.isdir(ancillary_ws):
40 |         os.makedirs(ancillary_ws)
41 | 
42 |     # Input paths
43 |     # ancillary_targz = os.path.join(
44 |     #     ancillary_ws, os.path.basename(ancillary_url))
45 |     # dem_nc = os.path.join(ancillary_ws, 'dem_data.nc')
46 |     # mask_nc = os.path.join(ancillary_ws, 'mask_data.nc')
47 | 
48 |     # Output paths
49 |     dem_raster = os.path.join(ancillary_ws, 'daymet_elev.img')
50 |     lat_raster = os.path.join(ancillary_ws, 'daymet_lat.img')
51 |     lon_raster = os.path.join(ancillary_ws, 'daymet_lon.img')
52 |     # mask_raster = os.path.join(ancillary_ws, 'daymet_mask.img')
53 | 
54 |     # Spatial reference parameters
55 |     daymet_proj4 = (
56 |         "+proj=lcc +datum=WGS84 +lat_1=25n "
57 |         "+lat_2=60n +lat_0=42.5n +lon_0=100w")
58 |     daymet_osr = drigo.proj4_osr(daymet_proj4)
59 |     daymet_osr.MorphToESRI()
60 |     daymet_proj = daymet_osr.ExportToWkt()
61 |     daymet_cs = 1000
62 |     # daymet_nodata = -9999
63 | 
64 |     # For now, hardcode the DAYMET extent/geo
65 |     snap_xmin, snap_ymin = -4560750, -3090500
66 |     daymet_rows, daymet_cols = 8075, 7814
67 |     # snap_xmin, snap_ymin = -4659000, -3135000
68 |     # daymet_rows, daymet_cols = 8220, 8011
69 |     # daymet_geo = (
70 |     #     snap_xmin, daymet_cs, 0.,
71 |     #     snap_ymin + daymet_cs * daymet_rows, 0., -daymet_cs)
72 |     daymet_extent = drigo.Extent([
73 |         snap_xmin, snap_ymin,
74 |         snap_xmin + daymet_cs * daymet_cols,
75 |         snap_ymin + daymet_cs * daymet_rows])
76 |     daymet_geo = daymet_extent.geo(daymet_cs)
77 |     logging.debug("  Extent: {}".format(daymet_extent))
78 |     logging.debug("  Geo: {}".format(daymet_geo))
79 |     # logging.debug("  Cellsize: {}".format(daymet_cs))
80 |     # logging.debug("  Shape: {}".format(daymet_extent.shape(daymet_cs)))
81 | 
82 |     # # Download the ancillary raster tar.gz
83 |     # if overwrite_flag or not os.path.isfile(ancillary_targz):
84 |     #     logging.info('\nDownloading ancillary tarball files')
85 |     #     logging.info("  {}".format(os.path.basename(ancillary_url)))
86 |     #     logging.debug("  {}".format(ancillary_url))
87 |     #     logging.debug("  {}".format(ancillary_targz))
88 |     #     url_download(ancillary_url, ancillary_targz)
89 |     #     try:
90 |     #         urllib.urlretrieve(ancillary_url, ancillary_targz)
91 |     #     except:
92 |     #         logging.error("  ERROR: {}\n  FILE: {}".format(
93 |     #             sys.exc_info()[0], ancillary_targz))
94 |     #         os.remove(ancillary_targz)
95 | 
96 |     # # Extract the ancillary rasters
97 |     # ancillary_list = [dem_nc]
98 |     # # ancillary_list = [dem_nc, mask_nc]
99 |     # if (os.path.isfile(ancillary_targz) and
100 |     #         (overwrite_flag or
101 |     #          not all([os.path.isfile(os.path.join(ancillary_ws, x))
102 |     #                   for x in ancillary_list]))):
103 |     #     logging.info('\nExtracting ancillary rasters')
104 |     #     logging.debug("  {}".format(ancillary_targz))
105 |     #     tar = tarfile.open(ancillary_targz)
106 |     #     for member in tar.getmembers():
107 |     #         print(member.name)
108 |     #         member.name = os.path.basename(member.name)
109 |     #         # Strip off leading numbers from ancillary raster name
110 |     #         member.name = member.name.split('_', 1)[1]
111 |     #         member_path = os.path.join(ancillary_ws, member.name)
112 |     #         if not member.name.endswith('.nc'):
113 |     #             continue
114 |     #         elif member_path not in ancillary_list:
115 |     #             continue
116 |     #         elif os.path.isfile(member_path):
117 |     #             continue
118 |     #         logging.debug("  {}".format(member.name))
119 |     #
tar.extract(member, ancillary_ws) 120 | # tar.close() 121 | 122 | # # Mask 123 | # if ((overwrite_flag or 124 | # not os.path.isfile(mask_raster)) and 125 | # os.path.isfile(mask_nc)): 126 | # logging.info('\nExtracting mask raster') 127 | # mask_nc_f = netCDF4.Dataset(mask_nc, 'r') 128 | # logging.debug(mask_nc_f) 129 | # # logging.debug(mask_nc_f.variables['image']) 130 | # mask_array = mask_nc_f.variables['image'][:] 131 | # mask_array[mask_array == daymet_nodata] = 255 132 | # drigo.array_to_raster( 133 | # mask_array, mask_raster, 134 | # output_geo=daymet_geo, output_proj=daymet_proj, 135 | # output_nodata=255) 136 | # mask_nc_f.close() 137 | 138 | # # DEM 139 | # if ((overwrite_flag or not os.path.isfile(dem_raster)) and 140 | # os.path.isfile(dem_nc)): 141 | # logging.info('\nExtracting DEM raster') 142 | # dem_nc_f = netCDF4.Dataset(dem_nc, 'r') 143 | # logging.debug(dem_nc_f) 144 | # # logging.debug(dem_nc_f.variables['image']) 145 | # dem_array = dem_nc_f.variables['image'][:] 146 | # # Rounding issues of the nodata value when converting to float32 147 | # dem_array[dem_array == daymet_nodata] -= 1 148 | # dem_array = dem_array.astype(np.float32) 149 | # if zero_elev_nodata_flag: 150 | # dem_array[dem_array <= daymet_nodata] = 0 151 | # else: 152 | # dem_array[dem_array <= daymet_nodata] = np.nan 153 | # drigo.array_to_raster( 154 | # dem_array, dem_raster, 155 | # output_geo=daymet_geo, output_proj=daymet_proj) 156 | # dem_nc_f.close() 157 | 158 | # Latitude/Longitude 159 | if (os.path.isfile(dem_raster) and 160 | (overwrite_flag or 161 | not os.path.isfile(lat_raster) or 162 | not os.path.isfile(lon_raster))): 163 | logging.info('\nDAYMET Latitude/Longitude') 164 | logging.debug(' {}'.format(lat_raster)) 165 | lat_array, lon_array = drigo.raster_lat_lon_func( 166 | dem_raster, gcs_cs=0.05) 167 | drigo.array_to_raster( 168 | lat_array.astype(np.float32), lat_raster, 169 | output_geo=daymet_geo, output_proj=daymet_proj) 170 | logging.debug(' {}'.format(lon_raster)) 171 | drigo.array_to_raster( 172 | lon_array.astype(np.float32), lon_raster, 173 | output_geo=daymet_geo, output_proj=daymet_proj) 174 | del lat_array, lon_array 175 | 176 | logging.debug('\nScript Complete') 177 | 178 | 179 | def arg_parse(): 180 | """Base all default folders from script location 181 | scripts: ./pymetric/tools/daymet 182 | tools: ./pymetric/tools 183 | output: ./pymetric/daymet 184 | """ 185 | script_folder = sys.path[0] 186 | code_folder = os.path.dirname(script_folder) 187 | project_folder = os.path.dirname(code_folder) 188 | daymet_folder = os.path.join(project_folder, 'daymet') 189 | ancillary_folder = os.path.join(daymet_folder, 'ancillary') 190 | 191 | parser = argparse.ArgumentParser( 192 | description='Download/prep DAYMET ancillary data', 193 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 194 | parser.add_argument( 195 | '--ancillary', default=ancillary_folder, metavar='PATH', 196 | help='Ancillary raster folder path') 197 | parser.add_argument( 198 | '--zero', default=False, action="store_true", 199 | help='Set elevation nodata values to 0') 200 | parser.add_argument( 201 | '-o', '--overwrite', default=False, action="store_true", 202 | help='Force overwrite of existing files') 203 | parser.add_argument( 204 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 205 | help='Debug level logging', action="store_const", dest="loglevel") 206 | args = parser.parse_args() 207 | 208 | # Convert relative paths to absolute paths 209 | if args.ancillary and 
os.path.isdir(os.path.abspath(args.ancillary)):
210 |         args.ancillary = os.path.abspath(args.ancillary)
211 | 
212 |     return args
213 | 
214 | 
215 | if __name__ == '__main__':
216 |     args = arg_parse()
217 | 
218 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
219 |     logging.info('\n{}'.format('#' * 80))
220 |     logging.info('{:<20s} {}'.format(
221 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
222 |     logging.info('{:<20s} {}'.format(
223 |         'Script:', os.path.basename(sys.argv[0])))
224 | 
225 |     main(ancillary_ws=args.ancillary, zero_elev_nodata_flag=args.zero,
226 |          overwrite_flag=args.overwrite)
227 | 
-------------------------------------------------------------------------------- /tools/daymet/daymet_download.py: --------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: daymet_download.py
3 | # Purpose: Download DAYMET data
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import sys
11 | 
12 | import _utils
13 | 
14 | 
15 | def main(start_dt, end_dt, netcdf_ws, variables=['all'],
16 |          overwrite_flag=False):
17 |     """Download DAYMET netcdf files
18 | 
19 |     Data is currently only available for 1980-2017
20 | 
21 |     Parameters
22 |     ----------
23 |     start_dt : datetime
24 |         Start date.
25 |     end_dt : datetime
26 |         End date.
27 |     netcdf_ws : str
28 |         Root folder of DAYMET data.
29 |     variables : list, optional
30 |         DAYMET variables to download ('prcp', 'srad', 'vp', 'tmin', 'tmax').
31 |         Set as ['all'] to download all available variables.
32 |     overwrite_flag : bool, optional
33 |         If True, overwrite existing files (the default is False).
34 | 
35 |     Returns
36 |     -------
37 |     None
38 | 
39 |     Notes
40 |     -----
41 |     https://thredds.daac.ornl.gov/thredds/catalog/ornldaac/1328/catalog.html
42 | 
43 |     """
44 |     logging.info('\nDownloading DAYMET data')
45 |     logging.debug('  Start date: {}'.format(start_dt))
46 |     logging.debug('  End date:   {}'.format(end_dt))
47 | 
48 |     site_url = 'http://thredds.daac.ornl.gov/thredds/fileServer/ornldaac/1328'
49 | 
50 |     # DAYMET rasters to extract
51 |     var_full_list = ['prcp', 'srad', 'vp', 'tmin', 'tmax']
52 |     if not variables:
53 |         logging.error('\nERROR: variables parameter is empty\n')
54 |         sys.exit()
55 |     elif type(variables) is not list:
56 |         # DEADBEEF - I could try converting comma separated strings to lists?
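        # A minimal sketch of that idea (hypothetical, not wired in), which
        # would let a caller pass "tmin,tmax" as a single string:
        #     if isinstance(variables, str):
        #         variables = [v.strip() for v in variables.split(',') if v.strip()]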
57 |         logging.error('\nERROR: variables parameter must be a list\n')
58 |         sys.exit()
59 |     elif 'all' in variables:
60 |         logging.info('\nDownloading all variables\n  {}'.format(
61 |             ','.join(var_full_list)))
62 |         var_list = var_full_list
63 |     elif not set(variables).issubset(set(var_full_list)):
64 |         logging.error('\nERROR: variables parameter is invalid\n  {}'.format(
65 |             variables))
66 |         sys.exit()
67 |     else:
68 |         var_list = variables[:]
69 | 
70 |     # Build output workspace if it doesn't exist
71 |     if not os.path.isdir(netcdf_ws):
72 |         os.makedirs(netcdf_ws)
73 | 
74 |     # DAYMET data is stored by year
75 |     year_list = sorted(list(set([
76 |         i_dt.year for i_dt in _utils.date_range(
77 |             start_dt, end_dt + dt.timedelta(1))])))
78 |     year_list = list(map(lambda x: '{:04d}'.format(x), year_list))
79 | 
80 |     # Set data types to lower case for comparison
81 |     var_list = list(map(lambda x: x.lower(), var_list))
82 | 
83 |     # The server stores one netCDF file per variable per year
84 |     # The path for each file is /{year}/daymet_v3_{variable}_{year}_na.nc4
85 |     logging.info('')
86 |     for year_str in year_list:
87 |         logging.info(year_str)
88 | 
89 |         # Download each variable file for the year
90 |         for variable in var_list:
91 |             file_name = 'daymet_v3_{}_{}_na.nc4'.format(variable, year_str)
92 |             file_url = '{}/{}/{}'.format(site_url, year_str, file_name)
93 |             save_path = os.path.join(netcdf_ws, file_name)
94 | 
95 |             logging.info('  {}'.format(file_name))
96 |             logging.debug('    {}'.format(file_url))
97 |             logging.debug('    {}'.format(save_path))
98 |             if os.path.isfile(save_path):
99 |                 if not overwrite_flag:
100 |                     logging.debug('    File already exists, skipping')
101 |                     continue
102 |                 else:
103 |                     logging.debug('    File already exists, removing existing')
104 |                     os.remove(save_path)
105 | 
106 |             _utils.url_download(file_url, save_path)
107 | 
108 |     logging.debug('\nScript Complete')
109 | 
110 | 
111 | def arg_parse():
112 |     """Base all default folders from script location
113 |         scripts: ./pymetric/tools/daymet
114 |         tools:   ./pymetric/tools
115 |         output:  ./pymetric/daymet
116 |     """
117 |     script_folder = sys.path[0]
118 |     code_folder = os.path.dirname(script_folder)
119 |     project_folder = os.path.dirname(code_folder)
120 |     daymet_folder = os.path.join(project_folder, 'daymet')
121 |     netcdf_folder = os.path.join(daymet_folder, 'netcdf')
122 | 
123 |     parser = argparse.ArgumentParser(
124 |         description='Download daily DAYMET data',
125 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
126 |     parser.add_argument(
127 |         '--start', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
128 |         help='Start date')
129 |     parser.add_argument(
130 |         '--end', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
131 |         help='End date')
132 |     parser.add_argument(
133 |         '--netcdf', default=netcdf_folder, metavar='PATH',
134 |         help='Output netCDF folder path')
135 |     parser.add_argument(
136 |         '--vars', default=['all'], nargs='+',
137 |         choices=['all', 'prcp', 'srad', 'vp', 'tmin', 'tmax'],
138 |         help='DAYMET variables to download')
139 |     parser.add_argument(
140 |         '-o', '--overwrite', default=False, action="store_true",
141 |         help='Force overwrite of existing files')
142 |     parser.add_argument(
143 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
144 |         help='Debug level logging', action="store_const", dest="loglevel")
145 |     args = parser.parse_args()
146 | 
147 |     # Convert relative paths to absolute paths
148 |     if args.netcdf and os.path.isdir(os.path.abspath(args.netcdf)):
149 |         args.netcdf = os.path.abspath(args.netcdf)
150 | 
151 |     return args
152 | 
153 | 
154 | if __name__ == '__main__':
155 |     args = arg_parse()
156 | 
157 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
158 |     logging.info('\n{}'.format('#' * 80))
159 |     logging.info('{:<20s} {}'.format(
160 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
161 |     logging.info('{:<20s} {}'.format(
162 |         'Script:', os.path.basename(sys.argv[0])))
163 | 
164 |     main(start_dt=args.start, end_dt=args.end, netcdf_ws=args.netcdf,
165 |          variables=args.vars, overwrite_flag=args.overwrite)
166 | 
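# Example invocation (illustrative; the flags match arg_parse above):
#   python tools/daymet/daymet_download.py --start 2015-01-01 --end 2015-12-31 \
#       --vars tmin tmax prcp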
-------------------------------------------------------------------------------- /tools/download/README.md: --------------------------------------------------------------------------------
1 | # Download
2 | The following scripts download the data required to run pyMETRIC.
3 | 
4 | ### download_cdl.py
5 | This script will download the CONUS-wide CDL image. By default, the CDL image will be saved to the folder '.\cdl'.
6 | 
7 | ### download_footprints.py
8 | This script will download the global Landsat WRS2 descending footprint shapefile. By default, the shapefile will be saved to the folder ".\landsat\footprints".
9 | 
10 | ### download_landfire.py
11 | LANDFIRE data will not be used for the Harney example, but the following script will download a CONUS-wide LANDFIRE image. By default, the LANDFIRE image will be saved to the folder ".\landfire".
12 | 
13 | ### download_ned.py
14 | This script will download the 1x1 degree 1-arcsecond (~30m) resolution NED tiles that intersect the study area. By default, the NED tiles will be saved to the folder ".\dem\tiles". For the script to run, a shapefile of the study area extent must be provided for the "--extent" command line argument.
15 | 
16 | The script downloads the NED tiles from the USGS National Map staged products server; they can also be downloaded manually from the [USGS FTP server](ftp://rockyftp.cr.usgs.gov/vdelivery/Datasets/Staged/Elevation/1/IMG).
17 | 
18 | ### download_nlcd.py
19 | The CONUS-wide NLCD image can be downloaded using the following command. This script can only download the 2001, 2006, or 2011 NLCD images. By default, the NLCD image will be saved to the folder ".\nlcd".
20 | ```
21 | C:\pymetric>python tools\download\download_nlcd.py -y 2011
22 | ```
23 | 
24 | ### download_soils.py
25 | This script will download a CONUS-wide Available Water Capacity (AWC) raster to the appropriate directory.
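The download scripts share a common command line pattern. For example, the footprint and NED downloads might be run as follows (the extent shapefile path is illustrative and depends on your project layout):
```
C:\pymetric>python tools\download\download_footprints.py
C:\pymetric>python tools\download\download_ned.py --extent study_area\my_study_area.shp
```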
-------------------------------------------------------------------------------- /tools/download/_utils.py: --------------------------------------------------------------------------------
1 | import argparse
2 | import datetime as dt
3 | from ftplib import FTP
4 | import logging
5 | 
6 | import requests
7 | from requests_html import HTMLSession
8 | 
9 | 
10 | def ftp_download(site_url, site_folder, file_name, output_path):
11 |     """Download a file from an FTP server using anonymous login"""
12 |     try:
13 |         ftp = FTP()
14 |         ftp.connect(site_url)
15 |         ftp.login()
16 |         ftp.cwd('{}'.format(site_folder))
17 |         logging.debug('  Beginning download')
18 |         ftp.retrbinary('RETR %s' % file_name, open(output_path, 'wb').write)
19 |         logging.debug('  Download complete')
20 |         ftp.quit()
21 |     except Exception as e:
22 |         logging.info('  Unhandled exception: {}'.format(e))
23 | 
24 | 
25 | def ftp_file_list(site_url, site_folder):
26 |     """Return the list of file names in an FTP folder"""
27 |     try:
28 |         ftp = FTP()
29 |         ftp.connect(site_url)
30 |         ftp.login()
31 |         ftp.cwd('{}'.format(site_folder))
32 |         files = ftp.nlst()
33 |         ftp.quit()
34 |     except Exception as e:
35 |         logging.info('  Unhandled exception: {}'.format(e))
36 |         files = []
37 |     return files
38 | 
39 | 
40 | def html_link_list(url):
41 |     """List all links at the url."""
42 |     session = HTMLSession()
43 |     r = session.get(url)
44 |     r.html.render(timeout=10, sleep=10)
45 |     return list(r.html.links)
46 | 
47 | 
48 | def url_download(download_url, output_path, verify=True):
49 |     """Download file from a URL using requests module"""
50 |     response = requests.get(download_url, stream=True, verify=verify)
51 |     if response.status_code != 200:
52 |         logging.error('  HTTPError: {}'.format(response.status_code))
53 |         return False
54 | 
55 |     logging.debug('  Beginning download')
56 |     with open(output_path, "wb") as output_f:
57 |         for chunk in response.iter_content(chunk_size=1024 * 1024):
58 |             if chunk:  # filter out keep-alive new chunks
59 |                 output_f.write(chunk)
60 |     logging.debug('  Download complete')
61 |     return True
62 | 
63 | 
64 | def valid_date(input_date):
65 |     """Check that a date string is ISO format (YYYY-MM-DD)
66 | 
67 |     This function is used to check the format of dates entered as command
68 |     line arguments.
69 |     DEADBEEF - It would probably make more sense to have this function
70 |     parse the date using dateutil parser (http://labix.org/python-dateutil)
71 |     and return the ISO format string.
72 | 
73 |     Parameters
74 |     ----------
75 |     input_date : str
76 | 
77 |     Returns
78 |     -------
79 |     datetime
80 | 
81 |     Raises
82 |     ------
83 |     argparse.ArgumentTypeError
84 | 
85 |     """
86 |     try:
87 |         return dt.datetime.strptime(input_date, "%Y-%m-%d")
88 |         # return dt.datetime.strptime(input_date, "%Y-%m-%d").date().isoformat()
89 |     except ValueError:
90 |         msg = "Not a valid date: '{}'.".format(input_date)
91 |         raise argparse.ArgumentTypeError(msg)
92 | 
-------------------------------------------------------------------------------- /tools/download/download_cdl.py: --------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: download_cdl.py
3 | # Purpose: Download national CDL zips
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import sys
11 | import zipfile
12 | 
13 | import _utils
14 | 
15 | 
16 | def main(year, output_folder, overwrite_flag=False):
17 |     """Download national CDL zips
18 | 
19 |     Parameters
20 |     ----------
21 |     year : int
22 |         4 digit year.
23 |     output_folder : str
24 |         Folder path where files will be saved.
25 |     overwrite_flag : bool, optional
26 |         If True, overwrite existing files (the default is False).
27 | 
28 |     Returns
29 |     -------
30 |     None
31 | 
32 |     """
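    # Example invocation (illustrative; -y is the only required argument):
    #   python tools/download/download_cdl.py -y 2015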
33 |     site_url = 'ftp.nass.usda.gov'
34 |     site_folder = 'download/res'
35 |     zip_name = '{}_30m_cdls.zip'.format(year)
36 |     zip_path = os.path.join(output_folder, zip_name)
37 | 
38 |     if int(year) < 2008:
39 |         logging.error('\nERROR: CDL data is not available before 2008, exiting')
40 |         return False
41 | 
42 |     if not os.path.isdir(output_folder):
43 |         os.makedirs(output_folder)
44 | 
45 |     if not os.path.isfile(zip_path) or overwrite_flag:
46 |         logging.info('\nDownload CDL files')
47 |         logging.info('  {}'.format(
48 |             '/'.join([site_url, site_folder, zip_name])))
49 |         logging.info('  {}'.format(zip_path))
50 |         _utils.ftp_download(site_url, site_folder, zip_name, zip_path)
51 |     else:
52 |         logging.info('\nCDL raster already downloaded')
53 | 
54 |     if os.path.isfile(zip_path):
55 |         logging.info('\nExtracting CDL files')
56 |         with zipfile.ZipFile(zip_path) as zf:
57 |             zf.extractall(output_folder)
58 |     else:
59 |         logging.info('\nCDL zip file not present')
60 | 
61 | 
62 | def arg_parse():
63 |     """Base all default folders from script location
64 |         scripts: ./pymetric/tools/download
65 |         tools:   ./pymetric/tools
66 |         output:  ./pymetric/cdl
67 |     """
68 |     script_folder = sys.path[0]
69 |     code_folder = os.path.dirname(script_folder)
70 |     project_folder = os.path.dirname(code_folder)
71 |     output_folder = os.path.join(project_folder, 'cdl')
72 | 
73 |     parser = argparse.ArgumentParser(
74 |         description='Download CDL',
75 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
76 |     parser.add_argument(
77 |         '-y', '--year', help='Year', metavar='YEAR', required=True,
78 |         choices=range(2008, dt.datetime.today().year), type=int)
79 |     parser.add_argument(
80 |         '--output', default=output_folder, metavar='FOLDER',
81 |         help='Output folder')
82 |     parser.add_argument(
83 |         '-o', '--overwrite', default=None, action="store_true",
84 |         help='Force overwrite of existing files')
85 |     parser.add_argument(
86 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
87 |         help='Debug level logging', action="store_const", dest="loglevel")
88 |     args = parser.parse_args()
89 | 
90 |     # Convert relative paths to absolute paths
91 |     if args.output and os.path.isdir(os.path.abspath(args.output)):
92 |         args.output = os.path.abspath(args.output)
93 | 
94 |     return args
95 | 
96 | 
97 | if __name__ == '__main__':
98 |     args = arg_parse()
99 | 
100 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
101 |     logging.info('\n{}'.format('#' * 80))
102 |     log_f = '{:<20s} {}'
103 |     logging.info(log_f.format(
104 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
105 |     logging.info(log_f.format('Script:', os.path.basename(sys.argv[0])))
106 | 
107 |     main(year=args.year, output_folder=args.output,
108 |          overwrite_flag=args.overwrite)
109 | 
-------------------------------------------------------------------------------- /tools/download/download_footprints.py: --------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: download_footprints.py
3 | # Purpose: Download WRS2 descending footprints shapefile
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import shutil
11 | import sys
12 | import zipfile
13 | 
14 | import _utils
15 | 
16 | 
17 | def main(output_folder, overwrite_flag=False):
18 |     """Download Landsat WRS2 descending footprint
shapefile 19 | 20 | Parameters 21 | ---------- 22 | output_folder : str 23 | Folder path where files will be saved. 24 | overwrite_flag : bool, optional 25 | If True, overwrite existing files (the default is False). 26 | 27 | Returns 28 | ------- 29 | None 30 | 31 | """ 32 | download_url = ( 33 | 'https://landsat.usgs.gov/sites/default/files/documents/' 34 | 'WRS2_descending.zip') 35 | 36 | zip_name = 'wrs2_descending.zip' 37 | zip_path = os.path.join(output_folder, zip_name) 38 | 39 | output_name = zip_name.replace('.zip', '.shp') 40 | output_path = os.path.join(output_folder, output_name) 41 | # output_path = os.path.join( 42 | # output_folder, os.path.splitext(zip_name)[0], output_name) 43 | 44 | if not os.path.isdir(output_folder): 45 | os.makedirs(output_folder) 46 | 47 | if ((not os.path.isfile(zip_path) and not os.path.isfile(output_path)) or 48 | overwrite_flag): 49 | logging.info('\nDownloading Landsat WRS2 descending shapefile') 50 | logging.info(' {}'.format(download_url)) 51 | logging.info(' {}'.format(zip_path)) 52 | _utils.url_download(download_url, zip_path) 53 | else: 54 | logging.info('\nFootprint shapefile already downloaded') 55 | 56 | if ((overwrite_flag or not os.path.isfile(output_path)) and 57 | os.path.isfile(zip_path)): 58 | logging.info('\nExtracting Landsat WRS2 descending shapefile') 59 | logging.debug(' {}'.format(output_path)) 60 | with zipfile.ZipFile(zip_path) as zf: 61 | zf.extractall(output_folder) 62 | else: 63 | logging.info('\nFootprint shapefile already extracted') 64 | 65 | # If the wrs2_tile_utm_zones.json doesn't exist in the output folder, 66 | # copy it there. Use the script location to figure out the input folder 67 | json_name = 'wrs2_tile_utm_zones.json' 68 | input_folder = os.path.join( 69 | os.path.dirname(os.path.dirname(sys.path[0])), 'landsat', 'footprints') 70 | input_json_path = os.path.join(input_folder, 'wrs2_tile_utm_zones.json') 71 | output_json_path = os.path.join(output_folder, 'wrs2_tile_utm_zones.json') 72 | if not os.path.isfile(output_json_path) and os.path.isfile(input_json_path): 73 | logging.info('\nCopying {} to the output footprints folder'.format( 74 | json_name)) 75 | shutil.copy(input_json_path, output_json_path) 76 | 77 | 78 | def arg_parse(): 79 | """Base all default folders from script location 80 | scripts: ./pymetric/tools/download 81 | tools: ./pymetric/tools 82 | output: ./pymetric/landsat/footprint 83 | """ 84 | script_folder = sys.path[0] 85 | code_folder = os.path.dirname(script_folder) 86 | project_folder = os.path.dirname(code_folder) 87 | output_folder = os.path.join(project_folder, 'landsat', 'footprints') 88 | 89 | parser = argparse.ArgumentParser( 90 | description='Download Landsat footprints', 91 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 92 | parser.add_argument( 93 | '--output', default=output_folder, metavar='FOLDER', 94 | help='Output folder') 95 | parser.add_argument( 96 | '-o', '--overwrite', default=None, action="store_true", 97 | help='Force overwrite of existing files') 98 | parser.add_argument( 99 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 100 | help='Debug level logging', action="store_const", dest="loglevel") 101 | args = parser.parse_args() 102 | 103 | # Convert relative paths to absolute paths 104 | if args.output and os.path.isdir(os.path.abspath(args.output)): 105 | args.output = os.path.abspath(args.output) 106 | 107 | return args 108 | 109 | 110 | if __name__ == '__main__': 111 | args = arg_parse() 112 | 113 | logging.basicConfig(level=args.loglevel, 
format='%(message)s') 114 | logging.info('\n{}'.format('#' * 80)) 115 | log_f = '{:<20s} {}' 116 | logging.info(log_f.format( 117 | 'Run Time Stamp:', dt.datetime.now().isoformat(' '))) 118 | logging.info(log_f.format('Script:', os.path.basename(sys.argv[0]))) 119 | 120 | main(output_folder=args.output, overwrite_flag=args.overwrite) 121 | -------------------------------------------------------------------------------- /tools/download/download_landfire.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: download_landfire.py 3 | # Purpose: Download LANDFIRE vegetation type raster 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import shutil 11 | import sys 12 | import zipfile 13 | 14 | import _utils 15 | 16 | 17 | def main(output_folder, version='140', overwrite_flag=False): 18 | """Download LANDFIRE vegetation type raster 19 | 20 | Parameters 21 | ---------- 22 | output_folder : str 23 | Folder path where files will be saved. 24 | version : {'105', '110', '120', '130', '140'} 25 | LANDFIRE version string (the default is '140'). 26 | overwrite_flag : bool, optional 27 | If True, overwrite existing files (the default is False). 28 | 29 | Returns 30 | ------- 31 | None 32 | 33 | """ 34 | version = str(version).replace('.', '') 35 | 36 | base_url = 'http://www.landfire.gov/bulk/downloadfile.php?FNAME=' 37 | zip_dict = { 38 | '140': 'US_{0}_mosaic-US_{0}EVT_20180618.zip&TYPE=landfire'.format(version), 39 | '130': 'US_{0}_Mosaic-US_{0}_EVT_04232015.zip&TYPE=landfire'.format(version), 40 | '120': 'US_{0}_Mosaic-US_{0}_EVT_06142017.zip&TYPE=landfire'.format(version), 41 | '110': 'US_{0}_mosaic_Refresh-US_{0}EVT_05312018.zip&TYPE=landfire'.format(version), 42 | '105': 'US_{0}_mosaic_Refresh-US_{0}evt_09122104.zip&TYPE=landfire'.format(version), 43 | } 44 | download_url = base_url + zip_dict[version] 45 | 46 | output_name = 'US_{}_EVT'.format(version) 47 | zip_path = os.path.join(output_folder, output_name + '.zip') 48 | 49 | if not os.path.isdir(output_folder): 50 | os.makedirs(output_folder) 51 | 52 | if not os.path.isfile(zip_path) or overwrite_flag: 53 | logging.info('\nDownloading LANDFIRE vegetation type') 54 | logging.info(' {}'.format(download_url)) 55 | logging.info(' {}'.format(zip_path)) 56 | _utils.url_download(download_url, zip_path) 57 | else: 58 | logging.info('\nLANDFIRE zip file already downloaded') 59 | 60 | if os.path.isfile(zip_path): 61 | logging.info('\nExtracting LANDFIRE files') 62 | with zipfile.ZipFile(zip_path) as zf: 63 | # Extract files using zip naming and folder structure 64 | # zf.extractall(output_folder) 65 | 66 | # Ignore top level zip folder name 67 | for member in zf.namelist(): 68 | # Replace root folder and switch to OS separator 69 | output_path = list(member.split('/')) 70 | output_path[0] = output_name 71 | output_path = os.sep.join(output_path) 72 | 73 | # Standardize the naming of the "Grid" folder 74 | output_path = output_path.replace('grid1', 'Grid')\ 75 | .replace('grid2', 'Grid')\ 76 | .replace('grid', 'Grid')\ 77 | .replace('Grid2', 'Grid') 78 | 79 | output_ws = os.path.join( 80 | output_folder, os.path.dirname(output_path)) 81 | 82 | # Skip directories 83 | if not os.path.basename(output_path): 84 | continue 85 | 86 | # Only process the "Grid" (or "grid", "Grid1", "Grid2") folder 87 | if 'grid' not in os.path.dirname(output_path).lower(): 88 | continue 89 | 90 | # Build output directories 91 
| if not os.path.isdir(output_ws): 92 | os.makedirs(output_ws) 93 | 94 | # Extract 95 | logging.debug(' {}'.format(output_path)) 96 | source = zf.open(member) 97 | target = open(os.path.join(output_folder, output_path), "wb") 98 | with source, target: 99 | shutil.copyfileobj(source, target) 100 | else: 101 | logging.info('\nLANDFIRE zip file not present') 102 | 103 | 104 | def arg_parse(): 105 | """Base all default folders from script location 106 | scripts: ./pymetric/tools/download 107 | tools: ./pymetric/tools 108 | output: ./pymetric/landfire 109 | """ 110 | script_folder = sys.path[0] 111 | code_folder = os.path.dirname(script_folder) 112 | project_folder = os.path.dirname(code_folder) 113 | output_folder = os.path.join(project_folder, 'landfire') 114 | 115 | parser = argparse.ArgumentParser( 116 | description='Download LANDFIRE veg. type', 117 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 118 | parser.add_argument( 119 | '-v', '--version', metavar='VERSION', default='140', 120 | choices=['105', '110', '120', '130', '140'], 121 | help='Version (105, 110, 120, 130, or 140)') 122 | parser.add_argument( 123 | '--output', help='Output folder', metavar='FOLDER', 124 | default=output_folder) 125 | parser.add_argument( 126 | '-o', '--overwrite', default=None, action="store_true", 127 | help='Force overwrite of existing files') 128 | parser.add_argument( 129 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 130 | help='Debug level logging', action="store_const", dest="loglevel") 131 | args = parser.parse_args() 132 | 133 | # Convert output folder to an absolute path 134 | if args.output and os.path.isdir(os.path.abspath(args.output)): 135 | args.output = os.path.abspath(args.output) 136 | 137 | return args 138 | 139 | 140 | if __name__ == '__main__': 141 | args = arg_parse() 142 | 143 | logging.basicConfig(level=args.loglevel, format='%(message)s') 144 | logging.info('\n{}'.format('#' * 80)) 145 | log_f = '{:<20s} {}' 146 | logging.info(log_f.format('Run Time Stamp:', dt.datetime.now().isoformat(' '))) 147 | logging.info(log_f.format('Script:', os.path.basename(sys.argv[0]))) 148 | 149 | main(output_folder=args.output, version=args.version, 150 | overwrite_flag=args.overwrite) 151 | -------------------------------------------------------------------------------- /tools/download/download_landsat.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: download_landsat.py 3 | # Purpose: Download Landsat tar.gz files 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import re 11 | import requests 12 | import shutil 13 | import sys 14 | import tarfile 15 | 16 | import _utils 17 | 18 | 19 | def main(scene_list_path, output_folder, start_dt=None, end_dt=None, 20 | overwrite_flag=False): 21 | """Download Landsat tar.gz files 22 | 23 | Parameters 24 | ---------- 25 | scene_list_path : str 26 | Landsat scene keep list file path. 27 | output_folder : str 28 | Folder path where files will be saved. 29 | start_dt : datetime, optional 30 | Start date. 31 | end_dt : datetime, optional 32 | End date. 33 | overwrite_flag : bool, optional 34 | If True, overwrite existing files (the default is False). 
35 | 
36 |     Returns
37 |     -------
38 |     None
39 | 
40 |     """
41 | 
42 |     base_url = 'http://storage.googleapis.com/gcp-public-data-landsat'
43 |     url_fmt = '{url}/{sensor}/{collection}/{path}/{row}/{id}/{file}'
44 | 
45 |     # Landsat Collection 1 Product ID
46 |     landsat_re = re.compile(
47 |         r'^(?P<sensor>LT04|LT05|LE07|LC08)_(?P<type>\w{4})_'
48 |         r'(?P<path>\d{3})(?P<row>\d{3})_(?P<date>\d{8})_(?:\w{8})_'
49 |         r'(?P<number>\d{2})_(?P<category>\w{2})$')
50 |     pre_c1_re = re.compile(r'^(LT04|LT05|LE07|LC08)_\d{6}_\d{8}$')
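    # For reference, a Collection 1 product ID decomposes as follows
    # (example ID taken from the error message below):
    #   LE07_L1TP_043030_20150101_20160905_01_T1
    #   sensor_type_pathrow_acquired_processed_collection_category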
51 | 
52 |     logging.info('\nReading Landsat product IDs from scene keep list file')
53 |     logging.info('  {}\n'.format(scene_list_path))
54 |     if not os.path.isfile(scene_list_path):
55 |         logging.error('\nLandsat keep list file does not exist, exiting')
56 |         return False
57 |     with open(scene_list_path) as input_f:
58 |         image_id_list = input_f.readlines()
59 |     image_id_list = [image_id.strip() for image_id in image_id_list
60 |                      if landsat_re.match(image_id.strip())]
61 | 
62 |     # Apply start/end date filters
63 |     if start_dt:
64 |         logging.debug('Start date: {}'.format(start_dt.strftime('%Y-%m-%d')))
65 |         image_id_list = [id for id in image_id_list
66 |                          if id[17:25] >= start_dt.strftime('%Y%m%d')]
67 |     if end_dt:
68 |         logging.debug('End date: {}'.format(end_dt.strftime('%Y-%m-%d')))
69 |         image_id_list = [id for id in image_id_list
70 |                          if id[17:25] <= end_dt.strftime('%Y%m%d')]
71 |     logging.debug('\nKeep List: {}\n'.format(', '.join(image_id_list)))
72 | 
73 |     bands = {
74 |         'LT04': ['B1.TIF', 'B2.TIF', 'B3.TIF', 'B4.TIF', 'B5.TIF',
75 |                  'B6.TIF', 'B7.TIF', 'BQA.TIF', 'MTL.txt'],
76 |         'LT05': ['B1.TIF', 'B2.TIF', 'B3.TIF', 'B4.TIF', 'B5.TIF',
77 |                  'B6.TIF', 'B7.TIF', 'BQA.TIF', 'MTL.txt'],
78 |         'LE07': ['B1.TIF', 'B2.TIF', 'B3.TIF', 'B4.TIF', 'B5.TIF',
79 |                  'B6_VCID_1.TIF', 'B6_VCID_2.TIF', 'B7.TIF', 'B8.TIF',
80 |                  'BQA.TIF', 'MTL.txt'],
81 |         'LC08': ['B1.TIF', 'B2.TIF', 'B3.TIF', 'B4.TIF', 'B5.TIF',
82 |                  'B6.TIF', 'B7.TIF', 'B8.TIF', 'B9.TIF', 'B10.TIF', 'B11.TIF',
83 |                  'BQA.TIF', 'MTL.txt'],
84 |     }
85 | 
86 |     for image_id in image_id_list:
87 |         logging.info(image_id)
88 | 
89 |         id_match = landsat_re.match(image_id)
90 |         if not id_match and pre_c1_re.match(image_id):
91 |             logging.error(
92 |                 '\nThe scene list does not appear to contain LANDSAT_PRODUCT_IDs'
93 |                 ' (i.e. LE07_L1TP_043030_20150101_20160905_01_T1)'
94 |                 '  Exiting')
95 |             return False
96 | 
97 |         sensor, type, path, row, date, number, category = id_match.groups()
98 |         # print(sensor, type, path, row, date, number, category)
99 | 
100 |         year_folder = os.path.join(
101 |             output_folder, '{:03d}'.format(int(path)),
102 |             '{:03d}'.format(int(row)), date[:4])
103 |         product_folder = os.path.join(year_folder, image_id)
104 |         if not os.path.isdir(product_folder):
105 |             os.makedirs(product_folder)
106 | 
107 |         for band in bands[image_id[:4]]:
108 |             logging.debug('  Band {}'.format(band))
109 |             file_name = '{}_{}'.format(image_id, band)
110 |             file_url = url_fmt.format(
111 |                 url=base_url, sensor=sensor, collection=number, path=path,
112 |                 row=row, id=image_id, file=file_name)
113 |             file_path = os.path.join(
114 |                 output_folder, '{:03d}'.format(int(path)),
115 |                 '{:03d}'.format(int(row)), date[:4], image_id, file_name)
116 |             # logging.info('  {}'.format(image_name))
117 |             logging.debug('  {}'.format(file_url))
118 |             logging.debug('  {}'.format(file_path))
119 | 
120 |             if overwrite_flag or not os.path.isfile(file_path):
121 |                 _fetch_image(file_url, file_path)
122 | 
123 |         output_path = os.path.join(year_folder, image_id + '.tar.gz')
124 |         if ((overwrite_flag or not os.path.isfile(output_path)) and
125 |                 os.path.isdir(product_folder)):
126 |             logging.info('  Zipping')
127 |             logging.debug('  {}'.format(output_path))
128 |             with tarfile.open(output_path, "w:gz") as tar:
129 |                 tar.add(product_folder, arcname='.')
130 | 
131 |         if os.path.isdir(product_folder) and os.path.isfile(output_path):
132 |             shutil.rmtree(product_folder)
133 | 
134 | 
135 | # Copied from Landsat578
136 | class BadRequestsResponse(Exception):
137 |     pass
138 | 
139 | def _fetch_image(url, destination_path):
140 |     try:
141 |         response = requests.get(url, stream=True)
142 |         if response.status_code == 200:
143 |             with open(destination_path, 'wb') as f:
144 |                 for chunk in response.iter_content(chunk_size=1024 * 1024 * 8):
145 |                     f.write(chunk)
146 |         elif response.status_code > 399:
147 |             logging.info('  Code {} on {}'.format(response.status_code, url))
148 |             raise BadRequestsResponse()
149 |     except BadRequestsResponse:
150 |         pass
151 | 
152 | 
153 | def arg_parse():
154 |     """Base all default folders from script location
155 |         scripts: ./pymetric/tools/download
156 |         tools:   ./pymetric/tools
157 |         output:  ./pymetric/landsat
158 |     """
159 |     script_folder = sys.path[0]
160 |     code_folder = os.path.dirname(script_folder)
161 |     project_folder = os.path.dirname(code_folder)
162 |     output_folder = os.path.join(project_folder, 'landsat')
163 | 
164 |     parser = argparse.ArgumentParser(
165 |         description='Download Landsat',
166 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
167 |     parser.add_argument('scene_list', help='Landsat scene keep list path')
168 |     parser.add_argument(
169 |         '--start', default=None, type=_utils.valid_date, metavar='YYYY-MM-DD',
170 |         help='Start date')
171 |     parser.add_argument(
172 |         '--end', default=None, type=_utils.valid_date, metavar='YYYY-MM-DD',
173 |         help='End date')
174 |     parser.add_argument(
175 |         '--output', default=output_folder, metavar='FOLDER',
176 |         help='Output folder')
177 |     parser.add_argument(
178 |         '-o', '--overwrite', default=None, action="store_true",
179 |         help='Force overwrite of existing files')
180 |     parser.add_argument(
181 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
182 |         help='Debug level logging', action="store_const", dest="loglevel")
183 |     args = parser.parse_args()
184 | 
185
| # Convert relative paths to absolute paths 186 | if args.output and os.path.isdir(os.path.abspath(args.output)): 187 | args.output = os.path.abspath(args.output) 188 | if args.scene_list and os.path.isfile(os.path.abspath(args.scene_list)): 189 | args.scene_list = os.path.abspath(args.scene_list) 190 | 191 | return args 192 | 193 | 194 | if __name__ == '__main__': 195 | args = arg_parse() 196 | 197 | logging.basicConfig(level=args.loglevel, format='%(message)s') 198 | 199 | logging.info('\n{}'.format('#' * 80)) 200 | log_f = '{:<20s} {}' 201 | logging.info(log_f.format('Run Time Stamp:', dt.datetime.now().isoformat(' '))) 202 | logging.info(log_f.format('Script:', os.path.basename(sys.argv[0]))) 203 | 204 | main(scene_list_path=args.scene_list, output_folder=args.output, 205 | start_dt=args.start, end_dt=args.end, overwrite_flag=args.overwrite) 206 | -------------------------------------------------------------------------------- /tools/download/download_ned.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: download_ned.py 3 | # Purpose: Download NED tiles 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import re 11 | import sys 12 | import zipfile 13 | 14 | import drigo 15 | from osgeo import ogr 16 | 17 | import _utils as utils 18 | 19 | 20 | def main(extent_path, output_folder, overwrite_flag=False): 21 | """Download NED tiles that intersect the study_area 22 | 23 | Parameters 24 | ---------- 25 | extent_path : str 26 | File path to study area shapefile. 27 | output_folder : str 28 | Folder path where files will be saved. 29 | overwrite_flag : bool, optional 30 | If True, overwrite existing files (the default is False). 31 | 32 | Returns 33 | ------- 34 | None 35 | 36 | Notes 37 | ----- 38 | Script assumes DEM data is in 1x1 WGS84 degree tiles. 39 | Download 10m (1/3 arc-second) or 30m (1 arc-second) versions from: 40 | 10m: rockyftp.cr.usgs.gov/vdelivery/Datasets/Staged/Elevation/13/IMG 41 | 30m: rockyftp.cr.usgs.gov/vdelivery/Datasets/Staged/Elevation/1/IMG 42 | For this example, only download 30m DEM. 
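    Tiles are named by the latitude/longitude of their northwest corner,
    e.g. the tile covering 43-44N / 119-120W is "n44w120" (this matches the
    key format built in the code below).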
43 | 
44 |     """
45 |     logging.info('\nDownload NED tiles')
46 |     # site_url = 'rockyftp.cr.usgs.gov'
47 |     site_url = 'https://prd-tnm.s3.amazonaws.com'
48 | 
49 |     # site_folder = 'vdelivery/Datasets/Staged/Elevation/1/IMG'
50 |     site_folder = 'StagedProducts/Elevation/1/IMG'
51 | 
52 |     # This path is what must be queried to list the links
53 |     site_file_list_path = 'https://prd-tnm.s3.amazonaws.com/index.html?prefix=StagedProducts/Elevation/1/IMG/'
54 | 
55 | 
56 |     # Use 1 degree snap point and "cellsize" to get 1x1 degree tiles
57 |     tile_osr = drigo.epsg_osr(4326)
58 |     tile_x, tile_y, tile_cs = 0, 0, 1
59 | 
60 |     buffer_cells = 0
61 | 
62 |     # Error checking
63 |     if not os.path.isfile(extent_path):
64 |         logging.error('\nERROR: The input_path does not exist\n')
65 |         return False
66 |     if not os.path.isdir(output_folder):
67 |         os.makedirs(output_folder)
68 | 
69 |     # Check that input is a shapefile
70 | 
71 |     # Get the extent of each feature
72 |     logging.debug('  Reading extents')
73 |     lat_lon_list = []
74 |     shp_driver = ogr.GetDriverByName('ESRI Shapefile')
75 |     input_ds = shp_driver.Open(extent_path, 1)
76 |     input_osr = drigo.feature_ds_osr(input_ds)
77 |     input_layer = input_ds.GetLayer()
78 |     input_ftr = input_layer.GetNextFeature()
79 |     while input_ftr:
80 |         input_geom = input_ftr.GetGeometryRef()
81 |         input_extent = drigo.Extent(input_geom.GetEnvelope())
82 |         input_extent = input_extent.ogrenv_swap()
83 |         input_ftr = input_layer.GetNextFeature()
84 |         logging.debug('Input Extent: {}'.format(input_extent))
85 | 
86 |         # Project study area extent to input raster coordinate system
87 |         output_extent = drigo.project_extent(
88 |             input_extent, input_osr, tile_osr)
89 |         logging.debug('Output Extent: {}'.format(output_extent))
90 | 
91 |         # Extent needed to select 1x1 degree tiles
92 |         tile_extent = output_extent.copy()
93 |         tile_extent.adjust_to_snap(
94 |             'EXPAND', tile_x, tile_y, tile_cs)
95 |         logging.debug('Tile Extent: {}'.format(tile_extent))
96 | 
97 |         # Get list of available tiles that intersect the extent
98 |         lat_lon_list.extend([
99 |             (lat, -lon)
100 |             for lon in range(int(tile_extent.xmin), int(tile_extent.xmax))
101 |             for lat in range(int(tile_extent.ymax), int(tile_extent.ymin), -1)])
102 |     lat_lon_list = sorted(list(set(lat_lon_list)))
103 | 
104 |     # Retrieve a list of files available on the site (keyed by lat/lon)
105 |     logging.debug('  Retrieving NED tile list from server')
106 |     zip_files = {
107 |         m.group(1): x.split('/')[-1]
108 |         for x in utils.html_link_list(site_file_list_path)
109 |         for m in [re.search(r'[\w]*(n\d{2}w\d{3})[\w]*\.zip', x)] if m}
110 |     # logging.debug(zip_files[:10])
111 | 
112 |     # Attempt to download the tiles
113 |     logging.debug('\nDownloading tiles')
114 |     logging.info('')
115 |     for lat_lon in lat_lon_list:
116 |         logging.info('Tile: {}'.format(lat_lon))
117 |         lat_lon_key = 'n{:02d}w{:03d}'.format(*lat_lon)
118 | 
119 |         try:
120 |             zip_name = zip_files[lat_lon_key]
121 |         except KeyError:
122 |             logging.exception(
123 |                 'Error finding zip file for {}, skipping tile'.format(lat_lon))
124 |             continue
125 |         zip_url = '/'.join([site_url, site_folder, zip_name])
126 |         zip_path = os.path.join(output_folder, zip_name)
127 | 
128 |         tile_path = os.path.join(output_folder, '{}.img'.format(lat_lon_key))
129 | 
130 |         logging.debug('  {}'.format(zip_url))
131 |         logging.debug('  {}'.format(zip_path))
132 |         logging.debug('  {}'.format(tile_path))
133 |         if os.path.isfile(tile_path):
134 |             if not overwrite_flag:
135 |                 logging.debug('  tile already exists, skipping')
136 |                 continue
137 |             else:
138 |
139 |                 os.remove(tile_path)
140 | 
141 |         utils.url_download(zip_url, zip_path)
142 | 
143 |         logging.debug('  Extracting')
144 |         try:
145 |             zip_f = zipfile.ZipFile(zip_path)
146 |             img_name = [x for x in zip_f.namelist()
147 |                         if re.search(r'[\w]*(n\d{2}w\d{3})[\w]*\.img$', x)][0]
148 |             img_path = os.path.join(output_folder, img_name)
149 |             zip_f.extract(img_name, output_folder)
150 |             zip_f.close()
151 |             os.rename(img_path, tile_path)
152 |         except Exception as e:
153 |             logging.info('  Unhandled exception: {}'.format(e))
154 | 
155 |         try:
156 |             os.remove(zip_path)
157 |         except Exception as e:
158 |             logging.info('  Unhandled exception: {}'.format(e))
159 | 
160 | 
161 | def arg_parse():
162 |     """Base all default folders from script location
163 |         scripts: ./pymetric/tools/download
164 |         tools: ./pymetric/tools
165 |         output: ./pymetric/dem
166 |     """
167 |     script_folder = sys.path[0]
168 |     code_folder = os.path.dirname(script_folder)
169 |     project_folder = os.path.dirname(code_folder)
170 |     output_folder = os.path.join(project_folder, 'dem', 'tiles')
171 | 
172 |     parser = argparse.ArgumentParser(
173 |         description='Download NED',
174 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
175 |     parser.add_argument(
176 |         '--extent', required=True, metavar='FILE',
177 |         help='Study area shapefile')
178 |     parser.add_argument(
179 |         '--output', default=output_folder, metavar='FOLDER',
180 |         help='Output folder')
181 |     parser.add_argument(
182 |         '-o', '--overwrite', default=None, action="store_true",
183 |         help='Force overwrite of existing files')
184 |     parser.add_argument(
185 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
186 |         help='Debug level logging', action="store_const", dest="loglevel")
187 |     args = parser.parse_args()
188 | 
189 |     # Convert relative paths to absolute paths
190 |     if args.extent and os.path.isfile(os.path.abspath(args.extent)):
191 |         args.extent = os.path.abspath(args.extent)
192 |     if args.output and os.path.isdir(os.path.abspath(args.output)):
193 |         args.output = os.path.abspath(args.output)
194 | 
195 |     return args
196 | 
197 | 
198 | if __name__ == '__main__':
199 |     args = arg_parse()
200 | 
201 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
202 |     logging.info('\n{}'.format('#' * 80))
203 |     log_f = '{:<20s} {}'
204 |     logging.info(log_f.format(
205 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
206 |     logging.info(log_f.format('Script:', os.path.basename(sys.argv[0])))
207 | 
208 |     main(extent_path=args.extent, output_folder=args.output,
209 |          overwrite_flag=args.overwrite)
210 | 
--------------------------------------------------------------------------------
/tools/download/download_nlcd.py:
--------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: download_nlcd.py
3 | # Purpose: Download NLCD raster
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import sys
11 | import zipfile
12 | 
13 | import _utils
14 | 
15 | 
16 | def main(output_folder, year='2011', overwrite_flag=False):
17 |     """Download NLCD raster
18 | 
19 |     Parameters
20 |     ----------
21 |     output_folder : str
22 |         Folder path where files will be saved.
23 |     year : {2001, 2006, 2011}, optional
24 |         NLCD year (the default is 2011).
25 |     overwrite_flag : bool, optional
26 |         If True, overwrite existing files (the default is False).
27 | 28 | Returns 29 | ------- 30 | None 31 | 32 | """ 33 | 34 | download_url = ( 35 | 'https://prd-tnm.s3.amazonaws.com/StagedProducts/NLCD2011/Land_Cover/' 36 | 'CONUS/nlcd_{}_landcover_2011_edition_2014_10_10.zip').format(year) 37 | # download_url = ( 38 | # 'http://www.landfire.gov/bulk/downloadfile.php?' 39 | # 'TYPE=nlcd{0}&FNAME=nlcd_{0}_landcover_2011_edition_2014_10_10.zip').format(year) 40 | # download_url = ( 41 | # 'http://gisdata.usgs.gov/TDDS/DownloadFile.php?' 42 | # 'TYPE=nlcd{0}&FNAME=nlcd_{0}_landcover_2011_edition_2014_10_10.zip').format(year) 43 | 44 | zip_name = 'nlcd_{}_landcover_2011_edition_2014_10_10.zip'.format(year) 45 | zip_path = os.path.join(output_folder, zip_name) 46 | 47 | output_name = zip_name.replace('.zip', '.img') 48 | # output_path = os.path.join(output_folder, output_name) 49 | output_path = os.path.join( 50 | output_folder, os.path.splitext(zip_name)[0], output_name) 51 | 52 | if not os.path.isdir(output_folder): 53 | os.makedirs(output_folder) 54 | 55 | if ((not os.path.isfile(zip_path) and not os.path.isfile(output_path)) or 56 | overwrite_flag): 57 | logging.info('\nDownloading NLCD') 58 | logging.info(' {}'.format(download_url)) 59 | logging.info(' {}'.format(zip_path)) 60 | _utils.url_download(download_url, zip_path) 61 | else: 62 | logging.info('\nNLCD raster already downloaded') 63 | 64 | if ((overwrite_flag or not os.path.isfile(output_path)) and 65 | os.path.isfile(zip_path)): 66 | logging.info('\nExtracting NLCD files') 67 | logging.debug(' {}'.format(output_path)) 68 | with zipfile.ZipFile(zip_path) as zf: 69 | zf.extractall(output_folder) 70 | else: 71 | logging.info('\nNLCD raster already extracted') 72 | 73 | 74 | def arg_parse(): 75 | """Base all default folders from script location 76 | scripts: ./pymetric/tools/download 77 | tools: ./pymetric/tools 78 | output: ./pymetric/nlcd 79 | """ 80 | script_folder = sys.path[0] 81 | code_folder = os.path.dirname(script_folder) 82 | project_folder = os.path.dirname(code_folder) 83 | output_folder = os.path.join(project_folder, 'nlcd') 84 | 85 | parser = argparse.ArgumentParser( 86 | description='Download NLCD', 87 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 88 | parser.add_argument( 89 | '--output', default=output_folder, metavar='FOLDER', 90 | help='Output folder') 91 | parser.add_argument( 92 | '-y', '--year', metavar='YEAR', default='2011', 93 | choices=['2001', '2006', '2011'], 94 | help='NLCD Year (2001, 2006, or 2011)') 95 | parser.add_argument( 96 | '-o', '--overwrite', default=None, action="store_true", 97 | help='Force overwrite of existing files') 98 | parser.add_argument( 99 | '-d', '--debug', default=logging.INFO, const=logging.DEBUG, 100 | help='Debug level logging', action="store_const", dest="loglevel") 101 | args = parser.parse_args() 102 | 103 | # Convert relative paths to absolute paths 104 | if args.output and os.path.isdir(os.path.abspath(args.output)): 105 | args.output = os.path.abspath(args.output) 106 | 107 | return args 108 | 109 | 110 | if __name__ == '__main__': 111 | args = arg_parse() 112 | 113 | logging.basicConfig(level=args.loglevel, format='%(message)s') 114 | logging.info('\n{}'.format('#' * 80)) 115 | log_f = '{:<20s} {}' 116 | logging.info(log_f.format( 117 | 'Run Time Stamp:', dt.datetime.now().isoformat(' '))) 118 | logging.info(log_f.format('Script:', os.path.basename(sys.argv[0]))) 119 | 120 | main(output_folder=args.output, year=args.year, 121 | overwrite_flag=args.overwrite) 122 | 
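The download-then-extract pattern in download_nlcd.py above recurs across these download tools. A minimal stand-alone sketch of the same idiom (it assumes only that `_utils.url_download(url, path)` behaves as shown later in this document; `fetch_and_extract` is a hypothetical name, not repo code):

```python
import os
import zipfile

from _utils import url_download  # requests-based helper used by these tools


def fetch_and_extract(url, zip_path, out_folder, overwrite=False):
    """Download a zip if it is missing (or overwrite is set), then extract it."""
    if overwrite or not os.path.isfile(zip_path):
        url_download(url, zip_path)
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(out_folder)
```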
--------------------------------------------------------------------------------
/tools/download/download_soils.py:
--------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: download_soils.py
3 | # Purpose: Download soil AWC raster
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import sys
11 | 
12 | import _utils
13 | 
14 | 
15 | def main(output_folder, overwrite_flag=False):
16 |     """Download soil Available Water Capacity (AWC) raster
17 | 
18 |     Parameters
19 |     ----------
20 |     output_folder : str
21 |         Folder path where files will be saved.
22 |     overwrite_flag : bool, optional
23 |         If True, overwrite existing files (the default is False).
24 | 
25 |     Returns
26 |     -------
27 |     None
28 | 
29 |     """
30 |     # Composite SSURGO/STATSGO
31 |     download_url = 'https://storage.googleapis.com/openet/ssurgo/AWC_WTA_0to10cm_composite.tif'
32 | 
33 |     # STATSGO Only
34 |     # download_url = 'https://storage.googleapis.com/openet/statsgo/AWC_WTA_0to10cm_statsgo.tif'
35 | 
36 |     output_name = download_url.split('/')[-1]
37 |     output_path = os.path.join(output_folder, output_name)
38 |     if not os.path.isdir(output_folder):
39 |         os.makedirs(output_folder)
40 | 
41 |     if not os.path.isfile(output_path) or overwrite_flag:
42 |         logging.info('\nDownloading AWC')
43 |         logging.info('  {}'.format(download_url))
44 |         logging.info('  {}'.format(output_path))
45 |         _utils.url_download(download_url, output_path)
46 |     else:
47 |         logging.info('\nAWC raster already downloaded')
48 | 
49 | 
50 | def arg_parse():
51 |     """Base all default folders from script location
52 |         scripts: ./pymetric/tools/download
53 |         tools: ./pymetric/tools
54 |         output: ./pymetric/soils
55 |     """
56 |     script_folder = sys.path[0]
57 |     code_folder = os.path.dirname(script_folder)
58 |     project_folder = os.path.dirname(code_folder)
59 |     output_folder = os.path.join(project_folder, 'soils')
60 | 
61 |     parser = argparse.ArgumentParser(
62 |         description='Download Soil Available Water Capacity (AWC)',
63 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
64 |     parser.add_argument(
65 |         '--output', default=output_folder, metavar='FOLDER',
66 |         help='Output folder')
67 |     parser.add_argument(
68 |         '-o', '--overwrite', default=None, action="store_true",
69 |         help='Force overwrite of existing files')
70 |     parser.add_argument(
71 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
72 |         help='Debug level logging', action="store_const", dest="loglevel")
73 |     args = parser.parse_args()
74 | 
75 |     # Convert relative paths to absolute paths
76 |     if args.output and os.path.isdir(os.path.abspath(args.output)):
77 |         args.output = os.path.abspath(args.output)
78 | 
79 |     return args
80 | 
81 | 
82 | if __name__ == '__main__':
83 |     args = arg_parse()
84 | 
85 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
86 |     logging.info('\n{}'.format('#' * 80))
87 |     log_f = '{:<20s} {}'
88 |     logging.info(log_f.format(
89 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
90 |     logging.info(log_f.format('Script:', os.path.basename(sys.argv[0])))
91 | 
92 |     main(output_folder=args.output, overwrite_flag=args.overwrite)
93 | 
--------------------------------------------------------------------------------
/tools/gridmet/README.md:
--------------------------------------------------------------------------------
1 | # GRIDMET
2 | 
3 | Scripts for downloading and preparing GRIDMET daily weather data. The scripts described below are run in order: ancillary rasters first, then the netCDF downloads, then the daily raster extractions.
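A sketch of the first two steps as direct `main()` calls (the keyword arguments match the argparse wiring shown later in this document; the folder paths are hypothetical, and the sketch assumes the scripts are importable from the working directory):

```python
import datetime as dt

import gridmet_ancillary
import gridmet_download

# Build the GRIDMET elevation/lat/lon rasters, then fetch one year of
# ETr and precipitation netCDF files
gridmet_ancillary.main(ancillary_ws='gridmet/ancillary')
gridmet_download.main(
    start_dt=dt.datetime(2017, 1, 1),
    end_dt=dt.datetime(2017, 12, 31),
    netcdf_ws='gridmet/netcdf',
    variables=['etr', 'pr'])
```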
4 | 
5 | ### gridmet_ancillary.py
6 | -------------
7 | Download and process the GRIDMET elevation, latitude, and longitude rasters
8 | 
9 | **-o** or **-\-overwrite (bool)**:
10 | : If True, overwrite existing files
11 | 
12 | **-d** or **-\-debug (bool)**:
13 | : If True, enable debug level logging
14 | 
15 | ### gridmet_download.py
16 | -------------
17 | Download the ".nc" files from the GRIDMET website. The default date range is 2017-01-01 to 2017-12-31.
18 | 
19 | ### gridmet_daily_refet.py
20 | -------------
21 | Calculate daily ETo and ETr from the GRIDMET inputs. ETo and ETr are saved as daily IMG rasters in separate folders.
22 | 
23 | ### gridmet_daily_ppt.py
24 | -------------
25 | Calculate daily precipitation from the GRIDMET inputs
26 | 
27 | ## Optional scripts for extracting other GRIDMET variables
28 | 
29 | ### gridmet_daily_temp.py
30 | -------------
31 | Calculate daily minimum/maximum air temperature from the GRIDMET inputs
32 | 
33 | ### gridmet_daily_ea.py
34 | -------------
35 | Calculate daily vapor pressure from the GRIDMET inputs
36 | 
37 | ### gridmet_daily_variables.py
38 | -------------
39 | Extract other daily GRIDMET variables (e.g. solar radiation, specific humidity, wind speed)
40 | 
--------------------------------------------------------------------------------
/tools/gridmet/_utils.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import datetime as dt
3 | import logging
4 | 
5 | import requests
6 | 
7 | 
8 | def date_range(start_date, end_date):
9 |     """Yield datetimes within a date range"""
10 |     for n in range(int((end_date - start_date).days)):
11 |         yield start_date + dt.timedelta(n)
12 | 
13 | 
14 | def url_download(download_url, output_path, verify=True):
15 |     """Download file from a URL using requests module"""
16 |     response = requests.get(download_url, stream=True, verify=verify)
17 |     if response.status_code != 200:
18 |         logging.error('  HTTPError: {}'.format(response.status_code))
19 |         return False
20 | 
21 |     logging.debug('  Beginning download')
22 |     with open(output_path, "wb") as output_f:
23 |         for chunk in response.iter_content(chunk_size=1024 * 1024):
24 |             if chunk:  # filter out keep-alive new chunks
25 |                 output_f.write(chunk)
26 |     logging.debug('  Download complete')
27 |     return True
28 | 
29 | 
30 | def valid_date(input_date):
31 |     """Check that a date string is ISO format (YYYY-MM-DD)
32 | 
33 |     This function is used to check the format of dates entered as command
34 |     line arguments.
35 |     DEADBEEF - It would probably make more sense to have this function
36 |     parse the date using dateutil parser (http://labix.org/python-dateutil)
37 |     and return the ISO format string.
38 | 39 | Parameters 40 | ---------- 41 | input_date : str 42 | 43 | Returns 44 | ------- 45 | datetime 46 | 47 | Raises 48 | ------ 49 | ArgParse ArgumentTypeError 50 | 51 | """ 52 | try: 53 | return dt.datetime.strptime(input_date, "%Y-%m-%d") 54 | # return dt.datetime.strptime(input_date, "%Y-%m-%d").date().isoformat() 55 | except ValueError: 56 | msg = "Not a valid date: '{}'.".format(input_date) 57 | raise argparse.ArgumentTypeError(msg) -------------------------------------------------------------------------------- /tools/gridmet/gridmet_ancillary.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: gridmet_ancillary.py 3 | # Purpose: Process GRIDMET ancillary data 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import sys 11 | 12 | import drigo 13 | import netCDF4 14 | import numpy as np 15 | from osgeo import osr 16 | 17 | import _utils 18 | 19 | 20 | def main(ancillary_ws=os.getcwd(), zero_elev_nodata_flag=False, 21 | overwrite_flag=False): 22 | """Process GRIDMET ancillary data 23 | 24 | Parameters 25 | ---------- 26 | ancillary_ws : str 27 | Folder of ancillary rasters. 28 | zero_elev_nodata_flag : bool, optional 29 | If True, set elevation nodata values to 0 (the default is False). 30 | overwrite_flag : bool, optional 31 | If True, overwrite existing files (the default is False). 32 | 33 | Returns 34 | ------- 35 | None 36 | 37 | """ 38 | logging.info('\nProcess GRIDMET ancillary rasters') 39 | 40 | # Site URL 41 | elev_url = 'https://climate.northwestknowledge.net/METDATA/data/metdata_elevationdata.nc' 42 | 43 | # Manually define the spatial reference and extent of the GRIDMET data 44 | # This could be read in from a raster 45 | gridmet_osr = osr.SpatialReference() 46 | # Assume GRIDMET data is in WGS84 not NAD83 (need to check with John) 47 | gridmet_osr.ImportFromEPSG(4326) 48 | # gridmet_osr.ImportFromEPSG(4326) 49 | gridmet_proj = drigo.osr_proj(gridmet_osr) 50 | gridmet_cs = 1. 
/ 24  # 0.041666666666666666
51 |     gridmet_x = -125 + gridmet_cs * 5
52 |     gridmet_y = 49 + gridmet_cs * 10
53 |     # gridmet_y = lon_array[0,0] - 0.5 * gridmet_cs
54 |     # gridmet_y = lat_array[0,0] + 0.5 * gridmet_cs
55 |     # gridmet_rows, gridmet_cols = elev_array.shape
56 |     gridmet_geo = (gridmet_x, gridmet_cs, 0., gridmet_y, 0., -gridmet_cs)
57 |     # gridmet_extent = drigo.geo_extent(
58 |     #     gridmet_geo, gridmet_rows, gridmet_cols)
59 |     # Keep track of the original/full geo-transform and extent
60 |     # gridmet_full_geo = (
61 |     #     gridmet_x, gridmet_cs, 0., gridmet_y, 0., -gridmet_cs)
62 |     # gridmet_full_extent = drigo.geo_extent(
63 |     #     gridmet_geo, gridmet_rows, gridmet_cols)
64 |     logging.debug('  X/Y: {} {}'.format(gridmet_x, gridmet_y))
65 |     logging.debug('  Geo: {}'.format(gridmet_geo))
66 |     logging.debug('  Cellsize: {}'.format(gridmet_cs))
67 | 
68 |     # Build output workspace if it doesn't exist
69 |     if not os.path.isdir(ancillary_ws):
70 |         os.makedirs(ancillary_ws)
71 | 
72 |     # Output paths
73 |     elev_nc = os.path.join(ancillary_ws, os.path.basename(elev_url))
74 |     elev_raster = os.path.join(ancillary_ws, 'gridmet_elev.img')
75 |     lat_raster = os.path.join(ancillary_ws, 'gridmet_lat.img')
76 |     lon_raster = os.path.join(ancillary_ws, 'gridmet_lon.img')
77 | 
78 |     # Compute DEM raster
79 |     if overwrite_flag or not os.path.isfile(elev_raster):
80 |         logging.info('\nGRIDMET DEM')
81 |         logging.info('  Downloading')
82 |         logging.debug('    {}'.format(elev_url))
83 |         logging.debug('    {}'.format(elev_nc))
84 |         _utils.url_download(elev_url, elev_nc)
85 | 
86 |         logging.info('  Extracting')
87 |         logging.debug('    {}'.format(elev_raster))
88 |         elev_nc_f = netCDF4.Dataset(elev_nc, 'r')
89 |         if len(elev_nc_f.variables['elevation'].shape) == 3:
90 |             elev_ma = elev_nc_f.variables['elevation'][0, :, :]
91 |         else:
92 |             elev_ma = elev_nc_f.variables['elevation'][:, :]
93 |         elev_array = elev_ma.data.astype(np.float32)
94 |         # elev_nodata = float(elev_ma.fill_value)
95 |         elev_array[
96 |             (elev_array == elev_ma.fill_value) |
97 |             (elev_array <= -300)] = np.nan
98 |         if zero_elev_nodata_flag:
99 |             elev_array[np.isnan(elev_array)] = 0
100 |         if np.all(np.isnan(elev_array)):
101 |             logging.error(
102 |                 '\nERROR: The elevation array is all nodata, exiting\n')
103 |             sys.exit()
104 |         drigo.array_to_raster(
105 |             elev_array, elev_raster,
106 |             output_geo=gridmet_geo, output_proj=gridmet_proj)
107 |         elev_nc_f.close()
108 |         # del elev_nc_f, elev_ma, elev_array, elev_nodata
109 |         del elev_nc_f, elev_ma, elev_array
110 |         os.remove(elev_nc)
111 | 
112 |     # Compute latitude/longitude rasters
113 |     if ((overwrite_flag or
114 |          not os.path.isfile(lat_raster) or
115 |          not os.path.isfile(lon_raster)) and
116 |             os.path.isfile(elev_raster)):
117 |         logging.info('\nGRIDMET Latitude/Longitude')
118 |         logging.debug('  {}'.format(lat_raster))
119 |         lat_array, lon_array = drigo.raster_lat_lon_func(elev_raster)
120 |         # Handle the conversion to radians in the other GRIDMET scripts
121 |         # lat_array *= (math.pi / 180)
122 |         drigo.array_to_raster(
123 |             lat_array, lat_raster, output_geo=gridmet_geo,
124 |             output_proj=gridmet_proj)
125 |         logging.debug('  {}'.format(lon_raster))
126 |         drigo.array_to_raster(
127 |             lon_array, lon_raster, output_geo=gridmet_geo,
128 |             output_proj=gridmet_proj)
129 |         del lat_array, lon_array
130 | 
131 |     logging.debug('\nScript Complete')
132 | 
133 | 
134 | def arg_parse():
135 |     """Base all default folders from script location
136 |         scripts: ./pymetric/tools/gridmet
137 |         tools: ./pymetric/tools
138 |         output: ./pymetric/gridmet
139 |     """
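An aside on the `gridmet_geo` tuple built near the top of `main()` above: it is a GDAL-style affine geotransform, `(x_ul, dx, row_rotation, y_ul, col_rotation, -dy)`. A small worked example (a sketch, independent of the script; `pixel_to_lon_lat` is a hypothetical helper):

```python
def pixel_to_lon_lat(geo, row, col):
    """Map an array index to coordinates with a GDAL-style geotransform."""
    lon = geo[0] + col * geo[1] + row * geo[2]
    lat = geo[3] + col * geo[4] + row * geo[5]
    return lon, lat

gridmet_cs = 1. / 24
gridmet_geo = (-125 + gridmet_cs * 5, gridmet_cs, 0.,
               49 + gridmet_cs * 10, 0., -gridmet_cs)
# Upper-left corner of the GRIDMET grid, approximately (-124.7917, 49.4167)
print(pixel_to_lon_lat(gridmet_geo, 0, 0))
```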
140 |     script_folder = sys.path[0]
141 |     code_folder = os.path.dirname(script_folder)
142 |     project_folder = os.path.dirname(code_folder)
143 |     gridmet_folder = os.path.join(project_folder, 'gridmet')
144 |     ancillary_folder = os.path.join(gridmet_folder, 'ancillary')
145 | 
146 |     parser = argparse.ArgumentParser(
147 |         description='Process GRIDMET ancillary data',
148 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
149 |     parser.add_argument(
150 |         '--ancillary', default=ancillary_folder, metavar='PATH',
151 |         help='Ancillary raster folder path')
152 |     parser.add_argument(
153 |         '--zero', default=False, action="store_true",
154 |         help='Set elevation nodata values to 0')
155 |     parser.add_argument(
156 |         '-o', '--overwrite', default=False, action="store_true",
157 |         help='Force overwrite of existing files')
158 |     parser.add_argument(
159 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
160 |         help='Debug level logging', action="store_const", dest="loglevel")
161 |     args = parser.parse_args()
162 | 
163 |     # Convert relative paths to absolute paths
164 |     if args.ancillary and os.path.isdir(os.path.abspath(args.ancillary)):
165 |         args.ancillary = os.path.abspath(args.ancillary)
166 | 
167 |     return args
168 | 
169 | 
170 | if __name__ == '__main__':
171 |     args = arg_parse()
172 | 
173 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
174 |     logging.info('\n{}'.format('#' * 80))
175 |     logging.info('{:<20s} {}'.format(
176 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
177 |     logging.info('{:<20s} {}'.format(
178 |         'Script:', os.path.basename(sys.argv[0])))
179 | 
180 |     main(ancillary_ws=args.ancillary, zero_elev_nodata_flag=args.zero,
181 |          overwrite_flag=args.overwrite)
182 | 
--------------------------------------------------------------------------------
/tools/gridmet/gridmet_download.py:
--------------------------------------------------------------------------------
1 | #--------------------------------
2 | # Name: gridmet_download.py
3 | # Purpose: Download GRIDMET data
4 | #--------------------------------
5 | 
6 | import argparse
7 | import datetime as dt
8 | import logging
9 | import os
10 | import sys
11 | 
12 | import _utils
13 | 
14 | 
15 | def main(start_dt, end_dt, netcdf_ws, variables=['etr', 'pr'],
16 |          overwrite_flag=False):
17 |     """Download GRIDMET netcdf files
18 | 
19 |     Parameters
20 |     ----------
21 |     start_dt : datetime
22 |         Start date.
23 |     end_dt : datetime
24 |         End date.
25 |     netcdf_ws : str
26 |         Folder of GRIDMET netcdf files.
27 |     variables : list, optional
28 |         GRIDMET variables to download (the default is ['etr', 'pr']).
29 |         Choices: 'etr', 'pet', 'pr', 'srad', 'sph', 'tmmn', 'tmmx', 'vs'
30 |     overwrite_flag : bool, optional
31 |         If True, overwrite existing files (the default is False).
32 | 
33 |     Returns
34 |     -------
35 |     None
36 | 
37 |     """
38 |     logging.info('Downloading GRIDMET data\n')
39 |     logging.debug('  Start date: {}'.format(start_dt))
40 |     logging.debug('  End date:   {}'.format(end_dt))
41 | 
42 |     site_url = 'https://www.northwestknowledge.net/metdata/data'
43 | 
44 |     # GRIDMET rasters to extract
45 |     data_full_list = ['etr', 'pet', 'pr', 'srad', 'sph', 'tmmn', 'tmmx', 'vs']
46 |     if not variables:
47 |         logging.error('\nERROR: variables parameter is empty\n')
48 |         sys.exit()
49 |     elif type(variables) is not list:
50 |         # DEADBEEF - I could try converting comma separated strings to lists?
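        # (Editor's sketch, left as comments so the script above is unchanged:
        #  the coercion suggested by the DEADBEEF note could look like
        #      elif isinstance(variables, str):
        #          variables = [v.strip() for v in variables.split(',')]
        #  placed ahead of this error branch.)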
51 |         logging.error('\nERROR: variables parameter must be a list\n')
52 |         sys.exit()
53 |     # elif 'all' in variables:
54 |     #     logging.error('Downloading all variables\n  {}'.format(
55 |     #         ','.join(data_full_list)))
56 |     #     data_list = data_full_list
57 |     # elif 'eto' in variables or 'etr' in variables:
58 |     #     data_etr_list = ['srad', 'sph', 'tmmn', 'tmmx', 'vs']
59 |     #     logging.error(
60 |     #         'Downloading all variables needed to compute ETr/ETo\n  {}'.format(
61 |     #             ','.join(data_etr_list)))
62 |     #     data_list = data_etr_list
63 |     elif not set(variables).issubset(set(data_full_list)):
64 |         logging.error('\nERROR: variables parameter is invalid\n  {}'.format(
65 |             variables))
66 |         sys.exit()
67 |     else:
68 |         data_list = variables
69 | 
70 |     # Build output workspace if it doesn't exist
71 |     if not os.path.isdir(netcdf_ws):
72 |         os.makedirs(netcdf_ws)
73 | 
74 |     # GRIDMET data is stored by year
75 |     year_list = sorted(list(set([
76 |         i_dt.year for i_dt in _utils.date_range(
77 |             start_dt, end_dt + dt.timedelta(1))])))
78 |     year_list = ['{:04d}'.format(y) for y in year_list]
79 | 
80 |     # Set data types to lower case for comparison
81 |     data_list = [d.lower() for d in data_list]
82 | 
83 |     # There is one netCDF file per variable per year
84 |     # Download each annual file for the requested variables
85 |     logging.info('')
86 |     for year_str in year_list:
87 |         logging.info(year_str)
88 | 
89 |         # Process each variable for the year
90 |         for data_str in data_list:
91 |             file_name = '{}_{}.nc'.format(data_str, year_str)
92 |             file_url = '{}/{}'.format(site_url, file_name)
93 |             save_path = os.path.join(netcdf_ws, file_name)
94 | 
95 |             logging.info('  {}'.format(file_name))
96 |             logging.debug('    {}'.format(file_url))
97 |             logging.debug('    {}'.format(save_path))
98 |             if os.path.isfile(save_path):
99 |                 if not overwrite_flag:
100 |                     logging.debug('    File already exists, skipping')
101 |                     continue
102 |                 else:
103 |                     logging.debug('    File already exists, removing existing')
104 |                     os.remove(save_path)
105 | 
106 |             _utils.url_download(file_url, save_path)
107 | 
108 |     logging.debug('\nScript Complete')
109 | 
110 | 
111 | def arg_parse():
112 |     """Base all default folders from script location
113 |         scripts: ./pymetric/tools/gridmet
114 |         tools: ./pymetric/tools
115 |         output: ./pymetric/gridmet
116 |     """
117 |     script_folder = sys.path[0]
118 |     code_folder = os.path.dirname(script_folder)
119 |     project_folder = os.path.dirname(code_folder)
120 |     gridmet_folder = os.path.join(project_folder, 'gridmet')
121 |     netcdf_folder = os.path.join(gridmet_folder, 'netcdf')
122 | 
123 |     parser = argparse.ArgumentParser(
124 |         description='Download daily GRIDMET data',
125 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
126 |     parser.add_argument(
127 |         '--start', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
128 |         help='Start date')
129 |     parser.add_argument(
130 |         '--end', required=True, type=_utils.valid_date, metavar='YYYY-MM-DD',
131 |         help='End date')
132 |     parser.add_argument(
133 |         '--netcdf', default=netcdf_folder, metavar='PATH',
134 |         help='Output netCDF folder path')
135 |     parser.add_argument(
136 |         '--vars', default=['etr', 'pr'], nargs='+',
137 |         choices=['etr', 'pet', 'pr', 'srad', 'sph', 'tmmn', 'tmmx', 'vs'],
138 |         help='GRIDMET variables to download')
139 |     parser.add_argument(
140 |         '-o', '--overwrite', default=False, action="store_true",
141 |         help='Force overwrite of existing files')
142 |     parser.add_argument(
143 |         '-d', '--debug', default=logging.INFO, const=logging.DEBUG,
144 |         help='Debug level logging', action="store_const", dest="loglevel")
145 |     args = parser.parse_args()
146 | 
147 |     # Convert relative paths to absolute paths
148 |     if args.netcdf and os.path.isdir(os.path.abspath(args.netcdf)):
149 |         args.netcdf = os.path.abspath(args.netcdf)
150 | 
151 |     return args
152 | 
153 | 
154 | if __name__ == '__main__':
155 |     args = arg_parse()
156 | 
157 |     logging.basicConfig(level=args.loglevel, format='%(message)s')
158 |     logging.info('\n{}'.format('#' * 80))
159 |     logging.info('{:<20s} {}'.format(
160 |         'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
161 |     logging.info('{:<20s} {}'.format(
162 |         'Script:', os.path.basename(sys.argv[0])))
163 | 
164 |     main(start_dt=args.start, end_dt=args.end, netcdf_ws=args.netcdf,
165 |          variables=args.vars, overwrite_flag=args.overwrite)
166 | 
--------------------------------------------------------------------------------
/tools/nldas/README.md:
--------------------------------------------------------------------------------
1 | # NLDAS
2 | 
3 | Scripts for downloading and preparing NLDAS hourly weather data.
4 | Hourly vapor pressure data is used to generate the Tasumi at-surface reflectance data.
5 | Hourly wind speed and ASCE standardized reference ET (ETr) are used in METRIC to estimate ET.
6 | 
7 | nldas_ancillary.py - Download and process the NLDAS mask, elevation,
8 |     latitude, and longitude rasters
9 | nldas_download.py - Download the ".grb" files from the NLDAS website
10 |     The default date range is 2017-01-01 to 2017-12-31
11 | nldas_hourly_ea.py - Calculate hourly vapor pressure from the NLDAS inputs
12 | nldas_hourly_wind.py - Calculate hourly wind speed from the NLDAS inputs
13 | nldas_hourly_refet.py - Calculate hourly ETo and ETr from the NLDAS inputs
14 |     ETo and ETr are saved as hourly IMG rasters in separate folders
15 | 
16 | nldas_hourly_variable.py - Extract NLDAS variable(s)
17 |     (not currently supported)
18 | 
--------------------------------------------------------------------------------
/tools/nldas/_utils.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import datetime as dt
3 | import logging
4 | 
5 | import requests
6 | 
7 | 
8 | def date_range(start_date, end_date):
9 |     """Yield datetimes within a date range"""
10 |     for n in range(int((end_date - start_date).days)):
11 |         yield start_date + dt.timedelta(n)
12 | 
13 | 
14 | def parse_int_set(nputstr=""):
15 |     """Return a set of numbers given a string of ranges
16 | 
17 |     http://thoughtsbyclayg.blogspot.com/2008/10/parsing-list-of-numbers-in-python.html
18 |     """
19 |     selection = set()
20 |     invalid = set()
21 |     # tokens are comma separated values
22 |     tokens = [x.strip() for x in nputstr.split(',')]
23 |     for i in tokens:
24 |         try:
25 |             # typically tokens are plain old integers
26 |             selection.add(int(i))
27 |         except ValueError:
28 |             # if not, then it might be a range
29 |             try:
30 |                 token = [int(k.strip()) for k in i.split('-')]
31 |                 if len(token) > 1:
32 |                     token.sort()
33 |                     # we have items separated by a dash
34 |                     # try to build a valid range
35 |                     first = token[0]
36 |                     last = token[len(token)-1]
37 |                     for x in range(first, last+1):
38 |                         selection.add(x)
39 |             except ValueError:
40 |                 # not an int and not a range...
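                # (Illustrative note, as comments so the function above is
                #  unchanged: parse_int_set('1, 3, 5-7, a') returns
                #  {1, 3, 5, 6, 7}, with 'a' collected in the invalid set
                #  below.)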
41 | invalid.add(i) 42 | # Report invalid tokens before returning valid selection 43 | # print "Invalid set: " + str(invalid) 44 | return selection 45 | 46 | 47 | def url_download(download_url, output_path, verify=True): 48 | """Download file from a URL using requests module""" 49 | response = requests.get(download_url, stream=True, verify=verify) 50 | if response.status_code != 200: 51 | logging.error(' HTTPError: {}'.format(response.status_code)) 52 | return False 53 | 54 | logging.debug(' Beginning download') 55 | with (open(output_path, "wb")) as output_f: 56 | for chunk in response.iter_content(chunk_size=1024 * 1024): 57 | if chunk: # filter out keep-alive new chunks 58 | output_f.write(chunk) 59 | logging.debug(' Download complete') 60 | return True 61 | 62 | 63 | def valid_date(input_date): 64 | """Check that a date string is ISO format (YYYY-MM-DD) 65 | 66 | This function is used to check the format of dates entered as command 67 | line arguments. 68 | DEADBEEF - It would probably make more sense to have this function 69 | parse the date using dateutil parser (http://labix.org/python-dateutil) 70 | and return the ISO format string. 71 | 72 | Parameters 73 | ---------- 74 | input_date : str 75 | 76 | Returns 77 | ------- 78 | datetime 79 | 80 | Raises 81 | ------ 82 | ArgParse ArgumentTypeError 83 | 84 | """ 85 | try: 86 | return dt.datetime.strptime(input_date, "%Y-%m-%d") 87 | # return dt.datetime.strptime(input_date, "%Y-%m-%d").date().isoformat() 88 | except ValueError: 89 | msg = "Not a valid date: '{}'.".format(input_date) 90 | raise argparse.ArgumentTypeError(msg) -------------------------------------------------------------------------------- /tools/nldas/nldas_ancillary.py: -------------------------------------------------------------------------------- 1 | #-------------------------------- 2 | # Name: nldas_ancillary.py 3 | # Purpose: Process NLDAS ancillary data 4 | #-------------------------------- 5 | 6 | import argparse 7 | import datetime as dt 8 | import logging 9 | import os 10 | import subprocess 11 | import sys 12 | 13 | import drigo 14 | import numpy as np 15 | import pandas as pd 16 | 17 | import _utils 18 | 19 | 20 | def main(ancillary_ws=os.getcwd(), zero_elev_nodata_flag=False, 21 | overwrite_flag=False): 22 | """Process NLDAS ancillary data 23 | 24 | Parameters 25 | ---------- 26 | ancillary_ws : str 27 | Folder of ancillary rasters. 28 | zero_elev_nodata_flag : bool, optional 29 | If True, set elevation nodata values to 0 (the default is False). 30 | overwrite_flag : bool, optional 31 | If True, overwrite existing files (the default is False). 32 | 33 | Returns 34 | ------- 35 | None 36 | 37 | """ 38 | logging.info('\nProcess NLDAS ancillary data') 39 | 40 | # Site URLs 41 | mask_url = 'http://ldas.gsfc.nasa.gov/nldas/asc/NLDASmask_UMDunified.asc' 42 | elev_url = 'http://ldas.gsfc.nasa.gov/nldas/asc/gtopomean15k.asc' 43 | 44 | nldas_epsg = 'EPSG:4269' 45 | # nldas_epsg = 'EPSG:4326' 46 | 47 | nldas_nodata = -9999.0 48 | 49 | # Site URLs 50 | # file_re = re.compile( 51 | # 'NLDAS_FORA0125_H.A(?P\d{4})(?P\d{2})(?P\d{2}).' + 52 | # '(?P