├── dnppy ├── __file__.py ├── R_dnppy │ ├── lance.R │ ├── __init__.py │ └── README.md ├── core │ ├── __file__.py │ ├── enf_list.py │ ├── exists.py │ ├── __init__.py │ ├── move.py │ ├── rename.py │ ├── install_from_wheel.py │ ├── enf_filelist.py │ ├── run_command.py │ └── create_outname.py ├── modis │ ├── __file__.py │ ├── __init__.py │ └── define_projection.py ├── radar │ ├── __file__.py │ ├── __init__.py │ └── decibel_convert.py ├── solar │ ├── __file__.py │ ├── ref │ │ ├── NOAA_Solar_Calculations_day.xls │ │ └── NOAA_Solar_Calculations_year.xls │ ├── README.md │ └── __init__.py ├── tsa │ ├── __file__.py │ ├── __init__.py │ └── test_data │ │ └── un_ordered_example.txt ├── convert │ ├── __file__.py │ ├── lib │ │ ├── prj │ │ │ ├── 77442.prj │ │ │ ├── README │ │ │ └── 7744.prj │ │ ├── README.md │ │ └── datatype_library.csv │ ├── _convert_dtype.py │ ├── __init__.py │ ├── extract_TRMM_NetCDF.py │ ├── extract_TRMM_HDF.py │ ├── extract_MPE_NetCDF.py │ ├── extract_SMOS_NetCDF.py │ ├── HDF5_to_numpy.py │ ├── _extract_NetCDF_datatype.py │ ├── _extract_HDF_datatype.py │ ├── extract_GPM_IMERG.py │ ├── extract_archive.py │ ├── ll_to_utm.py │ └── extract_GCMO_NetCDF.py ├── download │ ├── __file__.py │ ├── fetch_SMAP.py │ ├── fetch_ASTER.py │ ├── fetch_AVHRR.py │ ├── fetch_VIIRS.py │ ├── fetch_AIRS.py │ ├── list_http_e4ftl01.py │ ├── __init__.py │ ├── list_http_waterweather.py │ ├── datatype.py │ ├── list_ftp.py │ ├── fetch_MPE.py │ ├── download_filelist.py │ ├── download_url.py │ ├── download_urls.py │ └── fetch_Landsat_WELD.py ├── landsat │ ├── __file__.py │ ├── __init__.py │ └── ndvi.py ├── raster │ ├── __file__.py │ ├── in_dir.py │ ├── is_rast.py │ ├── enf_rastlist.py │ ├── null_define.py │ ├── __init__.py │ ├── clip_to_shape.py │ ├── raster_fig.py │ ├── null_set_range.py │ ├── raster_overlap.py │ ├── from_numpy.py │ ├── new_mosaic.py │ └── apply_linear_correction.py ├── README.md ├── textio │ ├── __init__.py │ ├── test_data │ │ ├── conf_test.txt │ │ └── conf_test.json │ └── read_DS3505.py └── __init__.py ├── undeployed ├── subjects │ ├── METRIC │ │ └── ET │ │ │ ├── __file__.py │ │ │ ├── dnppy_limited │ │ │ ├── __file__.py │ │ │ ├── landsat │ │ │ │ ├── __file__.py │ │ │ │ └── __init__.py │ │ │ ├── time_series │ │ │ │ ├── __file__.py │ │ │ │ ├── __init__.py │ │ │ │ └── csv_io.py │ │ │ └── __init__.py │ │ │ ├── Empty_Metric_Model │ │ │ ├── output │ │ │ │ └── readme.txt │ │ │ ├── scratch │ │ │ │ └── readme.txt │ │ │ ├── input_dem │ │ │ │ └── readme.txt │ │ │ ├── input_landsat │ │ │ │ └── readme.txt │ │ │ ├── input_weather │ │ │ │ └── readme.txt │ │ │ ├── intermediate_calculations │ │ │ │ └── readme.txt │ │ │ └── input_ref_pixels │ │ │ │ ├── pixel_cold_stats.dbf │ │ │ │ └── pixel_hot_stats.dbf │ │ │ ├── info │ │ │ └── arc.dir │ │ │ ├── Obtaining_weather_data.doc │ │ │ ├── __init__.py │ │ │ ├── README.md │ │ │ ├── metric_tbx.pyt.xml │ │ │ └── Wx_Data_Extract.py │ ├── chunking │ │ ├── __file__.py │ │ ├── __init__.py │ │ ├── README.md │ │ └── chunk.py │ ├── CDRs │ │ ├── PERSIANN-CDR_v01r01_19890523_c20140523.nc │ │ └── PERSIANN-CDR_v01r01_19840101_c20140523_lw.nc │ ├── DSI │ │ ├── Precip_180DaySum.py │ │ ├── Precip_RollingSum.py │ │ ├── Drought_Severity.py │ │ ├── Scaled_Precip.py │ │ ├── CombinePrecipAllpoint.py │ │ └── LST_Script_Tool.py │ └── R_Code │ │ ├── Netcdf_to_Raster_Script.R │ │ └── Loop_Netcdf_to_Raster_Script.R ├── legacy │ ├── Misc │ │ ├── OceanColor.tbx │ │ ├── OceanColor10.0.tbx │ │ ├── Reverb ECHO Tool.tbx │ │ ├── desktop.ini │ │ └── OceanColorGeoreference.py │ ├── VIIRS │ │ ├── 
VIIRS_10.0.tbx │ │ ├── VIIRS_10.1.tbx │ │ ├── desktop.ini │ │ └── QFreader.py │ ├── Landsat │ │ ├── Landsat_10.0.tbx │ │ ├── Landsat_10.1.tbx │ │ ├── desktop.ini │ │ ├── L7GapFiller_ArcInterface.py │ │ ├── DNtoReflectance_ArcInterface.py │ │ ├── LTKCloudDetector_ArcInterface.py │ │ ├── L8_surf_temp.py │ │ ├── SingleBandGapFiller.py │ │ ├── Cloud_Fill.py │ │ └── ACCACloudDetector_ArcInterface.py │ ├── Basic_Batch_Tools │ │ ├── Batch_Clip.tbx │ │ ├── Batch_Project.tbx │ │ ├── Batch_Resample.tbx │ │ ├── Batch_Extract_By_Mask.tbx │ │ ├── Batch_Project_Raster.tbx │ │ ├── Batch_Define_Projection_Feature.tbx │ │ ├── Batch_Define_Projection_Raster.tbx │ │ ├── desktop.ini │ │ ├── Batch_Define_Projection_Feature.py │ │ ├── Batch_Define_Projection_Raster.py │ │ ├── Batch_Project.py │ │ ├── Batch_Filename_Rename.py │ │ ├── Batch_Resample.py │ │ ├── Batch_Clip.py │ │ ├── Batch_Project_Raster.py │ │ ├── Batch_Extract_By_Mask.py │ │ ├── Batch_Extract_By_Mask_Python.py │ │ ├── Batch_DBF_to_CSV.py │ │ ├── Batch_Zonal_Statistics_as_Table.py │ │ ├── Batch_Zonal_Statistics_as_Table (1).py │ │ └── Batch_Con_Py.py │ ├── README.md │ ├── authentication.py │ ├── line_count.py │ └── sort.py ├── proj_code │ ├── unzip_projects.py │ └── README.md └── README.md ├── docs └── source │ ├── trub │ ├── faq.rst │ └── git.rst │ ├── tiny_develop_logo_solid.png │ ├── modules │ ├── textio.rst │ ├── radar.rst │ ├── convert.rst │ ├── core.rst │ ├── download.rst │ └── solar.rst │ ├── modulesum.rst │ ├── design.rst │ └── dev_pages │ └── dev_goals.rst ├── dev ├── test │ ├── test_precip_module.py │ ├── __init__.py │ ├── build_test_env.py │ ├── fetch_VA_shapefile.py │ ├── fetch_test_precip.py │ ├── fetch_test_SRTM.py │ └── fetch_test_MODIS.py ├── README.md ├── generate_release_stat_report.py └── sphinx_build.py ├── .gitignore ├── easy_install.py ├── .travis.yml ├── README.md └── setup.py /dnppy/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/R_dnppy/lance.R: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/core/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/modis/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/radar/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/solar/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/tsa/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/convert/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/download/__file__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /dnppy/landsat/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dnppy/raster/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/chunking/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/landsat/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/output/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/scratch/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/time_series/__file__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_dem/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_landsat/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_weather/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/source/trub/faq.rst: -------------------------------------------------------------------------------- 1 | Frequently Asked Questions 2 | ========================== 3 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/intermediate_calculations/readme.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/landsat/__init__.py: -------------------------------------------------------------------------------- 1 | from .grab_meta import * 2 | -------------------------------------------------------------------------------- 
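The `from .grab_meta import *` line above illustrates the convention used throughout this repository: each function lives in its own file, and the subpackage `__init__.py` star-imports it so callers can reach it at the package level. A minimal sketch of what that means in practice (assuming dnppy is installed; `enf_list` and `dnppy/core/__init__.py` both appear later in this dump):

```python
# both routes reach the same function, because dnppy/core/__init__.py
# performs "from enf_list import *"
from dnppy import core
from dnppy.core.enf_list import enf_list

assert core.enf_list("a") == ["a"]         # non-list input gets wrapped in a list
assert enf_list(["a", "b"]) == ["a", "b"]  # list input passes through unchanged
```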
/undeployed/legacy/Misc/OceanColor.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Misc/OceanColor.tbx -------------------------------------------------------------------------------- /docs/source/tiny_develop_logo_solid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/docs/source/tiny_develop_logo_solid.png -------------------------------------------------------------------------------- /undeployed/legacy/VIIRS/VIIRS_10.0.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/VIIRS/VIIRS_10.0.tbx -------------------------------------------------------------------------------- /undeployed/legacy/VIIRS/VIIRS_10.1.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/VIIRS/VIIRS_10.1.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/Landsat_10.0.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Landsat/Landsat_10.0.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/Landsat_10.1.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Landsat/Landsat_10.1.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Misc/OceanColor10.0.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Misc/OceanColor10.0.tbx -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/info/arc.dir: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/METRIC/ET/info/arc.dir -------------------------------------------------------------------------------- /dev/test/test_precip_module.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import gdal 4 | 5 | def test_precip_module(test_dir): 6 | pass 7 | 8 | 9 | -------------------------------------------------------------------------------- /undeployed/legacy/Misc/Reverb ECHO Tool.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Misc/Reverb ECHO Tool.tbx -------------------------------------------------------------------------------- /dnppy/solar/ref/NOAA_Solar_Calculations_day.xls: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/dnppy/solar/ref/NOAA_Solar_Calculations_day.xls -------------------------------------------------------------------------------- /dnppy/solar/ref/NOAA_Solar_Calculations_year.xls: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/dnppy/solar/ref/NOAA_Solar_Calculations_year.xls -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Clip.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Clip.tbx -------------------------------------------------------------------------------- /dnppy/convert/lib/prj/77442.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["HRAP_Sphere",DATUM["",SPHEROID["",6371200.0,0.0]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Project.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Project.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Resample.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Resample.tbx -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Obtaining_weather_data.doc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/METRIC/ET/Obtaining_weather_data.doc -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Extract_By_Mask.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Extract_By_Mask.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Project_Raster.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Project_Raster.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Misc/desktop.ini: -------------------------------------------------------------------------------- 1 | [.ShellClassInfo] 2 | InfoTip=This folder is shared online. 
3 | IconFile=C:\Program Files (x86)\Google\Drive\googledrivesync.exe 4 | IconIndex=12 5 | -------------------------------------------------------------------------------- /undeployed/subjects/CDRs/PERSIANN-CDR_v01r01_19890523_c20140523.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/CDRs/PERSIANN-CDR_v01r01_19890523_c20140523.nc -------------------------------------------------------------------------------- /dnppy/R_dnppy/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | R_dnppy is a compatibility package between R scripts and the dnppy library 3 | 4 | Development is pending 5 | """ 6 | 7 | 8 | __author__ = ["lancewatkins"] -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/desktop.ini: -------------------------------------------------------------------------------- 1 | [.ShellClassInfo] 2 | InfoTip=This folder is shared online. 3 | IconFile=C:\Program Files (x86)\Google\Drive\googledrivesync.exe 4 | IconIndex=12 5 | -------------------------------------------------------------------------------- /undeployed/legacy/VIIRS/desktop.ini: -------------------------------------------------------------------------------- 1 | [.ShellClassInfo] 2 | InfoTip=This folder is shared online. 3 | IconFile=C:\Program Files (x86)\Google\Drive\googledrivesync.exe 4 | IconIndex=12 5 | -------------------------------------------------------------------------------- /undeployed/subjects/CDRs/PERSIANN-CDR_v01r01_19840101_c20140523_lw.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/CDRs/PERSIANN-CDR_v01r01_19840101_c20140523_lw.nc -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Feature.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Feature.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Raster.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Raster.tbx -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/desktop.ini: -------------------------------------------------------------------------------- 1 | [.ShellClassInfo] 2 | InfoTip=This folder is shared online. 3 | IconFile=C:\Program Files (x86)\Google\Drive\googledrivesync.exe 4 | IconIndex=12 5 | -------------------------------------------------------------------------------- /undeployed/legacy/README.md: -------------------------------------------------------------------------------- 1 | This folder is for legacy code with functionality that should be 2 | built into the greater module framework eventually, but also needs to be 3 | continuously accessible for its users. 
-------------------------------------------------------------------------------- /dev/README.md: -------------------------------------------------------------------------------- 1 | Developer tools, including scripts for gathering stats from the github API, a sphinx documentation builder, and functions that help build a common suite of test data for function checking. 2 | 3 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_ref_pixels/pixel_cold_stats.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_ref_pixels/pixel_cold_stats.dbf -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_ref_pixels/pixel_hot_stats.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-DEVELOP/dnppy/HEAD/undeployed/subjects/METRIC/ET/Empty_Metric_Model/input_ref_pixels/pixel_hot_stats.dbf -------------------------------------------------------------------------------- /dnppy/R_dnppy/README.md: -------------------------------------------------------------------------------- 1 | # R-dnppy 2 | 3 | 4 | This was initially set up to build on promised compatibility between arcmap and R, but this effort has now likely been absorbed by [https://github.com/R-ArcGIS](https://github.com/R-ArcGIS) 5 | -------------------------------------------------------------------------------- /dnppy/README.md: -------------------------------------------------------------------------------- 1 | This directory should contain deployable or soon to be deployable code that fits well within the overall dnppy framework. 2 | 3 | This directory will be copied into the local site-packages folder with the name "dnppy", as well as a version-specific copy named according to the version number, e.g. "dnppy0.0.0" 4 | -------------------------------------------------------------------------------- /dnppy/convert/lib/prj/README: -------------------------------------------------------------------------------- 1 | This folder contains custom prj files downloaded from spatialreference.org. 2 | A file must be in this folder for entries in the datatype_library that 3 | use the SR-ORG identifier. Use EPSG identifiers whenever possible, as these 4 | can be easily loaded from maintained gdal libraries. -------------------------------------------------------------------------------- /dnppy/download/fetch_SMAP.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Jwely' 2 | 3 | __all__ = ["fetch_SMAP"] 4 | 5 | 6 | def fetch_SMAP(): 7 | """ 8 | this function downloads SMAP data via an FTP server 9 | 10 | no FTP server is presently public. 11 | """ 12 | 13 | print("this function is an unfinished stub!") 14 | return -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | The EvapoTranspiration (ET) module has been added to contain 4 | all ET-related code, starting with the METRIC model. 5 | 6 | It is not as scalable as it could be, but it met the requirements 7 | of the first partner handoff involving ET calculations.
8 | """ 9 | 10 | -------------------------------------------------------------------------------- /dnppy/download/fetch_ASTER.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | __all__ = ["fetch_ASTER"] 4 | 5 | def fetch_ASTER(): 6 | """ 7 | fetches a limited set of ASTER data products via http from the servers at 8 | http://e4ftl01.cr.usgs.gov/ASTT 9 | """ 10 | 11 | print("this function is an unfinished stub!") 12 | return -------------------------------------------------------------------------------- /dnppy/download/fetch_AVHRR.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | 4 | __all__ = ["fetch_AVHRR"] 5 | 6 | 7 | def fetch_AVHRR(): 8 | """ 9 | fetches AVHRR-pathfinder data via ftp 10 | 11 | server: ftp://ftp.nodc.noaa.gov/pub/data.nodc/pathfinder/ 12 | """ 13 | 14 | print("this function is an unfinished stub!") 15 | return -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # filter by file extension or type 2 | *.pyc 3 | *.whl 4 | *.xml 5 | *.tif 6 | *.TIF 7 | *.egg* 8 | *.html 9 | 10 | # specific files and folders 11 | docs/build 12 | *dnppy_stat_report.txt 13 | *make_html.bat 14 | *arc.dir 15 | _logo.png 16 | 17 | # ignore files with test tag. 18 | *_test* 19 | 20 | # the users workspace 21 | .idea/ 22 | *install_pip.py 23 | dnppy_stat_report.txt 24 | 25 | -------------------------------------------------------------------------------- /dev/test/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | """ 4 | The functions in this module are for testing dnppy, including 5 | download functions to establish a common baseline. 
6 | """ 7 | 8 | from build_test_env import * 9 | from fetch_test_landsat import * 10 | from fetch_test_MODIS import * 11 | from fetch_VA_shapefile import * 12 | from fetch_test_precip import * 13 | from fetch_test_SRTM import * 14 | -------------------------------------------------------------------------------- /dnppy/download/fetch_VIIRS.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Jwely' 2 | 3 | __all__ = ["fetch_VIIRS"] 4 | 5 | 6 | def fetch_VIIRS(): 7 | """ 8 | functiong fetches VIIRS data via ftp server 9 | 10 | help: http://www.class.ncdc.noaa.gov/release/data_available/npp/npp_ftpserver.htm 11 | ftp: ftp://ftp-npp.class.ngdc.noaa.gov/ 12 | """ 13 | 14 | print("this function is an unfinished stub!") 15 | return -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/__init__.py: -------------------------------------------------------------------------------- 1 | # DEVELOP National Program python module (dnppy) 2 | 3 | """ 4 | This is a limited beta version of the dnppy module 5 | with just a few key functions we found usefull for this 6 | project 7 | 8 | to read more about the dnppy module, or download full 9 | versions, please visit our github: 10 | 11 | https://github.com/nasa/dnppy 12 | 13 | """ 14 | 15 | 16 | -------------------------------------------------------------------------------- /dnppy/convert/lib/prj/7744.prj: -------------------------------------------------------------------------------- 1 | PROJCS["User_Defined_Stereographic_North_Pole",GEOGCS["GCS_User_Defined",DATUM["D_User_Defined",SPHEROID["User_Defined_Spheroid",6371200.0,0.0]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.017453292519943295]],PROJECTION["Stereographic_North_Pole"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-105.0],PARAMETER["Standard_Parallel_1",60.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /dnppy/core/enf_list.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | 4 | def enf_list(item): 5 | """ 6 | When a list is expected, this function can be used to ensure 7 | non-list data types are placed inside of a single entry list. 8 | 9 | :param item: any datatype 10 | :return list: a list type 11 | """ 12 | 13 | if not isinstance(item,list) and item: 14 | return [item] 15 | else: 16 | return item -------------------------------------------------------------------------------- /undeployed/subjects/chunking/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | This little chunking module is used to subset raster images into slices 4 | to allow tasks with large memory requirements to be subsetted a little more 5 | easily. 6 | 7 | It was not included in the raster module, because it has applicability 8 | outside of an armap geospatial environment. 9 | """ 10 | 11 | __author__ = ["Jeffry Ely, jeff.ely.08@gmail.com"] 12 | 13 | from chunk import * 14 | from chunk_bundle import * 15 | 16 | 17 | -------------------------------------------------------------------------------- /dnppy/radar/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The radar module is very small, but houses functions used to process 3 | radar data. 
It focuses on UAVSAR data, which comes from a Synthetic Aperture 4 | Radar (SAR) on board an Unmanned Aerial Vehicle (UAV). It assists the user in 5 | building header data and converting to decibels. 6 | """ 7 | 8 | __author__ = ["djjensen", 9 | "Scott Baron", 10 | "Jwely"] 11 | 12 | # local imports 13 | from decibel_convert import * 14 | from create_header import * 15 | -------------------------------------------------------------------------------- /easy_install.py: -------------------------------------------------------------------------------- 1 | """ 2 | Simple "open this script and run it" installer for dnppy. 3 | """ 4 | 5 | __author__ = 'Jwely' 6 | 7 | # set up dependencies for the active python directory, including installation of pip 8 | import install_dependencies 9 | install_dependencies.main() 10 | 11 | # uses pip to install this local copy of the repo 12 | import pip 13 | import os 14 | 15 | fold_name = os.path.realpath("__file__").split("\\")[-2] 16 | pip.main(["install", "--upgrade", "../{dir}".format(dir=fold_name)]) 17 | 18 | -------------------------------------------------------------------------------- /dnppy/download/fetch_AIRS.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | __all__ = ["fetch_AIRS"] 4 | 5 | def fetch_AIRS(): 6 | """ 7 | function for fetching AIRS data from the following FTP servers 8 | 9 | level1: ftp://airsl1.gesdisc.eosdis.nasa.gov/ftp/data/s4pa/Aqua_AIRS_Level1 10 | level2: ftp://airsl2.gesdisc.eosdis.nasa.gov/ftp/data/s4pa/Aqua_AIRS_Level2 11 | level3: ftp://acdisc.gsfc.nasa.gov/ftp/data/s4pa/Aqua_AIRS_Level3 12 | """ 13 | 14 | print("This function is an unfinished stub!") 15 | pass -------------------------------------------------------------------------------- /docs/source/modules/textio.rst: -------------------------------------------------------------------------------- 1 | textio 2 | ====== 3 | 4 | .. automodule:: dnppy.textio 5 | :members: 6 | 7 | Examples 8 | -------- 9 | 10 | .. rubric:: Using ``text_data`` objects 11 | 12 | 13 | .. rubric:: Using ``ioconfig`` objects 14 | 15 | Code Help 16 | --------- 17 | 18 | .. automodule:: dnppy.textio.read_DS3505 19 | :members: 20 | 21 | .. automodule:: dnppy.textio.text_data 22 | :members: 23 | :private-members: 24 | 25 | .. automodule:: dnppy.textio.ioconfig 26 | :members: 27 | :private-members: 28 | 29 | -------------------------------------------------------------------------------- /dnppy/tsa/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The tsa (short for time series analysis) module is centered around the time_series 3 | class. One or more time_series objects should be central to any data analysis 4 | task that examines temporal relationships in data sets of raster or tabular 5 | format. This module also houses the rast_series class, which is an extension of 6 | time_series for handling filepaths to raster data. 7 | """ 8 | 9 | __author__ = ["Jwely"] 10 | 11 | 12 | # local imports 13 | from time_series import * 14 | from rast_series import * 15 | 16 | 17 | -------------------------------------------------------------------------------- /dnppy/textio/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The textio module, read like "text I/O", is a repository of functions for reading specific `text` 3 | data formats as they are served up from their respective DAACs.
Custom text file formats are 4 | common in historical weather data and other ground-based data collection networks. This module 5 | aims to convert them to something more standardized. Some limited `json`_ writing capabilities exist. 6 | 7 | .. _json: http://json.org/ 8 | """ 9 | 10 | __author__ = ["Jwely"] 11 | 12 | from ioconfig import * 13 | from read_DS3505 import * 14 | from text_data import * 15 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/README.md: -------------------------------------------------------------------------------- 1 | # EvapoTranspiration (ET) module 2 | 3 | Presently, this only houses the METRIC model. 4 | 5 | 6 | ### METRIC 7 | 8 | METRIC was developed for estimating evapotranspiration from Landsat imagery and weather station data. It includes a short tutorial on downloading properly formatted weather data. The easiest way to interface with the METRIC model is to add the Python toolbox file (".pyt") in ArcMap and follow the user prompts. 9 | 10 | It is not as scalable as it could be, but it met the requirements of the first partner handoff involving ET calculations. 11 | -------------------------------------------------------------------------------- /dnppy/core/exists.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | 5 | 6 | def exists(location): 7 | """ 8 | Ensures input location is either a file or a folder 9 | 10 | :param location: a filepath to a directory or file 11 | :return bool: returns true if filepath leads to a real place 12 | """ 13 | 14 | # if the object is neither a file nor a folder, return False. 15 | if not os.path.exists(location) and not os.path.isfile(location): 16 | print("{0} is not a valid file or folder!".format(location)) 17 | return False 18 | 19 | else: 20 | return True -------------------------------------------------------------------------------- /dnppy/modis/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Two satellites, Terra and Aqua, are home to a MODIS sensor, which has produced a large number 3 | of data products for over a full decade with minimal interruption. The modis module houses 4 | functions specifically related to processing and handling MODIS data, which includes handling 5 | of the MODIS sinusoidal projection, and mosaic operations.
6 | 7 | Requires ``arcpy`` 8 | """ 9 | 10 | # created October 2014 11 | __author__ = ["Jwely"] 12 | 13 | # local imports 14 | from mosaic import * 15 | from define_projection import * 16 | from extract_from_hdf import * 17 | from modis_metadata import * -------------------------------------------------------------------------------- /dnppy/convert/lib/README.md: -------------------------------------------------------------------------------- 1 | The `datatype_library.csv` file contains the following fields: 2 | ##### Unique_Name 3 | a simple unique identifier 4 | ##### projectionID 5 | The ID number associated with the projection; use spatialreference.org or the EPSG website to find these 6 | ##### A, B, C, D, E, F 7 | Geotransform coefficients used to perform a two-dimensional coordinate transform between matrix space and projection space according to the equations: 8 | x = A + iB + jC 9 | y = D + iE + jF 10 | (for example, with the illustrative values A = -125.0, B = 0.01, C = 0, D = 50.0, E = 0, F = -0.01, the pixel at column i = 10, row j = 20 maps to x = -125.0 + 10*0.01 = -124.9 and y = 50.0 - 20*0.01 = 49.8) 11 | 12 | ##### download_source 13 | The download location where data matching this format is typically retrieved 14 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/metric_tbx.pyt.xml: -------------------------------------------------------------------------------- 1 | 2 | 20150413131850001.0TRUE20150415110818c:\program files (x86)\arcgis\desktop10.2\Help\gpmetric_tbxArcToolbox Toolbox 3 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | 5 | before_install: 6 | - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh 7 | - chmod +x miniconda.sh 8 | - "./miniconda.sh -b" 9 | - export PATH=/home/travis/miniconda2/bin:$PATH 10 | - conda update --yes conda 11 | - sudo rm -rf /dev/shm 12 | - sudo ln -s /run/shm /dev/shm 13 | install: 14 | - conda install --yes python="2.7" psutil sphinx numpy scipy gdal cython h5py pycurl shapely mock matplotlib 15 | - pip install --user travis-sphinx 16 | 17 | script: 18 | travis-sphinx build 19 | 20 | after_success: 21 | travis-sphinx deploy 22 | -------------------------------------------------------------------------------- /dnppy/core/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The ``core`` module houses functions that assist in data formatting, input sanitation, 3 | path manipulations, file naming, logical checks, etc. for use in other functions within 4 | dnppy. They are short and sweet, and can be used as examples to start defining your own functions! 5 | """ 6 | 7 | __author__ = ["jwely", 8 | "lmakely", 9 | ] 10 | 11 | # local imports 12 | from run_command import * 13 | from create_outname import * 14 | from enf_filelist import * 15 | from enf_list import * 16 | from exists import * 17 | from list_files import * 18 | from move import * 19 | from rename import * 20 | from install_from_wheel import * 21 | -------------------------------------------------------------------------------- /undeployed/proj_code/unzip_projects.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | import zipfile 5 | 6 | def unzip_projects(): 7 | """ 8 | Quick script to perform mass unzipping on code packages collected 9 | for inclusion in proj_code.
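10 | Run it from inside the directory that contains the zip files.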
10 | """ 11 | 12 | # lists all zip files in the current directory non-recursively 13 | zips = [a for a in os.listdir(os.curdir) if ".zip" in a] 14 | 15 | for azip in zips: 16 | z = zipfile.ZipFile(azip) 17 | z.extractall() 18 | z.close() 19 | os.remove(os.path.abspath(azip)) 20 | 21 | print("unzipped {0} project folders".format(len(zips))) 22 | 23 | 24 | if __name__ == "__main__": 25 | unzip_projects() 26 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Feature.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Desired Projection 14 | prjfile= arcpy.GetParameterAsText(1) 15 | 16 | #Define projection to all datasets. 17 | for shps in arcpy.ListFeatureClasses(): 18 | arcpy.AddMessage(shps) 19 | arcpy.DefineProjection_management(shps, prjfile) 20 | -------------------------------------------------------------------------------- /dnppy/solar/README.md: -------------------------------------------------------------------------------- 1 | # solar 2 | tiny solar module for handling the variety of calculations that are commonly required 3 | to make sense of the complex relationship between a point on the earths surface and the sun 4 | 5 | For starters, it replicates the functionality of the NOAA calculator found here: 6 | [http://www.esrl.noaa.gov/gmd/grad/solcalc/calcdetails.html] 7 | 8 | It has recently been updated to allow calculations for all points within a numpy array (lat,lon) 9 | 10 | It was produced in my free time as part of a personal project of mine, but clearly has some utility in the dnppy module. 11 | It is distributed as part of the dnppy module with permission of the original author. 12 | 13 | Author: Jeffry Ely 14 | 15 | 16 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Define_Projection_Raster.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Desired Projection 14 | prjfile= arcpy.GetParameterAsText(1) 15 | 16 | #Define projection to all datasets. 
18 | for rasters in arcpy.ListRasters(): 19 | arcpy.AddMessage(rasters) 20 | arcpy.DefineProjection_management(rasters, prjfile) 21 | -------------------------------------------------------------------------------- /dnppy/textio/test_data/conf_test.txt: -------------------------------------------------------------------------------- 1 | param_name ; param_type ; param_value 2 | test long title for just one single str ; ; some test string 3 | test_bool ; ; True 4 | test_float ; ; 1.12345 5 | test_int ; ; 1 6 | test_long ; ; 1000000000000000000 7 | test_list ; ; ['a', 'b', 75, 'd'] 8 | test_dict ; ; {'string_key': 0, 1: 'string_value'} 9 | test_tuple ; ; (1, 2) 10 | -------------------------------------------------------------------------------- /dnppy/raster/in_dir.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["in_dir"] 3 | 4 | from enf_rastlist import enf_rastlist 5 | from dnppy import core 6 | 7 | def in_dir(dir_name, recursive = False): 8 | """ 9 | Lists all the rasters in an input directory. Finds all formats 10 | supported by ``raster.enf_rastlist()``. 11 | 12 | :param dir_name: directory to search for rasters 13 | :param recursive: Set to "True" to search within subfolders of input 14 | directory "dir_name" 15 | """ 16 | 17 | rast_list = core.list_files(recursive, dir_name) 18 | rast_list = enf_rastlist(rast_list) 19 | 20 | print("Found {0} files with a valid raster format".format(len(rast_list))) 21 | 22 | return rast_list 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### Docs 2 | 3 | https://nasa-develop.github.io/dnppy/ 4 | 5 | ### For DEVELOP partners! 6 | 7 | Looking for some scripts and code for a specific DEVELOP project? If that project does not have its own repository, the code will be located in `undeployed/proj_code`, organized by term, node, and title. To get this code, simply follow the installation instructions on our documentation pages, then open the downloaded dnppy-master.zip and navigate to the `undeployed/proj_code` folder. 8 | 9 | ### For the general GIS community! 10 | 11 | dnppy is a collection of functions and classes for downloading, formatting, and analyzing NASA satellite data and ancillary NOAA datasets! Visit the [docs](https://nasa-develop.github.io/dnppy/) to learn more!
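12 | 
13 | A minimal sketch of typical use (the folder path is hypothetical; `raster.in_dir` is shown just above, and `core.rename` appears later in this dump):
14 | 
15 | ```python
16 | from dnppy import raster, core
17 | 
18 | # list every raster beneath a folder, then swap spaces for underscores
19 | for rast in raster.in_dir("C:/data/rasters", recursive=True):
20 |     core.rename(rast, " ", "_")
21 | ```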
22 | -------------------------------------------------------------------------------- /dnppy/convert/_convert_dtype.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["_convert_dtype"] 3 | import gdal 4 | 5 | def _convert_dtype(numpy_dtype_string): 6 | """ 7 | converts numpy dtype to a gdal data type object 8 | 9 | :param numpy_dtype_string: 10 | :return gdal_datatype_object: 11 | """ 12 | 13 | ndt = str(numpy_dtype_string) 14 | 15 | if ndt == "float64": 16 | return gdal.GDT_Float64 17 | 18 | elif ndt == "float32": 19 | return gdal.GDT_Float32 20 | 21 | elif ndt == "uint32": 22 | return gdal.GDT_UInt32 23 | 24 | elif "uint" in ndt: 25 | return gdal.GDT_UInt16 26 | 27 | elif ndt == "int32": 28 | return gdal.GDT_Int32 29 | 30 | else: 31 | return gdal.GDT_Int16 -------------------------------------------------------------------------------- /dnppy/core/move.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os,shutil 4 | 5 | 6 | def move(source, destination): 7 | """ 8 | moves a file, ensures destination directory exists 9 | 10 | :param source: the current full location of the file 11 | :param destination: the desired full location of the file 12 | """ 13 | 14 | dest_path, name = os.path.split(destination) 15 | 16 | # create that directory if it doesn't already exist 17 | if not os.path.exists(dest_path): 18 | os.makedirs(dest_path) 19 | try: 20 | shutil.move(source, destination) 21 | print('moved file from {0} to {1}'.format(source, destination)) 22 | except: 23 | print("failed to move file from {0}".format(source)) 24 | 25 | return dest_path -------------------------------------------------------------------------------- /dnppy/raster/is_rast.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["is_rast"] 3 | import os 4 | 5 | def is_rast(filename): 6 | """ 7 | Verifies that input filename exists, and is of a valid raster format 8 | 9 | :param filename: check if this filename is a valid accessible raster. 10 | :return : returns True if filename is valid accessible raster. False otherwise. 11 | """ 12 | 13 | rast_types=['bil','bip','bmp','bsq','dat','gif','img','jpg','jp2','png','tif', 14 | 'BIL','BIP','BMP','BSQ','DAT','GIF','IMG','JPG','JP2','PNG','TIF'] 15 | ext = filename[-3:] 16 | 17 | if os.path.isfile(filename): 18 | for rast_type in rast_types: 19 | if ext == rast_type: 20 | return True 21 | 22 | return False 23 | -------------------------------------------------------------------------------- /dnppy/download/list_http_e4ftl01.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import urllib 4 | 5 | __all__ = ["list_http_e4ftl01"] 6 | 7 | def list_http_e4ftl01(site): 8 | """ 9 | Lists contents of http download site at [http://e4ftl01.cr.usgs.gov] 10 | which hosts select MODIS products, Landsat WELD, and SRTM data. 11 | 12 | :param site: a url to somewhere on the server at http://e4ftl01.cr.usgs.gov 13 | 14 | :return file_urls: returns a list of urls to files on that http page.
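15 | 
16 |     Example (hypothetical subdirectory on this server):
17 |         file_urls = list_http_e4ftl01("http://e4ftl01.cr.usgs.gov/SRTM/")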
15 | """ 16 | 17 | website = urllib.urlopen(site) 18 | string = website.readlines() 19 | 20 | file_urls = [] 21 | for line in string: 22 | try: 23 | file_urls.append(line.replace('/','').split('"')[5]) 24 | except: 25 | pass 26 | return file_urls -------------------------------------------------------------------------------- /dnppy/landsat/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | Landsat imagery is pretty versatile and commonly used, so the landsat data has its own 6 | module for common tasks associated with this product. This includes things like converting 7 | to top-of-atmosphere reflectance, at-satellite brightness temperature, cloud masking, and others. 8 | 9 | Requires ``arcpy`` 10 | """ 11 | 12 | __author__ = ["djjensen", 13 | "Jwely", 14 | "Quinten Geddes"] 15 | 16 | # local imports 17 | from atsat_bright_temp import * 18 | from cloud_mask import * 19 | from landsat_metadata import * 20 | from ndvi import * 21 | from scene import * 22 | from surface_reflectance import * 23 | from surface_temp import * 24 | from toa_radiance import * 25 | from toa_reflectance import * 26 | 27 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Project.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Output coordinate system. 14 | OutSys= arcpy.GetParameterAsText(1) 15 | 16 | #For all the shapefiles in the input folder, alter the projection. 17 | for shps in arcpy.ListFeatureClasses(): 18 | arcpy.AddMessage(shps) 19 | #Out name is the Output File name. 20 | Out_Name= "Prj_" + shps 21 | arcpy.Project_management(shps, Out_Name, OutSys) 22 | 23 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Filename_Rename.py: -------------------------------------------------------------------------------- 1 | #--------------------------------- 2 | #Name: String Replace 3 | #Purpose: Rename filenames (i.e. remove special characters) 4 | #Notes: Text that must be changed are those in all Caps. 5 | #AAA = file type extension (i.e ".tif") 6 | #BBB = characters to be replaced 7 | #CCC = new characters to replace BBB 8 | #Created: 02/24/2014 9 | #--------------------------------- 10 | 11 | arcpy.env.workspace = "INPUT FILE PATH TO DATA FOLDER HERE" 12 | arcpy.env.overwriteOutput = True 13 | Files = arcpy.ListFiles("*.AAA") 14 | 15 | for filename in Files: 16 | if 'BBB' in filename: 17 | newfilename = string.replace(filename, 'BBB', 'CCC') 18 | print "Renaming", filename, "to", newfilename, "..." 19 | os.rename(filename, newfilename) 20 | 21 | 22 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Resample.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 
3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Cell_Size 14 | Cell_Size= arcpy.GetParameterAsText(1) 15 | 16 | #Resample Type 17 | Resample_Type= arcpy.GetParameterAsText(2) 18 | 19 | #For each raster in the input folder, apply the resample. 20 | for rasters in arcpy.ListRasters(): 21 | out_name= "Resample_" + rasters[0:-4] + ".tif" 22 | arcpy.Resample_management(rasters, out_name, Cell_Size, Resample_Type) 23 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Clip.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Clipping file. 14 | Clip_file= arcpy.GetParameterAsText(1) 15 | 16 | #For all the shapefiles in the folder, perform a clip. 17 | for shps in arcpy.ListFeatureClasses(): 18 | if shps != Clip_file: 19 | arcpy.AddMessage(shps) 20 | #Out name is the Output File name. 21 | Out_Name= "Clip_" + shps 22 | arcpy.Clip_analysis(shps, Clip_file, Out_Name) 23 | -------------------------------------------------------------------------------- /dnppy/download/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The download module houses many "fetch" functions for automatic retrieval of specific data 3 | products from ``http`` and ``ftp`` servers around the USA. While centered around NASA data 4 | products, some functions exist for fetching of ancillary NOAA climate products and others. 
5 | """ 6 | 7 | __author__ = ["Jwely", 8 | "lancewatkins"] 9 | 10 | # local imports 11 | from download_filelist import * 12 | from download_url import * 13 | from download_urls import * 14 | 15 | from fetch_GPM_IMERG import * 16 | from fetch_Landsat8 import * 17 | from fetch_Landsat_WELD import * 18 | from fetch_MODIS import * 19 | from fetch_MPE import * 20 | from fetch_SRTM import * 21 | from fetch_TRMM import * 22 | 23 | 24 | from list_http_e4ftl01 import * 25 | from list_http_waterweather import * 26 | from list_ftp import * 27 | -------------------------------------------------------------------------------- /dnppy/raster/enf_rastlist.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["enf_rastlist"] 3 | 4 | from dnppy import core 5 | 6 | from is_rast import is_rast 7 | import os 8 | 9 | def enf_rastlist(filelist): 10 | """ 11 | Ensures a list of inputs filepaths contains only valid raster types 12 | 13 | :param filelist: a list of filepaths that contains some raster filetypes 14 | :return new_filelist: a list of filepaths with all non-raster files removed 15 | """ 16 | 17 | # first place the input through the same requirements of any filelist 18 | filelist = core.enf_filelist(filelist) 19 | new_filelist = [] 20 | 21 | for filename in filelist: 22 | 23 | if os.path.isfile(filename): 24 | if is_rast(filename): 25 | new_filelist.append(filename) 26 | 27 | return new_filelist 28 | -------------------------------------------------------------------------------- /dnppy/radar/decibel_convert.py: -------------------------------------------------------------------------------- 1 | 2 | __author__ = ["Daniel Jensen, danieljohnjensen@gmail.com", 3 | "Scott Baron", 4 | "Jwely"] 5 | 6 | from dnppy import raster 7 | import numpy 8 | 9 | 10 | def decibel_convert(filename): 11 | """ 12 | Converts the input UAVSAR .grd file into units of decibels. Note 13 | that a .hdr file must be created and accompany the .grd/.inc files for this to work 14 | 15 | :param filename: the full file path string for the .grd data file 16 | :return outname: filepath to output file created by this function. 17 | """ 18 | 19 | #arcpy.CheckOutExtension("Spatial") 20 | 21 | inRaster, meta = raster.to_numpy(filename) 22 | dB_raster = 10 * numpy.log10(inRaster) 23 | outname = filename.replace(".grd", "_dB.tif") 24 | raster.from_numpy(dB_raster, meta, outname) 25 | return outname 26 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Project_Raster.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Desired Projection 14 | prjfile= arcpy.GetParameterAsText(1) 15 | 16 | #Resampling Type 17 | Resample= arcpy.GetParameterAsText(2) 18 | 19 | #For all the rasters in the input folder, change the projection. 
21 | for rasters in arcpy.ListRasters(): 22 | arcpy.AddMessage(rasters) 23 | Out_Name= "Prj_" + rasters[0:-4] + ".tif" 24 | arcpy.ProjectRaster_management(rasters, Out_Name, prjfile, Resample) 25 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Extract_By_Mask.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder. 10 | folder_path= arcpy.GetParameterAsText(0) 11 | arcpy.env.workspace= folder_path 12 | 13 | #Masking file. 14 | Mask_file= arcpy.GetParameterAsText(1) 15 | 16 | #For all the rasters in the file, perform an extract by mask. 17 | for rasters in arcpy.ListRasters(): 18 | arcpy.AddMessage(rasters) 19 | #Out name is the Output File name. EBM stands for "Extract By Mask". 20 | Out_Name= "EBM_" + rasters 21 | outExtractByMask = ExtractByMask(rasters, Mask_file) 22 | outExtractByMask.save(Out_Name) 23 | -------------------------------------------------------------------------------- /undeployed/subjects/chunking/README.md: -------------------------------------------------------------------------------- 1 | # Chunking 2 | 3 | This small module is intended to allow more scalable raster operations by making it very simple to split rasters into smaller 4 | slices for processing with less memory consumption. 5 | 6 | The concept is simple: there are "chunk_bundle"s, which are composed of "chunk"s. A "chunk" object simply contains a numpy array and 7 | an index location that represents its position in a larger "chunk_bundle", which in turn possesses just an object list of its constituent 8 | chunks, as well as the geospatial metadata associated with the entire image. A "chunk_bundle" possesses all the necessary methods 9 | to load a new image and split it into chunks, as well as aggregate constituent chunks into a raster image to be saved to disk. 10 | 11 | Each of the chunks within a chunk bundle may be accessed with simple indexing. 12 | chunk_bund[i] will return the i'th chunk's numpy array. 13 | -------------------------------------------------------------------------------- /dnppy/core/rename.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | from exists import exists 5 | 6 | 7 | def rename(filename, replace_this, with_this): 8 | """ 9 | renames a file 10 | 11 | :param filename: input file to rename 12 | :param replace_this: string to be replaced, such as " " (a space) 13 | :param with_this: what to replace the string with, such as "_" (an underscore) 14 | 15 | :return newfilename: the new name of the file.
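16 | 
17 |     e.g. rename("C:/data/my file.tif", " ", "_") renames the file to "C:/data/my_file.tif" (hypothetical path)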
16 | """ 17 | 18 | if replace_this in filename: 19 | 20 | # make sure the filename exists 21 | exists(filename) 22 | 23 | # define a new filename 24 | newfilename = filename.replace(replace_this, with_this) 25 | 26 | # rename the file 27 | os.rename(filename, newfilename) 28 | 29 | print("renamed" + filename + "to" + newfilename) 30 | return newfilename 31 | 32 | else: 33 | return filename 34 | -------------------------------------------------------------------------------- /dnppy/download/list_http_waterweather.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | 4 | import urllib 5 | 6 | def list_http_waterweather(site): 7 | """ 8 | Lists contents of http directories at [http://water.weather.gov/precip/p_download_new/] 9 | which hosts MPE data. 10 | 11 | :param site: url to somewhere on the server at http://water.weather.gov/precip/p_download_new/ 12 | 13 | :return file_urls: returns a list of file urls at input site. 14 | """ 15 | 16 | website = urllib.urlopen(site) 17 | string = website.readlines() 18 | 19 | file_urls = [] 20 | for line in string: 21 | try: 22 | name = line.split('"')[7] 23 | if "." in name: 24 | file_urls.append(line.split('"')[7]) 25 | except: 26 | pass 27 | return file_urls 28 | 29 | if __name__ == "__main__": 30 | print list_http_waterweather("http://water.weather.gov/precip/p_download_new/2015/01/01") -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/L7GapFiller_ArcInterface.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: module1 3 | # Purpose: 4 | # 5 | # Author: qgeddes 6 | # 7 | # Created: 25/04/2013 8 | # Copyright: (c) qgeddes 2013 9 | # Licence: 10 | #------------------------------------------------------------------------------- 11 | import L7GapFiller 12 | 13 | 14 | Scenes=arcpy.GetParameterAsText(0) 15 | Scenes=Scenes.split(";") 16 | 17 | OutputFolder=arcpy.GetParameterAsText(1) 18 | OutputFile= arcpy.GetParameterAsText(2) 19 | Output=OutputFolder+"\\"+OutputFile 20 | CloudMasks= arcpy.GetParameterAsText(3) 21 | CloudMasks= CloudMasks.split(";") 22 | Z=arcpy.GetParameter(4) 23 | 24 | arcpy.AddMessage(Z) 25 | arcpy.env.scratchWorkspace=OutputFolder 26 | arcpy.CheckOutExtension("Spatial") 27 | arcpy.env.overwriteOutput=True 28 | 29 | L7GapFiller.L7GapFill(Scenes, Output,CloudMasks,Z) 30 | -------------------------------------------------------------------------------- /dev/test/build_test_env.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from fetch_test_landsat import fetch_test_landsat 4 | from fetch_test_MODIS import fetch_test_MODIS 5 | from fetch_VA_shapefile import fetch_VA_shapefile 6 | from fetch_test_SRTM import fetch_test_SRTM 7 | from fetch_test_precip import fetch_test_precip 8 | 9 | def build_test_env(test_dir): 10 | """ 11 | wraps each of the "fetch" functions for building a common 12 | testing environment for dnppy, shouldn't be relied upon, but 13 | this function is a quick way to just go grab everything. 
14 | """ 15 | 16 | print("Building the testing environment!") 17 | print("Data downloads may take several minutes") 18 | 19 | fetch_test_landsat(test_dir) 20 | fetch_VA_shapefile(test_dir) 21 | fetch_test_SRTM(test_dir) 22 | fetch_test_MODIS(test_dir) 23 | fetch_test_precip(test_dir) 24 | 25 | return 26 | 27 | if __name__ == "__main__": 28 | build_test_env(r"C:\Users\jwely\Desktop\dnppytest") -------------------------------------------------------------------------------- /dnppy/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The DEVELOP national Program python package (dnppy) is a living codebase for the 3 | NASA DEVELOP program, our project partners, and the GIS programming community! 4 | DEVELOP is under NASA's "Capacity Building" program, and functions to educate its 5 | participants and partners in earth science and remote sensing. 6 | 7 | Please consult the dnppy documentation pages for further information! 8 | """ 9 | 10 | __version__ = "1.15.3b1" 11 | 12 | __author__ = ["Jwely", 13 | "djjensen", 14 | "Syntaf", 15 | "lancewatkins", 16 | "lmakely", 17 | "qgeddes", 18 | "Scott Baron", 19 | ] 20 | 21 | 22 | #from R_dnppy import * 23 | from convert import * 24 | from core import * 25 | from download import * 26 | from landsat import * 27 | from modis import * 28 | from radar import * 29 | from raster import * 30 | from solar import * 31 | from textio import * 32 | from tsa import * 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/DNtoReflectance_ArcInterface.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: module1 3 | # Purpose: 4 | # 5 | # Author: qgeddes 6 | # 7 | # Created: 26/04/2013 8 | # Copyright: (c) qgeddes 2013 9 | # Licence: 10 | #------------------------------------------------------------------------------- 11 | 12 | import DNtoReflectance 13 | arcpy.env.overwriteOutput = True 14 | arcpy.CheckOutExtension("Spatial") 15 | #Variables----------------------------------------------------------------- 16 | Lbands=arcpy.GetParameterAsText(0) 17 | MetaData =arcpy.GetParameterAsText(1) 18 | OutputType=arcpy.GetParameterAsText(2) 19 | OutputFolder=arcpy.GetParameterAsText(3) 20 | #-------------------------------------------------------------------------- 21 | 22 | #Reading Metadata that pertains to all bands 23 | Lbands=Lbands.split(";") 24 | 25 | DNtoReflectance.DNtoReflectance(Lbands,MetaData,OutputType,True,OutputFolder) -------------------------------------------------------------------------------- /undeployed/legacy/authentication.py: -------------------------------------------------------------------------------- 1 | 2 | import urllib 3 | import urllib2 4 | import cookielib 5 | 6 | # target login url 7 | auth_url = r"https://earthexplorer.usgs.gov/login/" 8 | file_url = r"http://earthexplorer.usgs.gov/download/3372/LE70410362003114EDC00/STANDARD" 9 | save_path = r"C:/test.html" 10 | 11 | username = "" 12 | password = "" 13 | 14 | # store cookies and make an oppener to store them 15 | cj = cookielib.CookieJar() 16 | opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) 17 | opener.addheaders = [("user-agent", "Testing")] 18 | 19 | #install opener 20 | urllib2.install_opener(opener) 21 | 22 | # password payload for sending 23 | payload = { 24 | 'op': 'login-main', 25 | 'username': username, 26 | 
    'password': password
27 | }
28 | 
29 | # encode the payload
30 | data = urllib.urlencode(payload)
31 | 
32 | # build a request object
33 | req = urllib2.Request(auth_url, data)
34 | resp = urllib2.urlopen(req)
35 | 
36 | contents = resp.read()
37 | 
--------------------------------------------------------------------------------
/undeployed/subjects/METRIC/ET/dnppy_limited/time_series/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | ======================================================================================
3 | dnppy.time_series
4 | ======================================================================================
5 | The "time_series" module is part of the "dnppy" package (develop national program py).
6 | This module houses a time_series class, which has bundled many methods for quick and
7 | easy handling of time_series data, usually on the scale of days to years, but it can also
8 | handle hourly, minutely, and 1 second resolution time_data pretty well.
9 | 
10 | If you wrote a function you think should be added to this module, or have an idea for one
11 | you wish was available, please email the Geoinformatics Fellow class or code it up yourself!
12 | 
13 | """
14 | 
15 | __author__ = ["Jeffry Ely, jeff.ely.08@gmail.com"]
16 | 
17 | 
18 | # local imports
19 | from .time_series import *
20 | from .csv_io import *
21 | 
22 | 
--------------------------------------------------------------------------------
/undeployed/legacy/Basic_Batch_Tools/Batch_Extract_By_Mask_Python.py:
--------------------------------------------------------------------------------
1 | import arcpy
2 | #Makes sure Spatial Analyst is turned on.
3 | if arcpy.CheckExtension("Spatial")== "Available":
4 |     arcpy.CheckOutExtension("Spatial")
5 |     from arcpy.sa import *
6 | else:
7 |     arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.")
8 | 
9 | #Input folder.
10 | folder_path= raw_input("Please enter the name and location of the folder containing the data to be masked: ")
11 | arcpy.env.workspace= r"%s" %folder_path
12 | 
13 | #Masking file.
14 | Mask= raw_input("Please enter the name of the masking file: ")
15 | Mask_file= r"%s" %Mask
16 | 
17 | #For all the rasters in the file, perform an extract by mask.
18 | for rasters in arcpy.ListRasters():
19 |     #Out name is the Output File name. EBM stands for "Extract By Mask".
20 | Out_Name= "EBM" + rasters[] 21 | print Out_Name 22 | outExtractByMask = ExtractByMask(rasters, Mask_file) 23 | outExtractByMask.save(Out_Name) 24 | -------------------------------------------------------------------------------- /dev/test/fetch_VA_shapefile.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | import urllib 5 | import zipfile 6 | 7 | 8 | def fetch_VA_shapefile(test_dir): 9 | """ 10 | downloads and unzips a shape file of VA for testing purposes 11 | """ 12 | 13 | # set up path names 14 | url = "http://www2.census.gov/geo/tiger/TIGER2013/COUSUB/tl_2013_51_cousub.zip" 15 | subdir = os.path.join(test_dir, "raw", "VA_shapefile") 16 | fname = os.path.join(subdir, "VA_shapefile.zip") 17 | 18 | if not os.path.exists(subdir): 19 | os.makedirs(subdir) 20 | 21 | # download the file 22 | urllib.urlretrieve(url, fname) 23 | 24 | # unzip it 25 | with zipfile.ZipFile(fname, "r") as z: 26 | z.extractall(os.path.join(test_dir, subdir)) 27 | 28 | # delete the zip file 29 | os.remove(fname) 30 | 31 | print("Downloaded Virginia shapefile to {0}".format(os.path.join(test_dir,subdir))) 32 | shapefile_path = os.path.join(test_dir, subdir, "tl_2013_51_cousub.shp") 33 | 34 | return shapefile_path -------------------------------------------------------------------------------- /dnppy/convert/lib/datatype_library.csv: -------------------------------------------------------------------------------- 1 | Unique_Name, projectionID, A, B, C, D, E, F, download_source 2 | GPM_IMERG_0.1_GLOBAL, EPSG-4326, -179.95, 0.0, 0.1, -89.95, 0.1, 0.0, "ftp://jsimpson.pps.eosdis.nasa.gov" 3 | GPM_IMERG_0.15_GLOBAL,EPSG-4326, -179.95, 0.0, 0.15, -89.95, 0.15, 0.0, "ftp://jsimpson.pps.eosdis.nasa.gov" 4 | TRMM_0.25_GLOBAL, EPSG-4326, -179.875, 0.0, 0.25, -49.85, 0.25, 0.0, "ftp://trmmopen.gsfc.nasa.gov/trmmdata/ByDate/V07/" 5 | TRMM_0.5_GLOBAL, EPSG-4326, -179.875, 0.0, 0.5, -49.85, 0.5, 0.0, "ftp://trmmopen.gsfc.nasa.gov/trmmdata/ByDate/V07/" 6 | TRMM_1.0_GLOBAL, EPSG-4326, -179.875, 0.0, 1.0, -49.85, 1.0, 0.0, "ftp://trmmopen.gsfc.nasa.gov/trmmdata/ByDate/V07/" 7 | TRMM_5.0_GLOBAL, EPSG-4326, -179.875, 0.0, 5.0, -49.85, 5.0, 0.0, "ftp://trmmopen.gsfc.nasa.gov/trmmdata/ByDate/V07/" 8 | SMOS_25k_GLOBAL, EPSG-4326, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, "cp34-bec.cmima.csic.es/land-datasets/" 9 | MPE_HRAP_CONUS, SR-ORG-7744, -2767773.750000, 7828.068359, 0.0, 1666070.375000, 0.0, -7824.255859, "http://water.weather.gov/precip/p_download_new" 10 | -------------------------------------------------------------------------------- /dnppy/solar/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The solar module contains just one class definition, with many associated methods to perform a plethora 3 | of calculations based around the sun-earth relationship. It replicates the functionality of the `common 4 | NOAA excel calculator`_, including adjustments due to atmospheric refraction. Each method may be run 5 | independently as the user wishes, as each method will automatically invoke any other calculations that 6 | must be performed first, and it will not unnecessarily redo calculations that have already been completed. 7 | 8 | A solar class object allows scalar or numpy array inputs for latitude and longitude values for per-pixel 9 | computation and improved accuracy when scene-center calculations are insufficient. 
The solar class is not
10 | yet capable of ingesting digital elevation models to calculate slope and aspect dependent parameters, but
11 | it is a planned addition.
12 | 
13 | .. _common NOAA excel calculator: http://www.esrl.noaa.gov/gmd/grad/solcalc/calcdetails.html
14 | """
15 | from solar import solar
16 | 
--------------------------------------------------------------------------------
/dev/test/fetch_test_precip.py:
--------------------------------------------------------------------------------
1 | __author__ = 'jwely'
2 | 
3 | 
4 | from dnppy import download
5 | from datetime import datetime
6 | 
7 | import os
8 | 
9 | def fetch_test_precip(test_dir):
10 |     """
11 |     fetches both TRMM and GPM test data
12 |     """
13 | 
14 |     raw_dir = os.path.join(test_dir, "raw")
15 |     if not os.path.exists(raw_dir):
16 |         os.makedirs(raw_dir)
17 | 
18 |     print("Downloading GPM IMERG test data!")
19 |     gpmdir = os.path.join(test_dir,"raw","GPM")
20 |     download.fetch_GPM_IMERG(datetime(2015,4,1),
21 |                              datetime(2015,4,2),
22 |                              outdir = gpmdir,
23 |                              product = "late")
24 | 
25 |     print("Downloading TRMM 3b42 test data!")
26 |     trmmdir = os.path.join(test_dir,"raw","TRMM")
27 |     download.fetch_TRMM(datetime(2014,1,1),
28 |                         datetime(2014,1,2),
29 |                         outdir = trmmdir,
30 |                         product_string = "3B42")
31 | 
32 | 
33 | if __name__ == "__main__":
34 |     fetch_test_precip(r"C:\Users\jwely\Desktop\dnppytest")
35 | 
--------------------------------------------------------------------------------
/undeployed/legacy/line_count.py:
--------------------------------------------------------------------------------
1 | 
2 | import os
3 | 
4 | def lines_in_py_package(dirname):
5 | 
6 |     python_filelist = []
7 |     for r,d,f in os.walk(dirname):
8 |         for afile in f:
9 |             if afile.endswith(".py"):
10 |                 python_filelist.append(os.path.join(r, afile))
11 | 
12 |     line_total = 0
13 | 
14 |     for pyfile in python_filelist:
15 |         lines = lines_in_file(pyfile)
16 |         line_total += lines
17 |         print("{0} {1}".format(pyfile.replace(dirname,"").ljust(90), lines))
18 | 
19 |     print("-"*98)
20 |     print("{0} {1}".format("Total line count".ljust(90), line_total))
21 |     return python_filelist
22 | 
23 | 
24 | 
25 | 
26 | def lines_in_file(fname):
27 |     """ returns the number of non-trivial lines (more than 3 non-space characters) in the input file """
28 | 
29 |     c = 0
30 |     with open(fname) as f:
31 |         for l in f:
32 |             if len(l.replace(" ","")) > 3:
33 |                 c += 1
34 |     return c
35 | 
36 | 
37 | 
38 | if __name__ == "__main__":
39 |     files = lines_in_py_package(r"C:\Users\Jeff\Desktop\Github\dnppy")
40 | 
41 | 
42 | 
--------------------------------------------------------------------------------
/undeployed/subjects/chunking/chunk.py:
--------------------------------------------------------------------------------
1 | __author__ = 'jwely'
2 | 
3 | 
4 | import numpy
5 | 
6 | class chunk():
7 |     """
8 |     creates an individual chunk object (vertical slice of a 2D or 3D raster image).
9 | 
10 |     Individual chunks do NOT store metadata! Any metadata should be managed
11 |     at the chunk_bundle level. Object chunk_bundles are made of chunk lists,
12 |     ordered by index number.
13 |     """
14 | 
15 |     def __init__(self, index, data = None):
16 |         """ initializes a chunk object.
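        :param index: integer position of this chunk within its parent chunk_bundle
        :param data: a numpy array (or numpy masked array) holding this slice's values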
""" 17 | 18 | self.index = index # integer value denoting position within chunk_bundle 19 | self.data = data # a numpy masked_array type 20 | 21 | return 22 | 23 | def __getitem__(self): 24 | return self.data 25 | 26 | 27 | def __setitem__(self, item): 28 | 29 | con1 = isinstance(item, numpy.ma.core.MaskedArray) 30 | con2 = isinstance(item, numpy.ndarray) 31 | 32 | if con1 or con2: 33 | self.data = item 34 | 35 | if hasattr(item, "index"): 36 | self.index = item.index 37 | 38 | return 39 | -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/LTKCloudDetector_ArcInterface.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: module1 3 | # Purpose: 4 | # 5 | # Author: qgeddes 6 | # 7 | # Created: 26/04/2013 8 | # Copyright: (c) qgeddes 2013 9 | # Licence: 10 | #------------------------------------------------------------------------------- 11 | 12 | import LTKCloudDetector 13 | 14 | Band1path=arcpy.GetParameterAsText(0) 15 | Band3path=arcpy.GetParameterAsText(1) 16 | Band4path=arcpy.GetParameterAsText(2) 17 | Band5path=arcpy.GetParameterAsText(3) 18 | 19 | pixelvalue=arcpy.GetParameterAsText(4) 20 | MetaData =arcpy.GetParameterAsText(5) 21 | OutputFolder =arcpy.GetParameterAsText(6) 22 | OutputFileName =arcpy.GetParameterAsText(7) 23 | 24 | #Tool input is opposite of function input 25 | SaveRefl = arcpy.GetParameter(8) 26 | 27 | ReflOutFolder=arcpy.GetParameterAsText(9) 28 | 29 | L7bands=[Band1path,Band3path,Band4path,Band5path] 30 | 31 | arcpy.env.scratchWorkspace=OutputFolder 32 | 33 | LTKCloudDetector.LTKCloudDetector(L7bands,pixelvalue,OutputFolder+'\\'+OutputFileName,MetaData,SaveRefl,ReflOutFolder) -------------------------------------------------------------------------------- /dnppy/download/datatype.py: -------------------------------------------------------------------------------- 1 | __author__ = ['jwely'] 2 | 3 | __all__ = ["datatype"] 4 | 5 | class datatype(): 6 | """ 7 | simple class for dnppy supported download and convert 8 | NASA/NOAA/WeatherService/USGS data types 9 | """ 10 | 11 | def __init__(self, name, projectionID = None, geotransform = None): 12 | """ 13 | Inputs: 14 | name (str) the product name, (descriptive) 15 | projectionID (str) projection ID according to prj files 16 | downloaded from "spatialreference.org" 17 | geotransform (list floats) geotransform array, lsit of 6 18 | float values in the gdal ordering: 19 | [top left x, 20 | w-e pixel resolution, 21 | 0, 22 | top left y, 23 | 0, 24 | n-s pixel resolution (negative value)] 25 | """ 26 | 27 | self.name = name 28 | self.projectionID = projectionID 29 | self.geotransform = geotransform 30 | 31 | -------------------------------------------------------------------------------- /dnppy/raster/null_define.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["null_define"] 3 | 4 | import arcpy 5 | from enf_rastlist import enf_rastlist 6 | 7 | def null_define(rastlist, NoData_Value): 8 | """ 9 | Simple batch NoData setting function. Makes raster data more arcmap viewing friendly 10 | 11 | Function inputs a list of raster (usually tifs) files and sets no data values. This 12 | function does not actually change the raster values in any way, and simply defines which 13 | numerical values to be considered NoData in metadata. 
14 | 
15 |     :param rastlist: list of rasters for which to set nodata value
16 |     :param NoData_Value: Value to declare as NoData (usually 0 or -9999)
17 | 
18 |     :return rastlist: returns list of modified files
19 |     """
20 | 
21 |     rastlist = enf_rastlist(rastlist)
22 | 
23 |     # iterate through each file in the filelist and set nodata values
24 |     for rastname in rastlist:
25 | 
26 |         arcpy.SetRasterProperties_management(rastname,data_type="#",statistics="#",
27 |                                              stats_file="#",nodata="1 "+str(NoData_Value))
28 | 
29 |         print("Set nulls in {0}".format(rastname))
30 |     return rastlist
--------------------------------------------------------------------------------
/dnppy/textio/test_data/conf_test.json:
--------------------------------------------------------------------------------
1 | [
2 |     [
3 |         "param_name",
4 |         "param_type",
5 |         "param_value"
6 |     ],
7 |     [
8 |         "test long title for just one single str",
9 |         "",
10 |         "some test string"
11 |     ],
12 |     [
13 |         "test_bool",
14 |         "",
15 |         true
16 |     ],
17 |     [
18 |         "test_float",
19 |         "",
20 |         1.12345
21 |     ],
22 |     [
23 |         "test_int",
24 |         "",
25 |         1
26 |     ],
27 |     [
28 |         "test_long",
29 |         "",
30 |         1000000000000000000
31 |     ],
32 |     [
33 |         "test_list",
34 |         "",
35 |         [
36 |             "a",
37 |             "b",
38 |             75,
39 |             "d"
40 |         ]
41 |     ],
42 |     [
43 |         "test_dict",
44 |         "",
45 |         {
46 |             "string_key": 0,
47 |             "1": "string_value"
48 |         }
49 |     ],
50 |     [
51 |         "test_tuple",
52 |         "",
53 |         [
54 |             1,
55 |             2
56 |         ]
57 |     ]
58 | ]
--------------------------------------------------------------------------------
/undeployed/README.md:
--------------------------------------------------------------------------------
1 | This folder holds code that is packaged with dnppy for good record keeping, but is NOT installed with the module. The code in this directory should be distinct from dnppy, but cannot be treated as such due to limitations imposed by NASA open source software release protocols. There are a few different categories that code like this falls within, and it is organized as such.
2 | 
3 | #### /proj_code
4 | NASA DEVELOP project teams contribute specialized code to this directory for access by project partners. Project code is evaluated for its generality and moved up to the subjects folder, then into dnppy if some generalization is possible. Code in this directory may have a very narrow focus on the needs of the project partners and lack scalability, but may have utility for specific users or have syntactical reference value. Code in this directory may be in any programming language.
5 | 
6 | #### /subjects
7 | This directory is filled with unstructured code for a general purpose that could be useful to refactor and add to the dnppy framework in the near future.
8 | 
9 | #### /legacy
10 | This is old python code from before our days of configuration management that might have very useful snippets to pull into dnppy.
11 | 
--------------------------------------------------------------------------------
/undeployed/subjects/DSI/Precip_180DaySum.py:
--------------------------------------------------------------------------------
1 | import os
2 | import gc
3 | import arcpy
4 | from arcpy import env
5 | from arcpy import sa
6 | from arcpy.sa import *
7 | env.overwriteOutput = True
8 | gc.disable()
9 | arcpy.CheckOutExtension("Spatial")
10 | env.workspace = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script" ## WORKSPACE
11 | interp_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip" ## Folder containing interpolated precip rasters
12 | Rolling_sum_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip/Sum_precip" ##Folder to save precip sum rasters
13 | 
14 | 
15 | 
16 | 
17 | env.workspace = interp_F
18 | sumlist = arcpy.ListRasters("*int.tif")
19 | print sumlist
20 | for octad in range(len(sumlist)/8):
21 |     firstraster = sumlist[octad*8]
22 |     if octad*8 > 180:
23 |         last180days = sumlist[(octad*8)-179:(octad*8)+1]
24 |         cellstats = CellStatistics(last180days, "SUM", "DATA")
25 |         cellstats.save(Rolling_sum_F + "\\" + str(firstraster[0:-7]) + "_180Day_Sum.tif")
26 |         print firstraster + " 180 Day Summed"
27 | print "FIN"
28 | 
--------------------------------------------------------------------------------
/undeployed/subjects/DSI/Precip_RollingSum.py:
--------------------------------------------------------------------------------
1 | import os
2 | import gc
3 | import arcpy
4 | from arcpy import env
5 | from arcpy import sa
6 | from arcpy.sa import *
7 | env.overwriteOutput = True
8 | gc.disable()
9 | arcpy.CheckOutExtension("Spatial")
10 | env.workspace = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script" ## WORKSPACE
11 | interp_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip" ## Folder containing interpolated precip rasters
12 | Rolling_sum_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip/Sum_precip" ##Folder to save precip sum rasters
13 | 
14 | 
15 | 
16 | 
17 | env.workspace = interp_F
18 | sumlist = arcpy.ListRasters("*int.tif")
19 | print sumlist
20 | for octad in range(len(sumlist)/8):
21 |     firstraster = sumlist[octad*8]
22 |     if octad*8 > 180:
23 |         last180days = sumlist[(octad*8)-179:(octad*8)+1]
24 |         cellstats = CellStatistics(last180days, "SUM", "DATA")
25 |         cellstats.save(Rolling_sum_F + "\\" + str(firstraster[0:-7]) + "_180Day_Sum.tif")
26 |         print firstraster + " 180 Day Summed"
27 | print "FIN"
28 | 
--------------------------------------------------------------------------------
/undeployed/legacy/Landsat/L8_surf_temp.py:
--------------------------------------------------------------------------------
1 | 
2 | """
3 | quick landsat 8 surface temperature code, doesn't work yet
4 | """
5 | 
6 | import math
7 | def surface_temp(therm_rad, path_rad, nbt, nbe, sky_rad, K1, K2):
8 |     """
9 |     calculates the surface temperature with a modified Planck equation
10 |     following Markham and Barker.
11 | 
12 |                    K2
13 |     Ts = ----------------------
14 |          ln((enb * K1 / Rc) +1)
15 | 
16 |     where
17 |     K1  = constant 1, landsat specific
18 |     K2  = constant 2, landsat specific
19 |     Rc  = corrected thermal radiance
20 |     enb = narrow band emissivity for thermal sensor wavelength band
21 | 
22 |     and
23 | 
24 |           Lt6 - Rp
25 |     Rc = ---------- - (1 - enb) * Rsky
26 |             tnb
27 | 
28 |     where
29 |     Lt6 = spectral radiance of landsat band 6
30 |     Rp  = path radiance in the 10.4 to 12.5 um band
31 |     Rsky= narrow band downward thermal radiation from clear sky
32 |     tnb = narrow band transmissivity of air (10.4 to 12.5 um range)
33 |     """
34 | 
35 |     # corrected thermal radiance (Rc above, with nbt = tnb and nbe = enb)
36 |     correct_rad = ((therm_rad - path_rad) / nbt) - ((1 - nbe) * sky_rad)
37 | 
38 |     # modified Planck equation (Ts above)
39 |     surface_temp = K2 / math.log(((nbe * K1) / correct_rad) + 1)
40 |     return surface_temp
41 | 
--------------------------------------------------------------------------------
/undeployed/legacy/sort.py:
--------------------------------------------------------------------------------
1 | import os, shutil
2 | from dnppy import core
3 | 
4 | def sort(filelist, delim = "_", recursive = False):
5 |     """
6 |     Simple function to sort files into folders based on common leading
7 |     strings in the filenames
8 | 
9 |     use a custom delimiter by setting "delim", and set "recursive" to True
10 |     if you need to go many directories deep and do not mind renaming files
11 |     """
12 | 
13 |     for filename in filelist:
14 |         head,tail = os.path.split(filename)
15 | 
16 |         tail_list = tail.split(delim)
17 |         move_dir = os.path.join(head, tail_list[0])
18 | 
19 |         if not os.path.exists(move_dir):
20 |             os.makedirs(move_dir)
21 | 
22 |         if recursive:
23 |             shutil.move(filename, os.path.join(move_dir, "_".join(tail_list[1:])))
24 |         else:
25 |             shutil.move(filename, os.path.join(move_dir, "_".join(tail_list)))
26 |         print("Moved file '{0}' ".format(filename))
27 | 
28 |     print("Moved all files!")
29 |     return
30 | 
31 | 
32 | if __name__ == "__main__":
33 | 
34 |     filelist = core.list_files(False, r"C:\Users\jwely\Desktop\troubleshooting\lauren_organize")
35 |     sort(filelist)
36 | 
37 | 
38 | 
--------------------------------------------------------------------------------
/undeployed/subjects/DSI/Drought_Severity.py:
--------------------------------------------------------------------------------
1 | import os
2 | import gc
3 | import arcpy
4 | from arcpy import env
5 | from arcpy.sa import *
6 | env.overwriteOutput = True
7 | gc.disable()
8 | arcpy.CheckOutExtension("Spatial")
9 | env.workspace = "GENERAL WORKSPACE" ##SET AS PARAMETER
10 | 
11 | #Sum rasters
12 | #change projection
13 | #add color ramp
14 | 
15 | i = 0
16 | j = 1
17 | k = 2
18 | dsilist = arcpy.ListRasters("*scaled.tif")
19 | # step through the scaled rasters three at a time (LST, NDVI, precip)
20 | while k < len(dsilist):
21 |     inRast1 = dsilist[i]
22 |     inRast2 = dsilist[j]
23 |     inRast3 = dsilist[k]
24 |     LST = Times(0.25,inRast1)
25 |     NDVI = Times(0.25,inRast2)
26 |     Precip = Times(0.5,inRast3)
27 |     cellstats = CellStatistics([LST,NDVI,Precip],"SUM", "NODATA")
28 |     cellstats.save(inRast1[0:-11]+ "dsi.tif")
29 |     print "Cell Stats: " + inRast1[0:-11] + " Complete"
30 |     i = i + 3
31 |     j = j + 3
32 |     k = k + 3
33 | 
34 | 
35 | 
36 | 
37 | 
38 | 
39 | 
40 | 
41 | 
42 | 
43 | 
44 | 
45 | 
--------------------------------------------------------------------------------
/docs/source/modules/radar.rst:
--------------------------------------------------------------------------------
1 | radar
2 | =====
3 | 
4 | .. automodule:: dnppy.radar
5 |     :members:
6 | 
7 | Examples
8 | --------
9 | 
10 | .. rubric:: Building a header file for UAVSAR data
11 | 
12 | UAVSAR data comes in .grd files that require a text header file with metadata information in order to be displayed in GIS or image processing software. This simple script will read in a folder containing the data and create a header file for each .grd file in it.
13 | 
14 | .. code-block:: python
15 | 
16 |     from dnppy import radar
17 | 
18 |     folder = r"C:\folder"
19 |     radar.create_header(folder)
20 | 
21 | .. rubric:: Converting backscatter values to decibels
22 | 
23 | Raw radar data has units of backscatter for its given polarization, but it is often more useful to convert these units to decibels (dB). This script will create a new tiff raster in decibel units from the original .grd file (which must also include the .hdr file).
24 | 
25 | .. code-block:: python
26 | 
27 |     from dnppy import radar
28 | 
29 |     filename = r"C:\folder\file.grd"
30 |     radar.decibel_convert(filename)
31 | 
32 | Code Help
33 | ---------
34 | 
35 | .. automodule:: dnppy.radar.create_header
36 |     :members:
37 | 
38 | .. automodule:: dnppy.radar.decibel_convert
39 |     :members:
40 | 
--------------------------------------------------------------------------------
/undeployed/subjects/DSI/Scaled_Precip.py:
--------------------------------------------------------------------------------
1 | import os
2 | import gc
3 | import arcpy
4 | from arcpy import env
5 | from arcpy import sa
6 | from arcpy.sa import *
7 | env.overwriteOutput = True
8 | gc.disable()
9 | arcpy.CheckOutExtension("Spatial")
10 | env.workspace = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script" ## WORKSPACE
11 | Rolling_sum_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip/Sum_precip" ##Folder containing precip sum rasters
12 | 
13 | 
14 | 
15 | env.workspace = Rolling_sum_F
16 | scalelist = arcpy.ListRasters()
17 | print scalelist
18 | for files in scalelist:
19 |     minRast = arcpy.GetRasterProperties_management(files, "MINIMUM")
20 |     minvalue = (minRast.getOutput(0))
21 |     maxRast = arcpy.GetRasterProperties_management(files, "MAXIMUM")
22 |     maxvalue = (maxRast.getOutput(0))
23 |     print maxvalue + "_max"
24 |     print minvalue + "_min"
25 |     num = Minus(files, float(minvalue))
26 |     denom = float(maxvalue) - float(minvalue)
27 |     print str(denom)
28 |     scaledLST = arcpy.sa.Divide(num,denom)
29 |     scaledLST.save("C:\\Users\\torne\\Documents\\GP_Ag\\Data\\Precipitation\\180day_script\\Raw_precip\\Inter_precip\\Sum_precip\\Scale_sum" + "\\" + files[0:-7] + "precip_scaled.tif")
30 | print "FIN"
31 | 
--------------------------------------------------------------------------------
/dnppy/raster/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | The raster module is used to analyze raster data from non-specific sources. In an idealized
3 | workflow, use of the convert and download modules can help users obtain and pre-process data
4 | for use with functions in the raster module. The module's capabilities include statistics
5 | generation, subsetting, reprojecting, null data management, correction functions, and others.
6 | Top level functions in the raster module should all have batch processing capabilities built in.
7 | 8 | Requires ``arcpy`` 9 | """ 10 | 11 | __author__ = ["Jwely", 12 | "lmakely"] 13 | 14 | 15 | from apply_linear_correction import * 16 | from clip_and_snap import * 17 | from clip_to_shape import * 18 | from degree_days import * 19 | from degree_days_accum import * 20 | from enf_rastlist import * 21 | from from_numpy import * 22 | from gap_fill_temporal import * 23 | from gap_fill_interpolate import * 24 | from in_dir import * 25 | from is_rast import * 26 | from many_stats import * 27 | from metadata import * 28 | from new_mosaic import * 29 | from null_define import * 30 | from null_set_range import * 31 | from project_resample import * 32 | from raster_fig import * 33 | from raster_overlap import * 34 | from spatially_match import * 35 | from to_numpy import * 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /undeployed/legacy/Misc/OceanColorGeoreference.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: module1 3 | # Purpose: 4 | # 5 | # Author: qgeddes 6 | # 7 | # Created: 11/06/2013 8 | # Copyright: (c) qgeddes 2013 9 | # Licence: 10 | #------------------------------------------------------------------------------- 11 | 12 | import arcpy 13 | import numpy 14 | 15 | InputRaster= arcpy.GetParameterAsText(0) 16 | OutputFolder= arcpy.GetParameterAsText(1) 17 | OutputName= arcpy.GetParameterAsText(2) 18 | #--------------------------------------- 19 | #INPUTS 20 | #InputRaster="C:\\Users\\qgeddes\\Downloads\\temptrash\\A2011008.L3m_DAY_CHL_chlor_a_4km.hdf" 21 | #OutputRaster="C:\\Users\\qgeddes\\Downloads\\temptrash\\test7.tif" 22 | #------------------------------------------------ 23 | 24 | 25 | OutputRaster=OutputFolder+"\\"+OutputName 26 | 27 | proj="""GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]""" 28 | pnt= arcpy.Point(-180,-90) 29 | 30 | a=arcpy.RasterToNumPyArray(InputRaster) 31 | xsize=360./a.shape[1] 32 | ysize=180./a.shape[0] 33 | newRaster=arcpy.NumPyArrayToRaster(a,pnt,xsize,ysize) 34 | 35 | 36 | newRaster.save(OutputRaster) 37 | 38 | arcpy.DefineProjection_management(OutputRaster,proj) 39 | 40 | -------------------------------------------------------------------------------- /dnppy/core/install_from_wheel.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import pip 4 | import sys 5 | import platform 6 | import os 7 | 8 | __all__ = ["install_from_wheel"] 9 | 10 | 11 | def install_from_wheel(whl_filepath): 12 | """ 13 | This script can be used to add python libraries to the arcmap 14 | installation of python from wheel files. A great source of wheel files 15 | can be found at [http://www.lfd.uci.edu/~gohlke/pythonlibs/] 16 | 17 | :param whl_filepath: the full local filepath to a downloaded wheel file. 
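    Example (a minimal sketch; the wheel path is hypothetical, and the wheel
    must match your python, e.g. a "cp27 ... win32" wheel for 32 bit python 2.7)::

        install_from_wheel(r"C:\Downloads\numpy-1.9.2-cp27-none-win32.whl")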
18 | """ 19 | 20 | head, tail = os.path.split(whl_filepath) 21 | 22 | py_architecture = platform.architecture()[0] 23 | py_version = "".join(map(str,sys.version_info[0:2])) 24 | 25 | # make sure the wheel file appears to be the correct version 26 | if py_architecture in tail and py_version in tail: 27 | pip.main(["install", whl_filepath]) 28 | else: 29 | if py_architecture == "64bit": 30 | ending = "_amd64.whl" 31 | else: 32 | ending = "32.whl" 33 | 34 | filesuffix = "-cp{0}-none-win{1}".format(py_version, ending) 35 | 36 | raise Exception('''Bad wheel file for your version and architecture! 37 | the wheel file you want should end with "{0}"'''.format(filesuffix)) 38 | return 39 | 40 | 41 | if __name__ == "__main__": 42 | install_from_wheel("bad_test") 43 | 44 | -------------------------------------------------------------------------------- /dnppy/modis/define_projection.py: -------------------------------------------------------------------------------- 1 | # local imports 2 | from dnppy import raster 3 | 4 | # arcpy imports 5 | import arcpy 6 | if arcpy.CheckExtension('Spatial')=='Available': 7 | arcpy.CheckOutExtension('Spatial') 8 | arcpy.env.overwriteOutput = True 9 | 10 | 11 | def define_projection(file_list): 12 | """ 13 | Give raster(s) proper MODIS sinusoidal projection metadata. 14 | Some MODIS data does not have an adequately defined projection 15 | for some software like arcmap to use 16 | 17 | :param file_list: a list of one or more filepaths 18 | """ 19 | 20 | # accept list of file_list 21 | file_list = raster.enf_rastlist(file_list) 22 | 23 | # custom text for MODIS sinusoidal projection 24 | proj = """PROJCS["Sinusoidal", 25 | GEOGCS["GCS_Undefined", 26 | DATUM["D_Undefined", 27 | SPHEROID["User_Defined_Spheroid",6371007.181,0.0]], 28 | PRIMEM["Greenwich",0.0], 29 | UNIT["Degree",0.017453292519943295]], 30 | PROJECTION["Sinusoidal"], 31 | PARAMETER["False_Easting",0.0], 32 | PARAMETER["False_Northing",0.0], 33 | PARAMETER["Central_Meridian",0.0], 34 | UNIT["Meter",1.0]]""" 35 | 36 | for filename in file_list: 37 | arcpy.DefineProjection_management(filename, proj) 38 | 39 | return 40 | -------------------------------------------------------------------------------- /dnppy/convert/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | dnppy convert contains functions for conversion between filetypes. Usually between 3 | a file type distributed by a NASA Distributed Active Archive Center (DAAC) such as 4 | NetCDF or HDF5 to geotiff. Due to differences in metadata standards, many of these 5 | functions only operate successfully data from a specific source. 
6 | 7 | Requires ``arcpy`` 8 | """ 9 | 10 | __author__ = ["Jwely", 11 | "djjensen"] 12 | 13 | 14 | from datatype_library import * 15 | from extract_archive import * 16 | from extract_GCMO_NetCDF import * 17 | from extract_GPM_IMERG import * 18 | from extract_GRACE_DA_binary import * 19 | from extract_MPE_NetCDF import * 20 | from extract_SMOS_NetCDF import * 21 | from extract_TRMM_HDF import * 22 | from extract_TRMM_NetCDF import * 23 | from HDF5_to_numpy import * 24 | from ll_to_utm import * 25 | from nongrid_data import * 26 | 27 | # special members 28 | from _convert_dtype import * 29 | from _extract_HDF_datatype import * 30 | from _extract_HDF_layer_data import * 31 | from _extract_NetCDF_datatype import * 32 | from _extract_NetCDF_layer_data import * 33 | from _gdal_dataset_to_tif import * 34 | 35 | # file type declarations 36 | HDF_EXTENSIONS = ["hdf", "h4", "hdf4", "he2", "h5", "hdf5", "he5", "rt-h5", 37 | "HDF", "H4", "HDF4", "HE2", "H5", "HDF5", "HE5", "RT-H5"] 38 | 39 | NETCDF_EXTENSIONS = ["nc", "cdf", 40 | "NC", "CDF"] 41 | 42 | -------------------------------------------------------------------------------- /undeployed/subjects/R_Code/Netcdf_to_Raster_Script.R: -------------------------------------------------------------------------------- 1 | library(chron) 2 | library(RColorBrewer) 3 | library(lattice) 4 | library(ncdf) 5 | library(ncdf4) 6 | 7 | getwd() 8 | workdir <- "C:/Users/jessica.sutton/Documents/DATA/Precipitation/1984_2014/1984" 9 | setwd(workdir) 10 | 11 | ncname <- "PERSIANN-CDR_v01r01_19840101_c20140523" 12 | ncfname <- paste(ncname, ".nc", sep = "") 13 | 14 | # open a NetCDF file 15 | ncin <- nc_open(ncfname) ### open the netcdf 16 | ### look at the metadata to determine the extent, cell size, resolution, etc. 17 | data <- ncvar_get( ncin, 'precipitation') ## get the precipitation layer out of the netcdf 18 | 19 | 20 | library(raster) 21 | 22 | # create a new (not projected) RasterLayer with cellnumbers as values 23 | e <- extent(0, 360, -60,60) ### define the extent of the data <- this is the original extent, not the cropped version 24 | r <- raster(data) ### change the precipitation layer into a raster 25 | 26 | r4 <- setExtent(r, e) ### set the extent of the raster 27 | print(r4) ### check to make sure the extent, number of cells, etc. is all correct 28 | final <- crop(r4, extent(r4,81,320,501,860))### crop the raster for the location you want <-- based on the cell locations not lat and long 29 | 30 | 31 | projection(final) <- "+proj=utm +datum=WGS84" ### assign the raster a projection 32 | 33 | 34 | writeRaster(final,"C:/Users/jessica.sutton/Documents/DATA/test.tif",overwrite=TRUE) ### save the raster as a tif 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /dnppy/convert/extract_TRMM_NetCDF.py: -------------------------------------------------------------------------------- 1 | 2 | # dnppy imports 3 | from dnppy import core 4 | 5 | # arcpy imports 6 | import arcpy 7 | if arcpy.CheckExtension('Spatial')=='Available': 8 | arcpy.CheckOutExtension('Spatial') 9 | arcpy.env.overwriteOutput = True 10 | 11 | 12 | def TRMM_NetCDF(filelist, outdir): 13 | """ 14 | Function converts NetCDFs to tiffs. Designed to work with TRMM data 15 | downloaded from GLOVIS 16 | 17 | :param filelist: list of '.nc' files to convert to tifs. 18 | :param outdir: directory to which tif files should be saved 19 | 20 | :return output_filelist: list of local filepaths of extracted data. 21 | """ 22 | 23 | # Set up initial parameters. 
24 | arcpy.env.workspace = outdir 25 | filelist = core.enf_list(filelist) 26 | output_filelist = [] 27 | 28 | # convert every file in the list "filelist" 29 | for infile in filelist: 30 | 31 | # use arcpy module to make raster layer from netcdf 32 | arcpy.MakeNetCDFRasterLayer_md(infile, "r", "longitude", "latitude", "r", "", "", "BY_VALUE") 33 | arcpy.CopyRaster_management("r", infile[:-3] + ".tif", "", "", "", "NONE", "NONE", "") 34 | output_filelist.append(infile[:-3] + ".tif") 35 | print('Converted netCDF file ' + infile + ' to Raster') 36 | 37 | return output_filelist 38 | 39 | if __name__ == "__main__": 40 | afile = "" 41 | outdir = "" 42 | TRMM_NetCDF(afile, outdir) 43 | -------------------------------------------------------------------------------- /docs/source/trub/git.rst: -------------------------------------------------------------------------------- 1 | GitHub Basics 2 | ============= 3 | 4 | Git is one of several "version control systems", and the industry standard for open source software. GitHub is the specific implementation of git where a complete copy of a repository is kept online for easy remote access. It is both a technology and a workflow that allows many people to contribute to a single set of code easily, with the reason behind every change made available for posterity. 5 | 6 | .. rubric:: Introductory Videos 7 | 8 | Below is a set of a few short videos that introduce the concept of Git at the most basic level. 9 | 10 | * `Version Control`_ 11 | * `What is Git?`_ 12 | * `Get going with Git`_ 13 | * `Quick wins with git`_ 14 | 15 | .. rubric:: Getting started with GitHub Desktop 16 | 17 | GitHub is a specific implementation of Git that has an abundance of user documentation and help out on the web. GitHub has a nice `desktop client`_ to acquaint new users with the workflow, with quick access to a console for command line interface. Consult the `GitHub Desktop User Guides`_ for a walk-through type lesson. 18 | 19 | .. _Version Control: https://www.youtube.com/watch?v=8oRjP8yj2Wo 20 | .. _What is Git?: https://www.youtube.com/watch?v=uhtzxPU7Bz0 21 | .. _Get going with Git: https://www.youtube.com/watch?v=wmnSyrRBKTw 22 | .. _Quick wins with git: https://www.youtube.com/watch?v=wmnSyrRBKTw 23 | .. _desktop client: https://desktop.github.com/ 24 | .. 
_GitHub Desktop User Guides: https://desktop.github.com/
--------------------------------------------------------------------------------
/undeployed/legacy/Basic_Batch_Tools/Batch_DBF_to_CSV.py:
--------------------------------------------------------------------------------
1 | #---------------------------------------------
2 | #Name: DBF to CSV Conversion
3 | #Purpose: Convert DBF files into CSV's to enable merging and other processes
4 | #Notes: Utilizes dbfpy module, download here - http://dbfpy.sourceforge.net/
5 | #       Fill in text where denoted by all Caps
6 | #Created: 02/24/2014
7 | #---------------------------------------------
8 | import csv, arcpy, sys, os
9 | from dbfpy import dbf
10 | from arcpy import env
11 | from arcpy.sa import *
12 | 
13 | ###############################################
14 | #Input user parameters
15 | arcpy.env.workspace = "INSERT PATH TO DBF FOLDER"
16 | path = "INSERT PATH TO DBF FOLDER"
17 | ###############################################
18 | 
19 | arcpy.env.overwriteOutput = True
20 | 
21 | DBFfiles = arcpy.ListFiles("*.dbf")
22 | 
23 | for filename in DBFfiles:
24 |     print ("Converting DBF file: " + filename + " to CSV")
25 |     inDBFfiles = arcpy.env.workspace + "/" + filename
26 |     fileroot = filename[0:(len(filename)-4)]
27 |     csv_fn = fileroot + ".csv"
28 |     with open(csv_fn,'wb') as csvfile:
29 |         in_db = dbf.Dbf(inDBFfiles)
30 |         out_csv = csv.writer(csvfile)
31 |         names = []
32 |         for field in in_db.header.fields:
33 |             names.append(field.name)
34 |         out_csv.writerow(names)
35 |         for rec in in_db:
36 |             out_csv.writerow(rec.fieldData)
37 |         in_db.close()
38 | 
39 | print "DONE"
40 | 
--------------------------------------------------------------------------------
/undeployed/subjects/R_Code/Loop_Netcdf_to_Raster_Script.R:
--------------------------------------------------------------------------------
1 | library(chron)
2 | library(RColorBrewer)
3 | library(lattice)
4 | library(ncdf)
5 | library(ncdf4)
6 | library(raster)
7 | 
8 | ## For multiple files
9 | ## Input directory
10 | 
11 | files <- list.files(path="C:/Users/jessica.sutton/Documents/DATA/test_data/", pattern="*.nc", full.names=T, recursive=T)### recursive = T <-- loops through all subfolders
12 | 
13 | ## Output directory
14 | dir.output <- 'C:/Users/jessica.sutton/Documents/DATA/Rasters/FullRasters/test/' ### change as needed to give output location
15 | 
16 | ## For simplicity, I use "i" as the file name; you could use any name you want. "substr" is a good function to do this.
17 | for (i in 1:length(files)) { 18 | a <- nc_open(files[i]) # load file 19 | p <- ncvar_get(nc=a,'precipitation') ### select the parameter you want to make into raster files 20 | e <- extent(0,360,-60,60) ### choose the extent based on the netcdf file info 21 | r <- raster(p) 22 | re <- setExtent(r,e) ### set the extent to the raster 23 | projection(re) <- "+proj=longlat +datum=WGS84 +ellps=WGS84 +towgs84=0,0,0" 24 | 25 | writeRaster(re, paste(dir.output, basename(files[i]), sep = ''), format = 'GTiff', overwrite = T) ###the basename allows the file to be named the same as the original 26 | } 27 | 28 | ## END 29 | 30 | gc() 31 | 32 | 33 | final <- crop(re, extent(re,81,320,501,860)) ### crop down to the specific area you want ### add this to the above script if you want a cropped/clipped version -------------------------------------------------------------------------------- /dnppy/tsa/test_data/un_ordered_example.txt: -------------------------------------------------------------------------------- 1 | USAF WBAN YR--MODAHRMN DIR SPD GUS CLG SKC L M H VSB MW MW MW MW AW AW AW AW W TEMP DEWP SLP ALT STP MAX MIN PCP01 PCP06 PCP24 PCPXX SD 2 | 723095 93719 201307010044 170 10 *** 22 *** * * * 10.0 ** ** ** ** ** ** ** ** * 81 75 ****** 29.90 1011.7 *** *** ***** ***** ***** ***** ** 3 | 723095 93719 201307010054 190 10 *** 20 *** * * * 10.0 ** ** ** ** ** ** ** ** * 80 75 1012.8 29.91 1012.0 *** *** 0.00 ***** ***** ***** ** 4 | 723095 93719 201307010554 170 10 16 15 *** * * * 10.0 ** ** ** ** ** ** ** ** * 80 75 1013.2 29.92 1012.4 80 78 0.00 ***** ***** ***** ** 5 | 723095 93719 201307311754 140 7 *** 722 CLR * * * 10.0 ** ** ** ** ** ** ** ** * 86 67 1020.2 30.13 1019.5 86 76 0.00 ***** ***** ***** ** 6 | 723095 93719 201307230154 200 7 *** 722 CLR * * * 10.0 ** ** ** ** ** ** ** ** * 80 76 1012.0 29.89 1011.4 *** *** 0.00 ***** ***** ***** ** 7 | 723095 93719 201307242354 200 7 *** 722 CLR * * * 10.0 ** ** ** ** ** ** ** ** * 83 75 1008.7 29.79 1008.0 94 83 0.00 ***** ***** ***** ** 8 | 723095 93719 201307010454 170 9 *** 15 *** * * * 10.0 ** ** ** ** ** ** ** ** * 80 75 1012.9 29.91 1012.0 84 74 0.00 ***** ***** ***** ** 9 | 723095 93719 201307260154 010 8 *** 722 CLR * * * 10.0 ** ** ** ** ** ** ** ** * 77 69 1015.5 29.99 1014.7 *** *** 0.00 ***** ***** ***** ** 10 | 723095 93719 201307180354 *** 0 *** 722 CLR * * * 8.0 ** ** ** ** ** ** ** ** * 79 75 1021.0 30.15 1020.1 *** *** 0.00 ***** ***** ***** ** -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Zonal_Statistics_as_Table.py: -------------------------------------------------------------------------------- 1 | #--------------------------------- 2 | #Name: Zonal Statistics as Table 3 | #Purpose: Summarizes the values of a raster within the zones of another dataset and reports the results to a table 4 | #Notes: FieldName should be the field within the attribute table that defines the entire zone 5 | #Created: 02/24/2014 6 | #--------------------------------- 7 | 8 | import arcpy, arcgisscripting, sys, os, csv, string 9 | from arcpy import env 10 | from arcpy.sa import * 11 | 12 | ################################## 13 | #Input user parameters 14 | arcpy.env.workspace = r"INPUT PATH TO TIFF FOLDER HERE" 15 | OutputFolder = r"INPUT PATH TO OUTPUT DBF FOLDER HERE" 16 | ShapefileMask = r"INPUT SHAPEFILE NAME HERE (NOT FILE PATH, JUST NAME WITH '.SHP' ATTACHED" 17 | FieldName = "SEE NOTES" 18 | ################################### 19 | 20 | arcpy.env.overwriteOutput 
= True 21 | 22 | # Loops through a list of files in the workspace 23 | TIFfiles = arcpy.ListFiles("*.tif") 24 | 25 | # Performs Zonal Statistics 26 | for filename in TIFfiles: 27 | fileroot = filename[0:(len(filename)-3)] 28 | print ("Calculating zonal statistics on: " + filename + " using " + ShapefileMask) 29 | inValueRaster = arcpy.env.workspace + "/" + filename 30 | outTable = OutputFolder + "/" + fileroot 31 | 32 | arcpy.CheckOutExtension("Spatial") 33 | 34 | outZstat = ZonalStatisticsAsTable(ShapefileMask, FieldName, inValueRaster, outTable, "NODATA", "ALL") 35 | 36 | print "Done!" 37 | 38 | -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Zonal_Statistics_as_Table (1).py: -------------------------------------------------------------------------------- 1 | #--------------------------------- 2 | #Name: Zonal Statistics as Table 3 | #Purpose: Summarizes the values of a raster within the zones of another dataset and reports the results to a table 4 | #Notes: FieldName should be the field within the attribute table that defines the entire zone 5 | #Created: 02/24/2014 6 | #--------------------------------- 7 | 8 | import arcpy, arcgisscripting, sys, os, csv, string 9 | from arcpy import env 10 | from arcpy.sa import * 11 | 12 | ################################## 13 | #Input user parameters 14 | arcpy.env.workspace = r"INPUT PATH TO TIFF FOLDER HERE" 15 | OutputFolder = r"INPUT PATH TO OUTPUT DBF FOLDER HERE" 16 | ShapefileMask = r"INPUT SHAPEFILE NAME HERE (NOT FILE PATH, JUST NAME WITH '.SHP' ATTACHED" 17 | FieldName = "SEE NOTES" 18 | ################################### 19 | 20 | arcpy.env.overwriteOutput = True 21 | 22 | # Loops through a list of files in the workspace 23 | TIFfiles = arcpy.ListFiles("*.tif") 24 | 25 | # Performs Zonal Statistics 26 | for filename in TIFfiles: 27 | fileroot = filename[0:(len(filename)-3)] 28 | print ("Calculating zonal statistics on: " + filename + " using " + ShapefileMask) 29 | inValueRaster = arcpy.env.workspace + "/" + filename 30 | outTable = OutputFolder + "/" + fileroot 31 | 32 | arcpy.CheckOutExtension("Spatial") 33 | 34 | outZstat = ZonalStatisticsAsTable(ShapefileMask, FieldName, inValueRaster, outTable, "NODATA", "ALL") 35 | 36 | print "Done!" 37 | 38 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | setup installer for dnppy. 3 | """ 4 | 5 | # sets up dependencies that pip alone seems to frequently fail at. 
6 | import install_dependencies 7 | install_dependencies.main() 8 | 9 | # standard setup 10 | from distutils.core import setup 11 | 12 | setup( 13 | name='dnppy', 14 | version='1.15.3b2', 15 | packages=['dnppy', 16 | 'dnppy.convert', 17 | 'dnppy.core', 18 | 'dnppy.download', 19 | 'dnppy.landsat', 20 | 'dnppy.modis', 21 | 'dnppy.radar', 22 | 'dnppy.raster', 23 | 'dnppy.solar', 24 | 'dnppy.textio', 25 | 'dnppy.tsa'], 26 | url='https://github.com/NASA-DEVELOP/dnppy', 27 | download_url="https://github.com/NASA-DEVELOP/dnppy/archive/master.zip", 28 | license='NASA OPEN SOURCE AGREEMENT VERSION 1.3', 29 | author=["Jwely", 30 | "djjensen", 31 | "Syntaf", 32 | "lancewatkins", 33 | "lmakely", 34 | "qgeddes", 35 | "Scott Baron", 36 | ], 37 | author_email='', 38 | description='DEVELOP National Program python package', 39 | package_data={'dnppy.convert' : ['lib/datatype_library.csv', 40 | 'lib/prj/*'], 41 | 'dnppy.landsat' : ['metadata/*'], 42 | 'dnppy.solar' : ['ref/*'], 43 | 'dnppy.textio' : ['test_data/*'], 44 | 'dnppy.tsa' : ['test_data/*']}, 45 | include_package_data=True 46 | ) -------------------------------------------------------------------------------- /undeployed/proj_code/README.md: -------------------------------------------------------------------------------- 1 | Code in this directory may have utility for specific DEVELOP project partners. Each folder will be named according to 2 | `Year_Term_Node_ProjectShortTitle` such as `2015_Spring_LaRC_VA_agriculture`. 3 | 4 | 5 | Upon creation of a final code deliverable, perform the following tasks. 6 | 7 | 1. Create a folder with the name `Year_Term_Node_ProjectShortTitle` such as `2015_Spring_LaRC_VA_agriculture` 8 | 2. Place all project code in this folder using an organizational scheme of your choice. 9 | 3. The level of documentation you provide is at your discretion. Simple in-code comments may be sufficient for simple applications, but code intended to be run by project partners should have decent tutorial document included in this folder. 10 | 4. Create a readme file named exactly "README.md" with the following information: 11 | * A brief description of this folders content and how it was used in meeting the projects objectives. 12 | * How to run the code, and the order in which scripts are executed or processes were run, if applicable. 13 | * The dependencies and packages required to run the scripts including versions. (ex: dnppy, gdal, scipy, matplotlib) 14 | 5. zip the folder containing this file up and send it to the geoinformatics point of contact. It will then be made available right here on github for download and reference by the public. 15 | 16 | 17 | Do not include any non-text files in this folder! This means no sample raster data or extensive shapefiles. Your whole zipped folder should be only a few kilobytes in size. 18 | -------------------------------------------------------------------------------- /dnppy/core/enf_filelist.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | from list_files import list_files 5 | 6 | 7 | def enf_filelist(filelist, extension = None): 8 | """ 9 | Sanitizes file list inputs 10 | 11 | This function checks that the input is a list of files and not a directory. If the input 12 | is a directory, then it returns a list of files in the directory which match the desired 13 | extension. This is to allow all functions which input filelists to be a little more 14 | flexible by accepting directories instead. 
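    Example (a minimal sketch; the directory path is hypothetical)::

        # a directory input returns the files inside it, and the extension
        # keyword then filters that result down to matching files
        tifs = enf_filelist(r"C:\data\rasters", extension="tif")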
15 | 
16 |     :param filelist: a list of filepath strings
17 |     :param extension: output list contains only files with this string extension. (txt, tif, etc)
18 | 
19 |     :return new_filelist: sanitized file list
20 |     """
21 | 
22 |     new_filelist = None
23 | 
24 |     if isinstance(filelist, str):
25 |         if os.path.isdir(filelist):
26 |             new_filelist = list_files(False, filelist, False, False)
27 | 
28 |         elif os.path.isfile(filelist):
29 |             new_filelist = [filelist]
30 | 
31 |     elif isinstance(filelist, list):
32 |         new_filelist = filelist
33 | 
34 |     elif isinstance(filelist, bool) or filelist is None:
35 |         raise TypeError('Expected file list or directory but received boolean or None type input!')
36 | 
37 | 
38 |     if new_filelist is None:
39 |         new_filelist = filelist
40 | 
41 |     if extension is not None:
42 |         new_filelist = [new_file for new_file in new_filelist if extension in new_file]
43 | 
44 |     return new_filelist
45 | 
--------------------------------------------------------------------------------
/dnppy/textio/read_DS3505.py:
--------------------------------------------------------------------------------
1 | 
2 | # local imports
3 | from text_data import *
4 | 
5 | def read_DS3505(filepath, has_headers = True):
6 |     """
7 |     Text reader for DS3505 data (space delimited) with some fixes
8 | 
9 |     Weather data downloaded from the following website has a peculiarity
10 |     [http://gis.ncdc.noaa.gov/map/viewer/#app=cdo&cfg=cdo&theme=hourly&layers=1&node=gi]
11 |     in that it has some uppercase T's that are rarely needed, but ruin otherwise
12 |     uniform space formatting.
13 | 
14 |     :param filepath: filepath to DS3505 data
15 |     :param has_headers: set False if filepath does not have headers. This doesn't seem
16 |                         to ever happen for DS3505 data.
17 |     :return tdo: returns a text data object with DS3505 data in it.
18 |     """
19 | 
20 |     with open(filepath, 'r') as f:
21 | 
22 |         data = []
23 | 
24 |         if has_headers:
25 |             headers = next(f).replace('\n','').split(' ')
26 |             headers = [x for x in headers if x != ""]
27 |         else:
28 |             headers = None
29 | 
30 |         for line in f:
31 |             entry = line.replace("T"," ").replace("\n","").split(' ')
32 |             entry = [x for x in entry if x!= ""] # remove empties
33 |             data.append(entry)
34 |         f.close()
35 | 
36 |     print("Loaded data from '{0}'".format(filepath))
37 | 
38 |     # assemble the text data object and return it
39 |     tdo = text_data(text_filepath = filepath,
40 |                     headers = headers,
41 |                     row_data = data)
42 | 
43 |     return tdo
44 | 
--------------------------------------------------------------------------------
/dnppy/convert/extract_TRMM_HDF.py:
--------------------------------------------------------------------------------
1 | __author__ = 'jwely'
2 | __all__ = ["extract_TRMM_HDF"]
3 | 
4 | from dnppy import core
5 | from datatype_library import *
6 | from _extract_HDF_datatype import *
7 | 
8 | def extract_TRMM_HDF(hdf_list, layer_indexs, outdir, resolution):
9 |     """
10 |     Extracts TRMM products from HDF to tif.
11 |     http://pmm.nasa.gov/data-access/data-products
12 | 
13 |     :param hdf_list: list of hdf files or directory with hdfs
14 |     :param layer_indexs: list of integer layer indexs
15 |     :param outdir: directory to place outputs
16 |     :param resolution: The size of a pixel in degrees, either
17 |                        "0.25", "0.5", "1.0", "5.0" depending on
18 |                        the specific TRMM product you are extracting.
19 | :return: a list of all files created as output 20 | 21 | """ 22 | 23 | hdf_list = core.enf_filelist(hdf_list) 24 | output_filelist = [] 25 | 26 | # load the TRMM datatype from the library 27 | datatype = datatype_library()["TRMM_{0}_GLOBAL".format(resolution)] 28 | 29 | # for every hdf file in the input list 30 | for hdf in hdf_list: 31 | # extract layers and add the new filepaths to the output filelist 32 | hdf_output_filelist = _extract_HDF_datatype(hdf, layer_indexs, outdir, datatype) 33 | output_filelist += hdf_output_filelist 34 | 35 | return output_filelist 36 | 37 | 38 | if __name__ == "__main__": 39 | rasterpath = r"C:\Users\jwely\Desktop\troubleshooting\3B42.20140101.00.7.HDF" 40 | outdir = r"C:\Users\jwely\Desktop\troubleshooting" 41 | extract_TRMM_HDF(rasterpath, [0], outdir, "0.25") -------------------------------------------------------------------------------- /dnppy/raster/clip_to_shape.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | __all__ = ["clip_to_shape"] 4 | 5 | from dnppy import core 6 | from enf_rastlist import enf_rastlist 7 | 8 | import os 9 | import arcpy 10 | from arcpy.sa import ExtractByMask 11 | 12 | 13 | def clip_to_shape(rasterlist, shapefile, outdir = False): 14 | """ 15 | Simple batch clipping script to clip rasters to shapefiles. 16 | 17 | :param rasterlist: single file, list of files, or directory of rasters to clip 18 | :param shapefile: shapefile to which rasters will be clipped 19 | :param outdir: desired output directory. If no output directory is specified, the 20 | new files will simply have '_c' added as a suffix. 21 | 22 | :return output_filelist: list of files created by this function. 23 | """ 24 | 25 | rasterlist = enf_rastlist(rasterlist) 26 | output_filelist = [] 27 | 28 | # ensure the output directory exists 29 | if outdir and not os.path.exists(outdir): 30 | os.makedirs(outdir) 31 | 32 | for raster in rasterlist: 33 | 34 | # create output filename with "c" suffix 35 | outname = core.create_outname(outdir,raster,'c') 36 | 37 | # perform a double clip, first using Clip_management (preserves NoData values), 38 | # then using the arcpy.sa module, which can actually clip to the shapefile geometry, unlike the management tool. 39 | arcpy.Clip_management(raster, "#", outname, shapefile, "ClippingGeometry") 40 | out = ExtractByMask(outname, shapefile) 41 | out.save(outname) 42 | output_filelist.append(outname) 43 | print("Clipped and saved: {0}".format(outname)) 44 | 45 | return output_filelist 46 | -------------------------------------------------------------------------------- /dnppy/raster/raster_fig.py: -------------------------------------------------------------------------------- 1 | 2 | __author__ = "Jwely" 3 | __all__ = ["raster_fig"] 4 | 5 | # standard imports 6 | import matplotlib.pyplot as plt 7 | 8 | 9 | class raster_fig: 10 | """ 11 | raster_fig objects are used for heads-up displays of raster 12 | data to the user.
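A minimal usage sketch, with random data standing in for a real raster
(and assuming the class is exposed through ``dnppy.raster``'s __init__
like the module's other members):

.. code-block:: python

    import numpy as np
    from dnppy.raster import raster_fig

    fig = raster_fig(np.random.rand(100, 100), title = "example")  # opens the figure window
    fig.update_fig(np.random.rand(100, 100))                       # refreshes the display
    fig.close_fig()                                                # closes the window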
13 | 14 | :param numpy_rast: a numpy array representing a raster dataset 15 | :param title: title to put on the raster figure plot 16 | """ 17 | 18 | def __init__(self, numpy_rast, title = False): 19 | """ initializes the raster figure """ 20 | 21 | self.numpy_rast = numpy_rast 22 | self.title = title 23 | 24 | self.make_fig() 25 | return 26 | 27 | 28 | def make_fig(self): 29 | """ function to set up an initial figure """ 30 | 31 | self.fig, ax = plt.subplots() 32 | self.fig.show() 33 | 34 | self.im = ax.imshow(self.numpy_rast) 35 | 36 | if self.title: 37 | self.fig.suptitle(self.title, fontsize = 20) 38 | 39 | self.im.set_data(self.numpy_rast) 40 | self.fig.canvas.draw() 41 | return 42 | 43 | 44 | def update_fig(self, numpy_rast, title = False): 45 | """ 46 | Function to update a figure that already exists. 47 | 48 | :param numpy_rast: a numpy array representing a raster dataset 49 | :param title: title to put on the raster figure object 50 | """ 51 | 52 | if title: 53 | self.fig.suptitle(title, fontsize = 20) 54 | 55 | self.im.set_data(numpy_rast) 56 | self.fig.canvas.draw() 57 | return 58 | 59 | 60 | def close_fig(self): 61 | """ closes an active figure """ 62 | 63 | plt.close(self.fig) 64 | return 65 | -------------------------------------------------------------------------------- /docs/source/modules/convert.rst: -------------------------------------------------------------------------------- 1 | convert 2 | ======= 3 | 4 | .. automodule:: dnppy.convert 5 | :members: 6 | 7 | Examples 8 | -------- 9 | no examples yet! 10 | 11 | Code Help 12 | --------- 13 | 14 | Auto-documentation for functions and classes within this module is generated below! 15 | 16 | .. automodule:: dnppy.convert.datatype_library 17 | :members: 18 | 19 | .. automodule:: dnppy.convert.extract_archive 20 | :members: 21 | 22 | .. automodule:: dnppy.convert.extract_GCMO_NetCDF 23 | :members: 24 | 25 | .. automodule:: dnppy.convert.extract_GPM_IMERG 26 | :members: 27 | 28 | .. automodule:: dnppy.convert.extract_GRACE_DA_binary 29 | :members: 30 | 31 | .. automodule:: dnppy.convert.extract_MPE_NetCDF 32 | :members: 33 | 34 | .. automodule:: dnppy.convert.extract_SMOS_NetCDF 35 | :members: 36 | 37 | .. automodule:: dnppy.convert.extract_TRMM_HDF 38 | :members: 39 | 40 | .. automodule:: dnppy.convert.extract_TRMM_NetCDF 41 | :members: 42 | 43 | .. automodule:: dnppy.convert.HDF5_to_numpy 44 | :members: 45 | 46 | .. automodule:: dnppy.convert.ll_to_utm 47 | :members: 48 | 49 | .. automodule:: dnppy.convert.nongrid_data 50 | :members: 51 | :private-members: 52 | 53 | .. automodule:: dnppy.convert._convert_dtype 54 | :private-members: 55 | 56 | .. automodule:: dnppy.convert._extract_HDF_datatype 57 | :private-members: 58 | 59 | .. automodule:: dnppy.convert._extract_HDF_layer_data 60 | :private-members: 61 | 62 | .. automodule:: dnppy.convert._extract_NetCDF_datatype 63 | :private-members: 64 | 65 | .. automodule:: dnppy.convert._extract_NetCDF_layer_data 66 | :private-members: 67 | 68 | .. automodule:: dnppy.convert._gdal_dataset_to_tif 69 | :private-members: 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /dev/test/fetch_test_SRTM.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from dnppy import download 4 | import os 5 | import arcpy 6 | 7 | def fetch_test_SRTM(test_dir): 8 | """ 9 | grabs two example sets of SRTM data.
One at low resolution for 10 | the state of VA, and some at high resolution for the LA metro area 11 | """ 12 | 13 | srtm_dir = os.path.join(test_dir,"raw", "SRTM") 14 | dem_dir = os.path.join(test_dir,"pre_processed", "SRTM") 15 | 16 | if not os.path.exists(srtm_dir): 17 | os.makedirs(srtm_dir) 18 | os.makedirs(dem_dir) 19 | 20 | # start with a DEM for Hampton Roads, VA. 21 | print("Downloading SRTM test data for Hampton Roads VA!") 22 | lat_lon_pairs = [(37,-77),(37,-78),(37,-76), 23 | (36,-77),(36,-78),(36,-76), 24 | (38,-77),(38,-78),(38,-76)] 25 | 26 | dem_files = download.fetch_SRTM(lat_lon_pairs, "SRTMGL3", srtm_dir) 27 | arcpy.MosaicToNewRaster_management(dem_files, dem_dir, "VAcoast_DEM.tif", 28 | number_of_bands = 1, pixel_type = "32_BIT_SIGNED") 29 | 30 | # grab a DEM for the Los Angeles metro area 31 | print("Downloading SRTM test data for Los Angeles metro area!") 32 | lat_lon_pairs = [(34,-118),(34,-119),(34,-120),(34,-117), 33 | (35,-118),(35,-119),(35,-120),(35,-117), 34 | (33,-118),(33,-119),(33,-120),(33,-117)] 35 | 36 | dem_files = download.fetch_SRTM(lat_lon_pairs, "SRTMGL3", srtm_dir) 37 | arcpy.MosaicToNewRaster_management(dem_files, dem_dir, "LAmetro_DEM.tif", 38 | number_of_bands = 1, pixel_type = "32_BIT_SIGNED") 39 | 40 | return 41 | 42 | 43 | if __name__ == "__main__": 44 | fetch_test_SRTM(r"C:\Users\jwely\Desktop\dnppytest") 45 | 46 | 47 | -------------------------------------------------------------------------------- /dnppy/raster/null_set_range.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["null_set_range"] 3 | 4 | from enf_rastlist import enf_rastlist 5 | from to_numpy import to_numpy 6 | from from_numpy import from_numpy 7 | 8 | import arcpy 9 | 10 | def null_set_range(rastlist, high_thresh = None, low_thresh = None, NoData_Value = None): 11 | """ 12 | Changes values within a certain range to NoData. Similar to ``raster.null_define``, 13 | but can take an entire range of values to set to NoData. Useful in filtering 14 | obviously erroneous high or low values from a raster dataset. 15 | 16 | :param rastlist: list of rasters for which to set no data values 17 | :param high_thresh: will set all values above this to NoData 18 | :param low_thresh: will set all values below this to NoData 19 | :param NoData_Value: the NoData value to assign; leave as None to use each raster's existing NoData value 20 | :return rastlist: list of all rasters modified by this function 21 | """ 22 | 23 | # sanitize filelist input 24 | rastlist = enf_rastlist(rastlist) 25 | 26 | # iterate through each file in the filelist and set nodata values 27 | for rastname in rastlist: 28 | 29 | # load raster as numpy array and save spatial referencing.
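# (to_numpy / from_numpy below are the dnppy.raster helpers; the returned "meta"
# object carries the spatial referencing and NoData_Value, which is what allows
# the edited array to be written back to disk with identical georeferencing)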
30 | rast, meta = to_numpy(rastname) 31 | 32 | if NoData_Value is None: # use the raster's existing NoData value unless one was given 33 | NoData_Value = meta.NoData_Value 34 | 35 | if high_thresh is not None: 36 | rast[rast >= high_thresh] = NoData_Value 37 | 38 | if low_thresh is not None: 39 | rast[rast <= low_thresh] = NoData_Value 40 | 41 | from_numpy(rast, meta, rastname) 42 | try: 43 | arcpy.SetRasterProperties_management(rastname, data_type = "#", statistics = "#", 44 | stats_file = "#", nodata = "1 " + str(NoData_Value)) 45 | except RuntimeError: 46 | print("failed to set nodata in {0}".format(rastname)) 47 | 48 | return -------------------------------------------------------------------------------- /dev/generate_release_stat_report.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import requests 4 | import datetime 5 | 6 | def view_release_stats(): 7 | """ 8 | Simple script to track downloads of dnppy assets. Generates 9 | text file report with some key stats in it that help indicate 10 | the number of downloads, and whether our user base is mostly 11 | 32 or 64 bit. 12 | """ 13 | 14 | # init log and total download counter 15 | now = datetime.datetime.now() 16 | log = ["Report generated on {0}\n\n".format(now)] 17 | total_downloads = 0 18 | 19 | # send the request to the github API and read the json 20 | r = requests.get("https://api.github.com/repos/nasa/dnppy/releases") 21 | rjson = r.json() 22 | 23 | # check for overuse warnings 24 | if r.status_code != 200: # compare status codes with "!=", never with "is not" 25 | if r.status_code == 403: 26 | raise Exception("You have made too many requests to the GitHub API this hour. Try again later") 27 | 28 | # generate download statistics report 29 | for release in rjson: 30 | 31 | log.append("{0}\n".format(release["name"])) 32 | 33 | if not release["assets"]: 34 | log.append("\t[this release has no assets]\n") 35 | 36 | for asset in release["assets"]: 37 | log.append("\t{0}: \n\t\t{1} = {2}\n\t\t{3} = {4}\n\t\t{5} = {6}\n".format( 38 | asset["name"], 39 | "updated".ljust(10), asset["updated_at"].replace("T"," at "), 40 | "downloads".ljust(10), asset["download_count"], 41 | "asset url".ljust(10), asset["url"])) 42 | 43 | log.append("") 44 | 45 | # write the log to a text file. 46 | with open("dnppy_stat_report.txt",'w+') as f: 47 | for entry in log: 48 | print(entry) 49 | f.write(entry) 50 | 51 | 52 | def main(): 53 | view_release_stats() 54 | 55 | 56 | if __name__ == "__main__": 57 | main() -------------------------------------------------------------------------------- /undeployed/legacy/Basic_Batch_Tools/Batch_Con_Py.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | #Makes sure Spatial Analyst is turned on. 3 | if arcpy.CheckExtension("Spatial")== "Available": 4 | arcpy.CheckOutExtension("Spatial") 5 | from arcpy.sa import * 6 | else: 7 | arcpy.AddError("You do not have the Spatial Analyst Extension, and therefore cannot use this tool.") 8 | 9 | #Input folder.
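#(the hardcoded path below is specific to the original author's machine; repoint it at a folder of "Clip_" prefixed rasters before running)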
10 | folder_path= r"H:\Spectral" 11 | arcpy.env.workspace= folder_path 12 | 13 | """#Iso1 14 | outUnsupervised = IsoClusterUnsupervisedClassification(("Clip_North_Hansen_2000Band5.tif","Clip_North_Hansen_2000Band7.tif","Clip_z2000_7_5.tif","Clip_zDivide200057.tif"), 30, 20, 10) 15 | outUnsupervised.save("Iso1.tif") 16 | 17 | #Iso2 18 | outUnsupervised = IsoClusterUnsupervisedClassification(("Clip2_North_Hansen_2012Band5.tif","Clip2_North_Hansen_2012Band7.tif","Clip2_z2012_7_5.tif","Clip2_zDivide201257.tif"), 30, 20, 10) 19 | outUnsupervised.save("Iso2.tif") 20 | 21 | #Iso3 22 | outUnsupervised = IsoClusterUnsupervisedClassification(("Clip_North_Hansen_2000Band3.tif","Clip_North_Hansen_2000Band4.tif","Clip_North_Hansen_2000Band5.tif","Clip_North_Hansen_2000Band7.tif","Clip_z2012_3_5.tif","Clip_z2012_5_4.tif","Clip_z2012_7_5.tif"), 30, 20, 10) 23 | outUnsupervised.save("Iso3.tif") 24 | 25 | #Iso4 26 | outUnsupervised = IsoClusterUnsupervisedClassification(("Clip2_North_Hansen_2000Band3.tif","Clip2_North_Hansen_2000Band4.tif","Clip2_North_Hansen_2000Band5.tif","Clip2_North_Hansen_2000Band7.tif","Clip2_z2012_3_5.tif","Clip2_z2012_5_4.tif","Clip2_z2012_7_5.tif"), 30, 20, 10) 27 | outUnsupervised.save("Iso4.tif")""" 28 | Mask_file= "Caclulated_Mangroves.tif" 29 | 30 | for rasters in arcpy.ListRasters(): 31 | if rasters[0:5]== "Clip_": 32 | print rasters[5:] 33 | OutName= "WithoutMang_" + rasters[5:] 34 | outCon = Con("C:/Users/sbarron/Desktop/Delete_Folder/Mangroves_Null.tif", rasters, "", "VALUE = 1") 35 | outCon.save(OutName) 36 | -------------------------------------------------------------------------------- /dnppy/convert/extract_MPE_NetCDF.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["extract_MPE_NetCDF"] 3 | 4 | from dnppy import core 5 | from datatype_library import * 6 | from _extract_NetCDF_layer_data import * 7 | from _gdal_dataset_to_tif import * 8 | 9 | 10 | def extract_MPE_NetCDF(netcdf_list, layer_indexs, outdir, area): 11 | """ 12 | Extracts MPE data from its native NetCDF format.
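A hedged example call, mirroring the testing block at the bottom of this file (paths are hypothetical):

.. code-block:: python

    from dnppy import convert

    # extract layer 0 from a CONUS multisensor precipitation estimate NetCDF
    tifs = convert.extract_MPE_NetCDF(r"C:\MPE\nws_precip_conus_20150101.nc",
                                      [0], r"C:\MPE\tifs", "CONUS")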
13 | 14 | :param netcdf_list: list of netcdf files or a directory with netcdfs 15 | :param layer_indexs: list of integer layer indices 16 | :param outdir: directory to place outputs 17 | :param area: presently only supports "CONUS" 18 | 19 | :return: A list of all files created as output 20 | """ 21 | 22 | netcdf_list = core.enf_filelist(netcdf_list) 23 | output_filelist = [] 24 | 25 | # load the MPE datatype from the library 26 | dtype = datatype_library()["MPE_HRAP_{0}".format(area)] 27 | 28 | # for every netcdf file in the input list 29 | for netcdf in netcdf_list: 30 | data = _extract_NetCDF_layer_data(netcdf, layer_indexs) 31 | 32 | for layer_index in layer_indexs: 33 | 34 | dataset = data[layer_index] 35 | outpath = core.create_outname(outdir, netcdf, str(layer_index), "tif") 36 | 37 | print("creating dataset at {0}".format(outpath)) 38 | 39 | _gdal_dataset_to_tif(dataset, outpath, 40 | cust_projection = dtype.projectionTXT, 41 | cust_geotransform = dtype.geotransform, 42 | force_custom = False, 43 | nodata_value = -1) 44 | 45 | output_filelist.append(outpath) 46 | 47 | return output_filelist 48 | 49 | 50 | if __name__ == "__main__": 51 | 52 | filepath = r"C:\Users\jwely\Desktop\troubleshooting\MPE\nws_precip_conus_20150101.nc" 53 | od = r"C:\Users\jwely\Desktop\troubleshooting\MPE" 54 | extract_MPE_NetCDF(filepath, [0], od, "CONUS") -------------------------------------------------------------------------------- /dnppy/convert/extract_SMOS_NetCDF.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["extract_SMOS_NetCDF"] 3 | 4 | from dnppy import core 5 | from datatype_library import * 6 | from _extract_NetCDF_layer_data import * 7 | from _gdal_dataset_to_tif import * 8 | 9 | def extract_SMOS_NetCDF(netcdf_list, layer_indexs, outdir, resolution): 10 | """ 11 | Extracts SMOS data from its native NetCDF format.
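A hedged example call, mirroring the testing block at the bottom of this file (paths are hypothetical):

.. code-block:: python

    from dnppy import convert

    # extract layer 0 from a global 25km resolution SMOS NetCDF
    tifs = convert.extract_SMOS_NetCDF(r"C:\SMOS\_NRTSM003D025A_ALL.nc",
                                       [0], r"C:\SMOS\tifs", "25k")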
12 | 13 | :param netcdf_list: list of netcdf files or a directory with netcdfs 14 | :param layer_indexs: list of integer layer indices 15 | :param outdir: directory to place outputs 16 | :param resolution: Presently ONLY supports input of "25k" 17 | 18 | :return: a list of all files created as output 19 | """ 20 | 21 | netcdf_list = core.enf_filelist(netcdf_list) 22 | output_filelist = [] 23 | 24 | # load the SMOS datatype from the library 25 | dtype = datatype_library()["SMOS_{0}_GLOBAL".format(resolution)] 26 | 27 | # for every netcdf file in the input list 28 | for netcdf in netcdf_list: 29 | data = _extract_NetCDF_layer_data(netcdf, layer_indexs) 30 | 31 | for layer_index in layer_indexs: 32 | 33 | dataset = data[layer_index] 34 | outpath = core.create_outname(outdir, netcdf, str(layer_index), "tif") 35 | 36 | print("creating dataset at {0}".format(outpath)) 37 | 38 | _gdal_dataset_to_tif(dataset, outpath, 39 | cust_projection = dtype.projectionTXT, 40 | cust_geotransform = dtype.geotransform, 41 | force_custom = False, 42 | nodata_value = -999) 43 | 44 | output_filelist.append(outpath) 45 | 46 | return output_filelist 47 | 48 | 49 | if __name__ == "__main__": 50 | 51 | filepath = r"C:\Users\jwely\Desktop\troubleshooting\SMOS\_NRTSM003D025A_ALL.nc" 52 | od = r"C:\Users\jwely\Desktop\troubleshooting\SMOS" 53 | extract_SMOS_NetCDF(filepath, [0], od, "25k") -------------------------------------------------------------------------------- /dev/test/fetch_test_MODIS.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from dnppy import download 4 | import os 5 | 6 | def fetch_test_MODIS(test_dir): 7 | """ 8 | Fetches MODIS test data with dnppy download module 9 | 10 | Data is targeted for February of 2015, but some data is 16-day 11 | and yearly which extends outside that boundary.
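Tile strings such as "h11v05" follow the MODIS sinusoidal grid naming
convention (horizontal tile 11, vertical tile 05); the two tiles used
below cover the southeastern United States.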
12 | """ 13 | 14 | if not os.path.exists(test_dir): 15 | os.makedirs(os.path.join(test_dir, "raw")) 16 | 17 | print("Downloading MODIS test data!") 18 | # set up universal MODIS parameters 19 | modis_dir = os.path.join(test_dir,"raw","MODIS") 20 | years = [2015] 21 | days = range(31, 90) 22 | tiles = ['h11v05','h12v05'] 23 | 24 | # download some test products 25 | 26 | prod = "MOD13A1" # 16 day vegetation indices 27 | vers = "005" 28 | outdir = os.path.join(modis_dir, prod) 29 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 30 | 31 | prod = "MYD13A1" # 16 day vegetation indices 32 | vers = "005" 33 | outdir = os.path.join(modis_dir, prod) 34 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 35 | 36 | prod = "MYD11A1" # daily surface temperature 37 | vers = "041" 38 | outdir = os.path.join(modis_dir, prod) 39 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 40 | 41 | prod = "MOD10A1" # daily snow cover 42 | vers = "005" 43 | outdir = os.path.join(modis_dir, prod) 44 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 45 | 46 | prod = "MYD09A1" # 8-day 500m surface reflectance 47 | vers = "005" 48 | outdir = os.path.join(modis_dir, prod) 49 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 50 | 51 | prod = "MOD09A1" # 8-day 500m surface reflectance 52 | vers = "005" 53 | outdir = os.path.join(modis_dir, prod) 54 | download.fetch_MODIS(prod, vers, tiles, outdir, years, days) 55 | return 56 | 57 | 58 | if __name__ == "__main__": 59 | fetch_test_MODIS(r"C:\Users\jwely\Desktop\dnppytest") -------------------------------------------------------------------------------- /docs/source/modules/core.rst: -------------------------------------------------------------------------------- 1 | core 2 | ==== 3 | 4 | .. automodule:: dnppy.core 5 | :members: 6 | 7 | Examples 8 | -------- 9 | 10 | .. rubric:: Listing files in a directory 11 | 12 | Perhaps the function with the greatest utility in the core module is the `list_files` function, which can crawl through a directory, or an entire tree of directories (recursive), and find files that match certain criteria in a beginner-friendly way. The syntax is as follows 13 | 14 | .. code-block:: python 15 | 16 | from dnppy import core 17 | 18 | recursive = True # will find files in the directory's subfolders 19 | directory = r"C:\mydir" # the directory that we would like to search 20 | contains = ["this","that"] # files must contain these strings 21 | not_contain = ["otherthing"] # files must NOT contain these strings 22 | 23 | filelist = core.list_files(recursive, directory, contains, not_contain) 24 | 25 | Note that both ``contains`` and ``not_contain`` may be either single strings, or lists of strings. This is a good way to filter down by NASA EOS product type, Landsat band index, or perhaps filter out files by extension. Users wishing to get a little more advanced should check out `regular expressions`_. 26 | 27 | .. _regular expressions: https://docs.python.org/2/howto/regex.html 28 | 29 | Code Help 30 | --------- 31 | 32 | Auto-documentation for functions and classes within this module is generated below! 33 | 34 | .. automodule:: dnppy.core.create_outname 35 | :members: 36 | 37 | .. automodule:: dnppy.core.enf_filelist 38 | :members: 39 | 40 | .. automodule:: dnppy.core.enf_list 41 | :members: 42 | 43 | .. automodule:: dnppy.core.exists 44 | :members: 45 | 46 | .. automodule:: dnppy.core.install_from_wheel 47 | :members: 48 | 49 | .. automodule:: dnppy.core.list_files 50 | :members: 51 | 52 | ..
automodule:: dnppy.core.move 53 | :members: 54 | 55 | .. automodule:: dnppy.core.rename 56 | :members: 57 | 58 | .. automodule:: dnppy.core.run_command 59 | :members: 60 | 61 | -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/SingleBandGapFiller.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: Single Band Gap Filler For Landsat 7 3 | # Purpose: To use cloud masks for three separate scenes to fill gaps in data 4 | # due to SLC-induced gaps and clouds 5 | # Author: Quinten Geddes Quinten.A.Geddes@nasa.gov 6 | # NASA DEVELOP Program 7 | # Created: 12/04/2013 8 | 9 | #------------------------------------------------------------------------------- 10 | 11 | import arcpy 12 | arcpy.CheckOutExtension("Spatial") 13 | arcpy.env.overwriteOutput=True 14 | 15 | #Registering the scenes of interest 16 | Scene1 = arcpy.Raster(arcpy.GetParameterAsText(0)) 17 | Scene2 = arcpy.Raster(arcpy.GetParameterAsText(1)) 18 | Scene3 = arcpy.Raster(arcpy.GetParameterAsText(2)) 19 | 20 | #establishing the cloud mask paths 21 | CloudMaskpath1= arcpy.GetParameterAsText(3) 22 | CloudMaskpath2= arcpy.GetParameterAsText(4) 23 | CloudMaskpath3= arcpy.GetParameterAsText(5) 24 | 25 | OutputFolder= arcpy.GetParameterAsText(6) 26 | OutputFile= arcpy.GetParameterAsText(7) 27 | 28 | #preempting scratch workspace errors 29 | arcpy.env.scratchWorkspace=OutputFolder 30 | 31 | #establishing gaps in each image 32 | Mask1=Scene1>0 33 | Mask2=Scene2>0 34 | Mask3=Scene3>0 35 | 36 | #Applying the Cloud mask if provided 37 | for scene in [1,2,3]: 38 | try: 39 | exec("CloudMask{0}=arcpy.Raster(CloudMaskpath{0})".format(scene)) 40 | exec("Mask{0}=Mask{0}*CloudMask{0}".format(scene)) 41 | except: 42 | pass 43 | 44 | #keeping all good pixels for the first scene 45 | Scene1Fill=Mask1*Scene1 46 | #keeping good pixels for the 2nd scene where 1st pixels are bad 47 | Scene2Fill=((Mask1==0)*Mask2)*Scene2 48 | #keeping good pixels for the 3rd scene where 2nd and 1st pixels are bad 49 | Scene3Fill=((Mask1==0)*(Mask2==0)*Mask3)*Scene3 50 | 51 | #combining the kept pixels from each scene 52 | FinalImage=Scene1Fill+Scene2Fill+Scene3Fill 53 | 54 | FinalImage.save(OutputFolder+"\\"+OutputFile) -------------------------------------------------------------------------------- /dnppy/download/list_ftp.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import ftplib 4 | import socket 5 | 6 | __all__ = ["list_ftp"] 7 | 8 | def list_ftp(site, username = None , password = None, dir = None): 9 | """ 10 | Lists contents of a typical FTP download site. Returns two lists, the first is of 11 | filenames, the second is of full filepaths (including filenames) that one could 12 | pass through to the "download_url" function.
Returns two empty lists if the server has 13 | rejected our connection 14 | 15 | :param site: url to ftp website root, does not need to include "ftp://" 16 | :param username: username to log in with, if required 17 | :param password: password to log in with, if required 18 | :param dir: specific ftp directory we wish to access 19 | 20 | :returns filenames: a list of filenames 21 | :returns filepaths: a list of full filepaths 22 | """ 23 | 24 | # ftplib does not like the ftp address out front for some reason 25 | if "ftp://" in site: 26 | site = site.replace("ftp://", "") 27 | 28 | try: 29 | ftp = ftplib.FTP(site) 30 | except EOFError: 31 | return [], [] 32 | 33 | except socket.gaierror: 34 | raise Exception("Socket.gaierror indicates this ftp address '{0}' does not exist".format(site)) 35 | 36 | 37 | if username is not None and password is not None: 38 | ftp.login(username, password) 39 | elif username is not None: 40 | ftp.login(username) 41 | else: 42 | ftp.login() 43 | 44 | if dir is not None: 45 | ftp.cwd(dir) 46 | else: 47 | ftp.cwd("") 48 | dir = "" 49 | 50 | rawdata = [] 51 | ftp.dir(rawdata.append) 52 | filenames = [i.split()[-1] for i in rawdata] 53 | filepaths = ["ftp://"+"/".join([site, dir, afile]).replace("//","/") for afile in filenames] 54 | ftp.quit() 55 | 56 | return filenames, filepaths 57 | 58 | 59 | # testing area 60 | if __name__ == "__main__": 61 | filenames, filepaths = list_ftp("n5eil01u.ecs.nsidc.org") 62 | 63 | for filename in filenames: 64 | print filename -------------------------------------------------------------------------------- /dnppy/convert/HDF5_to_numpy.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import h5py 4 | import os 5 | 6 | def HDF5_to_numpy(hdfpath, layers = None): 7 | """ 8 | NOTE: This is functionally identical to ``_extract_HDF_layer_data``, but employs 9 | an h5py based approach instead of gdal. It is experimental, and not yet used 10 | anywhere.
11 | 12 | Extracts one or more layers from an HDF5 file and returns a dict of numpy arrays 13 | 14 | :param hdfpath: Filepath to an HDF5 file 15 | :param layers: A list of integer values or layer names to extract 16 | leave "None" to return numpy arrays for ALL layers 17 | 18 | :return layer_dict: Dict with band names as keys and numpy arrays as values 19 | """ 20 | 21 | with h5py.File(hdfpath, "r", driver = "core") as hdf: 22 | group = hdf[list(hdf)[0]] 23 | bands = list(group) 24 | 25 | # print info about each dataset 26 | print("Contents of {0}".format(os.path.basename(hdfpath))) 27 | for i,x in enumerate(group.values()): 28 | print i,x 29 | 30 | if layers is None: 31 | layers = bands 32 | 33 | elif isinstance(layers, str) or isinstance(layers, int): 34 | layers = [layers] 35 | 36 | # verify that the desired layer can be extracted 37 | for i, layer in enumerate(layers): 38 | if isinstance(layer, int) and layer < len(bands): # "<" prevents an IndexError on the last band 39 | layers[i] = bands[layer] 40 | elif isinstance(layer, str) and layer in bands: 41 | layers[i] = layer 42 | 43 | layer_dict = {} 44 | for layer in layers: 45 | try: 46 | layer_dict[layer] = group[layer][()] 47 | except: 48 | print("Failed to read layer '{0}'".format(layer)) 49 | 50 | return layer_dict 51 | 52 | 53 | if __name__ == "__main__": 54 | rasterpath = r"C:\Users\jwely\Desktop\troubleshooting\HDF_tests\3B-HHR-L.MS.MRG.3IMERG.20150401-S233000-E235959.1410.V03E.RT-H5" 55 | output = HDF5_to_numpy(rasterpath, [2,3,5]) 56 | 57 | print output.keys() -------------------------------------------------------------------------------- /dnppy/convert/_extract_NetCDF_datatype.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["_extract_NetCDF_datatype"] 3 | 4 | from _extract_NetCDF_layer_data import * 5 | from _gdal_dataset_to_tif import * 6 | 7 | from dnppy import core 8 | 9 | def _extract_NetCDF_datatype(netcdf, layer_indexs, outdir, datatype, 10 | force_custom = False, nodata_value = None): 11 | """ 12 | This function wraps "_extract_NetCDF_layer_data" and "_gdal_dataset_to_tif". 13 | It only works for datatypes listed in the datatype_library.csv 14 | 15 | :param netcdf: a single netcdf filepath 16 | :param layer_indexs: list of int index values of layers to extract 17 | :param outdir: filepath to output directory to place tifs 18 | :param datatype: a dnppy.convert.datatype object created from an 19 | entry in the datatype_library.csv 20 | :param force_custom: if True, this will force the data to take on the 21 | projection and geotransform attributes from 22 | the datatype object, even if valid projection 23 | and geotransform info can be pulled from the gdal 24 | dataset. Should almost never be True.
25 | :param nodata_value: the value to set to NoData 26 | 27 | :return: list of filepaths to output files 28 | """ 29 | 30 | output_filelist = [] 31 | 32 | data = _extract_NetCDF_layer_data(netcdf, layer_indexs) 33 | 34 | for layer_index in layer_indexs: 35 | 36 | dataset = data[layer_index] 37 | outpath = core.create_outname(outdir, netcdf, str(layer_index), "tif") 38 | 39 | print("creating dataset at {0}".format(outpath)) 40 | 41 | _gdal_dataset_to_tif(dataset, outpath, 42 | cust_projection = datatype.projectionTXT, 43 | cust_geotransform = datatype.geotransform, 44 | force_custom = force_custom, 45 | nodata_value = nodata_value) 46 | 47 | output_filelist.append(outpath) 48 | 49 | return output_filelist -------------------------------------------------------------------------------- /dnppy/core/run_command.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["run_command"] 3 | 4 | 5 | import subprocess 6 | import collections 7 | 8 | 9 | def run_command(*command): 10 | """ 11 | This function formats and runs commands that one would call from the console. 12 | Primarily intended for calling gdal commands from other functions in cases 13 | where the python bindings are absent. Particularly useful for commands with 14 | list type arguments. This function provides simplicity, but users wanting more 15 | functionality should use the subprocess module directly. 16 | 17 | :param command: command can be virtually any number of string arguments in 18 | any configuration of args, lists, and tuples. This function 19 | will take all input strings in the order in which they are 20 | given and place a space ``" "`` between each argument before 21 | passing it to the command line. 22 | 23 | .. code-block:: python 24 | 25 | # all of these are valid syntax 26 | core.run_command(arg1) 27 | core.run_command(arg1, arg2) 28 | core.run_command([arg1, arg2]) 29 | core.run_command(arg1, [arg2, arg3]) 30 | core.run_command(arg1, [arg2, arg3, [arg4, arg5]], (arg7, arg8)) 31 | """ 32 | 33 | # create both a single string command and a list of args. 34 | command_list = list(_flatten_args(command)) 35 | command_str = " ".join(map(str, command_list)) 36 | command_args = command_str.split(" ") 37 | 38 | print(command_str) 39 | subprocess.call(command_args) 40 | 41 | return 42 | 43 | 44 | # tiny function for flattening unknown nested argument structure 45 | def _flatten_args(l): 46 | """ 47 | flattens a list with nested lists, tuples, and other irregular structures 48 | :param l: list to flatten 49 | :return: generator object that can be converted to list with list(). 50 | """ 51 | for el in l: 52 | if isinstance(el, collections.Iterable) and not isinstance(el, basestring): 53 | for sub in _flatten_args(el): 54 | yield sub 55 | else: 56 | yield el -------------------------------------------------------------------------------- /docs/source/modulesum.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | Exploring the modules 3 | ===================== 4 | 5 | OK, so you have read the overview, maybe you've downloaded and installed the package, you read about the design and how to use it, but what can dnppy *actually* do!? How do you access functions?! What can you do with NASA data? The next section of the sidebar breaks dnppy down into its individual modules with some introductory examples on how to use some of the functions and classes in each one.
When accessing code from some remote location with python, we use an ``import`` statement to bring the module contents into the current namespace of python. 6 | 7 | .. code-block:: python 8 | 9 | from dnppy import core # import the core module from dnppy 10 | help(core) # print list of functions in core 11 | help(core.function) # print detailed help for function "function" 12 | 13 | # collects the returned value from passing some arguments into our hypothetical function 14 | returned_value = core.function(*args) 15 | 16 | 17 | Module Summary 18 | -------------- 19 | 20 | .. rubric:: :doc:`convert <modules/convert>` 21 | 22 | .. automodule:: dnppy.convert 23 | :members: 24 | 25 | .. rubric:: :doc:`core <modules/core>` 26 | 27 | .. automodule:: dnppy.core 28 | :members: 29 | 30 | .. rubric:: :doc:`download <modules/download>` 31 | 32 | .. automodule:: dnppy.download 33 | :members: 34 | 35 | .. rubric:: :doc:`landsat <modules/landsat>` 36 | 37 | .. automodule:: dnppy.landsat 38 | :members: 39 | 40 | .. rubric:: :doc:`modis <modules/modis>` 41 | 42 | .. automodule:: dnppy.modis 43 | :members: 44 | 45 | .. rubric:: :doc:`radar <modules/radar>` 46 | 47 | .. automodule:: dnppy.radar 48 | :members: 49 | 50 | .. rubric:: :doc:`raster <modules/raster>` 51 | 52 | .. automodule:: dnppy.raster 53 | :members: 54 | 55 | .. rubric:: :doc:`solar <modules/solar>` 56 | 57 | .. automodule:: dnppy.solar 58 | :members: 59 | 60 | .. rubric:: :doc:`textio <modules/textio>` 61 | 62 | .. automodule:: dnppy.textio 63 | :members: 64 | 65 | .. rubric:: :doc:`tsa <modules/tsa>` 66 | 67 | .. automodule:: dnppy.tsa 68 | :members: 69 | -------------------------------------------------------------------------------- /dnppy/core/create_outname.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import os 4 | 5 | 6 | def create_outname(outdir, inname, suffix, ext = False): 7 | """ 8 | Quick way to create unique output file names within iterative functions 9 | 10 | This function is built to simplify the creation of output file names. Function allows 11 | ``outdir = False`` and will create an outname in the same directory as inname. Function will 12 | add the user-input suffix, separated by an underscore "_", to generate an output name. 13 | This is useful when performing a small modification to a file and saving new output with 14 | a new suffix. Function merely returns an output name, it does not save the file as that name. 15 | 16 | :param outdir: either the directory of the desired outname or False to create an outname 17 | in the same directory as the inname 18 | :param inname: the input file from which to generate the output name "outname" 19 | :param suffix: suffix to attach to the end of the filename to mark it as output 20 | :param ext: specify the file extension of the output filename. Leave blank or False 21 | and the outname will inherit the same extension as inname. 22 | 23 | :return outname: the full filepath at which a new file can be created. 24 | 25 | """ 26 | 27 | # isolate the filename from its directory and extension 28 | if os.path.isfile(inname): 29 | head, tail = os.path.split(inname) 30 | noext = tail.split('.')[:-1] 31 | noext = '.'.join(noext) 32 | else: 33 | head = "" 34 | tail = inname 35 | if "."
in inname: 36 | noext = tail.split('.')[:-1] 37 | noext = '.'.join(noext) 38 | else: 39 | noext = inname 40 | 41 | # create the suffix 42 | if ext: 43 | suffix = "_{0}.{1}".format(suffix, ext) 44 | else: 45 | ext = tail.split('.')[-1:] 46 | suffix = "_{0}.{1}".format(suffix, ''.join(ext)) 47 | 48 | if outdir: 49 | outname = os.path.join(outdir, noext + suffix) 50 | return outname 51 | else: 52 | outname = os.path.join(head, noext + suffix) 53 | return outname 54 | -------------------------------------------------------------------------------- /docs/source/modules/download.rst: -------------------------------------------------------------------------------- 1 | download 2 | ======== 3 | 4 | .. automodule:: dnppy.download 5 | :members: 6 | 7 | Examples 8 | -------- 9 | 10 | .. rubric:: Fetching MODIS 11 | 12 | Automated download or "fetching" can be helpful for bulk data retrieval, or for enabling software to pull the most up to date data at runtime. This module has a few fetch functions for different NASA data platforms and products. 13 | As an example, the proper syntax for the `fetch_MODIS` function to download all available data for 2015 (January 1st through December 31st) is 14 | 15 | .. code-block:: python 16 | 17 | from dnppy import download 18 | from datetime import datetime 19 | 20 | product = "MYD11A1" # land surface temperature 21 | version = "041" # version 041 22 | tiles = ["h11v05", "h12v05"] # two tiles of interest (SouthEast US) 23 | outdir = r"C:\Users\jwely\test" # local path to save files 24 | start_dto = datetime(2015,1,1) # datetime object of start date 25 | end_dto = datetime(2015,12,31) # datetime object of end date 26 | 27 | download.fetch_MODIS(product, version, tiles, outdir, start_dto, end_dto) 28 | 29 | The other fetching functions work similarly. 30 | 31 | 32 | Code Help 33 | --------- 34 | 35 | Auto-documentation for functions and classes within this module is generated below! 36 | 37 | .. automodule:: dnppy.download.download_filelist 38 | :members: 39 | 40 | .. automodule:: dnppy.download.download_url 41 | :members: 42 | 43 | .. automodule:: dnppy.download.download_urls 44 | :members: 45 | 46 | .. automodule:: dnppy.download.fetch_GPM_IMERG 47 | :members: 48 | 49 | .. automodule:: dnppy.download.fetch_Landsat8 50 | :members: 51 | 52 | .. automodule:: dnppy.download.fetch_Landsat_WELD 53 | :members: 54 | 55 | .. automodule:: dnppy.download.fetch_MODIS 56 | :members: 57 | 58 | .. automodule:: dnppy.download.fetch_MPE 59 | :members: 60 | 61 | .. automodule:: dnppy.download.fetch_SRTM 62 | :members: 63 | 64 | .. automodule:: dnppy.download.fetch_TRMM 65 | :members: 66 | 67 | .. automodule:: dnppy.download.list_http_e4ftl01 68 | :members: 69 | 70 | .. automodule:: dnppy.download.list_http_waterweather 71 | :members: 72 | 73 | .. 
automodule:: dnppy.download.list_ftp 74 | :members: 75 | 76 | 77 | -------------------------------------------------------------------------------- /undeployed/subjects/DSI/CombinePrecipAllpoint.py: -------------------------------------------------------------------------------- 1 | import os 2 | import gc 3 | import arcpy 4 | from arcpy import env 5 | from arcpy import sa 6 | from arcpy.sa import * 7 | env.overwriteOutput = True 8 | gc.disable() 9 | arcpy.CheckOutExtension("Spatial") 10 | env.workspace = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script" ##WORKSPACE 11 | raw_precip_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip" ##Folder containing raw precip files 12 | interp_F = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Raw_precip/Inter_precip" ##Folder to save interpolated precip rasters 13 | study_area = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/StudyArea/newgp_usgs.shp" ##STUDY AREA shpfile 14 | allpoint = "C:/Users/torne/Documents/GP_Ag/Data/Precipitation/180day_script/Allpoint/allpoint_clip.shp" ##ALLPOINT file 15 | 16 | 17 | #####CREATE BUFFER AND JOIN POINTS 18 | 19 | buff = arcpy.Buffer_analysis(study_area, "SA_Buff", "10000 Meters") 20 | print "SA Buffer: Done" 21 | dis = arcpy.Dissolve_management(buff,"SA_Dis") 22 | print "SA Dissolve: Done" 23 | allpoint_Clip= arcpy.Clip_analysis(allpoint, dis, "AllPointClip") 24 | print "SA Clip: Done" 25 | print "work" 26 | 27 | 28 | 29 | env.workspace = raw_precip_F 30 | cliplist = arcpy.ListFeatureClasses("nws*.shp") 31 | for files in cliplist: 32 | clip = arcpy.Clip_analysis(files,dis,"C:\\Users\\torne\\Documents\\GP_Ag\\Data\\Precipitation\\180day_script\\Raw_precip\\Inter_precip" + "\\" + files[25:33] + "SA.shp") 33 | print "Part I: Clip Complete" 34 | shp_layer = files[25:-33] + "layer" 35 | arcpy.MakeFeatureLayer_management(allpoint_Clip, "allpoint_layer") 36 | arcpy.MakeFeatureLayer_management(clip,shp_layer) 37 | print shp_layer + "Feature Layers Complete" 38 | allpoint_lyr = arcpy.AddJoin_management("allpoint_layer","Id",shp_layer,"Id") 39 | join = arcpy.CopyFeatures_management(allpoint_lyr, "C:\\Users\\torne\\Documents\\GP_Ag\\Data\\Precipitation\\180day_script\\Raw_precip\\Inter_precip" + "\\" + files[25:33] + "_join.shp") 40 | print "Part II: Join Complete" 41 | interpolation = NaturalNeighbor(join, str(files[25:33]) + "_6", 1.10376739501955E-03) 42 | interpolation.save("C:\\Users\\torne\\Documents\\GP_Ag\\Data\\Precipitation\\180day_script\\Raw_precip\\Inter_precip" + "\\" + files[25:33] + "int.tif") 43 | print "Part III: Interpolation Complete" 44 | 45 | print "FIN" 46 | -------------------------------------------------------------------------------- /docs/source/design.rst: -------------------------------------------------------------------------------- 1 | ====== 2 | Design 3 | ====== 4 | 5 | There exist several design themes within dnppy, and an effort is (and should continue to be) made to follow them to keep its use intuitive. 6 | 7 | Modules 8 | ------- 9 | 10 | dnppy is divided into some thematic `modules`_ by purpose. Each module has its own page and some examples to get you started, but they are all used in a similar fashion. You can read more about each specific module on the :doc:`modules summary page <modulesum>`. 11 | 12 | Functional Format 13 | ----------------- 14 | 15 | At heart, dnppy is a simple collection of classes and functions.
Wherever possible, these functions are designed such that they can be assembled in a simple "chain-link" style sequence to perform manipulation and analysis on many files, typically raster data from a NASA sensor, by feeding the outputs of one function into the inputs of another. This approach makes otherwise complex programming tasks more accessible to novice programmers, and provides context for learning more foundational computer programming. 16 | 17 | In the example below, ``foo_outputs``, which is a list of the output files created by ``foo``, is fed directly into the inputs of function ``bar``. 18 | 19 | .. code-block:: python 20 | 21 | foo_outputs = foo(my_filelist, my_arg1) 22 | bar_outputs = bar(foo_outputs, my_arg2) 23 | 24 | This is accomplished by defining functions in a manner similar to the following: 25 | 26 | .. code-block:: python 27 | 28 | def foo(input_filelist, other_argument): 29 | """ does something foo-ish """ 30 | 31 | output_filelist = [] 32 | for input_file in input_filelist: 33 | # do something to create an output_file 34 | output_filelist.append(output_file) 35 | 36 | return output_filelist 37 | 38 | 39 | def bar(input_filelist, other_argument): 40 | """ does something bar-ish """ 41 | 42 | output_filelist = [] 43 | for input_file in input_filelist: 44 | # do something to create an output_file 45 | output_filelist.append(output_file) 46 | 47 | return output_filelist 48 | 49 | 50 | .. note:: Future developers should keep this in mind when building functions to operate on files. You can read more about the specifics on the module and developer pages. 51 | 52 | .. _modules: https://docs.python.org/2/tutorial/modules.html 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /dnppy/convert/_extract_HDF_datatype.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["_extract_HDF_datatype"] 3 | 4 | from _extract_HDF_layer_data import * 5 | from _gdal_dataset_to_tif import * 6 | 7 | from dnppy import core 8 | import os 9 | 10 | 11 | def _extract_HDF_datatype(hdf, layer_indexs, outdir = None, datatype = None, 12 | force_custom = False, nodata_value = None): 13 | """ 14 | This function wraps "_extract_HDF_layer_data" and "_gdal_dataset_to_tif" 15 | It only works for datatypes listed in the datatype_library.csv 16 | 17 | :param hdf: a single hdf filepath 18 | :param layer_indexs: list of int index values of layers to extract 19 | :param outdir: filepath to output directory to place tifs. If left 20 | as "None" output geotiffs will be placed right next to 21 | input HDF. 22 | :param datatype: a dnppy.convert.datatype object created from an 23 | entry in the datatype_library.csv 24 | :param force_custom: if True, this will force the data to take on the 25 | projection and geotransform attributes from 26 | the datatype object, even if valid projection 27 | and geotransform info can be pulled from the gdal 28 | dataset. Should almost never be True.
29 | :param nodata_value: the value to set to NoData 30 | 31 | :return: list of filepaths to output files 32 | """ 33 | 34 | output_filelist = [] 35 | 36 | if outdir is None: 37 | outdir = os.path.dirname(hdf) 38 | 39 | data = _extract_HDF_layer_data(hdf, layer_indexs) 40 | layer_indexs = core.enf_list(layer_indexs) 41 | for layer_index in layer_indexs: 42 | 43 | dataset = data[layer_index] 44 | outpath = core.create_outname(outdir, hdf, str(layer_index), "tif") 45 | 46 | print("creating dataset at {0}".format(outpath)) 47 | 48 | _gdal_dataset_to_tif(dataset, outpath, 49 | cust_projection = datatype.projectionTXT, 50 | cust_geotransform = datatype.geotransform, 51 | force_custom = force_custom, 52 | nodata_value = nodata_value) 53 | 54 | output_filelist.append(outpath) 55 | 56 | return output_filelist 57 | 58 | -------------------------------------------------------------------------------- /undeployed/legacy/VIIRS/QFreader.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: VIIRS Vegetation Index Quality Flag Reader 3 | # Purpose: To read quality flag information that is stored bitwise 4 | # 5 | # Author: Quinten Geddes - NASA DEVELOP Program 6 | # 7 | # Created: 14/11/2012 8 | 9 | #------------------------------------------------------------------------------- 10 | import numpy as np 11 | import arcpy 12 | from tempfile import TemporaryFile 13 | 14 | InputRaster=arcpy.GetParameterAsText(0) 15 | FlagType=arcpy.GetParameterAsText(1) 16 | OutputFolder=arcpy.GetParameterAsText(2) 17 | OutputFileName=arcpy.GetParameterAsText(3) 18 | blockSize=500 19 | 20 | if not ".tif" in OutputFileName: 21 | OutputFileName+=".tif" 22 | 23 | FlagIndex = {"Land_Water":"0:3","Cloud_Cover":"3:5","Thin_Cirrus":"7"} 24 | Land_Water = {"101":1,"011":2,"010":3,"001":4,"000":5,"100":0,"111":0} 25 | Cloud_Cover= {"11":1,"10":2,"01":3,"00":4} 26 | Thin_Cirrus= {"0":1,"1":2} # name must match the "Thin_Cirrus" key in FlagIndex, which the exec() below looks up 27 | FlagInd=FlagIndex[FlagType] 28 | 29 | 30 | def bitreader(integer): 31 | binary='{0:08b}'.format(integer) 32 | exec("NewFlag={0}[binary[{1}]]".format(FlagType,FlagInd)) 33 | return NewFlag 34 | 35 | 36 | vbitreader = np.vectorize(bitreader) 37 | inputdata=arcpy.RasterToNumPyArray(InputRaster) 38 | 39 | OutputArray=np.memmap(TemporaryFile(),"uint8","w+",shape=inputdata.shape) 40 | 41 | descData=arcpy.Describe(InputRaster+"\\Band_1") 42 | cellSize=descData.meanCellHeight 43 | sr= descData.spatialReference 44 | extent=descData.Extent 45 | pnt=arcpy.Point(extent.XMin,extent.YMin) 46 | rows=inputdata.shape[0] 47 | cols=inputdata.shape[1] 48 | 49 | for i in range(0, rows, blockSize): 50 | if (i + blockSize) < rows: 51 | numRows = blockSize 52 | else: 53 | numRows = rows - i 54 | for j in range(0, cols, blockSize): 55 | if (j + blockSize) < cols: 56 | numCols = blockSize 57 | else: 58 | numCols = cols - j 59 | OutputArray[i:i+numRows,j:j+numCols]=vbitreader(inputdata[i:i+numRows,j:j+numCols]) 60 | arcpy.AddMessage("row {0} of {1} column {2} of {3}" 61 | .format((i/blockSize+1),int(rows/blockSize+1), 62 | (j/blockSize+1),int(cols/blockSize+1))) 63 | 64 | outraster = arcpy.NumPyArrayToRaster(OutputArray,pnt, cellSize,cellSize,0) 65 | outraster.save(OutputFolder+"\\"+OutputFileName)
["raster_overlap"] 3 | 4 | 5 | from to_numpy import to_numpy 6 | from from_numpy import from_numpy 7 | from is_rast import is_rast 8 | from clip_and_snap import clip_and_snap 9 | 10 | 11 | import arcpy 12 | 13 | 14 | def raster_overlap(file_A, file_B, outpath, NoData_A = None, NoData_B = None): 15 | """ 16 | Finds overlaping area between two raster images. this function examines 17 | two images and outputs a raster identifying pixels where both rasters have 18 | non-NoData values. Output raster has 1's where both images have data and 19 | 0's where one or both images are missing data. 20 | 21 | :param file_A: the first file 22 | :param file_B: the second file 23 | :param outpath: the output filename for the desired output. must end in ".tif" 24 | :param NoData_A: the NoData value of file A 25 | :param NoData_B: the NoData value of file B 26 | 27 | :return outpath: filepath to raster created by this function. 28 | 29 | This function automatically invokes 30 | * clip_and_snap 31 | * null_define 32 | """ 33 | 34 | if not is_rast(file_A) or not is_rast(file_B): 35 | raise Exception('both inputs must be rasters!') 36 | 37 | 38 | # load the rasters as numpy arays. 39 | a, metaA = to_numpy(file_A) 40 | b, metaB = to_numpy(file_B) 41 | 42 | # set no_datas 43 | if NoData_A is None: 44 | NoData_A = metaA.NoData_Value 45 | if NoData_B is None: 46 | NoData_B = metaB.NoData_Value 47 | 48 | # spatially match the rasters 49 | print('preparing input rasters!') 50 | clip_and_snap(file_A, file_B, outpath.replace(".shp",".tif"), NoData_B) 51 | 52 | # reload the rasters as numpy arrays now that spatial matching is done 53 | a, metaA = to_numpy(file_A) 54 | b, metaB = to_numpy(file_B) 55 | 56 | # create work matrix and find the overlap 57 | print('Finding overlaping pixels!') 58 | Workmatrix = a.mask + b.mask 59 | Workmatrix = Workmatrix.astype('uint8') 60 | Workmatrix[Workmatrix == 1] = 2 61 | 62 | print('Saving overlap file!') 63 | metaA.numpy_datatype = 'uint8' 64 | from_numpy(Workmatrix, metaA, outpath.replace(".shp",".tif"), NoData_Value = 2) 65 | arcpy.RasterToPolygon_conversion(outpath.replace(".shp",".tif"), 66 | outpath.replace(".tif",".shp"), 67 | 'NO_SIMPLIFY') 68 | 69 | return outpath 70 | -------------------------------------------------------------------------------- /dnppy/download/fetch_MPE.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from download_url import download_url 4 | 5 | from datetime import timedelta, datetime 6 | import os 7 | 8 | __all__ = ["fetch_MPE"] 9 | 10 | 11 | def fetch_MPE(start_dto, end_dto, outdir, area = None): 12 | """ 13 | Fetches Multisensor Precipitation Estimates data from 14 | weather/noaa server at [http://water.weather.gov/precip/p_download_new/] 15 | 16 | :param start_dto: datetime object for start date of desired range 17 | :param end_dto: datetime object for end date of desired range 18 | :param outdir: output directory where files should be saved (str) 19 | :param area: area of interest, either "conus", "ak" or "pr" 20 | for continental us, alaska, or Puerto Rico respectively 21 | 22 | :return output_files: list of output files fetched by this function 23 | """ 24 | 25 | # set defaults 26 | if area is None: 27 | area = "conus" 28 | 29 | server = "http://water.weather.gov" 30 | 31 | # use start and end datetimes to build list of dates 32 | dates = [] 33 | output_files = [] 34 | date_delta = end_dto - start_dto 35 | 36 | for i in range(date_delta.days +1): 37 | dates.append(start_dto + 
timedelta(days = i)) 38 | 39 | # try to download all files for dates 40 | for date in dates: 41 | workdir = "/".join([server, "precip","p_download_new", 42 | str(date.year), 43 | str(date.month).zfill(2), 44 | str(date.day).zfill(2)]) 45 | 46 | filename = "nws_precip_{0}_{1}{2}{3}.nc".format(area, 47 | str(date.year), 48 | str(date.month).zfill(2), 49 | str(date.day).zfill(2)) 50 | try: 51 | full_url = "/".join([workdir, filename]) 52 | outname = os.path.join(outdir, filename) 53 | download_url(full_url, outname) 54 | output_files.append(outname) 55 | print("Downloaded '{0}'".format(filename)) 56 | 57 | except: 58 | print("Could not find MPE data for '{0}' on {1}".format(area, date)) 59 | 60 | return output_files 61 | 62 | 63 | if __name__ == "__main__": 64 | startdto = datetime(2015,1,1) 65 | enddto = datetime(2015,12,31) 66 | fetch_MPE(startdto, enddto, r"C:\Users\jwely\Desktop\troubleshooting\MPE") -------------------------------------------------------------------------------- /dnppy/convert/extract_GPM_IMERG.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["extract_GPM_IMERG"] 3 | 4 | from dnppy import core 5 | from datatype_library import * 6 | from _extract_HDF_datatype import * 7 | 8 | def extract_GPM_IMERG(hdf_list, layer_indexs, outdir = None, resolution = "0.1"): 9 | """ 10 | Extracts GPM_IMERG data from its HDF5 format. 11 | 12 | :param hdf_list: list of hdf files or directory with hdfs 13 | :param layer_indexs: list of integer layer indexes 14 | :param outdir: directory to place outputs 15 | :param resolution: The size of a pixel in degrees, either 16 | "0.1" or "0.15" depending on GPM product. 17 | :return: a list of all files created as output 18 | 19 | Typical contents of a GPM HDF are: 20 | 21 | == =========== ================================ ============== 22 | ID layer shape Layer name data type 23 | == =========== ================================ ============== 24 | 0 [3600x1800] HQobservationTime (16-bit int) 25 | 1 [3600x1800] HQprecipSource (16-bit int) 26 | 2 [3600x1800] HQprecipitation (32-bit float) 27 | 3 [3600x1800] IRkalmanFilterWeight (16-bit int) 28 | 4 [3600x1800] IRprecipitation (32-bit float) 29 | 5 [3600x1800] precipitationCal (32-bit float) 30 | 6 [3600x1800] precipitationUncal (32-bit float) 31 | 7 [3600x1800] probabilityLiquidPrecipitation (16-bit int) 32 | 8 [3600x1800] randomError (32-bit float) 33 | == =========== ================================ ============== 34 | """ 35 | 36 | hdf_list = core.enf_filelist(hdf_list) 37 | output_filelist = [] 38 | 39 | # load the GPM datatype from the library 40 | datatype = datatype_library()["GPM_IMERG_{0}_GLOBAL".format(resolution)] 41 | 42 | # for every hdf file in the input list 43 | for hdf in hdf_list: 44 | # extract layers and add the new filepaths to the output filelist 45 | hdf_output_filelist = _extract_HDF_datatype(hdf, layer_indexs, outdir, 46 | datatype, nodata_value = -9999.9) 47 | output_filelist += hdf_output_filelist 48 | 49 | return output_filelist 50 | 51 | 52 | if __name__ == "__main__": 53 | rasterpath = r"C:\Users\jwely\Desktop\troubleshooting\3B-HHR-L.MS.MRG.3IMERG.20150401-S233000-E235959.1410.V03E.RT-H5" 54 | outdir = r"C:\Users\jwely\Desktop\troubleshooting" 55 | extract_GPM_IMERG(rasterpath, [5], outdir, "0.1") -------------------------------------------------------------------------------- /dnppy/convert/extract_archive.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely'
__author__ = 'jwely' 2 | 3 | import os 4 | import tarfile 5 | import gzip 6 | import zipfile 7 | from dnppy import core 8 | 9 | __all__ = ["extract_archive"] 10 | 11 | def extract_archive(filepaths, delete_originals = False): 12 | """ 13 | Input list of filepaths OR a directory path with compressed 14 | files in it. Attempts to decompress the following formats 15 | 16 | Support formats include ``.tar.gz``, ``.tar``, ``.gz``, ``.zip``. 17 | 18 | :param filepaths: list of filepaths to archives for extraction 19 | :param delete_originals: Set to "True" if archives may be deleted after 20 | their contents is successful extracted. 21 | """ 22 | 23 | filepaths = core.enf_filelist(filepaths) 24 | 25 | for filepath in filepaths: 26 | 27 | head,tail = os.path.split(filepath) 28 | 29 | if filepath.endswith(".tar.gz"): 30 | with tarfile.open(filepath, 'r:gz') as tfile: 31 | outdir = os.path.join(head, tail.replace(".tar.gz","")) 32 | tfile.extractall(outdir) 33 | 34 | # gzip only compresses single files 35 | elif filepath.endswith(".gz"): 36 | with gzip.open(filepath, 'rb') as gzfile: 37 | outfile = os.path.join(head, tail.replace(".gz","")) 38 | content = gzfile.read() 39 | with open(outfile, 'wb') as of: 40 | of.write(content) 41 | 42 | elif filepath.endswith(".tar"): 43 | with tarfile.open(filepath, 'r') as tfile: 44 | outdir = os.path.join(head, tail.replace(".tar","")) 45 | tfile.extractall(outdir) 46 | 47 | elif filepath.endswith(".zip"): 48 | with zipfile.ZipFile(filepath, "r") as zipf: 49 | outdir = os.path.join(head, tail.replace(".zip","")) 50 | zipf.extractall(outdir) 51 | 52 | else: return 53 | 54 | print("Extracted {0}".format(filepath)) 55 | 56 | if delete_originals: 57 | os.remove(filepath) 58 | 59 | return 60 | 61 | 62 | #testing area 63 | if __name__ == "__main__": 64 | formats = [r"C:\Users\jwely\Desktop\troubleshooting\zip_tests\MOD09A1.A2015033.h11v05.005.2015044233105_1_tar.tar", 65 | r"C:\Users\jwely\Desktop\troubleshooting\zip_tests\MOD09A1.A2015033.h11v05.005.2015044233105_1_targz.tar.gz", 66 | r"C:\Users\jwely\Desktop\troubleshooting\zip_tests\MOD09A1.A2015033.h11v05.005.2015044233105_1.tif.gz", 67 | r"C:\Users\jwely\Desktop\troubleshooting\zip_tests\MOD09A1.A2015033.h11v05.005.2015044233105_1_zip.zip"] 68 | 69 | for format in formats: 70 | extract_archive(format) 71 | -------------------------------------------------------------------------------- /dnppy/download/download_filelist.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from dnppy import core 4 | from download_urls import download_urls 5 | import os, time 6 | 7 | __all__ = ["download_filelist"] 8 | 9 | def download_filelist(ftp_texts, file_type = None, outdir = None): 10 | """ 11 | Reads text file of download links, downloads them. 12 | 13 | This script reads a text file with urls such as those output from ECHO REVERB 14 | and outputs them to an output directory. It will retry failed links 20 times before 15 | giving up and outputting a warning to the user. 16 | 17 | :param ftp_texts: array of txt files ordered from reverb containing ftp links 18 | :param file_type: file extension of the desired files, leave blank or False to grab all types. 19 | :param outdir: folder where files are to be placed after download 20 | 21 | :return list failed: list of files which failed to download after the end of the script. 
22 | """ 23 | 24 | failed = [] 25 | 26 | # force inputs to take list format 27 | ftp_texts = core.enf_list(ftp_texts) 28 | if file_type is not None: 29 | file_type = core.enf_list(file_type) 30 | 31 | for ftptext in ftp_texts: 32 | #verify that things exist. 33 | core.exists(ftptext) 34 | 35 | if not outdir: 36 | outdir,_ = os.path.split(ftptext) 37 | 38 | ftp = open(ftptext,'r') 39 | sites = ftp.readlines() 40 | 41 | print("Attempting to download {0} files!".format(len(sites))) 42 | print("Saving all files to {0}".format(outdir)) 43 | 44 | # perform the first attempt 45 | failed = download_urls(sites, outdir, file_type) 46 | 47 | # for 19 more times, if there are still items in the failed list, try again 48 | for i in range(1,19): 49 | if len(failed)>0: 50 | print("retry number {0} to grab {1} failed downloads!".format(i,len(failed))) 51 | time.sleep(60) 52 | failed = download_urls(failed, file_type, outdir) 53 | 54 | # once all tries are complete, print a list of files which repeatedly failed 55 | if len(failed)>0: 56 | print('Files at the following URLs have failed 20 download attempts') 57 | print('Manually verify that these files exist on the server:') 58 | for i in failed: 59 | print(i) 60 | else: 61 | print('Finished with no errors!') 62 | 63 | # close the open text files and finish up 64 | ftp.close() 65 | 66 | return failed 67 | 68 | 69 | # testing area 70 | if __name__ == "__main__": 71 | 72 | download_filelist("reverb_filelist.txt", 73 | outdir = r"C:\Users\jwely\Desktop\troubleshooting\rawMODIS") -------------------------------------------------------------------------------- /dnppy/download/download_url.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import urllib 4 | import ftplib 5 | import os 6 | 7 | __all__ = ["download_url"] 8 | 9 | 10 | def download_url(url, outname, username = None, password = None, force_overwrite = False): 11 | """ 12 | Download a single file. input source url and output filename 13 | 14 | :param url: a string url to file for download 15 | :param outname: filepath to write location of file 16 | :param username: use if url leads to file on ftp server with username 17 | :param password: use if url leads to file on ftp server with password 18 | :param force_overwrite: by default, this function will not overwrite existing files 19 | on drive. This is to avoid accidental overwrite of critical 20 | files as well as repeated download of the same files. 21 | 22 | :return outname: returns filepath to locally created file after download 23 | """ 24 | 25 | # if output file already exists, do not download a file there. 
26 | if os.path.isfile(os.path.abspath(outname)) and not force_overwrite: 27 | return outname 28 | 29 | head, tail = os.path.split(outname) 30 | if not os.path.exists(head) and head != "": 31 | os.makedirs(head) 32 | 33 | if "http" in url[:4]: 34 | connection = urllib.urlopen(url) 35 | page = connection.read() 36 | 37 | # escapes in the event of a 404 not found 38 | if "404 Not Found" in page: 39 | return None 40 | 41 | writefile = open(outname, 'wb+') 42 | writefile.write(page) 43 | writefile.close() 44 | del connection 45 | return outname 46 | 47 | elif "ftp:" in url[:4]: 48 | filename = os.path.basename(url) 49 | server = url.split("/")[2] 50 | path = "/".join(url.split("/")[3:-1]) 51 | 52 | ftp = ftplib.FTP(server) 53 | 54 | # log in to the server with user specified username and password 55 | if username is not None and password is not None: 56 | ftp.login(username, password) 57 | elif username is not None: 58 | ftp.login(username) 59 | else: 60 | ftp.login() 61 | 62 | ftp.cwd(path) 63 | ftp.retrbinary("RETR " + filename, open(outname, 'wb').write) 64 | ftp.quit() 65 | return outname 66 | 67 | else: 68 | raise Exception("Unknown url protocol type, must be http or ftp") 69 | 70 | 71 | 72 | 73 | if __name__ == "__main__": 74 | 75 | url = "http://water.weather.gov/precip/p_download_new/2002/01/05/nws_precip_conus_20020105.nc" 76 | outpath = r"C:\Users\jwely\Desktop\troubleshooting\test.nc" 77 | download_url(url, outpath) 78 | -------------------------------------------------------------------------------- /dnppy/convert/ll_to_utm.py: -------------------------------------------------------------------------------- 1 | __author__ = ['Qgeddes', 'Jwely'] 2 | __all__ = ["ll_to_utm"] 3 | 4 | import numpy as np 5 | 6 | def ll_to_utm(lat, lon, utm_zone, hemisphere): 7 | """ 8 | Function converts lat/lon to UTM zone coordinates. Equations from USGS 9 | bulletin 1532. East and North are positive, South and West are negative. 10 | 11 | :param lat: latitude value in degrees (North is positive) 12 | :param lon: longitude value in degrees (East is positive) 13 | :param utm_zone: UTM zone number as an integer, without the "S" or "N" 14 | :param hemisphere: hemisphere for UTM zone, either "S", or "N" 15 | 16 | :return: UTM_easting, UTM_northing 17 | """ 18 | a = 6378137 19 | eccSq = 0.00669438 # ECC squared 20 | k0 = 0.9996 21 | 22 | # Make sure the longitude is between -180.00 ..
179.9 23 | LongTemp = (lon + 180) - ((lon + 180) / 360).astype('int32') * 360 - 180 24 | 25 | # convert to radians 26 | LatRad = lat * np.pi / 180.0 27 | LongRad = LongTemp * np.pi / 180.0 28 | 29 | # find the origin of longitude in radians 30 | LongOrigin = (int(utm_zone) - 1) * 6 - 180 + 3 # +3 puts origin in middle of zone 31 | LongOriginRad = LongOrigin * np.pi / 180.0 32 | 33 | # find set of coefficients 34 | eccPrSq = eccSq / (1 - eccSq) # ECC prime squared 35 | N = a / np.sqrt(1 - eccSq * np.sin(LatRad) * np.sin(LatRad)) 36 | T = np.tan(LatRad) * np.tan(LatRad) 37 | C = eccPrSq * np.cos(LatRad) * np.cos(LatRad) 38 | A = np.cos(LatRad) * (LongRad-LongOriginRad) 39 | 40 | # generate M 41 | M = a * ((1 42 | - eccSq / 4 43 | - 3 * eccSq * eccSq / 64 44 | - 5 * eccSq * eccSq * eccSq / 256) * LatRad 45 | - (3 * eccSq / 8 46 | + 3 * eccSq * eccSq / 32 47 | + 45 * eccSq * eccSq * eccSq / 1024) * np.sin(2 * LatRad) 48 | + (15 * eccSq * eccSq / 256 + 45 * eccSq * eccSq * eccSq / 1024) * np.sin(4 * LatRad) 49 | - (35 * eccSq * eccSq * eccSq / 3072) * np.sin(6 * LatRad)) 50 | 51 | 52 | # calculate UTM coordinates for input lat/lon 53 | UTM_easting = (k0 * N * (A + (1 - T + C) * (A ** 3) / 6 54 | + (5 - 18 * T + T * T + 72 * C - 58 * eccPrSq) * (A ** 5) / 120) 55 | + 500000.0) 56 | 57 | UTM_northing = (k0 * (M + N * np.tan(LatRad) * ((A * A / 2) + (5 - T + 9 * C + 4 * C * C) * ((A ** 4) / 24) 58 | + (61 - 58 * T + (T * T) + (600 * C) - (330 * eccPrSq)) * (A ** 6) / 720))) 59 | 60 | # apply a 10000000 meter offset for southern hemisphere 61 | if hemisphere == "S": 62 | UTM_northing += 10000000.0 63 | 64 | return UTM_easting, UTM_northing -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/dnppy_limited/time_series/csv_io.py: -------------------------------------------------------------------------------- 1 | """ 2 | Referenced by time_series module 3 | 4 | quick and dirty script for simple read and write of csv files 5 | 6 | It contains some information about special formats that may be 7 | frequently used by DEVELOP participants 8 | 9 | """ 10 | 11 | __author__ = ["Jeffry Ely, jeff.ely.08@gmail.com"] 12 | 13 | # standard imports 14 | import os 15 | 16 | 17 | def read_csv_rows(filepath, has_headers = True, delim = ',', spec_format = False): 18 | """ 19 | import csv data as standard rows 20 | 21 | It allows some custom spec_format flags to be used for very special 22 | case datasets: 23 | 24 | spec_format: 25 | DS3505 26 | data downloaded from the following website has a peculiarity 27 | [http://gis.ncdc.noaa.gov/map/viewer/#app=cdo&cfg=cdo&theme=hourly&layers=1&node=gi] 28 | in that it has some uppercase T's that are rarely needed, but ruin otherwise 29 | uniform space formatting.
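A short usage sketch (the filename is hypothetical):

            data, headers = read_csv_rows("wx_hourly.txt", delim = " ",
                                          spec_format = "DS3505")

        returns the rows as lists of strings alongside the header names.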
30 | """ 31 | 32 | with open(filepath,'r') as f: 33 | 34 | data = [] 35 | 36 | if has_headers: 37 | headers = next(f).replace('\n','').split(delim) 38 | headers = [x for x in headers if x != ""] # remove emptys 39 | else: 40 | headers = False 41 | 42 | for line in f: 43 | 44 | if spec_format == "DS3505": 45 | entry = line.replace("T"," ").replace("\n","").split(delim) 46 | else: 47 | entry = line.replace("\n","").split(delim) 48 | 49 | entry = [x for x in entry if x!= ""] # remove emptys 50 | data.append(entry) 51 | f.close() 52 | 53 | print("Loaded data from '{0}'".format(filepath)) 54 | return data, headers 55 | 56 | 57 | def read_csv_cols(filepath, has_headers = True, delim = ','): 58 | """import csv data in columnwise format (transposed)""" 59 | 60 | data, headers = read_csv_rows(filepath, has_headers, delim) 61 | return zip(*data), headers 62 | 63 | 64 | def write_csv_rows(data, headers, filepath): 65 | """ writes some row wise data structure to a csv file""" 66 | 67 | with open(filepath,'w+') as f: 68 | 69 | if headers: 70 | f.write(','.join(headers) + '\n') 71 | 72 | for row in data: 73 | row = map(str,row) 74 | entry = ','.join(row) + '\n' 75 | f.write(entry) 76 | f.close() 77 | 78 | print("Saved data to '{0}'".format(filepath)) 79 | return 80 | 81 | 82 | def write_csv_cols(data, headers, filepath): 83 | """ writes some column wise data structure to a csv file""" 84 | 85 | data = zip(*data) 86 | write_csv_rows(data, headers, filepath) 87 | return 88 | 89 | 90 | -------------------------------------------------------------------------------- /dnppy/download/download_urls.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from dnppy import core 4 | from download_url import download_url 5 | import os, time 6 | 7 | __all__ = ["download_urls"] 8 | 9 | def download_urls(url_list, outdir, file_types = None): 10 | """ 11 | Downloads a list of files. Retries failed downloads 12 | 13 | This script downloads a list of files and places it in the output directory. It was 14 | built to be nested within "Download_filelist" to allow loops to continuously retry 15 | failed files until they are successful or a retry limit is reached. 16 | 17 | :param url_list: array of urls, probably as read from a text file 18 | :param file_types: list of file types to download. Useful for excluding extraneous 19 | metadata by only downloading 'hdf' or 'tif' for example. Please note 20 | that often times, you actually NEED the metadata. 21 | :param outdir: folder where files are to be placed after download 22 | 23 | :return failed: list of files which failed download 24 | """ 25 | 26 | failed = [] 27 | url_list = core.enf_list(url_list) 28 | 29 | # creates output folder at desired path if it doesn't already exist 30 | if not os.path.exists(outdir): 31 | os.makedirs(outdir) 32 | 33 | # establish a wait time that will increase when downloads fail. This helps to reduce 34 | # the frequency of REVERB server rejections for requesting too many downloads 35 | wait = 0 36 | 37 | for site in url_list: 38 | download = False 39 | url = site.rstrip() 40 | sub = url.split("/") 41 | leng = len(sub) 42 | name = sub[leng-1] 43 | 44 | # Determine whether or not to download the file based on filetype. 45 | if file_types is not None: 46 | for filetype in file_types: 47 | if filetype in name[-4:]: 48 | download = True 49 | else: 50 | download = True 51 | 52 | # attempt download of the file, or skip it. 
53 | if download: 54 | 55 | try: 56 | # wait for the wait time before attempting writing a file 57 | time.sleep(wait) 58 | download_url(url, os.path.join(outdir,name)) 59 | print("downloaded {0} (wait = {1}s)".format(name, wait)) 60 | 61 | # gradually reduce the wait time when downloads succeed. 62 | if wait >= 1: 63 | wait -= 1 64 | 65 | # add to the fail count if the download is unsuccessful and wait longer next time. 66 | except: 67 | print("{0} will be retried! (wait = {1}s)".format(name, wait)) 68 | wait += 5 69 | failed.append(url) 70 | 71 | print("Finished downloading urls!") 72 | return failed -------------------------------------------------------------------------------- /dnppy/raster/from_numpy.py: -------------------------------------------------------------------------------- 1 | __author__ = "jwely" 2 | __all__ = ["from_numpy"] 3 | 4 | import arcpy 5 | arcpy.env.overwriteOutput = True 6 | import numpy 7 | import warnings 8 | 9 | def from_numpy(numpy_rast, metadata, outpath, NoData_Value = None): 10 | """ 11 | Wrapper for the arcpy.NumPyArrayToRaster function with better metadata handling 12 | 13 | This is just a wrapper for the NumPyArrayToRaster function within arcpy. It is used in 14 | conjunction with to_numpy to streamline reading image files in and out of numpy 15 | arrays. It also ensures that all spatial referencing and projection info is preserved 16 | between inputs and outputs of numpy manipulations. 17 | 18 | :param numpy_rast: The numpy array version of the input raster 19 | :param metadata: The variable exactly as output from "to_numpy" 20 | :param outpath: Output filepath of the individual raster 21 | :param NoData_Value: The no data value of the output raster 22 | 23 | :return outpath: Same as input outpath, filepath to created file. 24 | 25 | Usage example 26 | call to_numpy with "rast,metadata = to_numpy(Raster)" 27 | perform numpy manipulations as you please 28 | then save the array with "raster.from_numpy(rast, metadata, output)" 29 | """ 30 | 31 | numpy_rast = numpy_rast.astype(metadata.numpy_datatype) 32 | 33 | if NoData_Value is None: 34 | NoData_Value = metadata.NoData_Value 35 | 36 | llcorner = arcpy.Point(metadata.Xmin, metadata.Ymin) 37 | 38 | # save the output.
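# masked arrays have their masked pixels burned in as NoData before saving;
# plain ndarrays are written as-is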
39 | if isinstance(numpy_rast, numpy.ma.core.MaskedArray): 40 | mask = numpy_rast.mask 41 | data = numpy_rast.data 42 | data[mask] = NoData_Value 43 | 44 | OUT = arcpy.NumPyArrayToRaster(data, llcorner, metadata.cellWidth, metadata.cellHeight) 45 | OUT.save(outpath) 46 | 47 | elif isinstance(numpy_rast, numpy.ndarray): 48 | OUT = arcpy.NumPyArrayToRaster(numpy_rast, llcorner, metadata.cellWidth, metadata.cellHeight) 49 | OUT.save(outpath) 50 | 51 | # define its projection 52 | try: 53 | arcpy.DefineProjection_management(outpath, metadata.projection) 54 | except: 55 | warnings.warn("Unable to define the projection on {0}".format(outpath)) 56 | 57 | # reset the NoData_Values 58 | try: 59 | arcpy.SetRasterProperties_management( 60 | outpath, 61 | data_type = "#", 62 | statistics = "#", 63 | stats_file = "#", 64 | nodata = "1 " + str(NoData_Value)) 65 | 66 | except: 67 | warnings.warn("Unable to establish NoData profile on {0}".format(outpath)) 68 | 69 | # calculate statistics and pyramids 70 | arcpy.CalculateStatistics_management(outpath) 71 | arcpy.BuildPyramids_management(outpath) 72 | 73 | print("Saved output file as {0}".format(outpath)) 74 | 75 | return outpath 76 | -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/Cloud_Fill.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy 3 | import osr 4 | from osgeo import gdal 5 | from scipy import misc 6 | import rasterio 7 | 8 | folder_path=r"C:\Users\sbarron\Desktop\Test\Output\Newfolder" 9 | 10 | #Part 1: Convert NoData values to -9999 in order to solve patching issue later on. 11 | #Need to run first part of script for each individual file. 12 | file1= "Clip_CGSM_1989_B3_atm.tif" 13 | 14 | #Open raster twice, using rasterio and gdal 15 | raster= rasterio.open(file1) 16 | raster1= gdal.Open(file1) 17 | 18 | #Create array of the same size as the input raster, but fill it with place holder values of 0. 19 | Array= numpy.zeros(shape=(raster.height,raster.width)) 20 | 21 | #For loop reads each line in the input raster as an array, if pixel value == "nan", replaces value with -9999. 22 | #During each iteration, values replace placeholder 0 values in the created array above. 23 | x=0 24 | for line in raster.read_band(1): 25 | List=list() 26 | for pixel in line: 27 | if str(pixel)== "nan": 28 | List.append("-9999.0") 29 | else: 30 | List.append(pixel) 31 | Array[x]= List 32 | x=x+1 33 | 34 | #Once the for loop is finished, we need to convert the numpy array into a raster. 35 | def array_to_raster(array, output_file): 36 | dst_filename = output_file 37 | 38 | #Get values 39 | x_pixels = raster.width # number of pixels in x 40 | y_pixels = raster.height # number of pixels in y 41 | PIXEL_SIZE = 30 # size of the pixel... 42 | x_min = raster1.GetGeoTransform()[0] 43 | y_max = raster1.GetGeoTransform()[3] # x_min & y_max are like the "top left" corner. 44 | wkt_projection = raster1.GetProjection() 45 | 46 | driver = gdal.GetDriverByName('GTiff') 47 | 48 | dataset = driver.Create( 49 | dst_filename, 50 | x_pixels, 51 | y_pixels, 52 | 1, 53 | gdal.GDT_Float32, ) 54 | 55 | dataset.SetGeoTransform(( 56 | x_min, # 0 57 | PIXEL_SIZE, # 1 58 | 0, # 2 59 | y_max, # 3 60 | 0, # 4 61 | -PIXEL_SIZE)) 62 | 63 | dataset.SetProjection(wkt_projection) 64 | dataset.GetRasterBand(1).WriteArray(array) 65 | dataset.FlushCache() # Write to disk.
66 | return dataset, dataset.GetRasterBand(1) # If you need the band, remember to also return the dataset, because the band can't live without the dataset. 67 | 68 | 69 | array_to_raster(Array, "CGSM_1989_B3_9999.tif") 70 | 71 | print("Process Completed.") 72 | 73 | #Part 2 74 | #This portion will then fill in the holes left by clouds in file1 with data from file2 75 | file1= "Input1.tif" 76 | file2= "Input2.tif" 77 | Output_File= "Output_File.tif" 78 | 79 | os.system('gdal_calc.py -A %s -B %s --outfile=%s --calc="((A==-9999)*B)+((A>=-9000)*A)" --type="Float32" --NoDataValue=-9999.0' % (file1,file2,Output_File)) 80 | 81 | -------------------------------------------------------------------------------- /dnppy/raster/new_mosaic.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | __all__ = ["new_mosaic"] 3 | 4 | from enf_rastlist import enf_rastlist 5 | from to_numpy import to_numpy 6 | from from_numpy import from_numpy 7 | import numpy 8 | import arcpy 9 | import os 10 | 11 | 12 | def new_mosaic(rasterpaths, output_path, mosaic_method = None, cell_size = None, number_of_bands = None): 13 | """ 14 | Simply creates a new raster dataset mosaic of input rasters by wrapping the 15 | arcpy.MosaicToNewRaster_management function. Learn more about the fields here 16 | 17 | http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#//001700000098000000 18 | 19 | :param rasterpaths: list of complete filepaths to raster data to mosaic 20 | :param output_path: place to save new mosaic raster dataset 21 | :param mosaic_method: options are "FIRST", "LAST", "BLEND", "MEAN", "MINIMUM","MAXIMUM" 22 | :param cell_size: of format "[cellwidth] [cellheight]" in the appropriate linear units, 23 | usually meters. 24 | :param number_of_bands: the number of bands in the output raster (defaults to 1) 25 | 26 | :return output_path: returns filepath to new file, same as input ``output_path`` 27 | """ 28 | 29 | # set up input parameters 30 | if mosaic_method is None: 31 | mosaic_method = "FIRST" 32 | 33 | if cell_size is not None: 34 | print("using custom cell size of '{0}'".format(cell_size)) 35 | 36 | if number_of_bands is None: 37 | number_of_bands = 1 38 | 39 | rasterpaths = enf_rastlist(rasterpaths) 40 | 41 | # get some metadata about the first raster in the mosaic 42 | first_rast, meta = to_numpy(rasterpaths[0]) 43 | 44 | # check output directories and set up inputs for arcpy function 45 | outdir, outname = os.path.split(output_path) 46 | 47 | if not os.path.exists(outdir): 48 | os.makedirs(outdir) 49 | 50 | arcpy.MosaicToNewRaster_management(rasterpaths, outdir, outname, 51 | None, # coordinate system 52 | meta.pixel_type, 53 | cell_size, 54 | str(number_of_bands), 55 | mosaic_method = mosaic_method) 56 | 57 | print("Created raster mosaic at {0}".format(output_path)) 58 | return output_path 59 | 60 | 61 | 62 | if __name__ == "__main__": 63 | 64 | adir = r"C:\Users\jwely\Desktop\Team_Projects\2015_sumer_CO_water\LiDAR_Format_Trial" 65 | outpath = os.path.join(adir, "mosaic", "test_mosaic.tif") 66 | new_mosaic(adir, outpath, mosaic_method = "FIRST") 67 | 68 | rast, meta = to_numpy(outpath) 69 | rast.data[numpy.isnan(rast.data)] = 0 70 | rast.data[(2452 >= rast.data) & (rast.data >= 2450)] = numpy.nan 71 | rast.data[(2430 >= rast.data) & (rast.data >= 2428)] = numpy.nan 72 | rast.data[(2350 >= rast.data) & (rast.data >= 2348)] = numpy.nan 73 | from_numpy(rast, meta, outpath.replace(".tif","_gaps.tif")) -------------------------------------------------------------------------------- /dnppy/convert/extract_GCMO_NetCDF.py:
-------------------------------------------------------------------------------- 1 | 2 | __all__ = ["extract_GCMO_NetCDF"] 3 | 4 | # standard imports 5 | import os 6 | from dnppy import core 7 | 8 | # arcpy imports 9 | import arcpy 10 | if arcpy.CheckExtension('Spatial')=='Available': 11 | arcpy.CheckOutExtension('Spatial') 12 | arcpy.env.overwriteOutput = True 13 | 14 | 15 | def extract_GCMO_NetCDF(netcdf_list, variable, outdir): 16 | """ 17 | Extracts all time layers from a "Global Climate Model Output" NetCDF layer 18 | 19 | :param netcdf_list: List of netcdfs from the CORDEX climate distribution 20 | :param variable: The climate variable of interest (tsmax, tsmin, etc) 21 | :param outdir: Output directory to save files. 22 | 23 | :return output_filelist: returns list of files created by this function 24 | """ 25 | 26 | output_filelist = [] 27 | 28 | if not os.path.exists(outdir): 29 | os.makedirs(outdir) 30 | 31 | netcdf_list = core.enf_list(netcdf_list) 32 | 33 | for netcdf in netcdf_list: 34 | # get netcdf properties object 35 | props = arcpy.NetCDFFileProperties(netcdf) 36 | 37 | print("finding dimensions") 38 | dims = props.getDimensions() 39 | for dim in dims: 40 | print("{0} {1}".format(dim, props.getDimensionSize(dim))) 41 | 42 | # make sure the variable is in this netcdf 43 | if variable: 44 | if variable not in props.getVariables(): 45 | print("Valid variables for this file include {0}".format(props.getVariables())) 46 | raise Exception("Variable '{0}' is not in this netcdf!".format(variable)) 47 | 48 | for dim in dims: 49 | if dim == "time": 50 | 51 | # set other dimensions 52 | x_dim = "lon" 53 | y_dim = "lat" 54 | band_dim = "" 55 | valueSelectionMethod = "BY_VALUE" 56 | 57 | size = props.getDimensionSize(dim) 58 | for i in range(size): 59 | 60 | # sanitize the dimname for invalid characters 61 | dimname = props.getDimensionValue(dim,i).replace(" 12:00:00 PM","") 62 | dimname = dimname.replace("/","-").replace(" ","_") 63 | 64 | dim_value = [["time", props.getDimensionValue(dim,i)]] 65 | print("extracting '{0}' from '{1}'".format(variable, dim_value)) 66 | 67 | outname = core.create_outname(outdir, netcdf, dimname, 'tif') 68 | output_filelist.append(outname) 69 | 70 | arcpy.MakeNetCDFRasterLayer_md(netcdf, variable, x_dim, y_dim, "temp", 71 | band_dim, dim_value, valueSelectionMethod) 72 | arcpy.CopyRaster_management("temp", outname, "", "", "", "NONE", "NONE", "") 73 | 74 | return output_filelist 75 | -------------------------------------------------------------------------------- /docs/source/modules/solar.rst: -------------------------------------------------------------------------------- 1 | solar 2 | ===== 3 | 4 | .. automodule:: dnppy.solar 5 | :members: 6 | 7 | Examples 8 | -------- 9 | 10 | .. rubric:: Compute all available solar parameters 11 | 12 | For a simple example, let's just assume we want to go ahead and calculate everything. The inputs for all solar calculations come down to space and time; more specifically, latitude and longitude coordinates and a precise time in standard GMT. For the example below, we are going to use 2015-May-15th at exactly noon local time, in Hampton, Virginia, USA. Note that while observing daylight saving time, the east coast observes EDT, which is GMT-4; watch out for mistakes with time zones! 13 | 14 | ..
code-block:: python 15 | 16 | from dnppy import solar 17 | 18 | lat = 37 # lat (N positive) 19 | lon = -76.4 # lon (E positive) 20 | datestamp = "20150515-120000" # date stamp 21 | fmt = "%Y%m%d-%H%M%S" # datestamp format 22 | tz = -4 # timezone (GMT/UTC) offset 23 | 24 | sc = solar(lat, lon, datestamp, tz, fmt) 25 | sc.compute_all() 26 | 27 | The code above successfully updates ``solar`` instance ``sc`` to have all the attributes supported by the class, and prints a summary with descriptive variable names, values, and units. If the ``datestamp`` and ``fmt`` variables are unfamiliar to you, you can read more about python `datetime objects`_ and how to create them from strings with `fmt syntax`_. 28 | 29 | Once you've mastered datetime objects, you will probably want to use them all the time (they are pretty great). Fortunately, you can simply input a datetime object in place of the ``datestamp`` variable in the code above, and omit the ``fmt`` variable entirely, such as: 30 | 31 | .. code-block:: python 32 | 33 | from dnppy import solar 34 | from datetime import datetime 35 | 36 | lat = 37 37 | lon = -76.4 38 | dt_obj = datetime(2015,5,15,12,0,0) 39 | tz = -4 40 | 41 | sc = solar(lat, lon, dt_obj, tz) 42 | sc.compute_all() 43 | 44 | 45 | Let's say you aren't interested in anything except the solar elevation angle. After consulting ``help(solar)`` or the function list below, we see there is a method called ``get_elevation``, and decide to invoke it instead of ``compute_all()``, saving the result in a new variable called ``solar_elevation``. 46 | 47 | .. code-block:: python 48 | 49 | solar_elevation = sc.get_elevation() # invoke method of solar instance "sc" 50 | 51 | That's it! You can get any available attribute of a solar instance in exactly the same way! 52 | 53 | .. _datetime objects: https://docs.python.org/2/library/datetime.html#datetime-objects 54 | .. _fmt syntax: https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior 55 | 56 | Code Help 57 | --------- 58 | 59 | Auto-documentation for functions and classes within this module is generated below! 60 | 61 | .. automodule:: dnppy.solar.solar 62 | :members: 63 | 64 | -------------------------------------------------------------------------------- /dnppy/download/fetch_Landsat_WELD.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | from dnppy import core 4 | from list_http_e4ftl01 import list_http_e4ftl01 5 | from download_url import download_url 6 | import os 7 | 8 | __all__ = ["fetch_Landsat_WELD"] 9 | 10 | 11 | def fetch_Landsat_WELD(product, tiles, years, outdir): 12 | """ 13 | Fetch WELD data from the server at [http://e4ftl01.cr.usgs.gov/WELD]. 14 | WELD data is corrected and processed Landsat 5 and 7 data that is distributed in the 15 | MODIS sinusoidal projection and grid format. Read more about WELD data.
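A quick usage sketch, with the product, tiles, and years taken straight from the parameter descriptions below (the output folder is illustrative):

        fetch_Landsat_WELD("USWK", ['h11v12','h11v11'], range(2001,2014),
                           outdir = r"C:\data\WELD")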
16 | https://landsat.usgs.gov/WELD.php 17 | http://globalmonitoring.sdstate.edu/projects/weldglobal/ 18 | 19 | :param product: WELD product to download such as 'USWK','USMO','USYR' 20 | :param tiles: list of tiles to grab such as ['h11v12','h11v11'] 21 | :param years: list of years to grab such as range(2001,2014) 22 | :param outdir: output directory to save downloaded files 23 | 24 | :return output_filelist: A list of full filepaths to files fetched by this function 25 | """ 26 | 27 | output_filelist = [] 28 | 29 | # check formats 30 | tiles = core.enf_list(tiles) 31 | years = core.enf_list(years) 32 | years = [str(year) for year in years] 33 | 34 | # create output directories 35 | for tile in tiles: 36 | if not os.path.exists(os.path.join(outdir,tile)): 37 | os.makedirs(os.path.join(outdir,tile)) 38 | 39 | print('Connecting to servers!') 40 | 41 | # Map the contents of the directory 42 | site= 'https://e4ftl01.cr.usgs.gov/WELD/WELD'+product+'.001' 43 | try: 44 | dates = list_http_e4ftl01(site) 45 | except: 46 | print('Could not connect to site! check inputs!') 47 | raise 48 | 49 | # find just the folders within the desired year range. 50 | good_dates=[] 51 | for date in dates: 52 | try: 53 | y, m, d = date.split(".") 54 | if y in years: 55 | good_dates.append(date) 56 | except: pass 57 | 58 | print("Found {0} days within year range".format(len(good_dates))) 59 | 60 | # for all folders within the desired date range, map the subfolder contents. 61 | for good_date in good_dates: 62 | 63 | files = list_http_e4ftl01(site+'/'+good_date) 64 | 65 | for afile in files: 66 | # only list files with desired tilenames and not preview jpgs 67 | if '.jpg' not in afile: 68 | for tile in tiles: 69 | if tile in afile: 70 | 71 | # assemble the address 72 | address = '/'.join([site,good_date,afile]) 73 | print("Downloading {0}".format(address)) 74 | 75 | #download the file. 76 | outname = os.path.join(outdir,tile,afile) 77 | output_filelist.append(outname) 78 | download_url(address, outname) 79 | return output_filelist 80 | -------------------------------------------------------------------------------- /undeployed/legacy/Landsat/ACCACloudDetector_ArcInterface.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------- 2 | # Name: module1 3 | # Purpose: 4 | # 5 | # Author: qgeddes 6 | # 7 | # Created: 29/04/2013 8 | # Copyright: (c) qgeddes 2013 9 | # Licence: 10 | #------------------------------------------------------------------------------- 11 | import arcpy 12 | from textwrap import dedent 13 | 14 | try: 15 | import numpy as np 16 | except: 17 | msg=""" 18 | The NumPy module is needed for this tool. The module can be downloaded at the following address: 19 | http://sourceforge.net/projects/numpy/files/NumPy/1.6.2 20 | Download the appropriate superpack installer for windows for your Python version 21 | ArcGIS 10 uses Python 2.6 22 | ArcGIS 10.1 uses Python 2.7""" 23 | arcpy.AddError(dedent(msg)) 24 | raise arcpy.ExecuteError 25 | try: 26 | from scipy import stats 27 | except: 28 | msg=""" 29 | The SciPy module is needed for this tool.
The module can be downloaded at the following address: 30 | http://sourceforge.net/projects/scipy/files/scipy/0.11.0 31 | Download the appropriate superpack installer for windows for your Python version 32 | ArcGIS 10 uses Python 2.6 33 | ArcGIS 10.1 uses Python 2.7""" 34 | arcpy.AddError(dedent(msg)) 35 | raise arcpy.ExecuteError 36 | import ACCACloudDetector 37 | 38 | 39 | Band2path= arcpy.GetParameterAsText(0) 40 | Band3path= arcpy.GetParameterAsText(1) 41 | Band4path= arcpy.GetParameterAsText(2) 42 | Band5path= arcpy.GetParameterAsText(3) 43 | Band6path= arcpy.GetParameterAsText(4) 44 | 45 | pixelvalue= arcpy.GetParameterAsText(5) 46 | MetaData= arcpy.GetParameterAsText(6) 47 | OutputFolder= arcpy.GetParameterAsText(7) 48 | OutputFileName= arcpy.GetParameterAsText(8) 49 | 50 | Filter5Thresh=float(arcpy.GetParameterAsText(9)) 51 | Filter6Thresh=float(arcpy.GetParameterAsText(10)) 52 | SaveRefl=arcpy.GetParameter(11) 53 | ReflFolder=arcpy.GetParameterAsText(12) 54 | 55 | 56 | L7bands=[Band2path,Band3path,Band4path,Band5path,Band6path] 57 | # check that the file extension is appropriate and make alterations if necessary 58 | FileNameSplit=OutputFileName.split(".") 59 | if FileNameSplit[-1] not in ["tif","img"]: 60 | arcpy.AddWarning("Output Image must be saved in either the .tif or .img file format. File has been changed to .tif") 61 | if len(FileNameSplit)==1: 62 | OutputFileName+=".tif" 63 | else: 64 | FileNameSplit[-1]="tif" 65 | OutputFileName=".".join(FileNameSplit) 66 | 67 | arcpy.env.scratchWorkspace=OutputFolder 68 | 69 | 70 | ACCACloudDetector.ACCACloudDetector(L7bands, 71 | pixelvalue, 72 | OutputFolder+"\\"+OutputFileName, 73 | MetaData, 74 | SaveRefl, 75 | ReflFolder, 76 | Filter5Thresh, 77 | Filter6Thresh) -------------------------------------------------------------------------------- /docs/source/dev_pages/dev_goals.rst: -------------------------------------------------------------------------------- 1 | Future Development Goals 2 | ======================== 3 | dnppy has some long term goals in mind, goals that will take slow and continuous effort to achieve. 4 | 5 | Dump arcpy 6 | ---------- 7 | 8 | The arcpy module is available only to users who purchase ESRI's ArcMap software. Its principal advantage over most other GIS programming tools or APIs out there is that the user and help documentation is really good, and the "model builder" interface is a good baby step towards python programming, so it's a good place for burgeoning GIS programmers to start learning. Alternatively, the `gdal library`_ is a very powerful open-source, cross-platform library with python bindings, but it has disjointed documentation that can be intimidating and confusing. dnppy's long term goal is to package the power of gdal into more easily understood functional wrappers akin to arcpy functions. Actually, ESRI's ArcMap itself is built on top of some parts of the gdal binaries! Priority should always be placed on meeting the immediate needs of the GIS community and NASA data users, but the addition of new arcpy dependent functions should be avoided. If you've installed dnppy, you've already got an arcpy compatible version of gdal installed, and some dnppy functions already use gdal! 9 | 10 | It would be rather inconsiderate of us to tell future developers, "please use gdal k-thanks" without giving them some kind of starter guide. :doc:`Read more on gdal. `. 11 | 12 | ..
_gdal library: http://www.gdal.org/ 13 | 14 | 15 | Python 3 16 | -------- 17 | 18 | Once dnppy is no longer tied to arcpy (which is what anchors dnppy to python 2.7), we will be free to embrace the future of the Python programming language. In the meantime, there are some things we can be doing `right now` that will make the transition to Python 3 significantly easier. 19 | 20 | .. note:: A very complete and succinct list can be found at `python-future.org`_, but here are some high priority ones. 21 | 22 | .. _python-future.org: http://python-future.org/compatible_idioms.html 23 | 24 | .. rubric:: Printing string output 25 | 26 | String manipulations are very common even at the most basic level. In Python 2, users can use ``print "my string"``, but in Python 3 this is no longer acceptable! The ``print`` keyword has become a function in Python 3. Furthermore, inline substitutions abide by slightly different rules. So, in order to institute good practice for the inevitable update, it costs us very little to write print statements that work in `both` 2.7 and 3.0. Therefore we use the ``format`` method on a string. 27 | 28 | .. code-block:: python 29 | 30 | my_name = "Jwely" 31 | my_age = 26 32 | 33 | print "my name is " + my_name + " and my age is " + str(my_age) # this is BAD 34 | print("my name is {0} and my age is {1}".format(my_name, my_age)) # this is GOOD 35 | 36 | Notice that you do not need to cast ``my_age`` as a string when using the ``format`` method, as it takes care of this on its own. You may find violations of this rule within dnppy; if you do, please fix it! 37 | 38 | 39 | -------------------------------------------------------------------------------- /dev/sphinx_build.py: -------------------------------------------------------------------------------- 1 | __author__ = 'jwely' 2 | 3 | import pip 4 | import os 5 | 6 | """ 7 | Running this script will create the sphinx build in the local repository. 8 | As you might notice, this folder is in the .gitignore, so it 9 | doesn't add to the git history. Why is this? We choose to follow 10 | the best practice of keeping the entire documentation set 11 | on a distinct branch, called "gh-pages", away from the master and 12 | development branches. The whole documentation website is rebuilt 13 | from source and pushed to the "gh-pages" branch every time a commit 14 | is made to the master branch of dnppy, so you do not need to worry about 15 | performing this process manually. 16 | 17 | However! Before making a commit to the master branch, you should 18 | run this sphinx_build script to build a copy of the doc pages in your 19 | local "docs/build" folder. Make sure you resolve any errors or warnings 20 | given by sphinx, then open up "index.html" and browse the new 21 | build to make sure it displays as intended. You should do this in addition 22 | to any other quality assurance and testing checks you deem necessary. Once 23 | everything looks good, commit to the master branch, and updates should go 24 | live to "https://nasa-develop.github.io/dnppy/" within two minutes.
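In short (assuming you run it with the same python 2.7 interpreter that
has dnppy's dependencies installed):

    python dev/sphinx_build.py

Note that a "make_html.bat" file is left behind in the working directory
for troubleshooting purposes.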
25 | """ 26 | 27 | 28 | def get_sphinx(): 29 | """ sub function to make sure modules are available """ 30 | try: 31 | import sphinx 32 | except ImportError: 33 | pip.main(["install", "sphinx"]) 34 | try: 35 | import graphviz 36 | except ImportError: 37 | pip.main(["install", "graphviz"]) 38 | try: 39 | import mock 40 | except ImportError: 41 | pip.main(["install", "mock"]) 42 | 43 | 44 | def build_sphinx(): 45 | """ 46 | This function creates a batch file for running sphinx-build on 47 | the developers system then runs it. The batch file is saved 48 | for troubleshooting purposes. This uses absolute filepaths, 49 | so the developer does not need to modify their PATH variable. 50 | """ 51 | 52 | # make sure sphinx is available 53 | get_sphinx() 54 | 55 | # assemble filepaths 56 | sphinx_path = pip.__file__.replace("\\", "/").replace( 57 | "lib/site-packages/pip/__init__.pyc", 58 | "Scripts/sphinx-build.exe") 59 | 60 | self_path = __file__.replace("\\", "/") 61 | source_path = self_path.replace("dev/sphinx_build.py", "docs/source") 62 | dest_path = self_path.replace("dev/sphinx_build.py", "docs/build") 63 | 64 | # remove key files to force rebuild 65 | buildinfo = os.path.join(dest_path, ".buildinfo") 66 | pickle = os.path.join(dest_path, ".doctrees", "environment.pickle") 67 | if os.path.exists(pickle): 68 | os.remove(pickle) 69 | if os.path.exists(buildinfo): 70 | os.remove(buildinfo) 71 | 72 | with open("make_html.bat", "w+") as f: 73 | line1 = "{0} -b html {1} {2}".format(sphinx_path, source_path, dest_path) 74 | f.write(line1) 75 | f.write("\n\nPAUSE") 76 | 77 | os.system("make_html.bat") 78 | 79 | 80 | if __name__ == "__main__": 81 | build_sphinx() 82 | 83 | -------------------------------------------------------------------------------- /dnppy/landsat/ndvi.py: -------------------------------------------------------------------------------- 1 | 2 | #standard imports 3 | import os 4 | import arcpy 5 | if arcpy.CheckExtension('Spatial')=='Available': 6 | arcpy.CheckOutExtension('Spatial') 7 | arcpy.env.overwriteOutput = True 8 | 9 | __all__=['ndvi_8', # complete 10 | 'ndvi_457'] # complete 11 | 12 | 13 | def ndvi_8(Band5, Band4, outdir = None): 14 | """ 15 | Simple calculator of Normalized difference vegetation index on some Landsat 8 OLI 16 | data. 
Output file will have the same name as the inputs with "NDVI" in place of "B5", so 17 | inputs of files "LC80140342014347LGN00_B5.tif" and "LC80140342014347LGN00_B4.tif" 18 | will generate a file named "LC80140342014347LGN00_NDVI.tif" 19 | 20 | :param Band5: The full filepath to the band 5 tiff file, the OLI NIR band 21 | :param Band4: The full filepath to the band 4 tiff file, the OLI Visible Red band 22 | :param outdir: directory to store output "NDVI" tiff 23 | 24 | :return ndvi8: Name of output file created by this function 25 | """ 26 | 27 | Band4 = os.path.abspath(Band4) 28 | Band5 = os.path.abspath(Band5) 29 | 30 | #Set the input bands to float 31 | Red = arcpy.sa.Float(Band4) 32 | NIR = arcpy.sa.Float(Band5) 33 | 34 | #Calculate the NDVI 35 | L8_NDVI = (NIR - Red)/(NIR + Red) 36 | 37 | # find output directory 38 | tail = os.path.basename(Band5).replace("_B5", "_NDVI") 39 | if outdir is None: 40 | head = os.path.dirname(Band5) 41 | ndvi8 = os.path.join(head, tail) 42 | else: 43 | ndvi8 = os.path.join(outdir, tail) 44 | 45 | L8_NDVI.save(ndvi8) 46 | 47 | print("saved ndvi_8 at {0}".format(ndvi8)) 48 | return ndvi8 49 | 50 | 51 | def ndvi_457(Band4, Band3, outdir = None): 52 | """ 53 | Simple calculator of Normalized difference vegetation index on some Landsat 4/5/7 TM/ETM+ 54 | data. Output file will have the same name as the inputs with "NDVI" in place of "B3", so 55 | inputs of files "LC70140342014347LGN00_B4.tif" and "LC70140342014347LGN00_B3.tif" 56 | will generate a file named "LC70140342014347LGN00_NDVI.tif" 57 | 58 | :param Band4: The full filepath to the band 4 tiff file, the TM/ETM+ NIR band 59 | :param Band3: The full filepath to the band 3 tiff file, the TM/ETM+ Visible Red band 60 | :param outdir: directory to store output "NDVI" tiff 61 | 62 | :return ndvi457: Name of output file created by this function 63 | """ 64 | 65 | Band3 = os.path.abspath(Band3) 66 | Band4 = os.path.abspath(Band4) 67 | 68 | #Set the input bands to float 69 | Red = arcpy.sa.Float(Band3) 70 | NIR = arcpy.sa.Float(Band4) 71 | 72 | #Calculate the NDVI 73 | L457_NDVI = (NIR - Red)/(NIR + Red) 74 | 75 | # find output directory 76 | tail = os.path.basename(Band3).replace("_B3", "_NDVI") 77 | if outdir is None: 78 | head = os.path.dirname(Band3) 79 | ndvi457 = os.path.join(head, tail) 80 | else: 81 | ndvi457 = os.path.join(outdir, tail) 82 | 83 | L457_NDVI.save(ndvi457) 84 | 85 | print("saved ndvi_457 at {0}".format(ndvi457)) 86 | return ndvi457 87 | -------------------------------------------------------------------------------- /dnppy/raster/apply_linear_correction.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from enf_rastlist import * 4 | from from_numpy import * 5 | from to_numpy import * 6 | from dnppy import core 7 | import os 8 | 9 | __all__ = ["apply_linear_correction"] 10 | 11 | def apply_linear_correction(rasterlist, factor, offset, suffix = 'lc', 12 | outdir = None, floor = -999999): 13 | """ 14 | Applies a linear correction to a raster dataset. 15 | New offset rasters are saved in the output directory with a suffix of "lc" 16 | unless one is specified. This may be used to apply any kind of linear relationship 17 | that can be described with "mx + b", such as conversion between K, C, and F. 18 | Also useful when ground truthing satellite data and discovering linear errors. 19 | All outputs are 32 bit floating point values.
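In symbols, every output pixel is simply:

            output = (input * factor) + offset

        with anything falling below ``floor`` clamped to ``floor``.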
20 | 21 | :param rasterlist: list of rasters, a single raster, or a directory full of tiffs to 22 | have a linear correction applied to them. 23 | :param factor: every pixel in the raster will be MULTIPLIED by this value. 24 | :param offset: this offset value will be ADDED to every pixel in the raster. 25 | :param suffix: output files will take the same name as input files with this string 26 | appended to the end. So input "FILE.tif" outputs "FILE_suffix.tif" 27 | :param outdir: directory to save output rasters. "None" will save output images 28 | in the same folder as the input images. 29 | :param floor: Used to manage NoData. All values less than floor are set to floor, 30 | then floor is set to the new NoData value. defaults to -999,999 31 | 32 | 33 | :return output_filelist: list of filepaths to output files created by this function 34 | 35 | Example Usage 36 | To convert MODIS land surface temperature from digital number to kelvin, you 37 | must simply multiply by 0.02, the stated scale factor listed at the link below 38 | [https://lpdaac.usgs.gov/products/modis_products_table/myd11a1]. 39 | 40 | Now that it is in kelvin, converting to Celsius can be done by adding -273.15. 41 | So, use this function with:: 42 | 43 | factor = 0.02 44 | offset = -273.15 45 | 46 | and one may convert MODIS land surface temperature digital numbers directly to 47 | Celsius! 48 | """ 49 | 50 | output_filelist = [] 51 | 52 | if outdir is not None and not os.path.isdir(outdir): 53 | os.makedirs(outdir) 54 | rasterlist = enf_rastlist(rasterlist) 55 | 56 | for raster in rasterlist: 57 | print("applying a linear correction to " + raster) 58 | image, metadata = to_numpy(raster, "float32") 59 | new_NoData = floor 60 | 61 | output = image * factor + offset 62 | low_value_indices = output < new_NoData 63 | output[low_value_indices] = new_NoData 64 | 65 | outname = core.create_outname(outdir,raster,suffix) 66 | from_numpy(output, metadata, outname, new_NoData) 67 | output_filelist.append(outname) 68 | 69 | print("Finished! \n ") 70 | return output_filelist 71 | -------------------------------------------------------------------------------- /undeployed/subjects/DSI/LST_Script_Tool.py: -------------------------------------------------------------------------------- 1 | ## LAND SURFACE TEMPERATURE 2 | import fnmatch 3 | import arcpy 4 | import os 5 | import gc 6 | gc.disable() 7 | from arcpy import env 8 | from arcpy.sa import * 9 | arcpy.CheckOutExtension("Spatial") # Turns on Spatial Analyst Extension 10 | env.workspace = arcpy.GetParameterAsText(0) # TO BE PARAMETER 11 | env.overwriteOutput = True # If True: existing outputs are deleted and rewritten on each run. If False: existing data is never deleted.
12 | study_area = arcpy.GetParameterAsText(1) # MUST BE IN GEODATABASE # TO BE PARAMETER 13 | 14 | 15 | ## HDF > RASTER TIFF 16 | lst = arcpy.ListFiles("*.hdf") 17 | mosaiclist = [] 18 | for filez in lst: 19 | output_rast = filez[0:-4]+"_raster" + ".tif" 20 | extracted = arcpy.ExtractSubDataset_management(filez,output_rast,"0") 21 | OutExtractByMask = ExtractByMask(extracted, study_area) 22 | OutExtractByMask.save((filez[0:-4])+"_sa.tif") 23 | 24 | 25 | ## MOSAIC RASTERS TOGETHER 26 | 27 | for dirname, dirnames, filenames in os.walk(env.workspace): 28 | for subdirname in dirnames: 29 | env.workspace = os.path.join(dirname, subdirname) 30 | 31 | mosaiclist = arcpy.ListRasters("*sa.tif") 32 | M_Out = env.workspace 33 | i = 0 34 | while i < len(lst): 35 | currentPattern = lst[i] 36 | M_outname = currentPattern[0:-8] + "_LST.tif" 37 | pattern = currentPattern[0:6]+"*.tif" 38 | group = fnmatch.filter(mosaiclist,pattern) 39 | print(group) 40 | if len(group) == 0 or len(group) > 3: 41 | break 42 | # a single mosaic call handles groups of one, two, or three scenes 43 | arcpy.MosaicToNewRaster_management(group,M_Out,M_outname,"","16_BIT_UNSIGNED","","1") 44 | print("Group {0}".format(len(group))) 45 | i = i + len(group) 46 | 47 | scalelist = arcpy.ListRasters("*LST.tif") 48 | for rasters in scalelist: 49 | minRast = arcpy.GetRasterProperties_management(rasters, "MINIMUM") 50 | minvalue = (minRast.getOutput(0)) 51 | maxRast = arcpy.GetRasterProperties_management(rasters, "MAXIMUM") 52 | maxvalue = (maxRast.getOutput(0)) 53 | num = Minus(float(maxvalue), rasters) 54 | denom = float(maxvalue)- float(minvalue) 55 | scaledLST = Divide(num,denom) 56 | scaledoutplace = arcpy.GetParameterAsText(2) # MAKE PARAMETER OUTPUTFOLDER 57 | scaledLST.save(scaledoutplace + rasters[0:-4] + "_scaled.tif") 58 | -------------------------------------------------------------------------------- /undeployed/subjects/METRIC/ET/Wx_Data_Extract.py: -------------------------------------------------------------------------------- 1 | 2 | # dnppy imports 3 | from dnppy_limited.time_series.csv_io import * 4 | from dnppy_limited.time_series import time_series 5 | 6 | # standard imports 7 | from datetime import datetime 8 | 9 | 10 | def Wx_Data_Extract(time_obj, wx_path): 11 | """ 12 | This function was written to reinstate wx file parsing for the agreed-upon NOAA 13 | weather data format for any study area within the USA. This is THE function 14 | that should be used for reading weather data, the others will not be supported. 15 | 16 | It expects data in the format as retrieved from this URL: 17 | [http://gis.ncdc.noaa.gov/map/viewer/#app=cdo&cfg=cdo&theme=hourly&layers=1&node=gi] 18 | 19 | Please see the readme for more detailed instructions on data download. 20 | 21 | Inputs: 22 | time_obj A datetime object representing the image data acquisition datetime 23 | wx_path filepath to the weather data. (hourly data) 24 | 25 | Returns: 26 | an array with specific ordering of climate variables.
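Specifically (matching the return statement at the bottom of this
        function): [temp_C_min, temp_C_max, temp_C_mid, P_air, wind_speed, dewp_C]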
27 | 28 | Author: Jeffry Ely 29 | """ 30 | 31 | # format weather (daily and hourly) as a time series object from dnppy module 32 | wx = time_series("wx_data") 33 | wx.from_csv(wx_path, delim = " ", spec_format = "DS3505") 34 | 35 | time_label = "YR--MODAHRMN" 36 | time_format = "%Y%m%d%H%M" 37 | start_time = "200001010000" 38 | wx.define_time(time_label, time_format, start_time) 39 | 40 | # bin the data into days and pull out the one we want. 41 | wx.discretize("%j", cust_center_time = time_obj) 42 | 43 | day_name = time_obj.strftime("%Y-%m-%d") 44 | wx.interogate() 45 | 46 | # if it can't find a subset in wx with the input date's name, the wx data is for the wrong time. 47 | try: wx_day = wx[day_name] 48 | except: raise Exception("wx data has no entries for date of landsat acquisition ({0})".format(time_obj)) 49 | 50 | # get min/max temperatures and convert to Celsius (statistical operations clean up NoData) 51 | print("Centered statistics around {0}".format(wx_day.center_time)) 52 | Tstats = wx_day.column_stats("TEMP") 53 | temp_C_min = (Tstats["TEMP_min_v"] - 32) * (5.0/9) # F --> C 54 | temp_C_max = (Tstats["TEMP_max_v"] - 32) * (5.0/9) # F --> C 55 | 56 | # get instantaneous variables at the input time_obj by interpolating between nearest values 57 | temp_C_mid = (wx_day.interp_col(time_obj, "TEMP") - 32) * (5.0/9) # F --> C 58 | P_air = wx_day.interp_col(time_obj, "STP") # in millibars 59 | wind_speed = wx_day.interp_col(time_obj, "SPD") * 0.51444 # knots --> meters / second 60 | dewp_C = (wx_day.interp_col(time_obj, "DEWP") - 32) * (5.0/9) # F --> C 61 | 62 | # this format is for legacy support 63 | return [temp_C_min, temp_C_max, temp_C_mid, P_air, wind_speed, dewp_C] 64 | 65 | 66 | 67 | # testing 68 | if __name__ == "__main__": 69 | 70 | wx_filepath = r"E:\DEVELOP\Team_Projects\2015_Spring_METRIC\code_current_dev\input_weather\2013_July_CravenCountyAirport.txt" 71 | time = datetime(2013,7,17, 11,43,24) 72 | wx = Wx_Data_Extract(time, wx_filepath) 73 | print(wx) 74 | 75 | --------------------------------------------------------------------------------