├── python-xarray ├── unused │ ├── f17_ssmis_20190101v7.nc.dmrpp.dap?sst_dtime │ ├── f17_ssmis_20190101v7.nc.dmrpp.dods?sst_dtime │ ├── GHRC_ngdap_west_cloud.py │ ├── AIRS_ngdap_west_cloud.py │ ├── f17_ssmis_20190101v7.nc.dmrpp?sst_dtime │ ├── MERRA_ngdap_west_cloud.py │ ├── On-PremGHRC.py │ └── On-PremGHRC_cloud.py ├── run1000 ├── README ├── ssmis_curl.sh ├── balto-urls.sh ├── ngap_xarray_test.py ├── grfn_ngap_west_cloud.py ├── ssmis_azure.py ├── ssmis_gcloud.py ├── grfn_granules.py ├── balto_opendap_s3.py ├── ssmis_ngap_west_cloud.py ├── merra_granules.py └── airs_granules.py ├── .gitignore ├── CITATION ├── release-notes ├── release_notes-1.13.3.txt ├── release_notes-1.13.0.txt ├── release_notes-1.13.2.txt ├── release_notes-1.13.1.txt ├── release_notes-1.14.0.txt ├── release_notes-1.15.0.txt └── release-notes-1.15.1.txt ├── Makefile ├── CMakeLists.txt ├── hyrax_clone.sh ├── spath.sh ├── README.md └── hyrax_build.sh /python-xarray/unused/f17_ssmis_20190101v7.nc.dmrpp.dap?sst_dtime: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /python-xarray/unused/f17_ssmis_20190101v7.nc.dmrpp.dods?sst_dtime: -------------------------------------------------------------------------------- 1 | Dataset { 2 | Int16 sst_dtime[time = 2][latitude = 720][longitude = 1440]; 3 | } f17_ssmis_20190101v7.nc; 4 | Data: 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *~ 3 | *.log 4 | .metadata/ 5 | 6 | # never check in login.txt 7 | login.txt 8 | 9 | logs/ 10 | hyrax-dependencies/ 11 | bes/ 12 | libdap4/ 13 | olfs/ 14 | build/ 15 | /.recommenders/ 16 | 17 | ngap 18 | -------------------------------------------------------------------------------- /python-xarray/run1000: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rm -f xarray_results/tea_uat_xarray.log ; 4 | for i in {1..1000}; do 5 | id="pass-${i}-"`date "+%FT%T"` 6 | python3 ssmis_ngap_west_cloud.py -i "${id}" -m 2>&1 | tee -a xarray_results/tea_uat_xarray.log ; 7 | done 8 | 9 | 10 | -------------------------------------------------------------------------------- /CITATION: -------------------------------------------------------------------------------- 1 | 2 | How to cite Hyrax 3 | 4 | Use the following text within your publications to cite Hyrax 5 | 6 | For data use: "Data were accessed using the Hyrax data server, version 7 | , running at ." 8 | 9 | For providing data: "Data were made available using the Hyrax data 10 | server, version ." 11 | 12 | To find the DOI for the current version of Hyrax (or recent previous 13 | versions), look on the GitHub pages for the OLFS and note the DOI for 14 | that software. This is the DOI for the whole server and includes the 15 | DOI for the backend server and DAP library distributions. 16 | 17 | The OLFS GitHub page is https://github.com/OPENDAP/olfs. 18 | 19 | -------------------------------------------------------------------------------- /release-notes/release_notes-1.13.3.txt: -------------------------------------------------------------------------------- 1 | This version of Hyrax is a bug fix release. It uses version 3.18.3 of libdap, version 3.17.4 2 | of the BES and version xxx of the OLFS 3 | 4 | Bug fix for DAP4 data transmissions. 5 | 6 | XML Parser fixes for DAP4: We are now tolerant of elements that 7 | are not in the DAP4 namespace, so the DMR document can be extended 8 | with additional information without breaking our parser. 9 | 10 | D4Group's clone (aka ptr_duplicate) method returned a D4Group and not 11 | a BaseType; fixed. 12 | 13 | A bug with logrotate was crashing the server. 
14 | 15 | Hyrax-282 fixed (Aggregations crash the BES when a response as netcdf 16 | or netcdf-4 is requested). This problem was actually that any Grid 17 | subset so that only some of the elements are sent broke the netcdf 18 | handler. 19 | 20 | Patched the HDF4 handler code so that the source dist contains needed 21 | testing scripts/templates. 22 | 23 | 24 | -------------------------------------------------------------------------------- /python-xarray/unused/GHRC_ngdap_west_cloud.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | import sys 4 | 5 | # Get the granule names 6 | from ssmis_granules_ghrc import f16_ssmis_100 7 | from ssmis_granules_ghrc import base_url 8 | 9 | # Allows us to visualize the dask progress for parallel operations 10 | from dask.diagnostics import ProgressBar 11 | 12 | ProgressBar().register() 13 | 14 | # OPeNDAP In the Cloud 15 | 16 | od_files = [] 17 | 18 | for g in f16_ssmis_100: 19 | od_files.append(base_url + g) 20 | 21 | print(od_files[0], '\n', od_files[-1]) 22 | 23 | try: 24 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 25 | 26 | cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 27 | 28 | cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude']) 29 | 30 | print(cloud_ws_mean) 31 | 32 | except: 33 | print("Error:", sys.exc_info()[0]) 34 | -------------------------------------------------------------------------------- /python-xarray/unused/AIRS_ngdap_west_cloud.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | 4 | # Get the granule names 5 | import airs_granules 6 | 7 | # Allows us to visualize the dask progress for parallel operations 8 | from dask.diagnostics import ProgressBar 9 | 10 | ProgressBar().register() 11 | 12 | # OPeNDAP In the Cloud 13 | 14 | base_url = 
'http://ngap-west.opendap.org/opendap/dmrpp/s3/airs/' 15 | 16 | od_files = [] 17 | 18 | for g in airs_granules.airs_366: 19 | od_files.append(base_url + g) 20 | 21 | print('URL start and end:\n', od_files[0], '\n', od_files[-1]) 22 | 23 | cloud_data = xa.open_mfdataset(od_files[0:2], data_vars='CloudTopPres_A', engine='pydap', parallel=True, combine='by_coords') 24 | 25 | print('Completed the xarray open_mfdataset\n') 26 | 27 | cloud_ws = cloud_data['CloudTopPres_A'].sel(Latitude=slice(-53.99, -14), Longitude=slice(140, 170)) 28 | 29 | print('Completed the CloudTopPres_A selection\n') 30 | 31 | cloud_ws_mean = cloud_ws.mean(dim=['Latitude', 'Longitude']) 32 | 33 | print('Computed the mean of the CloudTopPres_A values\n') 34 | print(cloud_ws_mean) 35 | 36 | # cloud_ws_mean.plot.line() 37 | 38 | 39 | -------------------------------------------------------------------------------- /python-xarray/unused/f17_ssmis_20190101v7.nc.dmrpp?sst_dtime: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Hyrax: Access Denied 6 | 7 | 8 | 9 | OPeNDAP Logo 10 |

Hyrax : Access Denied (403)

11 |
12 |
The requested URL directly references a data source.
13 |

You must use the OPeNDAP request interface to get data from the data source.

14 |

If you would like to start at the top level of this server, go here: 15 | http://balto.opendap.org/opendap/hyrax

16 |

If you think that the server is broken (that the URL you 17 | submitted should have worked), then please contact the 18 | OPeNDAP user support coordinator at: 19 | support@opendap.org

20 |
21 |

Hyrax : Access Denied (403)

22 | 23 | 24 | -------------------------------------------------------------------------------- /python-xarray/unused/MERRA_ngdap_west_cloud.py: -------------------------------------------------------------------------------- 1 | 2 | # to run this in a bare environment, set up the environment thusly: 3 | # conda install xarray dask requests 4 | # pip install pydap 5 | 6 | import xarray as xa 7 | 8 | # Get the granule names 9 | import merra_granules 10 | 11 | # Allows us to visualize the dask progress for parallel operations 12 | from dask.diagnostics import ProgressBar 13 | 14 | ProgressBar().register() 15 | 16 | # OPeNDAP In the Cloud 17 | 18 | base_url = 'http://ngap-west.opendap.org/opendap/dmrpp/s3/merra2/' 19 | 20 | od_files = [] 21 | 22 | for g in merra_granules.merra2: 23 | od_files.append(base_url + g) 24 | 25 | print('URL start and end:\n', od_files[0], '\n', od_files[-1]) 26 | 27 | # Removed this: data_vars='Var_DHDT_ANA', 28 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 29 | 30 | print('Completed the xarray open_mfdataset\n') 31 | 32 | cloud_ws = cloud_data['Var_DHDT_ANA'].sel(lat=slice(-53.99, -14), lon=slice(140, 170)) 33 | 34 | print('Completed the Var_DHDT_ANA selection\n') 35 | 36 | cloud_ws_mean = cloud_ws.mean(dim=['lat', 'lon']) 37 | 38 | print('Computed the mean of the Var_DHDT_ANA values\n') 39 | print(cloud_ws_mean) 40 | 41 | # cloud_ws_mean.plot.line() 42 | 43 | 44 | -------------------------------------------------------------------------------- /python-xarray/unused/On-PremGHRC.py: -------------------------------------------------------------------------------- 1 | from netCDF4 import Dataset 2 | import xarray as xa 3 | import dask 4 | 5 | # Allows us to visualize the dask progress for parallel operations 6 | from dask.diagnostics import ProgressBar 7 | 8 | ProgressBar().register() 9 | 10 | # from dask.distributed import Client 11 | # client = Client(memory_limit=10e10, processes=False) # 
Note: was 6e9 12 | # client"n 13 | 14 | # https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T1NXFLX.5.12.4/contents.html 15 | 16 | url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T1NXFLX.5.12.4/1984/11/MERRA2_100.tavg1_2d_flx_Nx.198411' 17 | 18 | from datetime import date, timedelta 19 | 20 | files = [] 21 | d = date.fromisoformat('1984-11-01') 22 | while True: 23 | 24 | files.append(f'{url}{str(d.day).zfill(2)}.nc4') 25 | d = d + timedelta(days=1) 26 | 27 | if d.month == 12: 28 | break 29 | 30 | files 31 | 32 | import getpass 33 | 34 | username = input("URS Username: ") 35 | password = getpass.getpass("URS Password: ") 36 | 37 | from pydap.client import open_url 38 | from pydap.cas.urs import setup_session 39 | 40 | ds_url = files[0] 41 | session = setup_session(username, password, check_url=ds_url) 42 | gesdisc_data = xa.open_mfdataset(files, engine='pydap', parallel=True, combine='by_coords', 43 | backend_kwargs={'session': session}) 44 | gesdisc_data 45 | hflux = gesdisc_data.HFLUX.sel(lat=slice(-53.99, -14), lon=slice(140, 170)) 46 | hflux_mean = hflux.mean(dim=['lat', 'lon']) 47 | hflux_mean.plot.line() 48 | 49 | -------------------------------------------------------------------------------- /python-xarray/README: -------------------------------------------------------------------------------- 1 | 2 | Set up a conda environment that includes: 3 | netCDF4 pip numpy matplotlib cartopy basemap requests xarray dask ipykernel 4 | 5 | After installing Anaconda's free version, use 6 | 7 | conda create --name opendap pip numpy matplotlib cartopy basemap requests xarray dask ipykernel 8 | 9 | That makes an 'opendap' environment and loads the packages pip, ..., ipykernel into 10 | the environment. 
Now switch to the enviroment 11 | 12 | conda activate opendap 13 | 14 | Use pip to install pydap 15 | 16 | pip install pydap 17 | 18 | NB: You might not need to use pip for pydap and might not need ipykernel 19 | 20 | ## Tweaking the conda install to coexist with the OPeNDAP/Hyrax C++ build 21 | 22 | Add this to the bottom of ~/.bashrc: 23 | 24 | function conda_on() { 25 | source ~/anaconda3/etc/profile.d/conda.sh 26 | conda activate base 27 | } 28 | 29 | Then in ~/.bash_profile: 30 | 31 | # I removed the following because the 'conda' system clashes with 32 | # our dependencies (well, the icu library it uses does). jhrg 4/24/2019 33 | 34 | # added by Anaconda3 2019.03 installer 35 | # >>> conda init >>> 36 | # !! Contents within this block are managed by 'conda init' !! 37 | # __conda_setup="$(CONDA_REPORT_ERRORS=false '/Users/jimg/anaconda3/bin/conda' shell.bash hook 2> /dev/null)" 38 | # if [ $? -eq 0 ]; then 39 | # \eval "$__conda_setup" 40 | # else 41 | # if [ -f "/Users/jimg/anaconda3/etc/profile.d/conda.sh" ]; then 42 | # . "/Users/jimg/anaconda3/etc/profile.d/conda.sh" 43 | # CONDA_CHANGEPS1=false conda activate base 44 | # else 45 | # \export PATH="/Users/jimg/anaconda3/bin:$PATH" 46 | # fi 47 | # fi 48 | # unset __conda_setup 49 | # <<< conda init <<< 50 | 51 | With the above left in place, every shell will have the default conda environment 52 | (base) and as I say above, as of 4/24/2019, that broke the ICU library on OSX. 53 | By removing the above code and having the function 'conda-on' shown previously, 54 | the normal shell is unaltered. Once you run 'conda-on' then that shell is a conda 55 | shell and should not be used for Hyrax builds. 56 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | # Build the Hyrax Server C++ sources. 
The intent is to provide an easy 3 | # way to support CLion indexing the C++ sources to streamline use of 4 | # that IDE. jhrg 9/30/22 5 | 6 | # sbl 9/30/22 7 | 8 | # Build and test. This will compile all the code and should thus set 9 | # up CLion so that all files will be indexed. Continue building even 10 | # if errors are found in the tests - that will index code that would 11 | # otherwise be left out. jhrg 9/30/22 12 | .PHONY: all 13 | all: configured 14 | $(MAKE) $(MFLAGS) -C libdap4 15 | $(MAKE) $(MFLAGS) -C libdap4 -k check 16 | $(MAKE) $(MFLAGS) -C bes 17 | $(MAKE) $(MFLAGS) -C bes -k check 18 | 19 | .PHONY: clean 20 | clean: configured 21 | $(MAKE) $(MFLAGS) -C libdap4 -k $@ 22 | $(MAKE) $(MFLAGS) -C bes -k $@ 23 | 24 | .PHONY: hyrax-dependencies 25 | hyrax-dependencies: prefix-set 26 | $(MAKE) $(MFLAGS) -C $@ 27 | 28 | clion-setup: 29 | export prefix="$(shell pwd)/build"; echo $$prefix 30 | export PATH="$$prefix/bin:$$prefix/deps/bin:$$PATH"; echo $$PATH 31 | $(MAKE) $(MFLAGS) all 32 | $(MAKE) $(MFLAGS) check 33 | 34 | # If $prefix is not set in the calling shell, exit. 35 | # If the PATH is not set correctly, exit. 36 | .PHONY: prefix-set 37 | prefix-set: 38 | @test -n "$$prefix" \ 39 | || (echo "The env variable 'prefix' must be set. See README"; exit 1) 40 | @printenv PATH | grep -q $$prefix/bin \ 41 | || (echo "Did not find $$prefix/bin in PATH"; exit 1) 42 | @printenv PATH | grep -q $$prefix/deps/bin \ 43 | || (echo "Did not find $$prefix/deps/bin in PATH"; exit 1) 44 | 45 | .PHONY: configured 46 | configured: prefix-set 47 | @test -f libdap4/Makefile \ 48 | || (echo "Run ./configure --prefix=... in libdap4 (Makefile missing)"; exit 1) 49 | @test -f bes/Makefile \ 50 | || (echo "Run ./configure --prefix=... 
in bes (Makefile missing)"; exit 1) 51 | 52 | # current_dir="$(shell pwd)" 53 | # current_dir+="/build" 54 | # $(info current_dir is ${current_dir}) 55 | # (cd hyrax-dependencies && ${MAKE} prefix=${current_dir}) 56 | # (cd libdap4 && ${MAKE}) 57 | # (cd bes && ${MAKE}) 58 | 59 | 60 | # (cd libdap4 && ${MAKE} check) 61 | # (cd bes && ${MAKE} check) 62 | 63 | -------------------------------------------------------------------------------- /release-notes/release_notes-1.13.0.txt: -------------------------------------------------------------------------------- 1 | HDF5: 2 | 3 | [from Kent] I finished a non-trivial update for the HDF5 handler 4 | to support SMAP and some MEaSURES NASA products. I merged all my 5 | coding to the master branch. I also updated the 6 | documents(configure.ac etc.) . Could you run coverity and see if new 7 | high risk issues appear? I hope it can be done ASAP because of the 8 | reason listed below. [Coverity run, issues addressed] 9 | 10 | When will you guys have another release? The Raytheon folks may depend 11 | on the leading underscore removal of the recent update in their 12 | software. I told them I will make my part available early January. 13 | 14 | Also I will not do anything with the HDF4 handler except updating the 15 | documents. I will tackle the code improvement some time later in this 16 | year. 17 | 18 | What's new for version 2.3.2 [the HDF5 handler] 19 | ---------------------------- 20 | CF option: 21 | 22 | - By default, the leading underscore of a variable path is removed for 23 | all files. Although not recommended, Users can change the BES key 24 | H5.KeepVarLeadingUnderscore at h5.conf.in to be true for backward 25 | compatibility if necessary. 26 | 27 | - Significantly improve the support of generic HDF5 files that have 28 | 2-D lat/lon. This improvement makes some SMAP level 1, level 3 and 29 | level 4 products plottable by CF tools such as Panoply. 
30 | 31 | - Add the general support of netCDF-4-like HDF5 files that have 2-D 32 | lat/lon. This improvement makes TOMS MEaSURES product plottable by 33 | CF tools such as Panoply. It will also support future potential 34 | products that follow netCDF generic data model. 35 | 36 | We (OPeNDAP) also have some fixes in place: Better error messages and 37 | a new timeout feature that enables the front end to control the 38 | timeout duration for each BES - so even for a setup where there are 39 | many BES instances running on different machines, one configuration 40 | file can control when a response is taking too long and should 41 | timeout. The server now returns a decent message when it 'times out’ 42 | and, lastly, the server offers an option whereby the timeout period is 43 | cancelled once data transmission starts. 44 | -------------------------------------------------------------------------------- /release-notes/release_notes-1.13.2.txt: -------------------------------------------------------------------------------- 1 | Changed BES timeout error from 418 (TeaPot) to 504 (Gateway Timeout). 2 | 3 | Updated OLFS configuration defaults in viewers.xml file to reflect 4 | changes for ncWMS2 and Godiva3 5 | 6 | Patched a problem with the way the XSL transforms are carried out that 7 | could have left the process running in an unexpected directory. 8 | 9 | Added an in-memory cache for the handlers. Hyrax module developers can 10 | use this to speed response times when building metadata responses (e.g., DAS) 11 | is time-consuming. 12 | 13 | Added a logrotate file to the rpm package. This helps keep the bes.log 14 | size manageable and provides better integration with Linux. 15 | 16 | Added a Function Response Cache. Now function results are computed 17 | once and cached. 18 | 19 | Fixed Hyrax-254. This bug caused random behavior when accessing 20 | aggregations. 
21 | 22 | Fixed a problem with ASCII responses when one or more dimensions of an 23 | N-dim array had only one element. 24 | 25 | Fixes for the HDF5 handler: 26 | * HFVHANDLER-175: Make sure the lat/lon are checked under both 27 | /geolocation and / (root) groups. 28 | * HFVHANDLER-189: Handle the root attribute 29 | * HFVHANDLER-191 30 | * HFVHANDLER-192: Handle _FilLValue and units for GPM 31 | * HFVHANDLER-194: New nlayer values for GPM level-3 4.0 data 32 | * HFVHANDLER-199: DAP4 memory cache 33 | * HFVHANDLER-198 and 184: DAP2 memory cache 34 | 35 | Updated GDAL-based code to use GDAL 2.x 36 | 37 | Fixed BES 'Transmitter' modules (modules that build non-DAP responses 38 | like netCDF, JSON, etc.) so that they work when constraint expressions 39 | contain function calls. 40 | 41 | Fixed a bug (Hyrax-282) in the netCDF Transmitter where subsets of Grid variables 42 | could result in a 500 error. Those now return the correct data values. 43 | 44 | Updated the 'history' attribute in the netCDF Transmitter's responses 45 | so it's more precise about how the response was generated. The new 46 | value contains the date, time, server version and requesting URL, so 47 | it should be possible to recreate the response. The format of the 48 | attribute conforms to CF-1.6. 49 | 50 | Changed the representation of Not-a-Number from 'nan' to 'NaN' in the 51 | ASCII response. 52 | 53 | Fixed in libdap: an issue with DAP4 CE parsing, double quotes and %20 escape 54 | characters. 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | # CMakeLists.txt file for use with CLion's code discovery, etc., modes. 3 | # Use the autotools configure, Makefiles, etc., for building the code. 4 | # jhrg 12/5/19 5 | 6 | cmake_minimum_required(VERSION 3.15) 7 | 8 | project(hyrax_git) 9 | 10 | # Edited. 
set(CMAKE_CXX_STANDARD 14) 11 | set(CMAKE_CXX_STANDARD 11) 12 | 13 | include_directories(/usr/local/include) 14 | include_directories(/usr/local/include/c++) 15 | 16 | if (EXISTS /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/) 17 | include_directories(/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/) 18 | endif() 19 | 20 | # Added. 21 | if (EXISTS /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk) 22 | # this is needed on OSX Bug Sur to find libSystem 23 | link_directories(/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib) 24 | include_directories(/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/libxml2) 25 | include_directories(/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/curl) 26 | endif() 27 | 28 | # Added for the sql handler and its use of unixODBC 29 | if (EXISTS /usr/local/Cellar/openssl/1.0.2r) 30 | include_directories(/usr/local/Cellar/openssl/1.0.2r/include/) 31 | endif() 32 | 33 | # The Hyrax dependencies directories 34 | include_directories(build/deps/include) 35 | 36 | include_directories(build/deps/include/gridfields) 37 | include_directories(build/deps/include/openjpeg-2.1) 38 | include_directories(build/deps/include/unicode) 39 | 40 | # More stuff for the sql handler, which is not formally part of the bes. 
jhrg 7/22/21 41 | if (EXISTS build/include/sql) 42 | include_directories(build/include/sql) 43 | include_directories(build/include/sql/action) 44 | include_directories(build/include/sql/connector) 45 | include_directories(build/include/sql/container) 46 | include_directories(build/include/sql/DEM) 47 | include_directories(build/include/sql/DTM) 48 | include_directories(build/include/sql/handler) 49 | include_directories(build/include/sql/handler/connector) 50 | include_directories(build/include/sql/utils) 51 | endif() 52 | 53 | add_subdirectory(libdap4) 54 | add_subdirectory(bes) 55 | 56 | # Add STAREmaster in until it's moved into the hyrax-dependencies or elsewhere. jhrg 7/22/21 57 | if (EXISTS STAREmaster) 58 | add_subdirectory(STAREmaster) 59 | endif() 60 | 61 | include_directories(bes) 62 | include_directories(libdap4) 63 | 64 | -------------------------------------------------------------------------------- /release-notes/release_notes-1.13.1.txt: -------------------------------------------------------------------------------- 1 | Changes to the BES: 2 | 3 | - Added an in-memory cache that handlers can use to cache DDS, DMR, 4 | ..., objects. The handlers have to be modified to use this, but the 5 | performance benefits are potentially huge. See bes/dap/ObjMemCache 6 | and the netcdf_handler for the code and an example of its use. 7 | 8 | - Server functions can now return multiple variables and have then 9 | appear as discrete things without being 'wrapped' in a parent 10 | Structure variable. They do this by using a structure name that ends 11 | with '_unwrap'. This means that server functions can return values in 12 | a way that is usable by clients built using the netcdf-library. 13 | 14 | - Added code to 'bes/functions' so that the module built there will 15 | respond to the Version and Help commands (I'm not sure the later is 16 | ever used by the BES, but it's not much more work). 
This could be 17 | propagated to other modules that previously lacked a RequestHandler 18 | instance. 19 | 20 | - Patch for gcc-6 from Orion Poplawski. 21 | 22 | Three handlers we ship as part of Hyrax have significant changes: 23 | 24 | - The netCDF file response now includes history information and has a 25 | 'smart' suffix option (on by default; this part is actually in the 26 | front end 27 | of Hyrax). 28 | 29 | - In the HDF4 code, improve the calculation of XDim and YDim for the 30 | sinusoidal projection 31 | and a bug fix for the case when the _FillValue is NaN. 32 | 33 | - Add support for HDF5 scalar datasets and unlimited dimensions to 34 | the CF option. 35 | 36 | The front end has some significant changes too: 37 | 38 | - Added code to amend the CF "history" attribute so that 39 | fileout_netcdf responses contain the amended value that shows that 40 | Hyrax produced it along with the constraint and id information of 41 | the dataset. 42 | 43 | - Added new rules for the Content-Disposition header download file 44 | names. Now the old behavior (add the netcdf file suffix to very 45 | file's basename) can be replaced so that the only the correct suffix 46 | appears in the filename. 47 | 48 | - Added memory cache for BES Catalog responses. This allows the OLFS 49 | to stop asking the same things over and over again. The size and 50 | refresh interval are specified by the " element in the 51 | olfs.xml file. If the element is missing then the memory cache will 52 | be disabled. 53 | 54 | - Increased the number of simultaneous requests from 10 to 200, which 55 | is the default limit for Tomcat. 56 | 57 | - Bug fixes: Large file issues in w10n. 
Fixed build for WCS-2.0 58 | -------------------------------------------------------------------------------- /hyrax_clone.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Clone all of Hyrax from the OpenDAP organization page on GitHub 4 | # This is fairly rough... 5 | 6 | function help { 7 | echo "Usage: $0 [options] where options are:" 8 | echo "-h: help; this message" 9 | echo "-v: verbose" 10 | echo "-n: print what would be done" 11 | echo "-D: do not get the hyrax-dependencies repo (defult is to get it)" 12 | } 13 | 14 | args=`getopt hvnD $*` 15 | if [ $? != 0 ] 16 | then 17 | help 18 | exit 2 19 | fi 20 | 21 | set -- $args 22 | 23 | # Set verbose and do_nothing to false 24 | verbose=""; 25 | dry_run="no" 26 | get_deps="yes" 27 | 28 | for i in $* 29 | do 30 | case "$i" 31 | in 32 | -h) 33 | help 34 | exit 0;; 35 | -v) 36 | verbose="--verbose" 37 | shift;; 38 | -n) 39 | dry_run="yes" 40 | shift;; 41 | -D) 42 | get_deps="no" 43 | shift;; 44 | --) 45 | shift; break;; 46 | esac 47 | done 48 | 49 | function verbose { 50 | if test -n "$verbose" 51 | then 52 | echo "$*" 53 | fi 54 | } 55 | 56 | function do_command { 57 | if test "$dry_run" = "yes" 58 | then 59 | echo "$*" 60 | else 61 | # if test -n "$verbose"; then echo "$*"; fi 62 | verbose "$*" 63 | $* 64 | fi 65 | } 66 | 67 | repo_root=https://github.com/OPENDAP 68 | 69 | # On CentOS the fileout_netcdf tests fail when the RPM netcdf and 70 | # hyrax-dependencies netcdf libraries are mixed. jhrg 1/2/15 71 | if test "$get_deps" = "yes" 72 | then 73 | 74 | if test ! -d hyrax-dependencies 75 | then 76 | do_command "git clone $repo_root/hyrax-dependencies.git $verbose" 77 | else 78 | ( 79 | cd hyrax-dependencies 80 | verbose "In hyrax-dependencies..." 81 | do_command "git pull $verbose" 82 | ) 83 | fi 84 | 85 | fi 86 | 87 | libdap="libdap4" 88 | bes_module_branch="master" 89 | 90 | if test ! 
-d $libdap 91 | then 92 | do_command "git clone ${repo_root}/${libdap}.git $verbose" 93 | else 94 | ( 95 | cd $libdap 96 | verbose "In ${libdap}..." 97 | do_command "git pull $verbose" 98 | ) 99 | fi 100 | 101 | if test ! -d bes 102 | then 103 | do_command "git clone --recurse-submodules -j4 $repo_root/bes.git $verbose" 104 | do_command "git checkout $bes_module_branch" 105 | do_command "git pull $verbose" 106 | fi 107 | 108 | if test ! -d olfs 109 | then 110 | do_command "git clone $repo_root/olfs.git $verbose" 111 | else 112 | ( 113 | cd olfs 114 | verbose "In olfs..." 115 | do_command "git pull $verbose" 116 | ) 117 | fi 118 | -------------------------------------------------------------------------------- /python-xarray/unused/On-PremGHRC_cloud.py: -------------------------------------------------------------------------------- 1 | # https://ghrc.nsstc.nasa.gov/opendap/globalir/data/2020/0525/globir.20146.0000 2 | from netCDF4 import Dataset 3 | import xarray as xa 4 | import dask 5 | 6 | # Allows us to visualize the dask progress for parallel operations 7 | from dask.diagnostics import ProgressBar 8 | 9 | ProgressBar().register() 10 | 11 | # url = 'https://ghrc.nsstc.nasa.gov/opendap/ssmis/f17/daily/data/2019/f17_ssmis_2019' 12 | # 13 | # from datetime import date, timedelta 14 | # 15 | # files = [] 16 | # d = date.fromisoformat('2019-01-01') 17 | # str(d.day).zfill(2) + str(d.month).zfill(2) 18 | # # + timedelta(days=1) 19 | # while True: 20 | # 21 | # if d.month == 3 and d.day == 7: 22 | # d = d + timedelta(days=1) 23 | # continue 24 | # 25 | # files.append(f'{url}{str(d.month).zfill(2)}{str(d.day).zfill(2)}v7.nc') 26 | # d = d + timedelta(days=1) 27 | # 28 | # if d.month == 4: 29 | # break 30 | # 31 | # print(files) 32 | # data = xa.open_mfdataset(files, parallel=True, combine='by_coords') 33 | # data 34 | # 35 | # ws = data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 36 | # print(ws.data) 37 | # 38 | # ws_mean = 
ws.mean(dim=['latitude', 'longitude']) 39 | # ws_mean.plot.line() 40 | 41 | ## OPeNDAP In the Cloud 42 | 43 | import requests 44 | 45 | # CMR Link to use 46 | # https://cmr.earthdata.nasa.gov/search/granules.umm_json?collection_concept_id=C1625128926-GHRC_CLOUD&temporal=2019-01-01T10:00:00Z,2019-12-31T23:59:59Z 47 | r = requests.get( 48 | 'https://cmr.earthdata.nasa.gov/search/granules.umm_json?collection_concept_id=C1625128926-GHRC_CLOUD&temporal=2019-01-01T10:00:00Z,2019-04-01T00:00:00Z&pageSize=365') 49 | response_body = r.json() 50 | 51 | od_files = [] 52 | for itm in response_body['items']: 53 | for urls in itm['umm']['RelatedUrls']: 54 | if 'OPeNDAP' in urls['Description']: 55 | od_files.append(urls['URL']) 56 | 57 | print(od_files) 58 | 59 | import getpass 60 | 61 | username = input("URS Username: ") 62 | password = getpass.getpass("URS Password: ") 63 | 64 | from pydap.client import open_url 65 | from pydap.cas.urs import setup_session 66 | 67 | ds_url = od_files[0] 68 | session = setup_session(username, password, check_url=ds_url) 69 | 70 | # data = xa.open_mfdataset(od_files,engine='pydap',parallel=True, backend_kwargs={'session':session}) 71 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords', 72 | backend_kwargs={'session': session}) 73 | cloud_data 74 | 75 | cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 76 | cloud_ws.data 77 | 78 | cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude']) 79 | cloud_ws_mean.plot.line() 80 | ws_mean.plot.line() 81 | 82 | -------------------------------------------------------------------------------- /spath.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Source this in the directory to be 'prefix' or pass the directory 4 | # as the first param. Make this a command using 'alias.' e.g. 
5 | # alias spath='source ~/bin/spath.sh' 6 | 7 | verbose=1 8 | 9 | # Set 'prefix' to either the first argument or the current working directory. 10 | # Can't use ${1:-...}; positional params don't work in the ${:-} syntax 11 | prefix=$1 12 | export prefix=${prefix:-$PWD/build} 13 | test $verbose && echo " prefix: $prefix" 14 | 15 | # undo this for a production build 16 | export GZIP_ENV=--fast 17 | 18 | if ! echo $PATH | grep $prefix > /dev/null 19 | then 20 | export PATH=$prefix/bin:$prefix/deps/bin:$PATH 21 | test $verbose && echo "# Updated PATH" >&2 22 | fi 23 | test $verbose && echo " PATH: $PATH" >&2 24 | 25 | # set the site config file, saving some typing and maybe some grief 26 | # export CONFIG_SITE=$(pwd)/config.site 27 | 28 | # This is needed for the linux builds; if using the deps libraries 29 | # on linux, those directories also need to be on LD_LIBRARY_PATH. 30 | # I'm not sure this is true... jhrg 1/2/13 31 | # We do need this for icu-3.6 on AWS EC2 instances. jhrg 3/5/13 32 | 33 | if ! echo $LD_LIBRARY_PATH | grep -q deps/lib 34 | then 35 | if test -n "$LD_LIBRARY_PATH" 36 | then 37 | export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$prefix/deps/lib" 38 | else 39 | export LD_LIBRARY_PATH="$prefix/deps/lib" 40 | fi 41 | test $verbose && echo "# Updated LD_LIBRARY_PATH" >&2 42 | fi 43 | test $verbose && echo "LD_LIBRARY_PATH: $LD_LIBRARY_PATH" >&2 44 | 45 | if test -f /etc/redhat-release && grep -q '8\.' /etc/redhat-release 46 | then 47 | echo "Found RHEL 8 or equivalent OS" 48 | 49 | test -d /usr/include/tirpc || echo "WARNING: tirpc header dir not at /usr/include/tirpc" 50 | 51 | if ! echo $CPPFLAGS | grep -q /usr/include/tirpc 52 | then 53 | export CPPFLAGS="$CPPFLAGS -I/usr/include/tirpc" 54 | test $verbose && echo "# Updated CPPFLAGS" >&2 55 | fi 56 | 57 | if ! 
echo $LDFLAGS | grep -q tirpc 58 | then 59 | export LDFLAGS="$LDFLAGS -ltirpc" 60 | test $verbose && echo "# Updated LDFLAGS" >&2 61 | fi 62 | fi 63 | test $verbose && echo " CPPFLAGS: $CPPFLAGS" >&2 64 | test $verbose && echo " LDFLAGS: $LDFLAGS" >&2 65 | 66 | export TESTSUITEFLAGS=--jobs=9 67 | 68 | # I removed the apache tomcat dist from dependencies/downloads 69 | # because it was causing bloat. Assume that a typical nightly build 70 | # has both the tar.gz and directory for tomcat. jhrg 4/28/14 71 | # 72 | # Added it back. The new tomcat 7 scripts don't require that 73 | # CATALINA_HOME is set, so this is really for TC 6 compat. jhrg 12/30/14 74 | tc=`ls -d -1 $prefix/apache-tomcat-* 2> /dev/null | grep -v '.*\.tar\.gz'` 75 | if test -n "$tc" 76 | then 77 | export TOMCAT_DIR=$tc 78 | export CATALINA_HOME=$TOMCAT_DIR 79 | fi 80 | test $verbose && echo " TOMCAT_DIR: $TOMCAT_DIR" >&2 81 | test $verbose && echo " CATALINA_HOME: $CATALINA_HOME" >&2 82 | -------------------------------------------------------------------------------- /python-xarray/ssmis_curl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | granules_file="ssmis_granules.py" 4 | dap_suffix=".ascii" 5 | req_var="wind_speed" 6 | array_subset="%5B0%5D%5B143:1:304%5D%5B559:1:680%5D" 7 | constraint="${req_var}${array_subset}" 8 | result_file_base="ssmis_curl_result" 9 | 10 | 11 | function run_curl(){ 12 | out_file_base="${1}" 13 | cf_name="${2}" 14 | time -p curl -s -n -L -c ${cf_name} -b ${cf_name} ${dap_url}${dap_suffix}?${constraint} | grep -v "${req_var}" >> "${out_file_base}${dap_suffix}" 15 | } 16 | 17 | function use_tea_uat { 18 | export server_url="http://ngap-west.opendap.org/opendap/ssmis/tea-uat/" 19 | export granule_suffix=".dmrpp" 20 | echo "Using TEA in UAT" 21 | } 22 | 23 | function use_ngap_west { 24 | export 
function use_ngap_uat {
    # Point the test at the NGAP OPeNDAP service in the UAT environment.
    # Granules are addressed by name directly, so no suffix is needed.
    export server_url="https://opendap.uat.earthdata.nasa.gov/providers/GHRC_CLOUD/collections/RSS%20SSMIS%20OCEAN%20PRODUCT%20GRIDS%20DAILY%20FROM%20DMSP%20F16%20NETCDF%20V7/granules/"
    export granule_suffix=""
    echo "Using NGAP in UAT"
}

function use_localhost {
    # Point the test at a locally running Hyrax with the NGAP path layout on port 8080.
    export server_url="http://localhost:8080/opendap/ngap/providers/GHRC_CLOUD/collections/RSS%20SSMIS%20OCEAN%20PRODUCT%20GRIDS%20DAILY%20FROM%20DMSP%20F16%20NETCDF%20V7/granules/"
    export granule_suffix=""
    # Bug fix: this previously echoed "Using NGAP in UAT" (copy/paste error),
    # which made test logs report the wrong endpoint.
    echo "Using localhost"
}
function curl_run1000() {
    # Drive the SSMIS subset benchmark: 1000 laps, each lap running 8
    # concurrent run_ssmis worker processes, then waiting for all to finish.

    # Clear results from any previous run.
    rm -f ${result_file_base}*

    for i in {1..1000}; do
        echo "----- LAP: $i Started: "`date`" uTime: "`date "+%s"`
        for process in {0..7}; do
            # Bug fix: the redirection must come before '&'. The original
            # 'run_ssmis ... & 2>&1' backgrounds the command and then applies
            # the redirection to an empty command, so stderr was never merged.
            run_ssmis "${i}" "${process}" 2>&1 &
        done
        wait;
    done
}
Turning on verbose mode goes back 18 | to the old way of logging where each (internal) command run by the BES 19 | is explicitly logged. 20 | 21 | - Switched to ISO8601 date-time; UTC is the default. 22 | Can use local time in the logs using BES.LogTimeLocal 23 | in bes.conf. The old date format is still in the code and 24 | can be used by #undef'ing the compile-time switch ISO8601_TIME_IN_LOGS. 25 | 26 | Handler fixes 27 | 28 | - HDF4 and 5 Handler fixes - summarized in those projects' NEWS file 29 | 30 | Bug Fixes 31 | 32 | - Refactored the software used to serve GeoTiff files (gdal_handler) 33 | so it no longer keeps the GDALDataset handle open. This code was 34 | keeping the GDALDataset handle open across calls to the RequestHandler 35 | when returning data responses and this breaks the NCML handler. 36 | 37 | - Fixed the integration tests in bes/cmdln. 38 | 39 | Specific tickets fixed 40 | 41 | HYRAX-248 Fixed the GeoTiff and JPEG2000 responses; in some 42 | cases the raster output was upside down. 43 | HYRAX-263 Geotiff output appears to have the latitude axis inverted. 44 | HYRAX-281 The scale_array() server function now works with 3D arrays. 45 | HYRAX-283 fileout geotiff doesn't work for NCEP dataset 46 | HYRAX-294 Make this function (scale_grid) work correctly when 47 | subsetting an array drops it down to 2D 48 | HYRAX-309 Test the geo functions with real data 49 | HYRAX-362 Make the GeoTiff (GDAL) handler work with NCML aggregations 50 | HYRAX-417 Determine axis order for scale_grid() and scale_array() functions.
51 | HYRAX-432 WCS-2.0 war file/ deployment is broken 52 | HYRAX-458 Use of mkstemp/mkstemps in libdap and bes is bogus 53 | HYRAX-465 The bes (travis) build is broken 54 | HYRAX-491 Missing single source of truth for supported Format 55 | HYRAX-507 Fix double quote problems with WCS constraint expression eval 56 | HYRAX-529 Hyrax fails to start using "service besd start" 57 | HYRAX-547 FreeForm handler fmt file for ASCII data with two header lines 58 | HYRAX-548 logrotate.d in the release 1.13.4 RPM restarts bes and tomcat every day 59 | 60 | Developer fixes 61 | 62 | Interface Refactoring 63 | 64 | - BESInterface/XMLInterface improved, easier to grok. This affects only 65 | people who write modules for the BES. It does not affect the behavior 66 | of the BES in any way. 67 | 68 | - Streamlined the BESInterface and BESXMLInterface classes and the 69 | DataHandlerInterface object manipulation. Removed unused fields, 70 | renamed confusing fields, removed unused formal parameters. Removed 71 | useless typedefs from BESInterface. Removed init and end lists from 72 | BESInterface. (as they are not used). Simplified class field name 73 | changes Improved formatting for BESInterface and BESXMLCommand. 74 | 75 | - Added the 'hello_world' module into the BES's 'developer' build so 76 | that people using our developer documentation can reference a handler 77 | that works with the current version of the BES. 78 | 79 | - Added the DMR++ handler (see modules/dmrpp_module) the regular 80 | source distribution. This code is not built as part of the regular 81 | build, but it is part of the 'developer' build. 82 | 83 | - There were several functions/methods that made temporary files 84 | (and used mkstemps, et c. to do so). These were refactored into 85 | just one function. 86 | 87 | - Unit tests for the code were made more usable (hyrax-391) 88 | 89 | - Tests for the 'file locking cache' sub-system were added. 
90 | 91 | - 'make distcheck' now works without the cumbersome env var for configure 92 | on Centos 7 and ubuntu 14. Centos 6 is still in the dark ages. 93 | 94 | - Removed unneeded classes in the BES framework (hyrax-378). 95 | 96 | * libdap4 updates included in this release 97 | 98 | - Portability issues: Updated gnulib and mkstemps fixed as per user 99 | reports. Removed a test for block_size in HTTPCacheTest that failed on 100 | Fedora ppc64le system with XFS system 101 | 102 | - Branches/tickets merged: HYRAX-390 (CppUnit test improvements) 103 | 104 | All the autotest dirs are now named 'tests' 105 | 106 | -------------------------------------------------------------------------------- /python-xarray/ngap_xarray_test.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Test access to OPeNDAP within NGAP. This is part of getting 4 | OPeNDAP access functioning for 'analysis in place' within the 5 | NGAP environment. 6 | 7 | jhrg 1/24/22 8 | """ 9 | 10 | import sys 11 | import os 12 | import glob 13 | import webob 14 | import time # function run-times 15 | import getopt # for main() 16 | import getpass # for get_credentials() 17 | 18 | import xarray as xa 19 | 20 | from pydap.client import open_url 21 | from pydap.cas.urs import setup_session 22 | 23 | show_timing = False # True shows the run-time for some functions 24 | 25 | 26 | def get_credentials(): 27 | """ 28 | Read a username and password either from the environment variables USER and URS_PWORD 29 | or from the terminal. 
def build_session(credentials, url):
    """
    Build a pydap.cas.urs session object using the credentials and the URL. As a bonus,
    the session lets the OPeNDAP server know that this client can accept compressed
    responses. NB: This is HTTP-level response compression and not the HDF5/NetCDF4
    per-variable compression.
    :param credentials: A tuple of the username and password to use for the session.
        credentials[0] == the username, credentials[1] == the password.
    :param url: The URL tied to the session
    :return: The session object or None
    """
    session = setup_session(credentials[0], credentials[1], check_url=url)
    session.headers.update({'Accept-Encoding': 'deflate'})
    return session


def open_dataset(url, session):
    """
    Given a URL to a dataset in NGAP, open it using xarray. This function uses PyDAP to
    open the remote dataset via OPeNDAP.
    :param url: The URL - any opendap URL should work.
    :param session: A pydap.cas.urs session object for the URL, or None for
        unauthenticated access.
    :return: An xarray.Dataset that references the open dataset, or None on error.
    """
    try:
        if show_timing:
            tic = time.perf_counter()

        if session is not None:
            xa_ds = xa.open_dataset(url, engine='pydap', backend_kwargs={'session': session})
        else:
            # Bug fix: this branch called open_mfdataset() (multi-file) for a single
            # URL; use open_dataset() to match the session-bearing branch and this
            # function's contract.
            xa_ds = xa.open_dataset(url, engine='pydap')

        return xa_ds

    except webob.exc.HTTPError as err:
        # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html#
        print("HTTPError: code: ", err.code, ": ", err.detail)
        print("Error: ", sys.exc_info()[0])

    except UnicodeError as err:
        print("UnicodeError - encoding: ", err.encoding, " reason: ", err.reason,
              " object: ", type(err.object), " start: ", err.object[err.start], " end: ", err.end)
        print("Error: ", sys.exc_info()[0])

    except Exception:
        # Bug fix: narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # propagate instead of being swallowed.
        print("Error: ", sys.exc_info()[0])

    finally:
        # 'tic' is always bound before anything that can raise when show_timing is on:
        # it is the first statement in the try block.
        if show_timing:
            print(f"Time to open the url: {time.perf_counter() - tic:0.4f}\n")


def main():
    """
    Parse command-line options (-l login, -p password, -u URL) and open the
    given OPeNDAP URL with xarray/pydap, authenticating when credentials are given.
    """
    usage = "Options h: get help, l: login name, p: URS password, u: OPeNDAP URL"  # FIXME

    try:
        # see https://docs.python.org/3.1/library/getopt.html
        optlist, args = getopt.getopt(sys.argv[1:], 'hl:p:u:')
    except getopt.GetoptError:
        # print help information and exit:
        print(usage)
        sys.exit(2)

    url = ""
    username = ""
    password = ""
    # Bug fix: was 'session = False', which open_dataset() treats as a real
    # session because it tests 'session is not None'. Use None as the sentinel.
    session = None

    for o, a in optlist:
        if o in ("-h", "--help"):
            print(usage)

        if o == "-l":
            username = a

        if o == "-p":
            password = a

        if o == "-u":
            url = a

    # Bug fix: the original tested 'url is None', which could never be true
    # because url is initialized to "" above, so a missing -u went undetected.
    if not url:
        print("A URL is required")
        print(usage)
        sys.exit(2)

    if username != "" and password != "":
        session = build_session((username, password), url)

    xa_ds = open_dataset(url, session)

    # cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170))
    # cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude'])
    # print(cloud_ws_mean)


if __name__ == "__main__":
    main()
Since CentOS/RedHat comes with 'yum' and the 32 | yum command syntax is fairly concise, I'll use it as shorthand for the 33 | packages you need (with the advantage that some users can cut and 34 | paste in a plain machine and get the packages installed very quickly): 35 | 36 | yum install java-1.7.0-openjdk java-1.7.0-openjdk-devel ant git \ 37 | gcc-c++ flex bison openssl-devel libuuid-devel readline-devel \ 38 | zlib-devel libjpeg-devel libxml2-devel curl-devel ant-junit 39 | 40 | Then download and build the latest versions of autoconf, automake and 41 | libtool. Those can be found at: 42 | 43 | http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz 44 | http://ftp.gnu.org/gnu/automake/automake-1.14.1.tar.gz 45 | http://ftp.gnu.org/gnu/libtool/libtool-2.4.2.tar.gz 46 | 47 | and are very easy to build. 48 | 49 | 1. Set up the 'prefix' and 'PATH' environment variables in the shell 50 | you're using. Use 'source spath.sh' to do this. This will set the 51 | 'prefix' environment variable to `pwd`/build and add 52 | `pwd`/build/bin to the front of PATH so that libdap, BES and the 53 | various modules/handlers for the BES can find the dependencies once 54 | they are built. 55 | 56 | source spath.sh 57 | 58 | 2. Now clone all of the source repos for Hyrax using the 59 | 'hyrax_clone.sh' script. This will take a while, but it's not too bad. 60 | The script takes some options: verbose (-v), dry run (-n) and No 61 | Dependencies (-D). The default will clone all of the repos including 62 | the hyrax-dependencies. If you're building on CentOS and want to use 63 | EPEL for the deps, use -D. Using -D will suppress cloning the 64 | hyrax-dependencies repo. This script assumes you want to clone the BES 65 | and load all of the modules/handlers that normally are released with 66 | Hyrax. 
If that's not what you want to do, go to the web page described 67 | earlier and build the code by hand, which will give you complete 68 | control over what software is cloned from git, how it is built and so 69 | on. 70 | 71 | ./hyrax_clone.sh -v 72 | 73 | 3. Build the code using the 'hyrax_build.sh' script. It takes various 74 | options: verbose (-v), dry run (-n) and some others; -h provides some 75 | help. This script will build all of the code, including the 76 | hyrax-dependencies if they are present (so this script works 77 | 'intelligently' in conjunction with the hyrax_clone.sh script). The -c 78 | and -d options run the 'clean' and 'distcheck' targets of the Makefiles 79 | and are useful for automated builds. 80 | 81 | ./hyrax_build.sh -v 82 | 83 | 4. Test the server. The hyrax_build.sh script will install the server 84 | in $prefix/build. If it completes successfully, the server should be 85 | built and installed. 86 | 87 | To start the server, first start the BES and then the OLFS. Note that 88 | the besctl utility is on your PATH since you sourced 'spath.sh' and 89 | therefore have $prefix/bin on your PATH 90 | 91 | besctl start 92 | 93 | ./build/apache-tomcat-7.0.57/bin/startup.sh 94 | 95 | Now go to http://localhost:8080/opendap and you should see the server 96 | and the test data that is distributed with it. If not, here are some things 97 | to check: 98 | 99 | * Is the BES running? There should be several processes associated 100 | with the BES and you can see them using 'besctl pids'. If not, look at 101 | the BES log file ($prefix/build/var/bes.log) for error messages. 102 | 103 | * Is tomcat running? Use 'ps -ef | grep tomcat' to see if it is. If 104 | not, look in $prefix/build/apache-tomcat-*/logs/catalina.out for clues 105 | as to why. 106 | 107 | * Are you working on a machine that has ports like 8080 blocked? Hyrax 108 | needs an open port for Tomcat, nominally 8080, plus an open port for 109 | the BES. By default the BES uses port 10022.
110 | 111 | For more detailed information on Hyrax and its configuration, see: 112 | 113 | http://docs.opendap.org/index.php/Documentation 114 | 115 | ----------------------------------------------------------------------- 116 | 117 | Notes: 118 | 119 | To clean the repo, returning it to it's initial state, use: 120 | 121 | rm -rf bes build hyrax-dependencies/ libdap logs olfs \ 122 | bes.log libdap.log olfs.log 123 | 124 | If one of the distcheck targets failed, then the build dir that 125 | it left behind will not be writable by anyone, so chmod 755 or sudo 126 | to remove it. 127 | -------------------------------------------------------------------------------- /python-xarray/grfn_ngap_west_cloud.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | import sys 4 | 5 | # Get the granule names 6 | # from ssmis_granules import f16_ssmis_100 7 | from grfn_granules import grfn_gunw_100 8 | 9 | import os 10 | import glob 11 | 12 | 13 | def clean_cache(): 14 | files = glob.glob('/tmp/hyrax_http/*') 15 | 16 | for f in files: 17 | try: 18 | # f.unlink() 19 | os.unlink(f) 20 | except OSError as e: 21 | print("Error: %s : %s" % (f, e.strerror)) 22 | 23 | 24 | base_url = "" 25 | suffix = "" 26 | f = False # results output file 27 | 28 | def ngap_service(): 29 | global base_url 30 | global suffix 31 | # This is the base url for the NGAP service which is attached to prod. 
def s3_bucket():
    """Target the dmr++ files whose dmrpp:href URLs point at objects in the
    opendap S3 bucket (ngap-ssmis-west), served via the ngap-west Hyrax."""
    global base_url
    global suffix
    base_url = "http://ngap-west.opendap.org/opendap/asf_grfn/s3/"
    suffix = ".dmrpp"
    print("Using S3 Bucket: ", base_url)


def tea():
    """Target the dmr++ files whose dmrpp:href URLs point at the TEA endpoint
    for PROD; URLs handed out by TEA are cached."""
    global base_url
    global suffix
    base_url = "http://ngap-west.opendap.org/opendap/asf_grfn/tea/"
    suffix = ".dmrpp"
    print("Using TEA: ", base_url)
68 | granule_files_base = "http://ngap-west.opendap.org/opendap/asf_grfn/granules/" 69 | base_url = granule_files_base 70 | suffix="" 71 | print("Using Granules: ", base_url) 72 | 73 | 74 | def get_the_things(): 75 | import webob 76 | import time 77 | 78 | global base_url 79 | global suffix 80 | global f # results file 81 | 82 | print("base_url: ", base_url) 83 | print(" suffix: ", suffix) 84 | 85 | # Allows us to visualize the dask progress for parallel operations 86 | from dask.diagnostics import ProgressBar 87 | 88 | ProgressBar().register() 89 | 90 | # OPeNDAP In the Cloud 91 | 92 | od_files = [] 93 | 94 | for g in grfn_gunw_100: 95 | od_files.append(base_url + g + suffix) 96 | 97 | print(" first: ", od_files[0], '\n', " last: ", od_files[-1]) 98 | try: 99 | tic = time.perf_counter() 100 | 101 | print("Calling xa.open_mfdataset()"); 102 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 103 | print(cloud_data) 104 | 105 | print("Calling cloud_data[] subset"); 106 | cloud_ws = cloud_data['science_grids_data_amplitude'] #.sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 107 | print(cloud_ws) 108 | 109 | #print("Calling cloud_ws.mean()"); 110 | #cloud_ws_mean = cloud_ws.mean(dim=['science_grids_data_latitude', 'science_grids_data_longitude']) 111 | 112 | #print(cloud_ws_mean) 113 | 114 | if f: 115 | f.write(f"{time.perf_counter() - tic:0.4f},") 116 | f.write("success\n") 117 | 118 | except webob.exc.HTTPError as err: 119 | # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html# 120 | print("HTTPError: code: ", err.code, ": ", err.detail); 121 | print("Error: ", sys.exc_info()[0]) 122 | if f: 123 | f.write(f"{time.perf_counter() - tic:0.4f},") 124 | f.write("fail\n") 125 | except: 126 | print("Error: ", sys.exc_info()[0]) 127 | if f: 128 | f.write(f"{time.perf_counter() - tic:0.4f},") 129 | f.write("fail\n") 130 | 131 | 132 | def main(): 133 | import getopt 134 | 135 | global f # 
results file 136 | 137 | try: 138 | # see https://docs.python.org/3.1/library/getopt.htm 139 | optlist, args = getopt.getopt(sys.argv[1:], 'sgntahd:') 140 | except: 141 | # print help information and exit: 142 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n and t.") 143 | sys.exit(2) 144 | 145 | for o, a in optlist: 146 | if o in ("-h", "--help"): 147 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n and t.") 148 | 149 | if o in ("-d",): 150 | print("Datafile name: ", a) 151 | f = open(a, "a") 152 | 153 | if o in ("-s", "-a"): 154 | print("###########################################") 155 | if f: 156 | f.write("s3,") 157 | s3_bucket() 158 | clean_cache() 159 | get_the_things() 160 | 161 | if o in ("-g", "-a"): 162 | print("###########################################") 163 | if f: 164 | f.write("granule,") 165 | granules() 166 | clean_cache() 167 | get_the_things() 168 | 169 | if o in ("-t", "-a"): 170 | print("###########################################") 171 | if f: 172 | f.write("tea,") 173 | tea() 174 | clean_cache() 175 | get_the_things() 176 | 177 | if o in ("-n", "-a"): 178 | print("###########################################") 179 | if f: 180 | f.write("ngap,") 181 | ngap_service() 182 | clean_cache() 183 | get_the_things() 184 | 185 | if f: 186 | f.close() 187 | 188 | 189 | if __name__ == "__main__": 190 | main() 191 | -------------------------------------------------------------------------------- /python-xarray/ssmis_azure.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | import sys 4 | 5 | # Get the granule names 6 | from ssmis_azure_granules import f16_ssmis_100 7 | 8 | import os 9 | import glob 10 | 11 | 12 | def clean_cache(): 13 | files = glob.glob('/tmp/hyrax_http/*') 14 | 15 | for f in files: 16 | try: 17 | # f.unlink() 18 | os.unlink(f) 19 | except OSError as e: 20 | print("Error: %s : %s" % (f, e.strerror)) 21 | 22 | 23 | 
# Module state shared by the endpoint-selector functions below.
base_url = ""  # base URL of the data source chosen on the command line
suffix = ""    # per-granule suffix (".dmrpp" for dmr++ endpoints, "" otherwise)
f = False      # results output file (False until -d opens one)

def ngap_service():
    """Select the NGAP service attached to prod (URL is a placeholder here)."""
    global base_url
    global suffix
    base_url = "FAIL"
    suffix = ""
    print("Using NGAP Service")


def s3_bucket():
    """Select the dmr++ files served from Azure Blob Storage via Hyrax."""
    global base_url
    global suffix
    # Old URL: "http://35.209.179.223:8080/opendap/data/dmrpp/ssmis/"
    base_url = "http://hyrax-azure-instance.opendap.org:8080/opendap/data/ssmis/"
    suffix = ".dmrpp"
    print("Using Azure BlobStorage")


def tea_prod():
    """Select dmr++ files whose dmrpp:href URLs point at the PROD TEA endpoint
    (URL is a placeholder here); TEA-issued URLs are cached."""
    global base_url
    global suffix
    base_url = "FAIL"
    suffix = ".dmrpp"
    print("Using TEA in PROD")


def tea_uat():
    """Select dmr++ files whose dmrpp:href URLs point at the UAT TEA endpoint
    (URL is a placeholder here)."""
    global base_url
    global suffix
    base_url = "FAIL"
    suffix = ".dmrpp"
    print("Using TEA in UAT")
78 | granule_files_base = "FAIL" 79 | base_url = granule_files_base 80 | suffix="" 81 | print("Using Granules") 82 | 83 | 84 | def get_the_things(): 85 | import webob 86 | import time 87 | 88 | global base_url 89 | global suffix 90 | global f # results file 91 | 92 | print("base_url: ", base_url) 93 | print(" suffix: ", suffix) 94 | 95 | # Allows us to visualize the dask progress for parallel operations 96 | from dask.diagnostics import ProgressBar 97 | 98 | ProgressBar().register() 99 | 100 | # OPeNDAP In the Cloud 101 | 102 | od_files = [] 103 | 104 | for g in f16_ssmis_100: 105 | od_files.append(base_url + g + suffix) 106 | 107 | print(" first: ", od_files[0], '\n', " last: ", od_files[-1]) 108 | try: 109 | tic = time.perf_counter() 110 | 111 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 112 | 113 | cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 114 | 115 | cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude']) 116 | 117 | print(cloud_ws_mean) 118 | 119 | if f: 120 | f.write(f"{time.perf_counter() - tic:0.4f},") 121 | f.write("success\n") 122 | 123 | except webob.exc.HTTPError as err: 124 | # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html# 125 | print("HTTPError: code: ", err.code, ": ", err.detail); 126 | print("Error: ", sys.exc_info()[0]) 127 | if f: 128 | f.write(f"{time.perf_counter() - tic:0.4f},") 129 | f.write("fail\n") 130 | except: 131 | print("Error: ", sys.exc_info()[0]) 132 | if f: 133 | f.write(f"{time.perf_counter() - tic:0.4f},") 134 | f.write("fail\n") 135 | 136 | 137 | def main(): 138 | import getopt 139 | 140 | global f # results file 141 | 142 | try: 143 | # see https://docs.python.org/3.1/library/getopt.htm 144 | optlist, args = getopt.getopt(sys.argv[1:], 'sgntahud:') 145 | except: 146 | # print help information and exit: 147 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n 
and t.") 148 | sys.exit(2) 149 | 150 | for o, a in optlist: 151 | if o in ("-h", "--help"): 152 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n and t.") 153 | 154 | if o in ("-d",): 155 | print("Datafile name: ", a) 156 | f = open(a, "a") 157 | 158 | if o in ("-s", "-a"): 159 | print("###########################################") 160 | if f: 161 | f.write("s3,") 162 | s3_bucket() 163 | clean_cache() 164 | get_the_things() 165 | 166 | if o in ("-g", "-a"): 167 | print("###########################################") 168 | if f: 169 | f.write("granule,") 170 | granules() 171 | clean_cache() 172 | get_the_things() 173 | 174 | if o in ("-t", "-a"): 175 | print("###########################################") 176 | if f: 177 | f.write("tea_prod,") 178 | tea_prod() 179 | clean_cache() 180 | get_the_things() 181 | 182 | if o in ("-u", "-a"): 183 | print("###########################################") 184 | if f: 185 | f.write("tea_uat,") 186 | tea_uat() 187 | clean_cache() 188 | get_the_things() 189 | 190 | if o in ("-n", "-a"): 191 | print("###########################################") 192 | if f: 193 | f.write("ngap,") 194 | ngap_service() 195 | clean_cache() 196 | get_the_things() 197 | 198 | if f: 199 | f.close() 200 | 201 | 202 | if __name__ == "__main__": 203 | main() 204 | -------------------------------------------------------------------------------- /python-xarray/ssmis_gcloud.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | import sys 4 | 5 | # Get the granule names 6 | from ssmis_granules import f16_ssmis_100 7 | 8 | import os 9 | import glob 10 | 11 | 12 | def clean_cache(): 13 | files = glob.glob('/tmp/hyrax_http/*') 14 | 15 | for f in files: 16 | try: 17 | # f.unlink() 18 | os.unlink(f) 19 | except OSError as e: 20 | print("Error: %s : %s" % (f, e.strerror)) 21 | 22 | 23 | base_url = "" 24 | suffix = "" 25 | f = False # results output file 26 | 27 | def 
ngap_service(): 28 | global base_url 29 | global suffix 30 | # This is the base url for the NGAP service which is attached to prod. 31 | ngap_service_base = "FAIL" 32 | base_url = ngap_service_base 33 | suffix = "" 34 | print("Using NGAP Service") 35 | 36 | 37 | def s3_bucket(): 38 | global base_url 39 | global suffix 40 | 41 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 42 | # point to objects in an opendap S3 bucket called ngap-ssmis-west 43 | s3_bucket_base = "http://hyrax-gce-instance.opendap.org:8080/opendap/data/dmrpp/ssmis/" 44 | # Old URL: "http://35.209.179.223:8080/opendap/data/dmrpp/ssmis/" 45 | base_url = s3_bucket_base 46 | suffix=".dmrpp" 47 | print("Using S3 Bucket ngap-ssmis-west") 48 | 49 | 50 | def tea_prod(): 51 | global base_url 52 | global suffix 53 | 54 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 55 | # point to the TEA endpoint for PROD. URLs from TEA are cached. 56 | tea_prod_base = "FAIL" 57 | base_url = tea_prod_base 58 | suffix=".dmrpp" 59 | print("Using TEA in PROD") 60 | 61 | def tea_uat(): 62 | global base_url 63 | global suffix 64 | 65 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 66 | # point to the TEA endpoint for PROD. URLs from TEA are cached. 67 | tea_prod_base = "FAIL" 68 | base_url = tea_prod_base 69 | suffix=".dmrpp" 70 | print("Using TEA in UAT") 71 | 72 | 73 | def granules(): 74 | global base_url 75 | global suffix 76 | # This is the base URL for the collection of source netcdf-4 granule 77 | # files. 
78 | granule_files_base = "FAIL" 79 | base_url = granule_files_base 80 | suffix="" 81 | print("Using Granules") 82 | 83 | 84 | def get_the_things(): 85 | import webob 86 | import time 87 | 88 | global base_url 89 | global suffix 90 | global f # results file 91 | 92 | print("base_url: ", base_url) 93 | print(" suffix: ", suffix) 94 | 95 | # Allows us to visualize the dask progress for parallel operations 96 | from dask.diagnostics import ProgressBar 97 | 98 | ProgressBar().register() 99 | 100 | # OPeNDAP In the Cloud 101 | 102 | od_files = [] 103 | 104 | for g in f16_ssmis_100: 105 | od_files.append(base_url + g + suffix) 106 | 107 | print(" first: ", od_files[0], '\n', " last: ", od_files[-1]) 108 | try: 109 | tic = time.perf_counter() 110 | 111 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 112 | 113 | cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 114 | 115 | cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude']) 116 | 117 | print(cloud_ws_mean) 118 | 119 | if f: 120 | f.write(f"{time.perf_counter() - tic:0.4f},") 121 | f.write("success\n") 122 | 123 | except webob.exc.HTTPError as err: 124 | # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html# 125 | print("HTTPError: code: ", err.code, ": ", err.detail); 126 | print("Error: ", sys.exc_info()[0]) 127 | if f: 128 | f.write(f"{time.perf_counter() - tic:0.4f},") 129 | f.write("fail\n") 130 | except: 131 | print("Error: ", sys.exc_info()[0]) 132 | if f: 133 | f.write(f"{time.perf_counter() - tic:0.4f},") 134 | f.write("fail\n") 135 | 136 | 137 | def main(): 138 | import getopt 139 | 140 | global f # results file 141 | 142 | try: 143 | # see https://docs.python.org/3.1/library/getopt.htm 144 | optlist, args = getopt.getopt(sys.argv[1:], 'sgntahud:') 145 | except: 146 | # print help information and exit: 147 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n 
and t.") 148 | sys.exit(2) 149 | 150 | for o, a in optlist: 151 | if o in ("-h", "--help"): 152 | print("Options -d -s s3, -g granules, -n ngap api, -t tea, -a all of s, g, n and t.") 153 | 154 | if o in ("-d",): 155 | print("Datafile name: ", a) 156 | f = open(a, "a") 157 | 158 | if o in ("-s", "-a"): 159 | print("###########################################") 160 | if f: 161 | f.write("s3,") 162 | s3_bucket() 163 | clean_cache() 164 | get_the_things() 165 | 166 | if o in ("-g", "-a"): 167 | print("###########################################") 168 | if f: 169 | f.write("granule,") 170 | granules() 171 | clean_cache() 172 | get_the_things() 173 | 174 | if o in ("-t", "-a"): 175 | print("###########################################") 176 | if f: 177 | f.write("tea_prod,") 178 | tea_prod() 179 | clean_cache() 180 | get_the_things() 181 | 182 | if o in ("-u", "-a"): 183 | print("###########################################") 184 | if f: 185 | f.write("tea_uat,") 186 | tea_uat() 187 | clean_cache() 188 | get_the_things() 189 | 190 | if o in ("-n", "-a"): 191 | print("###########################################") 192 | if f: 193 | f.write("ngap,") 194 | ngap_service() 195 | clean_cache() 196 | get_the_things() 197 | 198 | if f: 199 | f.close() 200 | 201 | 202 | if __name__ == "__main__": 203 | main() 204 | -------------------------------------------------------------------------------- /hyrax_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Build all of Hyrax given a fresh checkout from git. 4 | # This is fairly rough... 
5 | 6 | function help { 7 | echo "Usage: $0 [options] where options are:" 8 | echo "-h: help; this message" 9 | echo "-v: verbose" 10 | echo "-n: print what would be done" 11 | echo "-2: DAP2 build" 12 | echo "-c: run make clean before the builds" 13 | echo "-d: run the distcheck targets for the C++ code" 14 | echo "-D: Do not build thehyrax-dependencies package" 15 | echo "-N: If the dependencies are present, build the target" 16 | echo "-p prefix: use as the build/install prefix" 17 | } 18 | 19 | # Hack jhrg 1/8/24 20 | jobs=-j20 21 | 22 | args=`getopt hvn2cdDNp: $*` 23 | if test $? != 0 24 | then 25 | help 26 | exit 2 27 | fi 28 | 29 | set -- $args 30 | 31 | # Not sure about this way of handling prefix... Should we make it 32 | # easier to build and install to /usr/local? 33 | export prefix=${prefix:-$PWD/build} 34 | 35 | if echo $PATH | grep $prefix > /dev/null 36 | then 37 | echo "PATH Already set" 38 | else 39 | export PATH=$prefix/bin:$prefix/deps/bin:$PATH 40 | fi 41 | 42 | # This is needed for the linux builds; if using the deps libraries 43 | # on linux, those directories also need to be on LD_LIBRARY_PATH. 44 | # I'm not sure this is true... jhrg 1/2/13 45 | # We do need this for icu-3.6 on AWS EC2 instances. 
jhrg 3/5/13 46 | if echo $LD_LIBRARY_PATH | grep $prefix/lib:$prefix/deps/lib > /dev/null 47 | then 48 | echo "LD_LIBRARY_PATH already set" 49 | else 50 | export LD_LIBRARY_PATH=$prefix/lib:$prefix/deps/lib:$LD_LIBRARY_PATH 51 | fi 52 | 53 | # Set verbose and dry_run to false 54 | 55 | verbose="" 56 | dry_run="no" 57 | dap2="no" 58 | clean="" 59 | distcheck="" 60 | build_hyrax_deps="yes" 61 | for_nasa_rpm="" 62 | 63 | for i in $* 64 | do 65 | case "$i" 66 | in 67 | -h) 68 | help 69 | exit 0;; 70 | -v) 71 | verbose="--verbose" 72 | shift;; 73 | -n) 74 | dry_run="yes" 75 | shift;; 76 | -2) 77 | dap2="yes" 78 | shift;; 79 | -c) 80 | clean="yes" 81 | shift;; 82 | -d) 83 | distcheck="yes" 84 | shift;; 85 | -D) 86 | build_hyrax_deps="no" 87 | shift;; 88 | -N) 89 | for_nasa_rpm="for-static-rpm" 90 | shift;; 91 | -p) 92 | prefix=$2 93 | shift; shift;; 94 | --) 95 | shift; break;; 96 | esac 97 | done 98 | 99 | function verbose { 100 | if test -n "$verbose" 101 | then 102 | echo "$*" 103 | fi 104 | } 105 | 106 | function do_command { 107 | if test "$dry_run" = "yes" 108 | then 109 | echo "$*" 110 | else 111 | verbose "$*" 112 | $* 113 | fi 114 | } 115 | 116 | # Two args "do_make_build " 117 | function do_make_build { 118 | verbose "Building in $1" 119 | 120 | # in a sub-shell 121 | ( 122 | if cd $1 123 | then 124 | # shift first arg off so $* holds the remaining args 125 | shift 126 | 127 | if test ! -x configure 128 | then 129 | do_command "autoreconf --force --install $verbose" 130 | fi 131 | if test ! -x configure -a -z "$dry_run" 132 | then 133 | echo "Could not find or build configure script" 134 | exit 1 135 | fi 136 | 137 | do_command "./configure $*" 138 | verbose "%%% configure status: $?" 139 | 140 | if test -n "$clean" 141 | then 142 | do_command "make clean" 143 | fi 144 | 145 | do_command "make $jobs" 146 | verbose "%%% make status: $?" 
147 | 148 | # some of the bes/handler tests fail w/parallel builds 149 | do_command "make check -k" 150 | verbose "%%% check status: $?" 151 | 152 | do_command "make install" 153 | verbose "%%% install status: $?" 154 | 155 | if test -n "$distcheck" 156 | then 157 | do_command "make distcheck $jobs" 158 | verbose "%%% distcheck status: $?" 159 | fi 160 | fi 161 | ) 162 | } 163 | 164 | # Two args "do_build " 165 | function do_ant_build { 166 | verbose "Building in $1" 167 | 168 | # in a sub-shell 169 | ( 170 | if cd $1 171 | then 172 | # shift first arg off so $* holds the remaining args 173 | shift 174 | if test ! -f build.xml 175 | then 176 | echo "Could not find build.xml script" 177 | exit 1 178 | fi 179 | 180 | do_command "ant server $*" 181 | verbose "%%% make status: $? (ant server $*)" 182 | 183 | if test -d $tomcat_webapps 184 | then 185 | do_command "cp build/dist/opendap.war $tomcat_webapps" 186 | verbose "%%% install status: $?" 187 | else 188 | echo "Could not find $tomcat_webapps" 189 | verbose "%%% install status: 2" 190 | fi 191 | 192 | do_command "ant check $*" 193 | verbose "%%% check status: $? (ant check $*)" 194 | fi 195 | ) 196 | } 197 | 198 | if test -d hyrax-dependencies -a x$build_hyrax_deps = xyes 199 | then 200 | ( 201 | verbose "Building the local dependencies" 202 | 203 | cd hyrax-dependencies 204 | # $for_nasa_rpm will contain the magic needed to make just the stuff 205 | # we need for the BES rpm for NASA (with static HDF4/hdfeos2, ...). 206 | # NB: The -N option of this script will build the static version of 207 | # these dependencies; without -N you get all the deps as dynamic libraries. 208 | do_command "make $jobs $for_nasa_rpm" 209 | 210 | # The above unpacks, builds and installs each dependency. It uses lots 211 | # of space. This removes the source build directories, saving almost 212 | # 2GB of disk. 
jhrg 10/24/19 213 | do_command "make really-clean" 214 | 215 | # figure out the apache tomcat dir name based on the rev of tomcat's 216 | # tar file in the 'extra_downloads' dir and replace if needed. This 217 | # tests if the versions are not the same, not if one is newer than the 218 | # other. 219 | deps_tomcat_ver=`ls -1 extra_downloads/apache-tomcat-7.*.*.tar.gz | sed 's@.*\([0-9]\.[0-9]*\.[0-9]*\)\.tar.gz@\1@'` 220 | if test ! -d $prefix/apache-tomcat-$deps_tomcat_ver 221 | then 222 | verbose "Replacing tomcat with version $dep_tomcat_ver" 223 | # remove previous tomcat; add the new one 224 | rm $prefix/apache-tomcat-* 225 | do_command "tar -xzf extra_downloads/apache-tomcat-7.*.*.tar.gz -C $prefix" 226 | fi 227 | ) 228 | 229 | # deps is used later with the BES build. If hyrax-dependencies is not 230 | # here, then assume the third-party packages are otherwise available. 231 | deps="--with-dependencies=$prefix/deps" 232 | fi 233 | 234 | prefix_arg=--prefix=$prefix 235 | 236 | if test "$dap2" == "yes" 237 | then 238 | libdap="libdap" 239 | else 240 | libdap="libdap4" 241 | fi 242 | 243 | do_make_build $libdap $prefix_arg --enable-developer 2>&1 | tee $libdap.log 244 | 245 | do_make_build bes $prefix_arg $deps --enable-developer 2>&1 | tee bes.log 246 | 247 | tomcat_webapps=$prefix/apache-tomcat-9.*.*/webapps 248 | 249 | do_ant_build olfs 2>&1 | tee olfs.log 250 | 251 | -------------------------------------------------------------------------------- /python-xarray/grfn_granules.py: -------------------------------------------------------------------------------- 1 | grfn_gunw_100 = [ "S1-GUNW-A-R-087-tops-20141023_20141011-153856-27545N_25464N-PP-1a1a-v2_0_2.nc", 2 | "S1-GUNW-A-R-087-tops-20141023_20141011-153922-29040N_26961N-PP-b548-v2_0_2.nc", 3 | "S1-GUNW-A-R-087-tops-20141023_20141011-153947-30534N_28625N-PP-86d9-v2_0_2.nc", 4 | "S1-GUNW-A-R-087-tops-20141023_20141011-154012-320275N_30122N-PP-f721-v2_0_2.nc", 5 | 
"S1-GUNW-A-R-087-tops-20141023_20141011-154036-33520N_31618N-PP-b56c-v2_0_2.nc", 6 | "S1-GUNW-A-R-087-tops-20141023_20141011-154101-35011N_33113N-PP-a240-v2_0_2.nc", 7 | "S1-GUNW-A-R-087-tops-20141023_20141011-154101-35011N_33113N-PP-a240-v2_0_3.nc", 8 | "S1-GUNW-A-R-160-tops-20141028_20141016-154720-28242N_26016N-PP-fa45-v2_0_2.nc", 9 | "S1-GUNW-A-R-160-tops-20141028_20141016-154720-28242N_26016N-PP-fa45-v2_0_3.nc", 10 | "S1-GUNW-A-R-160-tops-20141028_20141016-154747-29735N_27680N-PP-4b3d-v2_0_2.nc", 11 | "S1-GUNW-A-R-160-tops-20141028_20141016-154747-29735N_27680N-PP-4b3d-v2_0_3.nc", 12 | "S1-GUNW-A-R-160-tops-20141028_20141016-154812-31230N_29343N-PP-f75d-v2_0_2.nc", 13 | "S1-GUNW-A-R-160-tops-20141028_20141016-154812-31230N_29343N-PP-f75d-v2_0_3.nc", 14 | "S1-GUNW-A-R-160-tops-20141028_20141016-154837-32723N_30840N-PP-5247-v2_0_2.nc", 15 | "S1-GUNW-A-R-160-tops-20141028_20141016-154837-32723N_30840N-PP-5247-v2_0_3.nc", 16 | "S1-GUNW-A-R-087-tops-20141104_20141011-153856-27532N_25471N-PP-54dd-v2_0_2.nc", 17 | "S1-GUNW-A-R-087-tops-20141104_20141023-153856-27532N_25471N-PP-56c3-v2_0_2.nc", 18 | "S1-GUNW-A-R-087-tops-20141104_20141011-153922-29026N_27135N-PP-ef64-v2_0_2.nc", 19 | "S1-GUNW-A-R-087-tops-20141104_20141023-153922-29026N_27135N-PP-ea40-v2_0_2.nc", 20 | "S1-GUNW-A-R-087-tops-20141104_20141011-153946-30520N_28633N-PP-305c-v2_0_2.nc", 21 | "S1-GUNW-A-R-087-tops-20141104_20141023-153946-30520N_28633N-PP-7262-v2_0_2.nc", 22 | "S1-GUNW-A-R-087-tops-20141104_20141023-154011-32015N_30130N-PP-e6bd-v2_0_2.nc", 23 | "S1-GUNW-A-R-087-tops-20141104_20141011-154011-32015N_30130N-PP-aeea-v2_0_2.nc", 24 | "S1-GUNW-A-R-087-tops-20141104_20141011-154036-33508N_31625N-PP-0482-v2_0_2.nc", 25 | "S1-GUNW-A-R-087-tops-20141104_20141023-154036-33508N_31625N-PP-10f4-v2_0_2.nc", 26 | "S1-GUNW-A-R-087-tops-20141104_20141011-154101-35011N_33112N-PP-9de2-v2_0_2.nc", 27 | "S1-GUNW-A-R-087-tops-20141104_20141023-154101-35011N_33112N-PP-1405-v2_0_2.nc", 28 | 
"S1-GUNW-A-R-087-tops-20141104_20141011-154101-35011N_33112N-PP-9de2-v2_0_3.nc", 29 | "S1-GUNW-A-R-087-tops-20141104_20141023-154101-35011N_33112N-PP-1405-v2_0_3.nc", 30 | "S1-GUNW-A-R-143-tops-20141108_20141015-114912-30031N_27809N-PP-f0ea-v2_0_3.nc", 31 | "S1-GUNW-A-R-143-tops-20141108_20141015-114938-31525N_29472N-PP-6c30-v2_0_3.nc", 32 | "S1-GUNW-A-R-143-tops-20141108_20141015-115004-33018N_31134N-PP-e3d9-v2_0_3.nc", 33 | "S1-GUNW-A-R-143-tops-20141108_20141015-115029-34510N_32630N-PP-a450-v2_0_3.nc", 34 | "S1-GUNW-A-R-143-tops-20141108_20141015-115053-36003N_34125N-PP-f5b9-v2_0_3.nc", 35 | "S1-GUNW-A-R-143-tops-20141108_20141015-115118-37495N_35619N-PP-ff00-v2_0_3.nc", 36 | "S1-GUNW-A-R-143-tops-20141108_20141015-115144-38985N_37112N-PP-9e72-v2_0_3.nc", 37 | "S1-GUNW-A-R-143-tops-20141108_20141015-115209-40477N_38605N-PP-ebdb-v2_0_3.nc", 38 | "S1-GUNW-A-R-143-tops-20141108_20141015-115234-41966N_40095N-PP-f72d-v2_0_3.nc", 39 | "S1-GUNW-A-R-160-tops-20141109_20141028-154720-28242N_26016N-PP-9abe-v2_0_2.nc", 40 | "S1-GUNW-A-R-160-tops-20141109_20141016-154720-28242N_26016N-PP-9100-v2_0_2.nc", 41 | "S1-GUNW-A-R-160-tops-20141109_20141028-154720-28242N_26016N-PP-9abe-v2_0_3.nc", 42 | "S1-GUNW-A-R-160-tops-20141109_20141016-154720-28242N_26016N-PP-9100-v2_0_3.nc", 43 | "S1-GUNW-A-R-160-tops-20141109_20141028-154747-29736N_27680N-PP-96ec-v2_0_2.nc", 44 | "S1-GUNW-A-R-160-tops-20141109_20141016-154747-29736N_27680N-PP-53d5-v2_0_2.nc", 45 | "S1-GUNW-A-R-160-tops-20141109_20141028-154747-29736N_27680N-PP-96ec-v2_0_3.nc", 46 | "S1-GUNW-A-R-160-tops-20141109_20141016-154747-29736N_27680N-PP-53d5-v2_0_3.nc", 47 | "S1-GUNW-A-R-160-tops-20141109_20141028-154812-31230N_29344N-PP-616a-v2_0_2.nc", 48 | "S1-GUNW-A-R-160-tops-20141109_20141016-154812-31230N_29344N-PP-7c1e-v2_0_2.nc", 49 | "S1-GUNW-A-R-160-tops-20141109_20141028-154812-31230N_29344N-PP-616a-v2_0_3.nc", 50 | "S1-GUNW-A-R-160-tops-20141109_20141016-154812-31230N_29344N-PP-7c1e-v2_0_3.nc", 51 | 
"S1-GUNW-A-R-160-tops-20141109_20141028-154837-32724N_30840N-PP-f2b7-v2_0_2.nc", 52 | "S1-GUNW-A-R-160-tops-20141109_20141016-154837-32724N_30840N-PP-3d94-v2_0_2.nc", 53 | "S1-GUNW-A-R-160-tops-20141109_20141028-154837-32724N_30840N-PP-f2b7-v2_0_3.nc", 54 | "S1-GUNW-A-R-160-tops-20141109_20141016-154837-32724N_30840N-PP-3d94-v2_0_3.nc", 55 | "S1-GUNW-A-R-014-tops-20141111_20141030-152945-24165N_21930N-PP-c685-v2_0_2.nc", 56 | "S1-GUNW-A-R-014-tops-20141111_20141030-153011-25661N_23595N-PP-f017-v2_0_2.nc", 57 | "S1-GUNW-A-R-014-tops-20141111_20141030-153037-27156N_25261N-PP-0737-v2_0_2.nc", 58 | "S1-GUNW-A-R-014-tops-20141111_20141030-153101-28651N_26759N-PP-6a25-v2_0_2.nc", 59 | "S1-GUNW-D-R-032-tops-20141112_20141019-214636-15317N_13295N-PP-a82c-v2_0_2.nc", 60 | "S1-GUNW-A-R-041-tops-20141113_20141020-115721-29806N_27584N-PP-a54f-v2_0_3.nc", 61 | "S1-GUNW-A-R-041-tops-20141113_20141020-115748-31300N_29248N-PP-6040-v2_0_3.nc", 62 | "S1-GUNW-A-R-041-tops-20141113_20141020-115813-32793N_30910N-PP-a498-v2_0_1.nc", 63 | "S1-GUNW-A-R-041-tops-20141113_20141020-115813-32793N_30910N-PP-a4e3-v2_0_3.nc", 64 | "S1-GUNW-A-R-041-tops-20141113_20141020-115838-34285N_32405N-PP-1051-v2_0_1.nc", 65 | "S1-GUNW-A-R-041-tops-20141113_20141020-115838-34285N_32405N-PP-350c-v2_0_3.nc", 66 | "S1-GUNW-A-R-041-tops-20141113_20141020-115903-35778N_33900N-PP-21bc-v2_0_1.nc", 67 | "S1-GUNW-A-R-041-tops-20141113_20141020-115903-35778N_33900N-PP-cf7d-v2_0_3.nc", 68 | "S1-GUNW-A-R-041-tops-20141113_20141020-115928-37270N_35394N-PP-c5ca-v2_0_1.nc", 69 | "S1-GUNW-A-R-041-tops-20141113_20141020-115928-37270N_35394N-PP-a999-v2_0_3.nc", 70 | "S1-GUNW-A-R-041-tops-20141113_20141020-115953-38760N_36887N-PP-b3f4-v2_0_1.nc", 71 | "S1-GUNW-A-R-041-tops-20141113_20141020-115953-38760N_36887N-PP-8805-v2_0_3.nc", 72 | "S1-GUNW-A-R-041-tops-20141113_20141020-120018-40250N_38380N-PP-a44f-v2_0_1.nc", 73 | "S1-GUNW-A-R-041-tops-20141113_20141020-120018-40250N_38380N-PP-56cb-v2_0_3.nc", 74 | 
"S1-GUNW-A-R-041-tops-20141113_20141020-120043-41741N_39871N-PP-7827-v2_0_1.nc", 75 | "S1-GUNW-A-R-041-tops-20141113_20141020-120043-41741N_39871N-PP-d1c9-v2_0_3.nc", 76 | "S1-GUNW-A-R-041-tops-20141113_20141020-120108-43231N_41362N-PP-c81b-v2_0_1.nc", 77 | "S1-GUNW-A-R-041-tops-20141113_20141020-120108-43231N_41362N-PP-f424-v2_0_3.nc", 78 | "S1-GUNW-A-R-041-tops-20141113_20141020-120133-44720N_42852N-PP-c59a-v2_0_1.nc", 79 | "S1-GUNW-A-R-041-tops-20141113_20141020-120133-44720N_42852N-PP-0507-v2_0_3.nc", 80 | "S1-GUNW-A-R-041-tops-20141113_20141020-120158-46209N_44341N-PP-ff3f-v2_0_3.nc", 81 | "S1-GUNW-A-R-041-tops-20141113_20141020-120223-47861N_45830N-PP-dc1b-v2_0_3.nc", 82 | "S1-GUNW-A-R-041-tops-20141113_20141020-120251-49679N_47316N-PP-defa-v2_0_3.nc", 83 | "S1-GUNW-D-R-079-tops-20141116_20141023-030719-16084N_14064N-PP-3545-v2_0_2.nc", 84 | "S1-GUNW-D-R-079-tops-20141116_20141023-030746-14420N_12395N-PP-75c0-v2_0_2.nc", 85 | "S1-GUNW-D-R-079-tops-20141116_20141023-030811-12922N_10895N-PP-a98f-v2_0_2.nc", 86 | "S1-GUNW-D-R-079-tops-20141116_20141023-030836-11425N_09393N-PP-3df1-v2_0_2.nc", 87 | "S1-GUNW-D-R-079-tops-20141116_20141023-030901-09928N_07891N-PP-7d6c-v2_0_2.nc", 88 | "S1-GUNW-D-R-079-tops-20141116_20141023-030926-08430N_06390N-PP-5c6d-v2_0_2.nc", 89 | "S1-GUNW-D-R-083-tops-20141116_20141023-095326-24087S_26269S-PP-7c53-v2_0_2.nc", 90 | "S1-GUNW-D-R-083-tops-20141116_20141023-095351-25567S_27757S-PP-3f05-v2_0_2.nc", 91 | "S1-GUNW-D-R-083-tops-20141116_20141023-095416-27045S_29243S-PP-4328-v2_0_2.nc", 92 | "S1-GUNW-D-R-083-tops-20141116_20141023-095441-28522S_30729S-PP-767e-v2_0_2.nc", 93 | "S1-GUNW-D-R-083-tops-20141116_20141023-095506-29996S_32212S-PP-7a95-v2_0_2.nc", 94 | "S1-GUNW-D-R-083-tops-20141116_20141023-095531-31468S_33694S-PP-0965-v2_0_2.nc", 95 | "S1-GUNW-D-R-083-tops-20141116_20141023-095556-33101S_35173S-PP-3928-v2_0_2.nc", 96 | "S1-GUNW-D-R-083-tops-20141116_20141023-095621-34568S_36651S-PP-d3dd-v2_0_2.nc", 97 | 
"S1-GUNW-D-R-083-tops-20141116_20141023-095646-360325S_38126S-PP-24b3-v2_0_2.nc", 98 | "S1-GUNW-D-R-083-tops-20141116_20141023-095711-37493S_39600S-PP-c4b4-v2_0_2.nc", 99 | "S1-GUNW-D-R-083-tops-20141116_20141023-095736-38950S_41233S-PP-e380-v2_0_2.nc", 100 | "S1-GUNW-A-R-087-tops-20141116_20141011-153856-27533N_25472N-PP-b6df-v2_0_2.nc"] 101 | -------------------------------------------------------------------------------- /release-notes/release_notes-1.15.0.txt: -------------------------------------------------------------------------------- 1 | 2 | 1.15.0 release (24 September 2018) 3 | 4 | Welcome to the Hyrax 1.15.0 release 5 | 6 | This release, Hyrax-1.15, contains new features and bug fixes. 7 | 8 | The complete server documentation can be found on line at 9 | https://opendap.github.io/hyrax_guide/Master_Hyrax_Guide.html 10 | 11 | New Features 12 | 13 | CoverageJSON - For datasets that contain geo-spatial data, we now 14 | provide the option to get those data (and related metadata) encoded 15 | using the covjson format. Thanks to Corey Hemphill, Riley Rimer, and 16 | Lewis McGibbney for this contribution. 17 | 18 | JSON-LD - With this release Hyrax adds JSON-LD content to every 19 | browser navigable catalog page ( i.e. "*/contents.html") and to every 20 | dataset/granule OPeNDAP Data Access Form. This feature along with the 21 | site map generation can be used to assist search engines like Google 22 | to catalog/index/find the data you wish the world to access. 23 | 24 | New Data Access Form - The OPeNDAP Data Access Form is renewed. A more 25 | compact and response UI that we hope to be improving even more in the 26 | months to come. 27 | 28 | NASA Earthdata Login (URS) Support - In addition to utilizing the 29 | mod_auth_urs Apache module to provide Earthdata Login services Hyrax 30 | now has it's own standalone implementation of the Earthdata Login 31 | client. 
This means that it can be deployed using only Tomcat (or other 32 | servlet Engine) and still work with Earthdata Login with out binding 33 | it to an instance of Apache httpd. 34 | 35 | MetaData Store (MDS) - A new cache has been added to the BES for 36 | Metadata Responses (aka, the MDS or MetaData Store). This cache is 37 | unlike the other BES caches in that it is intended to be operated as 38 | either a 'cache' or a 'store.' In the latter case, items added will 39 | never be removed - it is an open-ended place where metadata response 40 | objects will be kept indefinitely. The MDS contents (as a cache or a 41 | store) will persist through Hyrax restarts. 42 | 43 | Prototype support for data stored on Amazon's S3 Web Object Store - 44 | Hyrax 1.15 has prototype support for subset-in-place of HDF5 and 45 | NetCDF4 data files that are stored on AWS S3. Data files stored in S3 46 | must be configured for use with this feature of Hyrax. To configure 47 | files for us with this feature, a special metadata file must be built 48 | and installed in the MDS. For this feature, the MDS should be run in 49 | 'store' mode. Note: The software has a known problem with requests for 50 | very large numbers of variables in a single URL. Contact us for 51 | details. We welcome feedback on this new feature. 52 | 53 | Improved catalog support - We have generalized the Hyrax catalog 54 | system so that it is much easier to extend. As an example, we have 55 | implemented a new module that reads information about datasets from 56 | NASA's Common Metadata Repository (CMR) and uses that to display 57 | virtual directories for NASA data holdings. This software is currently 58 | available in source form only - contact us if you would like to extend 59 | the Hyrax Catalog system for your own data collections. 60 | 61 | Additional support for CF for HDF4 and HDF5 files 62 | 63 | HDF4 CF option - Enhance the support of handling the scale_factor and 64 | add_offset to follow the CF. 
The scale_factor and add_offset rule for 65 | the MOD16A3 product is different than other MODIS products. We make an 66 | exception for this product only to ensure the scale_factor and 67 | add_offset follow the CF conventions. 68 | 69 | For other the new features and fixes see the section titled “What’s 70 | new for Hyrax 1.15.0” in the HDF4 handler README files: 71 | https://github.com/OPENDAP/hdf4_handler/blob/master/README 72 | 73 | HDF5 CF option - Added support of the HDF-EOS5 Polar Stereographic(PS) 74 | and Lambert Azimuthal Equal Area(LAMAZ) grid projection files. Both 75 | projection files can be found in NASA LANCE products. Added HDF-EOS5 76 | grid latitude and longitude cache support. This is the same as what we 77 | did for the HDF-EOS2 grid. Added support for TROP-OMI, new OMI level 2 78 | and OMPS-NPP product. Removed the internal reserved netCDF-4 79 | attributes for DAP output. Made the behavior of the "drop long string" 80 | BES key consistent with the current limitation of netCDF Java. 81 | 82 | For other the new features and fixes see the section titled “What’s 83 | new for Hyrax 1.15.0” in the HDF5 handler README files: 84 | https://github.com/OPENDAP/hdf5_handler/blob/master/README 85 | 86 | Bug Fixes 87 | 88 | About 40 bugs have been fixed for this release. 89 | 90 | Specific tickets fixed: 91 | 92 | HYRAX-10 The fileout netCDF-4 doesn't generate the correct dimensions for aggregated files 93 | HYRAX-247 elements in a constrained DMR sometimes have 'random' order 94 | HYRAX-248 fileout_gdal seems to build broken JP2k files 95 | HYRAX-362 Make the GeoTiff (GDAL) handler work with NCML aggregations 96 | HYRAX-554 BES now includes DMR++; hack the configure script WRT libcurl 97 | HYRAX-561 The fileout_netcdf, and/or the ncml_handler code does not clean up the temporary netCDF result file if the requesting client is killed during the transaction. 
98 | HYRAX-588 Gateway HTML Form Rendering Failure 99 | HYRAX-591 Tests that create files fail 'make distcheck' (e.g., the tests for HYRAX-561) 100 | HYRAX-595 The "SHOW HELP" button in the DAP2 Data Request Form points to a broken link 101 | HYRAX-598 NULL pointer dereference in D4ParserSax2 102 | HYRAX-599 Symbolic links to data not showing up in Hyrax 1.14.0 RPMs on CentOS 7 103 | HYRAX-600 Unable to startup Hyrax installed from RPM on boot on Centos OS 7 104 | HYRAX-603 The OLFS authentication code is no longer compatible with the current deployment "bootstrap". 105 | HYRAX-612 Renaming the result of an aggregation (only join new?) fails. 106 | HYRAX-613 OLFS installation bootstrap is broken on CentOS-7 107 | HYRAX-621 Replace logo.gif with a transparent logo.png in Hyrax 108 | HYRAX-623 Fix the CI build 109 | HYRAX-630 The Keywords feature of libdap is hosed 110 | HYRAX-645 Build issue causes make -j check to fail 111 | HYRAX-646 Target collect-coverage-data doesn't work 112 | HYRAX-647 The DDS print_das() method does not produce the same DAS as the DAS::print() method for Grids sometimes 113 | HYRAX-648 MDS tests 61 and 62 fail on the first run of ./testsuite --conf=bes.mds.conf 114 | HYRAX-670 Reading values from olfs.xml file is fragile 115 | HYRAX-692 cppunit-config is no longer present in cppunit 116 | HYRAX-721 The implementation of Sequence in the XSLT based Data Request Form (IFH) is broken 117 | HYRAX-723 The DMR++ parser doesn't see a newline (cr?) as whitepspace. 118 | HYRAX-745 Broker service needs to make correct links for data access. 119 | HYRAX-755 The build_dmrpp code seems to fail on DMRs/Files with several variables. 120 | HYRAX-756 get_dmrpp fails on datasets where variables are not in the root group 121 | HYRAX-764 Fileout_netcdf returns empty response for dataset when no query is provided. 122 | HYRAX-767 Change the Data Request Form code (all 3 versions) so that it URL encodes the query before using it. 
123 | HYRAX-775 The DMR response from the MDS has the xml:base attribute in a random place. 124 | HYRAX-790 geogrid is failing in OLFS regression tests 125 | HYRAX-791 w10n syntax collides with URI encoding rules enforced by recent Tomcat 126 | HYRAX-794 Some tests regarding the enum type in the netcdf handler fail randomly 127 | HYRAX-801 ASAN Reveals 104 issues in the BES 128 | HYRAX-802 Issues remain in the Aggregation rename bug 129 | HYRAX-803 NcML Memory errors 130 | HYRAX-804 Error in ResponseBuilderTest - a unit test 131 | HYRAX-805 fileout_netcdf memory errors 132 | HYRAX-818 showNode removes catalog name from the path name of the node - stop it. 133 | HYRAX-833 Update baselines for BES cmdlm tests 134 | HYRAX-837 libdap seems to return DAP2 and DAP3.2 DDX responses in kind of random way. 135 | HYRAX-844 THREDDS catalog produced by Hyrax no longer work with Unidata code because of time zone issues 136 | HYRAX-845 BESUtil::get_time() has a pointer problem 137 | HYRAX-851 Memory leak in BESCatalog 138 | 139 | Required External Dependencies in order to run Hyrax 1.15.0, you will need: 140 | 141 | Java 1.7 or greater 142 | Tomcat 7.x or 8.x Note: On CentOS 6.6 use Java 7 + Tomcat 7 if you're using yum to install software. 143 | Linux (We provide RPMs for CentOS 6.9 and 7.13; install them with yum), Ubuntu, OSX or another suitable Unix OS. 144 | -------------------------------------------------------------------------------- /release-notes/release-notes-1.15.1.txt: -------------------------------------------------------------------------------- 1 | 2 | 1.15.0 release (26 November 2018) 3 | 4 | Welcome to the Hyrax 1.15.1 release 5 | 6 | This release, Hyrax-1.15.1, contains bug fixes for 1.15.0. 7 | 8 | The MDS (Metadata Store) was installed incorrectly from the 9 | CentOS rpm packages; fixed. 
10 | 11 | This version of the server also includes the first developer (i.e., 12 | source) release of the BALTO brokering code and an alpha version of 13 | JSON-LD support for BALTO/Hyrax. 14 | 15 | Specific tickets fixed for 1.15.1 16 | 17 | HK-277 hyrax-dependencies on the web site are out of date 18 | HK-269 The current BES RPM package installs with MDS unwritable 19 | HK-258 Modify roi() server-side function so that it is more flexible in the kinds of arguments it can use 20 | HK-215, HK-246 Support subsetting a 3D+ variable using the roi() function 21 | HK-268, HK-108, HK-257, HK-136, HK-224, HK-249, HK-152, HK-222 Implement Continuous Delivery for Hyrax binary packages 22 | HK-214, Add Debian package build to libdap and BES 23 | HK-103, Build an initial version of the BALTO broker for two different data source types: Hyrax and Files accessible using HTTP 24 | HK-22 The max_response_size limit is not working. 25 | HK-1, HK-2, HK-3, HK-10 Data Transfer Errors in DMR++ 26 | 27 | Information from the new features in 1.15 follows: 28 | 29 | The complete server documentation can be found on line at 30 | https://opendap.github.io/hyrax_guide/Master_Hyrax_Guide.html 31 | 32 | New Features 33 | 34 | CoverageJSON - For datasets that contain geo-spatial data, we now 35 | provide the option to get those data (and related metadata) encoded 36 | using the covjson format. Thanks to Corey Hemphill, Riley Rimer, and 37 | Lewis McGibbney for this contribution. 38 | 39 | JSON-LD - With this release Hyrax adds JSON-LD content to every 40 | browser navigable catalog page ( i.e. "*/contents.html") and to every 41 | dataset/granule OPeNDAP Data Access Form. This feature along with the 42 | site map generation can be used to assist search engines like Google 43 | to catalog/index/find the data you wish the world to access. 44 | 45 | New Data Access Form - The OPeNDAP Data Access Form is renewed. 
A more 46 | compact and response UI that we hope to be improving even more in the 47 | months to come. 48 | 49 | NASA Earthdata Login (URS) Support - In addition to utilizing the 50 | mod_auth_urs Apache module to provide Earthdata Login services Hyrax 51 | now has it's own standalone implementation of the Earthdata Login 52 | client. This means that it can be deployed using only Tomcat (or other 53 | servlet Engine) and still work with Earthdata Login with out binding 54 | it to an instance of Apache httpd. 55 | 56 | MetaData Store (MDS) - A new cache has been added to the BES for 57 | Metadata Responses (aka, the MDS or MetaData Store). This cache is 58 | unlike the other BES caches in that it is intended to be operated as 59 | either a 'cache' or a 'store.' In the latter case, items added will 60 | never be removed - it is an open-ended place where metadata response 61 | objects will be kept indefinitely. The MDS contents (as a cache or a 62 | store) will persist through Hyrax restarts. 63 | 64 | Prototype support for data stored on Amazon's S3 Web Object Store - 65 | Hyrax 1.15 has prototype support for subset-in-place of HDF5 and 66 | NetCDF4 data files that are stored on AWS S3. Data files stored in S3 67 | must be configured for use with this feature of Hyrax. To configure 68 | files for us with this feature, a special metadata file must be built 69 | and installed in the MDS. For this feature, the MDS should be run in 70 | 'store' mode. Note: The software has a known problem with requests for 71 | very large numbers of variables in a single URL. Contact us for 72 | details. We welcome feedback on this new feature. 73 | 74 | Improved catalog support - We have generalized the Hyrax catalog 75 | system so that it is much easier to extend. As an example, we have 76 | implemented a new module that reads information about datasets from 77 | NASA's Common Metadata Repository (CMR) and uses that to display 78 | virtual directories for NASA data holdings. 
This software is currently 79 | available in source form only - contact us if you would like to extend 80 | the Hyrax Catalog system for your own data collections. 81 | 82 | Additional support for CF for HDF4 and HDF5 files 83 | 84 | HDF4 CF option - Enhance the support of handling the scale_factor and 85 | add_offset to follow the CF. The scale_factor and add_offset rule for 86 | the MOD16A3 product is different than other MODIS products. We make an 87 | exception for this product only to ensure the scale_factor and 88 | add_offset follow the CF conventions. 89 | 90 | For other the new features and fixes see the section titled “What’s 91 | new for Hyrax 1.15.0” in the HDF4 handler README files: 92 | https://github.com/OPENDAP/hdf4_handler/blob/master/README 93 | 94 | HDF5 CF option - Added support of the HDF-EOS5 Polar Stereographic(PS) 95 | and Lambert Azimuthal Equal Area(LAMAZ) grid projection files. Both 96 | projection files can be found in NASA LANCE products. Added HDF-EOS5 97 | grid latitude and longitude cache support. This is the same as what we 98 | did for the HDF-EOS2 grid. Added support for TROP-OMI, new OMI level 2 99 | and OMPS-NPP product. Removed the internal reserved netCDF-4 100 | attributes for DAP output. Made the behavior of the "drop long string" 101 | BES key consistent with the current limitation of netCDF Java. 102 | 103 | For other the new features and fixes see the section titled “What’s 104 | new for Hyrax 1.15.0” in the HDF5 handler README files: 105 | https://github.com/OPENDAP/hdf5_handler/blob/master/README 106 | 107 | Bug Fixes 108 | 109 | About 40 bugs have been fixed for this release. 
110 | 111 | Specific tickets fixed: 112 | 113 | HYRAX-10 The fileout netCDF-4 doesn't generate the correct dimensions for aggregated files 114 | HYRAX-247 elements in a constrained DMR sometimes have 'random' order 115 | HYRAX-248 fileout_gdal seems to build broken JP2k files 116 | HYRAX-362 Make the GeoTiff (GDAL) handler work with NCML aggregations 117 | HYRAX-554 BES now includes DMR++; hack the configure script WRT libcurl 118 | HYRAX-561 The fileout_netcdf, and/or the ncml_handler code does not clean up the temporary netCDF result file if the requesting client is killed during the transaction. 119 | HYRAX-588 Gateway HTML Form Rendering Failure 120 | HYRAX-591 Tests that create files fail 'make distcheck' (e.g., the tests for HYRAX-561) 121 | HYRAX-595 The "SHOW HELP" button in the DAP2 Data Request Form points to a broken link 122 | HYRAX-598 NULL pointer dereference in D4ParserSax2 123 | HYRAX-599 Symbolic links to data not showing up in Hyrax 1.14.0 RPMs on CentOS 7 124 | HYRAX-600 Unable to startup Hyrax installed from RPM on boot on Centos OS 7 125 | HYRAX-603 The OLFS authentication code is no longer compatible with the current deployment "bootstrap". 126 | HYRAX-612 Renaming the result of an aggregation (only join new?) fails. 
127 | HYRAX-613 OLFS installation bootstrap is broken on CentOS-7 128 | HYRAX-621 Replace logo.gif with a transparent logo.png in Hyrax 129 | HYRAX-623 Fix the CI build 130 | HYRAX-630 The Keywords feature of libdap is hosed 131 | HYRAX-645 Build issue causes make -j check to fail 132 | HYRAX-646 Target collect-coverage-data doesn't work 133 | HYRAX-647 The DDS print_das() method does not produce the same DAS as the DAS::print() method for Grids sometimes 134 | HYRAX-648 MDS tests 61 and 62 fail on the first run of ./testsuite --conf=bes.mds.conf 135 | HYRAX-670 Reading values from olfs.xml file is fragile 136 | HYRAX-692 cppunit-config is no longer present in cppunit 137 | HYRAX-721 The implementation of Sequence in the XSLT based Data Request Form (IFH) is broken 138 | HYRAX-723 The DMR++ parser doesn't see a newline (cr?) as whitepspace. 139 | HYRAX-745 Broker service needs to make correct links for data access. 140 | HYRAX-755 The build_dmrpp code seems to fail on DMRs/Files with several variables. 141 | HYRAX-756 get_dmrpp fails on datasets where variables are not in the root group 142 | HYRAX-764 Fileout_netcdf returns empty response for dataset when no query is provided. 143 | HYRAX-767 Change the Data Request Form code (all 3 versions) so that it URL encodes the query before using it. 144 | HYRAX-775 The DMR response from the MDS has the xml:base attribute in a random place. 145 | HYRAX-790 geogrid is failing in OLFS regression tests 146 | HYRAX-791 w10n syntax collides with URI encoding rules enforced by recent Tomcat 147 | HYRAX-794 Some tests regarding the enum type in the netcdf handler fail randomly 148 | HYRAX-801 ASAN Reveals 104 issues in the BES 149 | HYRAX-802 Issues remain in the Aggregation rename bug 150 | HYRAX-803 NcML Memory errors 151 | HYRAX-804 Error in ResponseBuilderTest - a unit test 152 | HYRAX-805 fileout_netcdf memory errors 153 | HYRAX-818 showNode removes catalog name from the path name of the node - stop it. 
154 | HYRAX-833 Update baselines for BES cmdlm tests 155 | HYRAX-837 libdap seems to return DAP2 and DAP3.2 DDX responses in kind of random way. 156 | HYRAX-844 THREDDS catalog produced by Hyrax no longer work with Unidata code because of time zone issues 157 | HYRAX-845 BESUtil::get_time() has a pointer problem 158 | HYRAX-851 Memory leak in BESCatalog 159 | 160 | Required External Dependencies in order to run Hyrax 1.15.0, you will need: 161 | 162 | Java 1.7 or greater 163 | Tomcat 7.x or 8.x Note: On CentOS 6.6 use Java 7 + Tomcat 7 if you're using yum to install software. 164 | Linux (We provide RPMs for CentOS 6.9 and 7.13; install them with yum), Ubuntu, OSX or another suitable Unix OS. 165 | -------------------------------------------------------------------------------- /python-xarray/balto_opendap_s3.py: -------------------------------------------------------------------------------- 1 | 2 | from netCDF4 import Dataset 3 | import xarray as xa 4 | import dask 5 | 6 | # Allows us to visualize the dask progress for parallel operations 7 | from dask.diagnostics import ProgressBar 8 | 9 | ProgressBar().register() 10 | 11 | # from dask.distributed import Client 12 | # client = Client(memory_limit=10e10, processes=False) # Note: was 6e9 13 | # client"n 14 | 15 | # https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T1NXFLX.5.12.4/contents.html 16 | 17 | url = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/opendap/MERRA2/M2T1NXFLX.5.12.4/1984/11/MERRA2_100.tavg1_2d_flx_Nx.198411' 18 | 19 | # from datetime import date, timedelta 20 | 21 | # files = [] 22 | # d = date.fromisoformat('1984-11-01') 23 | # while True: 24 | # 25 | # files.append(f'{url}{str(d.day).zfill(2)}.nc4') 26 | # d = d + timedelta(days=1) 27 | # 28 | # if d.month == 12: 29 | # break 30 | 31 | files = ["http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190101v7.nc.dmrpp", 32 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190102v7.nc.dmrpp", 33 | 
"http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190103v7.nc.dmrpp", 34 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190104v7.nc.dmrpp", 35 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190105v7.nc.dmrpp", 36 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190106v7.nc.dmrpp", 37 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190107v7.nc.dmrpp", 38 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190108v7.nc.dmrpp", 39 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190109v7.nc.dmrpp", 40 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190110v7.nc.dmrpp", 41 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190111v7.nc.dmrpp", 42 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190112v7.nc.dmrpp", 43 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190113v7.nc.dmrpp", 44 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190114v7.nc.dmrpp", 45 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190115v7.nc.dmrpp", 46 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190116v7.nc.dmrpp", 47 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190117v7.nc.dmrpp", 48 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190118v7.nc.dmrpp", 49 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190119v7.nc.dmrpp", 50 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190120v7.nc.dmrpp", 51 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190121v7.nc.dmrpp", 52 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190122v7.nc.dmrpp", 53 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190123v7.nc.dmrpp", 54 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190124v7.nc.dmrpp", 55 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190125v7.nc.dmrpp", 56 | 
"http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190126v7.nc.dmrpp", 57 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190127v7.nc.dmrpp", 58 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190128v7.nc.dmrpp", 59 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190129v7.nc.dmrpp", 60 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190130v7.nc.dmrpp", 61 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190131v7.nc.dmrpp", 62 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190201v7.nc.dmrpp", 63 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190202v7.nc.dmrpp", 64 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190203v7.nc.dmrpp", 65 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190204v7.nc.dmrpp", 66 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190205v7.nc.dmrpp", 67 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190206v7.nc.dmrpp", 68 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190207v7.nc.dmrpp", 69 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190208v7.nc.dmrpp", 70 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190209v7.nc.dmrpp", 71 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190210v7.nc.dmrpp", 72 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190211v7.nc.dmrpp", 73 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190212v7.nc.dmrpp", 74 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190213v7.nc.dmrpp", 75 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190214v7.nc.dmrpp", 76 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190215v7.nc.dmrpp", 77 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190216v7.nc.dmrpp", 78 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190217v7.nc.dmrpp", 79 | 
"http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190218v7.nc.dmrpp", 80 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190219v7.nc.dmrpp", 81 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190220v7.nc.dmrpp", 82 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190221v7.nc.dmrpp", 83 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190222v7.nc.dmrpp", 84 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190223v7.nc.dmrpp", 85 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190224v7.nc.dmrpp", 86 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190225v7.nc.dmrpp", 87 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190226v7.nc.dmrpp", 88 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190227v7.nc.dmrpp", 89 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190228v7.nc.dmrpp", 90 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190301v7.nc.dmrpp", 91 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190302v7.nc.dmrpp", 92 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190303v7.nc.dmrpp", 93 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190304v7.nc.dmrpp", 94 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190305v7.nc.dmrpp", 95 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190306v7.nc.dmrpp", 96 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190308v7.nc.dmrpp", 97 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190309v7.nc.dmrpp", 98 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190310v7.nc.dmrpp", 99 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190311v7.nc.dmrpp", 100 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190312v7.nc.dmrpp", 101 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190313v7.nc.dmrpp", 102 | 
"http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190314v7.nc.dmrpp", 103 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190315v7.nc.dmrpp", 104 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190316v7.nc.dmrpp", 105 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190317v7.nc.dmrpp", 106 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190318v7.nc.dmrpp", 107 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190319v7.nc.dmrpp", 108 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190320v7.nc.dmrpp", 109 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190321v7.nc.dmrpp", 110 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190322v7.nc.dmrpp", 111 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190323v7.nc.dmrpp", 112 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190324v7.nc.dmrpp", 113 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190325v7.nc.dmrpp", 114 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190326v7.nc.dmrpp", 115 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190327v7.nc.dmrpp", 116 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190328v7.nc.dmrpp", 117 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190329v7.nc.dmrpp", 118 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190330v7.nc.dmrpp", 119 | "http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190331v7.nc.dmrpp"] 120 | 121 | # files = ["http://balto.opendap.org/opendap/ssmis/dmrpp_s3/f17_ssmis_20190102v7.nc.dmrpp"] 122 | 123 | # import getpass 124 | 125 | # username = input("URS Username: ") 126 | # password = getpass.getpass("URS Password: ") 127 | 128 | from pydap.client import open_url 129 | # from pydap.cas.urs import setup_session 130 | 131 | # session = setup_session(username, password, check_url=files[0]) 132 | # gesdisc_data = 
xa.open_mfdataset(files, engine='pydap', parallel=True, combine='by_coords', 133 | # backend_kwargs={'session': session}) 134 | 135 | ssmis_data = xa.open_mfdataset(files, engine='pydap', parallel=True, combine='by_coords') 136 | 137 | print(ssmis_data) 138 | 139 | sst_dtime = ssmis_data.sst_dtime.sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 140 | sst_dtime_mean = sst_dtime.mean(dim=['latitude', 'longitude']) 141 | sst_dtime_mean.plot.line() 142 | 143 | -------------------------------------------------------------------------------- /python-xarray/ssmis_ngap_west_cloud.py: -------------------------------------------------------------------------------- 1 | 2 | import xarray as xa 3 | import sys 4 | 5 | # Get the granule names 6 | from ssmis_granules import f16_ssmis_100 7 | from pydap.client import open_url 8 | from pydap.cas.urs import setup_session 9 | import os 10 | import glob 11 | 12 | 13 | def clean_cache(): 14 | files = glob.glob('/tmp/hyrax_http/*') 15 | 16 | for f in files: 17 | try: 18 | # f.unlink() 19 | os.unlink(f) 20 | except OSError as e: 21 | print("Error: %s : %s" % (f, e.strerror)) 22 | 23 | 24 | base_url = "" 25 | suffix = "" 26 | f = False # results output file 27 | 28 | # -l switch 29 | def ngap_localhost(): 30 | global base_url 31 | global suffix 32 | # This is the base url for the NGAP service which is attached to prod. 33 | ngap_service_base = 'http://localhost:8080/opendap/ngap/providers/GHRC_CLOUD/collections/' \ 34 | 'RSS%20SSMIS%20OCEAN%20PRODUCT%20GRIDS%20DAILY%20FROM%20DMSP%20F16%20NETCDF%20V7/granules/' 35 | base_url = ngap_service_base 36 | suffix = "" 37 | print("Using NGAP Service (localhost:8080)") 38 | 39 | # -n switch 40 | def ngap_service_west(): 41 | global base_url 42 | global suffix 43 | # This is the base url for the NGAP service which is attached to prod. 
44 | ngap_service_base = 'http://ngap-west.opendap.org/opendap/ngap/providers/GHRC_CLOUD/collections/' \ 45 | 'RSS%20SSMIS%20OCEAN%20PRODUCT%20GRIDS%20DAILY%20FROM%20DMSP%20F16%20NETCDF%20V7/granules/' 46 | base_url = ngap_service_base 47 | suffix = "" 48 | print("Using NGAP Service (us-west-2)") 49 | 50 | # -m switch 51 | def ngap_service_uat(): 52 | global base_url 53 | global suffix 54 | # This is the base url for the NGAP service which is attached to prod. 55 | ngap_service_base = 'https://opendap.uat.earthdata.nasa.gov/providers/GHRC_CLOUD/collections/' \ 56 | 'RSS%20SSMIS%20OCEAN%20PRODUCT%20GRIDS%20DAILY%20FROM%20DMSP%20F16%20NETCDF%20V7/granules/' 57 | base_url = ngap_service_base 58 | suffix = "" 59 | print("Using NGAP Service (UAT)") 60 | 61 | # -s switch 62 | def s3_bucket(): 63 | global base_url 64 | global suffix 65 | 66 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 67 | # point to objects in an opendap S3 bucket called ngap-ssmis-west 68 | s3_bucket_base = "http://ngap-west.opendap.org/opendap/ssmis/ngap-ssmis-west/" 69 | base_url = s3_bucket_base 70 | suffix=".dmrpp" 71 | print("Using S3 Bucket ngap-ssmis-west") 72 | 73 | 74 | # -t switch 75 | def tea_prod(): 76 | global base_url 77 | global suffix 78 | 79 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 80 | # point to the TEA endpoint for PROD. URLs from TEA are cached. 81 | tea_prod_base = "http://ngap-west.opendap.org/opendap/ssmis/tea-prod/" 82 | base_url = tea_prod_base 83 | suffix=".dmrpp" 84 | print("Using TEA in PROD") 85 | 86 | # -u switch 87 | def tea_uat(): 88 | global base_url 89 | global suffix 90 | 91 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 92 | # point to the TEA endpoint for PROD. URLs from TEA are cached. 
93 | tea_prod_base = "http://ngap-west.opendap.org/opendap/ssmis/tea-uat/" 94 | base_url = tea_prod_base 95 | suffix=".dmrpp" 96 | print("Using TEA in UAT") 97 | 98 | # -p switch 99 | def tea_apigw(): 100 | global base_url 101 | global suffix 102 | 103 | # This is the base URL for the collection of dmr++ files whose dmrpp:href urls 104 | # point to the TEA endpoint for PROD. URLs from TEA are cached. 105 | tea_prod_base = "http://ngap-west.opendap.org/opendap/ssmis/tea-apigw/" 106 | base_url = tea_prod_base 107 | suffix=".dmrpp" 108 | print("Using TEA in API Gateway") 109 | 110 | # -g switch 111 | def granules(): 112 | global base_url 113 | global suffix 114 | # This is the base URL for the collection of source netcdf-4 granule 115 | # files. 116 | granule_files_base = "http://ngap-west.opendap.org/opendap/ssmis/granules/" 117 | base_url = granule_files_base 118 | suffix="" 119 | print("Using Granules") 120 | 121 | 122 | def get_the_things(): 123 | import webob 124 | import time 125 | 126 | global base_url 127 | global suffix 128 | global f # results file 129 | 130 | print("base_url: ", base_url, sep="") 131 | print(" suffix: ", suffix, sep="") 132 | 133 | username = os.environ.get('USER') 134 | password = os.environ.get('PWORD') 135 | 136 | # print("username: ",username,sep="") 137 | # print("password: ",password,sep="") 138 | 139 | do_auth = True 140 | if username is not None and password is not None : 141 | print("Using credentials for '", username, "'", sep="") 142 | else: 143 | print("No (complete) authentication credentials available.") 144 | do_auth = False 145 | 146 | 147 | # Allows us to visualize the dask progress for parallel operations 148 | from dask.diagnostics import ProgressBar 149 | 150 | ProgressBar().register() 151 | 152 | # OPeNDAP In the Cloud 153 | 154 | od_files = [] 155 | 156 | for g in f16_ssmis_100: 157 | od_files.append(base_url + g + suffix) 158 | 159 | print(" first:", od_files[0], '\n', " last:", od_files[-1]) 160 | try: 161 | tic = 
time.perf_counter() 162 | 163 | # open_mfdataset() == open multi-file dataset. 164 | # both open_datasets and ...mfdataset use netcdf4 as the default engine and that 165 | # should be able to open DAP URLS. jhrg 1/24/22 166 | if do_auth: 167 | session = setup_session(username, password, check_url=od_files[0]) 168 | session.headers.update({'Accept-Encoding': 'deflate'}) 169 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords', backend_kwargs={'session': session}) 170 | else: 171 | cloud_data = xa.open_mfdataset(od_files, engine='pydap', parallel=True, combine='by_coords') 172 | 173 | cloud_ws = cloud_data['wind_speed'].sel(latitude=slice(-53.99, -14), longitude=slice(140, 170)) 174 | 175 | cloud_ws_mean = cloud_ws.mean(dim=['latitude', 'longitude']) 176 | 177 | print(cloud_ws_mean) 178 | 179 | if f: 180 | f.write(f"{time.perf_counter() - tic:0.4f},") 181 | f.write("success\n") 182 | 183 | except webob.exc.HTTPError as err: 184 | # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html# 185 | print("HTTPError: code: ", err.code, ": ", err.detail); 186 | print("Error: ", sys.exc_info()[0]) 187 | if f: 188 | f.write(f"{time.perf_counter() - tic:0.4f},") 189 | f.write("fail\n") 190 | 191 | except UnicodeError as err: 192 | # See https://docs.pylonsproject.org/projects/webob/en/stable/api/exceptions.html# 193 | print("UnicodeError - encoding: ", err.encoding, " reason: ", err.reason, " object: ", type(err.object), " start: ", err.object[err.start]," end: ",err.end); 194 | print("Error: ", sys.exc_info()[0]) 195 | if f: 196 | f.write(f"{time.perf_counter() - tic:0.4f},") 197 | f.write("fail\n") 198 | except: 199 | print("Error: ", sys.exc_info()[0]) 200 | if f: 201 | f.write(f"{time.perf_counter() - tic:0.4f},") 202 | f.write("fail\n") 203 | 204 | 205 | def main(): 206 | import getopt 207 | hr = "--- --- --- --- --- --- --- --- --- --- --- --- " 208 | run_id="id_not_set " 209 | global f # results file 210 | 
211 | usage="Options -i -d -s s3, -g granules, -n ngap api (us-west-2+prod), -m ngap api (UAT), -t tea, -u tea-uat, -p tea-apigw -a all of s, g, n and t." 212 | 213 | try: 214 | # see https://docs.python.org/3.1/library/getopt.htm 215 | optlist, args = getopt.getopt(sys.argv[1:], 'mlsgntahupd:i:') 216 | except: 217 | # print help information and exit: 218 | print(usage) 219 | sys.exit(2) 220 | 221 | for o, a in optlist: 222 | 223 | if o in ("-h", "--help"): 224 | print(usage) 225 | 226 | if o == "-i": 227 | run_id=a 228 | 229 | if o == "-d": 230 | print("Datafile name: ", a) 231 | f = open(a, "a") 232 | 233 | if o in ("-s", "-a"): 234 | print(hr) 235 | print("Run ID:", run_id) 236 | if f: 237 | f.write("s3,") 238 | s3_bucket() 239 | clean_cache() 240 | get_the_things() 241 | 242 | if o in ("-g", "-a"): 243 | print(hr) 244 | print("Run ID:", run_id) 245 | if f: 246 | f.write("granule,") 247 | granules() 248 | clean_cache() 249 | get_the_things() 250 | 251 | if o in ("-l", "-a"): 252 | print(hr) 253 | print("Run ID:", run_id) 254 | if f: 255 | f.write("ngap_localhost,") 256 | ngap_localhost() 257 | clean_cache() 258 | get_the_things() 259 | 260 | if o in ("-t", "-a"): 261 | print(hr) 262 | print("Run ID:", run_id) 263 | if f: 264 | f.write("tea_prod,") 265 | tea_prod() 266 | clean_cache() 267 | get_the_things() 268 | 269 | if o in ("-u", "-a"): 270 | print(hr) 271 | print("Run ID:", run_id) 272 | if f: 273 | f.write("tea_uat,") 274 | tea_uat() 275 | clean_cache() 276 | get_the_things() 277 | 278 | if o in ("-p", "-a"): 279 | print(hr) 280 | print("Run ID:", run_id) 281 | if f: 282 | f.write("tea_apigw,") 283 | tea_apigw() 284 | clean_cache() 285 | get_the_things() 286 | 287 | if o in ("-n", "-a"): 288 | print(hr) 289 | print("Run ID:", run_id) 290 | if f: 291 | f.write("ngap_west,") 292 | ngap_service_west() 293 | clean_cache() 294 | get_the_things() 295 | 296 | if o in ("-m", "-a"): 297 | print(hr) 298 | print("Run ID:", run_id) 299 | if f: 300 | 
f.write("ngap_uat,") 301 | ngap_service_uat() 302 | clean_cache() 303 | get_the_things() 304 | 305 | if f: 306 | f.close() 307 | 308 | 309 | if __name__ == "__main__": 310 | main() 311 | -------------------------------------------------------------------------------- /python-xarray/merra_granules.py: -------------------------------------------------------------------------------- 1 | merra2 = ["MERRA2_100.tavgM_2d_int_Nx.198001.nc4.dmrpp", 2 | "MERRA2_100.tavgM_2d_int_Nx.198002.nc4.dmrpp", 3 | "MERRA2_100.tavgM_2d_int_Nx.198003.nc4.dmrpp", 4 | "MERRA2_100.tavgM_2d_int_Nx.198004.nc4.dmrpp", 5 | "MERRA2_100.tavgM_2d_int_Nx.198005.nc4.dmrpp", 6 | "MERRA2_100.tavgM_2d_int_Nx.198006.nc4.dmrpp", 7 | "MERRA2_100.tavgM_2d_int_Nx.198007.nc4.dmrpp", 8 | "MERRA2_100.tavgM_2d_int_Nx.198008.nc4.dmrpp", 9 | "MERRA2_100.tavgM_2d_int_Nx.198009.nc4.dmrpp", 10 | "MERRA2_100.tavgM_2d_int_Nx.198010.nc4.dmrpp", 11 | "MERRA2_100.tavgM_2d_int_Nx.198011.nc4.dmrpp", 12 | "MERRA2_100.tavgM_2d_int_Nx.198012.nc4.dmrpp", 13 | "MERRA2_100.tavgM_2d_int_Nx.198101.nc4.dmrpp", 14 | "MERRA2_100.tavgM_2d_int_Nx.198102.nc4.dmrpp", 15 | "MERRA2_100.tavgM_2d_int_Nx.198103.nc4.dmrpp", 16 | "MERRA2_100.tavgM_2d_int_Nx.198104.nc4.dmrpp", 17 | "MERRA2_100.tavgM_2d_int_Nx.198105.nc4.dmrpp", 18 | "MERRA2_100.tavgM_2d_int_Nx.198106.nc4.dmrpp", 19 | "MERRA2_100.tavgM_2d_int_Nx.198107.nc4.dmrpp", 20 | "MERRA2_100.tavgM_2d_int_Nx.198108.nc4.dmrpp", 21 | "MERRA2_100.tavgM_2d_int_Nx.198109.nc4.dmrpp", 22 | "MERRA2_100.tavgM_2d_int_Nx.198110.nc4.dmrpp", 23 | "MERRA2_100.tavgM_2d_int_Nx.198111.nc4.dmrpp", 24 | "MERRA2_100.tavgM_2d_int_Nx.198112.nc4.dmrpp", 25 | "MERRA2_100.tavgM_2d_int_Nx.198201.nc4.dmrpp", 26 | "MERRA2_100.tavgM_2d_int_Nx.198202.nc4.dmrpp", 27 | "MERRA2_100.tavgM_2d_int_Nx.198203.nc4.dmrpp", 28 | "MERRA2_100.tavgM_2d_int_Nx.198204.nc4.dmrpp", 29 | "MERRA2_100.tavgM_2d_int_Nx.198205.nc4.dmrpp", 30 | "MERRA2_100.tavgM_2d_int_Nx.198206.nc4.dmrpp", 31 | "MERRA2_100.tavgM_2d_int_Nx.198207.nc4.dmrpp", 32 
| "MERRA2_100.tavgM_2d_int_Nx.198208.nc4.dmrpp", 33 | "MERRA2_100.tavgM_2d_int_Nx.198209.nc4.dmrpp", 34 | "MERRA2_100.tavgM_2d_int_Nx.198210.nc4.dmrpp", 35 | "MERRA2_100.tavgM_2d_int_Nx.198211.nc4.dmrpp", 36 | "MERRA2_100.tavgM_2d_int_Nx.198212.nc4.dmrpp", 37 | "MERRA2_100.tavgM_2d_int_Nx.198301.nc4.dmrpp", 38 | "MERRA2_100.tavgM_2d_int_Nx.198302.nc4.dmrpp", 39 | "MERRA2_100.tavgM_2d_int_Nx.198303.nc4.dmrpp", 40 | "MERRA2_100.tavgM_2d_int_Nx.198304.nc4.dmrpp", 41 | "MERRA2_100.tavgM_2d_int_Nx.198305.nc4.dmrpp", 42 | "MERRA2_100.tavgM_2d_int_Nx.198306.nc4.dmrpp", 43 | "MERRA2_100.tavgM_2d_int_Nx.198307.nc4.dmrpp", 44 | "MERRA2_100.tavgM_2d_int_Nx.198308.nc4.dmrpp", 45 | "MERRA2_100.tavgM_2d_int_Nx.198309.nc4.dmrpp", 46 | "MERRA2_100.tavgM_2d_int_Nx.198310.nc4.dmrpp", 47 | "MERRA2_100.tavgM_2d_int_Nx.198311.nc4.dmrpp", 48 | "MERRA2_100.tavgM_2d_int_Nx.198312.nc4.dmrpp", 49 | "MERRA2_100.tavgM_2d_int_Nx.198401.nc4.dmrpp", 50 | "MERRA2_100.tavgM_2d_int_Nx.198402.nc4.dmrpp", 51 | "MERRA2_100.tavgM_2d_int_Nx.198403.nc4.dmrpp", 52 | "MERRA2_100.tavgM_2d_int_Nx.198404.nc4.dmrpp", 53 | "MERRA2_100.tavgM_2d_int_Nx.198405.nc4.dmrpp", 54 | "MERRA2_100.tavgM_2d_int_Nx.198406.nc4.dmrpp", 55 | "MERRA2_100.tavgM_2d_int_Nx.198407.nc4.dmrpp", 56 | "MERRA2_100.tavgM_2d_int_Nx.198408.nc4.dmrpp", 57 | "MERRA2_100.tavgM_2d_int_Nx.198409.nc4.dmrpp", 58 | "MERRA2_100.tavgM_2d_int_Nx.198410.nc4.dmrpp", 59 | "MERRA2_100.tavgM_2d_int_Nx.198411.nc4.dmrpp", 60 | "MERRA2_100.tavgM_2d_int_Nx.198412.nc4.dmrpp", 61 | "MERRA2_100.tavgM_2d_int_Nx.198501.nc4.dmrpp", 62 | "MERRA2_100.tavgM_2d_int_Nx.198502.nc4.dmrpp", 63 | "MERRA2_100.tavgM_2d_int_Nx.198503.nc4.dmrpp", 64 | "MERRA2_100.tavgM_2d_int_Nx.198504.nc4.dmrpp", 65 | "MERRA2_100.tavgM_2d_int_Nx.198505.nc4.dmrpp", 66 | "MERRA2_100.tavgM_2d_int_Nx.198506.nc4.dmrpp", 67 | "MERRA2_100.tavgM_2d_int_Nx.198507.nc4.dmrpp", 68 | "MERRA2_100.tavgM_2d_int_Nx.198508.nc4.dmrpp", 69 | "MERRA2_100.tavgM_2d_int_Nx.198509.nc4.dmrpp", 70 | 
"MERRA2_100.tavgM_2d_int_Nx.198510.nc4.dmrpp", 71 | "MERRA2_100.tavgM_2d_int_Nx.198511.nc4.dmrpp", 72 | "MERRA2_100.tavgM_2d_int_Nx.198512.nc4.dmrpp", 73 | "MERRA2_100.tavgM_2d_int_Nx.198601.nc4.dmrpp", 74 | "MERRA2_100.tavgM_2d_int_Nx.198602.nc4.dmrpp", 75 | "MERRA2_100.tavgM_2d_int_Nx.198603.nc4.dmrpp", 76 | "MERRA2_100.tavgM_2d_int_Nx.198604.nc4.dmrpp", 77 | "MERRA2_100.tavgM_2d_int_Nx.198605.nc4.dmrpp", 78 | "MERRA2_100.tavgM_2d_int_Nx.198606.nc4.dmrpp", 79 | "MERRA2_100.tavgM_2d_int_Nx.198607.nc4.dmrpp", 80 | "MERRA2_100.tavgM_2d_int_Nx.198608.nc4.dmrpp", 81 | "MERRA2_100.tavgM_2d_int_Nx.198609.nc4.dmrpp", 82 | "MERRA2_100.tavgM_2d_int_Nx.198610.nc4.dmrpp", 83 | "MERRA2_100.tavgM_2d_int_Nx.198611.nc4.dmrpp", 84 | "MERRA2_100.tavgM_2d_int_Nx.198612.nc4.dmrpp", 85 | "MERRA2_100.tavgM_2d_int_Nx.198701.nc4.dmrpp", 86 | "MERRA2_100.tavgM_2d_int_Nx.198702.nc4.dmrpp", 87 | "MERRA2_100.tavgM_2d_int_Nx.198703.nc4.dmrpp", 88 | "MERRA2_100.tavgM_2d_int_Nx.198704.nc4.dmrpp", 89 | "MERRA2_100.tavgM_2d_int_Nx.198705.nc4.dmrpp", 90 | "MERRA2_100.tavgM_2d_int_Nx.198706.nc4.dmrpp", 91 | "MERRA2_100.tavgM_2d_int_Nx.198707.nc4.dmrpp", 92 | "MERRA2_100.tavgM_2d_int_Nx.198708.nc4.dmrpp", 93 | "MERRA2_100.tavgM_2d_int_Nx.198709.nc4.dmrpp", 94 | "MERRA2_100.tavgM_2d_int_Nx.198710.nc4.dmrpp", 95 | "MERRA2_100.tavgM_2d_int_Nx.198711.nc4.dmrpp", 96 | "MERRA2_100.tavgM_2d_int_Nx.198712.nc4.dmrpp", 97 | "MERRA2_100.tavgM_2d_int_Nx.198801.nc4.dmrpp", 98 | "MERRA2_100.tavgM_2d_int_Nx.198802.nc4.dmrpp", 99 | "MERRA2_100.tavgM_2d_int_Nx.198803.nc4.dmrpp", 100 | "MERRA2_100.tavgM_2d_int_Nx.198804.nc4.dmrpp", 101 | "MERRA2_100.tavgM_2d_int_Nx.198805.nc4.dmrpp", 102 | "MERRA2_100.tavgM_2d_int_Nx.198806.nc4.dmrpp", 103 | "MERRA2_100.tavgM_2d_int_Nx.198807.nc4.dmrpp", 104 | "MERRA2_100.tavgM_2d_int_Nx.198808.nc4.dmrpp", 105 | "MERRA2_100.tavgM_2d_int_Nx.198809.nc4.dmrpp", 106 | "MERRA2_100.tavgM_2d_int_Nx.198810.nc4.dmrpp", 107 | "MERRA2_100.tavgM_2d_int_Nx.198811.nc4.dmrpp", 108 | 
"MERRA2_100.tavgM_2d_int_Nx.198812.nc4.dmrpp", 109 | "MERRA2_100.tavgM_2d_int_Nx.198901.nc4.dmrpp", 110 | "MERRA2_100.tavgM_2d_int_Nx.198902.nc4.dmrpp", 111 | "MERRA2_100.tavgM_2d_int_Nx.198903.nc4.dmrpp", 112 | "MERRA2_100.tavgM_2d_int_Nx.198904.nc4.dmrpp", 113 | "MERRA2_100.tavgM_2d_int_Nx.198905.nc4.dmrpp", 114 | "MERRA2_100.tavgM_2d_int_Nx.198906.nc4.dmrpp", 115 | "MERRA2_100.tavgM_2d_int_Nx.198907.nc4.dmrpp", 116 | "MERRA2_100.tavgM_2d_int_Nx.198908.nc4.dmrpp", 117 | "MERRA2_100.tavgM_2d_int_Nx.198909.nc4.dmrpp", 118 | "MERRA2_100.tavgM_2d_int_Nx.198910.nc4.dmrpp", 119 | "MERRA2_100.tavgM_2d_int_Nx.198911.nc4.dmrpp", 120 | "MERRA2_100.tavgM_2d_int_Nx.198912.nc4.dmrpp", 121 | "MERRA2_100.tavgM_2d_int_Nx.199001.nc4.dmrpp", 122 | "MERRA2_100.tavgM_2d_int_Nx.199002.nc4.dmrpp", 123 | "MERRA2_100.tavgM_2d_int_Nx.199003.nc4.dmrpp", 124 | "MERRA2_100.tavgM_2d_int_Nx.199004.nc4.dmrpp", 125 | "MERRA2_100.tavgM_2d_int_Nx.199005.nc4.dmrpp", 126 | "MERRA2_100.tavgM_2d_int_Nx.199006.nc4.dmrpp", 127 | "MERRA2_100.tavgM_2d_int_Nx.199007.nc4.dmrpp", 128 | "MERRA2_100.tavgM_2d_int_Nx.199008.nc4.dmrpp", 129 | "MERRA2_100.tavgM_2d_int_Nx.199009.nc4.dmrpp", 130 | "MERRA2_100.tavgM_2d_int_Nx.199010.nc4.dmrpp", 131 | "MERRA2_100.tavgM_2d_int_Nx.199011.nc4.dmrpp", 132 | "MERRA2_100.tavgM_2d_int_Nx.199012.nc4.dmrpp", 133 | "MERRA2_100.tavgM_2d_int_Nx.199101.nc4.dmrpp", 134 | "MERRA2_100.tavgM_2d_int_Nx.199102.nc4.dmrpp", 135 | "MERRA2_100.tavgM_2d_int_Nx.199103.nc4.dmrpp", 136 | "MERRA2_100.tavgM_2d_int_Nx.199104.nc4.dmrpp", 137 | "MERRA2_100.tavgM_2d_int_Nx.199105.nc4.dmrpp", 138 | "MERRA2_100.tavgM_2d_int_Nx.199106.nc4.dmrpp", 139 | "MERRA2_100.tavgM_2d_int_Nx.199107.nc4.dmrpp", 140 | "MERRA2_100.tavgM_2d_int_Nx.199108.nc4.dmrpp", 141 | "MERRA2_100.tavgM_2d_int_Nx.199109.nc4.dmrpp", 142 | "MERRA2_100.tavgM_2d_int_Nx.199110.nc4.dmrpp", 143 | "MERRA2_100.tavgM_2d_int_Nx.199111.nc4.dmrpp", 144 | "MERRA2_100.tavgM_2d_int_Nx.199112.nc4.dmrpp", 145 | 
"MERRA2_200.tavgM_2d_int_Nx.199201.nc4.dmrpp", 146 | "MERRA2_200.tavgM_2d_int_Nx.199202.nc4.dmrpp", 147 | "MERRA2_200.tavgM_2d_int_Nx.199203.nc4.dmrpp", 148 | "MERRA2_200.tavgM_2d_int_Nx.199204.nc4.dmrpp", 149 | "MERRA2_200.tavgM_2d_int_Nx.199205.nc4.dmrpp", 150 | "MERRA2_200.tavgM_2d_int_Nx.199206.nc4.dmrpp", 151 | "MERRA2_200.tavgM_2d_int_Nx.199207.nc4.dmrpp", 152 | "MERRA2_200.tavgM_2d_int_Nx.199208.nc4.dmrpp", 153 | "MERRA2_200.tavgM_2d_int_Nx.199209.nc4.dmrpp", 154 | "MERRA2_200.tavgM_2d_int_Nx.199210.nc4.dmrpp", 155 | "MERRA2_200.tavgM_2d_int_Nx.199211.nc4.dmrpp", 156 | "MERRA2_200.tavgM_2d_int_Nx.199212.nc4.dmrpp", 157 | "MERRA2_200.tavgM_2d_int_Nx.199301.nc4.dmrpp", 158 | "MERRA2_200.tavgM_2d_int_Nx.199302.nc4.dmrpp", 159 | "MERRA2_200.tavgM_2d_int_Nx.199303.nc4.dmrpp", 160 | "MERRA2_200.tavgM_2d_int_Nx.199304.nc4.dmrpp", 161 | "MERRA2_200.tavgM_2d_int_Nx.199305.nc4.dmrpp", 162 | "MERRA2_200.tavgM_2d_int_Nx.199306.nc4.dmrpp", 163 | "MERRA2_200.tavgM_2d_int_Nx.199307.nc4.dmrpp", 164 | "MERRA2_200.tavgM_2d_int_Nx.199308.nc4.dmrpp", 165 | "MERRA2_200.tavgM_2d_int_Nx.199309.nc4.dmrpp", 166 | "MERRA2_200.tavgM_2d_int_Nx.199310.nc4.dmrpp", 167 | "MERRA2_200.tavgM_2d_int_Nx.199311.nc4.dmrpp", 168 | "MERRA2_200.tavgM_2d_int_Nx.199312.nc4.dmrpp", 169 | "MERRA2_200.tavgM_2d_int_Nx.199401.nc4.dmrpp", 170 | "MERRA2_200.tavgM_2d_int_Nx.199402.nc4.dmrpp", 171 | "MERRA2_200.tavgM_2d_int_Nx.199403.nc4.dmrpp", 172 | "MERRA2_200.tavgM_2d_int_Nx.199404.nc4.dmrpp", 173 | "MERRA2_200.tavgM_2d_int_Nx.199405.nc4.dmrpp", 174 | "MERRA2_200.tavgM_2d_int_Nx.199406.nc4.dmrpp", 175 | "MERRA2_200.tavgM_2d_int_Nx.199407.nc4.dmrpp", 176 | "MERRA2_200.tavgM_2d_int_Nx.199408.nc4.dmrpp", 177 | "MERRA2_200.tavgM_2d_int_Nx.199409.nc4.dmrpp", 178 | "MERRA2_200.tavgM_2d_int_Nx.199410.nc4.dmrpp", 179 | "MERRA2_200.tavgM_2d_int_Nx.199411.nc4.dmrpp", 180 | "MERRA2_200.tavgM_2d_int_Nx.199412.nc4.dmrpp", 181 | "MERRA2_200.tavgM_2d_int_Nx.199501.nc4.dmrpp", 182 | 
"MERRA2_200.tavgM_2d_int_Nx.199502.nc4.dmrpp", 183 | "MERRA2_200.tavgM_2d_int_Nx.199503.nc4.dmrpp", 184 | "MERRA2_200.tavgM_2d_int_Nx.199504.nc4.dmrpp", 185 | "MERRA2_200.tavgM_2d_int_Nx.199505.nc4.dmrpp", 186 | "MERRA2_200.tavgM_2d_int_Nx.199506.nc4.dmrpp", 187 | "MERRA2_200.tavgM_2d_int_Nx.199507.nc4.dmrpp", 188 | "MERRA2_200.tavgM_2d_int_Nx.199508.nc4.dmrpp", 189 | "MERRA2_200.tavgM_2d_int_Nx.199509.nc4.dmrpp", 190 | "MERRA2_200.tavgM_2d_int_Nx.199510.nc4.dmrpp", 191 | "MERRA2_200.tavgM_2d_int_Nx.199511.nc4.dmrpp", 192 | "MERRA2_200.tavgM_2d_int_Nx.199512.nc4.dmrpp", 193 | "MERRA2_200.tavgM_2d_int_Nx.199601.nc4.dmrpp", 194 | "MERRA2_200.tavgM_2d_int_Nx.199602.nc4.dmrpp", 195 | "MERRA2_200.tavgM_2d_int_Nx.199603.nc4.dmrpp", 196 | "MERRA2_200.tavgM_2d_int_Nx.199604.nc4.dmrpp", 197 | "MERRA2_200.tavgM_2d_int_Nx.199605.nc4.dmrpp", 198 | "MERRA2_200.tavgM_2d_int_Nx.199606.nc4.dmrpp", 199 | "MERRA2_200.tavgM_2d_int_Nx.199607.nc4.dmrpp", 200 | "MERRA2_200.tavgM_2d_int_Nx.199608.nc4.dmrpp", 201 | "MERRA2_200.tavgM_2d_int_Nx.199609.nc4.dmrpp", 202 | "MERRA2_200.tavgM_2d_int_Nx.199610.nc4.dmrpp", 203 | "MERRA2_200.tavgM_2d_int_Nx.199611.nc4.dmrpp", 204 | "MERRA2_200.tavgM_2d_int_Nx.199612.nc4.dmrpp", 205 | "MERRA2_200.tavgM_2d_int_Nx.199701.nc4.dmrpp", 206 | "MERRA2_200.tavgM_2d_int_Nx.199702.nc4.dmrpp", 207 | "MERRA2_200.tavgM_2d_int_Nx.199703.nc4.dmrpp", 208 | "MERRA2_200.tavgM_2d_int_Nx.199704.nc4.dmrpp", 209 | "MERRA2_200.tavgM_2d_int_Nx.199705.nc4.dmrpp", 210 | "MERRA2_200.tavgM_2d_int_Nx.199706.nc4.dmrpp", 211 | "MERRA2_200.tavgM_2d_int_Nx.199707.nc4.dmrpp", 212 | "MERRA2_200.tavgM_2d_int_Nx.199708.nc4.dmrpp", 213 | "MERRA2_200.tavgM_2d_int_Nx.199709.nc4.dmrpp", 214 | "MERRA2_200.tavgM_2d_int_Nx.199710.nc4.dmrpp", 215 | "MERRA2_200.tavgM_2d_int_Nx.199711.nc4.dmrpp", 216 | "MERRA2_200.tavgM_2d_int_Nx.199712.nc4.dmrpp", 217 | "MERRA2_200.tavgM_2d_int_Nx.199801.nc4.dmrpp", 218 | "MERRA2_200.tavgM_2d_int_Nx.199802.nc4.dmrpp", 219 | 
"MERRA2_200.tavgM_2d_int_Nx.199803.nc4.dmrpp", 220 | "MERRA2_200.tavgM_2d_int_Nx.199804.nc4.dmrpp", 221 | "MERRA2_200.tavgM_2d_int_Nx.199805.nc4.dmrpp", 222 | "MERRA2_200.tavgM_2d_int_Nx.199806.nc4.dmrpp", 223 | "MERRA2_200.tavgM_2d_int_Nx.199807.nc4.dmrpp", 224 | "MERRA2_200.tavgM_2d_int_Nx.199808.nc4.dmrpp", 225 | "MERRA2_200.tavgM_2d_int_Nx.199809.nc4.dmrpp", 226 | "MERRA2_200.tavgM_2d_int_Nx.199810.nc4.dmrpp", 227 | "MERRA2_200.tavgM_2d_int_Nx.199811.nc4.dmrpp", 228 | "MERRA2_200.tavgM_2d_int_Nx.199812.nc4.dmrpp", 229 | "MERRA2_200.tavgM_2d_int_Nx.199901.nc4.dmrpp", 230 | "MERRA2_200.tavgM_2d_int_Nx.199902.nc4.dmrpp", 231 | "MERRA2_200.tavgM_2d_int_Nx.199903.nc4.dmrpp", 232 | "MERRA2_200.tavgM_2d_int_Nx.199904.nc4.dmrpp", 233 | "MERRA2_200.tavgM_2d_int_Nx.199905.nc4.dmrpp", 234 | "MERRA2_200.tavgM_2d_int_Nx.199906.nc4.dmrpp", 235 | "MERRA2_200.tavgM_2d_int_Nx.199907.nc4.dmrpp", 236 | "MERRA2_200.tavgM_2d_int_Nx.199908.nc4.dmrpp", 237 | "MERRA2_200.tavgM_2d_int_Nx.199909.nc4.dmrpp", 238 | "MERRA2_200.tavgM_2d_int_Nx.199910.nc4.dmrpp", 239 | "MERRA2_200.tavgM_2d_int_Nx.199911.nc4.dmrpp", 240 | "MERRA2_200.tavgM_2d_int_Nx.199912.nc4.dmrpp", 241 | "MERRA2_200.tavgM_2d_int_Nx.200001.nc4.dmrpp", 242 | "MERRA2_200.tavgM_2d_int_Nx.200002.nc4.dmrpp", 243 | "MERRA2_200.tavgM_2d_int_Nx.200003.nc4.dmrpp", 244 | "MERRA2_200.tavgM_2d_int_Nx.200004.nc4.dmrpp", 245 | "MERRA2_200.tavgM_2d_int_Nx.200005.nc4.dmrpp", 246 | "MERRA2_200.tavgM_2d_int_Nx.200006.nc4.dmrpp", 247 | "MERRA2_200.tavgM_2d_int_Nx.200007.nc4.dmrpp", 248 | "MERRA2_200.tavgM_2d_int_Nx.200008.nc4.dmrpp", 249 | "MERRA2_200.tavgM_2d_int_Nx.200009.nc4.dmrpp", 250 | "MERRA2_200.tavgM_2d_int_Nx.200010.nc4.dmrpp", 251 | "MERRA2_200.tavgM_2d_int_Nx.200011.nc4.dmrpp", 252 | "MERRA2_200.tavgM_2d_int_Nx.200012.nc4.dmrpp", 253 | "MERRA2_300.tavgM_2d_int_Nx.200101.nc4.dmrpp", 254 | "MERRA2_300.tavgM_2d_int_Nx.200102.nc4.dmrpp", 255 | "MERRA2_300.tavgM_2d_int_Nx.200103.nc4.dmrpp", 256 | 
"MERRA2_300.tavgM_2d_int_Nx.200104.nc4.dmrpp", 257 | "MERRA2_300.tavgM_2d_int_Nx.200105.nc4.dmrpp", 258 | "MERRA2_300.tavgM_2d_int_Nx.200106.nc4.dmrpp", 259 | "MERRA2_300.tavgM_2d_int_Nx.200107.nc4.dmrpp", 260 | "MERRA2_300.tavgM_2d_int_Nx.200108.nc4.dmrpp", 261 | "MERRA2_300.tavgM_2d_int_Nx.200109.nc4.dmrpp", 262 | "MERRA2_300.tavgM_2d_int_Nx.200110.nc4.dmrpp", 263 | "MERRA2_300.tavgM_2d_int_Nx.200111.nc4.dmrpp", 264 | "MERRA2_300.tavgM_2d_int_Nx.200112.nc4.dmrpp", 265 | "MERRA2_300.tavgM_2d_int_Nx.200201.nc4.dmrpp", 266 | "MERRA2_300.tavgM_2d_int_Nx.200202.nc4.dmrpp", 267 | "MERRA2_300.tavgM_2d_int_Nx.200203.nc4.dmrpp", 268 | "MERRA2_300.tavgM_2d_int_Nx.200204.nc4.dmrpp", 269 | "MERRA2_300.tavgM_2d_int_Nx.200205.nc4.dmrpp", 270 | "MERRA2_300.tavgM_2d_int_Nx.200206.nc4.dmrpp", 271 | "MERRA2_300.tavgM_2d_int_Nx.200207.nc4.dmrpp", 272 | "MERRA2_300.tavgM_2d_int_Nx.200208.nc4.dmrpp", 273 | "MERRA2_300.tavgM_2d_int_Nx.200209.nc4.dmrpp", 274 | "MERRA2_300.tavgM_2d_int_Nx.200210.nc4.dmrpp", 275 | "MERRA2_300.tavgM_2d_int_Nx.200211.nc4.dmrpp", 276 | "MERRA2_300.tavgM_2d_int_Nx.200212.nc4.dmrpp", 277 | "MERRA2_300.tavgM_2d_int_Nx.200301.nc4.dmrpp", 278 | "MERRA2_300.tavgM_2d_int_Nx.200302.nc4.dmrpp", 279 | "MERRA2_300.tavgM_2d_int_Nx.200303.nc4.dmrpp", 280 | "MERRA2_300.tavgM_2d_int_Nx.200304.nc4.dmrpp", 281 | "MERRA2_300.tavgM_2d_int_Nx.200305.nc4.dmrpp", 282 | "MERRA2_300.tavgM_2d_int_Nx.200306.nc4.dmrpp", 283 | "MERRA2_300.tavgM_2d_int_Nx.200307.nc4.dmrpp", 284 | "MERRA2_300.tavgM_2d_int_Nx.200308.nc4.dmrpp", 285 | "MERRA2_300.tavgM_2d_int_Nx.200309.nc4.dmrpp", 286 | "MERRA2_300.tavgM_2d_int_Nx.200310.nc4.dmrpp", 287 | "MERRA2_300.tavgM_2d_int_Nx.200311.nc4.dmrpp", 288 | "MERRA2_300.tavgM_2d_int_Nx.200312.nc4.dmrpp", 289 | "MERRA2_300.tavgM_2d_int_Nx.200401.nc4.dmrpp", 290 | "MERRA2_300.tavgM_2d_int_Nx.200402.nc4.dmrpp", 291 | "MERRA2_300.tavgM_2d_int_Nx.200403.nc4.dmrpp", 292 | "MERRA2_300.tavgM_2d_int_Nx.200404.nc4.dmrpp", 293 | 
"MERRA2_300.tavgM_2d_int_Nx.200405.nc4.dmrpp", 294 | "MERRA2_300.tavgM_2d_int_Nx.200406.nc4.dmrpp", 295 | "MERRA2_300.tavgM_2d_int_Nx.200407.nc4.dmrpp", 296 | "MERRA2_300.tavgM_2d_int_Nx.200408.nc4.dmrpp", 297 | "MERRA2_300.tavgM_2d_int_Nx.200409.nc4.dmrpp", 298 | "MERRA2_300.tavgM_2d_int_Nx.200410.nc4.dmrpp", 299 | "MERRA2_300.tavgM_2d_int_Nx.200411.nc4.dmrpp", 300 | "MERRA2_300.tavgM_2d_int_Nx.200412.nc4.dmrpp", 301 | "MERRA2_300.tavgM_2d_int_Nx.200501.nc4.dmrpp", 302 | "MERRA2_300.tavgM_2d_int_Nx.200502.nc4.dmrpp", 303 | "MERRA2_300.tavgM_2d_int_Nx.200503.nc4.dmrpp", 304 | "MERRA2_300.tavgM_2d_int_Nx.200504.nc4.dmrpp", 305 | "MERRA2_300.tavgM_2d_int_Nx.200505.nc4.dmrpp", 306 | "MERRA2_300.tavgM_2d_int_Nx.200506.nc4.dmrpp", 307 | "MERRA2_300.tavgM_2d_int_Nx.200507.nc4.dmrpp", 308 | "MERRA2_300.tavgM_2d_int_Nx.200508.nc4.dmrpp", 309 | "MERRA2_300.tavgM_2d_int_Nx.200509.nc4.dmrpp", 310 | "MERRA2_300.tavgM_2d_int_Nx.200510.nc4.dmrpp", 311 | "MERRA2_300.tavgM_2d_int_Nx.200511.nc4.dmrpp", 312 | "MERRA2_300.tavgM_2d_int_Nx.200512.nc4.dmrpp", 313 | "MERRA2_300.tavgM_2d_int_Nx.200601.nc4.dmrpp", 314 | "MERRA2_300.tavgM_2d_int_Nx.200602.nc4.dmrpp", 315 | "MERRA2_300.tavgM_2d_int_Nx.200603.nc4.dmrpp", 316 | "MERRA2_300.tavgM_2d_int_Nx.200604.nc4.dmrpp", 317 | "MERRA2_300.tavgM_2d_int_Nx.200605.nc4.dmrpp", 318 | "MERRA2_300.tavgM_2d_int_Nx.200606.nc4.dmrpp", 319 | "MERRA2_300.tavgM_2d_int_Nx.200607.nc4.dmrpp", 320 | "MERRA2_300.tavgM_2d_int_Nx.200608.nc4.dmrpp", 321 | "MERRA2_300.tavgM_2d_int_Nx.200609.nc4.dmrpp", 322 | "MERRA2_300.tavgM_2d_int_Nx.200610.nc4.dmrpp", 323 | "MERRA2_300.tavgM_2d_int_Nx.200611.nc4.dmrpp", 324 | "MERRA2_300.tavgM_2d_int_Nx.200612.nc4.dmrpp", 325 | "MERRA2_300.tavgM_2d_int_Nx.200701.nc4.dmrpp", 326 | "MERRA2_300.tavgM_2d_int_Nx.200702.nc4.dmrpp", 327 | "MERRA2_300.tavgM_2d_int_Nx.200703.nc4.dmrpp", 328 | "MERRA2_300.tavgM_2d_int_Nx.200704.nc4.dmrpp", 329 | "MERRA2_300.tavgM_2d_int_Nx.200705.nc4.dmrpp", 330 | 
"MERRA2_300.tavgM_2d_int_Nx.200706.nc4.dmrpp", 331 | "MERRA2_300.tavgM_2d_int_Nx.200707.nc4.dmrpp", 332 | "MERRA2_300.tavgM_2d_int_Nx.200708.nc4.dmrpp", 333 | "MERRA2_300.tavgM_2d_int_Nx.200709.nc4.dmrpp", 334 | "MERRA2_300.tavgM_2d_int_Nx.200710.nc4.dmrpp", 335 | "MERRA2_300.tavgM_2d_int_Nx.200711.nc4.dmrpp", 336 | "MERRA2_300.tavgM_2d_int_Nx.200712.nc4.dmrpp", 337 | "MERRA2_300.tavgM_2d_int_Nx.200801.nc4.dmrpp", 338 | "MERRA2_300.tavgM_2d_int_Nx.200802.nc4.dmrpp", 339 | "MERRA2_300.tavgM_2d_int_Nx.200803.nc4.dmrpp", 340 | "MERRA2_300.tavgM_2d_int_Nx.200804.nc4.dmrpp", 341 | "MERRA2_300.tavgM_2d_int_Nx.200805.nc4.dmrpp", 342 | "MERRA2_300.tavgM_2d_int_Nx.200806.nc4.dmrpp", 343 | "MERRA2_300.tavgM_2d_int_Nx.200807.nc4.dmrpp", 344 | "MERRA2_300.tavgM_2d_int_Nx.200808.nc4.dmrpp", 345 | "MERRA2_300.tavgM_2d_int_Nx.200809.nc4.dmrpp", 346 | "MERRA2_300.tavgM_2d_int_Nx.200810.nc4.dmrpp", 347 | "MERRA2_300.tavgM_2d_int_Nx.200811.nc4.dmrpp", 348 | "MERRA2_300.tavgM_2d_int_Nx.200812.nc4.dmrpp", 349 | "MERRA2_300.tavgM_2d_int_Nx.200901.nc4.dmrpp", 350 | "MERRA2_300.tavgM_2d_int_Nx.200902.nc4.dmrpp", 351 | "MERRA2_300.tavgM_2d_int_Nx.200903.nc4.dmrpp", 352 | "MERRA2_300.tavgM_2d_int_Nx.200904.nc4.dmrpp", 353 | "MERRA2_300.tavgM_2d_int_Nx.200905.nc4.dmrpp", 354 | "MERRA2_300.tavgM_2d_int_Nx.200906.nc4.dmrpp", 355 | "MERRA2_300.tavgM_2d_int_Nx.200907.nc4.dmrpp", 356 | "MERRA2_300.tavgM_2d_int_Nx.200908.nc4.dmrpp", 357 | "MERRA2_300.tavgM_2d_int_Nx.200909.nc4.dmrpp", 358 | "MERRA2_300.tavgM_2d_int_Nx.200910.nc4.dmrpp", 359 | "MERRA2_300.tavgM_2d_int_Nx.200911.nc4.dmrpp", 360 | "MERRA2_300.tavgM_2d_int_Nx.200912.nc4.dmrpp", 361 | "MERRA2_300.tavgM_2d_int_Nx.201001.nc4.dmrpp", 362 | "MERRA2_300.tavgM_2d_int_Nx.201002.nc4.dmrpp", 363 | "MERRA2_300.tavgM_2d_int_Nx.201003.nc4.dmrpp", 364 | "MERRA2_300.tavgM_2d_int_Nx.201004.nc4.dmrpp", 365 | "MERRA2_300.tavgM_2d_int_Nx.201005.nc4.dmrpp", 366 | "MERRA2_300.tavgM_2d_int_Nx.201006.nc4.dmrpp", 367 | 
"MERRA2_300.tavgM_2d_int_Nx.201007.nc4.dmrpp", 368 | "MERRA2_300.tavgM_2d_int_Nx.201008.nc4.dmrpp", 369 | "MERRA2_300.tavgM_2d_int_Nx.201009.nc4.dmrpp", 370 | "MERRA2_300.tavgM_2d_int_Nx.201010.nc4.dmrpp", 371 | "MERRA2_300.tavgM_2d_int_Nx.201011.nc4.dmrpp", 372 | "MERRA2_300.tavgM_2d_int_Nx.201012.nc4.dmrpp", 373 | "MERRA2_400.tavgM_2d_int_Nx.201101.nc4.dmrpp", 374 | "MERRA2_400.tavgM_2d_int_Nx.201102.nc4.dmrpp", 375 | "MERRA2_400.tavgM_2d_int_Nx.201103.nc4.dmrpp", 376 | "MERRA2_400.tavgM_2d_int_Nx.201104.nc4.dmrpp", 377 | "MERRA2_400.tavgM_2d_int_Nx.201105.nc4.dmrpp", 378 | "MERRA2_400.tavgM_2d_int_Nx.201106.nc4.dmrpp", 379 | "MERRA2_400.tavgM_2d_int_Nx.201107.nc4.dmrpp", 380 | "MERRA2_400.tavgM_2d_int_Nx.201108.nc4.dmrpp", 381 | "MERRA2_400.tavgM_2d_int_Nx.201109.nc4.dmrpp", 382 | "MERRA2_400.tavgM_2d_int_Nx.201110.nc4.dmrpp", 383 | "MERRA2_400.tavgM_2d_int_Nx.201111.nc4.dmrpp", 384 | "MERRA2_400.tavgM_2d_int_Nx.201112.nc4.dmrpp", 385 | "MERRA2_400.tavgM_2d_int_Nx.201201.nc4.dmrpp", 386 | "MERRA2_400.tavgM_2d_int_Nx.201202.nc4.dmrpp", 387 | "MERRA2_400.tavgM_2d_int_Nx.201203.nc4.dmrpp", 388 | "MERRA2_400.tavgM_2d_int_Nx.201204.nc4.dmrpp", 389 | "MERRA2_400.tavgM_2d_int_Nx.201205.nc4.dmrpp", 390 | "MERRA2_400.tavgM_2d_int_Nx.201206.nc4.dmrpp", 391 | "MERRA2_400.tavgM_2d_int_Nx.201207.nc4.dmrpp", 392 | "MERRA2_400.tavgM_2d_int_Nx.201208.nc4.dmrpp", 393 | "MERRA2_400.tavgM_2d_int_Nx.201209.nc4.dmrpp", 394 | "MERRA2_400.tavgM_2d_int_Nx.201210.nc4.dmrpp", 395 | "MERRA2_400.tavgM_2d_int_Nx.201211.nc4.dmrpp", 396 | "MERRA2_400.tavgM_2d_int_Nx.201212.nc4.dmrpp", 397 | "MERRA2_400.tavgM_2d_int_Nx.201301.nc4.dmrpp", 398 | "MERRA2_400.tavgM_2d_int_Nx.201302.nc4.dmrpp", 399 | "MERRA2_400.tavgM_2d_int_Nx.201303.nc4.dmrpp", 400 | "MERRA2_400.tavgM_2d_int_Nx.201304.nc4.dmrpp", 401 | "MERRA2_400.tavgM_2d_int_Nx.201305.nc4.dmrpp", 402 | "MERRA2_400.tavgM_2d_int_Nx.201306.nc4.dmrpp", 403 | "MERRA2_400.tavgM_2d_int_Nx.201307.nc4.dmrpp", 404 | 
"MERRA2_400.tavgM_2d_int_Nx.201308.nc4.dmrpp", 405 | "MERRA2_400.tavgM_2d_int_Nx.201309.nc4.dmrpp", 406 | "MERRA2_400.tavgM_2d_int_Nx.201310.nc4.dmrpp", 407 | "MERRA2_400.tavgM_2d_int_Nx.201311.nc4.dmrpp", 408 | "MERRA2_400.tavgM_2d_int_Nx.201312.nc4.dmrpp", 409 | "MERRA2_400.tavgM_2d_int_Nx.201401.nc4.dmrpp", 410 | "MERRA2_400.tavgM_2d_int_Nx.201402.nc4.dmrpp", 411 | "MERRA2_400.tavgM_2d_int_Nx.201403.nc4.dmrpp", 412 | "MERRA2_400.tavgM_2d_int_Nx.201404.nc4.dmrpp", 413 | "MERRA2_400.tavgM_2d_int_Nx.201405.nc4.dmrpp", 414 | "MERRA2_400.tavgM_2d_int_Nx.201406.nc4.dmrpp", 415 | "MERRA2_400.tavgM_2d_int_Nx.201407.nc4.dmrpp", 416 | "MERRA2_400.tavgM_2d_int_Nx.201408.nc4.dmrpp", 417 | "MERRA2_400.tavgM_2d_int_Nx.201409.nc4.dmrpp", 418 | "MERRA2_400.tavgM_2d_int_Nx.201410.nc4.dmrpp", 419 | "MERRA2_400.tavgM_2d_int_Nx.201411.nc4.dmrpp", 420 | "MERRA2_400.tavgM_2d_int_Nx.201412.nc4.dmrpp", 421 | "MERRA2_400.tavgM_2d_int_Nx.201501.nc4.dmrpp", 422 | "MERRA2_400.tavgM_2d_int_Nx.201502.nc4.dmrpp", 423 | "MERRA2_400.tavgM_2d_int_Nx.201503.nc4.dmrpp", 424 | "MERRA2_400.tavgM_2d_int_Nx.201504.nc4.dmrpp", 425 | "MERRA2_400.tavgM_2d_int_Nx.201505.nc4.dmrpp", 426 | "MERRA2_400.tavgM_2d_int_Nx.201506.nc4.dmrpp", 427 | "MERRA2_400.tavgM_2d_int_Nx.201507.nc4.dmrpp", 428 | "MERRA2_400.tavgM_2d_int_Nx.201508.nc4.dmrpp", 429 | "MERRA2_400.tavgM_2d_int_Nx.201509.nc4.dmrpp", 430 | "MERRA2_400.tavgM_2d_int_Nx.201510.nc4.dmrpp", 431 | "MERRA2_400.tavgM_2d_int_Nx.201511.nc4.dmrpp", 432 | "MERRA2_400.tavgM_2d_int_Nx.201512.nc4.dmrpp", 433 | "MERRA2_400.tavgM_2d_int_Nx.201601.nc4.dmrpp", 434 | "MERRA2_400.tavgM_2d_int_Nx.201602.nc4.dmrpp", 435 | "MERRA2_400.tavgM_2d_int_Nx.201603.nc4.dmrpp", 436 | "MERRA2_400.tavgM_2d_int_Nx.201604.nc4.dmrpp", 437 | "MERRA2_400.tavgM_2d_int_Nx.201605.nc4.dmrpp", 438 | "MERRA2_400.tavgM_2d_int_Nx.201606.nc4.dmrpp", 439 | "MERRA2_400.tavgM_2d_int_Nx.201607.nc4.dmrpp"] 440 | -------------------------------------------------------------------------------- 
/python-xarray/airs_granules.py: -------------------------------------------------------------------------------- 1 | airs_366 = ["AIRS.2015.01.01.L3.RetStd_IR001.v6.0.11.0.G15013155825.nc.h5.dmrpp", 2 | "AIRS.2015.01.02.L3.RetStd_IR001.v6.0.11.0.G15005190621.nc.h5.dmrpp", 3 | "AIRS.2015.01.03.L3.RetStd_IR001.v6.0.11.0.G15006005549.nc.h5.dmrpp", 4 | "AIRS.2015.01.04.L3.RetStd_IR001.v6.0.11.0.G15006043115.nc.h5.dmrpp", 5 | "AIRS.2015.01.05.L3.RetStd_IR001.v6.0.11.0.G15007130423.nc.h5.dmrpp", 6 | "AIRS.2015.01.06.L3.RetStd_IR001.v6.0.11.0.G15007171215.nc.h5.dmrpp", 7 | "AIRS.2015.01.07.L3.RetStd_IR001.v6.0.11.0.G15008175227.nc.h5.dmrpp", 8 | "AIRS.2015.01.08.L3.RetStd_IR001.v6.0.11.0.G15009164045.nc.h5.dmrpp", 9 | "AIRS.2015.01.09.L3.RetStd_IR001.v6.0.11.0.G15010163004.nc.h5.dmrpp", 10 | "AIRS.2015.01.10.L3.RetStd_IR001.v6.0.11.0.G15011162002.nc.h5.dmrpp", 11 | "AIRS.2015.01.11.L3.RetStd_IR001.v6.0.11.0.G15012165225.nc.h5.dmrpp", 12 | "AIRS.2015.01.12.L3.RetStd_IR001.v6.0.11.0.G15021125448.nc.h5.dmrpp", 13 | "AIRS.2015.01.13.L3.RetStd_IR001.v6.0.11.0.G15021125153.nc.h5.dmrpp", 14 | "AIRS.2015.01.14.L3.RetStd_IR001.v6.0.11.0.G15021120249.nc.h5.dmrpp", 15 | "AIRS.2015.01.15.L3.RetStd_IR001.v6.0.11.0.G15016175121.nc.h5.dmrpp", 16 | "AIRS.2015.01.16.L3.RetStd_IR001.v6.0.11.0.G15017165029.nc.h5.dmrpp", 17 | "AIRS.2015.01.17.L3.RetStd_IR001.v6.0.11.0.G15018164040.nc.h5.dmrpp", 18 | "AIRS.2015.01.18.L3.RetStd_IR001.v6.0.11.0.G15019172037.nc.h5.dmrpp", 19 | "AIRS.2015.01.19.L3.RetStd_IR001.v6.0.11.0.G15020173159.nc.h5.dmrpp", 20 | "AIRS.2015.01.20.L3.RetStd_IR001.v6.0.11.0.G15021184554.nc.h5.dmrpp", 21 | "AIRS.2015.01.21.L3.RetStd_IR001.v6.0.11.0.G15022165534.nc.h5.dmrpp", 22 | "AIRS.2015.01.22.L3.RetStd_IR001.v6.0.11.0.G15023185212.nc.h5.dmrpp", 23 | "AIRS.2015.01.23.L3.RetStd_IR001.v6.0.11.0.G15024223141.nc.h5.dmrpp", 24 | "AIRS.2015.01.24.L3.RetStd_IR001.v6.0.11.0.G15025162144.nc.h5.dmrpp", 25 | "AIRS.2015.01.25.L3.RetStd_IR001.v6.0.11.0.G15026174547.nc.h5.dmrpp", 26 | 
"AIRS.2015.01.26.L3.RetStd_IR001.v6.0.11.0.G15027194507.nc.h5.dmrpp", 27 | "AIRS.2015.01.27.L3.RetStd_IR001.v6.0.11.0.G15028191220.nc.h5.dmrpp", 28 | "AIRS.2015.01.28.L3.RetStd_IR001.v6.0.11.0.G15029200444.nc.h5.dmrpp", 29 | "AIRS.2015.01.29.L3.RetStd_IR001.v6.0.11.0.G15033123447.nc.h5.dmrpp", 30 | "AIRS.2015.01.30.L3.RetStd_IR001.v6.0.11.0.G15033135335.nc.h5.dmrpp", 31 | "AIRS.2015.01.31.L3.RetStd_IR001.v6.0.11.0.G15033002627.nc.h5.dmrpp", 32 | "AIRS.2015.02.01.L3.RetStd_IR001.v6.0.11.0.G15033192757.nc.h5.dmrpp", 33 | "AIRS.2015.02.02.L3.RetStd_IR001.v6.0.11.0.G15034184154.nc.h5.dmrpp", 34 | "AIRS.2015.02.03.L3.RetStd_IR001.v6.0.11.0.G15036141942.nc.h5.dmrpp", 35 | "AIRS.2015.02.04.L3.RetStd_IR001.v6.0.11.0.G15040144931.nc.h5.dmrpp", 36 | "AIRS.2015.02.05.L3.RetStd_IR001.v6.0.11.0.G15040150906.nc.h5.dmrpp", 37 | "AIRS.2015.02.06.L3.RetStd_IR001.v6.0.11.0.G15040125632.nc.h5.dmrpp", 38 | "AIRS.2015.02.07.L3.RetStd_IR001.v6.0.11.0.G15040121547.nc.h5.dmrpp", 39 | "AIRS.2015.02.08.L3.RetStd_IR001.v6.0.11.0.G15040190512.nc.h5.dmrpp", 40 | "AIRS.2015.02.09.L3.RetStd_IR001.v6.0.11.0.G15041173532.nc.h5.dmrpp", 41 | "AIRS.2015.02.10.L3.RetStd_IR001.v6.0.11.0.G15042190902.nc.h5.dmrpp", 42 | "AIRS.2015.02.11.L3.RetStd_IR001.v6.0.11.0.G15043174648.nc.h5.dmrpp", 43 | "AIRS.2015.02.12.L3.RetStd_IR001.v6.0.11.0.G15044232815.nc.h5.dmrpp", 44 | "AIRS.2015.02.13.L3.RetStd_IR001.v6.0.11.0.G15049142314.nc.h5.dmrpp", 45 | "AIRS.2015.02.14.L3.RetStd_IR001.v6.0.11.0.G15049170508.nc.h5.dmrpp", 46 | "AIRS.2015.02.15.L3.RetStd_IR001.v6.0.11.0.G15075141809.nc.h5.dmrpp", 47 | "AIRS.2015.02.16.L3.RetStd_IR001.v6.0.11.0.G15075141321.nc.h5.dmrpp", 48 | "AIRS.2015.02.17.L3.RetStd_IR001.v6.0.11.0.G15075134502.nc.h5.dmrpp", 49 | "AIRS.2015.02.18.L3.RetStd_IR001.v6.0.11.0.G15050195102.nc.h5.dmrpp", 50 | "AIRS.2015.02.19.L3.RetStd_IR001.v6.0.11.0.G15051182920.nc.h5.dmrpp", 51 | "AIRS.2015.02.20.L3.RetStd_IR001.v6.0.11.0.G15052190632.nc.h5.dmrpp", 52 | 
"AIRS.2015.02.21.L3.RetStd_IR001.v6.0.11.0.G15054173653.nc.h5.dmrpp", 53 | "AIRS.2015.02.22.L3.RetStd_IR001.v6.0.11.0.G15054194443.nc.h5.dmrpp", 54 | "AIRS.2015.02.23.L3.RetStd_IR001.v6.0.11.0.G15055193409.nc.h5.dmrpp", 55 | "AIRS.2015.02.24.L3.RetStd_IR001.v6.0.11.0.G15056184735.nc.h5.dmrpp", 56 | "AIRS.2015.02.25.L3.RetStd_IR001.v6.0.11.0.G15057220709.nc.h5.dmrpp", 57 | "AIRS.2015.02.26.L3.RetStd_IR001.v6.0.11.0.G15058190744.nc.h5.dmrpp", 58 | "AIRS.2015.02.27.L3.RetStd_IR001.v6.0.11.0.G15075150450.nc.h5.dmrpp", 59 | "AIRS.2015.02.28.L3.RetStd_IR001.v6.0.11.0.G15075145526.nc.h5.dmrpp", 60 | "AIRS.2015.03.01.L3.RetStd_IR001.v6.0.11.0.G15075145757.nc.h5.dmrpp", 61 | "AIRS.2015.03.02.L3.RetStd_IR001.v6.0.11.0.G15063141640.nc.h5.dmrpp", 62 | "AIRS.2015.03.03.L3.RetStd_IR001.v6.0.11.0.G15063182510.nc.h5.dmrpp", 63 | "AIRS.2015.03.04.L3.RetStd_IR001.v6.0.11.0.G15064212219.nc.h5.dmrpp", 64 | "AIRS.2015.03.05.L3.RetStd_IR001.v6.0.11.0.G15068101551.nc.h5.dmrpp", 65 | "AIRS.2015.03.06.L3.RetStd_IR001.v6.0.11.0.G15066191432.nc.h5.dmrpp", 66 | "AIRS.2015.03.07.L3.RetStd_IR001.v6.0.11.0.G15067195418.nc.h5.dmrpp", 67 | "AIRS.2015.03.08.L3.RetStd_IR001.v6.0.11.0.G15068194038.nc.h5.dmrpp", 68 | "AIRS.2015.03.09.L3.RetStd_IR001.v6.0.11.0.G15070081349.nc.h5.dmrpp", 69 | "AIRS.2015.03.10.L3.RetStd_IR001.v6.0.11.0.G15070222745.nc.h5.dmrpp", 70 | "AIRS.2015.03.11.L3.RetStd_IR001.v6.0.11.0.G15071181425.nc.h5.dmrpp", 71 | "AIRS.2015.03.12.L3.RetStd_IR001.v6.0.11.0.G15075113344.nc.h5.dmrpp", 72 | "AIRS.2015.03.13.L3.RetStd_IR001.v6.0.11.0.G15075121613.nc.h5.dmrpp", 73 | "AIRS.2015.03.14.L3.RetStd_IR001.v6.0.11.0.G15075122550.nc.h5.dmrpp", 74 | "AIRS.2015.03.15.L3.RetStd_IR001.v6.0.11.0.G15075190312.nc.h5.dmrpp", 75 | "AIRS.2015.03.16.L3.RetStd_IR001.v6.0.11.0.G15076184035.nc.h5.dmrpp", 76 | "AIRS.2015.03.17.L3.RetStd_IR001.v6.0.11.0.G15077183049.nc.h5.dmrpp", 77 | "AIRS.2015.03.18.L3.RetStd_IR001.v6.0.11.0.G15086133503.nc.h5.dmrpp", 78 | 
"AIRS.2015.03.19.L3.RetStd_IR001.v6.0.11.0.G15086135823.nc.h5.dmrpp", 79 | "AIRS.2015.03.20.L3.RetStd_IR001.v6.0.11.0.G15082202212.nc.h5.dmrpp", 80 | "AIRS.2015.03.21.L3.RetStd_IR001.v6.0.11.0.G15082204057.nc.h5.dmrpp", 81 | "AIRS.2015.03.22.L3.RetStd_IR001.v6.0.11.0.G15082210050.nc.h5.dmrpp", 82 | "AIRS.2015.03.23.L3.RetStd_IR001.v6.0.31.0.G15183114312.nc.h5.dmrpp", 83 | "AIRS.2015.03.24.L3.RetStd_IR001.v6.0.31.0.G15184075842.nc.h5.dmrpp", 84 | "AIRS.2015.03.25.L3.RetStd_IR001.v6.0.31.0.G15187110451.nc.h5.dmrpp", 85 | "AIRS.2015.03.26.L3.RetStd_IR001.v6.0.31.0.G15187102922.nc.h5.dmrpp", 86 | "AIRS.2015.03.27.L3.RetStd_IR001.v6.0.31.0.G15185011318.nc.h5.dmrpp", 87 | "AIRS.2015.03.28.L3.RetStd_IR001.v6.0.31.0.G15187105355.nc.h5.dmrpp", 88 | "AIRS.2015.03.29.L3.RetStd_IR001.v6.0.31.0.G15187101830.nc.h5.dmrpp", 89 | "AIRS.2015.03.30.L3.RetStd_IR001.v6.0.31.0.G15187110506.nc.h5.dmrpp", 90 | "AIRS.2015.03.31.L3.RetStd_IR001.v6.0.31.0.G15187115701.nc.h5.dmrpp", 91 | "AIRS.2015.04.01.L3.RetStd_IR001.v6.0.31.0.G15187113437.nc.h5.dmrpp", 92 | "AIRS.2015.04.02.L3.RetStd_IR001.v6.0.31.0.G15187114846.nc.h5.dmrpp", 93 | "AIRS.2015.04.03.L3.RetStd_IR001.v6.0.31.0.G15188110828.nc.h5.dmrpp", 94 | "AIRS.2015.04.04.L3.RetStd_IR001.v6.0.31.0.G15188134210.nc.h5.dmrpp", 95 | "AIRS.2015.04.05.L3.RetStd_IR001.v6.0.31.0.G15188165256.nc.h5.dmrpp", 96 | "AIRS.2015.04.06.L3.RetStd_IR001.v6.0.31.0.G15189011837.nc.h5.dmrpp", 97 | "AIRS.2015.04.07.L3.RetStd_IR001.v6.0.31.0.G15189061953.nc.h5.dmrpp", 98 | "AIRS.2015.04.08.L3.RetStd_IR001.v6.0.31.0.G15189085146.nc.h5.dmrpp", 99 | "AIRS.2015.04.09.L3.RetStd_IR001.v6.0.31.0.G15189155326.nc.h5.dmrpp", 100 | "AIRS.2015.04.10.L3.RetStd_IR001.v6.0.31.0.G15190021354.nc.h5.dmrpp", 101 | "AIRS.2015.04.11.L3.RetStd_IR001.v6.0.31.0.G15190063508.nc.h5.dmrpp", 102 | "AIRS.2015.04.12.L3.RetStd_IR001.v6.0.31.0.G15190133730.nc.h5.dmrpp", 103 | "AIRS.2015.04.13.L3.RetStd_IR001.v6.0.31.0.G15190185721.nc.h5.dmrpp", 104 | 
"AIRS.2015.04.14.L3.RetStd_IR001.v6.0.31.0.G15191062646.nc.h5.dmrpp", 105 | "AIRS.2015.04.15.L3.RetStd_IR001.v6.0.31.0.G15195104032.nc.h5.dmrpp", 106 | "AIRS.2015.04.16.L3.RetStd_IR001.v6.0.31.0.G15195105823.nc.h5.dmrpp", 107 | "AIRS.2015.04.17.L3.RetStd_IR001.v6.0.31.0.G15195184345.nc.h5.dmrpp", 108 | "AIRS.2015.04.18.L3.RetStd_IR001.v6.0.31.0.G15196045958.nc.h5.dmrpp", 109 | "AIRS.2015.04.19.L3.RetStd_IR001.v6.0.31.0.G15196170529.nc.h5.dmrpp", 110 | "AIRS.2015.04.20.L3.RetStd_IR001.v6.0.31.0.G15197051810.nc.h5.dmrpp", 111 | "AIRS.2015.04.21.L3.RetStd_IR001.v6.0.31.0.G15197132455.nc.h5.dmrpp", 112 | "AIRS.2015.04.22.L3.RetStd_IR001.v6.0.31.0.G15198160545.nc.h5.dmrpp", 113 | "AIRS.2015.04.23.L3.RetStd_IR001.v6.0.31.0.G15198133400.nc.h5.dmrpp", 114 | "AIRS.2015.04.24.L3.RetStd_IR001.v6.0.31.0.G15198145552.nc.h5.dmrpp", 115 | "AIRS.2015.04.25.L3.RetStd_IR001.v6.0.31.0.G15199033539.nc.h5.dmrpp", 116 | "AIRS.2015.04.26.L3.RetStd_IR001.v6.0.31.0.G15199092136.nc.h5.dmrpp", 117 | "AIRS.2015.04.27.L3.RetStd_IR001.v6.0.31.0.G15201122501.nc.h5.dmrpp", 118 | "AIRS.2015.04.28.L3.RetStd_IR001.v6.0.31.0.G15201121931.nc.h5.dmrpp", 119 | "AIRS.2015.04.29.L3.RetStd_IR001.v6.0.31.0.G15200163343.nc.h5.dmrpp", 120 | "AIRS.2015.04.30.L3.RetStd_IR001.v6.0.31.0.G15205024531.nc.h5.dmrpp", 121 | "AIRS.2015.05.01.L3.RetStd_IR001.v6.0.31.0.G15205024629.nc.h5.dmrpp", 122 | "AIRS.2015.05.02.L3.RetStd_IR001.v6.0.31.0.G15204070908.nc.h5.dmrpp", 123 | "AIRS.2015.05.03.L3.RetStd_IR001.v6.0.31.0.G15204151052.nc.h5.dmrpp", 124 | "AIRS.2015.05.04.L3.RetStd_IR001.v6.0.31.0.G15205031847.nc.h5.dmrpp", 125 | "AIRS.2015.05.05.L3.RetStd_IR001.v6.0.31.0.G15205081357.nc.h5.dmrpp", 126 | "AIRS.2015.05.06.L3.RetStd_IR001.v6.0.31.0.G15205145040.nc.h5.dmrpp", 127 | "AIRS.2015.05.07.L3.RetStd_IR001.v6.0.31.0.G15206003719.nc.h5.dmrpp", 128 | "AIRS.2015.05.08.L3.RetStd_IR001.v6.0.31.0.G15206071933.nc.h5.dmrpp", 129 | "AIRS.2015.05.09.L3.RetStd_IR001.v6.0.31.0.G15206170855.nc.h5.dmrpp", 130 | 
"AIRS.2015.05.10.L3.RetStd_IR001.v6.0.31.0.G15231111646.nc.h5.dmrpp", 131 | "AIRS.2015.05.11.L3.RetStd_IR001.v6.0.31.0.G15231111944.nc.h5.dmrpp", 132 | "AIRS.2015.05.12.L3.RetStd_IR001.v6.0.31.0.G15212020635.nc.h5.dmrpp", 133 | "AIRS.2015.05.13.L3.RetStd_IR001.v6.0.31.0.G15212053108.nc.h5.dmrpp", 134 | "AIRS.2015.05.14.L3.RetStd_IR001.v6.0.31.0.G15212191403.nc.h5.dmrpp", 135 | "AIRS.2015.05.15.L3.RetStd_IR001.v6.0.31.0.G15213005119.nc.h5.dmrpp", 136 | "AIRS.2015.05.16.L3.RetStd_IR001.v6.0.31.0.G15213143012.nc.h5.dmrpp", 137 | "AIRS.2015.05.17.L3.RetStd_IR001.v6.0.31.0.G15213221245.nc.h5.dmrpp", 138 | "AIRS.2015.05.18.L3.RetStd_IR001.v6.0.31.0.G15214034207.nc.h5.dmrpp", 139 | "AIRS.2015.05.19.L3.RetStd_IR001.v6.0.31.0.G15215112758.nc.h5.dmrpp", 140 | "AIRS.2015.05.20.L3.RetStd_IR001.v6.0.31.0.G15219212804.nc.h5.dmrpp", 141 | "AIRS.2015.05.21.L3.RetStd_IR001.v6.0.31.0.G15220053350.nc.h5.dmrpp", 142 | "AIRS.2015.05.22.L3.RetStd_IR001.v6.0.31.0.G15225112057.nc.h5.dmrpp", 143 | "AIRS.2015.05.23.L3.RetStd_IR001.v6.0.31.0.G15225104912.nc.h5.dmrpp", 144 | "AIRS.2015.05.24.L3.RetStd_IR001.v6.0.31.0.G15221051937.nc.h5.dmrpp", 145 | "AIRS.2015.05.25.L3.RetStd_IR001.v6.0.31.0.G15221133012.nc.h5.dmrpp", 146 | "AIRS.2015.05.26.L3.RetStd_IR001.v6.0.31.0.G15225113048.nc.h5.dmrpp", 147 | "AIRS.2015.05.27.L3.RetStd_IR001.v6.0.31.0.G15225111327.nc.h5.dmrpp", 148 | "AIRS.2015.05.28.L3.RetStd_IR001.v6.0.31.0.G15225114613.nc.h5.dmrpp", 149 | "AIRS.2015.05.29.L3.RetStd_IR001.v6.0.31.0.G15225110844.nc.h5.dmrpp", 150 | "AIRS.2015.05.30.L3.RetStd_IR001.v6.0.31.0.G15225140916.nc.h5.dmrpp", 151 | "AIRS.2015.05.31.L3.RetStd_IR001.v6.0.31.0.G15225165303.nc.h5.dmrpp", 152 | "AIRS.2015.06.01.L3.RetStd_IR001.v6.0.31.0.G15225162659.nc.h5.dmrpp", 153 | "AIRS.2015.06.02.L3.RetStd_IR001.v6.0.31.0.G15199070943.nc.h5.dmrpp", 154 | "AIRS.2015.06.03.L3.RetStd_IR001.v6.0.31.0.G15201082424.nc.h5.dmrpp", 155 | "AIRS.2015.06.04.L3.RetStd_IR001.v6.0.31.0.G15201143444.nc.h5.dmrpp", 156 | 
"AIRS.2015.06.05.L3.RetStd_IR001.v6.0.31.0.G15201143404.nc.h5.dmrpp", 157 | "AIRS.2015.06.06.L3.RetStd_IR001.v6.0.31.0.G15201164403.nc.h5.dmrpp", 158 | "AIRS.2015.06.07.L3.RetStd_IR001.v6.0.31.0.G15201200746.nc.h5.dmrpp", 159 | "AIRS.2015.06.08.L3.RetStd_IR001.v6.0.31.0.G15202125313.nc.h5.dmrpp", 160 | "AIRS.2015.06.09.L3.RetStd_IR001.v6.0.31.0.G15202125530.nc.h5.dmrpp", 161 | "AIRS.2015.06.10.L3.RetStd_IR001.v6.0.31.0.G15202165145.nc.h5.dmrpp", 162 | "AIRS.2015.06.11.L3.RetStd_IR001.v6.0.31.0.G15203000739.nc.h5.dmrpp", 163 | "AIRS.2015.06.12.L3.RetStd_IR001.v6.0.31.0.G15203042847.nc.h5.dmrpp", 164 | "AIRS.2015.06.13.L3.RetStd_IR001.v6.0.31.0.G15207022701.nc.h5.dmrpp", 165 | "AIRS.2015.06.14.L3.RetStd_IR001.v6.0.31.0.G15207055959.nc.h5.dmrpp", 166 | "AIRS.2015.06.15.L3.RetStd_IR001.v6.0.31.0.G15208173612.nc.h5.dmrpp", 167 | "AIRS.2015.06.16.L3.RetStd_IR001.v6.0.31.0.G15208195104.nc.h5.dmrpp", 168 | "AIRS.2015.06.17.L3.RetStd_IR001.v6.0.31.0.G15209192358.nc.h5.dmrpp", 169 | "AIRS.2015.06.18.L3.RetStd_IR001.v6.0.31.0.G15210073815.nc.h5.dmrpp", 170 | "AIRS.2015.06.19.L3.RetStd_IR001.v6.0.31.0.G15211041208.nc.h5.dmrpp", 171 | "AIRS.2015.06.20.L3.RetStd_IR001.v6.0.31.0.G15215201033.nc.h5.dmrpp", 172 | "AIRS.2015.06.21.L3.RetStd_IR001.v6.0.31.0.G15216100600.nc.h5.dmrpp", 173 | "AIRS.2015.06.22.L3.RetStd_IR001.v6.0.31.0.G15216144527.nc.h5.dmrpp", 174 | "AIRS.2015.06.23.L3.RetStd_IR001.v6.0.31.0.G15217010220.nc.h5.dmrpp", 175 | "AIRS.2015.06.24.L3.RetStd_IR001.v6.0.31.0.G15217062101.nc.h5.dmrpp", 176 | "AIRS.2015.06.25.L3.RetStd_IR001.v6.0.31.0.G15218202528.nc.h5.dmrpp", 177 | "AIRS.2015.06.26.L3.RetStd_IR001.v6.0.31.0.G15226102839.nc.h5.dmrpp", 178 | "AIRS.2015.06.27.L3.RetStd_IR001.v6.0.31.0.G15218202454.nc.h5.dmrpp", 179 | "AIRS.2015.06.28.L3.RetStd_IR001.v6.0.31.0.G15226102900.nc.h5.dmrpp", 180 | "AIRS.2015.06.29.L3.RetStd_IR001.v6.0.31.0.G15220000354.nc.h5.dmrpp", 181 | "AIRS.2015.06.30.L3.RetStd_IR001.v6.0.31.0.G15219235948.nc.h5.dmrpp", 182 | 
"AIRS.2015.07.01.L3.RetStd_IR001.v6.0.31.0.G15219232452.nc.h5.dmrpp", 183 | "AIRS.2015.07.02.L3.RetStd_IR001.v6.0.31.0.G15226104941.nc.h5.dmrpp", 184 | "AIRS.2015.07.03.L3.RetStd_IR001.v6.0.31.0.G15226110506.nc.h5.dmrpp", 185 | "AIRS.2015.07.04.L3.RetStd_IR001.v6.0.31.0.G15188000111.nc.h5.dmrpp", 186 | "AIRS.2015.07.05.L3.RetStd_IR001.v6.0.31.0.G15188014955.nc.h5.dmrpp", 187 | "AIRS.2015.07.06.L3.RetStd_IR001.v6.0.31.0.G15190051959.nc.h5.dmrpp", 188 | "AIRS.2015.07.07.L3.RetStd_IR001.v6.0.31.0.G15190070030.nc.h5.dmrpp", 189 | "AIRS.2015.07.08.L3.RetStd_IR001.v6.0.31.0.G15191040651.nc.h5.dmrpp", 190 | "AIRS.2015.07.09.L3.RetStd_IR001.v6.0.31.0.G15191182008.nc.h5.dmrpp", 191 | "AIRS.2015.07.10.L3.RetStd_IR001.v6.0.31.0.G15192172639.nc.h5.dmrpp", 192 | "AIRS.2015.07.11.L3.RetStd_IR001.v6.0.31.0.G15193172658.nc.h5.dmrpp", 193 | "AIRS.2015.07.12.L3.RetStd_IR001.v6.0.31.0.G15205030539.nc.h5.dmrpp", 194 | "AIRS.2015.07.13.L3.RetStd_IR001.v6.0.31.0.G15198111139.nc.h5.dmrpp", 195 | "AIRS.2015.07.14.L3.RetStd_IR001.v6.0.31.0.G15197075159.nc.h5.dmrpp", 196 | "AIRS.2015.07.15.L3.RetStd_IR001.v6.0.31.0.G15198034817.nc.h5.dmrpp", 197 | "AIRS.2015.07.16.L3.RetStd_IR001.v6.0.31.0.G15199051509.nc.h5.dmrpp", 198 | "AIRS.2015.07.17.L3.RetStd_IR001.v6.0.31.0.G15200003256.nc.h5.dmrpp", 199 | "AIRS.2015.07.18.L3.RetStd_IR001.v6.0.31.0.G15200234455.nc.h5.dmrpp", 200 | "AIRS.2015.07.19.L3.RetStd_IR001.v6.0.31.0.G15202033842.nc.h5.dmrpp", 201 | "AIRS.2015.07.20.L3.RetStd_IR001.v6.0.31.0.G15203034310.nc.h5.dmrpp", 202 | "AIRS.2015.07.21.L3.RetStd_IR001.v6.0.31.0.G15204035156.nc.h5.dmrpp", 203 | "AIRS.2015.07.22.L3.RetStd_IR001.v6.0.31.0.G15205034133.nc.h5.dmrpp", 204 | "AIRS.2015.07.23.L3.RetStd_IR001.v6.0.31.0.G15206050004.nc.h5.dmrpp", 205 | "AIRS.2015.07.24.L3.RetStd_IR001.v6.0.31.0.G15207115940.nc.h5.dmrpp", 206 | "AIRS.2015.07.25.L3.RetStd_IR001.v6.0.31.0.G15209144145.nc.h5.dmrpp", 207 | "AIRS.2015.07.26.L3.RetStd_IR001.v6.0.31.0.G15209182734.nc.h5.dmrpp", 208 | 
"AIRS.2015.07.27.L3.RetStd_IR001.v6.0.31.0.G15210112814.nc.h5.dmrpp", 209 | "AIRS.2015.07.28.L3.RetStd_IR001.v6.0.31.0.G15211003011.nc.h5.dmrpp", 210 | "AIRS.2015.07.29.L3.RetStd_IR001.v6.0.31.0.G15212175054.nc.h5.dmrpp", 211 | "AIRS.2015.07.30.L3.RetStd_IR001.v6.0.31.0.G15231110946.nc.h5.dmrpp", 212 | "AIRS.2015.07.31.L3.RetStd_IR001.v6.0.31.0.G15213234252.nc.h5.dmrpp", 213 | "AIRS.2015.08.01.L3.RetStd_IR001.v6.0.31.0.G15214230515.nc.h5.dmrpp", 214 | "AIRS.2015.08.02.L3.RetStd_IR001.v6.0.31.0.G15215204200.nc.h5.dmrpp", 215 | "AIRS.2015.08.03.L3.RetStd_IR001.v6.0.31.0.G15216225225.nc.h5.dmrpp", 216 | "AIRS.2015.08.04.L3.RetStd_IR001.v6.0.31.0.G15218141841.nc.h5.dmrpp", 217 | "AIRS.2015.08.05.L3.RetStd_IR001.v6.0.31.0.G15218212447.nc.h5.dmrpp", 218 | "AIRS.2015.08.06.L3.RetStd_IR001.v6.0.31.0.G15223024315.nc.h5.dmrpp", 219 | "AIRS.2015.08.07.L3.RetStd_IR001.v6.0.31.0.G15223085212.nc.h5.dmrpp", 220 | "AIRS.2015.08.08.L3.RetStd_IR001.v6.0.31.0.G15223194053.nc.h5.dmrpp", 221 | "AIRS.2015.08.09.L3.RetStd_IR001.v6.0.31.0.G15224032531.nc.h5.dmrpp", 222 | "AIRS.2015.08.10.L3.RetStd_IR001.v6.0.31.0.G15224103409.nc.h5.dmrpp", 223 | "AIRS.2015.08.11.L3.RetStd_IR001.v6.0.31.0.G15225012119.nc.h5.dmrpp", 224 | "AIRS.2015.08.12.L3.RetStd_IR001.v6.0.31.0.G15225231043.nc.h5.dmrpp", 225 | "AIRS.2015.08.13.L3.RetStd_IR001.v6.0.31.0.G15226183747.nc.h5.dmrpp", 226 | "AIRS.2015.08.14.L3.RetStd_IR001.v6.0.31.0.G15229090959.nc.h5.dmrpp", 227 | "AIRS.2015.08.15.L3.RetStd_IR001.v6.0.31.0.G15229091004.nc.h5.dmrpp", 228 | "AIRS.2015.08.16.L3.RetStd_IR001.v6.0.31.0.G15229181146.nc.h5.dmrpp", 229 | "AIRS.2015.08.17.L3.RetStd_IR001.v6.0.31.0.G15230173908.nc.h5.dmrpp", 230 | "AIRS.2015.08.18.L3.RetStd_IR001.v6.0.31.0.G15231182643.nc.h5.dmrpp", 231 | "AIRS.2015.08.19.L3.RetStd_IR001.v6.0.31.0.G15232181051.nc.h5.dmrpp", 232 | "AIRS.2015.08.20.L3.RetStd_IR001.v6.0.31.0.G15233191751.nc.h5.dmrpp", 233 | "AIRS.2015.08.21.L3.RetStd_IR001.v6.0.31.0.G15234191854.nc.h5.dmrpp", 234 | 
"AIRS.2015.08.22.L3.RetStd_IR001.v6.0.31.0.G15235180412.nc.h5.dmrpp", 235 | "AIRS.2015.08.23.L3.RetStd_IR001.v6.0.31.0.G15239122851.nc.h5.dmrpp", 236 | "AIRS.2015.08.24.L3.RetStd_IR001.v6.0.31.0.G15239122612.nc.h5.dmrpp", 237 | "AIRS.2015.08.25.L3.RetStd_IR001.v6.0.31.0.G15240170412.nc.h5.dmrpp", 238 | "AIRS.2015.08.26.L3.RetStd_IR001.v6.0.31.0.G15240170318.nc.h5.dmrpp", 239 | "AIRS.2015.08.27.L3.RetStd_IR001.v6.0.31.0.G15240181026.nc.h5.dmrpp", 240 | "AIRS.2015.08.28.L3.RetStd_IR001.v6.0.31.0.G15241175425.nc.h5.dmrpp", 241 | "AIRS.2015.08.29.L3.RetStd_IR001.v6.0.31.0.G15244124618.nc.h5.dmrpp", 242 | "AIRS.2015.08.30.L3.RetStd_IR001.v6.0.31.0.G15244185832.nc.h5.dmrpp", 243 | "AIRS.2015.08.31.L3.RetStd_IR001.v6.0.31.0.G15245121721.nc.h5.dmrpp", 244 | "AIRS.2015.09.01.L3.RetStd_IR001.v6.0.31.0.G15246055428.nc.h5.dmrpp", 245 | "AIRS.2015.09.02.L3.RetStd_IR001.v6.0.31.0.G15246185957.nc.h5.dmrpp", 246 | "AIRS.2015.09.03.L3.RetStd_IR001.v6.0.31.0.G15251121607.nc.h5.dmrpp", 247 | "AIRS.2015.09.04.L3.RetStd_IR001.v6.0.31.0.G15251123039.nc.h5.dmrpp", 248 | "AIRS.2015.09.05.L3.RetStd_IR001.v6.0.31.0.G15249182655.nc.h5.dmrpp", 249 | "AIRS.2015.09.06.L3.RetStd_IR001.v6.0.31.0.G15250191403.nc.h5.dmrpp", 250 | "AIRS.2015.09.07.L3.RetStd_IR001.v6.0.31.0.G15251192912.nc.h5.dmrpp", 251 | "AIRS.2015.09.08.L3.RetStd_IR001.v6.0.31.0.G15252181912.nc.h5.dmrpp", 252 | "AIRS.2015.09.09.L3.RetStd_IR001.v6.0.31.0.G15253185245.nc.h5.dmrpp", 253 | "AIRS.2015.09.10.L3.RetStd_IR001.v6.0.31.0.G15254182904.nc.h5.dmrpp", 254 | "AIRS.2015.09.11.L3.RetStd_IR001.v6.0.31.0.G15255181510.nc.h5.dmrpp", 255 | "AIRS.2015.09.12.L3.RetStd_IR001.v6.0.31.0.G15256180147.nc.h5.dmrpp", 256 | "AIRS.2015.09.13.L3.RetStd_IR001.v6.0.31.0.G15259120130.nc.h5.dmrpp", 257 | "AIRS.2015.09.14.L3.RetStd_IR001.v6.0.31.0.G15259120757.nc.h5.dmrpp", 258 | "AIRS.2015.09.15.L3.RetStd_IR001.v6.0.31.0.G15259183006.nc.h5.dmrpp", 259 | "AIRS.2015.09.16.L3.RetStd_IR001.v6.0.31.0.G15260173550.nc.h5.dmrpp", 260 | 
"AIRS.2015.09.17.L3.RetStd_IR001.v6.0.31.0.G15265195548.nc.h5.dmrpp", 261 | "AIRS.2015.09.18.L3.RetStd_IR001.v6.0.31.0.G15266052228.nc.h5.dmrpp", 262 | "AIRS.2015.09.19.L3.RetStd_IR001.v6.0.31.0.G15286123632.nc.h5.dmrpp", 263 | "AIRS.2015.09.20.L3.RetStd_IR001.v6.0.31.0.G15265172219.nc.h5.dmrpp", 264 | "AIRS.2015.09.21.L3.RetStd_IR001.v6.0.31.0.G15281122640.nc.h5.dmrpp", 265 | "AIRS.2015.09.22.L3.RetStd_IR001.v6.0.31.0.G15281123620.nc.h5.dmrpp", 266 | "AIRS.2015.09.23.L3.RetStd_IR001.v6.0.31.0.G15281123841.nc.h5.dmrpp", 267 | "AIRS.2015.09.24.L3.RetStd_IR001.v6.0.31.0.G15280122823.nc.h5.dmrpp", 268 | "AIRS.2015.09.25.L3.RetStd_IR001.v6.0.31.0.G15280125816.nc.h5.dmrpp", 269 | "AIRS.2015.09.26.L3.RetStd_IR001.v6.0.31.0.G15280134337.nc.h5.dmrpp", 270 | "AIRS.2015.09.27.L3.RetStd_IR001.v6.0.31.0.G15280130827.nc.h5.dmrpp", 271 | "AIRS.2015.09.28.L3.RetStd_IR001.v6.0.31.0.G15272182749.nc.h5.dmrpp", 272 | "AIRS.2015.09.29.L3.RetStd_IR001.v6.0.31.0.G15279133600.nc.h5.dmrpp", 273 | "AIRS.2015.09.30.L3.RetStd_IR001.v6.0.31.0.G15279132236.nc.h5.dmrpp", 274 | "AIRS.2015.10.01.L3.RetStd_IR001.v6.0.31.0.G15275180621.nc.h5.dmrpp", 275 | "AIRS.2015.10.02.L3.RetStd_IR001.v6.0.31.0.G15276183800.nc.h5.dmrpp", 276 | "AIRS.2015.10.03.L3.RetStd_IR001.v6.0.31.0.G15277183928.nc.h5.dmrpp", 277 | "AIRS.2015.10.04.L3.RetStd_IR001.v6.0.31.0.G15279132537.nc.h5.dmrpp", 278 | "AIRS.2015.10.05.L3.RetStd_IR001.v6.0.31.0.G15280122741.nc.h5.dmrpp", 279 | "AIRS.2015.10.06.L3.RetStd_IR001.v6.0.31.0.G15281171105.nc.h5.dmrpp", 280 | "AIRS.2015.10.07.L3.RetStd_IR001.v6.0.31.0.G15281194558.nc.h5.dmrpp", 281 | "AIRS.2015.10.08.L3.RetStd_IR001.v6.0.31.0.G15282181604.nc.h5.dmrpp", 282 | "AIRS.2015.10.09.L3.RetStd_IR001.v6.0.31.0.G15283180208.nc.h5.dmrpp", 283 | "AIRS.2015.10.10.L3.RetStd_IR001.v6.0.31.0.G15284180331.nc.h5.dmrpp", 284 | "AIRS.2015.10.11.L3.RetStd_IR001.v6.0.31.0.G15286170440.nc.h5.dmrpp", 285 | "AIRS.2015.10.12.L3.RetStd_IR001.v6.0.31.0.G15286201628.nc.h5.dmrpp", 286 | 
"AIRS.2015.10.13.L3.RetStd_IR001.v6.0.31.0.G15287180710.nc.h5.dmrpp", 287 | "AIRS.2015.10.14.L3.RetStd_IR001.v6.0.31.0.G15288181532.nc.h5.dmrpp", 288 | "AIRS.2015.10.15.L3.RetStd_IR001.v6.0.31.0.G15292105844.nc.h5.dmrpp", 289 | "AIRS.2015.10.16.L3.RetStd_IR001.v6.0.31.0.G15292111218.nc.h5.dmrpp", 290 | "AIRS.2015.10.17.L3.RetStd_IR001.v6.0.31.0.G15291213341.nc.h5.dmrpp", 291 | "AIRS.2015.10.18.L3.RetStd_IR001.v6.0.31.0.G15292183048.nc.h5.dmrpp", 292 | "AIRS.2015.10.19.L3.RetStd_IR001.v6.0.31.0.G15293180703.nc.h5.dmrpp", 293 | "AIRS.2015.10.20.L3.RetStd_IR001.v6.0.31.0.G15294183756.nc.h5.dmrpp", 294 | "AIRS.2015.10.21.L3.RetStd_IR001.v6.0.31.0.G15296182331.nc.h5.dmrpp", 295 | "AIRS.2015.10.22.L3.RetStd_IR001.v6.0.31.0.G15299105437.nc.h5.dmrpp", 296 | "AIRS.2015.10.23.L3.RetStd_IR001.v6.0.31.0.G15299110134.nc.h5.dmrpp", 297 | "AIRS.2015.10.24.L3.RetStd_IR001.v6.0.31.0.G15299105522.nc.h5.dmrpp", 298 | "AIRS.2015.10.25.L3.RetStd_IR001.v6.0.31.0.G15299190428.nc.h5.dmrpp", 299 | "AIRS.2015.10.26.L3.RetStd_IR001.v6.0.31.0.G15300185045.nc.h5.dmrpp", 300 | "AIRS.2015.10.27.L3.RetStd_IR001.v6.0.31.0.G15301185928.nc.h5.dmrpp", 301 | "AIRS.2015.10.28.L3.RetStd_IR001.v6.0.31.0.G15302183008.nc.h5.dmrpp", 302 | "AIRS.2015.10.29.L3.RetStd_IR001.v6.0.31.0.G15303183201.nc.h5.dmrpp", 303 | "AIRS.2015.10.30.L3.RetStd_IR001.v6.0.31.0.G15304181811.nc.h5.dmrpp", 304 | "AIRS.2015.10.31.L3.RetStd_IR001.v6.0.31.0.G15305182054.nc.h5.dmrpp", 305 | "AIRS.2015.11.01.L3.RetStd_IR001.v6.0.31.0.G15306172043.nc.h5.dmrpp", 306 | "AIRS.2015.11.02.L3.RetStd_IR001.v6.0.31.0.G15307173725.nc.h5.dmrpp", 307 | "AIRS.2015.11.03.L3.RetStd_IR001.v6.0.31.0.G15308184039.nc.h5.dmrpp", 308 | "AIRS.2015.11.04.L3.RetStd_IR001.v6.0.31.0.G15309175722.nc.h5.dmrpp", 309 | "AIRS.2015.11.05.L3.RetStd_IR001.v6.0.31.0.G15310174156.nc.h5.dmrpp", 310 | "AIRS.2015.11.06.L3.RetStd_IR001.v6.0.31.0.G15311174328.nc.h5.dmrpp", 311 | "AIRS.2015.11.07.L3.RetStd_IR001.v6.0.31.0.G15313022554.nc.h5.dmrpp", 312 | 
"AIRS.2015.11.08.L3.RetStd_IR001.v6.0.31.0.G15313181728.nc.h5.dmrpp", 313 | "AIRS.2015.11.09.L3.RetStd_IR001.v6.0.31.0.G15314173250.nc.h5.dmrpp", 314 | "AIRS.2015.11.10.L3.RetStd_IR001.v6.0.31.0.G15317190829.nc.h5.dmrpp", 315 | "AIRS.2015.11.11.L3.RetStd_IR001.v6.0.31.0.G15317202128.nc.h5.dmrpp", 316 | "AIRS.2015.11.12.L3.RetStd_IR001.v6.0.31.0.G15317210501.nc.h5.dmrpp", 317 | "AIRS.2015.11.13.L3.RetStd_IR001.v6.0.31.0.G15318181838.nc.h5.dmrpp", 318 | "AIRS.2015.11.14.L3.RetStd_IR001.v6.0.31.0.G15319171913.nc.h5.dmrpp", 319 | "AIRS.2015.11.15.L3.RetStd_IR001.v6.0.31.0.G15320174853.nc.h5.dmrpp", 320 | "AIRS.2015.11.16.L3.RetStd_IR001.v6.0.31.0.G15321175428.nc.h5.dmrpp", 321 | "AIRS.2015.11.17.L3.RetStd_IR001.v6.0.31.0.G15322173634.nc.h5.dmrpp", 322 | "AIRS.2015.11.18.L3.RetStd_IR001.v6.0.31.0.G15323173646.nc.h5.dmrpp", 323 | "AIRS.2015.11.19.L3.RetStd_IR001.v6.0.31.0.G15327171246.nc.h5.dmrpp", 324 | "AIRS.2015.11.20.L3.RetStd_IR001.v6.0.31.0.G15327174304.nc.h5.dmrpp", 325 | "AIRS.2015.11.21.L3.RetStd_IR001.v6.0.31.0.G15327175839.nc.h5.dmrpp", 326 | "AIRS.2015.11.22.L3.RetStd_IR001.v6.0.31.0.G15328160009.nc.h5.dmrpp", 327 | "AIRS.2015.11.23.L3.RetStd_IR001.v6.0.31.0.G15328171702.nc.h5.dmrpp", 328 | "AIRS.2015.11.24.L3.RetStd_IR001.v6.0.31.0.G15329174949.nc.h5.dmrpp", 329 | "AIRS.2015.11.25.L3.RetStd_IR001.v6.0.31.0.G15338133714.nc.h5.dmrpp", 330 | "AIRS.2015.11.26.L3.RetStd_IR001.v6.0.31.0.G15338140629.nc.h5.dmrpp", 331 | "AIRS.2015.11.27.L3.RetStd_IR001.v6.0.31.0.G15341130700.nc.h5.dmrpp", 332 | "AIRS.2015.11.28.L3.RetStd_IR001.v6.0.31.0.G15334014939.nc.h5.dmrpp", 333 | "AIRS.2015.11.29.L3.RetStd_IR001.v6.0.31.0.G15335015610.nc.h5.dmrpp", 334 | "AIRS.2015.11.30.L3.RetStd_IR001.v6.0.31.0.G15335173247.nc.h5.dmrpp", 335 | "AIRS.2015.12.01.L3.RetStd_IR001.v6.0.31.0.G15336180328.nc.h5.dmrpp", 336 | "AIRS.2015.12.02.L3.RetStd_IR001.v6.0.31.0.G15337191146.nc.h5.dmrpp", 337 | "AIRS.2015.12.03.L3.RetStd_IR001.v6.0.31.0.G15338200027.nc.h5.dmrpp", 338 | 
"AIRS.2015.12.04.L3.RetStd_IR001.v6.0.31.0.G15339182910.nc.h5.dmrpp", 339 | "AIRS.2015.12.05.L3.RetStd_IR001.v6.0.31.0.G15340172926.nc.h5.dmrpp", 340 | "AIRS.2015.12.06.L3.RetStd_IR001.v6.0.31.0.G15341175309.nc.h5.dmrpp", 341 | "AIRS.2015.12.07.L3.RetStd_IR001.v6.0.31.0.G15342173411.nc.h5.dmrpp", 342 | "AIRS.2015.12.08.L3.RetStd_IR001.v6.0.31.0.G15343174548.nc.h5.dmrpp", 343 | "AIRS.2015.12.09.L3.RetStd_IR001.v6.0.31.0.G15344174447.nc.h5.dmrpp", 344 | "AIRS.2015.12.10.L3.RetStd_IR001.v6.0.31.0.G15345173547.nc.h5.dmrpp", 345 | "AIRS.2015.12.11.L3.RetStd_IR001.v6.0.31.0.G15346173637.nc.h5.dmrpp", 346 | "AIRS.2015.12.12.L3.RetStd_IR001.v6.0.31.0.G15347173750.nc.h5.dmrpp", 347 | "AIRS.2015.12.13.L3.RetStd_IR001.v6.0.31.0.G15349151342.nc.h5.dmrpp", 348 | "AIRS.2015.12.14.L3.RetStd_IR001.v6.0.31.0.G15349183244.nc.h5.dmrpp", 349 | "AIRS.2015.12.15.L3.RetStd_IR001.v6.0.31.0.G15350164838.nc.h5.dmrpp", 350 | "AIRS.2015.12.16.L3.RetStd_IR001.v6.0.31.0.G15351172052.nc.h5.dmrpp", 351 | "AIRS.2015.12.17.L3.RetStd_IR001.v6.0.31.0.G15355185144.nc.h5.dmrpp", 352 | "AIRS.2015.12.18.L3.RetStd_IR001.v6.0.31.0.G15353180234.nc.h5.dmrpp", 353 | "AIRS.2015.12.19.L3.RetStd_IR001.v6.0.31.0.G15357151707.nc.h5.dmrpp", 354 | "AIRS.2015.12.20.L3.RetStd_IR001.v6.0.31.0.G15357153310.nc.h5.dmrpp", 355 | "AIRS.2015.12.21.L3.RetStd_IR001.v6.0.31.0.G15357154855.nc.h5.dmrpp", 356 | "AIRS.2015.12.22.L3.RetStd_IR001.v6.0.31.0.G15357171531.nc.h5.dmrpp", 357 | "AIRS.2015.12.23.L3.RetStd_IR001.v6.0.31.0.G15358171257.nc.h5.dmrpp", 358 | "AIRS.2015.12.24.L3.RetStd_IR001.v6.0.31.0.G15359171421.nc.h5.dmrpp", 359 | "AIRS.2015.12.25.L3.RetStd_IR001.v6.0.31.0.G15362153615.nc.h5.dmrpp", 360 | "AIRS.2015.12.26.L3.RetStd_IR001.v6.0.31.0.G15362202938.nc.h5.dmrpp", 361 | "AIRS.2015.12.27.L3.RetStd_IR001.v6.0.31.0.G15363023350.nc.h5.dmrpp", 362 | "AIRS.2015.12.28.L3.RetStd_IR001.v6.0.31.0.G15363170846.nc.h5.dmrpp", 363 | "AIRS.2015.12.29.L3.RetStd_IR001.v6.0.31.0.G15364172519.nc.h5.dmrpp", 364 | 
"AIRS.2015.12.30.L3.RetStd_IR001.v6.0.31.0.G16116184203.nc.h5.dmrpp", 365 | "AIRS.2015.12.31.L3.RetStd_IR001.v6.0.31.0.G16118131039.nc.h5.dmrpp"] 366 | --------------------------------------------------------------------------------