├── .gitignore ├── CONTRIBUTING.md ├── Install.md ├── LICENSE.md ├── README.md ├── data ├── grassdata │ └── temp │ │ └── PERMANENT │ │ ├── .bashrc │ │ ├── DEFAULT_WIND │ │ ├── MYNAME │ │ ├── VAR │ │ └── WIND ├── n35w125_wth.bin.bz2 ├── n35w125_wth.ctl └── nAmerica_GRWDL_river_topo.tgz ├── demos ├── TestCenterline │ └── TestCenterline.ipynb ├── TestGWDLR │ └── TestGWDLR.ipynb ├── TestGWDLR2shape │ ├── NA1GWDLR2shape.ipynb │ ├── NA1GWDLR2shape_100m.ipynb │ └── TestGWDLR2shape.ipynb ├── TestIteratedRiverObs │ └── TestIteratedRiverObs.ipynb ├── TestReachExtractor │ └── TestReachExtractor.ipynb ├── TestSWOTL2 │ └── TestSWOTL2.ipynb └── TestWidthDataBase.py │ └── TestWidthDataBase.ipynb ├── doc └── sphinx │ └── RiverObs │ ├── API.rst │ ├── Centerline.rst │ ├── CenterlineRefinementExample.rst │ ├── CenterlineUsageExample.rst │ ├── EndToEndProcessingExample.rst │ ├── GDALOGRUtilities.rst │ ├── GWDLR.rst │ ├── GeometryDataBase.rst │ ├── Installation.rst │ ├── Makefile │ ├── Overview.rst │ ├── RDF.rst │ ├── ReachPreProcessorExample.rst │ ├── RiverNodeUsageExample.rst │ ├── RiverObs.rst │ ├── RiverObsConcepts.rst │ ├── RiverObsUsageExample.rst │ ├── SWOTRiver.rst │ ├── conf.py │ ├── images │ ├── CenterlinePicture.pptx │ └── centerline_nodes.pdf │ ├── index.rst │ └── make.bat ├── environment.yml ├── notebooks ├── MakeNAmericaGRWDLTopology │ └── MakeNAmericaGRWDLRiverTopology.ipynb ├── SacramentoFittingExample │ └── SacramentoFittingExample.ipynb ├── SacramentoFittingExampleIteratedCenterline │ └── SacramentoFittingExampleIteratedCenterlineV2.ipynb ├── SacramentoFittingExampleMaxWidth │ └── SacramentoFittingExampleMaxWidth.ipynb ├── doc │ ├── CenterlineRefinementExample.ipynb │ ├── EndToEndProcessingExample.ipynb │ └── ReachPreProcessorExample.ipynb └── examples │ ├── CenterlineExample.ipynb │ └── SacramentoFittingExampleIteratedCenterline_no_classification.ipynb ├── requirements.txt ├── setup.py └── src ├── Centerline ├── Centerline.py ├── __init__.py ├── test_Centerline.py 
└── version.py ├── GDALOGRUtilities ├── CoordinateTransformations.py ├── GDALInfo.py ├── GDALLatLonLayer.py ├── GDALWriter.py ├── GDALutilities.py ├── GeodeticPath.py ├── OGR2Shapely.py ├── OGRWriter.py ├── __init__.py └── version.py ├── GWDLR ├── GWDLR.py ├── GWDLR2shape.py ├── README.md ├── __init__.py └── version.py ├── GeometryDataBase ├── GeometryDataBase.py ├── __init__.py └── version.py ├── RDF ├── ExecuteRDF.py ├── MRDF.py ├── RDF.py ├── RDF_to_class.py ├── __init__.py └── version.py ├── RiverObs ├── IteratedRiverObs.py ├── LatLonRegion.py ├── ReachDatabase.py ├── ReachExtractor.py ├── ReachPreProcessor.py ├── RiverNode.py ├── RiverObs.py ├── RiverReach.py ├── RiverReachWriter.py ├── ShapeWriter.py ├── WidthDataBase.py ├── __init__.py └── version.py ├── SWOTRiver ├── Estimate.py ├── EstimateSWOTRiver.py ├── SWOTL2.py ├── SWOTRiverEstimator.py ├── __init__.py ├── analysis │ ├── __init__.py │ ├── riverobs.py │ └── tabley.py ├── discharge.py ├── errors.py ├── products │ ├── __init__.py │ ├── calval.py │ ├── pixcvec.py │ ├── riversp.py │ └── rivertile.py ├── scripts │ ├── README.md │ ├── cythorun.py │ ├── estimate_swot_river.py │ ├── estimate_swot_rivers.py │ └── make_simulation_catalog.py └── version.py ├── SWOTWater ├── __init__.py ├── aggregate.py ├── constants.py ├── products │ ├── __init__.py │ ├── constants.py │ ├── netcdf.py │ └── product.py └── version.py ├── bin ├── README.md ├── analyze_reach_table.py ├── calval2rivertile.py ├── compare_tables.py ├── fake_pixc_from_gdem.py ├── plot_pixcvec.py ├── plot_reach.py ├── plot_reach_stats.py ├── plot_riverobs.py ├── plot_tile_reaches.py ├── preproc_gdem.py ├── reach_collection_stats.py ├── reach_comparison.py ├── swot_pixc2rivertile.py └── swot_rivertiles2riversp.py └── toggle_input ├── __init__.py ├── toggle_input.py └── version.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | *.so 4 | *.o 5 | *~ 6 | *# 7 | *.h5 8 | #*.nc 9 | #*.pdf 10 | 
*.png 11 | *.dat 12 | *.tar 13 | *.gz 14 | *.pickle 15 | *#unison* 16 | junk* 17 | .#* 18 | *.old 19 | *.shp 20 | *.dbf 21 | *.shx 22 | *.svg 23 | *.cache 24 | .gmt* 25 | *.bak 26 | *.egg* 27 | .scons* 28 | .Python 29 | .svn/ 30 | build/ 31 | html/ 32 | CVS/ 33 | lib/ 34 | include/ 35 | man/ 36 | packages/ 37 | anaconda/ 38 | .ipynb_checkpoints/ 39 | SWOT/ 40 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to RiverObs 2 | We love your input! We want to make contributing to this project as easy and transparent as possible, whether it's: 3 | 4 | - Reporting a bug 5 | - Discussing the current state of the code 6 | - Submitting a fix 7 | - Proposing new features 8 | 9 | 10 | ## We Develop with Github 11 | We use github to host code, to track issues and feature requests, as well as accept [pull requests](../../pulls). We use the github workflow, here are some examples: 12 | * https://guides.github.com/introduction/flow/ quick overview 13 | * https://www.youtube.com/watch?v=8UguQzmswC4 more in-depth video showing how to do it 14 | 15 | 16 | ## Report bugs using Github's [issues](../../issues) 17 | We use GitHub issues to track public bugs. Report a bug by opening a new issue; it's that easy! 18 | See these for examples: 19 | * https://guides.github.com/features/issues/ 20 | * https://www.youtube.com/watch?v=fSIHnwPT5_k 21 | 22 | ## Use a Consistent Coding Style 23 | Use [PEP8](https://www.python.org/dev/peps/pep-0008/) with the exception of CamelCase for module filenames and directories. 24 | 25 | ## License 26 | By contributing, you agree that your contributions will be licensed under this [license](LICENSE.md). 
27 | -------------------------------------------------------------------------------- /Install.md: -------------------------------------------------------------------------------- 1 | # RiverObs Installation Instructions 2 | 3 | Starting with an anaconda 3.7 env on linux: https://www.anaconda.com/download/#linux 4 | 5 | * install anaconda 6 | * ```conda install netCDF4 pyproj pysal rtree shapely gdal fiona``` (this can be quite slow!) 7 | 8 | Add this to your .profile, .bash_profile, .zshenv,..etc: 9 | ``` 10 | declare -x PYTHONPATH=/path/to/RiverObs/src/:$PYTHONPATH 11 | ``` 12 | 13 | # Unsupported installation method below 14 | * Updating the below instructions along with ```setup.py``` would be a great project for a contributor! 15 | 16 | ## Preliminaries 17 | 18 | These are the instructions for installing the RiverObs package 19 | written by Ernesto Rodriguez in a Unix (linux or Mac) machine with an 20 | [anaconda](https://store.continuum.io/cshop/anaconda) python setup. 21 | The nominal installation instructions have been tested with python3.6, 22 | but should also work with python2.7. Future developments may stop 23 | supporting python2.7, as it is no longer the community standard. 24 | 25 | In what follows, it is assumed that the environment variable RIVER_DIR has been 26 | set to point to the root directory of the RiverObs package cloned 27 | by git. For instance, using bash 28 | 29 | export RIVER_DIR=/home/erodrigu/SWOT/RiverObs 30 | 31 | ## Python virtual environment installation 32 | 33 | Note that the dependence on scikit-image is optional and 34 | required only if one wants to vectorize GWDLR data. In 35 | that case, a working grass installation is required (tested 36 | with grass 6.4; grass70 beta has a bug in r.to.vector as of 37 | this writing). 
38 | 39 | ### Setting up an anaconda virtual environment 40 | 41 | To make sure that you are retrieving the same version packages as have 42 | been used for testing, make sure that the conda-forge channel is added 43 | to your conda configuration. This can be done by issuing the command 44 | 45 | conda config --add channels conda-forge 46 | 47 | or modifying your ~/.condarc file to look something like this: 48 | 49 | channels: 50 | - conda-forge 51 | - defaults 52 | show_channel_urls: true 53 | 54 | To create an anaconda virtual environment, execute (Simplest): 55 | 56 | conda create -n RiverObs python=3.6 numpy jupyter notebook matplotlib 57 | gdal scipy pip scikit-image statsmodels pysal pandas pytables 58 | shapely netcdf4 sphinx numpydoc rtree pyproj 59 | 60 | some third party packages may have trouble with the newer python 3.6, if you have trouble you can try with 3.5. Also, it may be necessary to use the 8d version of jpeg. If so try the following: 61 | 62 | conda create -n RiverObs python=3.5 numpy jupyter notebook matplotlib gdal scipy pip scikit-image statsmodels pysal pandas pytables shapely netcdf4 sphinx numpydoc rtree pyproj jpeg=8d 63 | 64 | Here is what I got working on a linux box with all the versions explicitly stated: 65 | 66 | conda create -n RiverObs python=3.5 numpy=1.13.1 jupyter=1.0.0 notebook=5.0.0 matplotlib=2.0.2 gdal=2.1.0 libgdal=2.1.0 scipy=0.19.1 pip=9.0.1 scikit-image=0.13.0 statsmodels=0.8.0 pysal=1.13.0 pandas=0.20.3 pytables=3.4.2 shapely=1.5.16 netcdf4=1.2.4 sphinx=1.6.3 numpydoc=0.7.0 rtree=0.8.3 pyproj=1.9.5.1 jpeg=8d 67 | 68 | or, if you want to keep the code and executables under the RiverObs folder: 69 | 70 | cd $RIVER_DIR 71 | conda create -p $RIVER_DIR/anaconda python=3.6 numpy jupyter notebook matplotlib 72 | gdal scipy pip scikit-image statsmodels pysal pandas pytables 73 | shapely netcdf4 sphinx numpydoc rtree pyproj 74 | 75 | Note: if you must run python 2.7, substitute python=2.7 in the lines above 76 | (not 
recommended). 77 | 78 | To activate this environment, if the first option was used, type 79 | 80 | source activate RiverObs 81 | 82 | or, if building in the RiverObs folder, 83 | 84 | source activate $RIVER_DIR/anaconda 85 | 86 | if anaconda/bin is in your path. Otherwise, use /path/to/anaconda/bin/source. 87 | 88 | To deactivate this environment, type 89 | 90 | source deactivate 91 | 92 | If you would like to use jupyter notebooks within the RiverObs environment, 93 | issue the following command while inside the environment: 94 | 95 | python -m ipykernel install --user 96 | 97 | ## Build the package 98 | 99 | Then, to build the RiverObs and associated packages: 100 | 101 | cd $RIVER_DIR 102 | python setup.py install --force 103 | 104 | For an anaconda local virtual environment, this will install the libraries in 105 | 106 | $RIVER_DIR/anaconda/python3.6/site-packages 107 | 108 | and the executables in 109 | 110 | $RIVER_DIR/anaconda/bin 111 | 112 | Otherwise, they are in similar directories in ~/anaconda/envs/RiverObs 113 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, California Institute of Technology ("Caltech"). U.S. Government sponsorship acknowledged. 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 6 | 7 | • Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 8 | 9 | • Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
10 | 11 | • Neither the name of Caltech nor its operating division, the Jet Propulsion Laboratory, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Open Source Love](https://badges.frapsoft.com/os/v1/open-source.png?v=103) 2 | # RiverObs 3 | 4 | This is a package written initially by 5 | [Ernesto Rodriguez](mailto:ernesto.rodriguez@jpl.nasa.gov) to estimate 6 | various river parameters starting from remote sensing data. 7 | [Alex Fore](mailto:alexander.fore@jpl.nasa.gov), [Brent Williams](mailto:brent.a.williams@jpl.nasa.gov), 8 | [Cassie Stuurman](mailto:cassie.stuurman@jpl.nasa.gov), [Rui Wei](mailto:rui.wei@jpl.nasa.gov), 9 | and [Renato P.M. Frasson](mailto:renato.prata.de.moraes.frasson@jpl.nasa.gov) have also provided code 10 | to reflect the evolving SWOT project. 11 | The code is currently maintained by the SWOT Algorithm Definition Team. 12 | 13 | Detailed installation instructions are in the Install.md file. 
14 | 15 | # Usage 16 | 17 | For generating data products that are most similar to the SWOT project's data products, the following script is recommended (found in src/bin): 18 | ``` 19 | usage: swot_pixc2rivertile.py [-h] [--shpbasedir SHPBASEDIR] [-l LOG_LEVEL] 20 | [--gdem-file GDEM_FILE] 21 | pixc_file out_riverobs_file out_pixc_vector_file 22 | rdf_file 23 | ``` 24 | where ```pixc_file``` is the SWOT high-resolution pixel cloud data product, ```out_riverobs_file``` is the filename of the output rivertile data product, ```out_pixc_vector_file``` is the filename of the output pixel cloud vector data product, ```rdf_file``` is the configuration file (see [this link](https://github.com/SWOTAlgorithms/RiverObs/blob/develop/src/bin/swot_pixc2rivertile.py#L13) for the recommended configuration). Additionally there are some optional arguments: ```--shpbasedir SHPBASEDIR``` will write out the nodes and reaches as shapefile format (written as netCDF to ```out_riverobs_file```), ```-l LOG_LEVEL``` controls the verbosity of the logging, and ```--gdem-file GDEM_FILE``` will create a pixc_file from the GDEM file and run RiverObs on that as a type of truth processing. 25 | 26 | # Prior Reach Database 27 | RiverObs requires a prior reach and node database. The database contains fixed node locations, reach boundaries, and high-resolution reach centerlines. It is distributed as a set of netcdf files, broken by continent (first two characters in the file name) and "major basins" in the continent (3rd and 4th characters in the file name). Metadata describing the database fields and the current version of the database is available [here](http://gaia.geosci.unc.edu/SWORD/). 28 | 29 | ## Summary of packages provided 30 | 31 | **RiverObs**: This is the main package for associating data with river 32 | reaches, and estimating hydrology parameters based on reach 33 | averaging (or not...). 
In addition to the homegrown packages listed 34 | below, this package requires the following open source packages: 35 | 36 | * [scipy](http://www.scipy.org/): Science algorithms swiss army knife. 37 | * [numpy](http://www.scipy.org/): Numerics swiss army knife. 38 | * [netCDF4](code.google.com/p/netcdf4-python): Reading netcdf4 files, 39 | including SWOT L2 data files. 40 | * [StatsModels](http://statsmodels.sourceforge.net): Fitting and 41 | estimation tools. 42 | * [pysal](http://pysal.org): nice interface to shapefiles and 43 | shapely bridge. 44 | * [pyproj](http://code.google.com/p/pyproj): Cartographic 45 | projections swiss army knife. 46 | * [pandas](http://pandas.pydata.org): The Python Data Analysis 47 | Library for DataFrames and HDFStore. 48 | * [pytables](http://www.pytables.org): easy HDF5 support, required for 49 | pandas HDFStore. 50 | 51 | **Centerline**: Provides a class that can be used to project data 52 | or refine a river center line. Requires the following packages: 53 | 54 | * [scipy](http://www.scipy.org/): Science algorithms swiss army knife. 55 | * [numpy](http://www.scipy.org/): Numerics swiss army knife. 56 | 57 | **GeometryDataBase**: Find quickly which reach intersects with a 58 | geometry of interest. The geometries are assumed to be stored in a 59 | shapefile. Requires the following packages: 60 | 61 | * [Rtree](https://github.com/Toblerity/rtree): Fast bounding box queries. 62 | * [libspatialindex](http://libspatialindex.github.io): Required by Rtree. 63 | * [pysal](http://pysal.org): nice interface to shapefiles and 64 | shapely bridge. 65 | * [shapely](https://github.com/sgillies/shapely): geometry 66 | calculations. 67 | 68 | **SWOTRiver**: This package contains classes that use the RiverObs 69 | capabilities to produce hydrology outputs from SWOT (simulated) data. 70 | 71 | * [numpy](http://www.scipy.org/): Numerics swiss army knife. 
72 | * [netCDF4](code.google.com/p/netcdf4-python): Reading netcdf4 files, 73 | including SWOT L2 data files. 74 | * [pyproj](http://code.google.com/p/pyproj): Cartographic 75 | projections swiss army knife. 76 | * [pandas](http://pandas.pydata.org): The Python Data Analysis 77 | Library for DataFrames and HDFStore. 78 | * [pytables](http://www.pytables.org): easy HDF5 support, required for 79 | pandas HDFStore. 80 | 81 | **GDALOGRUtilities**: Provides homegrown utilities for reading and writing 82 | various GIS files. Requires the following packages: 83 | 84 | * [gdal](http://www.gdal.org): GIS files swiss army knife. 85 | * [pyproj](http://code.google.com/p/pyproj): Cartographic 86 | projections swiss army knife. 87 | 88 | **GWDLR**: This is an optional package to convert Global Width 89 | Database-Large Rivers raster data provided by 90 | [Dai Yamazaki](mailto:bigasmountain1022@gmail.com) to vectors that can be used as 91 | centerlines. Requires: 92 | 93 | * [grass](http://grass.osgeo.org): for raster to vector program. 94 | * [scikit-image](http://scikit-image.org): for skeletonize. 95 | -------------------------------------------------------------------------------- /data/grassdata/temp/PERMANENT/.bashrc: -------------------------------------------------------------------------------- 1 | test -r ~/.alias && . 
~/.alias 2 | PS1='GRASS 6.4.2 (temp):\w > ' 3 | PROMPT_COMMAND="'/usr/lib64/grass-6.4.2/etc/prompt.sh'" 4 | export PATH="/usr/lib64/grass-6.4.2/bin:/usr/lib64/grass-6.4.2/scripts:/home/erodrigu/.grass6/addons:/home/erodrigu/anaconda/envs/SWOTRiver/bin:/home/erodrigu/sw/bin:/usr/lib64/qt-3.3/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/erodrigu/bin" 5 | export HOME="/home/erodrigu" 6 | export GRASS_SHELL_PID=$$ 7 | trap "echo \"GUI issued an exit\"; exit" SIGQUIT 8 | -------------------------------------------------------------------------------- /data/grassdata/temp/PERMANENT/DEFAULT_WIND: -------------------------------------------------------------------------------- 1 | proj: 0 2 | zone: 0 3 | north: 1 4 | south: 0 5 | east: 1 6 | west: 0 7 | cols: 1 8 | rows: 1 9 | e-w resol: 1 10 | n-s resol: 1 11 | top: 1 12 | bottom: 0 13 | cols3: 1 14 | rows3: 1 15 | depths: 1 16 | e-w resol3: 1 17 | n-s resol3: 1 18 | t-b resol: 1 19 | -------------------------------------------------------------------------------- /data/grassdata/temp/PERMANENT/MYNAME: -------------------------------------------------------------------------------- 1 | Temp data set for importing. 
-------------------------------------------------------------------------------- /data/grassdata/temp/PERMANENT/VAR: -------------------------------------------------------------------------------- 1 | DB_DRIVER: dbf 2 | DB_DATABASE: $GISDBASE/$LOCATION_NAME/$MAPSET/dbf/ 3 | -------------------------------------------------------------------------------- /data/grassdata/temp/PERMANENT/WIND: -------------------------------------------------------------------------------- 1 | proj: 0 2 | zone: 0 3 | north: 1 4 | south: 0 5 | east: 1 6 | west: 0 7 | cols: 1 8 | rows: 1 9 | e-w resol: 1 10 | n-s resol: 1 11 | top: 1 12 | bottom: 0 13 | cols3: 1 14 | rows3: 1 15 | depths: 1 16 | e-w resol3: 1 17 | n-s resol3: 1 18 | t-b resol: 1 19 | -------------------------------------------------------------------------------- /data/n35w125_wth.bin.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/data/n35w125_wth.bin.bz2 -------------------------------------------------------------------------------- /data/n35w125_wth.ctl: -------------------------------------------------------------------------------- 1 | dset ^n35w125_wth.bin 2 | undef -9999 3 | title HydroSHEDS 4 | options yrev little_endian 5 | xdef 6000 linear -125.0000000000000000 0.000833333333333 6 | ydef 6000 linear 35.0000000000000000 0.000833333333333 7 | tdef 1 linear 00Z01jan2000 1yr 8 | zdef 1 linear 1 1 9 | vars 1 10 | wth 1 99 ** rivwth 11 | ENDVARS 12 | -------------------------------------------------------------------------------- /data/nAmerica_GRWDL_river_topo.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/data/nAmerica_GRWDL_river_topo.tgz -------------------------------------------------------------------------------- 
/demos/TestCenterline/TestCenterline.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# This is for making changes on the fly\n", 12 | "\n", 13 | "%load_ext autoreload\n", 14 | "%autoreload 2" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "%pylab inline" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import numpy as np\n", 33 | "from Centerline import Centerline" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": null, 39 | "metadata": { 40 | "collapsed": true 41 | }, 42 | "outputs": [], 43 | "source": [ 44 | "npoints = 10\n", 45 | "x = np.arange(npoints)*2*np.pi/npoints\n", 46 | "y = np.sin(x)" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": { 53 | "collapsed": true 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "cl = Centerline(x,y)" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "x0 = x[5]*0.5 + x[6]*0.5\n", 67 | "y0 = np.sin(x0) + 0.2\n", 68 | "x1 = x0\n", 69 | "y1 = np.sin(x0) - 0.2\n", 70 | "\n", 71 | "xx = [x0,x1]\n", 72 | "yy = [y0,y1]\n", 73 | "\n", 74 | "i,d,xcl,ycl,s,n = cl(xx,yy)\n", 75 | "\n", 76 | "print(i,d,s,n)" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "metadata": {}, 83 | "outputs": [], 84 | "source": [ 85 | "plot(x,y,'o')\n", 86 | "plot(xx,yy,'rx')\n", 87 | "plot(xcl,ycl,'ro')\n", 88 | "plot(x[i],y[i],'gx')" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": { 95 | "collapsed": true 96 | }, 97 | "outputs": [], 
98 | "source": [ 99 | "cl2 = Centerline(x,y,ds=0.1)" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": { 106 | "collapsed": true 107 | }, 108 | "outputs": [], 109 | "source": [ 110 | "i,d,xcl,ycl,s,n = cl2(xx,yy)" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": null, 116 | "metadata": {}, 117 | "outputs": [], 118 | "source": [ 119 | "plot(cl2.x,cl2.y,'o')\n", 120 | "plot(xx,yy,'rx')\n", 121 | "plot(xcl,ycl,'ro')\n", 122 | "plot(cl2.x[i],cl2.y[i],'gx')" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "Test the centerline with an observation and resampling" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "width = 0.5 + 0.25*y\n", 139 | "clw = Centerline(x,y,obs=[width],obs_names=['width'])" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": null, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "plot(clw.x,clw.y,'o')\n", 149 | "plot(clw.x,clw.y+clw.width,'-k',alpha=0.5)\n", 150 | "plot(clw.x,clw.y-clw.width,'-k',alpha=0.5)\n" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": { 157 | "collapsed": true 158 | }, 159 | "outputs": [], 160 | "source": [ 161 | "width = 0.5 + 0.25*y\n", 162 | "clw2 = Centerline(x,y,ds=0.1,obs=[width],obs_names=['width'])" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": null, 168 | "metadata": {}, 169 | "outputs": [], 170 | "source": [ 171 | "plot(clw2.x,clw2.y,'o')\n", 172 | "plot(clw2.x,clw2.y+clw2.width,'-k',alpha=0.5)\n", 173 | "plot(clw2.x,clw2.y-clw2.width,'-k',alpha=0.5)" 174 | ] 175 | } 176 | ], 177 | "metadata": { 178 | "kernelspec": { 179 | "display_name": "Python 3", 180 | "language": "python", 181 | "name": "python3" 182 | }, 183 | "language_info": { 184 | "codemirror_mode": { 
185 | "name": "ipython", 186 | "version": 3 187 | }, 188 | "file_extension": ".py", 189 | "mimetype": "text/x-python", 190 | "name": "python", 191 | "nbconvert_exporter": "python", 192 | "pygments_lexer": "ipython3", 193 | "version": "3.6.1" 194 | } 195 | }, 196 | "nbformat": 4, 197 | "nbformat_minor": 1 198 | } 199 | -------------------------------------------------------------------------------- /demos/TestGWDLR/TestGWDLR.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "%load_ext autoreload\n", 12 | "%autoreload 2" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "metadata": {}, 18 | "source": [ 19 | "# Test GWDLR to Centerlines\n", 20 | "\n", 21 | "**GWDLR**: This is an optional package to convert Global Width\n", 22 | " Database-Large Rivers raster data provided by\n", 23 | " [Dai Yamazaki](mailto:bigasmountain1022@gmail.com) to vectors that can be used as\n", 24 | " centerlines. This part tests going to a thinned geotiff file." 
25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": { 31 | "collapsed": true 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "import os\n", 36 | "from os.path import exists\n", 37 | "def find_riverobs_test_data_dir():\n", 38 | " \"\"\"Fin the location of the test data root directory\"\"\"\n", 39 | " \n", 40 | " if 'RIVEROBS_TESTDATA_DIR' in os.environ:\n", 41 | " test_data_dir = os.environ('RIVEROBS_TESTDATA_DIR')\n", 42 | " else: # try the default location\n", 43 | " test_data_dir = '../../../RiverObsTestData'\n", 44 | " \n", 45 | " if not exists(test_data_dir):\n", 46 | " print('You must either set the environment variable RIVEROBS_TESTDATA_DIR')\n", 47 | " print('or locate the test data directory at ../../../RiverObsTestData')\n", 48 | " raise Exception('Test data directory not found.')\n", 49 | " \n", 50 | " return test_data_dir\n", 51 | "\n", 52 | "data_dir = find_riverobs_test_data_dir()\n", 53 | "data_dir" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "metadata": { 60 | "collapsed": true 61 | }, 62 | "outputs": [], 63 | "source": [ 64 | "%pylab inline" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": null, 70 | "metadata": { 71 | "collapsed": true 72 | }, 73 | "outputs": [], 74 | "source": [ 75 | "from os.path import join\n", 76 | "from GWDLR import GWDLR" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "metadata": { 83 | "collapsed": true 84 | }, 85 | "outputs": [], 86 | "source": [ 87 | "data_dir = join(data_dir,'GWDLR')\n", 88 | "root_name = 'n35w125_wth'" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": { 95 | "collapsed": true 96 | }, 97 | "outputs": [], 98 | "source": [ 99 | "gwdlr = GWDLR(root_name,data_dir)\n", 100 | "gwdlr.__dict__" 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "metadata": {}, 106 | "source": [ 107 | "Thin the data to get 
centerlines." 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "metadata": { 114 | "collapsed": true 115 | }, 116 | "outputs": [], 117 | "source": [ 118 | "min_width = 25.\n", 119 | "gwdlr.to_mask(min_width,overwrite=True,thin=True)" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": null, 125 | "metadata": { 126 | "collapsed": true 127 | }, 128 | "outputs": [], 129 | "source": [ 130 | "figsize(8,8)\n", 131 | "imshow(gwdlr.data,cmap=cm.gray)" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "Output the thinned center line data to a Geotiff file." 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": { 145 | "collapsed": true 146 | }, 147 | "outputs": [], 148 | "source": [ 149 | "mask_file = join(data_dir,root_name+'_center_line_%d.tif'%min_width)\n", 150 | "gwdlr.to_gdal(mask_file,gdal_format='GTiff')" 151 | ] 152 | } 153 | ], 154 | "metadata": { 155 | "kernelspec": { 156 | "display_name": "Python 3", 157 | "language": "python", 158 | "name": "python3" 159 | }, 160 | "language_info": { 161 | "codemirror_mode": { 162 | "name": "ipython", 163 | "version": 3 164 | }, 165 | "file_extension": ".py", 166 | "mimetype": "text/x-python", 167 | "name": "python", 168 | "nbconvert_exporter": "python", 169 | "pygments_lexer": "ipython3", 170 | "version": "3.6.1" 171 | } 172 | }, 173 | "nbformat": 4, 174 | "nbformat_minor": 1 175 | } 176 | -------------------------------------------------------------------------------- /demos/TestGWDLR2shape/NA1GWDLR2shape.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "%load_ext autoreload\n", 12 | "%autoreload 2" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | 
"execution_count": null, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import os\n", 22 | "from os.path import exists\n", 23 | "def find_riverobs_test_data_dir():\n", 24 | " \"\"\"Fin the location of the test data root directory\"\"\"\n", 25 | " \n", 26 | " if 'RIVEROBS_TESTDATA_DIR' in os.environ:\n", 27 | " test_data_dir = os.environ('RIVEROBS_TESTDATA_DIR')\n", 28 | " else: # try the default location\n", 29 | " test_data_dir = '../../../RiverObsTestData'\n", 30 | " \n", 31 | " if not exists(test_data_dir):\n", 32 | " print('You must either set the environment variable RIVEROBS_TESTDATA_DIR')\n", 33 | " print('or locate the test data directory at ../../../RiverObsTestData')\n", 34 | " raise Exception('Test data directory not found.')\n", 35 | " \n", 36 | " return test_data_dir\n", 37 | "\n", 38 | "data_dir = find_riverobs_test_data_dir()\n", 39 | "data_dir" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": { 46 | "collapsed": true 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "from glob import glob\n", 51 | "from os.path import join, splitext, split\n", 52 | "from GWDLR import GWDLR2shape" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": null, 58 | "metadata": { 59 | "collapsed": true 60 | }, 61 | "outputs": [], 62 | "source": [ 63 | "gwdlr_data_dir = join(data_dir,'GWDLR')\n", 64 | "output_dir = join(gwdlr_data_dir,'GWD-LRVectors','na1')\n", 65 | "min_width = 50" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "bin_file = glob(join(gwdlr_data_dir,'*.bin'))\n", 75 | "bin_file" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "g2s = GWDLR2shape(gwdlr_data_dir,output_dir)" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": { 91 | "collapsed": true 92 | 
}, 93 | "outputs": [], 94 | "source": [ 95 | "for tile in bin_file:\n", 96 | " rootname = splitext(split(tile)[1])[0]\n", 97 | " print('Processing: ',tile)\n", 98 | " g2s.process_tile(rootname,min_width)" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": { 105 | "collapsed": true 106 | }, 107 | "outputs": [], 108 | "source": [] 109 | } 110 | ], 111 | "metadata": { 112 | "kernelspec": { 113 | "display_name": "Python 3", 114 | "language": "python", 115 | "name": "python3" 116 | }, 117 | "language_info": { 118 | "codemirror_mode": { 119 | "name": "ipython", 120 | "version": 3 121 | }, 122 | "file_extension": ".py", 123 | "mimetype": "text/x-python", 124 | "name": "python", 125 | "nbconvert_exporter": "python", 126 | "pygments_lexer": "ipython3", 127 | "version": "3.6.1" 128 | } 129 | }, 130 | "nbformat": 4, 131 | "nbformat_minor": 1 132 | } 133 | -------------------------------------------------------------------------------- /demos/TestGWDLR2shape/NA1GWDLR2shape_100m.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from glob import glob\n", 10 | "from os.path import join, splitext, split\n", 11 | "from GWDLR import GWDLR2shape" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "gwdlr_data_dir = '/Volumes/Reservoir/Data/GWD-LR/na1'\n", 21 | "output_dir = '/Volumes/Reservoir/Data/GWD-LRVectors/na1'\n", 22 | "#rootname = 'n35w125_wth'\n", 23 | "min_width = 100" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "bin_file = glob(join(gwdlr_data_dir,'*.bin'))" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | 
"outputs": [], 40 | "source": [ 41 | "splitext(split(bin_file[0])[1])[0]" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "g2s = GWDLR2shape(gwdlr_data_dir,output_dir)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "for tile in bin_file:\n", 60 | " rootname = splitext(split(tile)[1])[0]\n", 61 | " print('Processing: ',tile)\n", 62 | " g2s.process_tile(rootname,min_width)" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": null, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [] 71 | } 72 | ], 73 | "metadata": { 74 | "kernelspec": { 75 | "display_name": "Python 3", 76 | "language": "python", 77 | "name": "python3" 78 | }, 79 | "language_info": { 80 | "codemirror_mode": { 81 | "name": "ipython", 82 | "version": 3 83 | }, 84 | "file_extension": ".py", 85 | "mimetype": "text/x-python", 86 | "name": "python", 87 | "nbconvert_exporter": "python", 88 | "pygments_lexer": "ipython3", 89 | "version": "3.6.1" 90 | } 91 | }, 92 | "nbformat": 4, 93 | "nbformat_minor": 1 94 | } 95 | -------------------------------------------------------------------------------- /demos/TestGWDLR2shape/TestGWDLR2shape.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "from GWDLR import GWDLR2shape" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "gwdlr_data_dir = '../../data'\n", 19 | "output_dir = '/Volumes/Reservoir/Data/GWD-LRVectors/na1'\n", 20 | "rootname = 'n35w125_wth'\n", 21 | "min_width = 50" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 
29 | "source": [ 30 | "g2s = GWDLR2shape(gwdlr_data_dir,output_dir)" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "g2s.process_tile(rootname,min_width)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [] 48 | } 49 | ], 50 | "metadata": { 51 | "kernelspec": { 52 | "display_name": "Python 3", 53 | "language": "python", 54 | "name": "python3" 55 | }, 56 | "language_info": { 57 | "codemirror_mode": { 58 | "name": "ipython", 59 | "version": 3 60 | }, 61 | "file_extension": ".py", 62 | "mimetype": "text/x-python", 63 | "name": "python", 64 | "nbconvert_exporter": "python", 65 | "pygments_lexer": "ipython3", 66 | "version": "3.6.1" 67 | } 68 | }, 69 | "nbformat": 4, 70 | "nbformat_minor": 1 71 | } 72 | -------------------------------------------------------------------------------- /demos/TestReachExtractor/TestReachExtractor.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "%load_ext autoreload\n", 12 | "%autoreload 2" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": null, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "%pylab inline" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "from os.path import join\n", 31 | "from SWOTRiver import SWOTL2\n", 32 | "from RiverObs import ReachExtractor" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "import os\n", 42 | "from os.path import exists\n", 43 | "def find_riverobs_test_data_dir():\n", 44 | " \"\"\"Fin the location of 
the test data root directory\"\"\"\n", 45 | " \n", 46 | " if 'RIVEROBS_TESTDATA_DIR' in os.environ:\n", 47 | " test_data_dir = os.environ['RIVEROBS_TESTDATA_DIR']\n", 48 | " else: # try the default location\n", 49 | " test_data_dir = '../../../RiverObsTestData'\n", 50 | " \n", 51 | " if not exists(test_data_dir):\n", 52 | " print('You must either set the environment variable RIVEROBS_TESTDATA_DIR')\n", 53 | " print('or locate the test data directory at ../../../RiverObsTestData')\n", 54 | " raise Exception('Test data directory not found.')\n", 55 | " \n", 56 | " return test_data_dir\n", 57 | "\n", 58 | "data_dir = find_riverobs_test_data_dir()\n", 59 | "data_dir" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "l2_file = join(data_dir,'L2','L2v1','swot_heights_ohio_example_v1.Multilook_L2PIXC.nc')\n", 69 | "assert exists(l2_file)\n", 70 | "\n", 71 | "db_dir = join(data_dir,'GRWDL','nAmerica_GRWDL_river_topo')\n", 72 | "shape_file_root = join(db_dir,'nAmerica_GRWDL_river_topo')\n", 73 | "shape_file_root" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": {}, 80 | "outputs": [], 81 | "source": [ 82 | "lonmin = -83 \n", 83 | "latmin = 38\n", 84 | "lonmax = -82\n", 85 | "latmax = 39\n", 86 | "bounding_box = lonmin,latmin,lonmax,latmax\n", 87 | "\n", 88 | "# The list of classes to consider for potential inundation.\n", 89 | "# The truth classes are [1], if no_layover_classification' is used.\n", 90 | "# If estimated classification is used, the choice depends on whether\n", 91 | "# use_fractional_inundation is set.\n", 92 | "# If it is not set, either [3,4] or [4] should be used.\n", 93 | "# If it is set, [2,3,4] or [3,4] should be used.\n", 94 | "class_list = [2,3,4,5]\n", 95 | "\n", 96 | "lat_kwd = 'latitude_medium'\n", 97 | "lon_kwd = 'longitude_medium'\n", 98 | "class_kwd = 'classification'\n", 99 | "height_kwd = 'height_medium'\n", 
100 | "\n", 101 | "l2 = SWOTL2(l2_file,bounding_box=bounding_box,\n", 102 | " class_list=class_list,\n", 103 | " lat_kwd=lat_kwd,lon_kwd=lon_kwd,class_kwd=class_kwd)" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": null, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "reaches = ReachExtractor(shape_file_root, l2,clip_buffer=0)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "reaches[0].metadata" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "figsize(6,6)\n", 131 | "scatter(l2.lon,l2.lat,edgecolor='none',alpha=0.1,label='l2 data')\n", 132 | "for i,reach in enumerate(reaches):\n", 133 | " lon, lat = reach.lon, reach.lat\n", 134 | " #plot(lon,lat,'.m',alpha=0.1)\n", 135 | " plot(lon,lat,alpha=1,label='reach %d'%i)\n", 136 | "legend(loc='best')" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [ 145 | "figsize(6,6)\n", 146 | "scatter(l2.x/1.e3,l2.y/1.e3,edgecolor='none',alpha=0.1,label='l2 data')\n", 147 | "for i,reach in enumerate(reaches):\n", 148 | " x, y = reach.x, reach.y\n", 149 | " #plot(x/1.e3,y/1.e3,'.m',alpha=0.1)\n", 150 | " plot(x/1.e3,y/1.e3,alpha=1,label='reach %d'%i)\n", 151 | "legend(loc='best')" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": null, 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "reaches[:]" 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "metadata": {}, 167 | "outputs": [], 168 | "source": [ 169 | "len(reaches)" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": null, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "reaches.reach_idx" 179 | ] 180 | } 181 | ], 
182 | "metadata": { 183 | "kernelspec": { 184 | "display_name": "Python 3", 185 | "language": "python", 186 | "name": "python3" 187 | }, 188 | "language_info": { 189 | "codemirror_mode": { 190 | "name": "ipython", 191 | "version": 3 192 | }, 193 | "file_extension": ".py", 194 | "mimetype": "text/x-python", 195 | "name": "python", 196 | "nbconvert_exporter": "python", 197 | "pygments_lexer": "ipython3", 198 | "version": "3.6.1" 199 | } 200 | }, 201 | "nbformat": 4, 202 | "nbformat_minor": 1 203 | } 204 | -------------------------------------------------------------------------------- /demos/TestWidthDataBase.py/TestWidthDataBase.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# This is for making changes on the fly\n", 12 | "\n", 13 | "%load_ext autoreload\n", 14 | "%autoreload 2" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "%pylab inline" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "from os.path import join\n", 33 | "import numpy as np\n", 34 | "from SWOTRiver import SWOTL2\n", 35 | "from RiverObs import ReachExtractor\n", 36 | "from RiverObs import WidthDataBase" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "import os\n", 46 | "from os.path import exists\n", 47 | "def find_riverobs_test_data_dir():\n", 48 | " \"\"\"Find the location of the test data root directory\"\"\"\n", 49 | " \n", 50 | " if 'RIVEROBS_TESTDATA_DIR' in os.environ:\n", 51 | " test_data_dir = os.environ['RIVEROBS_TESTDATA_DIR']\n", 52 | " else: # try the default location\n", 53 | " test_data_dir = 
'../../../RiverObsTestData'\n", 54 | " \n", 55 | " if not exists(test_data_dir):\n", 56 | " print('You must either set the environment variable RIVEROBS_TESTDATA_DIR')\n", 57 | " print('or locate the test data directory at ../../../RiverObsTestData')\n", 58 | " raise Exception('Test data directory not found.')\n", 59 | " \n", 60 | " return test_data_dir\n", 61 | "\n", 62 | "data_dir = find_riverobs_test_data_dir()\n", 63 | "data_dir" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "l2_file = join(data_dir,'L2','L2v1','swot_heights_ohio_example_v1.Multilook_L2PIXC.nc')\n", 73 | "assert exists(l2_file)\n", 74 | "\n", 75 | "db_dir = join(data_dir,'GRWDL')\n", 76 | "shape_file_root = join(db_dir,'nAmerica_GRWDL_river_topo','nAmerica_GRWDL_river_topo')\n", 77 | "db_file = join(db_dir,'nAmerica_GRWDL.h5')\n", 78 | "assert exists(db_file)\n", 79 | "db_file" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": { 86 | "collapsed": true 87 | }, 88 | "outputs": [], 89 | "source": [ 90 | "lonmin = -83 \n", 91 | "latmin = 38\n", 92 | "lonmax = -82\n", 93 | "latmax = 39\n", 94 | "bounding_box = lonmin,latmin,lonmax,latmax\n", 95 | "\n", 96 | "# The list of classes to consider for potential inundation.\n", 97 | "# The truth classes are [1], if no_layover_classification' is used.\n", 98 | "# If estimated classification is used, the choice depends on whether\n", 99 | "# use_fractional_inundation is set.\n", 100 | "# If it is not set, either [3,4] or [4] should be used.\n", 101 | "# If it is set, [2,3,4] or [3,4] should be used.\n", 102 | "class_list = [2,3,4,5]\n", 103 | "\n", 104 | "lat_kwd = 'latitude_medium'\n", 105 | "lon_kwd = 'longitude_medium'\n", 106 | "class_kwd = 'classification'\n", 107 | "height_kwd = 'height_medium'\n", 108 | "\n", 109 | "l2 = SWOTL2(l2_file,bounding_box=bounding_box,\n", 110 | " class_list=class_list,\n", 111 | " 
lat_kwd=lat_kwd,lon_kwd=lon_kwd,class_kwd=class_kwd)" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": { 118 | "collapsed": true 119 | }, 120 | "outputs": [], 121 | "source": [ 122 | "clip_buffer = 0.02\n", 123 | "\n", 124 | "clip = False\n", 125 | "reaches_no_clip = ReachExtractor(shape_file_root, l2,clip=clip,\n", 126 | " clip_buffer=clip_buffer)\n", 127 | "\n", 128 | "clip = True\n", 129 | "reaches_clip = ReachExtractor(shape_file_root, l2,clip=clip,\n", 130 | " clip_buffer=clip_buffer)" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "print(reaches_no_clip.reach_idx, reaches_clip.reach_idx)\n", 140 | "print(reaches_no_clip[1].lon.shape, reaches_clip[1].lon.shape)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": null, 146 | "metadata": { 147 | "collapsed": true 148 | }, 149 | "outputs": [], 150 | "source": [ 151 | "db = WidthDataBase(db_file)" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": null, 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "reach_index = reaches_no_clip.reach_idx[1]\n", 161 | "\n", 162 | "lon_nc,lat_nc = db.get_lon_lat(reach_index)\n", 163 | "print('no clip length: %d'%len(lon_nc))\n", 164 | "\n", 165 | "lon_c,lat_c,inbbox = db.get_lon_lat(reach_index,\n", 166 | " bounding_box=l2.bounding_box,\n", 167 | " clip_buffer=clip_buffer)\n", 168 | "\n", 169 | "print('clip length: %d'%len(lon_c))\n", 170 | " \n", 171 | "figsize(10,5)\n", 172 | "subplot(1,2,1)\n", 173 | "plot(lon_nc,lat_nc,'.',alpha=0.1)\n", 174 | "\n", 175 | "subplot(1,2,2)\n", 176 | "plot(lon_c,lat_c,'.',alpha=0.1)\n" 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": null, 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "reach_index = reaches_no_clip.reach_idx[1]\n", 186 | "\n", 187 | "x_nc,y_nc = 
db.get_xy(reach_index,l2.proj)\n", 188 | "print('no clip length: %d'%len(x_nc))\n", 189 | "\n", 190 | "x_c,y_c = db.get_xy(reach_index,l2.proj,\n", 191 | " bounding_box=l2.bounding_box,\n", 192 | " clip_buffer=clip_buffer)\n", 193 | "\n", 194 | "print('clip length: %d'%len(x_c))\n", 195 | " \n", 196 | "figsize(10,5)\n", 197 | "subplot(1,2,1)\n", 198 | "plot(x_nc,y_nc,'.',alpha=0.1)\n", 199 | "\n", 200 | "subplot(1,2,2)\n", 201 | "plot(x_c,y_c,'.',alpha=0.1)\n" 202 | ] 203 | }, 204 | { 205 | "cell_type": "code", 206 | "execution_count": null, 207 | "metadata": {}, 208 | "outputs": [], 209 | "source": [ 210 | "reach_index = reaches_no_clip.reach_idx[1]\n", 211 | "\n", 212 | "lon_nc,lat_nc,width_nc = db.get_river(reach_index,\n", 213 | " columns=['long','lat','width'],\n", 214 | " asarray=True,transpose=True)\n", 215 | "print('no clip length: %d'%len(lon_nc))\n", 216 | "\n", 217 | "lon_c,lat_c,width_c = db.get_river(reach_index,\n", 218 | " columns=['long','lat','width'],\n", 219 | " asarray=True,transpose=True,\n", 220 | " bounding_box=l2.bounding_box,\n", 221 | " clip_buffer=clip_buffer\n", 222 | " )\n", 223 | "print('clip length: %d'%len(lon_c))" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": {}, 230 | "outputs": [], 231 | "source": [ 232 | "figsize(10,5)\n", 233 | "\n", 234 | "subplot(1,2,1)\n", 235 | "scatter(lon_nc,lat_nc,c=width_nc,edgecolor='none',alpha=0.1)\n", 236 | "\n", 237 | "subplot(1,2,2)\n", 238 | "scatter(lon_c,lat_c,c=width_c,edgecolor='none',alpha=0.1)" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": null, 244 | "metadata": { 245 | "collapsed": true 246 | }, 247 | "outputs": [], 248 | "source": [] 249 | } 250 | ], 251 | "metadata": { 252 | "kernelspec": { 253 | "display_name": "Python 3", 254 | "language": "python", 255 | "name": "python3" 256 | }, 257 | "language_info": { 258 | "codemirror_mode": { 259 | "name": "ipython", 260 | "version": 3 261 | }, 262 | 
"file_extension": ".py", 263 | "mimetype": "text/x-python", 264 | "name": "python", 265 | "nbconvert_exporter": "python", 266 | "pygments_lexer": "ipython3", 267 | "version": "3.6.1" 268 | } 269 | }, 270 | "nbformat": 4, 271 | "nbformat_minor": 1 272 | } 273 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/API.rst: -------------------------------------------------------------------------------- 1 | .. _API: 2 | 3 | RiverObs API 4 | ==================================== 5 | 6 | Contents: 7 | 8 | .. toctree:: 9 | :maxdepth: 4 10 | 11 | Centerline 12 | GDALOGRUtilities 13 | GWDLR 14 | GeometryDataBase 15 | PyRivWidth 16 | RDF 17 | RiverObs 18 | SWOTRiver 19 | 20 | Indices and tables 21 | ================== 22 | 23 | * :ref:`genindex` 24 | * :ref:`modindex` 25 | * :ref:`search` 26 | 27 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/Centerline.rst: -------------------------------------------------------------------------------- 1 | Centerline package 2 | ================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | Centerline.Centerline module 8 | ---------------------------- 9 | 10 | .. automodule:: Centerline.Centerline 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Centerline.version module 16 | ------------------------- 17 | 18 | .. automodule:: Centerline.version 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: Centerline 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/CenterlineUsageExample.rst: -------------------------------------------------------------------------------- 1 | .. 
_centerline-usage: 2 | 3 | Centerline Usage Example 4 | ======================== 5 | 6 | This example shows how to initialize a centerline using a shapefile and to 7 | add point river location measurements. 8 | 9 | Below are the Python modules required for this example: 10 | 11 | .. code:: python 12 | 13 | from os.path import join 14 | import numpy as N 15 | import pysal 16 | from SWOTRiver import SWOTL2 17 | from Centerline import Centerline 18 | 19 | # For plotting 20 | 21 | %pylab inline 22 | 23 | .. parsed-literal:: 24 | 25 | Populating the interactive namespace from numpy and matplotlib 26 | 27 | 28 | Read the example data 29 | --------------------- 30 | 31 | In this step, the data are read, a projection to (x,y) coordinates is 32 | defined, and the data bounding box is found. 33 | 34 | The data locations are given below: 35 | 36 | .. code:: python 37 | 38 | # This is the example data 39 | 40 | data_dir = '../../data/examples/' 41 | l2_file = join(data_dir,'simulated_swot_test_data.nc') 42 | centerline_file = join(data_dir,'sacramento_centerline.shp') 43 | Read the simulated SWOT data 44 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 45 | 46 | A simplified version of the SWOT simulator data containing only 47 | latitudes, longitudes, water classification, true and simulated noisy 48 | height is provided in the ``data/examples`` directory. 49 | 50 | For this example, only data with classification=1 labels are used (pure 51 | water pixels) and the latitude and longitude are assumed to be known 52 | from the reference interferogram. The following lines read the 53 | latitude-longitude data for the water pixels, and project it to a 54 | Lambert Equiarea projection. 55 | 56 | .. code:: python 57 | 58 | class_list=[1] 59 | lat_kwd='no_layover_latitude' 60 | lon_kwd='no_layover_longitude' 61 | 62 | l2 = SWOTL2(l2_file,class_list=class_list,lat_kwd=lat_kwd,lon_kwd=lon_kwd) 63 | 64 | .. 
parsed-literal:: 65 | 66 | Dataset opened 67 | Bounding box calculated 68 | Good data selected 69 | lat/lon read 70 | projection set and x,y calculated 71 | 72 | 73 | Read a candidate centerline 74 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 75 | 76 | A candidate centerline for part of the Sacramento River is provided in 77 | the ``data/examples`` directory courtesy of `Tamlin 78 | Pavelsky `__ and `George 79 | Allen `__ and modified by `Ernesto 80 | Rodriguez `__ so that reaches 81 | appeared as topologically connected. 82 | 83 | The shapefile is read using the pysal package and the latitude and 84 | longitude are extracted. 85 | 86 | .. code:: python 87 | 88 | shp = pysal.open(centerline_file) 89 | cline = shp[0] 90 | lon, lat = N.asarray(cline.vertices).T 91 | To get true distances, the centerline must be given points so that the 92 | Euclidean distance can be calculated. Below, the SWOTL2 projection 93 | function is used to project to the same projection as the SWOT data. 94 | 95 | .. code:: python 96 | 97 | xc, yc = l2.proj(lon,lat) 98 | To see what has been done, plot the centerline points and the measurement locations 99 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 100 | 101 | The centerline is plotted as black points, while the water measurements 102 | are plotted as blue points. As can be seen, the simulated measurements 103 | have some gaps over the river and also include water points outside the 104 | river. Notice also that the measurements show meanders not present in 105 | the candidate centerline: this defect is corrected by the class 106 | IteratedRiverObs, discussed later on. 107 | 108 | .. code:: python 109 | 110 | figsize(6,6) 111 | plot(l2.x/1.e3,l2.y/1.e3,'.b',alpha=0.01) 112 | plot(xc/1.e3,yc/1.e3,',k',alpha=1) 113 | xlim(-5,5) 114 | ylim(-10,10) 115 | xlabel('X (km)') 116 | ylabel('Y (km)') 117 | 118 | 119 | 120 | .. parsed-literal:: 121 | 122 | 123 | 124 | 125 | 126 | 127 | .. 
image:: CenterlineExample_files/CenterlineExample_11_1.png 128 | 129 | 130 | Initialize the Centerline instance 131 | ---------------------------------- 132 | 133 | The following step shows how the Centerline is initialized with default 134 | parameters. 135 | 136 | .. code:: python 137 | 138 | centerline = Centerline(xc,yc) 139 | The following step shows how to associate the simulated measurements 140 | with centerline node locations and assign along-track and normal 141 | coordinates to each point. *(Note that instead of calling the instance, 142 | the member function to\_centerline could also have been used.)* 143 | 144 | .. code:: python 145 | 146 | index, distance,x,y,s,n = centerline(l2.x,l2.y) 147 | Plot the distribution of distances to the input centerline nodes 148 | (*distance*), as well as the normal coordinate (*n*), for each of the 149 | SWOT simulated data points. 150 | 151 | .. code:: python 152 | 153 | figsize(10,5) 154 | subplot(1,2,1) 155 | hist(distance,bins=100,log=True) 156 | xlabel('Distance to node (m)') 157 | ylabel('N observations') 158 | grid(); 159 | subplot(1,2,2) 160 | hist(n,bins=100,log=True) 161 | xlabel('Normal coordinate (m)') 162 | ylabel('N observations') 163 | grid(); 164 | 165 | 166 | .. image:: CenterlineExample_files/CenterlineExample_17_0.png 167 | 168 | 169 | Notice that most of the data points are close to the centerline, but, as 170 | expected from the data picture, some of the points are far away. These 171 | points can be filtered using the RiverObs class. 172 | 173 | Below is a zoom around the centerline. 174 | 175 | .. code:: python 176 | 177 | figsize(10,5) 178 | subplot(1,2,1) 179 | hist(distance,bins=arange(0,500,10),log=False) 180 | xlabel('Distance to node (m)') 181 | ylabel('N observations') 182 | grid(); 183 | subplot(1,2,2) 184 | hist(n,bins=arange(-500,500,50),log=False) 185 | xlabel('Normal coordinate (m)') 186 | ylabel('N observations') 187 | grid(); 188 | 189 | 190 | .. 
image:: CenterlineExample_files/CenterlineExample_19_0.png 191 | 192 | 193 | Below is a plot of the normal coordinate ploted as a function of the 194 | reach distance along the centerline. Where the centerline and the river 195 | measurements agree, one can estimate the river width. The missed 196 | meanders in the centerline are also easily identified. 197 | 198 | .. code:: python 199 | 200 | plot(centerline.s[index]/1.e3,n,'.',alpha=0.1) 201 | xlim(40,90) 202 | ylim(-500,500) 203 | xlabel('Centerline Reach (km)') 204 | ylabel('Normal coordinate (m)'); 205 | 206 | 207 | .. image:: CenterlineExample_files/CenterlineExample_21_0.png 208 | 209 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/GDALOGRUtilities.rst: -------------------------------------------------------------------------------- 1 | GDALOGRUtilities package 2 | ======================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | GDALOGRUtilities.CoordinateTransformations module 8 | ------------------------------------------------- 9 | 10 | .. automodule:: GDALOGRUtilities.CoordinateTransformations 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | GDALOGRUtilities.GDALInfo module 16 | -------------------------------- 17 | 18 | .. automodule:: GDALOGRUtilities.GDALInfo 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | GDALOGRUtilities.GDALLatLonLayer module 24 | --------------------------------------- 25 | 26 | .. automodule:: GDALOGRUtilities.GDALLatLonLayer 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | GDALOGRUtilities.GDALWriter module 32 | ---------------------------------- 33 | 34 | .. automodule:: GDALOGRUtilities.GDALWriter 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | GDALOGRUtilities.GDALutilities module 40 | ------------------------------------- 41 | 42 | .. 
automodule:: GDALOGRUtilities.GDALutilities 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | GDALOGRUtilities.GeodeticPath module 48 | ------------------------------------ 49 | 50 | .. automodule:: GDALOGRUtilities.GeodeticPath 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | GDALOGRUtilities.OGR2Shapely module 56 | ----------------------------------- 57 | 58 | .. automodule:: GDALOGRUtilities.OGR2Shapely 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | GDALOGRUtilities.OGRWriter module 64 | --------------------------------- 65 | 66 | .. automodule:: GDALOGRUtilities.OGRWriter 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | GDALOGRUtilities.version module 72 | ------------------------------- 73 | 74 | .. automodule:: GDALOGRUtilities.version 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | 80 | Module contents 81 | --------------- 82 | 83 | .. automodule:: GDALOGRUtilities 84 | :members: 85 | :undoc-members: 86 | :show-inheritance: 87 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/GWDLR.rst: -------------------------------------------------------------------------------- 1 | GWDLR package 2 | ============= 3 | 4 | Submodules 5 | ---------- 6 | 7 | GWDLR.GWDLR module 8 | ------------------ 9 | 10 | .. automodule:: GWDLR.GWDLR 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | GWDLR.GWDLR2shape module 16 | ------------------------ 17 | 18 | .. automodule:: GWDLR.GWDLR2shape 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. 
automodule:: GWDLR 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/GeometryDataBase.rst: -------------------------------------------------------------------------------- 1 | GeometryDataBase package 2 | ======================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | GeometryDataBase.GeometryDataBase module 8 | ---------------------------------------- 9 | 10 | .. automodule:: GeometryDataBase.GeometryDataBase 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | GeometryDataBase.version module 16 | ------------------------------- 17 | 18 | .. automodule:: GeometryDataBase.version 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: GeometryDataBase 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/Installation.rst: -------------------------------------------------------------------------------- 1 | .. _Installation: 2 | 3 | Installation 4 | ================================== 5 | 6 | Preliminaries 7 | ------------- 8 | 9 | These are the instructions for installing the RiverObs package written 10 | by Ernesto Rodriguez in a Unix (linux or Mac) machine with an 11 | `anaconda `__ python setup, 12 | or using a `virtualenv `__ with 13 | another python installation. In both cases, it is assumed that 14 | `numpy `__ is available. In addition, to use the 15 | ipython notebooks, it is assumed that `ipython `__ 16 | is also installed. If you do not have a lot of python experience, it is 17 | recommended that you follow the anaconda install route. 18 | 19 | In what follows, it is assumed that the environment variable RIVER\_DIR 20 | has been set to point to the root directory of the 21 | AirSWOTAnomalyAnalysis package cloned by git. 
For instance, using bash 22 | 23 | :: 24 | 25 | export RIVER_DIR=/home/erodrigu/SWOT/RiverObs 26 | 27 | Python virtual environment installation 28 | --------------------------------------- 29 | 30 | Note that the dependence on scikit-image is optional and required only 31 | if one wants to vectorize GWDLR data. In that case, a working grass 32 | installation is required (tested with grass 6.4; grass70 beta has a bug 33 | in r.to.vector as of this writing). 34 | 35 | Setting up an anaconda virtual environment (Simplest) 36 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 37 | 38 | To create an anaconda virtual environment, execute: 39 | 40 | :: 41 | 42 | cd $RIVER_DIR 43 | conda create -p $RIVER_DIR/anaconda numpy ipython ipython-notebook 44 | matplotlib gdal scipy pip scikit-image statsmodels pysal pandas 45 | pytables shapely netcdf4 sphinx 46 | 47 | or (Simplest) 48 | 49 | :: 50 | 51 | conda create -n RiverObs numpy ipython ipython-notebook matplotlib 52 | gdal scipy pip scikit-image statsmodels pysal pandas pytables 53 | shapely netcdf4 sphinx 54 | 55 | To activate this environment, type 56 | 57 | :: 58 | 59 | source activate $RIVER_DIR/anaconda 60 | 61 | or 62 | 63 | :: 64 | 65 | source activate RiverObs 66 | 67 | if anaconda/bin is in your path. Otherwise, use 68 | /path/to/anaconda/bin/source. 
69 | 70 | To deactivate this environment, type 71 | 72 | :: 73 | 74 | source deactivate 75 | 76 | Equivalently, one can set 77 | 78 | :: 79 | 80 | export PATH=$RIVER_DIR/anaconda/bin:$PATH 81 | 82 | Setting up a python virtual environment 83 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 84 | 85 | In case you have a preexisting python environment with virtualenv, numpy, 86 | and ipython already installed, create a virtual environment for this 87 | project as follows: 88 | 89 | :: 90 | 91 | virtualenv --system-site-packages $RIVER_DIR 92 | 93 | To activate this environment, type 94 | 95 | :: 96 | 97 | source $RIVER_DIR/bin/activate 98 | 99 | and to deactivate 100 | 101 | :: 102 | 103 | source deactivate 104 | 105 | Build additional package requirements 106 | ------------------------------------- 107 | 108 | In addition to the packages installed by conda, pyproj, rtree are 109 | required, and they are not directly part of the official anaconda 110 | distribution. There are two ways to install these packages: either 111 | through conda and `binstar `__ or by using (inside 112 | the conda virtual environment) pip and compilation. 113 | 114 | Install Using Binstar (Simplest) 115 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 116 | 117 | There are multiple versions of these packages in binstar for linux or 118 | osx architectures. The user can select which one he wants to install by 119 | searching in the binstar site. The following instructions are valid as 120 | of December 30, 2014. 
121 | 122 | For osx or linux installation of pyproj: 123 | 124 | :: 125 | 126 | conda install -c https://conda.binstar.org/pingucarsti pyproj 127 | 128 | For osx or linux installation of rtree and the required library 129 | libspatial index: 130 | 131 | :: 132 | 133 | conda install -c https://conda.binstar.org/dougal libspatialindex 134 | conda install -c https://conda.binstar.org/dougal rtree 135 | 136 | Install Using pip and brew or manual compilation 137 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 138 | 139 | Working inside the virtual environment, the following command: 140 | 141 | :: 142 | 143 | pip install pyproj 144 | pip install rtree 145 | 146 | In addition, `rtree `__ requires the 147 | `libspatialindex `__ library. On a Mac 148 | with `brew `__ this can be done easily: 149 | 150 | :: 151 | 152 | brew install spatialindex 153 | 154 | On a generic Unix system, this can be done by downloading the code from 155 | `osgeo `__, and following the 156 | usual Unix install process. 
To avoid path/ownership conflict, one can 157 | install into the anaconda installation: 158 | 159 | :: 160 | 161 | tar xvzf spatialindex-src-1.8.1.tar.gz 162 | cd spatialindex-src-1.8.1 163 | ./configure --prefix=~/anaconda 164 | make 165 | make install 166 | 167 | Install numpydoc for sphinx documentation (Optional) 168 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 169 | 170 | This is only required if you want to build the sphinx documentation: 171 | 172 | :: 173 | 174 | pip install numpydoc 175 | 176 | Build the package 177 | ----------------- 178 | 179 | Then, to build the RiverObs and associated packages: 180 | 181 | :: 182 | 183 | cd $RIVER_DIR 184 | python setup.py install --force 185 | 186 | For an anaconda local virtual environment, this will install the 187 | libraries in 188 | 189 | :: 190 | 191 | $RIVER_DIR/anaconda/python2.7/site-packages 192 | 193 | and the executables in 194 | 195 | :: 196 | 197 | $RIVER_DIR/anaconda/bin 198 | 199 | Otherwise, they are in similar directories in ~/anaconda/envs/RiverObs 200 | 201 | For a virtualenv virtual environment, this will install the libraries in 202 | 203 | :: 204 | 205 | $RIVER_DIR/lib/python2.7/site-packages 206 | 207 | and the executables in 208 | 209 | :: 210 | 211 | $RIVER_DIR/bin 212 | 213 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests 
embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/RiverObs.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/RiverObs.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/RiverObs" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/RiverObs" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/Overview.rst: -------------------------------------------------------------------------------- 1 | RiverObs Packages Overview 2 | ========================== 3 | 4 | This is a package written initially by `Ernesto 5 | Rodriguez `__ to estimate various 6 | river parameters starting from remote sensing data. 7 | 8 | Detailed installation instructions are in the Install.md file. 9 | 10 | Summary of packages provided 11 | ---------------------------- 12 | 13 | **RiverObs**: This is the main package for associating data with river 14 | reaches, and estimating hydrology parameters based on reach averaging (or 15 | not...). In addition to the homegrown packages listed below, this 16 | package requires the following open source packages: 17 | 18 | - `scipy `__: Science algorithms swiss army 19 | knife. 20 | - `numpy `__: Numerics swiss army knife. 21 | - `netCDF4 `__: Reading netcdf4 22 | files, including SWOT L2 data files. 
23 | - `StatsModels `__: Fitting and 24 | estimation tools. 25 | - `pysal `__: nice interface to shapefiles and 26 | shapely bridge. 27 | - `pyproj `__: Cartographic 28 | projections swiss army knife. 29 | - `pandas `__: The Python Data Analysis 30 | Library for DataFrames and HDFStore. 31 | - `pytables `__: easy HDF5 support, required 32 | for pandas HDFStore. 33 | 34 | **Centerline**: Provides a class that can be used to project data or 35 | refine a river center line. Requires the following packages: 36 | 37 | - `scipy `__: Science algorithms swiss army 38 | knife. 39 | - `numpy `__: Numerics swiss army knife. 40 | 41 | **GeometryDataBase**: Find quickly which reach intersects with a 42 | geometry of interest. The geometries are assumed to be stored in a 43 | shapefile. Requires the following packages: 44 | 45 | - `Rtree `__: Fast bounding box 46 | queries. 47 | - `libspatialindex `__: Required by 48 | Rtree. 49 | - `pysal `__: nice interface to shapefiles and 50 | shapely bridge. 51 | - `shapely `__: geometry 52 | calculations. 53 | 54 | **SWOTRiver**: This package contains classes that use the RiverObs 55 | capabilities to produce hydrology outputs from SWOT (simulated) data. 56 | 57 | - `numpy `__: Numerics swiss army knife. 58 | - `netCDF4 `__: Reading netcdf4 59 | files, including SWOT L2 data files. 60 | - `pyproj `__: Cartographic 61 | projections swiss army knife. 62 | - `pandas `__: The Python Data Analysis 63 | Library for DataFrames and HDFStore. 64 | - `pytables `__: easy HDF5 support, required 65 | for pandas HDFStore. 66 | 67 | **GDALOGRUtilities**: Provides homegrown utilities for reading and 68 | writing various GIS files. Requires the following packages: 69 | 70 | - `gdal `__: GIS files swiss army knife. 71 | - `pyproj `__: Cartographic 72 | projections swiss army knife. 
73 | 74 | **GWDLR**: This is an optional package to convert Global Width 75 | Database-Large Rivers raster data provided by `Dai 76 | Yamazaki `__ to vectors that can be 77 | used as centerlines. Requires: 78 | 79 | - `grass `__: for raster to vector program. 80 | - `scikit-image `__: for skeletonize. 81 | 82 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/RDF.rst: -------------------------------------------------------------------------------- 1 | RDF package 2 | =========== 3 | 4 | Submodules 5 | ---------- 6 | 7 | RDF.ExecuteRDF module 8 | --------------------- 9 | 10 | .. automodule:: RDF.ExecuteRDF 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | RDF.MRDF module 16 | --------------- 17 | 18 | .. automodule:: RDF.MRDF 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | RDF.RDF module 24 | -------------- 25 | 26 | .. automodule:: RDF.RDF 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | RDF.RDF_to_class module 32 | ----------------------- 33 | 34 | .. automodule:: RDF.RDF_to_class 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | RDF.version module 40 | ------------------ 41 | 42 | .. automodule:: RDF.version 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | 48 | Module contents 49 | --------------- 50 | 51 | .. automodule:: RDF 52 | :members: 53 | :undoc-members: 54 | :show-inheritance: 55 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/RiverNodeUsageExample.rst: -------------------------------------------------------------------------------- 1 | .. 
_river_node_usage: 2 | 3 | RiverNode Usage Example 4 | ================= 5 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/RiverObs.rst: -------------------------------------------------------------------------------- 1 | RiverObs package 2 | ================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | RiverObs.FitRiver module 8 | ------------------------ 9 | 10 | .. automodule:: RiverObs.FitRiver 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | RiverObs.IteratedRiverObs module 16 | -------------------------------- 17 | 18 | .. automodule:: RiverObs.IteratedRiverObs 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | RiverObs.LatLonRegion module 24 | ---------------------------- 25 | 26 | .. automodule:: RiverObs.LatLonRegion 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | RiverObs.ReachExtractor module 32 | ------------------------------ 33 | 34 | .. automodule:: RiverObs.ReachExtractor 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | RiverObs.ReachPreProcessor module 40 | --------------------------------- 41 | 42 | .. automodule:: RiverObs.ReachPreProcessor 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | RiverObs.RiverNode module 48 | ------------------------- 49 | 50 | .. automodule:: RiverObs.RiverNode 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | RiverObs.RiverObs module 56 | ------------------------ 57 | 58 | .. automodule:: RiverObs.RiverObs 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | RiverObs.RiverReach module 64 | -------------------------- 65 | 66 | .. automodule:: RiverObs.RiverReach 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | RiverObs.RiverReachWriter module 72 | -------------------------------- 73 | 74 | .. 
automodule:: RiverObs.RiverReachWriter 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | RiverObs.WidthDataBase module 80 | ----------------------------- 81 | 82 | .. automodule:: RiverObs.WidthDataBase 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | RiverObs.version module 88 | ----------------------- 89 | 90 | .. automodule:: RiverObs.version 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | 95 | 96 | Module contents 97 | --------------- 98 | 99 | .. automodule:: RiverObs 100 | :members: 101 | :undoc-members: 102 | :show-inheritance: 103 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/RiverObsConcepts.rst: -------------------------------------------------------------------------------- 1 | .. _Concepts: 2 | 3 | RiverObs Concepts 4 | ============= 5 | 6 | The major components of the RiverObs concept are illustrated in the 7 | :ref:`centerline_nodes` figure, which illustrates the three key concepts 8 | underlying RiverObs: the :ref:`centerline-overview`, the 9 | :ref:`river-node-overview`, and the :ref:`river-obs-overview`. 10 | 11 | 12 | .. _centerline_nodes: 13 | .. figure:: images/centerline_nodes.png 14 | :width: 400 px 15 | :scale: 100 % 16 | :align: center 17 | 18 | *Illustration of the concepts underlying RiverObs.* 19 | 20 | In the figure, the physical river is shown in blue, the Centerline is the black line running along the middle. 21 | The centerline consists of a set of RiverNodes (red dots), and every RiverNode 22 | has an associated coordinate system (in green), one of whose axes (the *s* or 23 | *along-track* axis) is tangent to the Centerline, and other axis (the *n* or *normal* 24 | axis) is perpendicular to it and defines a right-handed system. Data (for example, 25 | the orange cross) are associated with the closest node (node regions of influence 26 | are shown as dashed lines) and assigned node index, *s* and *n* coordinates. 
27 | A RiverObs object contains a Centerline, a list of nodes (and their associated data), 28 | and functions to gather information for all the nodes. 29 | 30 | 31 | .. _centerline-overview: 32 | 33 | Centerline 34 | --------- 35 | 36 | The Centerline object can be thought of as a curved one-dimensional 37 | coordinate line, with a set of river node locations defined along it, and with 38 | the capability to provide a mapping between any point in the plane (as 39 | long as it is not too far from the Centerline) to one of its node locations, 40 | and assigning normal and tangential coordinates relative to the node 41 | location. The basic functionality of the Centerline is reviewed in 42 | :ref:`centerline-usage`. Refining the Centerline so that it follows 43 | the data more closely is reviewed in :ref:`centerline-refinement`. 44 | 45 | .. _river-node-overview: 46 | 47 | RiverNode 48 | --------- 49 | 50 | A RiverNode is a data container associated with points on the 51 | Centerline. At a minimum, a RiverNode has the following elements: 52 | 53 | index : int 54 | index in the center line corresponding to this node 55 | d : array_like 56 | distance from the node to each of the data points 57 | x : array_like 58 | x coordinate of each measurement associated with the node 59 | y : array_like 60 | y coordinate of each measurement associated with the node 61 | s : array_like 62 | along-track coordinate (relative to the node center) for each point 63 | n : array_like 64 | across-track (normal) coordinate (relative to the node center) for each point 65 | ds : float 66 | along-track dimension for this node. Defaults to 1. Needs to be set 67 | correctly for width_area to work. 68 | 69 | In addition to this basic data, any other object can be stored in a 70 | RiverNode. Once data is stored in a node, it can be queried to produce 71 | a node statistic; e.g., the mean and standard deviations of the data 72 | stored in the node. 
Several statistic functions are provided in the 73 | RiverNode API. 74 | 75 | .. _river-obs-overview: 76 | 77 | RiverObs 78 | -------- 79 | 80 | A RiverObs is an object which contains a Centerline and a set of 81 | RiverNodes associated with that centerline. In addition, it stores the 82 | observation data and can provide statistic lists for each node. A 83 | derived class, IteratedRiverObs, also has the capability to iterate 84 | the centerline to fit the data better. An example of using an 85 | IteratedRiverObs to refine the centerline and load data onto all the 86 | nodes is provided in :ref:`centerline-refinement`. 87 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/RiverObsUsageExample.rst: -------------------------------------------------------------------------------- 1 | .. _river_obs_usage: 2 | 3 | RiverObs Usage Example 4 | ====================== 5 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/SWOTRiver.rst: -------------------------------------------------------------------------------- 1 | SWOTRiver package 2 | ================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | SWOTRiver.SWOTL2 module 8 | ----------------------- 9 | 10 | .. automodule:: SWOTRiver.SWOTL2 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | SWOTRiver.SWOTRiverEstimator module 16 | ----------------------------------- 17 | 18 | .. automodule:: SWOTRiver.SWOTRiverEstimator 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | SWOTRiver.version module 24 | ------------------------ 25 | 26 | .. automodule:: SWOTRiver.version 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | 32 | Module contents 33 | --------------- 34 | 35 | .. 
automodule:: SWOTRiver 36 | :members: 37 | :undoc-members: 38 | :show-inheritance: 39 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/images/CenterlinePicture.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/doc/sphinx/RiverObs/images/CenterlinePicture.pptx -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/images/centerline_nodes.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/doc/sphinx/RiverObs/images/centerline_nodes.pdf -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | RiverObs's documentation 3 | ==================================== 4 | 5 | The intent of the RiverObs package is to provide a set of Python 6 | classes that enable associating data, remote sensing or *in situ*, 7 | with rivers, as a whole, and with specific river locations. Although 8 | the package was developed to support the generation of hydrology 9 | observables from the NASA `SWOT `__ mission, 10 | it is quite general and can be used for multiple data sets including 11 | point gauge data, irregularly distributed data, such as lidar point 12 | clouds, or line data, such as GPS surveys. 13 | 14 | Before taking a look at the detailed :ref:`API`, the novice user 15 | should get familiar with the basic :ref:`Concepts`. The 16 | :ref:`Overview` presents a brief description of the packages that form 17 | part of the distribution, and their dependencies. The :ref:`Installation` 18 | gives detailed instructions on how to set up and build the 19 | package. 
The :ref:`API` contains a detailed listing of all of 20 | the packages, and interface documentation. 21 | 22 | Tutorial examples of how to use some of the basic classes using simulated data 23 | are contained in :ref:`centerline-usage` and 24 | :ref:`centerline-refinement`. 25 | 26 | Finally, a typical workflow will consist in making a set of input 27 | reaches, reading these reaches and some data, estimating river 28 | widths, heights and slopes, and, finally, writing out the results in 29 | files that can be read by GIS programs. These steps are illustrated in 30 | :ref:`reach-preprocessing` and :ref:`end-to-end-example`. 31 | 32 | 33 | Contents: 34 | 35 | .. toctree:: 36 | :maxdepth: 1 37 | 38 | RiverObsConcepts 39 | Overview 40 | Installation 41 | CenterlineUsageExample 42 | CenterlineRefinementExample 43 | ReachPreProcessorExample 44 | EndToEndProcessingExample.rst 45 | API 46 | 47 | 48 | Indices and tables 49 | ================== 50 | 51 | * :ref:`genindex` 52 | * :ref:`modindex` 53 | * :ref:`search` 54 | 55 | -------------------------------------------------------------------------------- /doc/sphinx/RiverObs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. 
pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\RiverObs.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\RiverObs.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 
205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: RiverObs 2 | channels: 3 | - defaults 4 | - conda-forge 5 | dependencies: 6 | - python=3.9 7 | - numpy>=1.13.3 8 | - pandas>=0.22.0 9 | - rtree>=0.8.3 10 | - cython>=0.27.3 11 | - scikit-image>=0.13.1 12 | - matplotlib>=2.1.2 13 | - fiona>=1.7.11 14 | - pyproj>=1.9.5.1 15 | - scipy>=1.0.0 16 | - pysal>=1.14.3 17 | - shapely>=1.6.3 18 | - statsmodels>=0.8.0 19 | - pytest>=3.3.2 20 | - netcdf4>=1.4.3 21 | - gdal>=2.2.4 22 | - bottleneck>=1.3.7 23 | - pip 24 | - pip: 25 | - piecewise-regression>=1.5.0 26 | - osr>=0.0.1 27 | # - gdal>=2.2.4 -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.13.3 2 | pandas>=0.22.0 3 | Rtree>=0.8.3 4 | Cython>=0.27.3 5 | scikit_image>=0.13.1 6 | matplotlib>=2.1.2 7 | Fiona>=1.7.11 8 | pyproj>=1.9.5.1 9 | #GDAL>=2.2.4 10 | scipy>=1.0.0 11 | PySAL>=1.14.3 12 | Shapely>=1.6.3 13 | statsmodels>=0.8.0 14 | pytest>=3.3.2 15 | netCDF4>=1.4.3 16 | osr>=0.0.1 17 | bottleneck>=1.3.7 18 | piecewise-regression>=1.5.0 19 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | This setup.py script is used to manage the installation of multiple packages 4 | related to RiverObs. The script consolidates the setup configuration for 5 | several packages into a single file to minimize changes versus its original 6 | design. The version of the meta-package (SWOTRiverAlgorithms) matches the 7 | RiverObs version number. You should be able to import subpackages like RDF, 8 | SWOTRiver, RiverObs etc following pip install. 
9 | """ 10 | 11 | from setuptools import setup, find_packages 12 | 13 | def get_version(package): 14 | version_file = f'src/{package}/version.py' 15 | with open(version_file) as f: 16 | exec(f.read()) 17 | return locals()['__version__'] 18 | 19 | setup( 20 | name='SWOTRiverAlgorithms', 21 | version=get_version("RiverObs"), 22 | description='Meta-package for SWOT River Algorithms', 23 | author='Ernesto Rodriguez', 24 | author_email='ernesto.rodriguez@jpl.nasa.gov', 25 | package_dir={'': 'src'}, 26 | packages=find_packages(where='src'), 27 | scripts=[ 28 | 'src/SWOTRiver/scripts/make_simulation_catalog.py', 29 | 'src/SWOTRiver/scripts/estimate_swot_rivers.py', 30 | ], 31 | extras_require={ 32 | 'RDF': [f'RDF=={get_version("RDF")}'], 33 | 'GDALOGRUtilities': [f'GDALOGRUtilities=={get_version("GDALOGRUtilities")}'], 34 | 'Centerline': [f'Centerline=={get_version("Centerline")}'], 35 | 'GWDLR': [f'GWDLR=={get_version("GWDLR")}'], 36 | 'GeometryDataBase': [f'GeometryDataBase=={get_version("GeometryDataBase")}'], 37 | 'SWOTWater': [f'SWOTWater=={get_version("SWOTWater")}'], 38 | 'SWOTRiver': [f'SWOTRiver=={get_version("SWOTRiver")}'], 39 | 'RiverObs': [f'RiverObs=={get_version("RiverObs")}'], 40 | 'toggle_input': [f'toggle_input=={get_version("toggle_input")}'], 41 | }, 42 | install_requires=[ 43 | line.strip() for line in open('requirements.txt') 44 | ], 45 | 46 | 47 | ) 48 | -------------------------------------------------------------------------------- /src/Centerline/__init__.py: -------------------------------------------------------------------------------- 1 | """A class for computing the location of a point or set of points 2 | relative to a curved line defined by a series of two dimentional 3 | points.""" 4 | 5 | from __future__ import absolute_import 6 | 7 | from .Centerline import Centerline 8 | from .version import __version__ 9 | -------------------------------------------------------------------------------- /src/Centerline/test_Centerline.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import pytest 4 | import numpy as np 5 | 6 | import Centerline 7 | 8 | class TestCenterLine(): 9 | @pytest.fixture(scope='class') 10 | def centerline_tester(self): 11 | xx = np.arange(10)/10.0 12 | yy = np.arange(10)/10.0 13 | return Centerline.Centerline(xx, yy) 14 | 15 | def test_distance(self, centerline_tester): 16 | i, d, x, y, s, n = centerline_tester(0.1, 100) 17 | assert d == pytest.approx(99.10322901, abs=0.001) 18 | 19 | def test_along_reach(self, centerline_tester): 20 | i, d, x, y, s, n = centerline_tester(0.1, 100) 21 | assert s == pytest.approx(69.50859659, abs=0.001) 22 | 23 | def test_cross_reach(self, centerline_tester): 24 | i, d, x, y, s, n = centerline_tester(0.1, 100) 25 | assert n == pytest.approx(70.63996744, abs=0.001) 26 | -------------------------------------------------------------------------------- /src/Centerline/version.py: -------------------------------------------------------------------------------- 1 | # Version 0.1: basic functionality 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Version 0.1.1: added along-track resampling 6 | 7 | #__version__ = '0.1.1' 8 | 9 | # Version 0.1.2: added IteratedCenterline 10 | 11 | #__version__ = '0.1.2' 12 | 13 | # Version 0.1.3: moved IteratedCenterline to RiverObs package 14 | 15 | #__version__ = '0.1.3' 16 | 17 | # Version 0.1.4: Python 2.7/3.6 compatibility. 18 | 19 | __version__ = '0.1.4' 20 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/CoordinateTransformations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Classes to take objects from one coordinate system to another. 
3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | from os.path import join 8 | from osgeo import gdal, gdalconst, osr 9 | from pyproj import Proj, transform 10 | 11 | 12 | class CoordinateTransformation: 13 | """Base class for coordinate transformations. 14 | 15 | Initialize ccordinate systems with source and destination projections as 16 | proj4 strings. 17 | """ 18 | 19 | def __init__( 20 | self, 21 | source_projection, 22 | destination_projection='+units=m +ellps=WGS84 +datum=WGS84 +proj=longlat ' 23 | ): 24 | self.source_projection = Proj(source_projection) 25 | self.destination_projection = Proj(destination_projection) 26 | 27 | def transform_xy(self, x, y): 28 | """Transform sequences of x, y coordinates in the source coordinate system 29 | to x,y in the target coordinate system.""" 30 | 31 | return transform(self.source_projection, self.destination_projection, 32 | x, y) 33 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/GDALWriter.py: -------------------------------------------------------------------------------- 1 | """ 2 | Write gdal files from various inputs. 
3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | from os.path import join 8 | import numpy as np 9 | from numpy.ma import masked_array, is_masked 10 | from osgeo import gdal 11 | try: 12 | import osr 13 | except ImportError: 14 | from osgeo import osr 15 | 16 | 17 | def get_gdal_type(dtype): 18 | """Given a numpy data type, return the corresponding """ 19 | 20 | if (dtype == np.int8) or (dtype == np.uint8): 21 | gdal_type = gdal.GDT_Byte 22 | elif dtype == np.int16: 23 | gdal_type = gdal.GDT_Int16 24 | elif dtype == np.uint16: 25 | gdal_type = gdal.GDT_UInt16 26 | elif dtype == np.int32: 27 | gdal_type = gdal.GDT_Int32 28 | elif dtype == np.uint32: 29 | gdal_type = gdal.GDT_UInt32 30 | elif dtype == np.float32: 31 | gdal_type = gdal.GDT_Float32 32 | elif dtype == np.float64: 33 | gdal_type = gdal.GDT_Float64 34 | else: 35 | raise Exception('Unknown dtype: %s' % dtype) 36 | return gdal_type 37 | 38 | 39 | def write_llh_to_gdal(llh_data, 40 | lon_min, 41 | dlon, 42 | lat_min, 43 | dlat, 44 | gdal_format, 45 | dst_filename, 46 | origin_up=True, 47 | options=None, 48 | nodata_value=None, 49 | vflip_data=False): 50 | """Write an LLH layer to a GIS file in a gdal supported format. 51 | 52 | vflip_data: if True llh_data => llh_data[::-1,:]. Use in case the data 53 | is not aligned with the desired geotranform. 
54 | """ 55 | 56 | gdal_type = get_gdal_type(llh_data.dtype) 57 | 58 | # Get the driver and open the output file 59 | 60 | driver = gdal.GetDriverByName(gdal_format) 61 | if driver == None: 62 | raise Exception('Unimplented gdal driver: %s' % driver) 63 | 64 | dst_ds = driver.Create( 65 | dst_filename, 66 | llh_data.shape[1], 67 | llh_data.shape[0], 68 | bands=1, 69 | eType=gdal_type) #, options=options ) 70 | 71 | # Flip the data if needed to be consistent with the geotransform 72 | 73 | if vflip_data: 74 | llh_data = llh_data[::-1, :] 75 | 76 | # Set all of the transform information 77 | 78 | if origin_up: 79 | nlat = llh_data.shape[0] 80 | lat_max = lat_min + (nlat - 1) * dlat 81 | dst_ds.SetGeoTransform([lon_min, dlon, 0, lat_max, 0, -dlat]) 82 | else: 83 | dst_ds.SetGeoTransform([lon_min, dlon, 0, lat_min, 0, dlat]) 84 | srs = osr.SpatialReference() 85 | srs.SetWellKnownGeogCS('WGS84') 86 | dst_ds.SetProjection(srs.ExportToWkt()) 87 | 88 | # Now write the raster 89 | 90 | band = dst_ds.GetRasterBand(1) 91 | 92 | if nodata_value != None: 93 | band.SetNoDataValue(nodata_value) 94 | 95 | if is_masked(llh_data): 96 | if nodata_value != None: 97 | llh_data.data[llh_data.mask] = nodata_value 98 | band.WriteArray(llh_data.data) 99 | else: 100 | band.WriteArray(llh_data) 101 | 102 | # Clean up by closing the dataset 103 | 104 | dst_ds = None 105 | src_ds = None 106 | 107 | 108 | def write_numpy_to_gdal(data, 109 | geotransform, 110 | wkt_proj, 111 | dst_filename, 112 | gdal_format='GTiff', 113 | origin_up=True, 114 | options=None, 115 | nodata_value=None): 116 | """Given numpy data and projection information, write to a gdal file. 
117 | 118 | Parameters 119 | ---------- 120 | 121 | data : 122 | a 2D numpy array 123 | geotransform : 124 | a list containing the affine transformation 125 | (e.g., the result of gdal data_set.GetGeoTransform()) 126 | wkt_proj : 127 | well known text projection information 128 | (e.g., the data_set.GetProjection() ) 129 | dst_filename : str 130 | destination file name 131 | origin_up : bool 132 | if origin_up == True, the data is reversed in its first axis 133 | option : 134 | options to pass to gdal. 135 | nodata_value : 136 | nodata_value value. If None, no nodata_value value is set. 137 | """ 138 | 139 | gdal_type = get_gdal_type(data.dtype) 140 | 141 | # Get the driver and open the output file 142 | 143 | driver = gdal.GetDriverByName(gdal_format) 144 | if driver == None: 145 | raise Exception('Unimplented gdal driver: %s' % driver) 146 | 147 | dst_ds = driver.Create( 148 | dst_filename, data.shape[1], data.shape[0], bands=1, 149 | eType=gdal_type) #, options=options ) 150 | 151 | # Set all of the transform information 152 | 153 | if origin_up: 154 | data = data[::-1, :] 155 | 156 | dst_ds.SetGeoTransform(geotransform) 157 | dst_ds.SetProjection(wkt_proj) 158 | 159 | # Now write the raster 160 | 161 | band = dst_ds.GetRasterBand(1) 162 | 163 | if nodata_value != None: 164 | band.SetNoDataValue(nodata_value) 165 | 166 | if is_masked(data): 167 | if nodata_value != None: 168 | data.data[data.mask] = nodata_value 169 | band.WriteArray(data.data) 170 | else: 171 | band.WriteArray(data.data) 172 | 173 | # Clean up by closing the dataset 174 | 175 | dst_ds = None 176 | src_ds = None 177 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/GDALutilities.py: -------------------------------------------------------------------------------- 1 | """ 2 | Interfaces to the gdal utility programs, http://www.gdal.org/gdal_utilities.html. 
3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | import shlex 8 | from subprocess import Popen, PIPE 9 | from GDALOGRUtilities import GDALInfo 10 | 11 | 12 | class WarpToLayer: 13 | """Given a reference gdal layer, extract data from other files, 14 | reproject, if required, using gdalwarp and extract the data corresponding to 15 | the reference file at the reference file resolution.""" 16 | 17 | def __init__(self, reference_layer_file): 18 | """Initialize with a reference layer file.""" 19 | 20 | self.reference_layer = GDALInfo(reference_layer_file) 21 | 22 | def __call__(self, 23 | srcfile, 24 | dstfile, 25 | dstnodata_value=None, 26 | executable='gdalwarp', 27 | resampling_method='bilinear', 28 | gdal_format='GTiff'): 29 | """Warp srcfile to dstfile (same projection, window, and resolution). 30 | 31 | resampling_method: Resampling method to use. Available methods are: 32 | 'near': nearest neighbour resampling (default, fastest algorithm, worst interpolation quality). 33 | 'bilinear': bilinear resampling. 34 | 'cubic': cubic resampling. 35 | 'cubicspline': cubic spline resampling. 36 | 'lanczos': Lanczos windowed sinc resampling. 37 | 'average': average resampling, computes the average of all non-NODATA contributing pixels. (GDAL >= 1.10.0) 38 | 'mode': mode resampling, selects the value which appears most often of all the sampled points. 
(GDAL >= 1.10.0) 39 | """ 40 | 41 | self.init_source_layer(srcfile) 42 | self.warp( 43 | dstfile, 44 | dstnodata_value=dstnodata_value, 45 | executable=executable, 46 | resampling_method=resampling_method, 47 | gdal_format=gdal_format) 48 | 49 | def init_source_layer(self, source_layer_file): 50 | """Get the gdal information from the source layer file, and decide whether 51 | reprojection is necessary.""" 52 | 53 | self.source_layer_file = source_layer_file 54 | self.source_layer = GDALInfo(source_layer_file) 55 | 56 | # Check to see if the projections are the same 57 | 58 | self.same_projection = self.reference_layer.spatial_reference.IsSame( 59 | self.source_layer.spatial_reference) 60 | 61 | def warp(self, 62 | dstfile, 63 | dstnodata_value=None, 64 | executable='gdalwarp', 65 | resampling_method='bilinear', 66 | gdal_format='GTiff'): 67 | """Warp the input layer to the same coordinate system as the reference layer 68 | by calling gdalwarp. 69 | 70 | resampling_method: Resampling method to use. Available methods are: 71 | 'near': nearest neighbour resampling (default, fastest algorithm, worst interpolation quality). 72 | 'bilinear': bilinear resampling. 73 | 'cubic': cubic resampling. 74 | 'cubicspline': cubic spline resampling. 75 | 'lanczos': Lanczos windowed sinc resampling. 76 | 'average': average resampling, computes the average of all non-NODATA contributing pixels. (GDAL >= 1.10.0) 77 | 'mode': mode resampling, selects the value which appears most often of all the sampled points. (GDAL >= 1.10.0) 78 | 79 | gdal_format: one of the formats supported by gdalwarp (e.g., gdalwarp --formats). 
80 | """ 81 | 82 | self.warp_file = dstfile 83 | if dstnodata_value == None: 84 | dstnodata_value = self.source_layer.nodata_value 85 | 86 | proj4 = self.reference_layer.proj4_proj 87 | srcfile = self.source_layer_file 88 | 89 | warp_command = "%(executable)s -t_srs '%(proj4)s' -r %(resampling_method)s " 90 | 91 | # Set the no data values 92 | 93 | if self.source_layer.nodata_value != None: 94 | srcnodata_value = self.source_layer.nodata_value 95 | warp_command += " -srcnodata %(srcnodata_value)s " 96 | 97 | if dstnodata_value != None: 98 | warp_command += " -dstnodata %(dstnodata_value)s " 99 | 100 | # Set the destination pixel size 101 | 102 | xres = abs(self.reference_layer.pixel_width) 103 | yres = abs(self.reference_layer.pixel_height) 104 | warp_command += " -tr %(xres)s %(yres)s " 105 | 106 | # Set the destination output window 107 | 108 | xmin = self.reference_layer.llx 109 | ymin = self.reference_layer.lly 110 | xmax = self.reference_layer.urx 111 | ymax = self.reference_layer.ury 112 | 113 | warp_command += " -te %(xmin)s %(ymin)s %(xmax)s %(ymax)s " 114 | 115 | # Set the destination file and format 116 | 117 | warp_command += " -of %(gdal_format)s %(srcfile)s %(dstfile)s" 118 | warp_command = warp_command % locals() 119 | 120 | print(warp_command) 121 | args = shlex.split(warp_command) 122 | p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE) 123 | 124 | self.warp_stdout, self.warp_stderr = p.communicate() 125 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/GeodeticPath.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utilities for calculating 3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | from pyproj import Geod 8 | import numpy as np 9 | 10 | 11 | class GeodeticPath: 12 | """Calculate the geodetic path between two points using pyproj Geod""" 13 | 14 | def __init__(self, lon0, lat0, lon1, lat1, 
ellps='WGS84', radians=False): 15 | """Initialize with the start and stop points.""" 16 | 17 | self.geod = Geod(ellps=ellps) 18 | 19 | self.lon0 = lon0 20 | self.lat0 = lat0 21 | self.lon1 = lon1 22 | self.lat1 = lat1 23 | 24 | # Get the forward and backward azimuths and distance between the two points 25 | 26 | self.azimuth0, self.back_azimuth1, self.distance = self.geod.inv( 27 | lon0, lat0, lon1, lat1) 28 | 29 | def get_path_lonlats(self, separation): 30 | """Get the latitude and longitude of the path points, given a point separation in meters.""" 31 | 32 | self.npts = int(self.distance / separation + 0.5) 33 | self.lonlats = np.array( 34 | self.geod.npts(self.lon0, self.lat0, self.lon1, self.lat1, 35 | self.npts)) 36 | self.lon = self.lonlats[:, 0] 37 | self.lat = self.lonlats[:, 1] 38 | 39 | def get_path(self, separation=None): 40 | """Get the path latitude, longitude, heading, and distance. 41 | If separation is None, assume get_path_lon_lats has already been called.""" 42 | 43 | if separation != None: 44 | self.get_path_lonlats(separation) 45 | 46 | # Declare heading and distance arrays 47 | 48 | self.heading = np.zeros(len(self.lat), dtype=self.lat.dtype) 49 | self.along_track_distance = np.zeros( 50 | len(self.lat), dtype=self.lat.dtype) 51 | 52 | for i in range(len(self.lat) - 1): 53 | self.heading[i], back_azimuth, d = self.geod.inv( 54 | self.lon[i], self.lat[i], self.lon[i + 1], self.lat[i + 1]) 55 | self.along_track_distance[i + 1] = self.along_track_distance[i] + d 56 | 57 | if back_azimuth < 0: 58 | self.heading[-1] = 180 + back_azimuth 59 | else: 60 | self.heading[-1] = back_azimuth - 180. 61 | 62 | 63 | class GeodeticPathFromPegPoint(GeodeticPath): 64 | """Calulate a geodetic path given a peg-point and a path length.""" 65 | 66 | def __init__(self, 67 | peg_lat, 68 | peg_lon, 69 | peg_heading, 70 | distance, 71 | peg_h=0., 72 | mode='center', 73 | ellps='WGS84', 74 | radians=False): 75 | """Initialize with a peg-point and a heading and a distance. 
76 | 77 | mode: 'center', 'start', or 'end'. The peg-point location in the path. 78 | ellps: ellipsoid. 79 | """ 80 | 81 | self.peg_lat = peg_lat 82 | self.peg_lon = peg_lon 83 | self.peg_heading = peg_heading 84 | self.distance = distance 85 | self.peg_h = peg_h 86 | self.mode = mode 87 | 88 | self.geod = Geod(ellps=ellps) 89 | 90 | if mode == 'start': 91 | lat0 = peg_lat 92 | lon0 = peg_lon 93 | lon1, lat1, hdg = self.geod.fwd(lon0, lat0, peg_heading, distance) 94 | elif mode == 'end': 95 | lat1 = peg_lat 96 | lon1 = peg_lon 97 | hdg = (peg_heading + 180.) % 360. 98 | lon0, lat0, hdg = self.geod.fwd(lon1, lat1, hdg, distance) 99 | else: 100 | hdg = (peg_heading + 180.) % 360. 101 | lon0, lat0, hdg = self.geod.fwd(peg_lon, peg_lat, hdg, 102 | distance / 2.) 103 | lon1, lat1, hdg = self.geod.fwd(peg_lon, peg_lat, peg_heading, 104 | distance / 2.) 105 | 106 | GeodeticPath.__init__( 107 | self, lon0, lat0, lon1, lat1, ellps=ellps, radians=False) 108 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/OGR2Shapely.py: -------------------------------------------------------------------------------- 1 | """ 2 | Read OGR supported vectors from/to shapely arrays. 3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | from osgeo import ogr, osr 8 | import numpy as np 9 | from shapely import wkt 10 | 11 | 12 | class ShapelyDataSource: 13 | """Hold all of the information in an ogr data source as shapely arrays. 14 | 15 | Initilize and, optionally, read ogr data from an ogr supported file 16 | or an ogr DataSource. 
17 | """ 18 | 19 | def __init__(self, ogr_file=None, ogr_data_source=None): 20 | # The layers are stored as a dictionary of ShapelyLayers 21 | 22 | self.nlayers = 0 23 | self.layer = {} 24 | self.name = None 25 | 26 | # This sets the layer to use when iterating over the data source 27 | 28 | self.layer_index = 0 29 | 30 | # Read from a file or an OGR data source, if available 31 | 32 | if ogr_file != None: 33 | self.from_ogr_file(ogr_file) 34 | elif ogr_data_source != None: 35 | self.from_ogr_data_source(ogr_data_source) 36 | else: 37 | self.ogr_file = None 38 | self.ogr_data_source = None 39 | 40 | def from_ogr_file(self, ogr_file): 41 | """Initialize from an ogr_supported data file.""" 42 | 43 | self.ogr_file = ogr_file 44 | ogr_data_source = ogr.Open(ogr_file) 45 | if ogr_data_source == None: 46 | raise Exception('Cannot open ogr file: %s' % ogr_file) 47 | self.from_ogr_data_source(ogr_data_source) 48 | 49 | def from_ogr_data_source(self, ogr_data_source): 50 | """Initialize from an open ogr DataSource.""" 51 | 52 | self.ogr_data_source = ogr_data_source 53 | self.name = ogr_data_source.GetName() 54 | self.nlayers = ogr_data_source.GetLayerCount() 55 | 56 | for i in range(self.nlayers): 57 | ogr_layer = ogr_data_source.GetLayer(i) 58 | self.layer[i] = ShapelyLayer(ogr_layer=ogr_layer) 59 | 60 | def __getitem__(self, index): 61 | """Return the shape in the index feature in the active layer. 
This is useful for iterating over the shapes in the active layer.""" 62 | 63 | return self.layer[self.layer_index].feature[index].shape 64 | 65 | def get_shape(self, index, layer_index=0): 66 | """Return the shape in the index feature.""" 67 | 68 | return self.layer[layer_index].feature[index].shape 69 | 70 | def get_numpy_shape(self, index, layer_index=0): 71 | """Return the shape in the index feature as a numpy array.""" 72 | 73 | return np.array(self.layer[layer_index].feature[index].shape) 74 | 75 | def get_field(self, index, layer_index=0): 76 | """Return the field in the index feature.""" 77 | 78 | return self.layer[layer_index].feature[index].field 79 | 80 | def get_shapes(self, layer_index=0): 81 | """Return the all the shapes in the layer features.""" 82 | 83 | return [ 84 | self.layer[layer_index].feature[i].shape 85 | for i in range(self.layer[layer_index].nfeatures) 86 | ] 87 | 88 | def get_numpy_shapes(self, layer_index=0): 89 | """Return the all the shapes in the layer features.""" 90 | 91 | return np.array([ 92 | np.array(self.layer[layer_index].feature[i].shape) 93 | for i in range(self.layer[layer_index].nfeatures) 94 | ]) 95 | 96 | def get_fields(self, layer_index=0): 97 | """Return the all the fields in the layer features.""" 98 | 99 | return [ 100 | self.layer[layer_index].feature[i].field 101 | for i in range(self.layer[layer_index].nfeatures) 102 | ] 103 | 104 | 105 | class ShapelyLayer: 106 | """Holds layer information in a dictionary of ShapelyFeatures.""" 107 | 108 | def __init__(self, ogr_layer=None): 109 | """Initialize and, optionally copy an ogr layer.""" 110 | 111 | self.nfeatures = 0 112 | self.feature = [] 113 | self.name = '' 114 | ## self.geometries = [] # a list containing the geometry for each feature 115 | ## self.fields = [] # a list containing the fields for each feature 116 | 117 | if ogr_layer != None: self.from_ogr(ogr_layer) 118 | 119 | def from_ogr(self, ogr_layer): 120 | """Initialize from an OGR layer.""" 121 | 122 | 
self.ogr_layer = ogr_layer 123 | self.spatial_ref = ogr_layer.GetSpatialRef() 124 | self.nfeatures = ogr_layer.GetFeatureCount() 125 | self.name = ogr_layer.GetName() 126 | 127 | for i in range(self.nfeatures): # Notice that the count starts at 1! 128 | ogr_feature = ogr_layer.GetNextFeature() 129 | self.feature.append(ShapelyFeature(ogr_feature=ogr_feature)) 130 | ## self.geometries.append(self.feature[i].shape) 131 | ## self.fields.append(self.feature[i].field) 132 | 133 | def __getitem__(self, index): 134 | """Return the shape in the index feature. This is useful for iterating over the shapes in the layer.""" 135 | 136 | return self.feature[index].shape 137 | 138 | def get_shape(self, index): 139 | """Return the shape in the index feature.""" 140 | 141 | return self.feature[index].shape 142 | 143 | def get_numpy_shape(self, index): 144 | """Return the shape in the index feature as a numpy array.""" 145 | 146 | return np.array(self.feature[index].shape) 147 | 148 | def get_field(self, index): 149 | """Return the field in the index feature.""" 150 | 151 | return self.feature[index].field 152 | 153 | def get_shapes(self): 154 | """Return the all the shapes in the layer features.""" 155 | 156 | return [self.feature[i].shape for i in range(self.nfeatures)] 157 | 158 | def get_numpy_shapes(self): 159 | """Return the all the shapes in the layer features.""" 160 | 161 | return np.array( 162 | [np.array(self.feature[i].shape) for i in range(self.nfeatures)]) 163 | 164 | def get_fields(self): 165 | """Return the all the fields in the layer features.""" 166 | 167 | return [self.feature[i].field for i in range(self.nfeatures)] 168 | 169 | 170 | class ShapelyFeature: 171 | """Holds feature information in shapely arrays.""" 172 | 173 | def __init__(self, ogr_feature=None): 174 | """Initilaize from an ogr layer or a shapely shape""" 175 | 176 | self.nfields = 0 177 | self.field = {} 178 | 179 | if type(ogr_feature) != type( 180 | None): # awkward for direct comparison is 
broken 181 | self.from_ogr(ogr_feature) 182 | 183 | def from_ogr(self, ogr_feature): 184 | """Initialize from an OGR feature.""" 185 | 186 | self.ogr_feature = ogr_feature 187 | self.nfields = ogr_feature.GetFieldCount() 188 | 189 | for i in range(self.nfields): 190 | self.field[list(ogr_feature.keys())[i]] = ogr_feature.GetField(i) 191 | 192 | # Now get the geometry 193 | 194 | self.geometry = ogr_feature.GetGeometryRef() 195 | self.geometry_name = self.geometry.GetGeometryName() 196 | self.npoints = self.geometry.GetPointCount() 197 | 198 | self.shape = wkt.loads(self.geometry.ExportToWkt()) 199 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, print_function 2 | 3 | from .version import __version__ 4 | try: 5 | from .OGR2Shapely import ShapelyDataSource, ShapelyLayer, ShapelyFeature 6 | from .OGRWriter import OGRWriter 7 | from .GDALInfo import GDALInfo 8 | from .GDALutilities import WarpToLayer 9 | except: 10 | print( 11 | "Warning: ShapelyDataSource, ShapelyLayer, ShapelyFeature, OGRWriter disabled." 12 | ) 13 | print("Warning: GDALInfo, WarpToLayer disabled.") 14 | print("Please install shapely if you would like to use the classes.") 15 | from .GDALLatLonLayer import GDALLatLonLayer, GDALDEMLayer 16 | from .GeodeticPath import GeodeticPath, GeodeticPathFromPegPoint 17 | from .GDALWriter import write_llh_to_gdal, write_numpy_to_gdal 18 | from .CoordinateTransformations import CoordinateTransformation 19 | -------------------------------------------------------------------------------- /src/GDALOGRUtilities/version.py: -------------------------------------------------------------------------------- 1 | # Track the version number and associated changes. 
2 | # First version tracked is 0.2.0 3 | 4 | #__version__ = '0.2.0' 5 | 6 | # Minor modification to allow a limited version of the package 7 | # to be imported without requiring shapely. 8 | 9 | #__version__ = '0.2.1' 10 | 11 | # Fixed GDALWriter to accommodate NS or SN row order on input. 12 | 13 | #__version__ = '0.2.2' 14 | 15 | # Fixed GDALWriter and OGRWriter after compiling with cython 16 | 17 | #__version__ = '0.2.3' 18 | 19 | # Python 2.7/3.6 compatibility. 20 | 21 | __version__ = '0.2.4' 22 | -------------------------------------------------------------------------------- /src/GWDLR/GWDLR.py: -------------------------------------------------------------------------------- 1 | """ 2 | A class to read GWDLR files, make masks, and output to GIS raster data. 3 | 4 | The files are in GrADS format, described in http://www.iges.org/grads/gadoc/aboutgriddeddata.html. 5 | A simple GrADS parser is implemented here. 6 | """ 7 | 8 | from __future__ import absolute_import, division, print_function 9 | 10 | import os 11 | import skimage.morphology 12 | import numpy as np 13 | 14 | import GDALOGRUtilities 15 | 16 | class GWDLR: 17 | """A class to read GWDLR files, make masks, and output to GIS raster data.""" 18 | 19 | def __init__(self, rootname, data_dir='.'): 20 | """Read the bin and ctl files, and parse the ctl.""" 21 | 22 | self.ctl_file = os.path.join(data_dir, rootname + '.ctl') 23 | self.parse_ctl(self.ctl_file) 24 | 25 | self.bin_file = os.path.join(data_dir, rootname + '.bin') 26 | self.data = np.reshape( 27 | np.fromfile(self.bin_file, dtype=np.float32), 28 | (self.y.size, self.x.size)) 29 | 30 | def parse_ctl(self, ctl_file): 31 | """Parse the ctl file to extract variables and store as object variables.""" 32 | 33 | # Read the ctl file and close 34 | 35 | with open(ctl_file) as fin: 36 | header = fin.readlines() 37 | 38 | # parse the header, line by line 39 | 40 | for line in header: 41 | s = line.split() 42 | if not len(s) > 0: 43 | continue 44 | kwd = s[0] 
45 | if kwd in ['xdef', 'ydef', 'tdef', 'zdef']: 46 | self.parse_def(line) 47 | elif kwd in ['dset', 'title', 'options']: 48 | #exec('self.%s = "%s"'%(s[0],s[1])) 49 | setattr(self, s[0], s[1]) 50 | elif kwd in ['undef', 'vars']: 51 | #exec('self.%s = %s'%(s[0],s[1])) 52 | setattr(self, s[0], s[1]) 53 | elif 'endian' in line: 54 | for wd in s: 55 | if 'endian' in wd: 56 | #exec('self.endian = "%s"'%wd) 57 | setattr(self, 'endian', wd) 58 | 59 | def parse_def(self, line): 60 | """Store the def into a an object with the following attributes: 61 | name, size, scale, origin, step.""" 62 | 63 | # An empty class to hold the variables 64 | 65 | class Axis: 66 | pass 67 | 68 | # Parse the line and store in the class 69 | 70 | kwd, size, scale, origin, step = line.split() 71 | axis = Axis() 72 | axis.name = kwd[0] 73 | axis.size = int(size) 74 | axis.scale = scale 75 | try: 76 | axis.origin = float(origin) 77 | except: 78 | axis.origin = origin 79 | try: 80 | axis.step = float(step) 81 | except: 82 | axis.step = step 83 | if axis.size > 1: 84 | axis.coordinates = axis.origin + np.arange(axis.size) * axis.step 85 | 86 | # Store the Axis instance as a member 87 | 88 | #exec('self.%s = axis'%(axis.name)) 89 | setattr(self, axis.name, axis) 90 | 91 | def to_mask(self, width, overwrite=True, thin=False): 92 | """Return a mask with 1's when the data is >= width, 0 otherwise. 93 | If overwrite == True, the self.data is replaced with the mask. 
94 | 95 | If skeletonize: thin the mask 96 | """ 97 | 98 | mask = (self.data >= width).astype(np.uint8) 99 | 100 | if thin: 101 | mask = skimage.morphology(mask).astype(np.uint8) 102 | 103 | if overwrite: 104 | self.data = mask 105 | 106 | return mask 107 | 108 | def to_gdal(self, dst_filename, gdal_format='GTiff', nodata_value=None): 109 | """Write the data to a georeferenced GIS file compatible with gdal.""" 110 | 111 | lon_min, dlon, lat_min, dlat = (self.x.coordinates.min(), self.x.step, 112 | self.y.coordinates.min(), self.y.step) 113 | 114 | GDALOGRUtilities.write_llh_to_gdal( 115 | self.data, 116 | lon_min, 117 | dlon, 118 | lat_min, 119 | dlat, 120 | gdal_format, 121 | dst_filename, 122 | origin_up=True, 123 | options=None, 124 | nodata_value=nodata_value, 125 | vflip_data=False) 126 | -------------------------------------------------------------------------------- /src/GWDLR/GWDLR2shape.py: -------------------------------------------------------------------------------- 1 | """ 2 | Take GWDLR files and turn them into shape files. These programs need to be run in 3 | a grass shell. 4 | """ 5 | 6 | from __future__ import absolute_import, division, print_function 7 | 8 | import os 9 | from os.path import join 10 | from subprocess import call 11 | import shlex 12 | from .GWDLR import GWDLR 13 | 14 | 15 | class GWDLR2shape: 16 | """Take GWDLR files and turn them into shape files. These programs need to be run in 17 | a grass shell.""" 18 | 19 | def __init__(self, gwdlr_data_dir, output_dir): 20 | """Initialize with input and output data specifications. 
21 | 22 | gwdlr_data_dir: contains .bin and .ctl data 23 | output_dir: write shapefiles and mask files to this directory""" 24 | 25 | # Check that we are working in a grass shell (for the moment) 26 | 27 | if not 'GISBASE' in os.environ: 28 | raise Exception('Need to be working inside a grass shell') 29 | 30 | self.gwdlr_data_dir = gwdlr_data_dir 31 | self.output_dir = output_dir 32 | 33 | def process_tile(self, rootname, min_width, create_location=True): 34 | """Process a tile based on selecting all widths > min_width.""" 35 | 36 | # Make the mask file 37 | 38 | gwdlr = GWDLR(rootname, data_dir=self.gwdlr_data_dir) 39 | gwdlr.to_mask(min_width, overwrite=True, thin=True) 40 | 41 | root = rootname.split('_')[0] 42 | mask_file = join(self.output_dir, 43 | root + '_mask_width%d.tif' % min_width) 44 | gwdlr.to_gdal(mask_file) 45 | 46 | # Start a new location with the mask data 47 | 48 | if create_location: 49 | command = 'g.proj -c georef=%(mask_file)s location=%(root)s --verbose' % locals( 50 | ) 51 | self.exec_command(command) 52 | 53 | # Switch to the new location 54 | 55 | command = 'g.mapset mapset=PERMANENT location=%(root)s' % locals() 56 | self.exec_command(command) 57 | 58 | # Read the mask data into the location 59 | 60 | command = 'r.in.gdal input=%(mask_file)s output=center_line_mask_width%(min_width)d' % locals( 61 | ) 62 | self.exec_command(command) 63 | 64 | # Thin the mask to make sure that r.to.vect does not crash 65 | 66 | command = 'r.thin input=center_line_mask_width%(min_width)d output=center_line_mask_thin_width%(min_width)d --verbose' % locals( 67 | ) 68 | self.exec_command(command) 69 | 70 | # Make a vector from the thinned data 71 | 72 | command = 'r.to.vect input=center_line_mask_thin_width%(min_width)d output=center_line_width%(min_width)d feature=line --verbose' % locals( 73 | ) 74 | self.exec_command(command) 75 | 76 | # Export to a shapefile 77 | 78 | dsn = join(self.output_dir, root + '_center_lines_width%d' % min_width) 79 | command = 
'v.out.ogr input=center_line_width%(min_width)d dsn=%(dsn)s' % locals( 80 | ) 81 | self.exec_command(command) 82 | 83 | def exec_command(self, command): 84 | """Execute a grass command and catch errors.""" 85 | 86 | print('Executing command: %s' % command) 87 | args = shlex.split(command) 88 | status = call(args) 89 | 90 | if status: 91 | print('Command: "%s" exited with status: %s' % (command, status)) 92 | 93 | return status 94 | -------------------------------------------------------------------------------- /src/GWDLR/README.md: -------------------------------------------------------------------------------- 1 | #Global Width Database for Large Rivers (GWDLR) 2 | 3 | This package contains for turning GWDLR data set tiles into vector centerlines. 4 | 5 | The format of the GWDLR is (Dai Yamazaki personal communication): 6 | 7 | *The width data is prepared as 5degx5deg tiles. It's 4 byte real plain binary data, and you can find the details in .ctl files. Note that width value is not accurate around 60N because it's on the domain boundary. The value >0 represents river width (on river centerline), value=-1 represents non-centerline waterbody, value=-2 represents islands. value=0 represents land, value=-9999 represents sea.* 8 | 9 | The data are in [GrADS](http://www.iges.org/grads/gadoc/aboutgriddeddata.html) format, but a simpleminded GrADS 10 | parser is part of this package. 11 | 12 | ##Requirements 13 | 14 | To go from raster to vectors, [GRASS](http://grass.osgeo.org/) is required. This version was tested 15 | with grass64, Notice that grass70 version 7.0 (beta1) has a broken 16 | version of thin. 17 | 18 | ##Setting up a Grass GISDBASE 19 | 20 | To use the raster to vector tool, the data will have to be imported to grass. 21 | As a first step an empty repository will have to be created. To avoid this, 22 | the SWOTRiver/data directory contains a GISDBASE with a location called TEMP 23 | that can be used as a starting point. 
Copy this grassdata directory to the 24 | place that will contain all of the grass output. 25 | 26 | The following are the steps to create an GISDBASE: 27 | 28 | 1. Create a directory to hold it: 29 | 30 | mkdir grassdata 31 | 32 | 2. Start grass in text version 33 | 34 | grass -text 35 | 36 | 3. The follwing screen pops up: 37 | 38 | 39 | GRASS 6.4.2 40 | 41 | DATABASE: A directory (folder) on disk to contain all GRASS maps and data. 42 | 43 | LOCATION: This is the name of a geographic location. It is defined by a 44 | co-ordinate system and a rectangular boundary. 45 | 46 | MAPSET: Each GRASS session runs under a particular MAPSET. This consists of 47 | a rectangular REGION and a set of maps. Every LOCATION contains at 48 | least a MAPSET called PERMANENT, which is readable by all sessions. 49 | 50 | The REGION defaults to the entire area of the chosen LOCATION. 51 | You may change it later with the command: g.region 52 | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 53 | 54 | LOCATION: ________________ (enter list for a list of locations) 55 | MAPSET: ________________ (or mapsets within a location) 56 | 57 | DATABASE: /home/erodrigu_______________________________________________________ 58 | 59 | 60 | 61 | AFTER COMPLETING ALL ANSWERS, HIT TO CONTINUE 62 | (OR TO CANCEL) 63 | 64 | 65 | replace the location with temp and the MASET with PERMANENT. Modify the 66 | DATABASE to where the grassdata directory is going to be. 67 | 68 | 4. Create the location and follow the instructions to create an x,y dataset with unit 69 | resolution and extent. End by exiting grass. 
70 | 71 | ##Grass steps to create vector data sets 72 | 73 | Assuming grass v6.4 and the working directory is grassdata, start 74 | grass on the mac using: 75 | 76 | /Applications/GRASS-6.4.app/Contents/MacOS/grass.sh -text grassdata/temp/PERMANENT/ 77 | 78 | in linux: 79 | 80 | grass -text grassdata/n35w125/PERMANENT/ 81 | 82 | Although the following is implemented in Python, for the record, below 83 | are the grass commands required to get to vectors from the sample data: 84 | 85 | # Start grass on the temp dataset 86 | 87 | grass -text grassdata/temp/PERMANENT 88 | 89 | # Start a new location with the mask data 90 | 91 | g.proj -c georef=n35w125_wth_center_line_25.tif location=n35w125 --verbose 92 | 93 | # Switch to the new location 94 | 95 | g.mapset mapset=PERMANENT location=n35w125 96 | 97 | # Read the mask data into the location 98 | 99 | r.in.gdal input=n35w125_wth_center_line_25.tif output=center_line_mask 100 | 101 | # Thin the mask to make sure that r.to.vect does not crash 102 | 103 | r.thin input=center_line_mask output=center_line_mask_thin --verbose 104 | 105 | # Make a vector from the thinned data 106 | 107 | r.to.vect input=center_line_mask_thin output=center_line feature=line --verbose 108 | 109 | # Export to a shapefile 110 | 111 | v.out.ogr input=center_line dsn=n35w125_center_lines 112 | -------------------------------------------------------------------------------- /src/GWDLR/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package contains for turning GWDLR data set tiles into vector centerlines. 3 | 4 | The format of the GWDLR is (Dai Yamazaki personal communication): 5 | 6 | "The width data is prepared as 5degx5deg tiles. It's 4 byte real plain binary data, 7 | and you can find the details in .ctl files. Note that width value is not accurate 8 | around 60N because it's on the domain boundary. 
The value >0 represents river width 9 | (on river centerline), value=-1 represents non-centerline waterbody, 10 | value=-2 represents islands. value=0 represents land, value=-9999 represents sea."" 11 | 12 | The data are in [GrADS](http://www.iges.org/grads/gadoc/aboutgriddeddata.html) format, but a simpleminded GrADS 13 | parser is part of this package. 14 | """ 15 | 16 | from __future__ import absolute_import 17 | 18 | from .GWDLR import GWDLR 19 | from .GWDLR2shape import GWDLR2shape 20 | 21 | from .version import __version__ 22 | -------------------------------------------------------------------------------- /src/GWDLR/version.py: -------------------------------------------------------------------------------- 1 | # The beginning of version counts for this package 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Python 2.7/3.6 compatibility. 6 | 7 | __version__ = '0.1.1' 8 | -------------------------------------------------------------------------------- /src/GeometryDataBase/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fast access to a data base containing a set of geometries. 3 | 4 | The data base is created by reading a shapefile, and write an rtree index for it for future use. 
5 | 6 | All bounding boxes are assumed to be an iterable (xmin,ymin,xmax,ymax) 7 | """ 8 | 9 | from __future__ import absolute_import 10 | 11 | from .GeometryDataBase import GeometryDataBase2D, GeometryDataBase3D 12 | from .GeometryDataBase import bbox_generator_3D, bbox_generator_2D, shape_bbox_dbf_as_tuple 13 | from .GeometryDataBase import shape_bbox_as_tuple, write_shape_rtree_3D, write_shape_rtree_2D 14 | 15 | from .version import __version__ 16 | -------------------------------------------------------------------------------- /src/GeometryDataBase/version.py: -------------------------------------------------------------------------------- 1 | # Intial release 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Python 2.7/3.6 compatibility. 6 | 7 | __version__ = '0.1.1' 8 | -------------------------------------------------------------------------------- /src/RDF/ExecuteRDF.py: -------------------------------------------------------------------------------- 1 | """ 2 | A python wrapper for calling a program that takes as input 3 | an RDF file. This is supposed to be subclassed for different 4 | programs. 5 | """ 6 | 7 | from __future__ import absolute_import, division, print_function 8 | 9 | from threading import Thread 10 | from subprocess import Popen, PIPE 11 | from string import Template 12 | from distutils.spawn import find_executable 13 | from tempfile import NamedTemporaryFile 14 | 15 | 16 | class ExecuteRDF(Thread): 17 | """A class meant to be subclassed to run a program that takes as an input 18 | an RDF file in a separate thread.""" 19 | 20 | rdf_template = Template( 21 | "") # This will need to be defined for derived classes 22 | 23 | def __init__(self, executable): 24 | """Intialize the thread. As an option, pass the path to the executable. 
25 | If not passed, it will be searched for in the PATH.""" 26 | 27 | self.stdout = None 28 | self.stderr = None 29 | Thread.__init__(self) 30 | 31 | if find_executable(executable) == None: 32 | raise Exception('Cannot find executable: %s' % executable) 33 | 34 | self.executable = executable 35 | 36 | def set_params(self, rdf=None, **kwargs): 37 | """Set the parameters in the template by passing an RDF 38 | object and/or kwargs. kwargs override the rdf object.""" 39 | 40 | if rdf != None: 41 | for k in list(rdf.keys()): 42 | #exec('self.%s = rdf["%s"]'%(k,k)) 43 | setattr(self, k, k) 44 | 45 | for k in kwargs: 46 | #exec('self.%s = kwargs["%s"]'%(k,k)) 47 | setattr(self, k, kwargs[k]) 48 | 49 | def set_template(self, rdf_template): 50 | """Set the rdf template by passing a string with $variable 51 | substitutions corresponding to the class variables set 52 | using set_params.""" 53 | 54 | self.rdf_template = Template(rdf_template) 55 | 56 | def write_rdf(self, rdf_file=None, dir=None, delete=True): 57 | """Open an RDF file and write the rdf inputs. If rdf_file = None, 58 | a temporary file will be created. It is assumed that all appropriate 59 | parameters have been defined in set_params.""" 60 | 61 | if rdf_file == None: 62 | self.fout_rdf = NamedTemporaryFile(delete=delete, dir=dir) 63 | self.rdf_name = self.fout_rdf.name 64 | else: 65 | self.fout_rdf = open(rdf_file, 'w') 66 | self.rdf_name = rdf_file 67 | 68 | self.fout_rdf.write(self.rdf_template.substitute(self.__dict__)) 69 | self.fout_rdf.flush() 70 | 71 | def run(self, args=[], args_start=True): 72 | """This is the thread definition. args is a list of optional arguments 73 | to be passed to the executable by Popen. If args_start == True, the 74 | call sequence is 'executable args rdf_file'. 
Otherwise, it is 75 | 'executable args rdf_file'.""" 76 | 77 | command = [self.executable] 78 | if args != [] and args_start: 79 | command += args 80 | command.append(self.rdf_name) 81 | if args != [] and not args_start: 82 | command += args 83 | 84 | print(command) 85 | p = Popen(command, shell=False, stdout=PIPE, stderr=PIPE) 86 | 87 | self.stdout, self.stderr = p.communicate() 88 | -------------------------------------------------------------------------------- /src/RDF/RDF_to_class.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | from __future__ import absolute_import, division, print_function 4 | 5 | from .RDF import RDF 6 | 7 | 8 | class RDF_to_class: 9 | """Convert the data in an RDF file or class to a class whose 10 | members are assigned desired variable names and corresponting 11 | values. 12 | 13 | To initialize the class a dictionary with the following structure 14 | 15 | d = { 16 | keyword1: (variableName1,format1,[default1]), 17 | keyword2: (variableName2,format2,[default2]), 18 | ... 19 | keywordN: (variableNameN,formatN,[defaultN]) 20 | } 21 | 22 | where keywordI is the RDF keyword for the ith varaible (string) 23 | varaiableNameI is the name to be assigned to the ith variable (string) 24 | 25 | formatN is "s" or "f" or "d" depending whether the value returned 26 | is a string, float, or integer. (This also applies to arrays.) 27 | 28 | defaultI is an (optional) default value (string) which will be used 29 | to initialize the value. If no default value is specified, the 30 | value will be set to None until read from the RDF instance. 
If the 31 | RDF keyword is not present, no exception will be raised, and the 32 | value will be set to the default value 33 | 34 | As an example, if one entries is for class A is 35 | 36 | "x_keyword":("x","f","4.5") 37 | 38 | or 39 | 40 | "x_keyword":("x","f") 41 | 42 | the A.x will return the floating value of rdf["x_keyword"] 43 | 44 | Parameters 45 | ---------- 46 | 47 | d : dictionary 48 | Dictionary containing the defintions, as above. 49 | file : str 50 | Path to a file containing RDF inputs. 51 | rdf : RDF instance 52 | Initialize from an RDF instance. 53 | 54 | Notes 55 | ----- 56 | 57 | Intialize the variables to None. If rdf instance is given, 58 | load the values from the rdf structure. If a file name is given, 59 | the rdf is read from the file. file takes precedence over rdf. 60 | """ 61 | 62 | def __init__(self, d, file=None, rdf=None): 63 | self.d = d 64 | for tpl in list(d.values()): 65 | if len(tpl) < 3: 66 | name, format = tpl 67 | #exec("self.%s = None"%(name)) 68 | setattr(self, name, None) 69 | else: 70 | name, format, default = tpl 71 | #exec("self.%s = %s"%(name,default)) 72 | setattr(self, name, default) 73 | 74 | if rdf != None: self.fromRDF(rdf) 75 | if file != None: self.fromFile(file) 76 | 77 | def fromRDF(self, rdf): 78 | """Read the values from an rdf instance.""" 79 | 80 | for key in list(self.d.keys()): 81 | format = self.d[key][1] 82 | try: 83 | if format == "s": 84 | setattr(self, self.d[key][0], rdf[key]) 85 | 86 | elif format == "d": 87 | setattr(self, self.d[key][0], rdf.int(key)) 88 | 89 | elif format == "f": 90 | setattr(self, self.d[key][0], rdf.float(key)) 91 | 92 | except: 93 | name = self.d[key][0] 94 | if name not in self.__dict__: 95 | raise ValueError("Cannot find keyword: %s" % key) 96 | 97 | def fromFile(self, file): 98 | """Read from a file.""" 99 | self.fromRDF(RDF().rdfParse(file)) 100 | 101 | 102 | def test(): 103 | 104 | rdf = RDF() 105 | rdf["key1"] = "1.2 2.3 3.4" 106 | rdf["key2"] = "1 2 3" 107 | rdf["key3"] 
= "1.2 2.3 3.4" 108 | 109 | d = { 110 | "key1": ("x", "f", "None"), 111 | "key2": ("y", "d", "None"), 112 | "key3": ("z", "s", "None") 113 | } 114 | 115 | a = RDF_to_class(d) 116 | 117 | print(a.__dict__) 118 | 119 | a.fromRDF(rdf) 120 | 121 | print(a.__dict__) 122 | 123 | rdf["key1"] = "2.2 3.3 4.4" 124 | rdf["key2"] = "2 3 4" 125 | rdf["key3"] = "2.2 3.3 4.4" 126 | 127 | rdf.writeRDF("junk.dat") 128 | 129 | a.fromFile("junk.dat") 130 | 131 | print(a.__dict__) 132 | 133 | 134 | if __name__ == '__main__': test() 135 | -------------------------------------------------------------------------------- /src/RDF/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Read and write keyword = value files. 3 | """ 4 | 5 | from __future__ import absolute_import 6 | 7 | from .RDF import RDF 8 | from .MRDF import MRDF 9 | from .RDF_to_class import RDF_to_class 10 | from .ExecuteRDF import ExecuteRDF 11 | 12 | from .version import __version__ 13 | -------------------------------------------------------------------------------- /src/RDF/version.py: -------------------------------------------------------------------------------- 1 | # The beginning of version counts for this package 2 | 3 | #__version__ = '1.1.0' 4 | 5 | # Python 2.7/3.6 compatibility. 6 | 7 | __version__ = '1.1.1' 8 | -------------------------------------------------------------------------------- /src/RiverObs/LatLonRegion.py: -------------------------------------------------------------------------------- 1 | """ 2 | A base class implemention the LatLonRegion protocol. 3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | import pyproj 7 | 8 | import SWOTRiver.SWOTL2 9 | 10 | class LatLonRegion: 11 | """Access SWOT L2 data conveniently. 
SWOTL2 implements the LatLonRegion object 12 | interfaces in that it provides the following members: 13 | 14 | lat_lon_region.bounding_box: (lonmin,latmin,lonmax,latmax) 15 | 16 | lat_lon_region.proj: a pyproj.Proj projection (lon,lat) -> (x,y) 17 | and (x,y) -> (lon,lat) when called when called with inverse=True 18 | 19 | Parameters 20 | ---------- 21 | 22 | lonmin : float 23 | Minimum longitude, in degrees. 24 | latmin : float 25 | Minimum latitude, in degrees. 26 | lonmax : float 27 | Maximum longitude, in degrees. 28 | latmax : float 29 | Maximum latitude, in degrees. 30 | 31 | Notes 32 | ------ 33 | 34 | The final set of keywords are projection options for pyproj. 35 | A full list of projection options to set plus explanations of their 36 | meaning can be found here: https://trac.osgeo.org/proj/wiki/GenParms 37 | 38 | The default projection is Lambert equiarea, which has a proj4 string with the 39 | following parameters: 40 | 41 | +proj=laea 42 | +lat_0=Latitude at projection center, set to bounding box center lat 43 | +lon_0=Longitude at projection center, set to bounding box center lon 44 | +x_0=False Easting, set to 0 45 | +y_0=False Northing, set to 0 46 | """ 47 | 48 | def __init__(self, 49 | lonmin, 50 | latmin, 51 | lonmax, 52 | latmax, 53 | proj='laea', 54 | x_0=0, 55 | y_0=0, 56 | lat_0=None, 57 | lon_0=None, 58 | ellps='WGS84', 59 | **proj_kwds): 60 | 61 | self.lonmin, self.latmin, self.lonmax, self.latmax = lonmin, latmin, lonmax, latmax 62 | self.bounding_box = self.lonmin, self.latmin, self.lonmax, self.latmax 63 | 64 | # Find lat_0 and lon_0 if not specified previously 65 | 66 | if lat_0 == None: 67 | lat_0 = (latmax + latmin) / 2. 
68 | 69 | if lon_0 == None: 70 | lon_0 = SWOTRiver.SWOTL2.wrap( 71 | SWOTRiver.SWOTL2.wrap(lonmax - lonmin)/2 + lonmin) 72 | 73 | self.proj = pyproj.Proj( 74 | proj=proj, 75 | lat_0=lat_0, 76 | lon_0=lon_0, 77 | x_0=x_0, 78 | y_0=y_0, 79 | ellps=ellps, 80 | **proj_kwds) 81 | -------------------------------------------------------------------------------- /src/RiverObs/ReachExtractor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Extract all of the reaches overlapping a given bounding box and 3 | computes the coordinates in the same projection used by the data. 4 | The result is contained in a set of RiverReach objects, which can be clipped 5 | to the same bounding box as the data set. 6 | """ 7 | 8 | from __future__ import absolute_import, division, print_function 9 | 10 | import numpy as np 11 | import pysal 12 | 13 | from GeometryDataBase import GeometryDataBase2D 14 | from .RiverReach import RiverReach 15 | 16 | 17 | class ReachExtractor: 18 | """Extract all of the reaches overlapping a given bounding box and 19 | computes the coordinates in the same projection used by the data. 20 | The result is contained in a set of RiverReach objects, which can be clipped 21 | to the same bounding box as the data set. 22 | 23 | Initialize with the shape file database location and a lat_lon_region 24 | instance. 25 | 26 | Parameters 27 | ---------- 28 | 29 | shape_file_root : str 30 | path to shapefile database (no suffix) 31 | 32 | lat_lon_region : object 33 | an object providing the following members: 34 | lat_lon_region.bounding_box (lonmin,latmin,lonmax,latmax) 35 | lat_lon_region.proj: a pyproj.Proj projection (lon,lat) -> (x,y) 36 | and (x,y) -> (lon,lat) when called when called with inverse=True 37 | clip : bool 38 | Clip to the bounding box? 39 | clip_buffer : float 40 | buffer to add around bounding box. 
41 | 42 | Notes 43 | ------ 44 | 45 | If clip is true, the reach is clipped to lie in a bounding box defined 46 | by the data bounding box plus a buffer given by clip_buffer 47 | (default is 0.1 deg or ~11 km). 48 | 49 | """ 50 | 51 | def __init__(self, 52 | shape_file_root, 53 | lat_lon_region, 54 | clip=True, 55 | clip_buffer=0.1): 56 | # Open the geometry data base and shape files 57 | #print('shape_file_root:',shape_file_root) 58 | self.db = GeometryDataBase2D(shape_file_root) 59 | 60 | # Open the shape and dbf files 61 | 62 | self.shp = pysal.open(shape_file_root + '.shp') 63 | self.dbf = pysal.open(shape_file_root + '.dbf') 64 | self.dbf_header = self.dbf.header 65 | 66 | # Get the list of applicable reaches and extract them 67 | self.shape_idx = self.db.intersects_xy_bbox( 68 | lat_lon_region.bounding_box) 69 | #print "####### SHAPE_IDX:",self.shape_idx 70 | self.reach_idx = [] 71 | 72 | # Store the reaches in a list of RiverReaches 73 | 74 | self.reach = [] 75 | bbox = lat_lon_region.bounding_box 76 | #print('bbox:',bbox) 77 | for i in self.shape_idx: 78 | 79 | # Get the coordinates as arrays 80 | 81 | lon, lat = np.asarray(self.shp[i].vertices).T 82 | 83 | # Clip the data 84 | 85 | if clip: 86 | inbbox = ((lon >= bbox[0] - clip_buffer) & 87 | (lat >= bbox[1] - clip_buffer) & 88 | (lon <= bbox[2] + clip_buffer) & 89 | (lat <= bbox[3] + clip_buffer)) 90 | lon = lon[inbbox] 91 | lat = lat[inbbox] 92 | 93 | # Project into the L2 projection 94 | 95 | x, y = lat_lon_region.proj(lon, lat) 96 | 97 | # Get the metadata and reach index 98 | # Brent Williams, May 2017: Changed a few things here to handle 99 | # newer river reach database (may have broken ability to read old 100 | # one though, havent tested) 101 | metadata = {} 102 | record = self.dbf[i][0] 103 | reach_index = i 104 | max_width = None 105 | for j, field in enumerate(self.dbf_header): 106 | metadata[field] = record[j] 107 | if field == 'reach_idx': #old grwl way 108 | reach_index = record[j] 109 | if 
field == 'reachID': #new database 110 | reach_index = record[j] 111 | if field == 'Reach_ID': #osu centerline 112 | reach_index = record[j] 113 | #if field == 'Wmean':#new database mean width 114 | # max_width = record[j] 115 | # print "max width:", max_width 116 | self.reach_idx.append(reach_index) 117 | 118 | #print "reachID:",reach_index 119 | #print "reach x:",x 120 | # Append the river reach 121 | #if max_width==None: 122 | self.reach.append( 123 | RiverReach( 124 | lon=lon, 125 | lat=lat, 126 | x=x, 127 | y=y, 128 | metadata=metadata, 129 | reach_index=reach_index)) 130 | #else: 131 | # self.reach.append(RiverReach(lon=lon,lat=lat,x=x,y=y, 132 | # metadata=metadata, 133 | # reach_index=reach_index, 134 | # width_max=width_max)) 135 | 136 | # Set the iterator indexes 137 | self.idx = 0 138 | self.nreaches = len(self.reach) 139 | 140 | def __iter__(self): 141 | """This and the next function define an iterator over reaches.""" 142 | return self 143 | 144 | def __next__(self): ## Python 3: def __next__(self) 145 | """This and the previous function define an iterator over reaches.""" 146 | if self.idx >= self.nreaches: 147 | self.idx = 0 148 | raise StopIteration 149 | 150 | self.idx += 1 151 | return self.reach[self.idx - 1] 152 | 153 | next = __next__ 154 | 155 | def __len__(self): 156 | """Number of reaches.""" 157 | return self.nreaches 158 | 159 | def __getitem__(self, index): 160 | """Get reaches or slices of reaches.""" 161 | return self.reach[index] 162 | -------------------------------------------------------------------------------- /src/RiverObs/RiverReach.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base class containing river reach coordinates and metadata. 3 | """ 4 | from __future__ import absolute_import, division, print_function 5 | 6 | class RiverReach: 7 | """Base class containing river reach coordinates and metadata. 
8 | 9 | Initialize with empty information and, optionally, 10 | a set of keywords that get turned into class members. 11 | The metadata is stored as dictionary variable_name:value. 12 | 13 | This base class can be derived to implement specific reach behavior. 14 | """ 15 | 16 | def __init__(self, **kwds): 17 | self.lat = None 18 | self.lon = None 19 | self.x = None 20 | self.y = None 21 | self.node_indx = None 22 | self.metadata = {} 23 | 24 | for k in kwds: 25 | v = kwds[k] 26 | setattr(self, k, v) 27 | -------------------------------------------------------------------------------- /src/RiverObs/ShapeWriter.py: -------------------------------------------------------------------------------- 1 | import RiverObs.RiverReachWriter 2 | 3 | 4 | def write(reach_collection, fout_node, fout_reach, driver='ESRI Shapefile'): 5 | """ 6 | Writes shapefiles 7 | """ 8 | reach_variables = list(reach_collection[0].metadata.keys()) 9 | 10 | # get node output variables from populated attributes of river_reaches 11 | node_variables = list(reach_collection[0].__dict__.keys()) 12 | node_variables.remove('ds') 13 | node_variables.remove('metadata') 14 | 15 | # Write shapefiles 16 | writer = RiverObs.RiverReachWriter(reach_collection, node_variables, 17 | reach_variables) 18 | 19 | writer.write_nodes_ogr(fout_node, driver) 20 | writer.write_reaches_ogr(fout_reach, driver) 21 | -------------------------------------------------------------------------------- /src/RiverObs/WidthDataBase.py: -------------------------------------------------------------------------------- 1 | """ 2 | Query a pandas HDFStore width data base for rivers. 3 | """ 4 | 5 | from __future__ import absolute_import, division, print_function 6 | 7 | import numpy as np 8 | import pandas 9 | 10 | 11 | class WidthDataBase: 12 | """Query a pandas HDFStore width data base for rivers. 13 | 14 | Parameters 15 | ---------- 16 | 17 | db_file : str 18 | pandas HDFStore file containing river and reach data frames. 
19 | river_df_name : str 20 | Name of the pandas DataFrame containing point 21 | width information (default: 'river'). 22 | reach_df_name : str 23 | Name of the pandas DataFrame containing 24 | reach statistical information (default: 'reach'). 25 | mode: str 26 | how to open the file (default: 'r' read only). 27 | reach_index_kwd: str 28 | column name uniquely identifying the reach. 29 | (default: 'reach_index', but could be the reach name, etc.) 30 | """ 31 | 32 | def __init__(self, 33 | db_file, 34 | river_df_name='river', 35 | reach_df_name='reach', 36 | mode='r', 37 | reach_index_kwd='reach_index'): 38 | """ 39 | """ 40 | 41 | self.h5 = pandas.HDFStore(db_file, mode=mode) 42 | 43 | self.river_df = self.h5[river_df_name] 44 | self.reach_df = self.h5[reach_df_name] 45 | 46 | def get_river(self, 47 | reach_index, 48 | columns=None, 49 | asarray=False, 50 | transpose=False, 51 | lat_kwd='lat', 52 | lon_kwd='long', 53 | bounding_box=None, 54 | clip_buffer=0): 55 | """Return selected information for the reach index by reach_index. 56 | 57 | Parameters 58 | ----------- 59 | 60 | reach_index : int 61 | index identifying the reach. 62 | columns : list 63 | if None, all of the iformation associated with the reach is 64 | returned. Otherwise, pass either a column name or list of 65 | column names;e.g., columns='width' orcolumns=['long','lat','width']. 66 | asarray : bool 67 | if True, returns a numpy ndarray rather than a pandas DataFrame. 68 | transpose: bool 69 | if True and asarray=True, return the transpose array. 70 | This is useful to unpack arrays easily. 71 | lat_kwd : str 72 | latitude column name in the data base (default 'lat') 73 | lon_kwd : str 74 | latitude column name in the data base (default 'long') 75 | bounding_box: tuple 76 | (lonmin, latmin, lonmax, latmax). If not None, the 77 | only lon/lat in the bounding box + clip_buffer are returned. 
78 | """ 79 | 80 | if bounding_box != None: 81 | lon, lat, inbbox = self.get_lon_lat( 82 | reach_index, 83 | lat_kwd=lat_kwd, 84 | lon_kwd=lon_kwd, 85 | bounding_box=bounding_box, 86 | clip_buffer=clip_buffer) 87 | else: 88 | inbbox = None 89 | 90 | # Select the DataFrame for this river 91 | 92 | df = self.river_df[self.river_df['reach_index'] == reach_index] 93 | 94 | # Select the desired columns 95 | 96 | if columns != None: 97 | df = df[columns] 98 | 99 | # If a bounding box has been specified, extract the appropriate records 100 | 101 | if type(inbbox) != type(None): 102 | df = df.iloc[inbbox] 103 | 104 | # Return the desired columns in the desired format 105 | 106 | if not asarray: 107 | return df 108 | else: 109 | if transpose: 110 | return np.asarray(df).T 111 | else: 112 | return np.asarray(df) 113 | 114 | def get_lon_lat(self, 115 | reach_index, 116 | lat_kwd='lat', 117 | lon_kwd='long', 118 | bounding_box=None, 119 | clip_buffer=0): 120 | """Return the latitude and longitude associated with a reach and 121 | a clip box. 122 | 123 | Parameters 124 | ---------- 125 | 126 | lat_kwd : str 127 | latitude column name in the data base (default 'lat') 128 | lon_kwd : str 129 | latitude column name in the data base (default 'long') 130 | bounding_box: tuple 131 | (lonmin, latmin, lonmax, latmax). If not None, the 132 | only lon/lat in the bounding box + clip_buffer are returned. 133 | 134 | Returns 135 | ------- 136 | 137 | Returns lon, lat numpy arrays. If bounding_box != None, also returns 138 | an index array for the good data. 
139 | """ 140 | 141 | lon, lat = self.get_river( 142 | reach_index, 143 | columns=[lon_kwd, lat_kwd], 144 | asarray=True, 145 | transpose=True) 146 | 147 | if bounding_box != None: 148 | inbbox = ((lon >= bounding_box[0] - clip_buffer) & 149 | (lat >= bounding_box[1] - clip_buffer) & 150 | (lon <= bounding_box[2] + clip_buffer) & 151 | (lat <= bounding_box[3] + clip_buffer)) 152 | lon = lon[inbbox] 153 | lat = lat[inbbox] 154 | return lon, lat, inbbox 155 | 156 | return lon, lat 157 | 158 | def get_xy(self, 159 | reach_index, 160 | proj, 161 | lat_kwd='lat', 162 | lon_kwd='long', 163 | bounding_box=None, 164 | clip_buffer=0): 165 | """Given a projection function (e.g., from pyproj.Proj ) return x,y. 166 | 167 | Parameters 168 | ---------- 169 | 170 | proj : function 171 | x,y = proj(lon,lat) 172 | lat_kwd: str 173 | latitude column name in the data base (default 'lat') 174 | lon_kwd: str 175 | latitude column name in the data base (default 'long') 176 | 177 | Returns 178 | ------- 179 | 180 | Returns x,y numpy arrays. 
181 | """ 182 | 183 | if bounding_box == None: 184 | lon, lat = self.get_lon_lat( 185 | reach_index, 186 | lat_kwd=lat_kwd, 187 | lon_kwd=lon_kwd, 188 | bounding_box=bounding_box, 189 | clip_buffer=clip_buffer) 190 | else: 191 | lon, lat, inbbox = self.get_lon_lat( 192 | reach_index, 193 | lat_kwd=lat_kwd, 194 | lon_kwd=lon_kwd, 195 | bounding_box=bounding_box, 196 | clip_buffer=clip_buffer) 197 | 198 | return proj(lon, lat) 199 | -------------------------------------------------------------------------------- /src/RiverObs/__init__.py: -------------------------------------------------------------------------------- 1 | """Associate observations with a river.""" 2 | 3 | from __future__ import absolute_import 4 | 5 | from .RiverNode import RiverNode 6 | from .RiverObs import RiverObs 7 | from .WidthDataBase import WidthDataBase 8 | from .IteratedRiverObs import IteratedRiverObs 9 | from .LatLonRegion import LatLonRegion 10 | from .RiverReach import RiverReach 11 | from .version import __version__ 12 | -------------------------------------------------------------------------------- /src/RiverObs/version.py: -------------------------------------------------------------------------------- 1 | # Initial version 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Code reorganization 6 | 7 | #__version__ = '0.1.1' 8 | 9 | # Added functionality, LatLonRegion, ReachPreProcessor 10 | 11 | #__version__ = '0.2.0' 12 | 13 | # Python 2.7/3.6 compatibility. 14 | 15 | __version__ = '0.2.1' 16 | -------------------------------------------------------------------------------- /src/SWOTRiver/__init__.py: -------------------------------------------------------------------------------- 1 | """Extract hydrology observables from SWOT data.""" 2 | 3 | from __future__ import absolute_import 4 | 5 | from .SWOTL2 import SWOTL2 6 | from .SWOTRiverEstimator import SWOTRiverEstimator 7 | from . 
import EstimateSWOTRiver 8 | 9 | from .version import __version__ 10 | -------------------------------------------------------------------------------- /src/SWOTRiver/analysis/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/src/SWOTRiver/analysis/__init__.py -------------------------------------------------------------------------------- /src/SWOTRiver/errors.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for RiverObs Exceptions 3 | """ 4 | 5 | class RiverObsException(Exception): 6 | pass 7 | 8 | class RiverObsUseageException(Exception): 9 | pass 10 | -------------------------------------------------------------------------------- /src/SWOTRiver/products/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/src/SWOTRiver/products/__init__.py -------------------------------------------------------------------------------- /src/SWOTRiver/products/riversp.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Copyright (c) 2019-, California Institute of Technology ("Caltech"). U.S. 3 | Government sponsorship acknowledged. 4 | All rights reserved. 
import copy
import numpy as np
from collections import OrderedDict as odict

from SWOTWater.products.product import Product, FILL_VALUES, textjoin
from SWOTRiver.products.rivertile import \
    L2HRRiverTile, RiverTileNodes, RiverTileReaches, RIVER_PRODUCT_ATTRIBUTES
from RiverObs.RiverObs import \
    MISSING_VALUE_FLT, MISSING_VALUE_INT4, MISSING_VALUE_INT9

# Start from the single-tile RiverTile attributes and adapt them for the
# RiverSP (multi-tile, single-pass) product.
RIVERSP_ATTRIBUTES = copy.deepcopy(RIVER_PRODUCT_ATTRIBUTES)
RIVERSP_ATTRIBUTES['short_name'] = {
    'dtype': 'str', 'docstr': 'L2_HR_RiverSP', 'value': 'L2_HR_RiverSP'}
# Tile-specific attributes (corner lat/lons, tile number/side/name, tile
# param file xref) do not apply to a multi-tile product; drop them.
ATTRIBUTE_KEYS2POP = [
    "_".join([a, b, c]) for a in ['inner', 'outer'] for b in ['first', 'last']
    for c in ['latitude', 'longitude']] + [
    'xref_l2_hr_river_tile_param_file', 'tile_number', 'swath_side',
    'tile_name']

for key in ATTRIBUTE_KEYS2POP:
    RIVERSP_ATTRIBUTES.pop(key, None)

# For these attributes the docstr already holds the fixed product value.
for key in ['Conventions', 'title', 'platform']:
    RIVERSP_ATTRIBUTES[key]['value'] = RIVERSP_ATTRIBUTES[key]['docstr']

class L2HRRiverSP(L2HRRiverTile):
    """
    Class for River SP data product (nodes + reaches groups).
    """
    UID = "l2_hr_riversp"
    ATTRIBUTES = odict()
    GROUPS = odict([
        ['nodes', 'RiverSPNodes'],
        ['reaches', 'RiverSPReaches'],
    ])

    @classmethod
    def from_rivertiles(cls, rivertiles):
        """
        Builds a River SP product from a list of rivertile data products.

        Each tile is accumulated with '+=' (which dispatches to __add__
        below), then the result is sorted by increasing reach id.
        """
        klass = cls()
        for rivertile in rivertiles:
            klass += rivertile

        # sort them by increasing reach id
        klass.sort()
        return klass

    def __add__(self, other):
        """Return a new product holding the concatenation of self and other."""
        klass = L2HRRiverSP()
        klass.nodes = self.nodes + other.nodes
        klass.reaches = self.reaches + other.reaches
        return klass

    @staticmethod
    def dump_xmls(node_xml_file, reach_xml_file):
        # RiverSPNodes / RiverSPReaches are resolved at call time from
        # module scope (they are defined below this class).
        with open(node_xml_file, 'w') as ofp:
            RiverSPNodes.print_xml(ofp=ofp, is_shapefile=True)
        with open(reach_xml_file, 'w') as ofp:
            RiverSPReaches.print_xml(ofp=ofp, is_shapefile=True)


class RiverSPNodes(RiverTileNodes):
    """Node group of the RiverSP product; overrides title/product_file_id."""
    ATTRIBUTES = RIVERSP_ATTRIBUTES.copy()
    ATTRIBUTES['title'] = {
        'dtype': 'str',
        'value': 'Level 2 KaRIn High Rate River Single Pass Vector Product - Node',
        'docstr': 'Level 2 KaRIn High Rate River Single Pass Vector Product - Node'}
    ATTRIBUTES['product_file_id'] = {
        'dtype': 'str', 'value': 'Node', 'docstr': 'Node'}

    def __add__(self, other):
        """Concatenate every node variable of other onto self (new object)."""
        klass = RiverSPNodes()
        for key in klass.VARIABLES:
            setattr(klass, key, np.concatenate((
                getattr(self, key), getattr(other, key))))
        return klass


class RiverSPReaches(RiverTileReaches):
    """Reach group of the RiverSP product; overrides title/product_file_id."""
    ATTRIBUTES = RIVERSP_ATTRIBUTES.copy()
    ATTRIBUTES['title'] = {
        'dtype': 'str',
        'value': 'Level 2 KaRIn High Rate River Single Pass Vector Product - Reach',
        'docstr': 'Level 2 KaRIn High Rate River Single Pass Vector Product - Reach'}
    ATTRIBUTES['product_file_id'] = {
        'dtype': 'str', 'value': 'Reach', 'docstr': 'Reach'}

    def __add__(self, other):
        """Concatenate reach variables, padding centerlines to a common length.

        centerline_lon/lat are 2-D (n_reaches, n_centerline_points); the two
        operands may have different point counts, so both are copied into a
        fill-initialized array sized to the longer of the two.
        """
        self_n_reaches, self_n_centerlines = self.centerline_lon.shape
        other_n_reaches, other_n_centerlines = other.centerline_lon.shape
        cl_len = max([self_n_centerlines, other_n_centerlines])

        # Pre-fill with the missing value; unused tail columns stay missing.
        cl_lon = MISSING_VALUE_FLT * np.ones(
            [self_n_reaches+other_n_reaches, cl_len])
        cl_lat = MISSING_VALUE_FLT * np.ones(
            [self_n_reaches+other_n_reaches, cl_len])

        cl_lon[0:self_n_reaches, 0:self_n_centerlines] = self.centerline_lon
        cl_lat[0:self_n_reaches, 0:self_n_centerlines] = self.centerline_lat
        cl_lon[self_n_reaches:, 0:other_n_centerlines] = other.centerline_lon
        cl_lat[self_n_reaches:, 0:other_n_centerlines] = other.centerline_lat

        klass = RiverSPReaches()
        for key in klass.VARIABLES:
            if key in ['centerline_lon', 'centerline_lat']:
                value = {
                    'centerline_lon': cl_lon, 'centerline_lat': cl_lat}[key]
                setattr(klass, key, value)
            else:
                setattr(klass, key, np.concatenate((
                    getattr(self, key), getattr(other, key))))
        return klass
5 | """ 6 | 7 | from __future__ import absolute_import, division, print_function 8 | 9 | # make sure the libraries are importable 10 | 11 | 12 | def search_for_libraries(): 13 | """Search for the libraries.""" 14 | 15 | import os, os.path 16 | import sys 17 | 18 | # Try importing the root library 19 | try: 20 | from GDALOGRUtilities import OGRWriter 21 | from shapely.geometry import box 22 | except: 23 | sys.stderr.write( 24 | "Libraries not found. Make sure you are running in the SWOTRiver environment.\n" 25 | ) 26 | sys.exit(1) 27 | 28 | 29 | search_for_libraries() 30 | 31 | # Imports 32 | 33 | from os.path import join, split 34 | import argparse 35 | from glob import glob 36 | from GDALOGRUtilities import OGRWriter 37 | from shapely.geometry import box 38 | 39 | 40 | def parse_inputs(): 41 | """Create the argument parser.""" 42 | 43 | parser = argparse.ArgumentParser( 44 | description= 45 | 'Generate GIS file with coverage polygons for a SWOT simulation.') 46 | 47 | parser.add_argument('output_file', help='Output file name') 48 | parser.add_argument( 49 | 'data_directories', 50 | metavar='data_dir', 51 | nargs='+', 52 | help='Director containing simulation data') 53 | 54 | parser.add_argument( 55 | '-f', 56 | '--format', 57 | help="OGR file format (default 'ESRI Shapefile')", 58 | default='ESRI Shapefile') 59 | parser.add_argument( 60 | '--dlat', 61 | type=float, 62 | help='latitude tile size (default +1)', 63 | default=1.) 64 | parser.add_argument( 65 | '--dlon', 66 | type=float, 67 | help='longitude tile size (default +1)', 68 | default=1.) 
69 | 70 | args = parser.parse_args() 71 | 72 | return args 73 | 74 | 75 | def get_bbox_from_files(sim_files, dlat, dlon): 76 | """Given a list of files, return a list of bounding box 77 | coordinates suitable for passing to OGRWriter.""" 78 | 79 | bounding_boxes = [] 80 | for name in sim_files: 81 | location = split(name)[-1].split('_')[0] 82 | EW = location[0] 83 | NS = location[4] 84 | 85 | if EW.lower() == 'w': 86 | lonmin = -float(location[1:4]) 87 | else: 88 | lonmin = float(location[1:4]) 89 | 90 | if NS.lower() == 's': 91 | latmin = -float(location[5:7]) 92 | else: 93 | latmin = float(location[5:7]) 94 | 95 | bounding_boxes.append( 96 | box(lonmin, latmin, lonmin + dlon, latmin + dlat)) 97 | 98 | return bounding_boxes 99 | 100 | 101 | def write_catalog(output_file, format, bounding_boxes, sim_files): 102 | """Write coverage polygons with metadata.""" 103 | 104 | fields = {'file': ('str', 42)} 105 | 106 | writer = OGRWriter( 107 | output_file, fields=fields, driver=format, geometry='Polygon') 108 | 109 | for i, bbox in enumerate(bounding_boxes): 110 | field_record = {'file': split(sim_files[i])[-1]} 111 | print(split(sim_files[i])[-1][0:7], bbox.wkt) 112 | writer.add_wkt_feature(bbox.wkt, field_record) 113 | writer.close() 114 | 115 | 116 | def main(): 117 | 118 | args = parse_inputs() 119 | 120 | # Get a list of the files 121 | 122 | sim_files = [] 123 | for directory in args.data_directories: 124 | files = glob(join(directory, '*_cycle_*_pass_*.*.nc')) 125 | if len(files) > 0: 126 | sim_files += files 127 | 128 | bounding_boxes = get_bbox_from_files(sim_files, args.dlat, args.dlon) 129 | print(bounding_boxes) 130 | 131 | write_catalog(args.output_file, args.format, bounding_boxes, sim_files) 132 | 133 | print('Successfuly wrote catalog file') 134 | 135 | 136 | if __name__ == '__main__': main() 137 | -------------------------------------------------------------------------------- /src/SWOTRiver/version.py: 
-------------------------------------------------------------------------------- 1 | # Initial version 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Reorganized code 6 | 7 | #__version__ = '0.1.1' 8 | 9 | # Python 2.7/3.6 compatibility 10 | # __version__ = '0.1.2' 11 | 12 | # refactor runner script and add stuff 13 | __version__ = '0.1.3' 14 | -------------------------------------------------------------------------------- /src/SWOTWater/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/src/SWOTWater/__init__.py -------------------------------------------------------------------------------- /src/SWOTWater/constants.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for swot constants 3 | """ 4 | 5 | PIXC_CLASSES = { 6 | 'invalid': -1, 7 | 'land': 1, 8 | 'land_near_water': 2, 9 | 'water_near_land': 3, 10 | 'open_water': 4, 11 | 'dark_water': 5, 12 | 'low_coh_water_near_land': 6, 13 | 'low_coh_water': 7, 14 | 'land_near_dark_water': 22, # legacy/depreciated 15 | 'dark_water_near_land': 23, # legacy/depreciated 16 | 'dark_water_legacy': 24 # legacy/depreciated 17 | } 18 | 19 | # TODO: figure out how we want to aggregate low-coherence water near shore 20 | # When coherence is low, but it is bright it is likely due to layover 21 | # in which case the water fraction may be bad, but so is the total area 22 | # since there is water from two areas on the ground... 23 | # for now just aggregate the pixel area so it will only map to the one 24 | # ground-plane area that it is assigned to, while the other laid-over 25 | # water will just be missing 26 | AGG_CLASSES = { 27 | 'interior_water_klasses':[ 28 | PIXC_CLASSES['open_water'], 29 | PIXC_CLASSES['low_coh_water'], 30 | PIXC_CLASSES['low_coh_water_near_land'], # TODO: do we want this? 
31 | ], 32 | 'water_edge_klasses':[ 33 | PIXC_CLASSES['water_near_land'], 34 | #PIXC_CLASSES['low_coh_water_near_land'], # TODO: do we want this? 35 | ], 36 | 'land_edge_klasses':[ 37 | PIXC_CLASSES['land_near_water'], 38 | ], 39 | 'dark_water_klasses':[ 40 | PIXC_CLASSES['dark_water'], 41 | PIXC_CLASSES['land_near_dark_water'], 42 | PIXC_CLASSES['dark_water_near_land'], 43 | PIXC_CLASSES['dark_water_legacy'], 44 | ] 45 | } 46 | 47 | GDEM_PIXC_CLASSES = { 48 | 'open_water': 4, 'dark_water': 5, # 49 | 'open_water_lake': 34, 'dark_water_lake': 5} # 54 50 | -------------------------------------------------------------------------------- /src/SWOTWater/products/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWOTAlgorithms/RiverObs/94640dc3ce0a526d6b8eabb40a0c6c870ae7bab4/src/SWOTWater/products/__init__.py -------------------------------------------------------------------------------- /src/SWOTWater/products/constants.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Copyright (c) 2017-, California Institute of Technology ("Caltech"). U.S. 3 | Government sponsorship acknowledged. 4 | All rights reserved. 
5 | 6 | Author (s): Alex Fore 7 | 8 | ''' 9 | FILL_VALUES = { 10 | 'double': 9.9692099683868690e+36, 11 | 'single': 9.96921e+36, 12 | 'int64': 2**63-1, 'uint64': 2**64-1, 13 | 'int32': 2**31-1, 'uint32': 2**32-1, 14 | 'int16': 2**15-1, 'uint16': 2**16-1, 15 | 'int8': 2**7-1, 'uint8': 2**8-1, 16 | 'char': '*', 17 | } 18 | FILL_VALUES['c16'] = FILL_VALUES['double'] + 1j*FILL_VALUES['double'] 19 | FILL_VALUES['c8'] = FILL_VALUES['single'] + 1j*FILL_VALUES['single'] 20 | FILL_VALUES['f8'] = FILL_VALUES['double'] 21 | FILL_VALUES['f4'] = FILL_VALUES['single'] 22 | FILL_VALUES['i8'] = FILL_VALUES['int64'] 23 | FILL_VALUES['i4'] = FILL_VALUES['int32'] 24 | FILL_VALUES['i2'] = FILL_VALUES['int16'] 25 | FILL_VALUES['i1'] = FILL_VALUES['int8'] 26 | FILL_VALUES['u8'] = FILL_VALUES['uint64'] 27 | FILL_VALUES['u4'] = FILL_VALUES['uint32'] 28 | FILL_VALUES['u2'] = FILL_VALUES['uint16'] 29 | FILL_VALUES['u1'] = FILL_VALUES['uint8'] 30 | FILL_VALUES['S1'] = FILL_VALUES['char'] 31 | -------------------------------------------------------------------------------- /src/SWOTWater/products/netcdf.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Copyright (c) 2017-, California Institute of Technology ("Caltech"). U.S. 3 | Government sponsorship acknowledged. 4 | All rights reserved. 5 | 6 | Author (s): Dustin Lagoy 7 | 8 | Tools for working with memory-mapped variables. 
from __future__ import print_function
import os
import netCDF4 as nc
import numpy as np
import warnings

from SWOTWater.products.constants import FILL_VALUES

# Dimension names used to mark the real/imag axis of complex variables.
DEPTH_DIMNAMES = ['depth', 'complex_depth']

def get_fill(dtype):
    """Get the SWOT approved fill value for a data type"""
    # np.dtype(...).str is e.g. '<f8'; drop the byte-order character.
    dtype_str = np.dtype(dtype).str[1:]
    if (dtype_str[0] == 'S') or (dtype_str[0] == 'U') or (dtype_str[0]=='O'):
        # handle arbitrary-length strings
        dtype_str = 'S1'
    return FILL_VALUES[dtype_str]


def set_variable(
        dataset, key, array, dimensions, attributes=None, default_complevel=None):
    '''Set the NetCDF variable, dealing with complex numbers.

    If array is complex, it is stored in the dataset with a third dimension,
    'complex_depth', so that:
        variable.shape == (lines, pixels, 2)
        variable[:, :, 0] == array.real
        variable[:, :, 1] == array.imag

    Parameters
    ----------
    dataset : netCDF4.Dataset (open for writing)
    key : str
        Variable name to create.
    array : ndarray or MaskedArray
        Data to store.
    dimensions : list of str
        Dimension names (must already exist in dataset).
    attributes : dict, optional
        Variable attributes; '_FillValue' and 'complevel' are consumed
        here rather than written as attributes.
    default_complevel : int, optional
        zlib compression level used when attributes give none.
    '''
    # np.ma.mask_array has fill_value attr, else use default fill value
    if attributes is None:
        fill_value = getattr(array, 'fill_value', get_fill(array.dtype))
        complevel = default_complevel
    else:
        complevel = attributes.get('complevel', default_complevel)
        fill_value = attributes.get('_FillValue', get_fill(array.dtype))
    # drop complex part of fill_value
    if 'complex' in array.dtype.name:
        fill_value = np.real(fill_value)

    def _make_variable(key, data, dimensions, attributes=None, complevel=None):
        # Create the variable (optionally zlib-compressed) and copy data
        # plus attributes; closure over `dataset` and `fill_value`.
        dtype = data.dtype
        if dtype == object:
            # assume that objects are strings
            dtype = str
        # NOTE(review): if dtype was replaced by the Python `str` type just
        # above, `dtype.str` below would raise AttributeError when a
        # complevel is given -- presumably object arrays are never
        # compressed; confirm with callers.
        if complevel is not None and dtype.str[1] != 'U':
            if complevel not in range(1, 10):
                raise Exception(
                    'Invalid complevel {} specified in _make_variable'.format(
                        complevel))
            dataset.createVariable(
                key, dtype, dimensions, fill_value=fill_value, zlib=True,
                complevel=complevel)
        else:
            dataset.createVariable(
                key, dtype, dimensions, fill_value=fill_value)

        # Masked string-like arrays: write the raw buffer, not the mask view.
        if ((data.dtype.str[1] == 'S') or (data.dtype.str[1] == 'U') or (
                data.dtype.str[1] == 'O')) and np.ma.isMaskedArray(data):
            dataset[key][:] = data.data
        else:
            dataset[key][:] = data
        if attributes is not None:
            for name, value in attributes.items():
                # These keys describe the variable itself, not attributes.
                if name in ('dtype', 'dimensions', '_FillValue', 'complevel'):
                    continue
                if np.iscomplexobj(value):
                    value = value.real
                # cast min/max/fill
                if name in ['valid_min', 'valid_max']:
                    value = data.dtype.type(value)
                dataset[key].setncattr(name, value)

    if 'complex' in array.dtype.name:
        # Add the depth dimension
        if 'complex_depth' not in dataset.dimensions:
            dataset.createDimension('complex_depth', 2)
        shape = array.shape
        # Each complex element is two floats of half the itemsize.
        n_bytes = int(array.dtype.itemsize/2)
        float_type = np.dtype('f'+str(n_bytes))
        if isinstance(array, np.ma.core.MaskedArray):
            # Somehow MaskedArray.view() doesn't work, so convert to a normal
            # array.
            array = array.filled()
        # Reinterpret the complex buffer as (..., 2) interleaved floats.
        tmp = array.view(dtype=float_type).reshape(shape+(2,))
        _make_variable(
            key, tmp, dimensions+['complex_depth'], attributes,
            complevel=complevel)
    else:
        _make_variable(key, array, dimensions, attributes, complevel=complevel)


def get_variable_keys(dataset):
    '''Return a list of NetCDF variables, dealing with complex numbers.'''
    # Now, complex variables are stored with the same names, so this function
    # is trivial.
    return [key for key in dataset.variables]


def get_variable(dataset, key):
    '''Get the NetCDF variable and dimensions, dealing with complex numbers.

    Key is assumed to be complex if the variable has a first or last dimension
    of 'complex_depth' or 'depth' with length 2.

    Returns a scalar for dimensionless variables, otherwise a MaskedArray
    (complex-valued when a depth dimension of length 2 is present).
    '''
    variable = dataset[key]
    if len(variable.dimensions) == 0:
        # Scalar variable: return the bare value.
        return variable[0]
    if variable.dimensions[0] in DEPTH_DIMNAMES and variable.shape[0] == 2:
        # Depth-first layout: slice 0 is real, slice 1 is imaginary.
        tmp = np.ma.MaskedArray(variable[0] + 1j*variable[1])
        return tmp
    # Only attempt to deconstruct complex datatypes from floats
    if np.issubdtype(variable.dtype, np.floating):
        if variable.dimensions[-1] in DEPTH_DIMNAMES and variable.shape[-1] == 2:
            # Depth-last layout: view the trailing (real, imag) pair as one
            # complex element of twice the float itemsize.
            n_bytes = int(variable.dtype.itemsize*2)
            complex_type = np.dtype('c'+str(n_bytes))
            tmp = variable[:]
            fill_value = get_fill(complex_type)
            mask = None
            if isinstance(tmp, np.ma.MaskedArray):
                # Somehow MaskedArray.view() doesn't work, so convert to a normal
                # array.
                # Keep the current fill value and mask, TODO: use default fill?
                fill_value = tmp.fill_value
                # Assume the real mask matches the imaginary mask
                if isinstance(tmp.mask, np.ndarray):
                    mask = tmp.mask[..., 0]
                else:
                    mask = tmp.mask
                tmp = tmp.data
            # Make the data complex
            tmp = tmp.view(dtype=complex_type).squeeze()
            # Turn back into a masked array and re-fill
            tmp = tmp.view(np.ma.MaskedArray)
            tmp.set_fill_value(fill_value)
            if mask is not None:
                tmp[mask] = np.ma.masked
            return tmp
    # Reading may warn (e.g. fill-value conversion); suppress noise here.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        variable = variable[:]

    if isinstance(variable, np.ma.MaskedArray):
        return variable
    # Plain ndarray: promote to MaskedArray with the SWOT fill value.
    variable = variable.view(np.ma.MaskedArray)
    if dataset[key].dtype is str:
        # handle arrays of strings
        fill_value = get_fill('S1')
    else:
        fill_value = get_fill(dataset[key].dtype.str[1:])
    variable.set_fill_value(fill_value)
    return variable


def get_variable_dimensions(dataset, key):
    '''Get the NetCDF variable dimensions, dealing with complex numbers.

    Key is assumed to be complex if the variable has a first or last dimension
    of 'depth' with length 2. This dimension is removed from the returned list.
    '''
    variable = dataset[key]
    if len(variable.dimensions) == 0:
        return variable.dimensions
    if variable.dimensions[0] in DEPTH_DIMNAMES and variable.shape[0] == 2:
        return variable.dimensions[1:]
    if variable.dimensions[-1] in DEPTH_DIMNAMES and variable.shape[-1] == 2:
        return variable.dimensions[:-1]
    return variable.dimensions
25 | 26 | # plot_reach_stats.py 27 | ``` 28 | usage: plot_reach_stats.py [-h] [-t TITLE] [-p] pixc_rivertile gdem_rivertile 29 | ``` 30 | Compares the pixel cloud and gdem rivertile data products similar to plot_riverobs.py but compares reaches instead. 31 | 32 | # preproc_gdem.py 33 | ``` 34 | usage: preproc_gdem.py [-h] [-l LOG_LEVEL] [--plot] 35 | [--erosion-iter EROSION_ITER] 36 | in_gdem_file out_gdem_file reachdb_path 37 | ``` 38 | Pre-processes the GDEM to trim out some non-river / main channel water. 39 | 40 | Basically, it works in two steps: 41 | * segmenting the water features into disconnected (or close-to-disconnected) regions 42 | * assigning a particular feature label to each river reach using proximity to the river database. 43 | 44 | There is an optional parameter called ```--erosion-iter``` that you can fiddle with that can be used to disconnect features that are technically touching, but only barely. What it does is first erode the water mask before the initial segmentation, then figures out how to handle the things that got eroded in a fancy way. 45 | -------------------------------------------------------------------------------- /src/bin/analyze_reach_table.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (c) 2017-, California Institute of Technology ("Caltech"). U.S. 4 | Government sponsorship acknowledged. 5 | All rights reserved. 
import pdb

import numpy as np
import matplotlib.pyplot as plt
import argparse
import SWOTRiver.analysis.tabley

plt.rcParams.update({'font.size': 12})


def get_percentiles(data):
    """Return the (16th, 50th, 84th) percentiles of data, ignoring NaNs."""
    return np.nanpercentile(data, 16), np.nanpercentile(data, 50), \
        np.nanpercentile(data, 84)


def plot_hist(data1, xlab='', legend_label=None, bins=100, data2=None):
    """Plot histogram(s) of one or two error populations on a new figure.

    Parameters
    ----------
    data1 : array-like
        Primary data set.
    xlab : str
        x-axis label.
    legend_label : list of str, optional
        Legend prefixes for data1 (and data2 if given); the [16, 50, 84]
        percentile text is appended to each. Defaults to [''].
    bins : int or array-like
        Bins passed to np.histogram; data2 reuses data1's bin edges.
    data2 : array-like, optional
        Secondary data set plotted on the same axes.
    """
    # Work on a copy: the previous version used a mutable default
    # argument (['',]) and mutated it in place, so percentile text
    # accumulated across calls and the caller's list was modified.
    legend_label = [''] if legend_label is None else list(legend_label)

    h1, bins1 = np.histogram(data1, bins)
    b = bins1[0:-1] + (bins1[1]-bins1[0])/2
    pcntle1 = get_percentiles(data1)
    legend_label[0] = legend_label[0] + '[16, 50, 84]%-ile = '\
        + '[%2.1f, %2.1f, %2.1f]'%pcntle1
    plt.figure()
    plt.plot(b, h1)
    if data2 is not None:
        pcntle2 = get_percentiles(data2)
        legend_label[1] = legend_label[1] + '[16, 50, 84]%-ile = '\
            + '[%2.1f, %2.1f, %2.1f]'%pcntle2
        h2, _ = np.histogram(data2, bins1)
        plt.plot(b, h2)
    plt.grid()
    legend = plt.legend(legend_label)
    legend.set_draggable(state=True)
    plt.xlabel(xlab)
    plt.ylabel('count')


def plot_cdf(data, requirement, xlabel='', bins=250):
    """Plot the CDF of |data| with 68th-percentile and requirement lines."""
    plt.figure()
    abs_data = abs(data)
    plt.hist(abs_data, cumulative=True, label=xlabel+' CDF', histtype='step',
             bins=bins, density=True, linewidth=1.5)
    plt.xlabel('|' + xlabel + '|')
    plt.ylabel('Likelihood of occurrence')
    plt.axhline(y=0.68, color='r', linestyle='--', label='|68%ile|')
    plt.axvline(x=requirement, color='b', linestyle='--', label='requirement')
    # Clip the x-axis at the 98th percentile so outliers don't flatten it.
    x_min = -1
    x_max = np.percentile(abs_data, 98)
    plt.xlim([x_min, x_max])
    plt.grid()
    legend = plt.legend(loc='lower right')
    legend.set_draggable(state=True)
    plt.plot()


def plot_2d_hist(data, xdata, bins=100, xlab='', ylab=''):
    """2-D histogram of data vs xdata with percentile curves overlaid."""
    h, ye, xe = np.histogram2d(data, xdata, bins)
    xb = xe[0:-1] + (xe[1]-xe[0])/2
    extent = (xe[0], xe[-1], ye[0], ye[-1])
    hp = h.copy()
    hp[h == 0] = np.nan  # render empty bins as blank, not zero-colored
    plt.figure()
    plt.imshow(hp, aspect='auto', interpolation='none', origin='lower',
               extent=extent)
    plt.xlabel(xlab)
    plt.ylabel(ylab)
    plt.grid()
    # Overlay 16/50/84 percentile curves of `data` per x-bin.
    p_negsig = []
    med = []
    p_possig = []
    for i in range(len(xe)-1):
        msk = np.where(np.logical_and(xdata >= xe[i], xdata < xe[i+1]))
        pt = get_percentiles(data[msk])
        p_negsig.append(pt[0])
        med.append(pt[1])
        p_possig.append(pt[2])
    plt.plot(xb, p_negsig, '--r')
    plt.plot(xb, med, 'r')
    plt.plot(xb, p_possig, '--r')


def main():
    """Read an itemized river-errors table and plot error distributions."""
    parser = argparse.ArgumentParser()
    parser.add_argument('input_file', help="""river errors table file with each
        observed reach itemized (not the summary stats table) as produced by
        plot_reach_stats.py""", type=str)
    args = parser.parse_args()
    table = SWOTRiver.analysis.tabley.Table.from_file(args.input_file)
    # Column layout of the itemized table (indices per plot_reach_stats.py;
    # columns 12/14 hold reach ids and scene/pass/tile, unused here).
    wse_reach_e = np.array([line[0] for line in table.data])
    wse_node_e = np.array([line[1] for line in table.data])
    slp_e = np.array([line[4] for line in table.data])
    area_tot_e = np.array([line[8] for line in table.data])
    area_det_e = np.array([line[9] for line in table.data])
    width_e = np.array([line[10] for line in table.data])
    wse_bins = np.linspace(-100, 100, 50)
    slp_bins = np.linspace(-10, 10, 50)
    area_bins = np.linspace(-70, 70, 50)
    width_bins = np.linspace(-150, 150, 50)

    plot_hist(
        wse_reach_e, 'wse error (cm)', ['reach, ', 'node, '], wse_bins,
        wse_node_e
    )
    plot_hist(slp_e, 'slope error (cm/km)', ['',], slp_bins)
    plot_hist(
        area_tot_e, 'area error (%)', ['tot, ', 'det, '], area_bins,
        area_det_e
    )
    plot_hist(width_e, 'river width (m)', ['',], width_bins)

    # CDFs against the science requirements (10 cm, 1.7 cm/km, 15 %).
    plot_cdf(wse_reach_e, 10, 'wse error (cm)')
    plot_cdf(slp_e, 1.7, 'slope error (cm/km)')
    plot_cdf(area_tot_e, 15, 'area error (%)')

    # plot 2d histograms vs river width
    plot_2d_hist(area_tot_e, width_e, [area_bins, width_bins],
                 'river width error (m)', 'area_tot error (%)')
    plot_2d_hist(area_det_e, width_e, [area_bins, width_bins],
                 'river width error (m)', 'area_det error (%)')
    plt.show()


if __name__ == '__main__':
    main()
6 | 7 | Author (s): Alex Fore 8 | ''' 9 | import os 10 | import ast 11 | import argparse 12 | import logging 13 | import warnings 14 | import tempfile 15 | 16 | import RDF 17 | import SWOTRiver.Estimate 18 | from SWOTRiver.products.calval import \ 19 | SimplePixelCloud, Drifter, PressureTransducers 20 | from SWOTRiver.errors import RiverObsException 21 | 22 | LOGGER = logging.getLogger('calval2rivertile') 23 | 24 | FORMATS = ['simple_pixc', 'drifter', 'geotiff', 'pt', 'water_mask'] 25 | 26 | def main(): 27 | """Sample script for running calval data through RiverObs""" 28 | parser = argparse.ArgumentParser() 29 | parser.add_argument('input_file', help='Input calval file to process') 30 | parser.add_argument( 31 | 'format', help='Input calval file format', choices=FORMATS) 32 | parser.add_argument( 33 | 'out_riverobs_file', help='Output RiverTile NETCDF file') 34 | parser.add_argument('out_pixcvec_file', help='Output PIXCVecRiver file') 35 | parser.add_argument('rdf_file', help='Static config params') 36 | parser.add_argument( 37 | 'out_pixc_file', 38 | help='If specified, write reformatted SimplePixelCloud to this file', 39 | default=None, nargs='?') 40 | parser.add_argument( 41 | '-l', '--log-level', type=str, default="info", 42 | help="logging level, one of: debug info warning error") 43 | args = parser.parse_args() 44 | 45 | level = {'debug': logging.DEBUG, 'info': logging.INFO, 46 | 'warning': logging.WARNING, 'error': logging.ERROR 47 | }[args.log_level] 48 | format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 49 | logging.basicConfig(level=level, format=format) 50 | 51 | config = RDF.RDF() 52 | config.rdfParse(args.rdf_file) 53 | config = dict(config) 54 | 55 | if args.format == 'drifter': 56 | # Use file extension to determine Drifter classmethod to use 57 | if args.input_file.lower().endswith('.nc'): 58 | drifter = Drifter.from_ncfile(args.input_file) 59 | 60 | elif args.input_file.lower().endswith('.shp'): 61 | drifter = 
Drifter.from_shp(args.input_file) 62 | 63 | elif args.input_file.lower().endswith('.txt'): 64 | drifter = Drifter.from_native(args.input_file) 65 | 66 | else: 67 | raise Exception("Unknown drifter file format!") 68 | 69 | pixc_simple = SimplePixelCloud.from_drifter(drifter) 70 | 71 | elif args.format == 'geotiff': 72 | pixc_simple = SimplePixelCloud.from_geotif(args.input_file) 73 | 74 | elif args.format == 'pt': 75 | pixc_simple = SimplePixelCloud.from_pressure_transducer( 76 | PressureTransducers.from_native(args.input_file)) 77 | 78 | elif args.format == 'water_mask': 79 | pixc_simple = SimplePixelCloud.from_cnes_watermask(args.input_file) 80 | 81 | elif args.format == 'simple_pixc': 82 | pixc_simple = SimplePixelCloud.from_ncfile(args.input_file) 83 | 84 | output_pixc_file = args.out_pixc_file 85 | 86 | # If args.out_pixc_file not specified use a tempfile and clean it later 87 | if args.out_pixc_file is None: 88 | fid, output_pixc_file = tempfile.mkstemp() 89 | pixc_simple.to_ncfile(output_pixc_file) 90 | 91 | # typecast most config values with eval since RDF won't do it for me 92 | # (excluding strings) 93 | for key in config.keys(): 94 | if key in ['geolocation_method', 'reach_db_path', 'height_agg_method', 95 | 'area_agg_method', 'slope_method', 'outlier_method', 96 | 'pixc_quality_handling', 'fractional_inundation_kwd']: 97 | if config[key].lower() != 'none': 98 | continue 99 | config[key] = ast.literal_eval(config[key]) 100 | 101 | 102 | estimator = SWOTRiver.Estimate.CalValToRiverTile( 103 | output_pixc_file, args.out_pixcvec_file) 104 | estimator.load_config(config) 105 | 106 | # generate empty output file on errors 107 | try: 108 | estimator.do_river_processing() 109 | except RiverObsException as exception: 110 | LOGGER.error( 111 | 'Unable to continue river processing: {}'.format(exception)) 112 | 113 | # Build and write a rivertile-style output file 114 | estimator.build_products() 115 | 116 | # Suppress litany of warnings from Product class writer 
117 | with warnings.catch_warnings(): 118 | warnings.simplefilter("ignore") 119 | estimator.rivertile_product.to_ncfile(args.out_riverobs_file) 120 | 121 | # Delete tempfile if args.out_pixc_file is not specified 122 | if args.out_pixc_file is None: 123 | os.remove(output_pixc_file) 124 | 125 | if __name__ == "__main__": 126 | main() 127 | -------------------------------------------------------------------------------- /src/bin/compare_tables.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright(c) 2017-, California Institute of Technology("Caltech"). U.S. 4 | Government sponsorship acknowledged. 5 | All rights reserved. 6 | 7 | Author(s): Brent Williams 8 | ''' 9 | import argparse 10 | import pdb 11 | 12 | import SWOTRiver.analysis.tabley 13 | 14 | 15 | def main(): 16 | parser = argparse.ArgumentParser() 17 | parser.add_argument('table_file1', type=str) 18 | parser.add_argument('table_file2', type=str) 19 | parser.add_argument('--output_file', '-o', type=str, default=None) 20 | args = parser.parse_args() 21 | print(args) 22 | 23 | # read in each file 24 | table1 = SWOTRiver.analysis.tabley.Table.from_file(args.table_file1) 25 | table2 = SWOTRiver.analysis.tabley.Table.from_file(args.table_file2) 26 | n_missing = 0 27 | #print(table1) 28 | #print(table2) 29 | # match-up lines 30 | if not (table1.headers == table2.headers): 31 | print("tables are not comparable, headers dont match") 32 | return 33 | #print(table1.headers) 34 | print('Total reaches in', args.table_file1, 'is', len(table1.data)) 35 | print('Total reaches in', args.table_file2, 'is', len(table2.data)) 36 | 37 | tile_ind = -1 38 | reach_ind = -1 39 | for k, hdr in enumerate(table1.headers): 40 | if 'tile' in hdr: 41 | tile_ind = k 42 | if 'reach' == hdr: 43 | reach_ind = k 44 | outdata = [] 45 | for line1 in table1.data: 46 | #print(line1) 47 | tile1 = None 48 | reach1 = None 49 | matchlinenum = -1 50 | if tile_ind > 0: 51 | 
tile1 = line1[tile_ind] 52 | if reach_ind > 0: 53 | reach1 = line1[reach_ind] 54 | for k2,line2 in enumerate(table2.data): 55 | tile2 = None 56 | reach2 = None 57 | if tile_ind > 0: 58 | tile2 = line2[tile_ind] 59 | if reach_ind > 0: 60 | reach2 = line2[reach_ind] 61 | if (tile1==tile2) and (reach1==reach2) and (tile1 is not None): 62 | matchlinenum = k2 63 | #print (tile1, tile2, reach1, reach2) 64 | if matchlinenum > 0: 65 | matchline = table2.data[matchlinenum] 66 | outline = [] 67 | for k, item in enumerate(line1): 68 | if isinstance(item, str): 69 | outitem = item 70 | else: 71 | # difference the matched line 72 | outitem = item - matchline[k] 73 | outline.append(outitem) 74 | outdata.append(outline) 75 | #print("##",matchline) 76 | #print("***",outline) 77 | else: 78 | # no matching line in table2 for this table1 line 79 | print('No matching line in', args.table_file2, 80 | 'for', tile1, reach1, 'in', args.table_file1) 81 | n_missing += 1 82 | # create the difference table 83 | # setup preamble 84 | print('total missing reaches', n_missing) 85 | preamble = 'Difference table: %s - %s'%(args.table_file1, args.table_file2) 86 | # TODO: setup passfail dictionary for difference 87 | SWOTRiver.analysis.tabley.print_table( 88 | outdata, headers=table1.headers, style=None, 89 | precision=table1.precision, width=table1.fixed_width, 90 | passfail={}, fname=args.output_file, preamble=preamble) 91 | 92 | 93 | if __name__ == '__main__': 94 | try: 95 | main() 96 | except KeyboardInterrupt: 97 | pass 98 | 99 | -------------------------------------------------------------------------------- /src/bin/plot_pixcvec.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Plots pixels from pixcvec and node locations 4 | """ 5 | import numpy as np 6 | import argparse 7 | import matplotlib.axes 8 | import matplotlib.pyplot as plt 9 | 10 | import RiverObs.ReachDatabase 11 | from SWOTRiver.products.rivertile import 
L2HRRiverTile 12 | from SWOTRiver.products.pixcvec import L2PIXCVector 13 | 14 | def main(): 15 | parser = argparse.ArgumentParser() 16 | parser.add_argument('rivertile', help='rivertile.nc') 17 | parser.add_argument('pixcvec', help='pixel cloud vector.nc') 18 | args = parser.parse_args() 19 | 20 | rivertile = L2HRRiverTile.from_ncfile(args.rivertile) 21 | pixc_vector = L2PIXCVector.from_ncfile(args.pixcvec) 22 | 23 | scatter_colors = ['k', 'r', 'g', 'b', 'y', 'c'] 24 | figure, axis = plt.subplots() 25 | for ii, node_id in enumerate(rivertile.nodes.node_id): 26 | 27 | mask = pixc_vector.node_id == node_id 28 | this_color = scatter_colors[ii%len(scatter_colors)] 29 | axis.scatter( 30 | pixc_vector.longitude_vectorproc[mask], 31 | pixc_vector.latitude_vectorproc[mask], 32 | s=1, c=this_color, edgecolor='none') 33 | 34 | is_bad_width = rivertile.nodes.node_q & 0x1 > 0 35 | 36 | axis.scatter( 37 | rivertile.nodes.lon, 38 | rivertile.nodes.lat, 39 | marker='d', c='m', s=3, label='Rivertile Nodes') 40 | 41 | axis.scatter( 42 | rivertile.nodes.lon_prior[is_bad_width], 43 | rivertile.nodes.lat_prior[is_bad_width], 44 | marker='s', c='k', s=3, label='Prior nodes, bad width') 45 | 46 | axis.scatter( 47 | rivertile.nodes.lon_prior[~is_bad_width], 48 | rivertile.nodes.lat_prior[~is_bad_width], 49 | marker='x', c='k', s=4, label='Prior nodes, good width') 50 | 51 | axis.set_xlabel('longitude') 52 | axis.set_ylabel('latitude') 53 | axis.legend() 54 | axis.grid() 55 | plt.show() 56 | 57 | if __name__ == "__main__": 58 | main() 59 | -------------------------------------------------------------------------------- /src/bin/swot_pixc2rivertile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Stand-in for RiverObs SDS-like processing 4 | 5 | Useage: 6 | swot_pixc2rivertile.py l2pixc rivertile.nc pixcvector.nc config.rdf 7 | 8 | Optional args: 9 | --shpbas nodebase reachbase -- writes shapefiles in nodebase.shp 
reachbase.shp 10 | --sensor-file sensor.nc -- gets sensor info from sensor.nc 11 | --gdem-file gdem.nc -- will make a fake pixel cloud from a gdem and run 12 | riverobs on that instead. 13 | 14 | template config file: 15 | 16 | reach_db_path (-) = REPLACE_ME 17 | class_list (-) = [2, 3, 4, 5, 6, 7] 18 | use_fractional_inundation (-) = [True, True, False, False, False, False] 19 | use_segmentation (-) = [False, True, True, True, True, True] 20 | use_heights (-) = [False, True, True, False, True, True] 21 | min_points (-) = 0 22 | minobs (-) = 1 23 | trim_ends (-) = False 24 | min_fit_points (-) = 2 25 | do_improved_geolocation (-) = True 26 | geolocation_method (-) = taylor 27 | height_agg_method (-) = weight 28 | area_agg_method (-) = composite 29 | slope_method (-) = bayes 30 | prior_wse_method (-) = fit 31 | prior_unc_alpha (-) = 3.0 32 | char_length_tau (-) = 10000 33 | use_multiple_reaches (-) = True 34 | use_ext_dist_coef (-) = True 35 | outlier_method (-) = piecewise_linear 36 | outlier_abs_thresh (-) = 1.5 37 | outlier_rel_thresh (-) = 68 38 | outlier_upr_thresh (-) = 80 39 | outlier_iter_num (-) = 30 40 | outlier_breakpoint_min_dist (-) = 0.1 41 | outlier_edge_min_dist (-) = 0.1 42 | outlier_n_boot (-) = 10 43 | pixc_quality_handling (-) = nominal 44 | num_good_sus_pix_thresh_wse (-) = 1 45 | num_good_sus_pix_thresh_area (-) = 1 46 | use_bright_land (-) = True 47 | geo_qual_wse_suspect (-) = 0x0000ffff 48 | geo_qual_wse_degraded (-) = 0x01ff0000 49 | geo_qual_wse_bad (-) = 0xfe000000 50 | class_qual_area_suspect (-) = 0x0000ffff 51 | class_qual_area_degraded (-) = 0x01ff0000 52 | class_qual_area_bad (-) = 0xfe000000 53 | sig0_qual_suspect (-) = 0x01ffffff 54 | sig0_qual_bad (-) = 0xfe000000 55 | 56 | Config file just has processing parameters, no filenames (shape_file_root 57 | will be overwritten in SDS env with "prior_rivers" in current 58 | working directory by SDS pre-processor). 
59 | 60 | For using with GDEMs change to these key/value pairs: 61 | class_list (-) = [4, 5, 24] 62 | use_fractional_inundation (-) = [False, False, False] 63 | use_segmentation (-) = [True, True, True] 64 | use_heights (-) = [True, True, True] 65 | do_improved_geolocation (-) = False 66 | slope_method (-) = first_to_last 67 | use_ext_dist_coef (-) = False 68 | outlier_method (-) = None 69 | 70 | Author (s): Alex Fore 71 | """ 72 | import sys 73 | import os 74 | import ast 75 | import argparse 76 | import netCDF4 77 | import logging 78 | import subprocess 79 | 80 | import RDF 81 | import SWOTRiver.Estimate 82 | from SWOTRiver.products.pixcvec import L2PIXCVector 83 | from SWOTRiver.errors import RiverObsException 84 | 85 | LOGGER = logging.getLogger('swot_pixc2rivertile') 86 | 87 | 88 | def main(): 89 | parser = argparse.ArgumentParser() 90 | parser.add_argument('pixc_file', help='pixel cloud file') 91 | parser.add_argument('out_riverobs_file', help='Output NetCDF file') 92 | parser.add_argument('out_pixc_vector_file', help='Output PIXC vector file') 93 | parser.add_argument('rdf_file', help='Static config params') 94 | parser.add_argument('--shpbase', type=str, nargs=2, default=[None, None]) 95 | parser.add_argument( 96 | '-l', '--log-level', type=str, default="info", 97 | help="logging level, one of: debug info warning error") 98 | parser.add_argument( 99 | '--gdem-file', '-g', type=str, default=None, 100 | help="GDEM file; if commanded makes a fake pixc from GDEM and runs"+ 101 | "RiverObs on that instead of on pixc_file") 102 | args = parser.parse_args() 103 | 104 | level = {'debug': logging.DEBUG, 'info': logging.INFO, 105 | 'warning': logging.WARNING, 'error': logging.ERROR}[args.log_level] 106 | format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 107 | logging.basicConfig(level=level, format=format) 108 | 109 | config = RDF.RDF() 110 | config.rdfParse(args.rdf_file) 111 | config = dict(config) 112 | 113 | # typecast most config values with eval 
since RDF won't do it for me 114 | # (excluding strings) 115 | for key in config.keys(): 116 | if key in ['geolocation_method', 'reach_db_path', 'height_agg_method', 117 | 'area_agg_method', 'slope_method', 'outlier_method', 118 | 'prior_wse_method', 'pixc_quality_handling']: 119 | if config[key].lower() != 'none': 120 | continue 121 | config[key] = ast.literal_eval(config[key]) 122 | 123 | pixc_file = args.pixc_file 124 | if args.gdem_file is not None: 125 | import fake_pixc_from_gdem 126 | import tempfile 127 | pixc_file = tempfile.mktemp() 128 | fake_pixc_from_gdem.fake_pixc_from_gdem( 129 | args.gdem_file, args.pixc_file, pixc_file) 130 | 131 | l2pixc_to_rivertile = SWOTRiver.Estimate.L2PixcToRiverTile( 132 | pixc_file, args.out_pixc_vector_file) 133 | 134 | l2pixc_to_rivertile.load_config(config) 135 | 136 | # generate empty output file on errors 137 | try: 138 | l2pixc_to_rivertile.validate_inputs() 139 | l2pixc_to_rivertile.do_river_processing() 140 | l2pixc_to_rivertile.match_pixc_idx() 141 | l2pixc_to_rivertile.do_improved_geolocation() 142 | 143 | except RiverObsException as exception: 144 | LOGGER.error( 145 | 'Unable to continue river processing: {}'.format(exception)) 146 | 147 | l2pixc_to_rivertile.build_products() 148 | 149 | l2pixc_to_rivertile.rivertile_product.to_ncfile(args.out_riverobs_file) 150 | if args.shpbase[0] is not None: 151 | l2pixc_to_rivertile.rivertile_product.nodes.write_shapes( 152 | args.shpbase[0]+'.shp') 153 | l2pixc_to_rivertile.rivertile_product.reaches.write_shapes( 154 | args.shpbase[1]+'.shp') 155 | 156 | if args.gdem_file is not None: 157 | os.remove(pixc_file) 158 | 159 | 160 | if __name__ == "__main__": 161 | main() 162 | -------------------------------------------------------------------------------- /src/bin/swot_rivertiles2riversp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Stand-in for RiverObs SDS-like processing 4 | 5 | Useage: 6 | 
swot_rivertiles2riversp.py ... 7 | """ 8 | import os 9 | import logging 10 | import argparse 11 | import numpy as np 12 | 13 | from SWOTRiver.products.riversp import L2HRRiverSP 14 | from SWOTRiver.products.rivertile import L2HRRiverTile 15 | 16 | def main(): 17 | parser = argparse.ArgumentParser() 18 | parser.add_argument( 19 | 'rivertile_files', type=str, nargs='*', 20 | help='space serperated list of rivertiles') 21 | parser.add_argument('river_sp_ncfile', type=str) 22 | parser.add_argument('--from-shapes', help='From shapefiles', 23 | action='store_true', default=False) 24 | parser.add_argument('--shpbasedir', type=str, default=None) 25 | parser.add_argument( 26 | '-l', '--log-level', type=str, default="info", 27 | help="logging level, one of: debug info warning error") 28 | args = parser.parse_args() 29 | 30 | level = {'debug': logging.DEBUG, 'info': logging.INFO, 31 | 'warning': logging.WARNING, 'error': logging.ERROR}[args.log_level] 32 | format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 33 | logging.basicConfig(level=level, format=format) 34 | 35 | if args.from_shapes: 36 | rivertile_files = list(zip( 37 | args.rivertile_files[0::2], args.rivertile_files[1::2])) 38 | else: 39 | rivertile_files = args.rivertile_files 40 | 41 | river_tiles = [] 42 | for ii, rivertile_file in enumerate(rivertile_files): 43 | if args.from_shapes: 44 | river_tile = L2HRRiverTile.from_shapes( 45 | rivertile_file[0], rivertile_file[1]) 46 | else: 47 | river_tile = L2HRRiverTile.from_ncfile(rivertile_file) 48 | 49 | river_tiles.append(river_tile) 50 | 51 | # write river sp 52 | river_sp = L2HRRiverSP.from_rivertiles(river_tiles) 53 | 54 | # write shapefile version of river sp 55 | if args.shpbasedir is not None: 56 | if not os.path.isdir(args.shpbasedir): 57 | os.mkdir(args.shpbasedir) 58 | river_sp.nodes.write_shapes( 59 | os.path.join(args.shpbasedir, 'nodes.shp')) 60 | river_sp.reaches.write_shapes( 61 | os.path.join(args.shpbasedir, 'reaches.shp')) 62 | 63 | if 
args.from_shapes: 64 | fill_value = river_sp.nodes.VARIABLES['time_str']['fill_value'] 65 | river_sp.nodes.time_str = ( 66 | np.ones(river_sp.nodes.time.shape)*fill_value) 67 | river_sp.reaches.time_str = ( 68 | np.ones(river_sp.reaches.time.shape)*fill_value) 69 | river_sp.to_ncfile(args.river_sp_ncfile) 70 | 71 | if __name__ == "__main__": 72 | main() 73 | -------------------------------------------------------------------------------- /src/toggle_input/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Hide the input cells in a notebook. 3 | 4 | To use, place the following in your notebook: 5 | 6 | from IPython.display import HTML 7 | from toggle_input import toggle_input 8 | HTML(toggle_input) 9 | """ 10 | 11 | from __future__ import absolute_import, division, print_function 12 | 13 | from .toggle_input import * 14 | 15 | from .version import __version__ 16 | -------------------------------------------------------------------------------- /src/toggle_input/toggle_input.py: -------------------------------------------------------------------------------- 1 | """ 2 | Toggles the input cells of a python notebook. 3 | 4 | To use, from the notebook, execute this cell: 5 | 6 | from IPython.display import HTML 7 | from toggle_input import toggle_input 8 | HTML(toggle_input) 9 | """ 10 | ##from IPython.display import HTML 11 | 12 | ##HTML(''' 24 | ##The raw code for this IPython notebook is by default hidden for easier reading. 25 | ##To toggle on/off the raw code, click here.''') 26 | ## 27 | 28 | toggle_input = ''' 40 | The raw code for this IPython notebook is by default hidden for easier reading. 
41 | To toggle on/off the raw code, click here.''' 42 | -------------------------------------------------------------------------------- /src/toggle_input/version.py: -------------------------------------------------------------------------------- 1 | # Initial version 2 | 3 | #__version__ = '0.1.0' 4 | 5 | # Python 2.7/3.6 compatibility 6 | 7 | __version__ = '0.1.1' 8 | --------------------------------------------------------------------------------