├── .coveragerc ├── .flake8 ├── .gitattributes ├── .github └── workflows │ └── test.yaml ├── .gitignore ├── AUTHORS.rst ├── CONTRIBUTING.rst ├── LICENSE.md ├── MANIFEST.in ├── README.md ├── ci └── test_environment.yaml ├── examples ├── Notebooks │ ├── IHmethod_demo.ipynb │ ├── NWIS_example.ipynb │ └── NWIS_gw_example.ipynb └── data │ ├── bbox.dbf │ ├── bbox.prj │ ├── bbox.shp │ ├── bbox.shx │ ├── pang.csv │ ├── pangbf.csv │ └── pangresults.png ├── long_description.rst ├── pydrograph ├── __init__.py ├── _version.py ├── attributes.py ├── baseflow.py ├── nwis.py └── tests │ ├── __init__.py │ ├── conftest.py │ ├── data │ ├── UV_04087088_Discharge_20071001_ab.csv │ ├── cacheeastcr_chd_perimeter_bufferinside50m.dbf │ ├── cacheeastcr_chd_perimeter_bufferinside50m.prj │ ├── cacheeastcr_chd_perimeter_bufferinside50m.shp │ └── cacheeastcr_chd_perimeter_bufferinside50m.shx │ ├── test.py │ ├── test_baseflow.py │ ├── test_examples.py │ ├── test_notebooks.py │ ├── test_nwis.py │ ├── test_readme.py │ └── test_rivergages.py ├── requirements-dev.txt ├── requirements-dev.yml ├── requirements.txt ├── requirements.yml ├── setup.cfg ├── setup.py └── versioneer.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = pydrograph 3 | [report] 4 | omit = /Users/aleaf/Documents/GitHub/gisutils/* 5 | pydrograph/_version.py 6 | 7 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = .git,__pycache__,build,dist,versioneer.py,pydrograph/_version.py,docs/source/conf.py 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | pydrograph/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | # Based on github template: https://github.com/actions/starter-workflows/blob/main/ci/python-package.yml 2 | 3 | name: Tests 4 | 5 | on: 6 | schedule: 7 | - cron: '0 9 * * 1' # run every Monday at 9 AM UTC (3 AM CST) 8 | push: 9 | pull_request: 10 | 11 | jobs: 12 | build: 13 | name: ${{ matrix.python-version }}, ${{ matrix.os }} 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | os: [ ubuntu-latest, macos-latest, windows-latest ] 18 | python-version: ['3.11', '3.10'] 19 | 20 | steps: 21 | - uses: actions/checkout@v2 22 | #with: 23 | # fetch-depth: 50 24 | - uses: conda-incubator/setup-miniconda@v2 25 | with: 26 | auto-update-conda: true 27 | activate-environment: pydrograph_ci 28 | environment-file: ci/test_environment.yaml 29 | python-version: ${{ matrix.python-version }} 30 | #condarc-file: ci/example-condarc.yml 31 | auto-activate-base: false 32 | - name: Conda info 33 | shell: bash -l {0} 34 | run: conda info 35 | - name: Install pydrograph and ipykernel 36 | shell: bash -l {0} 37 | run: | 38 | pip install -e . 39 | python -m ipykernel install --user --name pydrograph_ci --display-name "pydrograph_ci" 40 | - name: Conda list 41 | shell: bash -l {0} 42 | run: conda list 43 | #- name: Lint with flake8 44 | # run: | 45 | # # stop the build if there are Python syntax errors or undefined names 46 | # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 47 | # # exit-zero treats all errors as warnings.
The GitHub editor is 127 chars wide 48 | # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 49 | - name: Run tests and upload coverage 50 | shell: bash -l {0} 51 | run: | 52 | coverage run -m pytest -v --durations=20 53 | coverage report -m 54 | codecov -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | venv/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit tests / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | 56 | # Sphinx documentation 57 | docs/build/ 58 | docs/source/generated/ 59 | 60 | # pytest 61 | .pytest_cache/ 62 | 63 | # PyBuilder 64 | target/ 65 | 66 | # Editor files 67 | #mac 68 | .DS_Store 69 | *~ 70 | 71 | #vim 72 | *.swp 73 | *.swo 74 | 75 | #pycharm 76 | .idea/* 77 | 78 | 79 | #Ipython Notebook 80 | .ipynb_checkpoints 81 | 82 | # QGIS 83 | *.aux.xml 84 | 85 | *.cpg 86 | *.chk 87 | *.qpj 88 | 89 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Maintainer 6 | ---------- 7 | 8 | * Andrew Leaf 9 | 10 | Contributors 11 | ------------ 12 | 13 | None yet. Why not be the first? See: CONTRIBUTING.rst 14 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | Report bugs at https://github.com/aleaf/pydrograph/issues. 17 | 18 | If you are reporting a bug, please include: 19 | 20 | * Any details about your local setup that might be helpful in troubleshooting. 21 | * Detailed steps to reproduce the bug. 22 | 23 | Fix Bugs 24 | ~~~~~~~~ 25 | 26 | Look through the GitHub issues for bugs. Anything tagged with "bug" 27 | is open to whoever wants to implement it. 28 | 29 | Implement Features 30 | ~~~~~~~~~~~~~~~~~~ 31 | 32 | Look through the GitHub issues for features. Anything tagged with "feature" 33 | is open to whoever wants to implement it. 34 | 35 | Write Documentation 36 | ~~~~~~~~~~~~~~~~~~~ 37 | 38 | pydrograph could always use more documentation, whether 39 | as part of the official pydrograph docs, in docstrings, 40 | or even on the web in blog posts, articles, and such. 
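For example, a new docstring could follow the numpydoc style (numpydoc is already part of the CI environment in ci/test_environment.yaml). The function below is purely hypothetical and only illustrates the layout::

    def flow_duration(Q, exceedance=(0.1, 0.5, 0.9)):
        """Compute flow-duration statistics for a daily streamflow series.

        Parameters
        ----------
        Q : pandas.Series
            Daily streamflow values, indexed by date.
        exceedance : sequence of float
            Exceedance probabilities to evaluate.

        Returns
        -------
        pandas.Series
            Streamflow value equaled or exceeded at each probability.
        """
        return Q.quantile([1 - p for p in exceedance])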
41 | 42 | Submit Feedback 43 | ~~~~~~~~~~~~~~~ 44 | 45 | The best way to send feedback is to file an issue at https://github.com/aleaf/pydrograph/issues. 46 | 47 | If you are proposing a feature: 48 | 49 | * Explain in detail how it would work. 50 | * Keep the scope as narrow as possible, to make it easier to implement. 51 | * Remember that this is a volunteer-driven project, and that contributions 52 | are welcome :) 53 | 54 | Get Started! 55 | ------------ 56 | 57 | Ready to contribute? Here's how to set up `pydrograph` for local development. 58 | 59 | 1. Fork the `pydrograph` repo on GitHub. 60 | 2. Clone your fork locally:: 61 | 62 | $ git clone git@github.com:your_name_here/pydrograph.git 63 | 64 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 65 | 66 | $ mkvirtualenv pydrograph 67 | $ cd pydrograph/ 68 | $ python setup.py develop 69 | 70 | 4. Create a branch for local development:: 71 | 72 | $ git checkout -b name-of-your-bugfix-or-feature 73 | 74 | Now you can make your changes locally. 75 | 76 | 5. When you're done making changes, check that your changes pass flake8 and the tests:: 77 | 78 | $ flake8 pydrograph 79 | $ pytest 80 | 81 | 82 | To get flake8 and pytest, just pip install them into your virtualenv. 83 | 84 | 6. Commit your changes and push your branch to GitHub:: 85 | 86 | $ git add . 87 | $ git commit -m "Your detailed description of your changes." 88 | $ git push origin name-of-your-bugfix-or-feature 89 | 90 | 7. Submit a pull request through the GitHub website. 91 | 92 | Pull Request Guidelines 93 | ----------------------- 94 | 95 | Before you submit a pull request, check that it meets these guidelines: 96 | 97 | 1. The pull request should include tests. 98 | 2. If the pull request adds functionality, the docs should be updated. Put 99 | your new functionality into a function with a docstring, and add the 100 | feature to the list in README.md. 101 | 3. The pull request should work for the Python versions tested in CI 102 | (currently 3.10 and 3.11; see .github/workflows/test.yaml). Check the Tests 103 | workflow results on GitHub Actions and make sure that the tests pass for all supported Python versions. 104 | 105 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | Unless otherwise noted, this project is in the public domain in the United 5 | States because it contains materials that originally came from the United 6 | States Geological Survey, an agency of the United States Department of the 7 | Interior. For more information, see the official USGS copyright policy at 8 | https://www2.usgs.gov/visual-id/credit_usgs.html#copyright 9 | 10 | Additionally, we waive copyright and related rights in the work 11 | worldwide through the CC0 1.0 Universal public domain dedication. 12 | 13 | 14 | CC0 1.0 Universal Summary 15 | ------------------------- 16 | 17 | This is a human-readable summary of the 18 | [Legal Code (read the full text)][1]. 19 | 20 | 21 | ### No Copyright 22 | 23 | The person who associated a work with this deed has dedicated the work to 24 | the public domain by waiving all of his or her rights to the work worldwide 25 | under copyright law, including all related and neighboring rights, to the 26 | extent allowed by law.
27 | 28 | You can copy, modify, distribute and perform the work, even for commercial 29 | purposes, all without asking permission. 30 | 31 | 32 | ### Other Information 33 | 34 | In no way are the patent or trademark rights of any person affected by CC0, 35 | nor are the rights that other persons may have in the work or in how the 36 | work is used, such as publicity or privacy rights. 37 | 38 | Unless expressly stated otherwise, the person who associated a work with 39 | this deed makes no warranties about the work, and disclaims liability for 40 | all uses of the work, to the fullest extent permitted by applicable law. 41 | When using or citing the work, you should not imply endorsement by the 42 | author or the affirmer. 43 | 44 | 45 | 46 | [1]: https://creativecommons.org/publicdomain/zero/1.0/legalcode 47 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include LICENSE.md 4 | include README.md 5 | include requirements.txt 6 | 7 | recursive-exclude * __pycache__ 8 | recursive-exclude * *.py[co] 9 | 10 | recursive-include docs *.rst conf.py Makefile make.bat 11 | 12 | include versioneer.py 13 | include pydrograph/_version.py 14 | 15 | # If including data files in the package, add them like: 16 | # include path/to/data_file 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pydrograph 2 | Package for getting and processing streamflow and groundwater level measurements from the USGS National Water Information System (NWIS). Data can be queried by site number, lat/lon limits, or with a polygon shapefile. URLs are constructed for the rdb (tab-delimited) format. Data returned by the URLs are then parsed into pandas dataframes. Summary tables of available field measurements and daily values are generated automatically on instantiation of an `Nwis` object. Field measurements or daily values can then be fetched by site numbers. A baseflow module can perform hydrograph separation on daily streamflow values using the modified Base-Flow-Index (BFI) method (Wahl and Wahl, 1988; Institute of Hydrology, 1980). Annual base flows can also be estimated from miscellaneous measurements using the Wisconsin Statewide Equation method of Gebert and others (2007).
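For example, a minimal hydrograph-separation sketch (a sketch only: the `IHmethod` import follows the `pydrograph/baseflow.py` module in this repo and the `IHmethod_demo.ipynb` example, but the function name, its `block_length`/`tp` arguments, and the discharge-column selection are assumptions to verify against the actual signature and `df.columns`):

```python
import pydrograph
from pydrograph.baseflow import IHmethod  # name assumed from IHmethod_demo.ipynb

# inventory a lat/lon bounding box and fetch daily values for one site
nwis = pydrograph.Nwis([-91.497, 46.748, -90.228, 46.156])
df = nwis.get_dvs(4015475)

# pick the discharge column; its name encodes the NWIS parameter/statistic
# codes (discharge is parameter 00060), so filter on that code
Q = df.filter(like='00060').iloc[:, 0]

# modified BFI separation with 5-day blocks and a 0.9 turning-point test
# factor (argument names here are illustrative)
results = IHmethod(Q, block_length=5, tp=0.9)
```

The 5-day block length and 0.9 turning-point factor are the standard values from the BFI procedure of Wahl and Wahl (1988).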
3 | 4 | ### Version 0 5 | 6 | ![Tests](https://github.com/aleaf/pydrograph/workflows/Tests/badge.svg) 7 | [![codecov](https://codecov.io/gh/aleaf/pydrograph/branch/master/graph/badge.svg)](https://codecov.io/gh/aleaf/pydrograph) 8 | 9 | Getting Started 10 | ----------------------------------------------- 11 | ### Example notebooks 12 | 13 | 14 | [Demonstration/benchmark of the BFI method](https://github.com/aleaf/pydrograph/blob/master/examples/Notebooks/IHmethod_demo.ipynb) 15 | 16 | 17 | ### Get site information for an area defined by a lat/lon bounding box: 18 | ```python 19 | import gisutils 20 | import pydrograph 21 | from pydrograph.attributes import streamflow_attributes 22 | 23 | ll_bbox = [-91.497, 46.748, -90.228, 46.156] # nw lon, nw lat, se lon, se lat 24 | 25 | nwis = pydrograph.Nwis(ll_bbox) 26 | 27 | # Generate a url to get field measurements for the bounding box 28 | url = nwis.make_site_url('field_measurements', streamflow_attributes) 29 | 30 | # Get a dataframe of site information for the bounding box (url is generated internally) 31 | field_sites = nwis.get_siteinfo('field_measurements') 32 | 33 | # Write the site information out to a shapefile 34 | gisutils.df2shp(field_sites, 'NWIS_field_measurements.shp') 35 | 36 | # Get inventory of daily values sites 37 | dv_sites = nwis.get_siteinfo('daily_values') 38 | 39 | # Get daily values for a single site 40 | df = nwis.get_dvs(4015475) 41 | ``` 42 | ### Bugs 43 | 44 | If you think you have discovered a bug in pydrograph (behavior that does not work as intended), please submit a [GitHub issue](https://github.com/aleaf/pydrograph/labels/bug). 45 | 46 | 47 | Installation 48 | ----------------------------------------------- 49 | 50 | **Python versions:** 51 | 52 | pydrograph requires **Python** 3.8 (or higher) 53 | 54 | **Dependencies:** 55 | numpy 56 | pandas 57 | fiona 58 | shapely 59 | pyproj 60 | gisutils (available from pip or atleaf conda channel) 61 | 62 | ### Install Python and dependency packages 63 | Download and install the [Anaconda Python distribution](https://www.anaconda.com/distribution/). 64 | Open an Anaconda Command Prompt on Windows or a terminal window on OSX. 65 | From the root folder for the package (that contains `requirements.yml`), install the above packages from `requirements.yml`: 66 | 67 | ``` 68 | conda env create -f requirements.yml 69 | ``` 70 | activate the environment: 71 | 72 | ``` 73 | conda activate pydrograph 74 | ``` 75 | 76 | ### Install to the site-packages folder 77 | ``` 78 | python setup.py install 79 | ``` 80 | ### Install in current location (to current python path) 81 | (i.e., for development) 82 | 83 | ``` 84 | pip install -e . 85 | ``` 86 | 87 | 88 | 89 | Disclaimer 90 | ---------- 91 | 92 | This software is preliminary or provisional and is subject to revision. It is 93 | being provided to meet the need for timely best science. The software has not 94 | received final approval by the U.S. Geological Survey (USGS). No warranty, 95 | expressed or implied, is made by the USGS or the U.S. Government as to the 96 | functionality of the software and related material nor shall the fact of release 97 | constitute any such warranty. The software is provided on the condition that 98 | neither the USGS nor the U.S. Government shall be held liable for any damages 99 | resulting from the authorized or unauthorized use of the software.
100 | 101 | 102 | References 103 | ---------- 104 | Gebert, W.A., Radloff, M.J., Considine, E.J., and Kennedy, J.L., 2007, 105 | Use of streamflow data to estimate base flow/ground-water recharge for Wisconsin: 106 | Journal of the American Water Resources Association, 107 | v. 43, no. 1, p. 220-236, http://dx.doi.org/10.1111/j.1752-1688.2007.00018.x 108 | 109 | Gebert, W.A., Walker, J.F., and Kennedy, J.L., 2011, 110 | Estimating 1970-99 average annual groundwater recharge in Wisconsin using streamflow data: 111 | U.S. Geological Survey Open-File Report 2009-1210, 14 p., plus appendixes, 112 | available at http://pubs.usgs.gov/ofr/2009/1210/. 113 | 114 | Institute of Hydrology, 1980, Low flow studies report no. 3--Research report: Wallingford, Oxon, United Kingdom, Institute of Hydrology Report no. 3, p. 12-19. 115 | 116 | Wahl, K.L., and Wahl, T.L., 1988, Effects of regional ground-water level declines 117 | on streamflow in the Oklahoma Panhandle, in Proceedings of the Symposium on 118 | Water-Use Data for Water Resources Management: American Water Resources Association. -------------------------------------------------------------------------------- /ci/test_environment.yaml: -------------------------------------------------------------------------------- 1 | name: pydrograph_ci 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - ipython 7 | - jupyter 8 | - notebook 9 | - ipykernel 10 | - numpy 11 | - scipy 12 | - pandas 13 | - xmltodict 14 | - fiona 15 | - shapely 16 | - pyproj 17 | - codecov 18 | - coverage 19 | - flake8 20 | - pytest 21 | - sphinx 22 | - matplotlib 23 | - numpydoc 24 | - sphinx-copybutton 25 | - sphinx_rtd_theme 26 | - pip 27 | - pip: 28 | - gis-utils -------------------------------------------------------------------------------- /examples/Notebooks/NWIS_gw_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd\n", 11 | "import matplotlib.pyplot as plt\n", 12 | "from matplotlib.backends.backend_pdf import PdfPages\n", 13 | "import fiona\n", 14 | "from shapely.geometry import Point, shape\n", 15 | "from gisutils import df2shp, project\n", 16 | "from pydrograph import Nwis" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "### instantiate an NWIS object using a polygon shapefile of the study area\n", 24 | "* `bbox.shp` is in UTM 83 zone 15 N\n", 25 | "* pydrograph will automatically reproject the extent to GCS NAD83 by default (EPSG: 4269), which is the typical GCS used by NWIS. Alternatively, the NAD27 datum can be used (EPSG: 4267) by passing `datum=NAD27` to the `Nwis()` constructor." 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stdout", 35 | "output_type": "stream", 36 | "text": [ 37 | "reading extent from ../data/bbox.shp...\n", 38 | "reprojecting extent from +init=epsg:26915 to +proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs\n" 39 | ] 40 | }, 41 | { 42 | "name": "stderr", 43 | "output_type": "stream", 44 | "text": [ 45 | "/Users/aleaf/Documents/GitHub/pydrograph/pydrograph/nwis.py:163: FionaDeprecationWarning: Collection.__next__() is buggy and will be removed in Fiona 2.0.
Switch to `next(iter(collection))`.\n", 46 | "  g = shape(shp.next()['geometry'])\n", 47 | "/Users/aleaf/opt/anaconda3/envs/gis/lib/python3.8/site-packages/pyproj/crs/crs.py:280: FutureWarning: '+init=<authority>:<code>' syntax is deprecated. '<authority>:<code>' is the preferred initialization method. When making the change, be mindful of axis order changes: https://pyproj4.github.io/pyproj/stable/gotchas.html#axis-order-changes-in-proj-6\n", 48 | "  projstring = _prepare_from_string(projparams)\n" 49 | ] 50 | } 51 | ], 52 | "source": [ 53 | "nwis = Nwis(extent='../data/bbox.shp')" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "### field measurements inventory table\n", 61 | "* fetches inventory tables of all misc field measurements within the polygon" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 3, 67 | "metadata": {}, 68 | "outputs": [ 69 | { 70 | "name": "stdout", 71 | "output_type": "stream", 72 | "text": [ 73 | "getting site inventory for gwlevels...\n", 74 | "url: http://nwis.waterdata.usgs.gov/usa/nwis/gwlevels?nw_longitude_va=-92.700&nw_latitude_va=46.800&se_longitude_va=-92.600&se_latitude_va=46.700&coordinate_format=decimal_degrees&group_key=NONE&format=sitefile_output&sitefile_output_format=rdb&column_name=site_no&column_name=station_nm&column_name=site_tp_cd&column_name=dec_lat_va&column_name=dec_long_va&column_name=coord_meth_cd&column_name=coord_acy_cd&column_name=coord_datum_cd&column_name=dec_coord_datum_cd&column_name=district_cd&column_name=state_cd&column_name=county_cd&column_name=country_cd&column_name=land_net_ds&column_name=map_nm&column_name=map_scale_fc&column_name=alt_va&column_name=alt_meth_cd&column_name=alt_acy_va&column_name=alt_datum_cd&column_name=huc_cd&column_name=basin_cd&column_name=topo_cd&column_name=data_types_cd&column_name=instruments_cd&column_name=construction_dt&column_name=inventory_dt&column_name=tz_cd&column_name=local_time_fg&column_name=reliability_cd&column_name=gw_file_cd&column_name=nat_aqfr_cd&column_name=aqfr_cd&column_name=aqfr_type_cd&column_name=well_depth_va&column_name=hole_depth_va&column_name=depth_src_cd&column_name=project_no&column_name=rt_bol&column_name=peak_begin_date&column_name=peak_end_date&column_name=peak_count_nu&column_name=qw_begin_date&column_name=qw_end_date&column_name=qw_count_nu&column_name=gw_begin_date&column_name=gw_end_date&column_name=gw_count_nu&column_name=sv_begin_date&column_name=sv_end_date&column_name=sv_count_nu&date_format=YYYY-MM-DD&rdb_compression=file&list_of_search_criteria=lat_long_bounding_box\n", 75 | "reading data with pandas...\n", 76 | "finished in 1.70s\n", 77 | "\n", 78 | "culling 11 sites to those within extent...\n", 79 | "finished inventory in 1.74s\n", 80 | "\n" 81 | ] 82 | } 83 | ], 84 | "source": [ 85 | "gw_field_sites = nwis.get_siteinfo('gwlevels')" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "metadata": {}, 91 | "source": [ 92 | "### daily values sites inventory table" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": 4, 98 | "metadata": {}, 99 | "outputs": [ 100 | { 101 | "name": "stdout", 102 | "output_type": "stream", 103 | "text": [ 104 | "getting site inventory for gwdv...\n", 105 | "url: 
http://nwis.waterdata.usgs.gov/usa/nwis/dv?referred_module=gw&site_tp_cd=GW&nw_longitude_va=-92.700&nw_latitude_va=46.800&se_longitude_va=-92.600&se_latitude_va=46.700&coordinate_format=decimal_degrees&group_key=NONE&format=sitefile_output&sitefile_output_format=rdb&column_name=site_no&column_name=station_nm&column_name=site_tp_cd&column_name=dec_lat_va&column_name=dec_long_va&column_name=coord_meth_cd&column_name=coord_acy_cd&column_name=coord_datum_cd&column_name=dec_coord_datum_cd&column_name=district_cd&column_name=state_cd&column_name=county_cd&column_name=country_cd&column_name=land_net_ds&column_name=map_nm&column_name=map_scale_fc&column_name=alt_va&column_name=alt_meth_cd&column_name=alt_acy_va&column_name=alt_datum_cd&column_name=huc_cd&column_name=basin_cd&column_name=topo_cd&column_name=data_types_cd&column_name=instruments_cd&column_name=construction_dt&column_name=inventory_dt&column_name=tz_cd&column_name=local_time_fg&column_name=reliability_cd&column_name=gw_file_cd&column_name=nat_aqfr_cd&column_name=aqfr_cd&column_name=aqfr_type_cd&column_name=well_depth_va&column_name=hole_depth_va&column_name=depth_src_cd&column_name=project_no&column_name=rt_bol&column_name=peak_begin_date&column_name=peak_end_date&column_name=peak_count_nu&column_name=qw_begin_date&column_name=qw_end_date&column_name=qw_count_nu&column_name=gw_begin_date&column_name=gw_end_date&column_name=gw_count_nu&column_name=sv_begin_date&column_name=sv_end_date&column_name=sv_count_nu&date_format=YYYY-MM-DD&rdb_compression=file&list_of_search_criteria=lat_long_bounding_box\n", 106 | "reading data with pandas...\n", 107 | "finished in 1.59s\n", 108 | "\n", 109 | "culling 2 sites to those within extent...\n", 110 | "finished inventory in 1.63s\n", 111 | "\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "gwdv_sites = nwis.get_siteinfo('gwdv')" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "### write shapefiles of the inventory tables\n", 124 | "* `shapely Points` were created from the lat/lon coordinates when the tables were assembled; these are used to write the feature geometries for the shapefiles" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": 5, 130 | "metadata": {}, 131 | "outputs": [ 132 | { 133 | "name": "stdout", 134 | "output_type": "stream", 135 | "text": [ 136 | "writing gw_dv_sites.shp... Done\n", 137 | "writing gw_misc_sites.shp... 
Done\n" 138 | ] 139 | } 140 | ], 141 | "source": [ 142 | "df2shp(gwdv_sites, 'gw_dv_sites.shp', epsg=4269)\n", 143 | "df2shp(gw_field_sites, 'gw_misc_sites.shp', epsg=4269)" 144 | ] 145 | }, 146 | { 147 | "cell_type": "markdown", 148 | "metadata": {}, 149 | "source": [ 150 | "### get daily values for a list of sites" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": 6, 156 | "metadata": {}, 157 | "outputs": [ 158 | { 159 | "data": { 160 | "text/plain": [ 161 | "['464222092403801', '464322092401401']" 162 | ] 163 | }, 164 | "execution_count": 6, 165 | "metadata": {}, 166 | "output_type": "execute_result" 167 | } 168 | ], 169 | "source": [ 170 | "sites = gwdv_sites.site_no.tolist()[0:2]\n", 171 | "sites" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": 7, 177 | "metadata": {}, 178 | "outputs": [ 179 | { 180 | "name": "stdout", 181 | "output_type": "stream", 182 | "text": [ 183 | "http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=464222092403801&startDT=1990-01-01¶meterCd=72019\n", 184 | "http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=464322092401401&startDT=1990-01-01¶meterCd=72019\n" 185 | ] 186 | }, 187 | { 188 | "data": { 189 | "text/plain": [ 190 | "dict_keys(['464222092403801', '464322092401401'])" 191 | ] 192 | }, 193 | "execution_count": 7, 194 | "metadata": {}, 195 | "output_type": "execute_result" 196 | } 197 | ], 198 | "source": [ 199 | "dvs = nwis.get_all_dvs(sites, 'gwlevels', start_date='1990-01-01')\n", 200 | "dvs.keys()" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": 8, 206 | "metadata": {}, 207 | "outputs": [ 208 | { 209 | "data": { 210 | "text/html": [ 211 | "
\n", 212 | "\n", 225 | "\n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | "
agency_cdsite_nodatetime74007_72019_0000374007_72019_00003_cd
datetime
2006-07-01USGS4642220924038012006-07-018.48A
2006-07-02USGS4642220924038012006-07-028.51A
2006-07-03USGS4642220924038012006-07-038.54A
2006-07-04USGS4642220924038012006-07-048.56A
2006-07-05USGS4642220924038012006-07-058.59A
\n", 287 | "
" 288 | ], 289 | "text/plain": [ 290 | " agency_cd site_no datetime 74007_72019_00003 \\\n", 291 | "datetime \n", 292 | "2006-07-01 USGS 464222092403801 2006-07-01 8.48 \n", 293 | "2006-07-02 USGS 464222092403801 2006-07-02 8.51 \n", 294 | "2006-07-03 USGS 464222092403801 2006-07-03 8.54 \n", 295 | "2006-07-04 USGS 464222092403801 2006-07-04 8.56 \n", 296 | "2006-07-05 USGS 464222092403801 2006-07-05 8.59 \n", 297 | "\n", 298 | " 74007_72019_00003_cd \n", 299 | "datetime \n", 300 | "2006-07-01 A \n", 301 | "2006-07-02 A \n", 302 | "2006-07-03 A \n", 303 | "2006-07-04 A \n", 304 | "2006-07-05 A " 305 | ] 306 | }, 307 | "execution_count": 8, 308 | "metadata": {}, 309 | "output_type": "execute_result" 310 | } 311 | ], 312 | "source": [ 313 | "dvs['464222092403801'].head()" 314 | ] 315 | }, 316 | { 317 | "cell_type": "code", 318 | "execution_count": 9, 319 | "metadata": {}, 320 | "outputs": [ 321 | { 322 | "data": { 323 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEECAYAAADTdnSRAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/Il7ecAAAACXBIWXMAAAsTAAALEwEAmpwYAABJGklEQVR4nO2dd5wb9Zn/34/K9l3bW9zLGtw7YIopNsUhoYSSI3dpBNK45FIghdyRShLSk9+l3KWQhJAKoV4KoQRC7zbYNDfccN9dby/aVfn+/pgZaVar3dWuZqSR9H2/Xn5ZK400j0bSZ555vk8RpRQajUajKSx8uTZAo9FoNM6jxV2j0WgKEC3uGo1GU4BocddoNJoCRIu7RqPRFCCBXBsAUF9frxobG3Nthkaj0eQVGzdubFFKNaR6zBPi3tjYyIYNG3Jthkaj0eQVIrJ3uMd0WEaj0WgKEC3uGo1GU4BocddoNJoCRIu7RqPRFCBa3DUajaYA0eKu0Wg0BYgWd03aRGOKvoEou5q7+evmg7k2R6PRjIAn8tw1+cGFP36C3S3dhMIxAFbOnMjsuoocW6XRaFKhPXfNqNz/6mGWfOk+thzqjAs7wNrvPsw7bnyagUhshGdrNJpcUPTiHonGiMX0wJLhiMYU//67jfQORFM+/syuVu58YX+WrdJoNKNR1OLeH4ny5h88xlW/25hrUzzLuu8+POS+6tIAu795Pju/cT4+gTdae3NgmUajGYmiFfeD7X2c8LUH2dncw4NbjnDdXS/n2iTPsa+1l/1tfQA89Ol1/L9/XQnA1evnIyL4fUJ9VSmt3QO5NFOj0aSgaMX9o398ge7+CFesmQPALc+9Qe9AJMdWeYvfP2P0JLpm/XyObaji4lUz+Om7j+c9p8yJb1NbWcLRnv5cmajRaIahKMX94W1NvPhGO5esms5XLl7Gd/5lBQCtPdoDtdjZ3M2vn9zDm5ZM4Zr1CwDw+4Tzlk+jLOiPb1dXVcJRfdw0Gs9RFOLe1jPA6hse5McP7WAgEuOzd7xEdWmA6y9aCsCkyhJAi7udHz64g0gsxqfetGDE7eoqS/Vx02g8iCviLiJXi8grIvKqiFzjxj7Gwqdv30xLdz+/enI3j+9oprmrn+9ctoKJFYao11YGAS3uFkopntrZwkUrp7N4Ws2I29ZWluiYexo8vqOZM7/7MNuPdOXaFE2R4HgRk4gsAz4EnAQMAPeJyD1KqR1O72s4ItEYu1p6mFtfyfzP3xu/f9n0CTy18yglAR9nLZocv7+2shTQ4m7x2qFOWroHOHVe/ajb1lWW0NUfoT8SpTTgH3X7YuSpnS1c/qvnjNuvt7BgSnWOLdIUA2547ouBZ5RSvUqpCPAocKkL+xmWGx/fxfk/fJyNe9sG3b/9SBeb9rWzbHrNoLhxbYUOy9h5dHszAGcuSDm9axC1Vcaxa+sJu2pTvhKNKT592+b43yFd8KXJEm6I+yvAWhGpE5EK4HxgVvJGInKViGwQkQ3Nzc2OGvDnFw8SiSkefO0IADe/70RWzppIU1c/G/e2Mbe+atD2NeUB/D7R4m7yyLZmlkyrYXJN2ajb1pnrFTpjJjWPbm/iUEeI715mLNr3DVMMptE4jePirpTaAnwb+AdwH7AZGJJjqJS6USm1Wim1uqFhdA8xXbYd7mKbGdd8eFsTfp+w5tg6Ll01Pb5NddngaJSIMKmihLZeLe49/RFe2NvG2jS8dtAhrZEIR2Ncc+smpk8o4+JVMygL+giFtbhrsoMrC6pKqV8ppY5XSq0FWoGsxdv/9lKiW+HO5h7mT66iNODnytPm8vR1Z7N0eg1vXTl9yPMaqkvZ19qXLTM9yy8f300kpjj12Lq0tp9VWw7AjiPdbpqVl1z12w10hiL8ywkzKQn4KA/6+e3Te9mnK3o1WcCtbJnJ5v+zgbcBt7ixn2Raewb48T9f57R5dZQEjLf25qVT449Pm1DOPZ84gxPmTBry3NPn1fHs7qN0hoo3dhwKR/nz5gNUlvhZk6a4T5tQzuzaCp7aedRl6/KL15u6eHhbMxMrglxuFspFYoq+cJRP3755lGdrNJnjVp77nSLyGvBX4KNKqbbRnuAE971yGIB3nzyHP3zwZGbXVnDJcTPSeu5blk0lHFU8vLXJTRM9zRU3Pceu5h7eedJsgv70vxrrFjTw5OsthKN6sdDil4/vpjzo58FPrWNytbF2ccWaRgD8Ijm0TFMsuBWWOUMptUQptVIp9ZAb+0jF/a8eZk5dBectm8qJjbU89tmzmFtfmdZzj5s1iQnlQZ7ZVZweqFKKZ3e3ArBsxoQxPffEubX0haNsO1x8OdyRaIyOvsFXe/947Qi3Pr+PC1ZMo76qNH7/Z968kBPmTMJXFKWDmlxTMF+zaEyxcW8bZ8yvR8bhGfl8wqKp1fx18yEOthdf7P21Q50AvPvk2Vy8auiaxEgcN2siAC/ua3fYKu/z/X9sZ+VXHhi0oHzrc28A8JEzjx2yfUWJf9j2yRqNkxSMuO9s7qa7P8Jxs4bG09Pl2jcvpLs/wo8eytr6r2d4aEsTInDN+gVjPjnOnFROfVVJ0YW0wtEYP31kJ5Boe9zTH+GR7c1ceWojxzZUD
XlOedCv0yE1WaFgxN0SlpOPqR33a6xurGX94sk8t6fVKbPyhoe2NrFy5kQaqktH3zgJEeEdJ87m4W1NHO4IuWCdN3n5QEf8dr+Z4vjKgQ6iMcXaBamre7XnrskWBSPu9716mOUzJjBzUmYzPVfNmsiu5p6iyttu6gqxeV8759haMoyVfzlhJkrBnzcdcNAyb/N6UyL9s9+sPN1khqZWzpyY8jnlJQEt7pqsUBDivu1wFy++0c6bl07J+LXWLTAE7gcPbs/4tfKFm5/cA8A5i8d//ObWV7JsRg0Pbyue0MxOm7hbc2Q3729nVm05dVWpr4AmVgRp7x3Qox01rlMQ4n7Lc29QHvTzrpPnjL7xKCybUcPiaTXctmEf0SL4AYajMW7bsJ/qsgCLp2XW0GrBlGr2Hi2eAp0X3mij3OxRFPfc32gf1msHmDahjEhM0aLbNQDw2PZmPvmnTXrIugvkvbgrpbj1+TdorK+k1uxzkgkiwvtPayQUjvHH595AqcIW+Me2N9PS3c83Ll0+riwjO411lRzqCBXFgmF/JMqmfe2cubAh/vdDW45wsCPEipnDp5JOMfv1HOnQ4g7wkd9v5O4XD7D3aE+uTSk48l7cX2/qJhSOsXzGyH3Hx8KiqcZrffH/XuG7929z7HW9yKZ97fh9wvoMQjIWVnZIMfQsP9wRIhxVLJ1ufFf6I7F4ltVpI7RKnmqK+6GO4ku3TUYpRY/pCPQUgUOQbRzv555tHjKzZD597kLHXnPe5EQK208e2cmWQ53cdOWJiBidI1t7Bnh0ezNNXSFOmVvHGfPrCYyhotNLbNrXzsIp1ZSXZN6L/cRGIw316V1HWWnmvhcqVlbQMeYJ7bHtzWze38HyGRNYOn14z31WrbHgX0zhq+E4YKsn6e3X84udJu/FfcOeVubWV8Yvd52gvMTPe06ZzYY9bWw9bPQImXvd35luxkubuhKX1D9/dBfvOHEW3zLnsOYTsZhi8752LlgxtqKl4ZhcU8aiqdU8tr2ZD68bWsBTKMRiKt7zfr7pCNxrtr7oj4zsgdZWllBfVRrvXFrMbLIVvWnP3XnyWtxjZlWqEyGFZG64ZDlgNNP62B9f4MEtTRwcJof71uf38eZlUzlr4fhTCXPB7qM9dIYi8QpTJzh+ziT+/vIhlFIZx/C9yo/+uYOfmMVLUyYMdip6+kcXqUVTq4sidDUam23i3jugPXenyWtx39XSTVtvmNWN469KHY2yoJ+fX76a9/zyWfrCxiLapIogG7/wJnw+IRSOcvH/PMm1t2/mvmvWDuol4nU+9adNAI6GUObWVdLeG6YzFGFCedCx1/USj+9oid+uLg3w3jVzOLahiqkTylJWpSazcGo1v3piNzubu9PavlDZtK+dGRPLOdDeR7cOyzhOfgaKTQ60h5hUEWR14/irUtPB7xNuueoUfvP+kwC49LiZ+HyGV1oW9POjdx5HZyjCtbdvzpvsmvbeATbvNyos7WsMmWJ5skc6C7dStcvWFlpE+OrFy7ji1EbevHRqWsfyX46fCcBNT+x2zUavE4nGePlAR3xuQG8aVzyasZHX4r5uQQMvfPFNHJNm58dMmVAe5NnPncN15y8adP/CqdV87rxFPLytmd8+vTcrtmSK1SjsW29bjt/nXPjEygYp5DYEnX2ZeZlLptdwUmNtUQ842Xaki1A4xqnzDHH/+t+38EgRFcBlg7wWdzA8p2zGdqfUlKXsdX7FqY2ctbCBr/99S17EU7ccMmzMpCo1FXFxL2DP3YmBLjNry9nXVrwZM5v3GVeNx89OhFSvveOlXJlTkOS9uHsFEeE7l62kpizAJ2550fOzMl892EFDdem4GoWNxOQa4/UK2XN3ojfMzEkVHO4MFW1l5nO7j1JfVcLs2kQvqICDV5AaLe6O0lBdyncvW8nWw1185z5vFz+9drAzXoDjJGVBP7WVJQXruTu1pjJrUjlKUZSzA0LhKH/ZfJDT5xmzF1abYy9nTirPsWWFhRZ3hzlr0WSuWDOHm57c7dkYYigcZUdTtyviDsaPtFDLyZ1qN2QVMxVjaOa/7nyJmIJ1ZuuG333gZGorS8iTXIS8QYu7C1x3/mIWTqnmM7e/REu393qI7D3aSzSmWDAls0Zhw7FkWg2vHezMm8yhseBUM7k5dYa4W0M+ioWBSIx/bm1iybQa3moWz5WX+Dl+9iSdDukwWtxdoCzo54fvXEVnKMx/3vGS50Rud4uRpXFMvTs51kun19DWG+ZQAcbdnRL3KdVllAZ87GkpzCuc4XjhjTY6QxGuXj9/UMuOqlI/PbqQyVG0uLvEoqk1/NdbFvHQ1iZ+/4y30iN3mYLSWJ/ZYJPhWGoO2H5pf7srr59LorYT9TtPmjXu1/H5hDl1FexoKq50SGvAyfKkIeyVpYG0qns16aPF3UXed1oj6xY0cMM9W9jhofTI3c09NFSXUl3mTgXpsukTqCjx8+TrR115/Vxiee5fvHAJ33xbZv2Ezlo4mUe2NfP2nz3l+ewqp9h7tIfSgC+eMmtRVRrQYRmH0eLuIiLCd9++gqrSAJ+4ddOoTaWyxd7WXhrr3PHaAUoCPtYcU8djO5pd20eusCYo+R3I2vvAGXMBeH5PG49tL7xjlYo9R3uZU1cRr/C2qCwNMBCJFW1qqBtocXeZydVlfOeyFWw51Ml3PZIeebC9jxkT3U07Wzq9hjdaewtunFzEEncHWjxPri7jhS++CZFExXChs/doD3PqhlaUV5Yaba56bN77zuZuvvznV4hEteCPh7xuHJYvnLN4CpefModfPrGbtQsaWLugIWe2RGOKwx0hprss7pWlAZSCvnA0/sMtBGLK8tydKbiprSxhbl0lrxzocOT1vEwspth7tJczU3RPnWg2mevoC9MzEOH0bz8cf+zfTpzNEpfSdgsZVzx3EfmkiLwqIq+IyC0i4lyz9Tzl8xcsZv7kKj59+2aOmumRT73eQmvPQFbtaO7qJxJTTHNZ3CtSeGKFgBVzd3I2y+rGSbzwRrtzL+hR/m/TAfojsXgaqJ3aKmNE5nO7W7l9w/5BjxVbuqhTOO5SicgM4BPAEqVUn4jcBrwDuNnpfeUTZUE/P3zHcVzyv09ywg0Pxu9/y9Kp/OzyE7Jmh5V3P9nhtgPJVJUak51GGsLwgwe3c9MTu5k/pZqFU6tZPLWahVNrWDi12rPtgi1x9znYz6i2srTgFxNbuvv51G2bAThn0dB+RvWVxvfxs3cO7S9zVA8THxduXS8HgHIRCQMVwEGX9pNXLJlew6fOXcC37t0av++J11tGeIbzdPYZTa/cFs/y4Mie+3V3vcQtz+0DjBDH3zYf5I/PJradNqGMhVOrWTS1hkVTDfE/tqGKkoCP6+56mc5QmP991/GuvodUWOIecGJF1aQ86GcgEiMaU4526PQS/9xiVGsvmlrN1AlDL+TrqoYfbt82zNXtUztbeNcvnuXTb1rAx8+Z74yhBYTj4q6UOiAi3wPeAPqAB5RSDyRvJyJXAVcBzJ4922kzPMuHzjgmLu6XnTCTu17YT09/JGtx6Q5T3GtcSoO0
sEQqVf3Ws7uOxoX94c+cydz6SpRSHO4MsfVwF1sPdbHtcCdbD3fx5OsthKOmoPqE6RPL45fpD7z6dz5//mLefcqcQZ0623oGePz1FpbPmMBch9tBW3nuTnruFeb82r5wlKoCWp+w89qhTipL/Nx79RkpH6+tHF7cdzWnLvT6wYPGQPLv/2M7Hz1r3pAMnGLHjbDMJOBiYC7QDtwuIu9RSv3evp1S6kbgRoDVq1cXVkrFCFiit2LmBC5YPo07Nu7n5QMdnHJMXVb2b7WrrSl3V0Ss35li8EfbNxDlXb98loBPePZz51BnTq4SEaZNKGfahPJB4wrD0Ri7mnvYeriTbYe72Ha4Ky7u4aji+r++ht8nXL6mMf6cC3/8RHz48u5vnu9oS+hEzN1Bz90U996BSMGK++tN3Rw7uWrYz6IsOPyA9p3NQwu9lFJssWUYbdjbxklz3R3ak2+4saC6HtitlGpWSoWBu4BTXdhP3rLpS2/itn9fw6pZE/EJ/OO1I1nbtzVowu2wjOXZJmdC3vHCfqIxxdXnzI8L+0gE/T4WTq3m4lUz+OxbFvGrK09k1zfO54ZLlnH+8qkAvGguRh7pDHGgvS8u7AA3PbnHkfdjEQ/LOCnuprD1ZXFI9P62Xs7/4eP8ZXMiYhqLKZ7a2cJX//oav3tmr6NtM7Yf6Rp1StXvP3AyHzh97qD7ls2oSdlhtKmrn65QhPeumQMYIRrNYNxwE94AThGRCoywzDnABhf2k7dMrDAuQcuCfk6bV89tG/bxhQsWZ2XoSEdfGJ9AZYm7HqL1VmJJAvHF/3sFgA+tPWbcr+3zCe85ZQ7vOWUOH/zNBja+0caRzhAX/vgJmruMxbcPrzuWnz26k6/97TWuWDNnUB+TTHBjQbUi7rm7L+6RaIyfPbqT7z2wHYBP/mkTF62cTlcozPLrB0dPaytKuGDFtIz32dLdT1NXP0unTxhxu9Pn13PavDpmTSrn+r++BsDyGRO584X9Q7a1BuK8ZdlUHtrSVHQ9etLBcc9dKfUscAfwAvCyuY8bnd5PoTC1poyuUIT7X82O997RF6a6LOh6fNI6Udm9P/u0+5Euw8fC6fPq2Hu0l5O/8VBc2MFo/WDhZG/5eJ67C2GZPpdbEOxq7uaynz0dF3ZITM76xC0vxu/72sVLmV1bwV0pRHWsxGKKrebUr+SWA6kQEa48bS7/+ORavvzWJcYVZooLiO3miMIFU6pprK9g91GdLpmMK3nuSqkvK6UWKaWWKaUuV0rpXKZh+PCZxwLw4r62rOxvZ3P3oOk3bhGPudt+mD97dCcVJX5evv5cx/ZjL4hZPK2GL1ywmF9dsZopNWX81hxofqDNuYEYVoWqkydHt8MysZjiN0/t4fwfPc7ulh5+9M7j2POtC/jg6XM50N7H2d9/hIe3Ge0Ptt9wHpevaWTJtBr2ONCT/xeP7+I9v3oWGHnRNJn5U6p532lzERm6bgOw40gXtZUl1FeVMre+kt3N3Z7rvpprCnP1Jo84tqGKBVOq2H7Y/cZiSileOdDBBWYfbTdJjrm/3tTFva8c5qKV0x1tWNZYX4nfJ0Rjir9/4vRBoa0Z5mSfAw5OO4q5EHOvMENkboRlDrb3ce0dm3ny9aOcubCBb//LCqaYHnRVmbFfKxvl9g+voSRg+HuVpQFC4czL/v/20qH47foR0h2HQ0idcbX9SBfzzRh+Y10lnaEIbb3hMZ1ACh0t7h5g2fQJWcl339faR2coMqTdqhskx9yf221cmVyVQax9OB75zJmUBnxD1iys/jlOeu7xbBkHY+5uhGWUUtz5wgG+8pdXiSrFNy5dzjtPmjXoGL199SwOtvfxwGtH+P7bV3JiYyLbJOgXwg70dLFXYI+nKtonMsRvV0qx40g3Fx9nOClWuuvulh4t7ja0uHuAYxoquevFA3SGwlSXBlxbWH3loNG/ZNkM9/t0CJbnbvw0N+1rY1JF0JXRfrOGCTOVBf3UV5Wy3wVxdzQsY4m7Q8MqWrr7+dxdL/PAa0c4sXES33/7KmanKPmfMbGc71y2ku+keI2g3+eIuNvXO8aT5ikydFH+cGeIrv5IfJJYoynue1p6OMGcx6rRXSE9gXWZvOL6B/jWfVtH2Xp0QuEoV9/6YnwwgoX1t1vj9ewkx9w37Wtn5ayJWckIsjNtQhl/2rCPrlDYkQ6VURcWVCuCzmXL3PfKYd7834/xyLZmPnf+Im69ak1KYR8NQ9wzO14t3f0ZT65KFZaxFlPnTza+x7MmVeATCnZu73jR4u4BTp9fH7/980d3Zfx6f9l0kD9vOsgvHhv8Wke7+6kuCziWqTISPluFand/hB1N3ayaNdH1/SYzzSx1X379A3zpL69k/HruFjGNX9w7+sJ86rZNfPj3G5k6oYy/fvx0rlp77LjtDAaEgQw9950252LcU6tSOAPW4JsFU4yYe0nAx6SKElqy3ITPjlKKnz2601NjNXVYxgNMm+Bsh8ZmsznYhIrBC5etvWHqshST9Nli7i/tb0cpciLuX3rrEh7Z1sxANMbvn3mDGy5ZntHruRFzN9YLGPc0pid2tHDtHZtp6urnE2fP42Nnz48vjI6XEr8v4z7qR02xvffqM1g8bXzhuMQVoIpf9e040k1dZcmgIrjayhJau3Mn7tfd9TK3Pm+01PjQ2mNGLdjKBtpz9wg/eudxgOGFZHrmt/K9k/Wntac/awtOIomY++Z9Rqx/5cyJWdm3nZmTKnjss2cxt74yIwG1cMNzFxEqgv4xe+59A1G+/OdXeM+vnqW8xM+dHzmVT527MGNhByMsE1OZDQRv6zXENpPvXGLtJnHf9qYu5k8ZLJ6TKkto7c2NuD++o5lbn98Xn2622yMFVVrcPcJFK6dzwyXLGIjEePVgZlN5rNS/7tDgBbqj3QPUVrrb6tfCkj6ljMXUxroKJuUok2HqhDKuWT8fpTL/4cVcaBwGhjhZff7T4YU32rjgR4/zm6f38r7TGrnn42c4emVkdb3MZFG1pcsQ20xaXYjNc7f+f/1I95B1o7rKkqzPRgDDWfjgbzZQUxbg1qvWALDPI/3ntbh7iPOXT6O6NMDvnt6b0etYqX+RpAWx1p6BLIZlzJg7ipf2d7AiB167HUv4nt/TmtHrWFrnZMtfMBYF0znxDERifO/+bVz206foj8T44wdP5stvXRqP2ztFidmuIZO4+zO7jrJoanVGazxxJ8H8/1CHkSkzP0ncJyWJ+ysHOrIydPzeVw7RH4lx0arpTKkppaLEz742Le6aJGorSzh+ziQ272/P6HUszz1qC+8opWjrHYhPvHEbS9x7+qMc6gjFF79yxezaCkoDvoy9qkjMEDunPffVjZN46UAHvSOkQ2493Mkl//sk//Pw67zt+Jnce80ZnDqvftjtM8FqoRwe58Dq7v4IG/a2sm5hZiMlfUmto63PL3nA+9SaMlp7BgiFo7R093Phj5/gv1IM/nCSp143mqzNqavgaxcvQ0SYNamCfa3Opd5mghZ3jzFvchVbD3exK0Wb03ToCoXjPdvtqX+doQjhqKK2Ilsxd+N/qz3
vcLno2UJEqK8qpSXDRTc3esuA0TrBCGG1D3ksGlP89JGdXPTjJ2nqCnHj5SfwvbevdLUnf1zcx5kO+eyuo4SjinUOzQu2jvsRcz1pSlKfGquQ6aN/eCHexuH5Pe609IhEY3znvq2865fP0tYb5osXLImvMc2qLWe/9tw1qbA04+L/fXJczz/YnigasXvu1iVrthZULc/W6taXjX42ozGpMkh7hotu8bCMw+K+dkEDVaUB/vzi4KFle4/28G8/f5pv37eVsxdN5v5r1nLu0qmO7jsVwQxj7nvMRl6Lp2ZWtJZ8gdRkFkUlj4m0xP2hrU3x/u/WVZbTfP8f2/nJIzupryrhN+8/ifVLEmMDa8qCnhmZqMXdY1xxaiNgxFbHg+W1w+BMh1ZzDmW2wjLWj3Kv+SP3griXB/0Z90uJWmEZh8W9qjTA2gX1PGnrS/6P145w3g8fZ9uRLv7731by0/ccn1YPfCewMm7GK+5HOkOUBnxMrMjs6sLKlrH8lOaufkoCviGLtI22iVvW1U/ympMTDERi/On5faxb0MCGL7xpyJVJadCX9m83Eo3xyT9t4r5XDjtuJ2hx9xwzJ1Vw9TnzGYjG6I+MfUHIPrPUnlF51AxHZHtBdW9rD5Ulfk/0/CgL+unNOBXS+N/JPHeLydVldPQmTs43P7Wb2soS7r9mLZceNzOr1b2ZhmUOdYSYOqEsY5uTJ3o1dfXTUFU65HXtrQ2s4S1OSntXKMyRzhBP7WyhtWcgPiQkmdKAn/40xf32jfu5+8UDbHOpaaAWdw+ycGo1SsHL+zvG/FzrklQk2XPPdljG+P9IZz+zaiuy3nYgFRUlfkIZlvjHZ6i68MupKQvQPRCJr5V0hyLMm1zF9HE03MoUK+w0bs+9IzQkLj4eEg3ojP+bukJMqUl99fLCF99EXWUJj2432heXOZDv39QVYv7n/87y6x/gjO88zPN7WvH7hDXHph6LWRLwpeWUKaX4xeO7mDGxnE+cMy9jO1Ohxd2DnGzOgky1uDYSR7v7ueGeLQBUlwYGxdytasG6bOW528TcCyEZMMIyfeEorx7s4OGtTeN6jagpdgEX1L26LIhS0GNmzHSFcjdTNRjILBXycGcoreEco5EIy5gLqp39TK5O/bq1lSX8x1kJoZyc4f47Q2FO+vpD8auXgUiMh7Y0sWx6TbxNczKlASMsM1oh4oa9bexq7uHq9fNdc3y0uHuQuqpSGqpLuWPj2CbhHOpILKZOqiwZlC3T1jNAedDveD70cNi/r7nOlLEoLzHE/YIfPcH7bn5+XK8RcaFC1aKydHBf967+CNVluRH3kgxSIZVShrhPcM5zt77JTZ0hJg/juQN84PS5fObcBUDmc4LP+PbD8dtXnzMfgK2HuzhhzvCDuEsDRmVvZJTK3t89vZfqsgAXLM98jOFwaHH3KNMmlLH1cNeYFlbtY+YmlAeHZMtkM+5tzwNvqM7O1cJolAUzD8sc7Rkg6BeqXfCok6tCu3PpuZviPppIpaKtN8xAJOaM5y6JBdVQOEpnKDIkUyaZj509n+NnT8yodcLL+zviyQlfvXgpaxck6glWzZ447PNKA4bzNFrc/ZWDHaw5pi5+QncDLe4e5Yo1jQBjypm1TxzyiQz6ch/NurgnbufK+0ymoiTzBdWWrn7qKktdmUFrpR9GoopINEZfOEpVqXu57OnYMp6wzGHzCtIRz938Xylle93R1yACPl9GqZAPb0uE7abUlA1aPxipIC8Q/wyH33coHGVPSw+LprrbeluLu0dprDdCGWOZY3nQFPeb33cifp8MGnLQ3hfOOC1tLNg9dyfH6mVCedA/6ISXboM2e9Voz4B7oRIrjh+OxujpN05CuToxZlKhetRMu3Xiii3RWybx/Z4+cfSTRsAv406FbO7q56eP7KSxroI/f/Q0zl0yZVCcv7GuctjnWgvRI101vN7UTUzBwgxrAEZDi7tHmTXJEPexTBE60N7HrNpyzlw4GZ+A3XHpDoVdrWhMRjzouSf3OEknZW3j3jaWfOl+/rn1iPGccMyRroupSBQOKTpDRkgg12GZ8aRCWplZkxxwJuy9ZQ6anvv0dDx3v4/wOMMyT+1soS8c5euXLo8PmCkJ+PjhO1Zx7ZsXjtgrx5eGuFtV23Prhz9JOIE3fnWaIVghlKNjKJc/2N4X/+L7RIgqxdM7j/Lzx3ZysD2U1RFk9gyA+ixl6IxG8mJyKBwdtamVlVXz4hvtnL1oCgNRN8XdinPH4ieesiwtgA+1ZfypkG1xcc88DJjoLaM4ZHru6YR7Aj6JF5yNlY172ygL+uJZaxYXr5ox6nOt+ofoCFeF7WYtg9thUu25e5SA38ekimD8EjcdDraH4kOh/T4hFlN8/4FtPLKtOevxW3tI2isLqhVJQpnOYqHVk9x6D/2RWDyTxGkCNm/Z8vycbnOQLsEMukK29YYRyTxbBRKee0wZQ2gmlAfT6jIZ8I0vLLO/rZc/Pb+P9YunxD+PseBPw3O3FmqdOD4joT13D1NXVZq25x6JxjjcGWLGpIS4R5Witz+xgFiVxfCIPebu9pc4XZJzk5MHLydzxU3PxQtiSq2870jMtTBT0FY4ZHnMbqRcpoN1dTIegWzrHaCmLDgucRyCrXV0S3c/9Wm2zxjvgO+v37MFEbju/MVjfi6kL+4lfh9lQXd9a+25e5i6yhKe2XWUJ19vGXXbI13GMGKrmtEnhuduvzx0I31vOOzi7vaXOF0qk8V9lN++JeyQqJAciMTiQu80lhhGbJ570OG+8emSiLmPXSCdTLuNv3sFLd0DaffWCfhlzKmQj+9o5t5XDvOxs+bFr4DHSnriPsCEiqDrVduufEtFZKGIbLL96xSRa9zYVyEzq7aCtt4w7/7ls/G2AsORyCQY7LnbS6HdzKlNxh668ELrARgacx/Nc7djbepuzN303GMxW7FUbk6MmcTc23vDjiymgn3oC7R0G31l0qHE70u7xwsYJ+3r//Iqc+oq+OAZx4zHVCB9zz0bV7OufHOUUtuUUquUUquAE4Be4G439lXIXH/R0vhta+L7cFjiPsNME7OyZXpyFJZxSwAzIfkKYiyendW4asDFmLs9/TASb3OQW8/9hnu2cKhjbMMnWnsGHFlMBXtvGWXUGKQZlqkoGdtM2t88tYedzT186cIlGU2OSmdBNa/FPYlzgJ1KqcxmxxUhVaUBfvru44HB1adNXSEa/+seGv/rHq6762UgsUgz0fxR+X1CJBajK5ToMpjNlEQvivuSaTX84N9W8fVLlwGDu2aOhrVt70CU8mH6imSK5fXFlHcWVAF+9NDrY3puR184/j3MFOvdD0RidIYi1KfpuZeXBOJDO0ajqTPEDx7cztmLJnPO4imjP2EE0kmFPNqdnXGX2fgFvgO4JflOEblKRDaIyIbm5uYUT9MA8UEAR23zIV89kBigfctzbwBGkylI5EWXBvy09gwMylOeMkzDJTfI1ULgSIgIlxw3I541M5J3ldzZz9qydyBCpUvpidYxi8RUPCzj9KzWsdoCjLn4rbMvTE25MydAy3O3JmiNxX
MfiMZGrBS1+Oa9WwlHFV+6cMm47bRIp4ipuauf+ixkkLkq7iJSAlwE3J78mFLqRqXUaqXU6oYGZ0ZxFSJBv4+askA8dxhIuaDX0x8h4JP4Y6UB35CRck6UgxcCVhx3pJh7u62vOgBKEYsp+sJRKlxau7DsisZUvHQ+VzF3OxVjCFNEY4qu/ohjBXPWeo115Zqu526dwEdrN/H8nlbufvEAH1o7d9DAj/EymuceicZo7R1I+31kZIvLr38e8IJS6ojL+yloaitLBnnuqejuj1BZGoj/GEpTZKg4tciV78TFfQTvyqqyXDrdKBGPKQhFoig1NF/eKexen5WCmKuwjJ2xdBLtNq8gnQoBWu9+yyHjajXdVEgrpt06QipxNKb40p9fZdqEMj56ljM91Ufz3Ft7B1AKGrIwEc1tcX8nKUIymrFRW1kSL6YB6LN5IytmTgCGpuiV+If+IL2StZJrErHt4bexrpQ+Zv7olVLxxWm3wzLRmC3mnqOwjJ2xpH5abRNqHFowtL6zP3xoBwDT0mg9AMZEMxi5fccfn93LlkOdfOGCJcP2Zx8rfhlZ3Js6neu7MxqurbCJSAXwJuDf3dpHsVBTHhxUzGSJ+zENlYTM2+GoGrQIZvfc1y5oYJV5Esg2XhnUYcdyhkeKi7b2Dp5cpTDykwEmOLRYmIx9QTWc4wXV8RIXd4c9d4t0p1LNqjW2G66ramvPAN97YDunHlvH+cudGzgeD8sME/KzZi6ke5LKBNfEXSnVC6SeRaUZE2UB/6AFPmvI88TyYDyuHonFBnl5dm/rxstPyCi9a7w8//n1roUwMiGdmHubGXO3FvCUSsThJ7qUxmZfUI16KOY+llqgzj4jLONUzN3+9t998uy0nze1pgyfJFKEk/nu/dvo7o9w/UVLHb2iHS0sk1yP4ia5/+ZoRqU0OLggw/LcK0sDcYGKRNUgL89ejZkLYQfj0jObhVPpkpa4JzW/Mjx3Z0MOw9oVU/EhLV5IKU232OuuF/Zz1e82AA6GZWy++1haRwf8PkoD/kEhTIuX93dw6/NvcOWpjSyY4mxP9dEWVA+291ES8GUlFdJ7vzzNEMoCfvrDCXG3pglVlgTii4LhaGxQWGZFjsIw+UA6MffWngFqygLxWaJKqUSnRpfaKdi9vvi+PCHu6W33qds2x2875Zlm0jraGFY9NBXya/e8Rl1lKVevn5+peUMYLea+9XAXc2orXBn2kkzuvzmaUSkN+ggNCssYtytK/PEfXiSmBoVlshHTy1cknZi72R8lMQmIhDftUoWqzxaWsU7mpTm66rKT7lATO25kZo21t701rNrO0e5+ntvdypWnznFlvsFI7Qci0RjP72nl5GOGn8HqJNpzzwNKA75BnntfOErAZwwQiIdlYmpQfNZajXd7lFc+Eu9XMoJotfYMMKmyJDHDE/dDJfYFVWtfbjUpGwvj0HbH4tj2BnRjFfdUnvvze1oBWHNsfaqnZIz9M0zmUEeI3oEoy6Zn56o6998czaiUBvxmjrXxhekLRykP+hGRhOcejcVbxoKRm3z7h9dw05Un5sJkT5NOc6fWHqNEfJDnHnVX3BNhGaNvvE9ymy3zj0+uBcbWYM1pMgnLpPLctx7uQsRoReEGAdvVVzLWjGOrLbfbaHHPA0oCPpRKiFEoHKM06McnCe8zElVDcqJPbKzNyqp8vpFoRjX8NlbzK3tXQtfDMvF4bSw+JSqXtQmzzDTWdGLu9j4uTh4f+4LqWBfnW7oHuOflQ/T0J2bg7jjSzezaijEVZo2FkRZUD7RZzf20uGtM4hPV4+IepbzEZ/RsN8U9HBu8oKoZHn8aYRmrc5+9K6Hbnrvf5rn3haM5D8nY3/toHLClHKaqjs7UBhhbpSwkspv+uvlg/L7tR7qYP9m9UKV/hEysA1lMgwQt7nlB0Iylf+6ulwlHDa+u3PTcE2EZlXcFL7litEITMLKP7CKulNGKF9z03I3/o7EYh9pDTKnJbS+gdNYmLOziXu7gIrD9Kz3emgnrcx6IxNjd0sOCKVVOmJaSeK1CiglWB9v7qK8qzVpqshb3PMDy3O968QAPb22iz7xk9/lsnns05sxYsyIgkeee+nGljK6MQb9vkOc4EI0h4l7HSysEs/toLwfa+5iZpdjscIx2nOwcsJX5Oyle9gXVsZ407vnE6UBiQXh3Sw+RmHI8t93OSAuq+9v6shZvBy3ueYE93BKOKvoGTHEXiX9xLW9eMzqWNg/XOMxqkxz0Szzmq8wMlhK/z/U4+I4jXRzp9ILnbvyfTljGXgnqZB2APylJYCxY6cBW/HvrYaP5WDbEPdWC6p6jPTTWZa8dhxb3PMA+R3MgGuXZ3a08t7vVDMskFlm1uKfHSN4VJEbLBf2+uMBZ2TJuV4xeuGIahztDtPWGcy7uMhbP3a2wjG/8nnuy0P726b1Mm1DGvMnuh2WSHYfbNuxjf1sfjXWZtxVOFy3ueUDAlr9+sN1oPLR0es2gBdW+cNS1DIBCwzdKFWEk7rn7Bgmcm8OxLWbXVsR72EzNsbgDgzKyRqKlOzEpzMnCK38GYZlEamkMpRSvHezk/OXTXD1BD1eh+tk7XgJgrgM949NFi3seYE9xtLrcffrcBYPy3EPhqKNZCoXMaLHkgbjnnkjEs4qY3M5IsgvY5Br328KOht2BGAlrEhg4G3O3h2XGuqZkzz66fcN++sJRjm1wz2sH8PuHD8sAnLVosqv7t6PVIA+wZ2c0d5ltZ8tL4l5VzOxFosMy6WFdCKUTlhFbWCY5g8YN7MKY67AMWOI++nZdoXD8WDnZD8eXwfqG3+a5P7PrKABvO36GI3YNu88UqZDWHOPrzluUlcHYFlrc8wC7x2L1FK8s9cd/eFbnOy3u6TFSLjKkDssozJi7y567ImGTF8RdJL0F1e7+SLysfu0C58ZmZpKYZH3OkZji9eZuzphf73oaYnKcXynFUzuNE0u2x1xqcc8D7N6iFY+tLAnEF1S7+63RZnqMXjrIKDF3KywzqOLXypZx2XM/0pmIXXthLKJPBEbR9ke3N3Oks5+T5tbyzHXnjKnv+mhkknbq8wkiRrXxS/s7XF1ItUheUL1twz7+/XcbgcTIxmyhG4flAfapNu1m1Z01L1WpxGVflUPTbwod6wc4nEOa3LTLJ5bnrlyPudvFzAtjEX1peO5X3PQcYDT2cto7zbQ1bsAn/PbpvQCcPNf9boyJBVXj72d3tcYfm+diZWwqtOeeB9jjdJ2muJebee4AndZQYg8OxvAio43Zs9orW1kfIsLdLx5gf1uv69kyHz97HmcvmsxP3n28q/tJFyv01xkK8+sndxOJDu2PbuHUUGw7/gxPcPaTZUO1+2ERny3OD4kTdDauGpLRapAHTLLN7Oy3tZ21vreW4GvPPT1Gm8Rk9csvCxjiHo2p+KDlWZPcLUKpLgt6q5On6bl/9vaXuO/Vw6ycNZHjZ0+KP2xPkxxrS950cLIa+LhZEx17rZEI+CTe8qClu5/6qlLu/MipWdm3He255wGTKkt48FPr4n/7fYLfJ3EvwfLcKx2a4F7oW
Mft2jte4rWDnUMet3rnp6q0dGsKk1exqqD3thopuPYOiwC99m6QLlzVZJItA4l5wz9653FZmX4ExvfLWlA90hli1awJWc2SiduR9T1qxsW8yVVxYbEqViXJc3fjsrgQsV/q3/3i/iGPxz33FJkVxZaRZMXcLQ+9OzRY3Nt6B+K33ehtZKWtBv2ZCfMpWZp+BMb3K2YT91xlPWlxzyMsL8ZKx0vE3M2wjI65p4XdgQun6N5nxdxTCXmuho3nCquIyVqf6E7y3Nt6wvHbbnQl9Sd958dLQ1X2CsICPiEaM5yEtt5wziqNtbjnEXFxt2VxQKI6cKzDDIoV++V5JDZ0gdAKNaRq51Bs4m5VQVux7/227o8t3f289X+eiP/tRh639Vll2tIgm5lHPp8QjcVoMtNap2Q5v91Cq0EeYWlSMNlz7wtTGvC5noNdKNjjuKkyZqxaglRx0mIT93gVtBmWsSo9AS7+nyfjt7/5tuWDFlqdIlPPfd2CBt4w1wuyhbWgerjT6AOVq7CMFvc8wvJiLBEXWyqkjrenjz16kMJxj58sUwl5MS6oxmLQ2WdcHdo9d3snyLcsnerK/q1T73gdl9+8/yTnjEkTw3NXHDHFveDCMiIyUUTuEJGtIrJFRNa4ta9iwfI4E567cX9nX1jH28eAPSyjUpRftveGmThMdWgxeu4xpeLrOj0DkZTb1biUDZJcUJYP+MUb4u6mIvwQuE8pdZmIlADZ61JfoFialGpBVee4p489LJOqjsman5qKYsuWETHS+qx1iORUSAu3plNZWTJuDthwGr+5oNrU1U9pwEdNeW5+m67sVURqgLXAlQBKqQFgYKTnaEYn7rlbC6rmD6qjN+yJ9rD5gj0VMlUd00jiXnRhGV9i0HR9VQkt3QP0R6KUBrJzkjumoYpfvnc1a46ty8r+nMBvLqhaaZC5aiPh1jf1GKAZ+LWIvCgivxSRQV3qReQqEdkgIhuam5tdMqOwSKRCDv6/tXdAh2XGgP23lmoQRXtfmAnlJUPuh2IMywjtZi67lQ3T2x+N1wIA3P0f7lZfrl8yJa8ywfw+IargcEeIKTl0utwS9wBwPPBTpdRxQA/wX/YNlFI3KqVWK6VWNzQ41yK0kImHZUzP3fq/vVfH3MeCf1DMfSidI3ruxSfuzeaUpak1xkzSgWgsnlH0hQsWc5wLWTL5jN9nFDE1dfUzOYdtm90S9/3AfqXUs+bfd2CIvSYDJGlBtcSfEJpclDfnK75BYZlUC6oDekHVRAQOmaMd55jDnQcisXhl6oyJ5Tmzzav4RYiYYZlcjkp0RdyVUoeBfSKy0LzrHOA1N/ZVTFgeZ1zcbRkEDdU65p4ug1Ihk7Q9HI3RMxAd3nPPo6wNJ/BJok/KNDMs0x+JcbTbEPe6LFZ+5gt+n9DZF6F3IJrT36Wb1/IfB/5gZsrsAt7n4r6KguSwjD09TP/I0se+wJXst1uLh8N57sU2hNy+FDjR7E56oL2Pg2aOe11V6rWJYsbvk/jA8NqK3B0f19wQpdQmM6a+Qil1iVKqza19FQvJvWXsnnuNnsI0Jp7//Hpm1ZYPaftribvdc//hO1bFbxdbWMYewppoHpMrbnqO6+56GYD6Su1UJBPwC01dhrgP5yRkg+K6xsxzJCnP3S7uukJ1bDRUlxrpfEmue6rWAxevSgxVLstSCqBXiA+9Dg6t2A34JGc53F6mLOCPOwmTKgvQc9c4TzzmHhjab0OL+9gRhg7s6EzhudspKymun4zluVeWBAbPlMUQLi+MAvQa9tBdLufgFtc3Nc/ZfqQbSCyo2n9sejj22LEGUdhp7zMWCicOEysturCMqRAVpX7m1lcOcijqcuiVehl7odtw36NsoMU9D7FCB/b+2TXacx8zkmL4c8cIHSGh+MIyludeEQwwpaaM7V8/jytPbQSgVot7SuzfkYk5TFHW4p6HWDnGfp89LKM997EiIkOyZdrNsMxwJ8tMJwLlG1bYpaI0IVhW2CGX8WQvY/Wery4LuDKdKl20uOchVg9yu+debD1PnEAYWsTU0RemunT4H2WxxZitr5h9Pm+lKe7Vuio6JVZzuUk5DMmAFve8JC7uNi+y2ETHCXy+oY3DOnrDrrWvzUfiYRnbIqEVR46kaqmpiTtauc4k0qfePMT6UbnVZrVYECRlnnuq3OQ/fPBkthzqzJZpnsH6ilUMygAxxH3R1Pxpw5tNrEX3gC+3vrMW9zwkFg/L6AuvTPBJ6grVVIupp82r57R59dkxzEMkYu4JqThv2VRuunI1Zy6YnCuzPI0VlknVtyibaHXII6wY+6XHG0U1yXnHmjFiDn+20z6M516sJGLuCc/d5xPOXjRl0EQrTQIrLJProJX23POIHV8/j5hK/OAC+seVEdbwZzsjDeooZipKtFSki5Utkxzyyzb6E8sjRAS7s65j7plhZMsk/lZK0dE7/KCOYsSaYVpsDdMywYq5pxq+nk10WCaP0TH3zPCJDBqQHQrHGIjGtOduIxw1jk8+DajONfGYe47t0J9YHqM998wQGexdtZrFYVrcE4SjxgEq0eKeNvGYuw7LaDLhrIUNvH31rFybkZdIkue+/XAXAMc2VA73lKIjLu45rLTMN6ywTDDHx0yLe57z6/edlGsT8hajK2Ti78Odxji5WbUVuTHIg1g1FdpzTx/LYc91qwr9iWmKFp8MTnT/6+aDQO7Lxr1ERMfcx4x1tZNrz11/YpqiJbkr5FM7jwK6T4+dSEzH3MfK8pkTqK8q5dPnLhx9YxfRYRlN0eJL0RUSdJ8eO1bYqsSvUyHTpaYsyIYvrM+1GVrcNcVLsufeUF3K+sW6pN6OdXi0555/6E9MU7RI0iSm7lCEKt3GdhBWOp+Ouecf+hPTFC32fu7RmKIvHKVSi/sgrHOf9tzzD/2JaYoWe1fI7v4IgPbck7DCVlrc8w/9iWmKFpFEP/ceU9yr9SzaQcRj7rqIKe/Qn5imaLE3Dkt47rr1gB0dc89fXHNTRGQP0AVEgYhSarVb+9JoxoPY+rl3hQxxryzVKX92dLZM/uL2NehZSqkWl/eh0YwLvy8x1ao7HpbRnrsda01CN6nLP3SAUVO0BP0+wmYFZndIx9xT8ccPncwdG/frheY8xM1PTAEPiIgCfq6UutH+oIhcBVwFMHv2bBfN0GhSU+L3xfuAdPeHAZ0tk8yKmRNZMXNirs3QjAM3A2mnKaWOB84DPioia+0PKqVuVEqtVkqtbmhocNEMjSY1Qb+PcMQIPFgx9yrtuWsKBNfEXSl10Py/Cbgb0L1pNZ4iGBAGTM89vqCqZ4VqCgRXxF1EKkWk2roNnAu84sa+NJrxYnjuVljGaD2gFw41hYJbbsoU4G6zu14A+KNS6j6X9qXRjIuSgC/uueu+MppCw5Vvs1JqF7DSjdfWaJxi0ILqQIQKneOuKSB0ZYKmaAn6fcQURKIxevsjOt6uKSi0uGuKFmviUl84Su9AlPIS7blrCgct7pqixeoj090foS8cpUKLu6aA0OKuKVqsatSuUITeAS3umsJCi7umaEmIe5i+gSgVOuau
KSC0uGuKlvKg4amHwjF6ByLac9cUFFrcNUVL0Gxje6gjRFtvWC+oagoKLe6aoiXoM77+n7l9MwAVQR2W0RQOWtw1RUswMLjVQMCvWw9oCgct7pqiJeAb/PX3iRZ3TeGgxV1TtASTPPWAbhqmKSC0uGuKlqA/yXPX4q4pILS4a4qW5Bi7DrlrCgkt7pqipSTJc9e93DWFhBZ3TdESSBL3C1dMz5ElGo3zaHHXFC32BdXPn7+YSZUlObRGo3EWLe6aoiVoS4WsKdcFTJrCQou7pmixZ8dMKA/m0BKNxnm0uGs0QI0Wd02BocVdo0F77prCQ4u7RgPUlGlx1xQWWtw1GqChujTXJmg0jqLFXaMByoK6l7umsNDirtFoNAWIFneNRqMpQFyr3BARP7ABOKCUutCt/Wg0mTCnroLZtRW5NkOjcRw3y/KuBrYANS7uQ6PJiEevPSvXJmg0ruBKWEZEZgIXAL904/U1Go1GMzJuxdx/AHwWiA23gYhcJSIbRGRDc3OzS2ZoNBpNceK4uIvIhUCTUmrjSNsppW5USq1WSq1uaGhw2gyNRqMpatzw3E8DLhKRPcCtwNki8nsX9qPRaDSaYXBc3JVS1ymlZiqlGoF3AP9USr3H6f1oNBqNZnh0nrtGo9EUIK5OKFBKPQI84uY+NBqNRjMUUUrl2gZEpBnYO86n1wMtDprjNNq+zPCyfV62DbR9meJl+yzb5iilUmakeELcM0FENiilVufajuHQ9mWGl+3zsm2g7csUL9uXjm065q7RaDQFiBZ3jUajKUAKQdxvzLUBo6Dtywwv2+dl20Dblyletm9U2/I+5q7RaDSaoRSC567RaDSaJLS4azQaTQGixV2j0WgKkLwQdxH5tIica96WXNuTjBdtstDHLjP08Rs/+tiNHyeOnafFXUTOFZH7gf8E3gugPLQCLCIXi8hvgJW5tiUZfewyQx+/8aOP3fhx8ti52ltmPJhnqSDwJWAd8E2gBDhRRIJAJJdfFBERpZQSkbOArwFhYI2I7FVKteXKLss29LEbN/r4ZWYb+tiN2zbcOHZKKc/8w0zNNG+faLu9DtjpMfsagWnA2cDNwDoP2aaPnT5++tgV+bHzTFhGRD4G3CUinxSRaUqp5837g0qpR4FdInKeR+ybqpTao5Q6pJT6J3AEWCciMzxgmz52mdmnj9/4bdPHbvy2OX7sPCHuInIpcAXwI2AF8HkRseJhERGpxegaGfWIfV8QkVW2Tf4ALABOTnqe6ws2+tg5bp8+fuO3TR+78dvm+LHzhLhjHNyfKqUeBq4HdgNXg7GYoJRqBcqBswBEJNt2p7LvE9aDSqmXgOeBZSJytoj8p2V7jmzTxy4z+/TxG79t+tiN3zZHj11Oxd12htwFvAtAKbUXuAeoFJGLbJv/HjhJRMqUUjEP2ncL8EHgTxi9ll31APSxy6p9rh+/5PfrpeOXoW362I3ftoyOXVbFXURWi8hk62/bGfIOoFdELjb/PoQxwWmJ7WCUYwzcdu0ST0TWi8gJY7VPRKqAHwIvAyuUUtcmPT9ntpn3ZePYTbDdFi8du0zsM+9z/fiRlLnmseM3LtvM+7Jx7ILjsS9Lx25ctpn3ZXTssiLuIrJURJ4CvgxMtN1vvYk24G7gI+YPrwOoAkptB+PPSqlfKKXCLth3nIjca9owbwz2lZn2hYCrlVIXKKUOecS2bB27k0Xkz8AvROT9IlKqlFIi4h/FPtePXYb2Zev4nSIifwC+IiLzLbtExBLUXH73xmtbto7dGhG5HfiuiCzx2LEbr22OHbtsee5XA3crpd6qlNoORgzJ9ibKgfsxzl43ish04DggYr2AUsrxM7+I+EXkRuAXGC00/wgsNh8LpGFf2LQtopRq8phtrh47044VwP9ieCF3YKSXzUvaZ9aPnUP2ZeP4LQN+DPwNaAKuIlG4Yu0/J8cvQ9uycewmA/8D/B04iqEx70/TPrePXSa2OXfslPt5nPXAz4Fy8+9LgZlAlfn3DeabPA6oNf9+BPgJ4M+CfW+32fZm4FGMM7v1+PW5ss/Ltpn7/yBwq3l7EoaAVpNoJf01bd+I9v078DvzdiXwFeBB4Jhc2+dl28z9nw3cYrPvzRgnokXmfTnTFa/Y5sYbe6f5RXir+Xc58BpwIcYCwX3Ar4GfYsSj/gjMS3qNChcP/DuBrwIXJd0vwHoMT7nWvG+yad+x2bDPy7YlfbYXm39PAfqBbwD7gWeAm4DPAGU5+mzzwb6LzL9XAQ8D882/v4xxAvqKKQrZ/u550jbztdcBJ9v+rgN2WDZgiOSXgW8DFVk+dp60zck3KMCHgReB9wHbgA+Zj10DvAFcbv49A3gWOMf2fJ9bX4wR7Hsf5hWEuc1MjFXs6Sme75p9XrZtBPuuMh+bC3wHeK/59zqMy9HjtX3D2rcdI8d5CoYH/Djwfxje3TuA79ltyvJ3zzO2ma9fDdwFtGKcmCfZHrsB+IFlB3A6NgcoC8fOs7Yp5WCFqjKsXQN8Syn1a+CjwFki8hbzjQfMLwxKqQMYIYYgxOPvrqboDWPfemCttTiplNqPcdK5zP5ct+3zsm0j2HemiJynlNqNEcc+YG6+ETiIIRravtT2/QdwLrBKKfVFDHG9WSl1IfA6sNSyKQffPc/YZjIA/BN4D8bn9nbbY3cCi0RkvWnHURJXa9mwz8u2ZSbuIvJeEVknRjUVwBZghrng9yDwEnAmxkH4OPBeEVklIh/BEK/dAG69yTTtOx3DK0aMJj07gB7767hhn5dtS9O+zRgCOhkjfvgl80T0DmAZ0KLtG/XzPUtEZiqlXlVK/Z+53dnAM7aTeq6+ezmxLcm+iUqpfuCXGPH+7cBqEVlobroZI1Xwv0VkHnAOxkk76JZ9XrYtmTF3hTQ/2KkYcaMYsBMj+f4jwD5gOYantBXjzf0AWKKUulNESoF/BZZihGi2OfEmMrDvT8B/Yyz67lNKhcXIfW102i6v25aBfdOVUj8XkXXAvRjfqfcrozBD25fe57tfRE7CiMnGMEJKqlhsG8G+q0TkaqVUi7nN0xjtAv4V+JopkDebJ/DrgIUYoeD2YrFtRMYYY/Kb/y8Afm/eDmCs8v4G46x0E3A5MMF8/GbgBttryFj2mSX7vuK2fV62LQP7foPxRcZ8fKq2b8yf71fN2/W41KHQy7aNYt+PgTuTtr3UtHsexsKuz7y/pNhsG+1fWp67GIn3XwX8IvJ3oAazakopFRGju9khYAnG2e0SjHDCNzHOdE9br6XMd+skDtj3nFv2edk2B+yLYmSgoIxCi8PavjF/vs+a27ZgrEMVhW1p2vcJ4KCIrFNGl0SUUneLyGKMrLsqjN4rW5RSA8ViW7qMGnM3L2c3YuQJv06i0f1Z5uUayrgE+SrwbWXE7G4ETheRZ83nPeKK9R63z8u2afsK2z4v2zYG+5Rp3/W2570d+DxGmuYKpdSWYrJtTKRxWXIGZgqj+fdPgI8AVwIbzft8GDGp24FG876JwAy3Lz28bJ+XbdP2FbZ9XrZtHPbdBsy1Pe+MYrVtLP/SyZbZCNwmiV4cTwK
zlVI3Y1yyfFwZHsBMjHFQewCUUu3KSHl0Gy/b52XbtH2FbZ+XbRurfVFlpLSilHpcKfV4EduWNqOKu1KqVynVrxJ9Dt4ENJu33wcsFpG/YbTOfMEdM/PTPi/bpu0rbPu8bJvX7fOybWMh7VRI8yymMBLx/2Le3QV8DiNveHeWzvgp8bJ9XrYNtH2Z4mX7vGwbeNs+L9uWDmMpYophpEy1ACvMM9cXgZhS6gkPvEkv2+dl20Dblylets/LtoG37fOybaMzlgA9cArGG34C+MBYnpuNf162z8u2afsK2z4v2+Z1+7xs22j/rNanaSEiMzEKHf6fMkpvPYWX7fOybaDtyxQv2+dl28Db9nnZttEYk7hrNBqNJj/I6YBsjUaj0biDFneNRqMpQLS4azQaTQGixV2j0WgKEC3uGo1GU4BocdcULCJyvYh8ZoTHLxGRJWm8zqDtROSrIrLeKTs1GjfQ4q4pZi7B6GU+pu2UUl9SRotcjcazaHHXFBQi8nkR2SYiD2KMNkNEPiQiz4vIZhG5U0QqRORU4CLguyKySUSONf/dJyIbReRxEVk0zHY3i8hl5mvvEZFviMjTIrJBRI4XkftFZKeIfNhm17WmDS+JyFdycGg0RcaYZ6hqNF5FRE7AGIB9HMZ3+wWM9q13KaV+YW5zA0YZ+Y9F5C/A35RSd5iPPQR8WCm1Q0ROBn6ilDo7xXbJu96nlFojIv+NMZ7uNKAMeBX4mYicC8wHTsIYkvwXEVmrlHrMtYOhKXq0uGsKiTOAu5VSvQCmKAMsM0V9Isb4s/uTnyjG8PFTgdtt4l2a5n6t/bwMVCmluoAuEQmJyETgXPPfi+Z2VRhir8Vd4xpa3DWFRqp+GjcDlyilNovIlcCZKbbxAe1KqVXj2KfVcyRmu239HcDw1r+plPr5OF5boxkXOuauKSQeAy4VkXIRqQbeat5fDRwSkSDwbtv2XeZjKKU6gd3mHEzEYGXyduPkfuD95tUBIjJDRCZn8HoazahocdcUDEqpF4A/AZuAOwFr5NkXgWeBfwBbbU+5FbhWRF4UkWMxhP8DIrIZI15+8TDbjdWuB4A/Ak+LyMvAHWR2stBoRkV3hdRoNJoCRHvuGo1GU4BocddoNJoCRIu7RqPRFCBa3DUajaYA0eKu0Wg0BYgWd41GoylAtLhrNBpNAfL/AQJxdyJ5biNUAAAAAElFTkSuQmCC\n", 324 | "text/plain": [ 325 | "
" 326 | ] 327 | }, 328 | "metadata": { 329 | "needs_background": "light" 330 | }, 331 | "output_type": "display_data" 332 | } 333 | ], 334 | "source": [ 335 | "ax = dvs['464222092403801']['74007_72019_00003'].plot()" 336 | ] 337 | }, 338 | { 339 | "cell_type": "markdown", 340 | "metadata": {}, 341 | "source": [ 342 | "### get a single site" 343 | ] 344 | }, 345 | { 346 | "cell_type": "code", 347 | "execution_count": 10, 348 | "metadata": {}, 349 | "outputs": [ 350 | { 351 | "name": "stdout", 352 | "output_type": "stream", 353 | "text": [ 354 | "http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=464322092401401&startDT=1880-01-01¶meterCd=72019\n" 355 | ] 356 | }, 357 | { 358 | "data": { 359 | "text/html": [ 360 | "
\n", 361 | "\n", 374 | "\n", 375 | " \n", 376 | " \n", 377 | " \n", 378 | " \n", 379 | " \n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | " \n", 385 | " \n", 386 | " \n", 387 | " \n", 388 | " \n", 389 | " \n", 390 | " \n", 391 | " \n", 392 | " \n", 393 | " \n", 394 | " \n", 395 | " \n", 396 | " \n", 397 | " \n", 398 | " \n", 399 | " \n", 400 | " \n", 401 | " \n", 402 | " \n", 403 | " \n", 404 | " \n", 405 | " \n", 406 | " \n", 407 | " \n", 408 | " \n", 409 | " \n", 410 | " \n", 411 | " \n", 412 | " \n", 413 | " \n", 414 | " \n", 415 | " \n", 416 | " \n", 417 | " \n", 418 | " \n", 419 | " \n", 420 | " \n", 421 | " \n", 422 | " \n", 423 | " \n", 424 | " \n", 425 | " \n", 426 | " \n", 427 | " \n", 428 | " \n", 429 | " \n", 430 | " \n", 431 | " \n", 432 | " \n", 433 | " \n", 434 | " \n", 435 | "
agency_cdsite_nodatetime74012_72019_0000374012_72019_00003_cd
datetime
2006-07-01USGS4643220924014012006-07-015.06A
2006-07-02USGS4643220924014012006-07-025.10A
2006-07-03USGS4643220924014012006-07-035.12A
2006-07-04USGS4643220924014012006-07-045.15A
2006-07-05USGS4643220924014012006-07-055.19A
\n", 436 | "
" 437 | ], 438 | "text/plain": [ 439 | " agency_cd site_no datetime 74012_72019_00003 \\\n", 440 | "datetime \n", 441 | "2006-07-01 USGS 464322092401401 2006-07-01 5.06 \n", 442 | "2006-07-02 USGS 464322092401401 2006-07-02 5.10 \n", 443 | "2006-07-03 USGS 464322092401401 2006-07-03 5.12 \n", 444 | "2006-07-04 USGS 464322092401401 2006-07-04 5.15 \n", 445 | "2006-07-05 USGS 464322092401401 2006-07-05 5.19 \n", 446 | "\n", 447 | " 74012_72019_00003_cd \n", 448 | "datetime \n", 449 | "2006-07-01 A \n", 450 | "2006-07-02 A \n", 451 | "2006-07-03 A \n", 452 | "2006-07-04 A \n", 453 | "2006-07-05 A " 454 | ] 455 | }, 456 | "execution_count": 10, 457 | "metadata": {}, 458 | "output_type": "execute_result" 459 | } 460 | ], 461 | "source": [ 462 | "df = nwis.get_dvs('464322092401401', 'gwlevels')\n", 463 | "df.head()" 464 | ] 465 | }, 466 | { 467 | "cell_type": "markdown", 468 | "metadata": {}, 469 | "source": [ 470 | "### just get the url for dvs for a site\n", 471 | "(`start_date` is 1880-01-01 by default; `end_date` is `None` by default)" 472 | ] 473 | }, 474 | { 475 | "cell_type": "code", 476 | "execution_count": 11, 477 | "metadata": {}, 478 | "outputs": [ 479 | { 480 | "name": "stdout", 481 | "output_type": "stream", 482 | "text": [ 483 | "http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=464322092401401&startDT=1880-01-01¶meterCd=72019\n" 484 | ] 485 | }, 486 | { 487 | "data": { 488 | "text/plain": [ 489 | "'http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=464322092401401&startDT=1880-01-01¶meterCd=72019'" 490 | ] 491 | }, 492 | "execution_count": 11, 493 | "metadata": {}, 494 | "output_type": "execute_result" 495 | } 496 | ], 497 | "source": [ 498 | "url = nwis.make_dv_url('464322092401401', parameter_code=72019)\n", 499 | "url" 500 | ] 501 | }, 502 | { 503 | "cell_type": "markdown", 504 | "metadata": {}, 505 | "source": [ 506 | "## Fetch some miscellaneous measurements" 507 | ] 508 | }, 509 | { 510 | "cell_type": "code", 511 | "execution_count": 12, 512 | "metadata": {}, 513 | "outputs": [ 514 | { 515 | "data": { 516 | "text/plain": [ 517 | "['464205092364101',\n", 518 | " '464205092371801',\n", 519 | " '464222092403801',\n", 520 | " '464244092372701',\n", 521 | " '464321092402301']" 522 | ] 523 | }, 524 | "execution_count": 12, 525 | "metadata": {}, 526 | "output_type": "execute_result" 527 | } 528 | ], 529 | "source": [ 530 | "sites = gw_field_sites.site_no.tolist()[:5]\n", 531 | "sites" 532 | ] 533 | }, 534 | { 535 | "cell_type": "code", 536 | "execution_count": 13, 537 | "metadata": {}, 538 | "outputs": [ 539 | { 540 | "name": "stdout", 541 | "output_type": "stream", 542 | "text": [ 543 | "464205092364101\n", 544 | "http://nwis.waterdata.usgs.gov/nwis/gwlevels?site_no=464205092364101&agency_cd=USGS&format=rdb\n", 545 | "no data returned.\n", 546 | "464205092371801\n", 547 | "http://nwis.waterdata.usgs.gov/nwis/gwlevels?site_no=464205092371801&agency_cd=USGS&format=rdb\n", 548 | "no data returned.\n", 549 | "464222092403801\n", 550 | "http://nwis.waterdata.usgs.gov/nwis/gwlevels?site_no=464222092403801&agency_cd=USGS&format=rdb\n", 551 | "464244092372701\n", 552 | "http://nwis.waterdata.usgs.gov/nwis/gwlevels?site_no=464244092372701&agency_cd=USGS&format=rdb\n", 553 | "no data returned.\n", 554 | "464321092402301\n", 555 | "http://nwis.waterdata.usgs.gov/nwis/gwlevels?site_no=464321092402301&agency_cd=USGS&format=rdb\n" 556 | ] 557 | } 558 | ], 559 | "source": [ 560 | "fm = nwis.get_all_measurements(sites, txt='gwlevels')" 561 | ] 562 | }, 563 | { 564 | "cell_type": 
"code", 565 | "execution_count": 14, 566 | "metadata": {}, 567 | "outputs": [ 568 | { 569 | "data": { 570 | "text/html": [ 571 | "
\n", 572 | "\n", 585 | "\n", 586 | " \n", 587 | " \n", 588 | " \n", 589 | " \n", 590 | " \n", 591 | " \n", 592 | " \n", 593 | " \n", 594 | " \n", 595 | " \n", 596 | " \n", 597 | " \n", 598 | " \n", 599 | " \n", 600 | " \n", 601 | " \n", 602 | " \n", 603 | " \n", 604 | " \n", 605 | " \n", 606 | " \n", 607 | " \n", 608 | " \n", 609 | " \n", 610 | " \n", 611 | " \n", 612 | " \n", 613 | " \n", 614 | " \n", 615 | " \n", 616 | " \n", 617 | " \n", 618 | " \n", 619 | " \n", 620 | " \n", 621 | " \n", 622 | " \n", 623 | " \n", 624 | " \n", 625 | " \n", 626 | " \n", 627 | " \n", 628 | " \n", 629 | " \n", 630 | " \n", 631 | " \n", 632 | " \n", 633 | " \n", 634 | " \n", 635 | " \n", 636 | " \n", 637 | " \n", 638 | " \n", 639 | " \n", 640 | " \n", 641 | " \n", 642 | " \n", 643 | " \n", 644 | " \n", 645 | " \n", 646 | " \n", 647 | " \n", 648 | " \n", 649 | " \n", 650 | " \n", 651 | " \n", 652 | " \n", 653 | " \n", 654 | " \n", 655 | " \n", 656 | " \n", 657 | " \n", 658 | " \n", 659 | " \n", 660 | " \n", 661 | " \n", 662 | " \n", 663 | " \n", 664 | " \n", 665 | " \n", 666 | " \n", 667 | " \n", 668 | " \n", 669 | " \n", 670 | " \n", 671 | " \n", 672 | " \n", 673 | " \n", 674 | " \n", 675 | " \n", 676 | " \n", 677 | " \n", 678 | " \n", 679 | " \n", 680 | " \n", 681 | " \n", 682 | " \n", 683 | " \n", 684 | " \n", 685 | " \n", 686 | " \n", 687 | " \n", 688 | " \n", 689 | " \n", 690 | " \n", 691 | " \n", 692 | " \n", 693 | " \n", 694 | " \n", 695 | " \n", 696 | " \n", 697 | " \n", 698 | " \n", 699 | " \n", 700 | " \n", 701 | " \n", 702 | " \n", 703 | " \n", 704 | " \n", 705 | " \n", 706 | " \n", 707 | " \n", 708 | " \n", 709 | " \n", 710 | " \n", 711 | " \n", 712 | " \n", 713 | " \n", 714 | " \n", 715 | " \n", 716 | " \n", 717 | " \n", 718 | " \n", 719 | " \n", 720 | " \n", 721 | " \n", 722 | " \n", 723 | " \n", 724 | " \n", 725 | " \n", 726 | " \n", 727 | " \n", 728 | " \n", 729 | " \n", 730 | " \n", 731 | " \n", 732 | " \n", 733 | "
agency_cdsite_nosite_tp_cdlev_dtlev_tmlev_tz_cdlev_vasl_lev_vasl_datum_cdlev_status_cdlev_agency_cdlev_dt_acy_cdlev_acy_cdlev_src_cdlev_meth_cdlev_age_cdmeasurement_dt
site_nodatetime
4642220924038012006-06-01USGS464222092403801GW2006-06-0120:34UTCNaN1290.93NGVD291USGSmNaNSVA2006-06-01
2006-06-01USGS464222092403801GW2006-06-0120:34UTCNaN1291.37NAVD881USGSmNaNSVA2006-06-01
2006-06-01USGS464222092403801GW2006-06-0120:34UTC7.59NaNNaN1USGSmNaNSVA2006-06-01
2006-06-23USGS464222092403801GW2006-06-2312:58UTCNaN1290.22NGVD291USGSmNaNSVA2006-06-23
2006-06-23USGS464222092403801GW2006-06-2312:58UTCNaN1290.66NAVD881USGSmNaNSVA2006-06-23
\n", 734 | "
" 735 | ], 736 | "text/plain": [ 737 | " agency_cd site_no site_tp_cd lev_dt \\\n", 738 | "site_no datetime \n", 739 | "464222092403801 2006-06-01 USGS 464222092403801 GW 2006-06-01 \n", 740 | " 2006-06-01 USGS 464222092403801 GW 2006-06-01 \n", 741 | " 2006-06-01 USGS 464222092403801 GW 2006-06-01 \n", 742 | " 2006-06-23 USGS 464222092403801 GW 2006-06-23 \n", 743 | " 2006-06-23 USGS 464222092403801 GW 2006-06-23 \n", 744 | "\n", 745 | " lev_tm lev_tz_cd lev_va sl_lev_va sl_datum_cd \\\n", 746 | "site_no datetime \n", 747 | "464222092403801 2006-06-01 20:34 UTC NaN 1290.93 NGVD29 \n", 748 | " 2006-06-01 20:34 UTC NaN 1291.37 NAVD88 \n", 749 | " 2006-06-01 20:34 UTC 7.59 NaN NaN \n", 750 | " 2006-06-23 12:58 UTC NaN 1290.22 NGVD29 \n", 751 | " 2006-06-23 12:58 UTC NaN 1290.66 NAVD88 \n", 752 | "\n", 753 | " lev_status_cd lev_agency_cd lev_dt_acy_cd \\\n", 754 | "site_no datetime \n", 755 | "464222092403801 2006-06-01 1 USGS m \n", 756 | " 2006-06-01 1 USGS m \n", 757 | " 2006-06-01 1 USGS m \n", 758 | " 2006-06-23 1 USGS m \n", 759 | " 2006-06-23 1 USGS m \n", 760 | "\n", 761 | " lev_acy_cd lev_src_cd lev_meth_cd lev_age_cd \\\n", 762 | "site_no datetime \n", 763 | "464222092403801 2006-06-01 NaN S V A \n", 764 | " 2006-06-01 NaN S V A \n", 765 | " 2006-06-01 NaN S V A \n", 766 | " 2006-06-23 NaN S V A \n", 767 | " 2006-06-23 NaN S V A \n", 768 | "\n", 769 | " measurement_dt \n", 770 | "site_no datetime \n", 771 | "464222092403801 2006-06-01 2006-06-01 \n", 772 | " 2006-06-01 2006-06-01 \n", 773 | " 2006-06-01 2006-06-01 \n", 774 | " 2006-06-23 2006-06-23 \n", 775 | " 2006-06-23 2006-06-23 " 776 | ] 777 | }, 778 | "execution_count": 14, 779 | "metadata": {}, 780 | "output_type": "execute_result" 781 | } 782 | ], 783 | "source": [ 784 | "fm.head()" 785 | ] 786 | }, 787 | { 788 | "cell_type": "code", 789 | "execution_count": null, 790 | "metadata": {}, 791 | "outputs": [], 792 | "source": [] 793 | } 794 | ], 795 | "metadata": { 796 | "anaconda-cloud": {}, 797 | "kernelspec": { 798 | "display_name": "Python 3", 799 | "language": "python", 800 | "name": "python3" 801 | }, 802 | "language_info": { 803 | "codemirror_mode": { 804 | "name": "ipython", 805 | "version": 3 806 | }, 807 | "file_extension": ".py", 808 | "mimetype": "text/x-python", 809 | "name": "python", 810 | "nbconvert_exporter": "python", 811 | "pygments_lexer": "ipython3", 812 | "version": "3.8.6" 813 | } 814 | }, 815 | "nbformat": 4, 816 | "nbformat_minor": 1 817 | } 818 | -------------------------------------------------------------------------------- /examples/data/bbox.dbf: -------------------------------------------------------------------------------- 1 | {AidN 0 -------------------------------------------------------------------------------- /examples/data/bbox.prj: -------------------------------------------------------------------------------- 1 | PROJCS["NAD_1983_UTM_Zone_15N",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-93.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /examples/data/bbox.shp: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/examples/data/bbox.shp -------------------------------------------------------------------------------- /examples/data/bbox.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/examples/data/bbox.shx -------------------------------------------------------------------------------- /examples/data/pang.csv: -------------------------------------------------------------------------------- 1 | date,Q 2 | 1/1/70,0.422 3 | 1/2/70,0.426 4 | 1/3/70,0.421 5 | 1/4/70,0.398 6 | 1/5/70,0.389 7 | 1/6/70,0.395 8 | 1/7/70,0.364 9 | 1/8/70,0.379 10 | 1/9/70,0.593 11 | 1/10/70,0.775 12 | 1/11/70,1.02 13 | 1/12/70,0.74 14 | 1/13/70,0.63 15 | 1/14/70,0.988 16 | 1/15/70,0.74 17 | 1/16/70,0.708 18 | 1/17/70,0.628 19 | 1/18/70,0.657 20 | 1/19/70,0.633 21 | 1/20/70,0.628 22 | 1/21/70,0.645 23 | 1/22/70,0.784 24 | 1/23/70,1.55 25 | 1/24/70,1.18 26 | 1/25/70,0.937 27 | 1/26/70,0.99 28 | 1/27/70,0.82 29 | 1/28/70,0.786 30 | 1/29/70,0.856 31 | 1/30/70,1.28 32 | 1/31/70,0.916 33 | 2/1/70,0.883 34 | 2/2/70,0.941 35 | 2/3/70,0.879 36 | 2/4/70,0.851 37 | 2/5/70,0.797 38 | 2/6/70,0.756 39 | 2/7/70,0.776 40 | 2/8/70,0.781 41 | 2/9/70,0.805 42 | 2/10/70,0.729 43 | 2/11/70,0.706 44 | 2/12/70,0.813 45 | 2/13/70,0.749 46 | 2/14/70,0.708 47 | 2/15/70,0.667 48 | 2/16/70,0.673 49 | 2/17/70,0.879 50 | 2/18/70,0.907 51 | 2/19/70,1 52 | 2/20/70,0.975 53 | 2/21/70,1.04 54 | 2/22/70,1.04 55 | 2/23/70,0.982 56 | 2/24/70,0.856 57 | 2/25/70,0.806 58 | 2/26/70,0.761 59 | 2/27/70,0.748 60 | 2/28/70,0.731 61 | 3/1/70,0.733 62 | 3/2/70,0.737 63 | 3/3/70,0.7113 64 | 3/4/70,0.81 65 | 3/5/70,0.765 66 | 3/6/70,0.768 67 | 3/7/70,0.79 68 | 3/8/70,0.759 69 | 3/9/70,0.708 70 | 3/10/70,0.6983 71 | 3/11/70,0.978 72 | 3/12/70,0.997 73 | 3/13/70,0.859 74 | 3/14/70,0.798 75 | 3/15/70,0.7343 76 | 3/16/70,0.741 77 | 3/17/70,0.785 78 | 3/18/70,0.759 79 | 3/19/70,0.697 80 | 3/20/70,0.708 81 | 3/21/70,0.694 82 | 3/22/70,0.686 83 | 3/23/70,0.732 84 | 3/24/70,0.734 85 | 3/25/70,0.692 86 | 3/26/70,0.676 87 | 3/27/70,0.638 88 | 3/28/70,0.648 89 | 3/29/70,0.658 90 | 3/30/70,0.649 91 | 3/31/70,0.679 92 | 4/1/70,0.665 93 | 4/2/70,0.633 94 | 4/3/70,0.637 95 | 4/4/70,0.63 96 | 4/5/70,0.657 97 | 4/6/70,0.777 98 | 4/7/70,0.724 99 | 4/8/70,0.699 100 | 4/9/70,0.675 101 | 4/10/70,0.669 102 | 4/11/70,0.653 103 | 4/12/70,0.863 104 | 4/13/70,0.806 105 | 4/14/70,0.748 106 | 4/15/70,0.715 107 | 4/16/70,0.706 108 | 4/17/70,0.735 109 | 4/18/70,0.71 110 | 4/19/70,0.689 111 | 4/20/70,0.649 112 | 4/21/70,0.682 113 | 4/22/70,0.672 114 | 4/23/70,0.644 115 | 4/24/70,0.615 116 | 4/25/70,0.669 117 | 4/26/70,0.647 118 | 4/27/70,0.622 119 | 4/28/70,0.622 120 | 4/29/70,0.605 121 | 4/30/70,0.607 122 | 5/1/70,0.596 123 | 5/2/70,0.577 124 | 5/3/70,0.58 125 | 5/4/70,0.592 126 | 5/5/70,0.58 127 | 5/6/70,0.568 128 | 5/7/70,0.628 129 | 5/8/70,0.572 130 | 5/9/70,0.619 131 | 5/10/70,0.634 132 | 5/11/70,0.608 133 | 5/12/70,0.641 134 | 5/13/70,0.624 135 | 5/14/70,0.593 136 | 5/15/70,0.596 137 | 5/16/70,0.563 138 | 5/17/70,0.564 139 | 5/18/70,0.545 140 | 5/19/70,0.581 141 | 5/20/70,0.506 142 | 5/21/70,0.526 143 | 5/22/70,0.536 144 | 5/23/70,0.418 145 | 5/24/70,0.487 146 | 5/25/70,0.488 147 | 5/26/70,0.538 148 | 5/27/70,0.491 149 | 5/28/70,0.517 150 | 5/29/70,0.486 151 | 5/30/70,0.475 152 | 5/31/70,0.485 153 | 6/1/70,0.466 154 | 6/2/70,0.439 155 | 6/3/70,0.449 156 | 6/4/70,0.468 157 | 
6/5/70,0.44 158 | 6/6/70,0.431 159 | 6/7/70,0.426 160 | 6/8/70,0.404 161 | 6/9/70,0.424 162 | 6/10/70,0.428 163 | 6/11/70,0.431 164 | 6/12/70,0.414 165 | 6/13/70,0.407 166 | 6/14/70,0.367 167 | 6/15/70,0.387 168 | 6/16/70,0.389 169 | 6/17/70,0.403 170 | 6/18/70,0.408 171 | 6/19/70,0.382 172 | 6/20/70,0.378 173 | 6/21/70,0.355 174 | 6/22/70,0.411 175 | 6/23/70,0.391 176 | 6/24/70,0.443 177 | 6/25/70,0.379 178 | 6/26/70,0.373 179 | 6/27/70,0.378 180 | 6/28/70,0.359 181 | 6/29/70,0.358 182 | 6/30/70,0.37 183 | 7/1/70,0.39 184 | 7/2/70,0.357 185 | 7/3/70,0.358 186 | 7/4/70,0.359 187 | 7/5/70,0.347 188 | 7/6/70,0.341 189 | 7/7/70,0.312 190 | 7/8/70,0.371 191 | 7/9/70,0.347 192 | 7/10/70,0.332 193 | 7/11/70,0.335 194 | 7/12/70,0.316 195 | 7/13/70,0.308 196 | 7/14/70,0.359 197 | 7/15/70,0.354 198 | 7/16/70,0.352 199 | 7/17/70,0.359 200 | 7/18/70,0.34 201 | 7/19/70,0.379 202 | 7/20/70,0.355 203 | 7/21/70,0.347 204 | 7/22/70,0.348 205 | 7/23/70,0.34 206 | 7/24/70,0.357 207 | 7/25/70,0.352 208 | 7/26/70,0.358 209 | 7/27/70,0.333 210 | 7/28/70,0.349 211 | 7/29/70,0.348 212 | 7/30/70,0.328 213 | 7/31/70,0.291 214 | 8/1/70,0.31 215 | 8/2/70,0.278 216 | 8/3/70,0.286 217 | 8/4/70,0.281 218 | 8/5/70,0.29 219 | 8/6/70,0.317 220 | 8/7/70,0.323 221 | 8/8/70,0.313 222 | 8/9/70,0.31 223 | 8/10/70,0.319 224 | 8/11/70,0.315 225 | 8/12/70,0.305 226 | 8/13/70,0.298 227 | 8/14/70,0.299 228 | 8/15/70,0.32 229 | 8/16/70,0.313 230 | 8/17/70,0.291 231 | 8/18/70,0.301 232 | 8/19/70,0.427 233 | 8/20/70,0.384 234 | 8/21/70,0.359 235 | 8/22/70,0.349 236 | 8/23/70,0.368 237 | 8/24/70,0.34 238 | 8/25/70,0.32 239 | 8/26/70,0.31 240 | 8/27/70,0.315 241 | 8/28/70,0.311 242 | 8/29/70,0.306 243 | 8/30/70,0.293 244 | 8/31/70,0.28 245 | -------------------------------------------------------------------------------- /examples/data/pangbf.csv: -------------------------------------------------------------------------------- 1 | Date of turning point,Discharge (cumecs),Time span between turning points (days),Average discharge (cumecs),Increment of Baseflow (cumec-days) 2 | 7/1,.364,10,.496,4.96 3 | 17/1,.628,4,.637,2.55 4 | 21/1,.645,16,.701,11.21 5 | 6/2,.756,5,.731,3.66 6 | 11/2,.706,4,.687,2.75 7 | 15/2,.667,13,.699,9.09 8 | 28/2,.731,3,.721,2.16 9 | 3/3,.711,7,.705,4.93 10 | 10/3,.698,5,.716,3.58 11 | 15/3,.734,6,.714,4.28 12 | 21/3,.694,5,.685,3.43 13 | 26/3,.676,1,.657,0.66 14 | 27/3,.638,8,.634,5.07 15 | 4/4,.630,6,.650,3.90 16 | 10/4,.669,1,.661,0.66 17 | 11/4,.653,9,.651,5.86 18 | 20/4,.649,4,.632,2.53 19 | 24/4,.615,5,.610,3.05 20 | 29/4,.605,3,.591,1.77 21 | 2/5,.577,4,.573,2.29 22 | 6/5,.568,17,.493,8.38 23 | 23/5,.418,10,.429,4.29 24 | 2/6,.439,6,.422,2.53 25 | 8/6,.404,6,.386,2.31 26 | 14/6,.367,5,.375,1.87 27 | 19/6,.382,2,.369,0.74 28 | 21/6,.355,8,.366,2.85 29 | 29/6,.358,8,.335,2.68 30 | 7/7,.312,6,.310,1.86 31 | 13/7,.308,5,.324,1.62 32 | 18/7,.340,5,.340,1.70 33 | 23/7,.340,10,.309,3.09 34 | 2/8,.278,2,.280,0.56 35 | 4/8,.281,9,.290,2.61 36 | 13/8,.298,4,.295,1.18 37 | 17/8,.291,,, 38 | -------------------------------------------------------------------------------- /examples/data/pangresults.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/examples/data/pangresults.png -------------------------------------------------------------------------------- /long_description.rst: -------------------------------------------------------------------------------- 1 | Package for getting and 
processing stream flow and groundwater level measurements from the USGS National Water Information System (NWIS). -------------------------------------------------------------------------------- /pydrograph/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import get_versions 2 | __version__ = get_versions()['version'] 3 | del get_versions 4 | 5 | from .nwis import Nwis 6 | from .attributes import gw_attributes, streamflow_attributes 7 | from .baseflow import get_upstream_area, IHmethod, WI_statewide_eqn 8 | from . import _version 9 | __version__ = _version.get_versions()['version'] 10 | -------------------------------------------------------------------------------- /pydrograph/_version.py: -------------------------------------------------------------------------------- 1 | 2 | # This file helps to compute a version number in source trees obtained from 3 | # git-archive tarball (such as those provided by githubs download-from-tag 4 | # feature). Distribution tarballs (built by setup.py sdist) and build 5 | # directories (produced by setup.py build) will contain a much shorter file 6 | # that just contains the computed version number. 7 | 8 | # This file is released into the public domain. 9 | # Generated by versioneer-0.29 10 | # https://github.com/python-versioneer/python-versioneer 11 | 12 | """Git implementation of _version.py.""" 13 | 14 | import errno 15 | import os 16 | import re 17 | import subprocess 18 | import sys 19 | from typing import Any, Callable, Dict, List, Optional, Tuple 20 | import functools 21 | 22 | 23 | def get_keywords() -> Dict[str, str]: 24 | """Get the keywords needed to look up the version information.""" 25 | # these strings will be replaced by git during git-archive. 26 | # setup.py/versioneer.py will grep for the variable names, so they must 27 | # each be defined on a line of their own. _version.py will just call 28 | # get_keywords(). 
29 | git_refnames = " (HEAD -> develop)" 30 | git_full = "615ef85a5308350b6c0a8e7c4009653fc521777c" 31 | git_date = "2023-11-17 13:50:40 -0600" 32 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 33 | return keywords 34 | 35 | 36 | class VersioneerConfig: 37 | """Container for Versioneer configuration parameters.""" 38 | 39 | VCS: str 40 | style: str 41 | tag_prefix: str 42 | parentdir_prefix: str 43 | versionfile_source: str 44 | verbose: bool 45 | 46 | 47 | def get_config() -> VersioneerConfig: 48 | """Create, populate and return the VersioneerConfig() object.""" 49 | # these strings are filled in when 'setup.py versioneer' creates 50 | # _version.py 51 | cfg = VersioneerConfig() 52 | cfg.VCS = "git" 53 | cfg.style = "pep440-post" 54 | cfg.tag_prefix = "v" 55 | cfg.parentdir_prefix = "None" 56 | cfg.versionfile_source = "pydrograph/_version.py" 57 | cfg.verbose = False 58 | return cfg 59 | 60 | 61 | class NotThisMethod(Exception): 62 | """Exception raised if a method is not valid for the current scenario.""" 63 | 64 | 65 | LONG_VERSION_PY: Dict[str, str] = {} 66 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 67 | 68 | 69 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator 70 | """Create decorator to mark a method as the handler of a VCS.""" 71 | def decorate(f: Callable) -> Callable: 72 | """Store f in HANDLERS[vcs][method].""" 73 | if vcs not in HANDLERS: 74 | HANDLERS[vcs] = {} 75 | HANDLERS[vcs][method] = f 76 | return f 77 | return decorate 78 | 79 | 80 | def run_command( 81 | commands: List[str], 82 | args: List[str], 83 | cwd: Optional[str] = None, 84 | verbose: bool = False, 85 | hide_stderr: bool = False, 86 | env: Optional[Dict[str, str]] = None, 87 | ) -> Tuple[Optional[str], Optional[int]]: 88 | """Call the given command(s).""" 89 | assert isinstance(commands, list) 90 | process = None 91 | 92 | popen_kwargs: Dict[str, Any] = {} 93 | if sys.platform == "win32": 94 | # This hides the console window if pythonw.exe is used 95 | startupinfo = subprocess.STARTUPINFO() 96 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW 97 | popen_kwargs["startupinfo"] = startupinfo 98 | 99 | for command in commands: 100 | try: 101 | dispcmd = str([command] + args) 102 | # remember shell=False, so use git.cmd on windows, not just git 103 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, 104 | stdout=subprocess.PIPE, 105 | stderr=(subprocess.PIPE if hide_stderr 106 | else None), **popen_kwargs) 107 | break 108 | except OSError as e: 109 | if e.errno == errno.ENOENT: 110 | continue 111 | if verbose: 112 | print("unable to run %s" % dispcmd) 113 | print(e) 114 | return None, None 115 | else: 116 | if verbose: 117 | print("unable to find command, tried %s" % (commands,)) 118 | return None, None 119 | stdout = process.communicate()[0].strip().decode() 120 | if process.returncode != 0: 121 | if verbose: 122 | print("unable to run %s (error)" % dispcmd) 123 | print("stdout was %s" % stdout) 124 | return None, process.returncode 125 | return stdout, process.returncode 126 | 127 | 128 | def versions_from_parentdir( 129 | parentdir_prefix: str, 130 | root: str, 131 | verbose: bool, 132 | ) -> Dict[str, Any]: 133 | """Try to determine the version from the parent directory name. 134 | 135 | Source tarballs conventionally unpack into a directory that includes both 136 | the project name and a version string. 
We will also support searching up 137 | two directory levels for an appropriately named parent directory 138 | """ 139 | rootdirs = [] 140 | 141 | for _ in range(3): 142 | dirname = os.path.basename(root) 143 | if dirname.startswith(parentdir_prefix): 144 | return {"version": dirname[len(parentdir_prefix):], 145 | "full-revisionid": None, 146 | "dirty": False, "error": None, "date": None} 147 | rootdirs.append(root) 148 | root = os.path.dirname(root) # up a level 149 | 150 | if verbose: 151 | print("Tried directories %s but none started with prefix %s" % 152 | (str(rootdirs), parentdir_prefix)) 153 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 154 | 155 | 156 | @register_vcs_handler("git", "get_keywords") 157 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: 158 | """Extract version information from the given file.""" 159 | # the code embedded in _version.py can just fetch the value of these 160 | # keywords. When used from setup.py, we don't want to import _version.py, 161 | # so we do it with a regexp instead. This function is not used from 162 | # _version.py. 163 | keywords: Dict[str, str] = {} 164 | try: 165 | with open(versionfile_abs, "r") as fobj: 166 | for line in fobj: 167 | if line.strip().startswith("git_refnames ="): 168 | mo = re.search(r'=\s*"(.*)"', line) 169 | if mo: 170 | keywords["refnames"] = mo.group(1) 171 | if line.strip().startswith("git_full ="): 172 | mo = re.search(r'=\s*"(.*)"', line) 173 | if mo: 174 | keywords["full"] = mo.group(1) 175 | if line.strip().startswith("git_date ="): 176 | mo = re.search(r'=\s*"(.*)"', line) 177 | if mo: 178 | keywords["date"] = mo.group(1) 179 | except OSError: 180 | pass 181 | return keywords 182 | 183 | 184 | @register_vcs_handler("git", "keywords") 185 | def git_versions_from_keywords( 186 | keywords: Dict[str, str], 187 | tag_prefix: str, 188 | verbose: bool, 189 | ) -> Dict[str, Any]: 190 | """Get version information from git keywords.""" 191 | if "refnames" not in keywords: 192 | raise NotThisMethod("Short version file found") 193 | date = keywords.get("date") 194 | if date is not None: 195 | # Use only the last line. Previous lines may contain GPG signature 196 | # information. 197 | date = date.splitlines()[-1] 198 | 199 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 200 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 201 | # -like" string, which we must then edit to make compliant), because 202 | # it's been around since git-1.5.3, and it's too difficult to 203 | # discover which version we're using, or to work around using an 204 | # older one. 205 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 206 | refnames = keywords["refnames"].strip() 207 | if refnames.startswith("$Format"): 208 | if verbose: 209 | print("keywords are unexpanded, not using") 210 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 211 | refs = {r.strip() for r in refnames.strip("()").split(",")} 212 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 213 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 214 | TAG = "tag: " 215 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} 216 | if not tags: 217 | # Either we're using git < 1.8.3, or there really are no tags. We use 218 | # a heuristic: assume all version tags have a digit. 
The old git %d 219 | # expansion behaves like git log --decorate=short and strips out the 220 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 221 | # between branches and tags. By ignoring refnames without digits, we 222 | # filter out many common branch names like "release" and 223 | # "stabilization", as well as "HEAD" and "master". 224 | tags = {r for r in refs if re.search(r'\d', r)} 225 | if verbose: 226 | print("discarding '%s', no digits" % ",".join(refs - tags)) 227 | if verbose: 228 | print("likely tags: %s" % ",".join(sorted(tags))) 229 | for ref in sorted(tags): 230 | # sorting will prefer e.g. "2.0" over "2.0rc1" 231 | if ref.startswith(tag_prefix): 232 | r = ref[len(tag_prefix):] 233 | # Filter out refs that exactly match prefix or that don't start 234 | # with a number once the prefix is stripped (mostly a concern 235 | # when prefix is '') 236 | if not re.match(r'\d', r): 237 | continue 238 | if verbose: 239 | print("picking %s" % r) 240 | return {"version": r, 241 | "full-revisionid": keywords["full"].strip(), 242 | "dirty": False, "error": None, 243 | "date": date} 244 | # no suitable tags, so version is "0+unknown", but full hex is still there 245 | if verbose: 246 | print("no suitable tags, using unknown + full revision id") 247 | return {"version": "0+unknown", 248 | "full-revisionid": keywords["full"].strip(), 249 | "dirty": False, "error": "no suitable tags", "date": None} 250 | 251 | 252 | @register_vcs_handler("git", "pieces_from_vcs") 253 | def git_pieces_from_vcs( 254 | tag_prefix: str, 255 | root: str, 256 | verbose: bool, 257 | runner: Callable = run_command 258 | ) -> Dict[str, Any]: 259 | """Get version from 'git describe' in the root of the source tree. 260 | 261 | This only gets called if the git-archive 'subst' keywords were *not* 262 | expanded, and _version.py hasn't already been rewritten with a short 263 | version string, meaning we're inside a checked out source tree. 264 | """ 265 | GITS = ["git"] 266 | if sys.platform == "win32": 267 | GITS = ["git.cmd", "git.exe"] 268 | 269 | # GIT_DIR can interfere with correct operation of Versioneer. 270 | # It may be intended to be passed to the Versioneer-versioned project, 271 | # but that should not change where we get our version from. 
272 | env = os.environ.copy() 273 | env.pop("GIT_DIR", None) 274 | runner = functools.partial(runner, env=env) 275 | 276 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, 277 | hide_stderr=not verbose) 278 | if rc != 0: 279 | if verbose: 280 | print("Directory %s not under git control" % root) 281 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 282 | 283 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 284 | # if there isn't one, this yields HEX[-dirty] (no NUM) 285 | describe_out, rc = runner(GITS, [ 286 | "describe", "--tags", "--dirty", "--always", "--long", 287 | "--match", f"{tag_prefix}[[:digit:]]*" 288 | ], cwd=root) 289 | # --long was added in git-1.5.5 290 | if describe_out is None: 291 | raise NotThisMethod("'git describe' failed") 292 | describe_out = describe_out.strip() 293 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 294 | if full_out is None: 295 | raise NotThisMethod("'git rev-parse' failed") 296 | full_out = full_out.strip() 297 | 298 | pieces: Dict[str, Any] = {} 299 | pieces["long"] = full_out 300 | pieces["short"] = full_out[:7] # maybe improved later 301 | pieces["error"] = None 302 | 303 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], 304 | cwd=root) 305 | # --abbrev-ref was added in git-1.6.3 306 | if rc != 0 or branch_name is None: 307 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 308 | branch_name = branch_name.strip() 309 | 310 | if branch_name == "HEAD": 311 | # If we aren't exactly on a branch, pick a branch which represents 312 | # the current commit. If all else fails, we are on a branchless 313 | # commit. 314 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 315 | # --contains was added in git-1.5.4 316 | if rc != 0 or branches is None: 317 | raise NotThisMethod("'git branch --contains' returned error") 318 | branches = branches.split("\n") 319 | 320 | # Remove the first line if we're running detached 321 | if "(" in branches[0]: 322 | branches.pop(0) 323 | 324 | # Strip off the leading "* " from the list of branches. 325 | branches = [branch[2:] for branch in branches] 326 | if "master" in branches: 327 | branch_name = "master" 328 | elif not branches: 329 | branch_name = None 330 | else: 331 | # Pick the first branch that is returned. Good or bad. 332 | branch_name = branches[0] 333 | 334 | pieces["branch"] = branch_name 335 | 336 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 337 | # TAG might have hyphens. 338 | git_describe = describe_out 339 | 340 | # look for -dirty suffix 341 | dirty = git_describe.endswith("-dirty") 342 | pieces["dirty"] = dirty 343 | if dirty: 344 | git_describe = git_describe[:git_describe.rindex("-dirty")] 345 | 346 | # now we have TAG-NUM-gHEX or HEX 347 | 348 | if "-" in git_describe: 349 | # TAG-NUM-gHEX 350 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 351 | if not mo: 352 | # unparsable. Maybe git-describe is misbehaving? 
353 | pieces["error"] = ("unable to parse git-describe output: '%s'" 354 | % describe_out) 355 | return pieces 356 | 357 | # tag 358 | full_tag = mo.group(1) 359 | if not full_tag.startswith(tag_prefix): 360 | if verbose: 361 | fmt = "tag '%s' doesn't start with prefix '%s'" 362 | print(fmt % (full_tag, tag_prefix)) 363 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 364 | % (full_tag, tag_prefix)) 365 | return pieces 366 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 367 | 368 | # distance: number of commits since tag 369 | pieces["distance"] = int(mo.group(2)) 370 | 371 | # commit: short hex revision ID 372 | pieces["short"] = mo.group(3) 373 | 374 | else: 375 | # HEX: no tags 376 | pieces["closest-tag"] = None 377 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) 378 | pieces["distance"] = len(out.split()) # total number of commits 379 | 380 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 381 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 382 | # Use only the last line. Previous lines may contain GPG signature 383 | # information. 384 | date = date.splitlines()[-1] 385 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 386 | 387 | return pieces 388 | 389 | 390 | def plus_or_dot(pieces: Dict[str, Any]) -> str: 391 | """Return a + if we don't already have one, else return a .""" 392 | if "+" in pieces.get("closest-tag", ""): 393 | return "." 394 | return "+" 395 | 396 | 397 | def render_pep440(pieces: Dict[str, Any]) -> str: 398 | """Build up version string, with post-release "local version identifier". 399 | 400 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 401 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 402 | 403 | Exceptions: 404 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 405 | """ 406 | if pieces["closest-tag"]: 407 | rendered = pieces["closest-tag"] 408 | if pieces["distance"] or pieces["dirty"]: 409 | rendered += plus_or_dot(pieces) 410 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 411 | if pieces["dirty"]: 412 | rendered += ".dirty" 413 | else: 414 | # exception #1 415 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 416 | pieces["short"]) 417 | if pieces["dirty"]: 418 | rendered += ".dirty" 419 | return rendered 420 | 421 | 422 | def render_pep440_branch(pieces: Dict[str, Any]) -> str: 423 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 424 | 425 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 426 | (a feature branch will appear "older" than the master branch). 427 | 428 | Exceptions: 429 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 430 | """ 431 | if pieces["closest-tag"]: 432 | rendered = pieces["closest-tag"] 433 | if pieces["distance"] or pieces["dirty"]: 434 | if pieces["branch"] != "master": 435 | rendered += ".dev0" 436 | rendered += plus_or_dot(pieces) 437 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 438 | if pieces["dirty"]: 439 | rendered += ".dirty" 440 | else: 441 | # exception #1 442 | rendered = "0" 443 | if pieces["branch"] != "master": 444 | rendered += ".dev0" 445 | rendered += "+untagged.%d.g%s" % (pieces["distance"], 446 | pieces["short"]) 447 | if pieces["dirty"]: 448 | rendered += ".dirty" 449 | return rendered 450 | 451 | 452 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: 453 | """Split pep440 version string at the post-release segment. 
454 | 455 | Returns the release segments before the post-release and the 456 | post-release version number (or -1 if no post-release segment is present). 457 | """ 458 | vc = str.split(ver, ".post") 459 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 460 | 461 | 462 | def render_pep440_pre(pieces: Dict[str, Any]) -> str: 463 | """TAG[.postN.devDISTANCE] -- No -dirty. 464 | 465 | Exceptions: 466 | 1: no tags. 0.post0.devDISTANCE 467 | """ 468 | if pieces["closest-tag"]: 469 | if pieces["distance"]: 470 | # update the post release segment 471 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 472 | rendered = tag_version 473 | if post_version is not None: 474 | rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) 475 | else: 476 | rendered += ".post0.dev%d" % (pieces["distance"]) 477 | else: 478 | # no commits, use the tag as the version 479 | rendered = pieces["closest-tag"] 480 | else: 481 | # exception #1 482 | rendered = "0.post0.dev%d" % pieces["distance"] 483 | return rendered 484 | 485 | 486 | def render_pep440_post(pieces: Dict[str, Any]) -> str: 487 | """TAG[.postDISTANCE[.dev0]+gHEX] . 488 | 489 | The ".dev0" means dirty. Note that .dev0 sorts backwards 490 | (a dirty tree will appear "older" than the corresponding clean one), 491 | but you shouldn't be releasing software with -dirty anyways. 492 | 493 | Exceptions: 494 | 1: no tags. 0.postDISTANCE[.dev0] 495 | """ 496 | if pieces["closest-tag"]: 497 | rendered = pieces["closest-tag"] 498 | if pieces["distance"] or pieces["dirty"]: 499 | rendered += ".post%d" % pieces["distance"] 500 | if pieces["dirty"]: 501 | rendered += ".dev0" 502 | rendered += plus_or_dot(pieces) 503 | rendered += "g%s" % pieces["short"] 504 | else: 505 | # exception #1 506 | rendered = "0.post%d" % pieces["distance"] 507 | if pieces["dirty"]: 508 | rendered += ".dev0" 509 | rendered += "+g%s" % pieces["short"] 510 | return rendered 511 | 512 | 513 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: 514 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 515 | 516 | The ".dev0" means not master branch. 517 | 518 | Exceptions: 519 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 520 | """ 521 | if pieces["closest-tag"]: 522 | rendered = pieces["closest-tag"] 523 | if pieces["distance"] or pieces["dirty"]: 524 | rendered += ".post%d" % pieces["distance"] 525 | if pieces["branch"] != "master": 526 | rendered += ".dev0" 527 | rendered += plus_or_dot(pieces) 528 | rendered += "g%s" % pieces["short"] 529 | if pieces["dirty"]: 530 | rendered += ".dirty" 531 | else: 532 | # exception #1 533 | rendered = "0.post%d" % pieces["distance"] 534 | if pieces["branch"] != "master": 535 | rendered += ".dev0" 536 | rendered += "+g%s" % pieces["short"] 537 | if pieces["dirty"]: 538 | rendered += ".dirty" 539 | return rendered 540 | 541 | 542 | def render_pep440_old(pieces: Dict[str, Any]) -> str: 543 | """TAG[.postDISTANCE[.dev0]] . 544 | 545 | The ".dev0" means dirty. 546 | 547 | Exceptions: 548 | 1: no tags. 0.postDISTANCE[.dev0] 549 | """ 550 | if pieces["closest-tag"]: 551 | rendered = pieces["closest-tag"] 552 | if pieces["distance"] or pieces["dirty"]: 553 | rendered += ".post%d" % pieces["distance"] 554 | if pieces["dirty"]: 555 | rendered += ".dev0" 556 | else: 557 | # exception #1 558 | rendered = "0.post%d" % pieces["distance"] 559 | if pieces["dirty"]: 560 | rendered += ".dev0" 561 | return rendered 562 | 563 | 564 | def render_git_describe(pieces: Dict[str, Any]) -> str: 565 | """TAG[-DISTANCE-gHEX][-dirty]. 
566 | 567 | Like 'git describe --tags --dirty --always'. 568 | 569 | Exceptions: 570 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 571 | """ 572 | if pieces["closest-tag"]: 573 | rendered = pieces["closest-tag"] 574 | if pieces["distance"]: 575 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 576 | else: 577 | # exception #1 578 | rendered = pieces["short"] 579 | if pieces["dirty"]: 580 | rendered += "-dirty" 581 | return rendered 582 | 583 | 584 | def render_git_describe_long(pieces: Dict[str, Any]) -> str: 585 | """TAG-DISTANCE-gHEX[-dirty]. 586 | 587 | Like 'git describe --tags --dirty --always -long'. 588 | The distance/hash is unconditional. 589 | 590 | Exceptions: 591 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 592 | """ 593 | if pieces["closest-tag"]: 594 | rendered = pieces["closest-tag"] 595 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 596 | else: 597 | # exception #1 598 | rendered = pieces["short"] 599 | if pieces["dirty"]: 600 | rendered += "-dirty" 601 | return rendered 602 | 603 | 604 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: 605 | """Render the given version pieces into the requested style.""" 606 | if pieces["error"]: 607 | return {"version": "unknown", 608 | "full-revisionid": pieces.get("long"), 609 | "dirty": None, 610 | "error": pieces["error"], 611 | "date": None} 612 | 613 | if not style or style == "default": 614 | style = "pep440" # the default 615 | 616 | if style == "pep440": 617 | rendered = render_pep440(pieces) 618 | elif style == "pep440-branch": 619 | rendered = render_pep440_branch(pieces) 620 | elif style == "pep440-pre": 621 | rendered = render_pep440_pre(pieces) 622 | elif style == "pep440-post": 623 | rendered = render_pep440_post(pieces) 624 | elif style == "pep440-post-branch": 625 | rendered = render_pep440_post_branch(pieces) 626 | elif style == "pep440-old": 627 | rendered = render_pep440_old(pieces) 628 | elif style == "git-describe": 629 | rendered = render_git_describe(pieces) 630 | elif style == "git-describe-long": 631 | rendered = render_git_describe_long(pieces) 632 | else: 633 | raise ValueError("unknown style '%s'" % style) 634 | 635 | return {"version": rendered, "full-revisionid": pieces["long"], 636 | "dirty": pieces["dirty"], "error": None, 637 | "date": pieces.get("date")} 638 | 639 | 640 | def get_versions() -> Dict[str, Any]: 641 | """Get version information or return default if unable to do so.""" 642 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 643 | # __file__, we can work backwards from there to the root. Some 644 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 645 | # case we can only use expanded keywords. 646 | 647 | cfg = get_config() 648 | verbose = cfg.verbose 649 | 650 | try: 651 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 652 | verbose) 653 | except NotThisMethod: 654 | pass 655 | 656 | try: 657 | root = os.path.realpath(__file__) 658 | # versionfile_source is the relative path from the top of the source 659 | # tree (where the .git directory might live) to this file. Invert 660 | # this to find the root from __file__. 
661 | for _ in cfg.versionfile_source.split('/'): 662 | root = os.path.dirname(root) 663 | except NameError: 664 | return {"version": "0+unknown", "full-revisionid": None, 665 | "dirty": None, 666 | "error": "unable to find root of source tree", 667 | "date": None} 668 | 669 | try: 670 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 671 | return render(pieces, cfg.style) 672 | except NotThisMethod: 673 | pass 674 | 675 | try: 676 | if cfg.parentdir_prefix: 677 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 678 | except NotThisMethod: 679 | pass 680 | 681 | return {"version": "0+unknown", "full-revisionid": None, 682 | "dirty": None, 683 | "error": "unable to compute version", "date": None} 684 | -------------------------------------------------------------------------------- /pydrograph/attributes.py: -------------------------------------------------------------------------------- 1 | # site_no -- Site identification number 2 | # station_nm -- Site name 3 | # site_tp_cd -- Site type 4 | # lat_va -- DMS latitude 5 | # long_va -- DMS longitude 6 | # dec_lat_va -- Decimal latitude 7 | # dec_long_va -- Decimal longitude 8 | # coord_meth_cd -- Latitude-longitude method 9 | # coord_acy_cd -- Latitude-longitude accuracy 10 | # coord_datum_cd -- Latitude-longitude datum 11 | # dec_coord_datum_cd -- Decimal Latitude-longitude datum 12 | # district_cd -- District code 13 | # state_cd -- State code 14 | # county_cd -- County code 15 | # country_cd -- Country code 16 | # land_net_ds -- Land net location description 17 | # map_nm -- Name of location map 18 | # map_scale_fc -- Scale of location map 19 | # alt_va -- Altitude of Gage/land surface 20 | # alt_meth_cd -- Method altitude determined 21 | # alt_acy_va -- Altitude accuracy 22 | # alt_datum_cd -- Altitude datum 23 | # huc_cd -- Hydrologic unit code 24 | # basin_cd -- Drainage basin code 25 | # topo_cd -- Topographic setting code 26 | # data_types_cd -- Flags for the type of data collected 27 | # instruments_cd -- Flags for instruments at site 28 | # construction_dt -- Date of first construction 29 | # inventory_dt -- Date site established or inventoried 30 | # drain_area_va -- Drainage area 31 | # contrib_drain_area_va -- Contributing drainage area 32 | # tz_cd -- Mean Greenwich time offset 33 | # local_time_fg -- Local standard time flag 34 | # reliability_cd -- Data reliability code 35 | # gw_file_cd -- Data-other GW files 36 | # nat_aqfr_cd -- National aquifer code 37 | # aqfr_cd -- Local aquifer code 38 | # aqfr_type_cd -- Local aquifer type code 39 | # well_depth_va -- Well depth 40 | # hole_depth_va -- Hole depth 41 | # depth_src_cd -- Source of depth data 42 | # project_no -- Project number 43 | # rt_bol -- Real-time data flag 44 | # peak_begin_date -- Peak-streamflow data begin date 45 | # peak_end_date -- Peak-streamflow data end date 46 | # peak_count_nu -- Peak-streamflow data count 47 | # qw_begin_date -- Water-quality data begin date 48 | # qw_end_date -- Water-quality data end date 49 | # qw_count_nu -- Water-quality data count 50 | # gw_begin_date -- Field water-level measurements begin date 51 | # gw_end_date -- Field water-level measurements end date 52 | # gw_count_nu -- Field water-level measurements count 53 | # sv_begin_date -- Site-visit data begin date 54 | # sv_end_date -- Site-visit data end date 55 | # sv_count_nu -- Site-visit data count 56 | 57 | streamflow_attributes = [ \ 58 | 'site_no', 59 | 'station_nm', 60 | 'site_tp_cd', 61 | 'dec_lat_va', 62 | 'dec_long_va', 63 | 
'coord_meth_cd', 64 | 'coord_acy_cd', 65 | 'coord_datum_cd', 66 | 'dec_coord_datum_cd', 67 | 'district_cd', 68 | 'state_cd', 69 | 'county_cd', 70 | 'country_cd', 71 | 'land_net_ds', 72 | 'map_nm', 73 | 'map_scale_fc', 74 | 'alt_va', 75 | 'alt_meth_cd', 76 | 'alt_acy_va', 77 | 'alt_datum_cd', 78 | 'huc_cd', 79 | 'basin_cd', 80 | 'topo_cd', 81 | 'inventory_dt', 82 | 'drain_area_va', 83 | 'contrib_drain_area_va', 84 | 'tz_cd', 85 | 'local_time_fg', 86 | 'reliability_cd', 87 | 'project_no', 88 | 'rt_bol', 89 | 'peak_begin_date', 90 | 'peak_end_date', 91 | 'peak_count_nu', 92 | 'qw_begin_date', 93 | 'qw_end_date', 94 | 'qw_count_nu', 95 | 'sv_begin_date', 96 | 'sv_end_date', 97 | 'sv_count_nu'] 98 | 99 | iv_attributes = [ \ 100 | 'agency_cd', 101 | 'site_no', 102 | 'station_nm', 103 | 'site_tp_cd', 104 | 'dec_lat_va', 105 | 'dec_long_va', 106 | 'coord_acy_cd', 107 | 'dec_coord_datum_cd', 108 | 'alt_va', 109 | 'alt_acy_va', 110 | 'alt_datum_cd', 111 | 'huc_cd', 112 | 'data_type_cd', 113 | 'parm_cd', 114 | 'stat_cd', 115 | 'ts_id', 116 | 'loc_web_ds', 117 | 'medium_grp_cd', 118 | 'parm_grp_cd', 119 | 'srs_id', 120 | 'access_cd', 121 | 'begin_date', 122 | 'end_date', 123 | 'count_nu'] 124 | 125 | gw_attributes = [ 126 | 'site_no', 127 | 'station_nm', 128 | 'site_tp_cd', 129 | 'dec_lat_va', 130 | 'dec_long_va', 131 | 'coord_meth_cd', 132 | 'coord_acy_cd', 133 | 'coord_datum_cd', 134 | 'dec_coord_datum_cd', 135 | 'district_cd', 136 | 'state_cd', 137 | 'county_cd', 138 | 'country_cd', 139 | 'land_net_ds', 140 | 'map_nm', 141 | 'map_scale_fc', 142 | 'alt_va', 143 | 'alt_meth_cd', 144 | 'alt_acy_va', 145 | 'alt_datum_cd', 146 | 'huc_cd', 147 | 'basin_cd', 148 | 'topo_cd', 149 | 'data_types_cd', 150 | 'instruments_cd', 151 | 'construction_dt', 152 | 'inventory_dt', 153 | 'tz_cd', 154 | 'local_time_fg', 155 | 'reliability_cd', 156 | 'gw_file_cd', 157 | 'nat_aqfr_cd', 158 | 'aqfr_cd', 159 | 'aqfr_type_cd', 160 | 'well_depth_va', 161 | 'hole_depth_va', 162 | 'depth_src_cd', 163 | 'project_no', 164 | 'rt_bol', 165 | 'peak_begin_date', 166 | 'peak_end_date', 167 | 'peak_count_nu', 168 | 'qw_begin_date', 169 | 'qw_end_date', 170 | 'qw_count_nu', 171 | 'gw_begin_date', 172 | 'gw_end_date', 173 | 'gw_count_nu', 174 | 'sv_begin_date', 175 | 'sv_end_date', 176 | 'sv_count_nu' 177 | ] 178 | -------------------------------------------------------------------------------- /pydrograph/baseflow.py: -------------------------------------------------------------------------------- 1 | import datetime as dt 2 | import numpy as np 3 | import pandas as pd 4 | import gisutils 5 | 6 | 7 | def get_upstream_area(points, PlusFlow, NHDFlowlines, NHDCatchments, nearfield=None): 8 | """For each point in points, get upstream drainage area in km2, using 9 | NHDPlus PlusFlow routing table and NHDPlus Catchment areas. Upstream area 10 | within the containing catchment is estimated as a fraction proportional 11 | to the distance of the measurement point along the NHDPlus Flowline associated with the catchment. 12 | 13 | Parameters 14 | ---------- 15 | points : list of shapely Point objects 16 | Locations of streamflow measurements. Must be in same coordinate system as NHDCatchments 17 | PlusFlow : str or list of strings 18 | Path(s) to PlusFlow routing tables 19 | NHDFlowlines : str or list of strings 20 | Path(s) to Flowlines shapefiles 21 | NHDCatchments : str or list of strings 22 | Path(s) to Catchment shapefiles 23 | nearfield : shapefile or shapely Polygon 24 | Nearfield area of model.
Used to filter NHDPlus flowlines and catchments to 25 | greatly speed reading them in and finding the COMIDs associated with points. 26 | Must be in same coordinate system as points and NHDPlus shapefiles. 27 | 28 | Returns 29 | ------- 30 | upstream_area : list 31 | List of areas in km2, for each point in points. 32 | """ 33 | try: 34 | import fiona 35 | from shapely.geometry import LineString, Polygon, shape 36 | from GISio import shp2df 37 | except ImportError: 38 | print('This method requires fiona, shapely and GIS_utils.') 39 | 40 | if isinstance(nearfield, Polygon): 41 | bbox = nearfield.bounds 42 | elif isinstance(nearfield, str): 43 | bbox = shape(next(iter(fiona.open(nearfield)))['geometry']).bounds 44 | else: 45 | bbox = None 46 | 47 | # dilate the bounding box by half, so that features aren't missed. 48 | if bbox is not None: 49 | x, y = 0.5 * (bbox[2] - bbox[0]), 0.5 * (bbox[3] - bbox[1]) 50 | bbox = (bbox[0]-x, bbox[1]-y, bbox[2]+x, bbox[3]+y) 51 | 52 | pf = shp2df(PlusFlow) 53 | fl = shp2df(NHDFlowlines, index='COMID', filter=bbox) 54 | cmt = shp2df(NHDCatchments, index='FEATUREID', filter=bbox) 55 | 56 | # find the catchment containing each point in points 57 | comids = [] 58 | for p in points: 59 | comids += cmt.FEATUREID[np.array([p.within(g) for g in cmt.geometry])].tolist() 60 | 61 | upstream_area = [] 62 | for i, comid in enumerate(comids): 63 | upstream_comids = {comid} 64 | upstream = [comid] 65 | for j in range(1000): 66 | upstream = set(pf.loc[pf.TOCOMID.isin(upstream), 'FROMCOMID']).difference({0}) 67 | if len(upstream) == 0: 68 | break 69 | upstream_comids.update(upstream) 70 | 71 | total_upstream_area = cmt.loc[list(upstream_comids), 'AreaSqKM'].sum() 72 | if comid == 11951607: 73 | j=2 74 | # estimate fraction of containing catchment that is upstream 75 | # by finding closest vertex on flowline, 76 | # and then dividing upstream length by downstream length 77 | #X = np.array(fl.ix[comid, 'geometry'].coords.xy[0]) 78 | #Y = np.array(fl.ix[comid, 'geometry'].coords.xy[1]) 79 | g = points[i] # misc measurement point 80 | #i = np.argmin(np.sqrt((X-g.x)**2 + (Y-g.y)**2)) # closest point on flowline 81 | 82 | # should be able to just project point onto flowline and divide by total length 83 | l = fl.loc[comid, 'geometry'] 84 | frac = l.project(g)/l.length 85 | #frac = LineString(zip(X[:i+1], Y[:i+1])).length/LineString(zip(X[i:], Y[i:])).length 86 | upstream_in_catchment = cmt.loc[comid, 'AreaSqKM'] * frac 87 | total_upstream_area += upstream_in_catchment 88 | upstream_area.append(total_upstream_area) 89 | 90 | return upstream_area 91 | 92 | 93 | def IHmethod(Qseries, block_length=5, tp=0.9, interp_semilog=True, freq='D', limit=100): 94 | """Baseflow separation using the Institute of Hydrology method, as documented in 95 | Institute of Hydrology, 1980b, Low flow studies report no. 3--Research report: 96 | Wallingford, Oxon, United Kingdom, Institute of Hydrology Report no. 3, p. 12-19, 97 | and 98 | Wahl, K.L. and Wahl, T.L., 1988. Effects of regional ground-water level declines 99 | on streamflow in the Oklahoma Panhandle. In Proceedings of the Symposium on 100 | Water-Use Data for Water Resources Management, American Water Resources Association. 101 | 102 | Parameters 103 | ---------- 104 | Qseries : pandas Series 105 | Pandas time series (with datetime index) containing measured streamflow values. 106 | block_length : int 107 | N parameter in IH method. Streamflow is partitioned into N-day intervals; 108 | a minimum flow is recorded for each interval. 109 | tp : float 110 | f parameter in IH method.
For each three N-day minima, if f * the central value 111 | is less than the adjacent two values, the central value is considered a 112 | turning point. Baseflow is interpolated between the turning points. 113 | interp_semilog : boolean 114 | If False, linear interpolation is used to compute baseflow between turning points 115 | (as documented in the IH method). If True, the base-10 logs of the turning points 116 | are interpolated, and the interpolated values are transformed back to 117 | linear space (producing a curved hydrograph). Semi-logarithmic interpolation, 118 | as documented in Wahl and Wahl (1988), is used in the Base-Flow Index (BFI) 119 | Fortran program. This method reassigns zero values to -2 in log space (0.01) 120 | for the interpolation. 121 | freq : str or DateOffset, default 'D' 122 | Any `pandas frequency alias <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`_. 123 | Regular time interval that forms the basis for base-flow separation. Input data are 124 | resampled to this frequency, and block lengths represent the number of time increments 125 | of the frequency. By default, days ('D'), which is what all previous BFI methods 126 | are based on. Note that this is therefore an experimental option; it is up to the user 127 | to verify any results produced by other frequencies. 128 | limit : int 129 | Maximum number of timesteps allowed during linear interpolation between baseflow 130 | ordinates. Must be greater than zero. 131 | 132 | 133 | Returns 134 | ------- 135 | Q : pandas DataFrame 136 | DataFrame containing the following columns: 137 | block_Qmin : N-day minima 138 | ordinate : selected turning points 139 | n : block number for each N-day minima 140 | QB : computed baseflow 141 | Q : discharge values 142 | 143 | Notes 144 | ----- 145 | Whereas this program only selects turning points following the methodology above, 146 | the BFI Fortran program adds artificial turning points at the start and end of 147 | each calendar year. Therefore results for datasets consisting of multiple years 148 | will differ from those produced by the BFI program. 149 | 150 | """ 151 | if len(Qseries) < 2 * block_length: 152 | raise ValueError('Input Series must be at ' 153 | 'least two block lengths\nblock_length: ' 154 | '{}\n{}'.format(block_length, Qseries)) 155 | 156 | # convert flow values to numeric if they are objects 157 | # (pandas will cast column as objects if there are strings such as "ICE") 158 | # coerce any strings into np.nan values 159 | if Qseries.dtype.name == 'object': 160 | Qseries = pd.to_numeric(Qseries, errors='coerce') 161 | 162 | # convert the series to a dataframe; resample to daily values 163 | # missing days will be filled with nan values 164 | df = pd.DataFrame(Qseries).resample(freq).mean() 165 | df.columns = ['Q'] 166 | 167 | # compute block numbers for grouping values on blocks 168 | nblocks = int(np.floor(len(df) / float(block_length))) 169 | 170 | # make list of ints, one per measurement, denoting the block 171 | # eg [1,1,1,1,1,2,2,2,2,2...]
for block_length = 5 172 | n = [] 173 | for i in range(nblocks): 174 | n += [i + 1] * block_length 175 | n += [np.nan] * (len(df) - len(n)) # pad any leftover values with nans 176 | df['n'] = n 177 | 178 | # compute the minimum for each block 179 | # create dataframe Q, which only has minimums for each block 180 | blocks = df[['Q', 'n']].reset_index(drop=True).dropna(axis=0).groupby('n') 181 | Q = blocks.min() 182 | Q = Q.rename(columns={'Q': 'block_Qmin'}) 183 | Q['n'] = Q.index 184 | # get the index position of the minimum Q within each block 185 | idx_Qmins = blocks.idxmin()['Q'].values.astype(int) 186 | # get the date associated with each Q minimum 187 | Q['datetime'] = df.index[idx_Qmins] 188 | 189 | # compute baseflow ordinates 190 | Q['ordinate'] = [np.nan] * len(Q) 191 | Qlist = Q.block_Qmin.tolist() 192 | Q['Qi-1'] = [np.nan] + Qlist[:-2] + [np.nan] 193 | Q['Qi'] = [np.nan] + Qlist[1:-1] + [np.nan] 194 | Q['Qi+1'] = [np.nan] + Qlist[2:] + [np.nan] 195 | isordinate = tp * Q.Qi < Q[['Qi-1', 'Qi+1']].min(axis=1) 196 | Q.loc[isordinate, 'ordinate'] = Q.loc[isordinate, 'block_Qmin'] 197 | 198 | # reset the index of Q to datetime 199 | Q.index = Q.datetime 200 | 201 | # expand Q dataframe back out to include row for each day 202 | Q = Q.dropna(subset=['datetime'], axis=0).resample(freq).mean() 203 | 204 | # interpolate between baseflow ordinates 205 | if interp_semilog: 206 | iszero = Q.ordinate.values == 0 207 | logQ = np.log10(Q.ordinate) 208 | logQ[iszero] = -2 209 | QB = np.power(10.0, logQ.interpolate(limit=limit).values) 210 | else: 211 | QB = Q.ordinate.interpolate(limit=limit).values 212 | Q['QB'] = QB 213 | 214 | # reassign the original flow values back to Q 215 | Q['Q'] = df.Q.loc[Q.index] 216 | 217 | # ensure that no baseflow values are > Q measured 218 | QBgreaterthanQ = Q.QB.values > Q.Q.values 219 | Q.loc[QBgreaterthanQ, 'QB'] = Q.loc[QBgreaterthanQ, 'Q'] 220 | return Q 221 | 222 | 223 | def WI_statewide_eqn(Qm, A, Qr, Q90): 224 | """Regression equation of Gebert and others (2007, 2011) 225 | for estimating average annual baseflow from a field measurement of streamflow 226 | during low-flow conditions. 227 | 228 | Parameters 229 | ---------- 230 | Qm : float or 1-D array of floats 231 | Measured streamflow. 232 | A : float or 1-D array of floats 233 | Drainage area in watershed upstream of where Qm was taken. 234 | Qr : float or 1-D array of floats 235 | Recorded flow at index station when Qm was taken. 236 | Q90 : float or 1-D array of floats 237 | Q90 flow at index station. 238 | 239 | Returns 240 | ------- 241 | Qb : float or 1-D array of floats, of length equal to input arrays 242 | Estimated average annual baseflow at point where Qm was taken. 243 | Bf : float or 1-D array of floats, of length equal to input arrays 244 | Baseflow factor. see Gebert and others (2007, 2011). 245 | 246 | Notes 247 | ----- 248 | Gebert, W.A., Radloff, M.J., Considine, E.J., and Kennedy, J.L., 2007, 249 | Use of streamflow data to estimate base flow/ground-water recharge for Wisconsin: 250 | Journal of the American Water Resources Association, 251 | v. 43, no. 1, p. 220-236, http://dx.doi.org/10.1111/j.1752-1688.2007.00018.x 252 | 253 | Gebert, W.A., Walker, J.F., and Kennedy, J.L., 2011, 254 | Estimating 1970-99 average annual groundwater recharge in Wisconsin using streamflow data: 255 | U.S. Geological Survey Open-File Report 2009-1210, 14 p., plus appendixes, 256 | available at http://pubs.usgs.gov/ofr/2009/1210/. 
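
    Examples
    --------
    A minimal usage sketch; the numbers below are hypothetical values chosen
    for illustration only. Pass numpy arrays rather than plain floats, since
    ``.copy()`` is called on the returned values:

    >>> import numpy as np
    >>> Qm = np.array([20.])   # field-measured discharge
    >>> A = np.array([150.])   # upstream drainage area
    >>> Qr = np.array([80.])   # recorded flow at the index station
    >>> Q90 = np.array([60.])  # Q90 flow at the index station
    >>> Qb, Bf = WI_statewide_eqn(Qm, A, Qr, Q90)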
257 | """ 258 | Bf = (Qm / A) * (Q90 / Qr) 259 | Qb = 0.907 * A**1.02 * Bf**0.52 260 | return Qb.copy(), Bf.copy() 261 | 262 | 263 | def baseflow_summary(nwis_obj, field_sites, field_measurements, daily_values, q90_window=20, crs=None): 264 | 265 | fm = field_measurements.copy() 266 | dvs = daily_values.copy() 267 | 268 | if fm['measurement_dt'].dtype != 'datetime64[ns]': 269 | fm['measurement_dt'] = pd.to_datetime(fm.measurement_dt) 270 | 271 | if crs is not None: 272 | dest_crs = gisutils.get_authority_crs(crs) 273 | print('reprojecting output from\n{}\nto\n{}...'.format(nwis_obj.crs, dest_crs)) 274 | field_sites['geometry'] = gisutils.project(field_sites['geometry'], nwis_obj.crs, dest_crs) 275 | 276 | fm_site_no = [] 277 | Qm = [] 278 | measurement_dt = [] 279 | measured_rating_diff = [] 280 | drainage_area = [] 281 | station_nm = [] 282 | index_station = [] 283 | indexQr = [] 284 | indexQ90 = [] 285 | X, Y = [], [] 286 | for i in range(len(fm)): 287 | mdt = fm.measurement_dt.tolist()[i] 288 | Dt = dt.datetime(mdt.year, mdt.month, mdt.day) 289 | for site_no, data in list(dvs.items()): 290 | 291 | # check if index station covers measurement date 292 | try: 293 | dv = data.loc[Dt] 294 | except KeyError: 295 | continue 296 | dv = data.loc[Dt] 297 | site_no = dv.site_no 298 | DDcd = [k for k in list(data.keys()) if '00060' in k and not 'cd' in k][0] 299 | try: 300 | Qr = float(dv[DDcd]) # handle ice and other non numbers 301 | except: 302 | continue 303 | 304 | # get q90 values for window 305 | q90start = pd.Timestamp(Dt) - pd.Timedelta(0.5 * q90_window * 365.25, unit='days') 306 | q90end = pd.Timestamp(Dt) + pd.Timedelta(0.5 * q90_window * 365.25, unit='days') 307 | values = pd.to_numeric(data.loc[q90start:q90end, DDcd], errors='coerce') 308 | q90 = values.quantile(q=0.1) 309 | 310 | # append last to avoid mismatches in length 311 | site_info = field_sites.loc[fm.site_no.values[i]] 312 | fm_site_no.append(fm.site_no.values[i]) 313 | station_nm.append(site_info['station_nm']) 314 | Qm.append(fm.discharge_va.values[i]) 315 | measurement_dt.append(fm.measurement_dt.tolist()[i]) 316 | measured_rating_diff.append(fm.measured_rating_diff.values[i]) 317 | drainage_area.append(site_info['drain_area_va']) 318 | index_station.append(site_no) 319 | indexQr.append(Qr) 320 | indexQ90.append(q90) 321 | X.append(site_info['geometry'].xy[0][0]) 322 | Y.append(site_info['geometry'].xy[1][0]) 323 | 324 | df = pd.DataFrame({'site_no': fm_site_no, 325 | 'station_nm': station_nm, 326 | 'datetime': measurement_dt, 327 | 'Qm': Qm, 328 | 'quality': measured_rating_diff, 329 | 'drn_area': drainage_area, 330 | 'idx_station': index_station, 331 | 'indexQr': indexQr, 332 | 'indexQ90': indexQ90, 333 | 'X': X, 334 | 'Y': Y}) 335 | df['est_error'] = [nwis_obj.est_error.get(q.lower(), nwis_obj.default_error) for q in df.quality] 336 | df = df[['site_no', 'datetime', 'Qm', 'quality', 'est_error', 337 | 'idx_station', 'indexQr', 'indexQ90', 'drn_area', 'station_nm', 'X', 'Y']] 338 | return df -------------------------------------------------------------------------------- /pydrograph/nwis.py: -------------------------------------------------------------------------------- 1 | import datetime as dt 2 | from pathlib import Path 3 | import time 4 | from urllib.request import urlopen 5 | import numpy as np 6 | import pandas as pd 7 | from shapely.geometry import Point, Polygon, shape, box 8 | import gisutils 9 | from .attributes import streamflow_attributes, gw_attributes, iv_attributes 10 | 11 | 12 | coord_datums_epsg = 
13 |                      'NAD27': 4267}
14 | 
15 | 
16 | def WI_statewide_eqn(Qm, A, Qr, Q90):
17 |     Bf = (Qm / A) * (Q90 / Qr)
18 |     Qb = 0.907 * A**1.02 * Bf**0.52
19 |     return Qb.copy(), Bf.copy()
20 | 
21 | 
22 | class Nwis:
23 |     """
24 |     NWIS error codes:
25 |     E Excellent The data are within 2% (percent) of the actual flow
26 |     G Good      The data are within 5% (percent) of the actual flow
27 |     F Fair      The data are within 8% (percent) of the actual flow
28 |     P Poor      The data are >8% (percent) of the actual flow
29 |     """
30 | 
31 |     est_error = {'excellent': 0.02,
32 |                  'good': 0.05,
33 |                  'fair': 0.08
34 |                  }
35 |     default_error = 0.50
36 | 
37 |     urlbase = 'http://nwis.waterdata.usgs.gov/usa/nwis/'
38 |     urlbase_iv = 'http://waterservices.usgs.gov/nwis/site/?format=rdb&bBox='
39 |     dtypes_dict = {'dv': 'dv?referred_module=sw&site_tp_cd=ST&',
40 |                    'daily_values': 'dv?referred_module=sw&site_tp_cd=ST&',
41 |                    'field_measurements': 'measurements?',
42 |                    'gwlevels': 'gwlevels?',
43 |                    'gwdv': 'dv?referred_module=gw&site_tp_cd=GW&',
44 |                    'gw_daily_values': 'dv?referred_module=gw&site_tp_cd=GW&',
45 |                    'inventory': 'inventory?'}
46 | 
47 |     parameter_codes = {'discharge': '00060',
48 |                        'gwlevels': '72019'}
49 | 
50 |     coordinate_format = 'decimal_degrees' #coordinate_format=decimal_degrees&
51 |     group_key = 'NONE' #group_key=NONE&
52 |     output_format = 'sitefile_output' #format=sitefile_output&
53 |     sitefile_output_format = 'rdb' #sitefile_output_format=rdb&
54 | 
55 |     now = dt.datetime.now()
56 |     range_selection = 'days' #range_selection=days
57 |     period = 365 #period=365
58 |     begin_date = '1880-01-01' #begin_date=2014-04-14
59 |     end_date = '{:%Y-%m-%d}'.format(now - dt.timedelta(days=1)) # yesterday; avoids an invalid date on the first of the month
60 | 
61 |     date_cols = ['measurement_dt', 'lev_dt']
62 | 
63 |     logscale = 1 #'set_logscale_y=1'
64 |     channel_html_info = 0 #'channel_html_info=0'
65 |     date_format = 'YYYY-MM-DD' #'date_format=YYYY-MM-DD'
66 |     channel_rdb_info = 0 #'channel_rdb_info=0'
67 |     rdb_compression = 'file' #'rdb_compression=file'
68 |     list_of_search_criteria = 'lat_long_bounding_box' #'list_of_search_criteria=lat_long_bounding_box'
69 | 
70 |     log_cols = ['site_no', 'url', 'retrieved', 'data_found']
71 | 
72 |     def __init__(self, bounds_latlon=None, extent=None, datum='NAD83',
73 |                  log=False):
74 |         """Class for retrieving data from NWIS.
75 | 
76 |         Parameters
77 |         ----------
78 |         bounds_latlon: sequence of floats
79 |             Bounding box of the query area in decimal degrees, in shapely bounds order:
80 |             (west longitude, south latitude, east longitude, north latitude),
81 |             i.e. (minx, miny, maxx, maxy)
82 |         extent: filepath (str) or shapely polygon
83 |             Polygon of area to query. A polygon from a shapefile
84 |             will be automatically reprojected to lat/lon (epsg:4269);
85 |             shapely polygons are assumed to be in geographic coordinates.
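        datum: str, {'NAD83', 'NAD27'}
            Coordinate datum assumed for bounds_latlon (default 'NAD83').
        log: bool
            If True, write a .csv log of each retrieval query (default False).

        Examples
        --------
        A minimal sketch (the bounding box coordinates here are arbitrary):

        >>> from pydrograph import Nwis
        >>> nwis = Nwis(bounds_latlon=(-91.5, 46.2, -90.2, 46.8))  # doctest: +SKIP
        >>> sites = nwis.get_siteinfo('daily_values')  # doctest: +SKIP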
86 | 
87 |         Other query parameters are set as class attributes; see the code for the defaults.
88 |         """
89 | 
90 |         self.bounds_latlon = bounds_latlon
91 |         self.datum = datum
92 |         self.crs = gisutils.get_authority_crs(coord_datums_epsg[self.datum])
93 |         self.log = pd.DataFrame(columns=self.log_cols)
94 |         self._extent = None
95 |         self.extent = extent
96 |         self._bounds_latlon = None
97 |         self.bounds_latlon = bounds_latlon
98 |         self.write_log_file = log
99 | 
100 |     @property
101 |     def extent(self):
102 |         """Polygon of query area in lat/lon (epsg:4269)"""
103 |         return self._extent
104 | 
105 |     @extent.setter
106 |     def extent(self, extent=None):
107 |         if extent is not None:
108 |             if isinstance(extent, (str, Path)):
109 |                 # _read_extent_shapefile should automatically reproject to 4269
110 |                 self._extent = self._read_extent_shapefile(extent)
111 |             elif isinstance(extent, Polygon):
112 |                 self._extent = extent
113 |             else:
114 |                 try:
115 |                     self._extent = box(*extent)
116 |                 except Exception:
117 |                     print('Warning: extent argument of unknown datatype!')
118 |                     self._extent = None
119 |         else:
120 |             self._extent = None
121 | 
122 |     @property
123 |     def bounds_latlon(self):
124 |         """Bounding box of query area in lat/lon (epsg:4269)"""
125 |         if self._bounds_latlon is None and self.extent is not None:
126 |             self._bounds_latlon = self.extent.bounds
127 |         return self._bounds_latlon
128 | 
129 |     @bounds_latlon.setter
130 |     def bounds_latlon(self, bounds_latlon=None):
131 |         self._bounds_latlon = bounds_latlon
132 | 
133 |     def _compute_geometries(self, df):
134 | 
135 |         datum = np.array([int(coord_datums_epsg[d]) for d in df.dec_coord_datum_cd])
136 |         datums = np.unique(datum)
137 |         x1, y1 = df.dec_long_va.values, df.dec_lat_va.values
138 |         x2 = np.ones(len(df), dtype=float) * np.nan
139 |         y2 = np.ones(len(df), dtype=float) * np.nan
140 |         for dtm in datums:
141 |             pr1 = gisutils.get_authority_crs(int(dtm))
142 |             loc = datum == dtm
143 |             x2[loc], y2[loc] = gisutils.project((x1[loc], y1[loc]), pr1, self.crs)
144 |         geoms = [Point(x, y) for x, y in zip(x2, y2)]
145 |         return geoms
146 | 
147 |     def _cull_to_extent(self, df):
148 | 
149 |         if 'geometry' not in df.columns:
150 |             df['geometry'] = self._compute_geometries(df)
151 | 
152 |         within = np.array([g.within(self.extent) for g in df.geometry])
153 |         return df[within].copy()
154 | 
155 |     def _read_extent_shapefile(self, shpfile, buffer=0):
156 | 
157 |         import fiona
158 |         # fiona is only needed to read the geometry; gisutils handles the CRS
159 | 
160 |         print('reading extent from {}...'.format(shpfile))
161 |         with fiona.open(shpfile) as src:
162 |             g = shape(next(iter(src))['geometry'])
163 |         shpfile_crs = gisutils.get_shapefile_crs(shpfile)
164 |         if self.crs != shpfile_crs:
165 |             print(f'reprojecting extent from\n{shpfile_crs}\nto\n{self.crs}')
166 |             return gisutils.project(g, shpfile_crs, self.crs)
167 |         else:
168 |             return g
169 | 
170 |     def make_site_url(self, data_type='inventory', attributes=None):
171 |         """
172 |         Parameters
173 |         ----------
174 |         data_type: str
175 |             'dv' for Daily Values
176 |             'field_measurements' for Field Measurements
177 |             'inventory' for all measurements
178 | 
179 |         Returns
180 |         -------
181 |         url string
182 |         """
183 |         self.bbox_url = 'nw_longitude_va={:.3f}&'.format(self.bounds_latlon[0]) +\
184 |                         'nw_latitude_va={:.3f}&'.format(self.bounds_latlon[3]) +\
185 |                         'se_longitude_va={:.3f}&'.format(self.bounds_latlon[2]) +\
186 |                         'se_latitude_va={:.3f}&'.format(self.bounds_latlon[1])
187 | 
188 |         self.stuff_at_beginning = 'coordinate_format={}&'.format(self.coordinate_format) +\
189 |                                   'group_key={}&'.format(self.group_key) +\
190 |                                   'format={}&'.format(self.output_format) +\
191 |                                   'sitefile_output_format={}&'.format(self.sitefile_output_format)
192 | 
193 |         self.dv_info = 'range_selection={}&'.format(self.range_selection) +\
194 |                        'period={}&'.format(self.period) +\
195 |                        'begin_date={}&'.format(self.begin_date) +\
196 |                        'end_date={}&'.format(self.end_date)
197 | 
198 |         self.stuff_at_end = 'date_format={}&'.format(self.date_format) +\
199 |                             'rdb_compression={}&'.format(self.rdb_compression) +\
200 |                             'list_of_search_criteria={}'.format(self.list_of_search_criteria)
201 | 
202 |         url = self.urlbase + self.dtypes_dict.get(data_type, data_type+'?')
203 |         url += self.bbox_url
204 |         url += self.stuff_at_beginning
205 |         if attributes is not None:
206 |             for a in attributes:
207 |                 url += 'column_name=' + a + '&'
208 | 
209 |         if data_type in {'dv', 'daily_values'}:
210 |             url += self.dv_info
211 | 
212 |         url += self.stuff_at_end
213 |         #print '{}'.format(url)
214 |         return url
215 | 
216 |     def make_iv_site_url(self):
217 |         """Makes a url to pull instantaneous-value site information from the NWIS web service.
218 | 
219 |         Notes
220 |         -----
221 |         Takes no arguments; the lat/lon bounding box must already be defined on the Nwis instance.
222 | 
223 |         Returns
224 |         -------
225 |         url string
226 |         """
227 | 
228 |         self.bbox_url = '{:.3f},'.format(self.bounds_latlon[0]) +\
229 |                         '{:.3f},'.format(self.bounds_latlon[1]) +\
230 |                         '{:.3f},'.format(self.bounds_latlon[2]) +\
231 |                         '{:.3f}'.format(self.bounds_latlon[3])
232 | 
233 |         self.stuff_at_end = '&outputDataTypeCd=iv,id&siteStatus=all&hasDataTypeCd=iv'
234 | 
235 |         url = self.urlbase_iv
236 |         url += self.bbox_url
237 | 
238 |         url += self.stuff_at_end
239 |         #print '{}'.format(url)
240 |         return url
241 | 
242 |     def make_dv_url(self, station_IDs, parameter_code='00060', start_date='1880-01-01', end_date=None):
243 |         """Creates url to retrieve daily values for one or more sites
244 | 
245 | 
246 |         Parameters
247 |         ----------
248 |         station_IDs: int, str or list of ints or strings
249 |             USGS station IDs
250 | 
251 |         parameter_code: (string)
252 |             e.g. 00060 for discharge.
253 |             See http://help.waterdata.usgs.gov/codes-and-parameters/parameters.
254 | 
255 |         start_date: (string) 'YYYY-MM-DD'
256 |             To obtain the entire period-of-record, use a start date of 1880-01-01 (the default).
257 | 
258 |         Notes
259 |         -----
260 |         A leading zero is added to the site number if the first digit is between 2 and 9
261 |             (this can happen for basins 02 - 09 if the site number gets converted to an int).
262 |         Note that site numbers for basin 01 (North Atlantic slope) may still get confused with
263 |             basins 10-16 (west coast and hawaii), because a leading '1' is ambiguous.
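
        Examples
        --------
        Sketch of a generated URL, assuming an Nwis instance ``nwis``
        (the site number here is arbitrary):

        >>> nwis.make_dv_url('04087088')  # doctest: +SKIP
        'http://waterservices.usgs.gov/nwis/dv/?format=rdb&sites=04087088&startDT=1880-01-01&parameterCd=00060'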
264 | 
265 | 
266 |         """
267 | 
268 |         if not isinstance(station_IDs, list):
269 |             station_IDs = [str(station_IDs)]
270 | 
271 |         def add_leading_zero(station_ID):  # currently unused; see Nwis.correct_stationID
272 |             if 1 < int(str(station_ID)[0]) < 10:
273 |                 station_ID = '0{}'.format(station_ID)
274 |             return station_ID
275 | 
276 |         #station_IDs = ','.join(['0{}'.format(int(str(s))) for s in station_IDs])
277 |         station_IDs = ','.join([Nwis.correct_stationID(s) for s in station_IDs])
278 | 
279 |         url = 'http://waterservices.usgs.gov/nwis/dv/?format=rdb'
280 | 
281 |         url += '&sites={}'.format(station_IDs)
282 |         url += '&startDT={}'.format(start_date)
283 |         if end_date is not None:
284 |             url += '&endDT={}'.format(end_date)
285 |         url += '&parameterCd={}'.format(parameter_code)
286 |         print('{}'.format(url))
287 |         return url
288 | 
289 |     def make_iv_url(self, station_IDs, parameter_code='00060', start_date='2000-01-01', end_date='2000-12-31'):
290 |         """Creates url to retrieve instantaneous values for one or more sites, given a list of station IDs.
291 | 
292 | 
293 |         Parameters
294 |         ----------
295 |         station_IDs: int, str or list of ints or strings
296 |             USGS station IDs
297 | 
298 |         parameter_code: (string)
299 |             e.g. 00060 for discharge.
300 |             See http://help.waterdata.usgs.gov/codes-and-parameters/parameters.
301 | 
302 |         start_date: (string) 'YYYY-MM-DD'
303 |             The default start and end dates pull the year 2000 rather than the entire period of record:
304 |             instantaneous values are recorded every ~15 minutes, so pull small chunks of data
305 |             to keep the requests manageable.
306 | 
307 |         Notes
308 |         -----
309 |         A leading zero is added to the site number if the first digit is between 2 and 9
310 |             (this can happen for basins 02 - 09 if the site number gets converted to an int).
311 |         Note that site numbers for basin 01 (North Atlantic slope) may still get confused with
312 |             basins 10-16 (west coast and hawaii).
313 | 
314 | 
315 |         """
316 |         # TODO: choose dates more systematically; for now, pulling a whole year is more than enough
317 |         if not isinstance(station_IDs, list):
318 |             station_IDs = [str(station_IDs)]
319 | 
320 |         def add_leading_zero(station_ID):  # currently unused; see Nwis.correct_stationID
321 |             if 1 < int(str(station_ID)[0]) < 10:
322 |                 station_ID = '0{}'.format(station_ID)
323 |             return station_ID
324 | 
325 |         #station_IDs = ','.join(['0{}'.format(int(str(s))) for s in station_IDs])
326 |         station_IDs = ','.join([Nwis.correct_stationID(s) for s in station_IDs])
327 | 
328 |         url = 'http://waterservices.usgs.gov/nwis/iv/?format=rdb'
329 | 
330 |         url += '&sites={}'.format(station_IDs)
331 |         url += '&startDT={}'.format(start_date)
332 |         if end_date is not None:
333 |             url += '&endDT={}'.format(end_date)
334 |         url += '&parameterCd={}'.format(parameter_code)
335 |         print('{}'.format(url))
336 |         return url
337 | 
338 |     def make_measurements_url(self, site_number, txt='measurements', data_format='rdb'):
339 |         """Creates url to retrieve field measurements (or other data types) for a site
340 | 
341 |         Parameters
342 |         ----------
343 |         site_number : str
344 |             USGS site number
345 |         txt : str
346 |             String in url specifying type of measurement
347 |                 measurements : field measurements
348 |                 dv : daily values
349 |                 gwlevels : groundwater levels
350 |                 qwdata : water quality data
351 |         data_format : str, {'rdb', 'rdb_expanded'}
352 |             NWIS format for returned data.
353 |             'rdb': Tab-separated data without channel data
354 |             'rdb_expanded': Tab-separated data with channel data
355 |             Default is 'rdb'.
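
        Examples
        --------
        Sketch of a generated URL, assuming an Nwis instance ``nwis``
        (the site number here is arbitrary):

        >>> nwis.make_measurements_url('04087088')  # doctest: +SKIP
        'http://nwis.waterdata.usgs.gov/nwis/measurements?site_no=04087088&agency_cd=USGS&format=rdb'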
356 | 357 | """ 358 | site_number = Nwis.correct_stationID(site_number) 359 | 360 | url = (f'http://nwis.waterdata.usgs.gov/nwis/{txt}?' 361 | f'site_no={site_number}&' 362 | f'agency_cd=USGS&format={data_format}') 363 | print('{}'.format(url)) 364 | return url 365 | 366 | def get_header_length(self, sitefile_text, col0): 367 | knt = 0 368 | for line in sitefile_text: 369 | if '#' not in str(line) and col0 in str(line): 370 | knt += 2 371 | break 372 | else: 373 | knt += 1 374 | return knt 375 | 376 | def get_datetime_retrieved(self, sitefile_text): 377 | for line in sitefile_text: 378 | if 'retrieved' in str(line): 379 | return str(line).strip().split('retrieved:')[-1][:30].strip() 380 | elif '#' not in str(line): 381 | return None 382 | 383 | def get_siteinfo(self, data_type, attributes=None): 384 | """Retrieves site information for the bounding box supplied to the NWIS class instance 385 | 386 | Parameters 387 | ---------- 388 | data_type: str 389 | 'daily_values' for Daily Values 390 | 'field_measurements' for Field Measurements 391 | 'gwlevels' for groundwater field measurements 392 | 'gwdv' for groundwater daily values 393 | 394 | attributes: list of strings 395 | List of NWIS attributes to include (e.g. 'site_no', 'station_nm', etc.) 396 | Default sets of attributes for streamflow and groundwater levels data can 397 | be imported from the attributes.py file (work in progress) 398 | 399 | Returns 400 | ------- 401 | the contents of an NWIS site information file in a dataframe format 402 | """ 403 | print('getting site inventory for {}...'.format(data_type)) 404 | t0 = time.time() 405 | if attributes is None: 406 | if data_type in {'dv', 'daily_values', 'field_measurements'}: 407 | attributes = streamflow_attributes 408 | elif data_type in {'gwdv', 'gw_daily_values', 'gwlevels'}: 409 | attributes = gw_attributes 410 | url = self.make_site_url(data_type, attributes) 411 | print('url: {}'.format(url)) 412 | sitefile_text = urlopen(url).readlines() 413 | if 'DOCTYPE html' in sitefile_text[0].decode(): 414 | print(f"No sites found for {self.bounds_latlon}!") 415 | return None 416 | 417 | skiprows = self.get_header_length(sitefile_text, attributes[0]) 418 | 419 | print('reading data with pandas...') 420 | df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=attributes, 421 | dtype={'site_no': object}) 422 | print("finished in {:.2f}s\n".format(time.time() - t0)) 423 | df['geometry'] = self._compute_geometries(df) 424 | df.index = df.site_no 425 | n_sites = len(df) 426 | if self.extent is not None: 427 | print('culling {} sites to those within extent...'.format(n_sites)) 428 | within = np.array([g.within(self.extent) for g in df.geometry]) 429 | df = df[within].copy() 430 | print("finished inventory in {:.2f}s\n".format(time.time() - t0)) 431 | return df 432 | 433 | @property 434 | def _get_dv_sites(self): 435 | print('Fetching info for sites with daily values...') 436 | self.dv_sites = self.get_siteinfo('dv', streamflow_attributes) 437 | 438 | def _get_date_col(self, df): 439 | return [d for d in self.date_cols if d in df.columns][0] 440 | 441 | def get_dvs(self, station_ID, parameter_code='00060', start_date='1880-01-01', end_date=None): 442 | """Retrieves daily values for a site. 443 | 444 | Parameters 445 | ---------- 446 | stationID: (string) 447 | USGS station ID 448 | 449 | parameter_code: string, default is 00060 for discharge. 450 | See http://help.waterdata.usgs.gov/codes-and-parameters/parameters. 
451 | 
452 |         start_date: (string) 'YYYY-MM-DD'
453 |             To obtain the entire period-of-record, use a start date of 1880-01-01 (the default).
454 | 
455 |         Returns
456 |         -------
457 |         dv: a datetime-indexed dataframe of daily discharge, with data gaps filled with NaNs
458 |         """
459 |         if parameter_code in list(self.parameter_codes.keys()):
460 |             parameter_code = self.parameter_codes[parameter_code]
461 | 
462 |         url = self.make_dv_url(station_ID, parameter_code=parameter_code,
463 |                                start_date=start_date, end_date=end_date)
464 |         sitefile_text = urlopen(url).readlines()
465 |         skiprows = self.get_header_length(sitefile_text, 'agency_cd')
466 |         cols = sitefile_text[skiprows - 2].decode('utf-8').strip().split('\t')
467 |         loginfo = [str(station_ID), url, self.get_datetime_retrieved(sitefile_text)]
468 |         df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=cols,
469 |                          dtype={'site_no': object})
470 |         if len(df) > 0:
471 |             df.index = pd.to_datetime(df.datetime)
472 |             loginfo.append(True)
473 |         else:
474 |             loginfo.append(False)
475 |         self.log = pd.concat([self.log,
476 |                               pd.DataFrame([loginfo], columns=self.log_cols)])
477 |         return df
478 | 
479 |     def get_iv_siteinfo(self, attributes='iv_attributes'):
480 |         """Retrieves site information for the bounding box supplied to the NWIS class instance.
481 | 
482 |         Parameters
483 |         ----------
484 | 
485 |         attributes: currently ignored; the preset iv_attributes list from attributes.py is always used
486 | 
487 |         Returns
488 |         -------
489 |         the contents of an NWIS site information file in a dataframe format
490 |         """
491 |         #print('getting site inventory for {}...'.format(data_type))
492 |         t0 = time.time()
493 |         attributes = iv_attributes  # always use the preset list from attributes.py
494 |         url = self.make_iv_site_url()
495 |         print('url: {}'.format(url))
496 |         sitefile_text = urlopen(url).readlines()
497 |         skiprows = self.get_header_length(sitefile_text, attributes[0])
498 | 
499 |         print('reading data with pandas...')
500 |         df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=attributes, dtype={'site_no': object})
501 |         print("finished in {:.2f}s\n".format(time.time() - t0))
502 |         df['geometry'] = self._compute_geometries(df)
503 |         df.index = df.site_no
504 |         n_sites = len(df)
505 |         if self.extent is not None:
506 |             print('culling {} sites to those within extent...'.format(n_sites))
507 |             within = np.array([g.within(self.extent) for g in df.geometry])
508 |             df = df[within].copy()
509 |         print("finished inventory in {:.2f}s\n".format(time.time() - t0))
510 |         return df
511 | 
512 |     def get_ivs(self, station_ID, parameter_code='00060', start_date='1900-01-01', end_date=None,
513 |                 sample_period='D', agg_method='mean'):
514 |         """Retrieves instantaneous values for a site.
515 | 
516 |         Parameters
517 |         ----------
518 |         station_ID: (string)
519 |             USGS station ID
520 | 
521 |         parameter_code: string, default is 00060 for discharge.
522 |             See http://help.waterdata.usgs.gov/codes-and-parameters/parameters.
523 | 
524 |         start_date: (string) 'YYYY-MM-DD'
525 |             Default is 1900-01-01; in practice, request a year or less of data at a time.
526 |         end_date: (string) 'YYYY-MM-DD'
527 |             Don't set the range too long (more than a year or so), or the request will be very slow and may not finish.
528 |         sample_period: (string), default 'D' for daily
529 |             Pandas frequency string specifying how the instantaneous values are aggregated; can be daily, weekly, monthly, etc.
530 |             None returns the raw data, which is generally in 15-minute increments.
531 |         agg_method: (string), default 'mean'
532 |             How the aggregated instantaneous values are calculated; options include 'mean', 'median', 'max', etc.
533 | 
534 |         Returns
535 |         -------
536 |         df: a datetime-indexed dataframe of discharge, with data gaps filled with NaNs, aggregated as specified by 'sample_period'
537 |         """
538 |         if parameter_code in list(self.parameter_codes.keys()):
539 |             parameter_code = self.parameter_codes[parameter_code]
540 | 
541 |         url = self.make_iv_url(station_ID, parameter_code=parameter_code,
542 |                                start_date=start_date, end_date=end_date)
543 |         sitefile_text = urlopen(url).readlines()
544 |         skiprows = self.get_header_length(sitefile_text, 'agency_cd')
545 |         cols = sitefile_text[skiprows - 2].decode('utf-8').strip().split('\t')
546 |         loginfo = [str(station_ID), url, self.get_datetime_retrieved(sitefile_text)]
547 |         df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=cols, dtype={'site_no': object})
548 | 
549 |         if len(df) > 2:
550 | 
551 |             if len(df) > 0:
552 |                 df.index = pd.to_datetime(df.datetime)
553 |                 loginfo.append(True)
554 |             else:
555 |                 loginfo.append(False)
556 |             self.log = pd.concat([self.log,
557 |                                   pd.DataFrame([loginfo], columns=self.log_cols)])
558 | 
559 |             if sample_period is not None:
560 |                 df = df.resample(sample_period).agg(agg_method)
561 |                 df = df.rename(columns={df.columns[0]: f'{parameter_code}_{sample_period}_{agg_method}'})
562 |             #else:
563 |                 #df = df.rename(columns = {df.columns[4]: f'{parameter_code}',
564 |                 #                          df.columns[5]: 'code'})
565 | 
566 |         else:
567 |             print('No data at this site during this timeframe.')
568 | 
569 |         if len(df) > 2:
570 |             return df
571 |         else:
572 |             return None
573 | 
574 |     def get_measurements(self, site_number, txt='measurements', data_format='rdb'):
575 |         """Retrieves field measurements for a site.
576 | 
577 |         Parameters
578 |         ----------
579 |         site_number : str
580 |             USGS site number
581 |         txt : str
582 |             String in url specifying type of measurement
583 |                 measurements : field measurements
584 |                 dv : daily values
585 |                 gwlevels : groundwater levels
586 |                 qwdata : water quality data
587 |         data_format : str, {'rdb', 'rdb_expanded'}
588 |             NWIS format for returned data.
589 |             'rdb': Tab-separated data without channel data
590 |             'rdb_expanded': Tab-separated data with channel data
591 |             Default is 'rdb'.
592 | 
593 |         Returns
594 |         -------
595 |         dv: a datetime-indexed dataframe of the measurements
596 |         """
597 | 
598 |         url = self.make_measurements_url(site_number, txt, data_format=data_format)
599 |         sitefile_text = urlopen(url).readlines()
600 |         skiprows = self.get_header_length(sitefile_text, 'agency_cd')
601 |         cols = sitefile_text[skiprows - 2].decode('utf-8').strip().split('\t')
602 |         loginfo = [str(site_number), url, self.get_datetime_retrieved(sitefile_text)]
603 |         try:
604 |             df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=cols,
605 |                              dtype={'site_no': object})
606 |         except pd.errors.ParserError:
607 |             # try fixing any bad line errors
608 |             def line_fixer(x):
609 |                 expected_length = len(cols)
610 |                 # truncate long lines
611 |                 if len(x) > expected_length:
612 |                     return x[:expected_length]
613 |                 # pad short lines with None
614 |                 elif len(x) < expected_length:
615 |                     return x + [None] * (expected_length - len(x))
616 |                 return None
617 |             df = pd.read_csv(url, sep='\t', skiprows=skiprows, header=None, names=cols,
618 |                              dtype={'site_no': object},
619 |                              engine='python', on_bad_lines=line_fixer)
620 |         if len(df) > 0:
621 |             df.index = pd.to_datetime(df[self._get_date_col(df)])
622 |             loginfo.append(True)
623 |         else:
624 |             loginfo.append(False)
625 |         self.log = pd.concat([self.log,
626 |                               pd.DataFrame([loginfo], columns=self.log_cols)])
627 |         return df
628 | 
629 |     def get_all_measurements(self, site_numbers, txt='measurements', data_format='rdb'):
630 |         """Get measurements for a list of site numbers.
631 | 
632 |         Parameters
633 |         ----------
634 |         site_numbers : list or 1D array
635 |             USGS site numbers
636 |         txt : str
637 |             String in url specifying type of measurement
638 |                 measurements : field measurements
639 |                 dv : daily values
640 |                 gwlevels : groundwater levels
641 |                 qwdata : water quality data
642 |         data_format : str, {'rdb', 'rdb_expanded'}
643 |             NWIS format for returned data.
644 |             'rdb': Tab-separated data without channel data
645 |             'rdb_expanded': Tab-separated data with channel data
646 |             Default is 'rdb'.
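
        Examples
        --------
        Sketch, assuming an Nwis instance ``nwis`` (the site numbers here
        are arbitrary):

        >>> fm = nwis.get_all_measurements(['04087088', '04087120'])  # doctest: +SKIP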
647 | 
648 |         """
649 |         all_measurements = pd.DataFrame()
650 |         for s in site_numbers:
651 |             print(s)
652 |             df = self.get_measurements(s, txt=txt, data_format=data_format)
653 |             if len(df) == 0:
654 |                 print('no data returned.')
655 |                 continue
656 |             df.index = pd.MultiIndex.from_product([[df.site_no.values[0]], df.index.values],
657 |                                                   names=['site_no', 'datetime'])
658 |             df['measurement_dt'] = pd.to_datetime(df[self._get_date_col(df)])
659 |             all_measurements = pd.concat([all_measurements, df])
660 |         if self.write_log_file:
661 |             out_logfile = 'retrieved_{}_log_{}.csv'.format(txt, time.strftime('%Y%m%d%H%M%S'))
662 |             self.log.to_csv(out_logfile, index=False)
663 |             print('Log of query saved to {}'.format(out_logfile))
664 |             self.log = pd.DataFrame(columns=self.log_cols) # reset the log
665 |         return all_measurements
666 | 
667 |     def get_all_dvs(self, stations, parameter_code='00060', start_date='1880-01-01', end_date=None):
668 |         all_dvs = {}
669 |         for station in stations:
670 |             try:
671 |                 df = self.get_dvs(station, parameter_code=parameter_code, start_date=start_date, end_date=end_date)
672 |             except Exception as e:
673 |                 print(e)
674 |                 continue
675 |             all_dvs[station] = df
676 |         if self.write_log_file:
677 |             out_logfile = 'retrieved_{}_dvs_log_{}.csv'.format(parameter_code,
678 |                                                                time.strftime('%Y%m%d%H%M%S'))
679 |             self.log.to_csv(out_logfile, index=False)
680 |             print('Log of query saved to {}'.format(out_logfile))
681 |             self.log = pd.DataFrame(columns=self.log_cols) # reset the log
682 |         return all_dvs
683 | 
684 |     def get_all_ivs(self, stations, parameter_code='00060', start_date='2000-01-01', end_date='2000-12-31'):
685 |         '''Gets instantaneous values for a list of station IDs and places them in a dictionary of dataframes, keyed by station.
686 |         '''
687 | 
688 |         all_ivs = {}
689 |         for station in stations:
690 |             try:
691 |                 df = self.get_ivs(station, parameter_code=parameter_code, start_date=start_date, end_date=end_date)
692 |             except Exception as e:
693 |                 print(e)
694 |                 continue
695 |             all_ivs[station] = df
696 |         if self.write_log_file:
697 |             out_logfile = 'retrieved_{}_ivs_log_{}.csv'.format(parameter_code,
698 |                                                                time.strftime('%Y%m%d%H%M%S'))
699 |             self.log.to_csv(out_logfile, index=False)
700 |             print('Log of query saved to {}'.format(out_logfile))
701 |             self.log = pd.DataFrame(columns=self.log_cols) # reset the log
702 |         return all_ivs
703 | 
704 |     def number_of_sites_measured_by_year(self, df):
705 |         """Computes the number of sites measured in each year. The dataframe is grouped by year,
706 |         then by site, and the number of unique sites measured in each year is counted.
707 | 
708 |         Parameters
709 |         ----------
710 |         df:
711 |             Dataframe of NWIS field measurement values indexed by datetime
712 | 
713 |         Returns: nm
714 |             Series of the number of sites measured, indexed by year
715 |         """
716 |         grouped = df.groupby(df.measurement_dt.dt.year)
717 |         grouped = [(y, g.groupby('site_no').agg('mean')) for y, g in grouped]
718 |         nmeasurements = [(y, len(g)) for y, g in grouped]
719 |         nm = pd.DataFrame(nmeasurements, columns=['year', 'n'])
720 |         nm.index = nm.year
721 |         return nm['n']
722 | 
723 |     def write_shp(self, df, shpname='NWIS_export.shp', **kwargs):
724 |         """Write a shapefile of points from NWIS site file
725 | 
726 |         Parameters
727 |         ----------
728 |         df: dataframe
729 |             dataframe of site info, must have dec_long_va and dec_lat_va columns with lon/lat in DD
730 | 
731 |         shpname: string
732 |             Name for output shapefile
733 | 
734 |         Notes
735 |         -----
736 |         NAD83 is assumed for dec_long_va and dec_lat_va.
737 |         If some entries are in NAD27, a difference of ~5 to >15m will result for WI
738 |         (see http://en.wikipedia.org/wiki/North_American_Datum#/media/File:Datum_Shift_Between_NAD27_and_NAD83.png)
739 |         """
740 |         shpdf = df.copy()
741 |         shpdf['geometry'] = [Point(r.dec_long_va, r.dec_lat_va) for i, r in shpdf.iterrows()]
742 |         gisutils.df2shp(shpdf, shpname, epsg=4269)
743 | 
744 |     @staticmethod
745 |     def correct_stationID(stationID):
746 |         try:
747 |             if 1 < int(str(stationID)[0]) < 10 and len(str(stationID)) < 15:
748 |                 return '0{}'.format(stationID)
749 |         except ValueError:
750 |             pass  # non-numeric station ID; fall through and return it as a string
751 |         return str(stationID)
752 | 
753 | '''
754 | field measurements url:
755 | 
756 | url = "http://waterdata.usgs.gov/nwis/measurements? \
757 | nw_longitude_va=-91.497& \
758 | nw_latitude_va=46.748&
759 | se_longitude_va=-90.228&
760 | se_latitude_va=46.156&
761 | coordinate_format=decimal_degrees&
762 | group_key=NONE&
763 | format=sitefile_output&
764 | sitefile_output_format=rdb&
765 | column_name=agency_cd&
766 | column_name=site_no&
767 | column_name=station_nm&
768 | column_name=site_tp_cd&
769 | column_name=lat_va&
770 | column_name=long_va&
771 | column_name=dec_lat_va&
772 | column_name=dec_long_va&
773 | column_name=coord_meth_cd&
774 | column_name=coord_acy_cd&
775 | column_name=coord_datum_cd&
776 | column_name=dec_coord_datum_cd&
777 | column_name=district_cd&
778 | column_name=state_cd&
779 | column_name=county_cd&
780 | column_name=country_cd&
781 | column_name=land_net_ds&
782 | column_name=map_nm&
783 | column_name=map_scale_fc&
784 | column_name=alt_va&
785 | column_name=alt_meth_cd&
786 | column_name=alt_acy_va&
787 | column_name=alt_datum_cd&
788 | column_name=huc_cd&
789 | column_name=basin_cd&
790 | column_name=topo_cd&
791 | column_name=data_types_cd&
792 | column_name=instruments_cd&
793 | column_name=construction_dt&
794 | column_name=inventory_dt&
795 | column_name=drain_area_va&
796 | column_name=contrib_drain_area_va&
797 | column_name=tz_cd&
798 | column_name=local_time_fg&
799 | column_name=reliability_cd&
800 | column_name=gw_file_cd&
801 | column_name=nat_aqfr_cd&
802 | column_name=aqfr_cd&
803 | column_name=aqfr_type_cd&
804 | column_name=well_depth_va&
805 | column_name=hole_depth_va&
806 | column_name=depth_src_cd&
807 | column_name=project_no&
808 | column_name=rt_bol&
809 | column_name=peak_begin_date&
810 | column_name=peak_end_date&
811 | column_name=peak_count_nu&
812 | column_name=qw_begin_date&column_name=qw_end_date&column_name=qw_count_nu&column_name=gw_begin_date&column_name=gw_end_date&column_name=gw_count_nu&column_name=sv_begin_date&column_name=sv_end_date&column_name=sv_count_nu&set_logscale_y=1&channel_html_info=0&date_format=YYYY-MM-DD&channel_rdb_info=0&rdb_compression=file&list_of_search_criteria=lat_long_bounding_box"
813 | 
814 | Daily values url
815 | http://waterdata.usgs.gov/nwis/dv?referred_module=sw&site_tp_cd=ST&nw_longitude_va=-91&nw_latitude_va=47&se_longitude_va=-90&se_latitude_va=46&coordinate_format=decimal_degrees&group_key=NONE&format=sitefile_output&sitefile_output_format=rdb&column_name=agency_cd&column_name=site_no&column_name=station_nm&range_selection=days&period=365&begin_date=2014-04-14&end_date=2015-04-13&date_format=YYYY-MM-DD&rdb_compression=file&list_of_search_criteria=lat_long_bounding_box
816 | '''
817 | 
--------------------------------------------------------------------------------
/pydrograph/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/pydrograph/tests/__init__.py
--------------------------------------------------------------------------------
/pydrograph/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 | import shutil
4 | import pytest
5 | from .test_examples import nwis_instance, extent_poly, field_sites
6 | 
7 | 
8 | @pytest.fixture(scope="session")
9 | def project_root_path():
10 |     filepath = os.path.split(os.path.abspath(__file__))[0]
11 |     return os.path.normpath(os.path.join(filepath, '../../'))
12 | 
13 | 
14 | @pytest.fixture(scope="session")
15 | def test_data_path(project_root_path):
16 |     """Path to the test data folder
17 |     (pydrograph/tests/data).
18 |     """
19 |     return Path(project_root_path, 'pydrograph/tests/data')
20 | 
21 | 
22 | @pytest.fixture(scope="session", autouse=True)
23 | def tmpdir(project_root_path):
24 |     folder = project_root_path + '/pydrograph/tests/tmp'
25 |     if os.path.isdir(folder):
26 |         shutil.rmtree(folder)
27 |     os.makedirs(folder)
28 |     return folder
--------------------------------------------------------------------------------
/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.dbf
--------------------------------------------------------------------------------
/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.prj:
--------------------------------------------------------------------------------
1 | PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Albers"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-96.0],PARAMETER["Standard_Parallel_1",29.5],PARAMETER["Standard_Parallel_2",45.5],PARAMETER["Latitude_Of_Origin",23.0],UNIT["Meter",1.0]]
--------------------------------------------------------------------------------
/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.shp
--------------------------------------------------------------------------------
/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/pydrograph/tests/data/cacheeastcr_chd_perimeter_bufferinside50m.shx
--------------------------------------------------------------------------------
/pydrograph/tests/test.py:
--------------------------------------------------------------------------------
1 | import fiona
2 | from shapely.geometry import shape
3 | import pytest
4 | from gisutils import shp2df, project
5 | from pydrograph.baseflow import get_upstream_area
6 | 
7 | 
8 | @pytest.mark.skip(reason="need small test versions of input files")
9 | def test_get_upstream_area():
10 | 
11 |     catchments = ['/Users/aleaf/Documents/NHDPlus/NHDPlusGL/NHDPlus04/NHDPlusCatchment/Catchment.shp',
12 |                   '/Users/aleaf/Documents/NHDPlus/NHDPlusMS/NHDPlus07/NHDPlusCatchment/Catchment.shp']
13 |     plusflow = ['/Users/aleaf/Documents/NHDPlus/NHDPlusGL/NHDPlus04/NHDPlusAttributes/PlusFlow.dbf',
14 |                 '/Users/aleaf/Documents/NHDPlus/NHDPlusMS/NHDPlus07/NHDPlusAttributes/PlusFlow.dbf']
15 |     nodasites = '/Users/aleaf/Documents/USFS/Nicolet/targets/north/flux_field_no_da.shp'
16 |     flowlines = ['/Users/aleaf/Documents/NHDPlus/NHDPlusGL/NHDPlus04/NHDSnapshot/Hydrography/NHDFlowline.shp',
17 |                  '/Users/aleaf/Documents/NHDPlus/NHDPlusMS/NHDPlus07/NHDSnapshot/Hydrography/NHDFlowline.shp']
18 |     nearfield = '/Users/aleaf/Documents/USFS/Nicolet/shps/Nicolet_north_NF.shp'
19 | 
20 |     nf = shape(next(iter(fiona.open(nearfield)))['geometry'])
21 |     nf = project(nf, '+init=epsg:26716', '+init=epsg:4269')
22 |     bbox = nf.bounds
23 | 
24 |     noda = shp2df(nodasites)
25 | 
26 |     get_upstream_area(noda.geometry.tolist(), plusflow, flowlines, catchments, nf)
27 | 
28 | 
29 | if __name__ == '__main__':
30 |     test_get_upstream_area()
31 | 
--------------------------------------------------------------------------------
/pydrograph/tests/test_baseflow.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | import pytest
4 | from pydrograph.baseflow import IHmethod
5 | 
6 | 
7 | @pytest.fixture(scope='function')
8 | def test_data(test_data_path):
9 |     data = pd.read_csv(test_data_path / 'UV_04087088_Discharge_20071001_ab.csv')
10 |     data.index = pd.to_datetime(data.date)
11 |     return data
12 | 
13 | 
14 | @pytest.mark.parametrize('freq', ('D', '6H'))
15 | @pytest.mark.parametrize('interp_semilog', (False, True))
16 | @pytest.mark.parametrize('block_length', [1, 2, 3])
17 | def test_IHmethod(test_data, block_length, interp_semilog, freq):
18 |     results = IHmethod(test_data.Q, block_length=block_length, tp=0.9,
19 |                        freq=freq,
20 |                        interp_semilog=interp_semilog)
21 |     minimum_points = ~results.block_Qmin.isna()
22 |     assert np.all(results.loc[minimum_points, 'block_Qmin'] <= results.loc[minimum_points, 'Q'])
23 | 
24 | 
25 | @pytest.mark.parametrize('data', (pytest.param(pd.Series(),
26 |                                                marks=pytest.mark.xfail(reason="index isn't datetime")),
27 |                                   pytest.param(pd.Series(index=pd.to_datetime([])),
28 |                                                marks=pytest.mark.xfail(reason="no data")),
29 |                                   pytest.param(pd.Series([15.2, 14.8, 14.5, 14.2],
30 |                                                          index=pd.date_range('2020-09-30', '2020-10-03')),
31 |                                                marks=pytest.mark.xfail(reason="not enough data for a full block")),
32 |                                   pd.Series([15.2, 14.8, 14.5, 14.2, 13.9, 13.8, 13.8, 13.8, 13.8, 13.8],
33 |                                             index=pd.date_range('2020-01-01', '2020-01-10'))
34 |                                   )
35 |                          )
36 | def test_IHmethod_with_not_enough_data(data):
37 |     results = IHmethod(data)
38 |     # smoke test: just verify that IHmethod runs without raising
39 | 
40 | 
41 | 
42 | 
43 | 
--------------------------------------------------------------------------------
/pydrograph/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | from shapely.geometry import box
4 | import pytest
5 | from gisutils import project, df2shp
6 | from pydrograph import Nwis
7 | 
8 | 
9 | @pytest.fixture(scope='session')
10 | def extent_poly():
11 |     extent_poly_ll = box(-92.7, 46.7, -92.6, 46.8)
12 | 
13 |     extent_poly = project(extent_poly_ll, 4269, "+init=epsg:26915")
14 |     df = pd.DataFrame({'geometry': [extent_poly],
15 |                        'id': [0]})
16 |     df2shp(df, 'examples/data/bbox.shp', epsg=26915)
17 | 
return extent_poly_ll 18 | 19 | 20 | @pytest.fixture(scope='session') 21 | def nwis_instance(extent_poly): 22 | nwis = Nwis(extent=extent_poly) 23 | 24 | assert nwis.extent.bounds == (-92.7, 46.7, -92.6, 46.8) 25 | return nwis 26 | 27 | 28 | def test_nwis_from_shapefile(nwis_instance, extent_poly): 29 | """Check that supplying a shapefile directly in a different CRS 30 | results in the same extent as supplying a shapely polygon 31 | in the same CRS.""" 32 | nwis = Nwis(extent='examples/data/bbox.shp') 33 | area_of_overlap = nwis.extent.intersection(nwis_instance.extent).area 34 | assert np.allclose(area_of_overlap, nwis_instance.extent.area) 35 | assert np.allclose(area_of_overlap, nwis.extent.area) 36 | assert np.allclose(area_of_overlap, extent_poly.area) 37 | return nwis 38 | 39 | 40 | @pytest.fixture(scope='session') 41 | def field_sites(nwis_instance): 42 | field_sites = nwis_instance.get_siteinfo('field_measurements') 43 | return field_sites 44 | 45 | 46 | @pytest.fixture(scope='session') 47 | def gw_field_sites(nwis_instance): 48 | field_sites = nwis_instance.get_siteinfo('gwlevels') 49 | return field_sites 50 | 51 | 52 | @pytest.fixture(scope='session') 53 | def dv_sites(nwis_instance): 54 | dv_sites = nwis_instance.get_siteinfo('daily_values') 55 | return dv_sites 56 | 57 | 58 | @pytest.fixture(scope='session') 59 | def gw_dv_sites(nwis_instance): 60 | dv_sites = nwis_instance.get_siteinfo('gw_daily_values') 61 | return dv_sites 62 | 63 | 64 | def test_get_sw_sites(nwis_instance): 65 | nwis = nwis_instance 66 | field_sites = nwis.get_siteinfo('field_measurements') 67 | dv_sites = nwis.get_siteinfo('daily_values') 68 | assert isinstance(field_sites, pd.DataFrame) 69 | assert len(field_sites) > 0 70 | assert field_sites.site_no.dtype == object 71 | assert isinstance(dv_sites, pd.DataFrame) 72 | assert len(dv_sites) > 0 73 | assert dv_sites.site_no.dtype == object 74 | 75 | 76 | def test_get_gw_sites(nwis_instance): 77 | nwis = nwis_instance 78 | gwfield_sites = nwis.get_siteinfo('gwlevels') 79 | gwdv_sites = nwis.get_siteinfo('gw_daily_values') 80 | assert isinstance(gwfield_sites, pd.DataFrame) 81 | assert len(gwfield_sites) > 0 82 | assert gwfield_sites.site_no.dtype == object 83 | assert isinstance(gwdv_sites, pd.DataFrame) 84 | assert len(gwdv_sites) > 0 85 | assert gwdv_sites.site_no.dtype == object 86 | 87 | 88 | def test_get_daily_values_sw(nwis_instance, dv_sites): 89 | nwis = nwis_instance 90 | sites = dv_sites.site_no.tolist()[0:2] 91 | dvs = nwis.get_all_dvs(sites, start_date='1990-01-01') 92 | assert isinstance(dvs, dict) 93 | assert isinstance(dvs['04021520'], pd.DataFrame) 94 | 95 | 96 | def test_get_daily_values_gw(nwis_instance, gw_dv_sites): 97 | nwis = nwis_instance 98 | sites = gw_dv_sites.site_no.tolist()[0:2] 99 | dvs = nwis.get_all_dvs(sites, 'gwlevels', start_date='1990-01-01') 100 | assert isinstance(dvs, dict) 101 | assert isinstance(dvs['464222092403801'], pd.DataFrame) 102 | 103 | 104 | def test_get_single_site_sw(nwis_instance): 105 | nwis = nwis_instance 106 | df = nwis.get_dvs(4021520) 107 | assert isinstance(df, pd.DataFrame) 108 | 109 | 110 | def test_get_single_site_gw(nwis_instance): 111 | nwis = nwis_instance 112 | df = nwis.get_dvs(464322092401401, 'gwlevels') 113 | assert isinstance(df, pd.DataFrame) 114 | 115 | 116 | def test_make_url(nwis_instance): 117 | nwis = nwis_instance 118 | url = nwis.make_dv_url(4015475) 119 | assert isinstance(url, str) 120 | 121 | 122 | def test_make_url_gw(nwis_instance): 123 | nwis = nwis_instance 124 | url = 
nwis.make_dv_url(464322092401401, parameter_code=72019) 125 | assert isinstance(url, str) 126 | 127 | 128 | def test_get_field_measurements(nwis_instance, field_sites): 129 | nwis = nwis_instance 130 | sites = field_sites.site_no.tolist()[:5] 131 | fm = nwis.get_all_measurements(sites) 132 | assert isinstance(fm, pd.DataFrame) 133 | assert fm.site_no.dtype == object 134 | 135 | 136 | def test_get_expanded_field_measurements(nwis_instance, field_sites): 137 | nwis = nwis_instance 138 | sites = field_sites.site_no.tolist()[:5] 139 | fm = nwis.get_all_measurements(sites, data_format='rdb_expanded') 140 | assert isinstance(fm, pd.DataFrame) 141 | assert fm.site_no.dtype == object 142 | assert 'chan_width' in fm.columns 143 | 144 | 145 | def test_get_gw_field_measurements(nwis_instance, gw_field_sites): 146 | nwis = nwis_instance 147 | sites = gw_field_sites.site_no.tolist()[:5] 148 | fm = nwis.get_all_measurements(sites, txt='gwlevels') 149 | assert isinstance(fm, pd.DataFrame) 150 | assert fm.site_no.dtype == object 151 | 152 | 153 | 154 | 155 | 156 | -------------------------------------------------------------------------------- /pydrograph/tests/test_notebooks.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | 4 | import pytest 5 | 6 | 7 | def included_notebooks(): 8 | include = ['examples/Notebooks'] 9 | files = [] 10 | for folder in include: 11 | files += glob.glob(os.path.join(folder, '*.ipynb')) 12 | return sorted(files) 13 | 14 | 15 | @pytest.fixture(params=included_notebooks(), scope='module') 16 | def notebook(request): 17 | return request.param 18 | 19 | 20 | @pytest.fixture(scope='session') 21 | def kernel_name(): 22 | """Pick a Jupyter Notebook kernel from the ones available. 
23 | """ 24 | import jupyter_client 25 | M = jupyter_client.kernelspec.KernelSpecManager() 26 | specs = M.find_kernel_specs() 27 | 28 | # try using the first one of these kernels that is found 29 | try_kernel_names = ['test', 'pydrograph', 'gis'] 30 | for name in try_kernel_names: 31 | if name in specs: 32 | return name 33 | # otherwise use the first kernel listed in specs 34 | return list(specs.keys())[0] 35 | 36 | 37 | # even though test runs locally on Windows 10, and on Travis 38 | @pytest.mark.xfail(os.environ.get('APPVEYOR') == 'True', 39 | reason="jupyter kernel has timeout issue on appveyor for some reason") 40 | def test_notebook(notebook, kernel_name, tmpdir, project_root_path): 41 | # run autotest on each notebook 42 | notebook = os.path.join(project_root_path, notebook) 43 | path, fname = os.path.split(notebook) 44 | 45 | # save the rendered notebook to the documentation folder 46 | # so that nbsphinx can render it in the docs 47 | # the docs get built when the tests are run on travis 48 | # so successful execution of this tests will build the notebooks for the docs 49 | output_folder = os.path.join(project_root_path, 'docs/source/notebooks') 50 | 51 | cmd = ('jupyter ' + 'nbconvert ' 52 | '--ExecutePreprocessor.timeout=600 ' 53 | '--ExecutePreprocessor.kernel_name={} '.format(kernel_name) + 54 | '--to ' + 'notebook ' 55 | '--execute ' + '{} '.format(notebook) + 56 | '--output-dir ' + '{} '.format(output_folder) + 57 | '--output ' + '{}'.format(fname)) 58 | ival = os.system(cmd) 59 | assert ival == 0, 'could not run {}'.format(os.path.abspath(notebook)) 60 | -------------------------------------------------------------------------------- /pydrograph/tests/test_nwis.py: -------------------------------------------------------------------------------- 1 | from shapely.geometry import box, Polygon 2 | import pytest 3 | from gisutils import project 4 | from pydrograph import Nwis 5 | import numpy as np 6 | 7 | @pytest.fixture(scope='session') 8 | def extent_poly(): 9 | extent_poly = box(390000, 1330000, 500000, 1455000) 10 | extent_poly_ll = project(extent_poly, "+init=epsg:{}".format(5070), "+init=epsg:4269") 11 | return extent_poly_ll 12 | 13 | @pytest.fixture(scope='session') 14 | def nwis_instance(extent_poly): 15 | nwis_instance = Nwis(extent=extent_poly) 16 | return nwis_instance 17 | 18 | @pytest.fixture(scope='session') 19 | def field_sites(nwis_instance): 20 | field_sites = nwis_instance.get_siteinfo('field_measurements') 21 | return field_sites 22 | 23 | @pytest.fixture(scope='session') 24 | def stations(nwis_instance): 25 | stations = nwis_instance.get_iv_siteinfo(attributes='iv_attributes') 26 | stations = stations.site_no.unique() 27 | #stations = stations.to_list() 28 | return stations 29 | 30 | @pytest.mark.parametrize('shapefile', ( 31 | # esri? 
shapefile that failed with old-style fiona CRS handling
32 |     ('cacheeastcr_chd_perimeter_bufferinside50m.shp'),
33 | ))
34 | def test_extent_shapefile(test_data_path, shapefile):
35 |     shapefile = test_data_path / shapefile
36 |     nwis_instance = Nwis(extent=shapefile)
37 |     assert isinstance(nwis_instance.extent, Polygon)
38 | 
39 | def test_compute_geometries(extent_poly, nwis_instance, field_sites):
40 |     geoms = nwis_instance._compute_geometries(field_sites)
41 |     assert all([g.within(extent_poly) for g in geoms])
42 | 
43 | def test_instantaneous_value(nwis_instance):
44 |     #make sure all inputs are available for this function
45 |     #check that it creates a dataframe (use assert and exists)
46 |     df = nwis_instance.get_iv_siteinfo(attributes='iv_attributes')
47 |     assert len(df) > 0
48 |     assert 'site_no' in df.columns
49 |     assert df.site_no.dtype == object
50 | 
51 | def test_tuple_extent_no_data():
52 | 
53 |     bbox = (-91.45793026894977, 47.2,
54 |             -90.20509548401013, 47.3)
55 |     nwis = Nwis(extent=bbox)
56 |     assert np.allclose(bbox, nwis.extent.bounds)
57 |     gwdv_sites = nwis.get_siteinfo('gwdv')
58 |     assert gwdv_sites is None
59 | 
60 | def test_get_all_ivs(nwis_instance, stations):
61 |     all_sites = nwis_instance.get_all_ivs(stations)
62 |     site_one = list(all_sites.values())[0]
63 |     assert len(all_sites) > 0
64 |     assert len(site_one) > 2
65 | 
66 | def test_get_measurements(nwis_instance):
67 | 
68 |     # as of 4/6/2023,
69 |     # the rdb data for this site had an extra \t on one line
70 |     nwis_instance.get_measurements(
71 |         '07029500', data_format='rdb_expanded')
--------------------------------------------------------------------------------
/pydrograph/tests/test_readme.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pydrograph
3 | from ..attributes import streamflow_attributes
4 | 
5 | 
6 | def test_get_info_from_bounding_box(tmpdir):
7 |     ll_bbox = [-91.497, 46.748, -90.228, 46.156] # nw lon, nw lat, se lon, se lat
8 | 
9 |     nwis = pydrograph.Nwis(ll_bbox)
10 | 
11 |     # Generate a url to get field measurements for the bounding box
12 |     url = nwis.make_site_url('field_measurements', streamflow_attributes)
13 | 
14 |     # Get a dataframe of site information for the bounding box (url is generated internally)
15 |     fm_siteinfo = nwis.get_siteinfo('field_measurements', streamflow_attributes)
16 | 
17 |     # Write the site information out to a shapefile
18 |     nwis.write_shp(fm_siteinfo, '{}/NWIS_field_measurements.shp'.format(tmpdir))
19 | 
20 |     # Get site information for daily values
21 |     dv_siteinfo = nwis.get_siteinfo('dv', streamflow_attributes)
22 | 
23 | 
24 | def test_readme(tmpdir):
25 | 
26 |     # copy the python snippet in readme to a .py file in tests output folder
27 |     dest_py_file = os.path.join(tmpdir, 'test_readme.py')
28 |     with open('README.md') as src:
29 |         with open(dest_py_file, 'w') as dest:
30 |             for line in src:
31 |                 if "```python" in line:
32 |                     for line in src:
33 |                         if "```" in line:
34 |                             break
35 |                         dest.write(line)
36 | 
37 |     # execute the .py file
38 |     wd = os.getcwd()
39 |     os.chdir(tmpdir)
40 |     ival = os.system('python test_readme.py')
41 |     assert ival == 0, 'could not run python code in README.md'
42 |     os.chdir(wd)
--------------------------------------------------------------------------------
/pydrograph/tests/test_rivergages.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aleaf/pydrograph/615ef85a5308350b6c0a8e7c4009653fc521777c/pydrograph/tests/test_rivergages.py
-------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # These are required for developing the package (running the tests, building 2 | # the documentation) but not necessarily required for _using_ it. 3 | ipython 4 | jupyter 5 | notebook 6 | ipykernel 7 | numpy 8 | pandas 9 | fiona 10 | shapely 11 | pyproj>=2.0 12 | codecov 13 | coverage 14 | flake8 15 | pytest 16 | sphinx 17 | pip 18 | gis-utils 19 | # These are dependencies of various sphinx extensions for documentation. 20 | matplotlib 21 | numpydoc 22 | sphinx-copybutton 23 | sphinx_rtd_theme 24 | -------------------------------------------------------------------------------- /requirements-dev.yml: -------------------------------------------------------------------------------- 1 | name: pydrograph_dev 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python 7 | - ipython 8 | - jupyter 9 | - notebook 10 | - ipykernel 11 | - numpy 12 | - scipy 13 | - pandas 14 | - xmltodict 15 | - fiona 16 | - shapely 17 | - pyproj>=2.0 18 | - pytest 19 | - codecov 20 | - coverage 21 | - flake8 22 | - sphinx 23 | - numpydoc 24 | - nbsphinx # for rendering notebooks in sphinx-generated docs 25 | - sphinx-copybutton 26 | - sphinx_rtd_theme 27 | - twine # for uploading releases to PyPI 28 | - pip 29 | - pip: 30 | - gis-utils 31 | - doctr # for publishing docs via Travis 32 | - versioneer # semantic versioning 33 | 34 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # List required packages in this file, one per line. 2 | ipython 3 | jupyter 4 | notebook 5 | ipykernel 6 | matplotlib 7 | numpy 8 | pandas 9 | fiona 10 | shapely 11 | pyproj>=2.0 12 | pip 13 | gis-utils 14 | -------------------------------------------------------------------------------- /requirements.yml: -------------------------------------------------------------------------------- 1 | name: pydrograph 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python 7 | - ipython 8 | - jupyter 9 | - notebook 10 | - ipykernel 11 | - matplotlib 12 | - numpy 13 | - scipy 14 | - pandas 15 | - xmltodict 16 | - fiona 17 | - shapely 18 | - pyproj>=2.0 19 | - pip 20 | - pip: 21 | - gis-utils 22 | 23 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440-post 4 | versionfile_source = pydrograph/_version.py 5 | versionfile_build = pydrograph/_version.py 6 | tag_prefix = v 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | from setuptools import setup, find_packages 3 | import sys 4 | import versioneer 5 | 6 | 7 | # NOTE: This file must remain Python 2 compatible for the foreseeable future, 8 | # to ensure that we error out properly for people with outdated setuptools 9 | # and/or pip. 10 | min_version = (3, 7) 11 | if sys.version_info < min_version: 12 | error = """ 13 | pydrograph does not support Python {0}.{1}. 14 | Python {2}.{3} and above is required. Check your Python version like so: 15 | 16 | python3 --version 17 | 18 | This may be due to an out-of-date pip. 
Make sure you have pip >= 9.0.1. 19 | Upgrade pip like so: 20 | 21 | pip install --upgrade pip 22 | """.format(*(sys.version_info[:2] + min_version)) 23 | sys.exit(error) 24 | 25 | here = path.abspath(path.dirname(__file__)) 26 | 27 | with open(path.join(here, 'long_description.rst'), encoding='utf-8') as readme_file: 28 | readme = readme_file.read() 29 | 30 | with open(path.join(here, 'requirements.txt')) as requirements_file: 31 | # Parse requirements.txt, ignoring any commented-out lines. 32 | requirements = [line for line in requirements_file.read().splitlines() 33 | if not line.startswith('#')] 34 | 35 | 36 | setup( 37 | name='pydrograph', 38 | version=versioneer.get_version(), 39 | cmdclass=versioneer.get_cmdclass(), 40 | description="Package for getting and processing stream flow and groundwater level measurements from the USGS National Water Information System (NWIS). ", 41 | long_description=readme, 42 | author="Andrew Leaf", 43 | author_email='aleaf@usgs.gov', 44 | url='https://github.com/aleaf/pydrograph', 45 | python_requires='>={}'.format('.'.join(str(n) for n in min_version)), 46 | packages=find_packages(exclude=['docs', 'tests']), 47 | entry_points={ 48 | 'console_scripts': [ 49 | # 'command = some.module:some_function', 50 | ], 51 | }, 52 | include_package_data=True, 53 | package_data={ 54 | 'pydrograph': [ 55 | # When adding files here, remember to update MANIFEST.in as well, 56 | # or else they will not be included in the distribution on PyPI! 57 | # 'path/to/data_file', 58 | ] 59 | }, 60 | install_requires=requirements, 61 | license="BSD (3-clause)", 62 | classifiers=[ 63 | 'Development Status :: 2 - Pre-Alpha', 64 | 'Natural Language :: English', 65 | 'Programming Language :: Python :: 3', 66 | ], 67 | ) 68 | --------------------------------------------------------------------------------