├── .bumpversion.cfg ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml ├── release.yml └── workflows │ ├── cd.yml │ ├── ci.yaml │ └── main.yml ├── .gitignore ├── .readthedocs.yaml ├── CHANGELOG.rst ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── docker-compose.yml ├── docs ├── Makefile ├── _config.yml ├── _templates │ └── layout.html ├── _toc.yml ├── api.rst ├── cheat_sheet.rst ├── community │ ├── authors.rst │ ├── contributing.rst │ ├── how_to_contribute.rst │ └── index.rst ├── conf.py ├── data_notes.rst ├── database_structure.rst ├── gallery │ ├── api_intro_example.ipynb │ ├── api_plot_pit_density_example.ipynb │ ├── camera_derived_snow_depths_example.ipynb │ ├── card-images │ │ ├── camera-snow-depths.png │ │ ├── compare-SSA-instruments.png │ │ ├── compare-UAVSAR-depths.png │ │ ├── graupel-pits.png │ │ ├── graupel-smp.png │ │ ├── plot-raster.png │ │ ├── plot-swe.png │ │ ├── raster-union-and-more.png │ │ └── spiral.png │ ├── compare_SSA_instruments_example.ipynb │ ├── compare_UAVSAR_to_depths_example.ipynb │ ├── get_spiral_example.ipynb │ ├── getting_started_example.ipynb │ ├── graupel_pits_example.ipynb │ ├── graupel_smp_example.ipynb │ ├── index.md │ ├── overview_example.ipynb │ ├── plot_pit_swe_example.ipynb │ ├── plot_raster_example.ipynb │ ├── raster_union_and_more_example.ipynb │ └── what_is_in_the_db_example.ipynb ├── history.rst ├── images │ ├── aso_depths_with_sites_example.png │ ├── connect_db_qgis.png │ ├── gpr_example.png │ ├── pit_spiral.png │ └── qgis_db_setup.png ├── logo.png ├── make.bat ├── qgis.rst ├── readme.rst ├── requirements.txt └── usage.rst ├── pyproject.toml ├── snowexsql ├── __init__.py ├── api.py ├── conversions.py ├── db.py ├── functions.py ├── tables │ ├── __init__.py │ ├── base.py │ ├── image_data.py │ ├── layer_data.py │ ├── point_data.py │ └── site_data.py └── utilities.py └── tests ├── __init__.py ├── credentials.json ├── data ├── LWC.csv ├── LWC2.csv ├── README.txt ├── S06M0874_2N12_20200131.CSV ├── S19M1013_5S21_20200201.CSV ├── SSA.csv ├── be_gm1_0287 │ ├── ..aux.xml │ ├── dblbnd.adf │ ├── hdr.adf │ ├── metadata.xml │ ├── prj.adf │ ├── sta.adf │ ├── w001001.adf │ └── w001001x.adf ├── be_gm1_0328 │ ├── dblbnd.adf │ ├── hdr.adf │ ├── metadata.xml │ ├── prj.adf │ ├── sta.adf │ ├── w001001.adf │ └── w001001x.adf ├── density.csv ├── depths.csv ├── gpr.csv ├── pole_depths.csv ├── site_5S21.csv ├── site_details.csv ├── smp_log.csv ├── stratigraphy.csv ├── temperature.csv ├── uavsar.ann ├── uavsar │ ├── uavsar_utm.amp1.real.tif │ ├── uavsar_utm.amp2.real.tif │ ├── uavsar_utm.cor.real.tif │ ├── uavsar_utm.int.imaginary.tif │ └── uavsar_utm.int.real.tif ├── uavsar_latlon.amp1.grd ├── uavsar_latlon.amp1.real.tif ├── uavsar_latlon.amp2.grd ├── uavsar_latlon.ann ├── uavsar_latlon.cor.grd └── uavsar_latlon.int.grd ├── map.html ├── scratch.py ├── sql_test_base.py ├── test_api.py ├── test_conversions.py ├── test_db.py └── test_functions.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.5.0 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | 10 | [bumpversion:file:snowexsql/__init__.py] 11 | search = __version__ = '{current_version}' 12 | replace = __version__ = '{new_version}' 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | 
20 | [aliases] 21 | test = pytest 22 | 23 | [tool:pytest] 24 | collect_ignore = ['setup.py'] 25 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @micah-prime @micahjohnson150 @jomey -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Sample code that reproduces the behavior 15 | ``` 16 | Your code here 17 | ``` 18 | 19 | **Error Message** 20 | Any error messages you get 21 | 22 | **Expected behavior** 23 | A clear and concise description of what you expected to happen. 24 | 25 | **Desktop (please complete the following information):** 26 | - OS: [e.g. Mac, Linux, Windows] 27 | - Package Version 28 | 29 | **Additional context** 30 | Add any other context about the problem here. 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the challenge is. 12 | Ex. It would make things easier/better when [...] 13 | Or is this a data request? 14 | 15 | **Describe the solution you'd like** 16 | A clear and concise description of what you want to happen. 17 | Have some sample syntax? 18 | ``` 19 | Some code here? 20 | ``` 21 | 22 | **Additional context** 23 | Add any other context that helps understand the intention. 
24 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "monthly" 8 | groups: 9 | actions: 10 | patterns: 11 | - "*" 12 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - dependabot 5 | - pre-commit-ci 6 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: CD 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - master 8 | - main 9 | release: 10 | types: 11 | - published 12 | 13 | concurrency: 14 | group: ${{ github.workflow }}-${{ github.ref }} 15 | cancel-in-progress: true 16 | 17 | env: 18 | FORCE_COLOR: 3 19 | 20 | jobs: 21 | dist: 22 | name: Distribution build 23 | runs-on: ubuntu-latest 24 | 25 | steps: 26 | - uses: actions/checkout@v4 27 | with: 28 | fetch-depth: 0 29 | 30 | - uses: hynek/build-and-inspect-python-package@v2 31 | 32 | test-built-dist: 33 | needs: [dist] 34 | name: Test built distribution 35 | runs-on: ubuntu-latest 36 | permissions: 37 | id-token: write 38 | steps: 39 | - uses: actions/setup-python@v5 40 | name: Install Python 41 | with: 42 | python-version: "3.10" 43 | - uses: actions/download-artifact@v4 44 | with: 45 | name: Packages 46 | path: dist 47 | - name: List contents of built dist 48 | run: | 49 | ls -ltrh 50 | ls -ltrh dist 51 | - name: Publish to Test PyPI 52 | uses: pypa/gh-action-pypi-publish@v1.12.4 53 | with: 54 | repository-url: https://test.pypi.org/legacy/ 55 | verbose: true 56 | skip-existing: true 57 | 58 | check-test-pypi: 59 | needs: [test-built-dist] 60 | name: Check Test Distribution Python ${{ matrix.python-version }} on ${{ matrix.runs-on }} 61 | runs-on: ${{ matrix.runs-on }} 62 | strategy: 63 | fail-fast: false 64 | matrix: 65 | python-version: ["3.8", "3.9", "3.10"] 66 | runs-on: [ubuntu-latest, macos-14] 67 | steps: 68 | - name: Check pypi packages 69 | run: | 70 | sleep 3 71 | python -m pip install --upgrade pip 72 | 73 | echo "=== Testing wheel file ===" 74 | # Install wheel to get dependencies and check import 75 | python -m pip install --extra-index-url https://test.pypi.org/simple --upgrade --pre snowexsql 76 | python -c "import snowexsql; print(snowexsql.__version__)" 77 | echo "=== Done testing wheel file ===" 78 | 79 | echo "=== Testing source tar file ===" 80 | # Install tar gz and check import 81 | python -m pip uninstall --yes snowexsql 82 | python -m pip install --extra-index-url https://test.pypi.org/simple --upgrade --pre --no-binary=snowexsql snowexsql 83 | python -c "import snowexsql; print(snowexsql.__version__)" 84 | echo "=== Done testing source tar file ===" 85 | 86 | publish: 87 | needs: [dist, test-built-dist] 88 | name: Publish to PyPI 89 | environment: pypi 90 | permissions: 91 | id-token: write 92 | runs-on: ubuntu-latest 93 | if: github.event_name == 'release' && github.event.action == 'published' 94 | 95 | steps: 96 | - uses: actions/download-artifact@v4 97 | with: 98 | name: Packages 99 | path: dist 100 | 101 | - uses: pypa/gh-action-pypi-publish@v1.12.4 102 | if: 
startsWith(github.ref, 'refs/tags') 103 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: Code Quality 2 | on: 3 | push: 4 | branches: [ master ] 5 | paths-ignore: 6 | - 'docs/**' 7 | pull_request: 8 | branches: [ master ] 9 | paths-ignore: 10 | - 'docs/**' 11 | jobs: 12 | run: 13 | runs-on: ubuntu-latest 14 | services: 15 | postgis: 16 | image: kartoza/postgis:14-3.2 17 | env: 18 | POSTGRES_PASSWORD: db_builder 19 | POSTGRES_USER: builder 20 | POSTGRES_DB: test 21 | TZ: 'UTC' 22 | PGTZ: 'UTC' 23 | POSTGIS_GDAL_ENABLED_DRIVERS: 'ENABLE_ALL' 24 | POSTGIS_ENABLE_OUTDB_RASTERS: 'True' 25 | ports: 26 | - 5432:5432 27 | volumes: 28 | - /home/runner/work/:/home/runner/work/ 29 | steps: 30 | - name: Checkout 31 | uses: actions/checkout@v4 32 | with: 33 | fetch-depth: 0 34 | - name: Set up Python 3.10 35 | uses: actions/setup-python@v5 36 | with: 37 | python-version: '3.10' 38 | - name: Install dependencies 39 | run: python3 -m pip install -e ".[dev]" 40 | - name: Run tests and collect coverage 41 | run: pytest --cov snowexsql --cov-report=xml 42 | - name: Upload coverage to Codecov 43 | uses: codecov/codecov-action@v5 44 | with: 45 | verbose: true 46 | env: 47 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 48 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Pytest 3 | 4 | # Controls when the action will run. 5 | on: 6 | # Triggers the workflow on push or pull request events but only for the master branch 7 | push: 8 | branches: [ master ] 9 | paths-ignore: 10 | - 'docs/**' 11 | pull_request: 12 | branches: [ master ] 13 | paths-ignore: 14 | - 'docs/**' 15 | 16 | jobs: 17 | build: 18 | runs-on: ubuntu-latest 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | python-version: [3.8, 3.9, '3.10'] 23 | 24 | services: 25 | 26 | postgis: 27 | image: kartoza/postgis:14-3.2 28 | env: 29 | POSTGRES_PASSWORD: db_builder 30 | POSTGRES_USER: builder 31 | POSTGRES_DB: test 32 | TZ: 'UTC' 33 | PGTZ: 'UTC' 34 | POSTGIS_GDAL_ENABLED_DRIVERS: 'ENABLE_ALL' 35 | POSTGIS_ENABLE_OUTDB_RASTERS: 'True' 36 | ports: 37 | - 5432:5432 38 | volumes: 39 | - /home/runner/work/:/home/runner/work/ 40 | 41 | steps: 42 | - uses: actions/checkout@v4 43 | - name: Set up Python ${{ matrix.python-version }} 44 | uses: actions/setup-python@v5 45 | with: 46 | python-version: ${{ matrix.python-version }} 47 | - name: Install dependencies 48 | run: | 49 | sudo apt-get update 50 | sudo apt-get install -y postgis gdal-bin 51 | python3 -m pip install --upgrade pip 52 | python3 -m pip install -e ".[dev]" 53 | - name: Test with pytest 54 | run: | 55 | pytest -s 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | .eggs/* 3 | snowexsql.egg-info/* 4 | dist/* 5 | *.db 6 | **/__pycache__/** 7 | *.cpg 8 | *.dbf 9 | *.prj 10 | *.shp 11 | *.shx 12 | 13 | docs/modules.rst 14 | docs/snowexsql.rst 15 | docs/_build/* 16 | 17 | **/.ipynb_checkpoints/* 18 | 19 | .coverage 20 | coverage.xml 21 | htmlcov/* 22 | scripts/upload/test*.txt 23 | .idea/ 24 | scripts/download/data/* 25 | *.egg-info 26 | 27 | # Version 28 | _version.py 29 | -------------------------------------------------------------------------------- 
/.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | jobs: 14 | post_checkout: 15 | # https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition 16 | - | 17 | if [ "$READTHEDOCS_VERSION_TYPE" = "external" ] && git diff --quiet origin/main -- docs/ .readthedocs.yaml; 18 | then 19 | exit 183; 20 | fi 21 | pre_build: 22 | # Generate the Sphinx configuration for this Jupyter Book so it builds. 23 | - "jupyter-book config sphinx docs" 24 | 25 | # Build documentation in the docs/ directory with Sphinx 26 | sphinx: 27 | configuration: docs/conf.py 28 | fail_on_warning: false 29 | 30 | # Optionally build your docs in additional formats such as PDF and ePub 31 | formats: [] 32 | 33 | # Optionally set the version of Python and requirements required to build your docs 34 | python: 35 | install: 36 | - method: pip 37 | path: . 38 | extra_requirements: 39 | - docs 40 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | ------------- 6 | Version 0.5.0 7 | ------------- 8 | * Brought in by PR `#98 <https://github.com/SnowEx/snowexsql/pull/98>`_ 9 | * Improvements made to retrieving single value queries. 10 | * Added in `from_unique_entries` to find unique options given a query 11 | * Added in more support around rasters. 12 | * Added in `RasterMeasurements.all_descriptions` to get unique descriptions 13 | * Added in checking for whether a raster query would generate data from more than one unique dataset 14 | * Added support for Geopandas > 1.0 15 | 16 | ------------- 17 | Version 0.4.0 18 | ------------- 19 | * PR `#82 <https://github.com/SnowEx/snowexsql/pull/82>`_ 20 | 21 | ------------------ 22 | 0.3.0 (2022-07-06) 23 | ------------------ 24 | * New columns were added to the LayerData table for flags 25 | * Converted surveyors to observers 26 | * Changed utm zone to be an integer 27 | 28 | ----------------------------- 29 | 0.2.0 Repo Split (2022-06-20) 30 | ----------------------------- 31 | * Repo was split into an access client and a db builder to reduce overhead 32 | * snowexsql is now an access client with Python tools to make life easy 33 | * snowex_db_ is now a repo containing all necessary assets to build the db. 34 | 35 | .. _snowex_db: https://github.com/SnowEx/snowex_db 36 | 37 | -------------------------- 38 | Hackweek 2021 (2021-07-15) 39 | -------------------------- 40 | * Fully deployed database with around 100 users 41 | * Uploaded with a fully reproducible DB using SnowEx data for Grand Mesa 2020 42 | * Timezones are all in Mountain Standard Time 43 | 44 | ------------------ 45 | 0.1.0 (2020-06-12) 46 | ------------------ 47 | * Project Created. 
48 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Covenant Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We as members, contributors, and leaders pledge to make participation in our 7 | community a harassment-free experience for everyone, regardless of age, body 8 | size, visible or invisible disability, ethnicity, sex characteristics, gender 9 | identity and expression, level of experience, education, socio-economic status, 10 | nationality, personal appearance, race, caste, color, religion, or sexual 11 | identity and orientation. 12 | 13 | We pledge to act and interact in ways that contribute to an open, welcoming, 14 | diverse, inclusive, and healthy community. 15 | 16 | ## Our Standards 17 | 18 | Examples of behavior that contributes to a positive environment for our 19 | community include: 20 | 21 | * Demonstrating empathy and kindness toward other people 22 | * Being respectful of differing opinions, viewpoints, and experiences 23 | * Giving and gracefully accepting constructive feedback 24 | * Accepting responsibility and apologizing to those affected by our mistakes, 25 | and learning from the experience 26 | * Focusing on what is best not just for us as individuals, but for the overall 27 | community 28 | 29 | Examples of unacceptable behavior include: 30 | 31 | * The use of sexualized language or imagery, and sexual attention or advances of 32 | any kind 33 | * Trolling, insulting or derogatory comments, and personal or political attacks 34 | * Public or private harassment 35 | * Publishing others' private information, such as a physical or email address, 36 | without their explicit permission 37 | * Other conduct which could reasonably be considered inappropriate in a 38 | professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our standards of 43 | acceptable behavior and will take appropriate and fair corrective action in 44 | response to any behavior that they deem inappropriate, threatening, offensive, 45 | or harmful. 46 | 47 | Community leaders have the right and responsibility to remove, edit, or reject 48 | comments, commits, code, wiki edits, issues, and other contributions that are 49 | not aligned to this Code of Conduct, and will communicate reasons for moderation 50 | decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also applies when 55 | an individual is officially representing the community in public spaces. 56 | Examples of representing our community include using an official email address, 57 | posting via an official social media account, or acting as an appointed 58 | representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported to the [community leaders](https://github.com/orgs/SnowEx/teams/db-project-admins). 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. 
Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series of 86 | actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or permanent 93 | ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within the 113 | community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.1, available at 119 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. 120 | 121 | Community Impact Guidelines were inspired by 122 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 123 | 124 | For answers to common questions about this code of conduct, see the FAQ at 125 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at 126 | [https://www.contributor-covenant.org/translations][translations]. 127 | 128 | [homepage]: https://www.contributor-covenant.org 129 | [v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html 130 | [Mozilla CoC]: https://github.com/mozilla/diversity 131 | [FAQ]: https://www.contributor-covenant.org/faq 132 | [translations]: https://www.contributor-covenant.org/translations 133 | 134 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | *********************** 2 | Contributing Guidelines 3 | *********************** 4 | 5 | .. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg 6 | :target: https://github.com/SnowEx/snowexsql/blob/master/CODE_OF_CONDUCT.md 7 | 8 | Thank you for your interest in contributing to snowexsql! We welcome 9 | contributions of any size from anyone regardless of career stage and level of 10 | coding experience. 11 | 12 | Here we provide a set of guidelines and information for contributing to 13 | snowexsql. 
This project is released with a Contributor Code of Conduct. By 14 | participating in this project you agree to abide by its terms. 15 | 16 | |Contributor Covenant| 17 | 18 | Ways to Contribute 19 | ================== 20 | 21 | Report a Bug 22 | ------------ 23 | 24 | Find the *Issues* tab at the top of the GitHub repository and click the *New Issue* button. Give your issue a clear title and describe the steps required to recreate it in as much detail as possible. If you can, include a small example that reproduces the error. More information and minimal examples will help us resolve issues faster. 25 | 26 | Asking a Question 27 | ----------------- 28 | 29 | Please do not create issues to ask for help. A faster way to reach the community is through our `snowex-sql Discussions page <https://github.com/SnowEx/snowexsql/discussions>`_. We are excited to have you join an existing conversation or start a new post! 30 | 31 | Write Documentation 32 | ------------------- 33 | 34 | Found a typo in the documentation or have a suggestion to make it clearer? Consider letting us know by creating an issue or (better yet!) submitting a fix. This is a great, low-stakes way to practice the pull request process! 35 | 36 | Discovered a currently untested case? Please share your test, either by creating an issue or submitting a pull request to add it to our suite of test cases. 37 | 38 | Add an Example 39 | ^^^^^^^^^^^^^^ 40 | 41 | Examples are a great way to show how to use snowexsql. If you have a use case that you think would be helpful to others, please consider adding it to the `examples directory `_. 42 | 43 | Request a Feature 44 | ----------------- 45 | 46 | The best way to request a feature is to file an issue at https://github.com/SnowEx/snowexsql/issues. 47 | 48 | When requesting a feature, please: 49 | 50 | - Explain in detail how it would work. 51 | - Keep the scope as narrow as possible, to make it easier to implement. 52 | - Let us know if this is something you would like to work on! 53 | 54 | Contribute Code 55 | --------------- 56 | 57 | .. _how to contribute: https://snowexsql.readthedocs.io/en/latest/community/how_to_contribute.html 58 | 59 | Please see our `how to contribute`_ guide to get started adding your code 60 | and ideas. 61 | 62 | Licensing 63 | ^^^^^^^^^ 64 | SnowExSQL is licensed under the `BSD-3 license `_. Contributed code will also be licensed under BSD-3. If you did not write the code yourself, it is your responsibility to ensure that the existing license is compatible and included in the contributed files, or that you have documented permission from the original author to relicense the code. 65 | 66 | Attribution 67 | =========== 68 | .. _list of current contributors: https://snowexsql.readthedocs.io/en/latest/community/authors.html 69 | 70 | See our `list of current contributors`_ who have helped shape this project. 
71 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include docs/authors.rst 2 | include docs/contributing.rst 3 | include docs/history.rst 4 | include LICENSE 5 | include README.rst 6 | include requirements.txt 7 | 8 | recursive-exclude tests * 9 | recursive-exclude * __pycache__ 10 | recursive-exclude * *.py[co] 11 | 12 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 13 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -f {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . -name '__pycache__' -exec rm -fr {} + 43 | 44 | clean-test: ## remove test and coverage artifacts 45 | rm -fr .tox/ 46 | rm -f .coverage 47 | rm -fr htmlcov/ 48 | rm -fr .pytest_cache 49 | 50 | lint: ## check style with flake8 51 | flake8 snowexsql tests 52 | 53 | test: ## run tests quickly with the default Python 54 | pytest 55 | 56 | test-all: ## run tests on every Python version with tox 57 | tox 58 | 59 | coverage: ## check code coverage quickly with the default Python 60 | coverage run --source snowexsql -m pytest 61 | coverage report -m 62 | coverage html 63 | $(BROWSER) htmlcov/index.html 64 | 65 | docs: ## generate Sphinx HTML documentation, including API docs 66 | rm -rf docs/api docs/_build 67 | sphinx-apidoc -o docs snowexsql 68 | rm -rf docs/modules.rst 69 | # for f in docs/api/*.rst; do\ 70 | # perl -pi -e 's/(module|package)$$// if $$. == 1' $$f ;\ 71 | # done 72 | $(MAKE) -C docs clean 73 | $(MAKE) -C docs html 74 | 75 | open-browser: 76 | $(BROWSER) docs/_build/html/index.html 77 | 78 | servedocs: docs ## compile the docs watching for changes 79 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
80 | 81 | release: dist ## package and upload a release 82 | twine upload dist/* 83 | 84 | dist: clean ## builds source and wheel package 85 | python setup.py sdist 86 | python setup.py bdist_wheel 87 | ls -l dist 88 | 89 | install: clean ## install the package to the active Python's site-packages 90 | python setup.py install 91 | 92 | line_count: 93 | cloc snowexsql tests scripts 94 | 95 | pep8: 96 | isort snowexsql/*.py tests/*.py scripts/*/*.py 97 | autopep8 --aggressive --in-place snowexsql/*.py tests/*.py scripts/*/*.py 98 | 99 | 100 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | Welcome to snowexsql 3 | ==================== 4 | 5 | .. image:: https://readthedocs.org/projects/snowexsql/badge/?version=latest 6 | :target: https://snowexsql.readthedocs.io/en/latest/?badge=latest 7 | :alt: Documentation Status 8 | 9 | .. image:: https://img.shields.io/github/workflow/status/SnowEx/snowexsql/snowexsql 10 | :target: https://github.com/SnowEx/snowexsql/actions/workflows/main.yml 11 | :alt: Testing Status 12 | 13 | .. image:: https://img.shields.io/pypi/v/snowexsql.svg 14 | :target: https://pypi.org/project/snowexsql/ 15 | :alt: PyPI Version 16 | 17 | .. image:: https://codecov.io/gh/SnowEx/snowexsql/graph/badge.svg?token=B27OKGBOTR 18 | :target: https://codecov.io/gh/SnowEx/snowexsql 19 | 20 | About 21 | ----- 22 | Database access and tools for using the `SnowEx database`_. This tool is 23 | simply a client for accessing the database using Python. 24 | 25 | .. _SnowEx database: https://www.github.com/SnowEx/snowex_db 26 | 27 | WARNING - This is under active development in preparation for SnowEx Hackweek. Use at your own risk. Data will change as it is QA/QC'd and the end goal is for all data in this database to be pulled from NSIDC. The goal is for this to become a community database open to all. 28 | 29 | 30 | Features 31 | -------- 32 | 33 | * Database access for the SnowEx Database 34 | * Analysis tools 35 | * Useful conversions to pandas and geopandas 36 | * Lots of examples_ 37 | 38 | .. _examples: https://snowexsql.readthedocs.io/en/latest/gallery/index.html 39 | 40 | 41 | Installing 42 | ---------- 43 | If you are just planning on using the database, then you only need to install 44 | the Python package using the instructions below. 45 | 46 | I just want to use it 47 | --------------------- 48 | Install using pip: 49 | 50 | .. code-block:: 51 | 52 | pip install snowexsql 53 | 54 | I want data fast 55 | ----------------- 56 | A programmatic API has been created for fast and standard 57 | access to Point and Layer data. There are two examples_ covering the 58 | features and usage of the API. See the api_ documentation for a 59 | detailed description. 60 | 61 | .. _api: https://snowexsql.readthedocs.io/en/latest/api.html 62 | 63 | .. code-block:: python 64 | 65 | from datetime import date 66 | from snowexsql.api import PointMeasurements, LayerMeasurements 67 | # The main functions we will use are `from_area` and `from_filter` like this 68 | df = PointMeasurements.from_filter( 69 | date=date(2020, 5, 28), instrument='camera' 70 | ) 71 | print(df.head()) 72 | 73 | I need help 74 | ------------ 75 | Jump over to `our discussion forum <https://github.com/SnowEx/snowexsql/discussions>`_ 76 | and get help from our community. 77 | 78 | 79 | I want to contribute 80 | --------------------- 81 | Thank you for the interest! 82 | 83 | Our community follows the |Contributor Covenant| 84 | 
85 | .. |Contributor Covenant| image:: https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg 86 | :target: code_of_conduct.md 87 | .. _contribution guide: https://snowexsql.readthedocs.io/en/latest/community/contributing.html 88 | 89 | Have a look at our `contribution guide`_ and see the many ways to get involved! 90 | 91 | Documentation 92 | ------------- 93 | 94 | There is a whole host of resources for users in the documentation. It has been 95 | set up for you to preview in your browser. 96 | 97 | In there you will find: 98 | 99 | * Examples of database use 100 | * Database structure 101 | * API documentation for the Python package snowexsql 102 | * Links to other resources 103 | * Notes about the data uploaded 104 | * And more! 105 | 106 | To see the documentation in your browser: 107 | 108 | **Warning**: To see the examples/gallery, the snowex db needs to be up. Otherwise the notebooks 109 | will show the last output committed to GitHub. 110 | 111 | .. code-block:: bash 112 | 113 | make docs 114 | 115 | DOI 116 | --- 117 | .. |HW22| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7618102.svg 118 | :target: https://doi.org/10.5281/zenodo.7618102 119 | .. |HW24| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.13312706.svg 120 | :target: https://doi.org/10.5281/zenodo.13312706 121 | 122 | * `SnowEx Hackweek 2022 `_ - |HW22| 123 | * `SnowEx Hackweek 2024 `_ - |HW24| 124 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | 3 | services: 4 | 5 | postgres: 6 | image: kartoza/postgis:14-3.2 7 | environment: 8 | POSTGRES_PASSWORD: 'db_builder' 9 | POSTGRES_USER: 'builder' 10 | POSTGRES_DB: 'test' 11 | POSTGIS_GDAL_ENABLED_DRIVERS: 'ENABLE_ALL' 12 | POSTGIS_ENABLE_OUTDB_RASTERS: 'True' 13 | TZ: 'UTC' 14 | PGTZ: 'UTC' 15 | ports: 16 | - 5432:5432 17 | healthcheck: 18 | test: "exit 0" 19 | volumes: 20 | - $PWD/:$PWD/ # amazing this works. Allows for outdb testing of local rasters 21 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = -b html -d _build/doctrees 6 | SPHINXBUILD = python -m sphinx 7 | SPHINXPROJ = snowexsql 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | jupyter-book config sphinx docs 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | # Book settings 2 | # Learn more at https://jupyterbook.org/customize/config.html 3 | 4 | title: Snowexsql Documentation 5 | author: SnowExSQL Developers 6 | logo: logo.png 7 | 8 | # Force re-execution of notebooks on each build. 
9 | # See https://jupyterbook.org/content/execute.html 10 | execute: 11 | execute_notebooks: 'off' 12 | 13 | # Define the name of the latex output file for PDF builds 14 | latex: 15 | latex_documents: 16 | targetname: book.tex 17 | 18 | # Add a bibtex file so that we can create citations 19 | bibtex_bibfiles: 20 | - references.bib 21 | 22 | # Information about where the book exists on the web 23 | repository: 24 | url: https://github.com/SnowEx/snowexsql # Online location of your book 25 | path_to_book: docs # Optional path to your book, relative to the repository root 26 | branch: master # Which branch of the repository should be used when creating links (optional) 27 | 28 | # Add GitHub buttons to your book 29 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository 30 | html: 31 | use_issues_button: true 32 | use_repository_button: true 33 | 34 | exclude_patterns: ['_build', 'Thumbs.db', '.DS_Store'] 35 | 36 | sphinx: 37 | extra_extensions: 38 | - 'sphinx.ext.autodoc' 39 | - 'sphinxcontrib.apidoc' 40 | - 'sphinx.ext.viewcode' 41 | - 'sphinx_gallery.load_style' 42 | - 'sphinx.ext.autosectionlabel' 43 | config: 44 | html_js_files: 45 | - https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js 46 | -------------------------------------------------------------------------------- /docs/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | 3 | {% block footer %} 4 | 5 | {{ super() }} 6 | {% endblock %} 7 | -------------------------------------------------------------------------------- /docs/_toc.yml: -------------------------------------------------------------------------------- 1 | # Table of contents 2 | # Learn more at https://jupyterbook.org/customize/toc.html 3 | 4 | format: jb-book 5 | root: readme 6 | chapters: 7 | - file: usage 8 | - file: community/index 9 | - file: database_structure 10 | - file: data_notes 11 | - file: gallery/index 12 | sections: 13 | - file: gallery/what_is_in_the_db_example 14 | - file: gallery/raster_union_and_more_example 15 | - file: gallery/plot_raster_example 16 | - file: gallery/plot_pit_swe_example 17 | - file: gallery/overview_example 18 | - file: gallery/graupel_smp_example 19 | - file: gallery/graupel_pits_example 20 | - file: gallery/getting_started_example 21 | - file: gallery/get_spiral_example 22 | - file: gallery/compare_UAVSAR_to_depths_example 23 | - file: gallery/compare_SSA_instruments_example 24 | - file: gallery/camera_derived_snow_depths_example 25 | - file: gallery/api_intro_example 26 | - file: gallery/api_plot_pit_density_example 27 | - file: cheat_sheet 28 | - file: qgis 29 | - file: api 30 | - file: history -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API Documentation 2 | ================= 3 | .. role:: python(code) 4 | :language: python 5 | 6 | Background 7 | ---------- 8 | The API (not a REST API, more of an SDK) is a set of Python classes 9 | designed for easy and standardized access to the data in the database. 10 | 11 | The classes can both describe what data is available, and return 12 | data in a GeoPandas dataframe. 13 | 14 | Components 15 | ---------- 16 | There are two main API classes for data access. 17 | 18 | .. 
code-block:: python 19 | 20 | from snowexsql.api import PointMeasurements, LayerMeasurements 21 | 22 | :code:`PointMeasurements` gives access to the PointData (depths, GPR, etc.), and 23 | :code:`LayerMeasurements` gives access to the LayerData (pits, etc.). 24 | 25 | Both of the classes have the same methods, although they access different 26 | tables in the database. 27 | 28 | The primary methods for accessing data are :code:`.from_area` and 29 | :code:`.from_filter`. Both of these methods return a GeoPandas dataframe. 30 | 31 | .from_filter 32 | ------------ 33 | 34 | The :code:`.from_filter` method is the simpler of the two search methods. It takes in 35 | a variety of keyword args (kwargs) and returns a dataset that meets 36 | all of the criteria. 37 | 38 | .. code-block:: python 39 | 40 | df = LayerMeasurements.from_filter( 41 | type="density", 42 | site_name="Boise River Basin", 43 | limit=1000 44 | ) 45 | 46 | In this example, we filter to all the layer measurements of `density` 47 | that were taken in the `Boise River Basin`, and we `limit` to the top 48 | 1000 measurements. 49 | 50 | Each kwarg (except date) **can take in a list or a single value**, so you could change 51 | this to :code:`site_name=["Boise River Basin", "Grand Mesa"]`. 52 | 53 | To find what `kwargs` are allowed, we can check the class: 54 | 55 | .. code-block:: python 56 | 57 | LayerMeasurements.ALLOWED_QRY_KWARGS 58 | 59 | For :code:`LayerMeasurements` this will return 60 | :code:`["site_name", "site_id", "date", "instrument", "observers", "type", "utm_zone", "pit_id", "date_greater_equal", "date_less_equal"]` 61 | 62 | so we can filter by any of these as inputs to the function. 63 | 64 | **Notice `limit` is not specified here**. Limit is in the :code:`SPECIAL_KWARGS` 65 | and gets handled at the end of the query. 66 | 67 | **Notice `date_greater_equal` and `date_less_equal`** for filtering the `date` 68 | parameter using `>=` and `<=` logic. 69 | 70 | To find what values are allowed for each, we can check the properties of the 71 | class. Both :code:`LayerMeasurements` and :code:`PointMeasurements` have 72 | the following properties. 73 | 74 | * all_site_names 75 | * all_types 76 | * all_dates 77 | * all_observers 78 | * all_instruments 79 | 80 | So you can find all the instruments for filtering like :code:`LayerMeasurements().all_instruments`. 81 | **Note** - these must be called from an instantiated class, as shown in the 82 | line above. 83 | 84 | .from_area 85 | ---------- 86 | 87 | The signature for :code:`.from_area` looks like this: 88 | 89 | .. code-block:: python 90 | 91 | def from_area(cls, shp=None, pt=None, buffer=None, crs=26912, **kwargs): 92 | 93 | It is a class method, so it *does not need an instantiated class*. 94 | The :code:`**kwargs` argument takes the same inputs as the :code:`from_filter` 95 | function. 96 | 97 | The big difference is that :code:`.from_area` will filter results to either those within 98 | :code:`shp` (a `shapely` polygon) **or** those within a :code:`buffer` radius 99 | around :code:`pt` (a `shapely` point). 100 | 
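101 | As a quick sketch, a radius search might look like the following. The point 102 | coordinates, buffer distance, and extra kwarg here are illustrative only and 103 | assume data in the default :code:`crs=26912` (UTM zone 12N). 104 | 105 | .. code-block:: python 106 | 107 | from shapely.geometry import Point 108 | 109 | from snowexsql.api import PointMeasurements 110 | 111 | # Hypothetical point and radius; any `from_filter` kwargs also apply here. 112 | pt = Point(740000, 4325000) 113 | df = PointMeasurements.from_area(pt=pt, buffer=500, type='depth') 114 | print(df.head()) 115 | 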
116 | Large Query Exception and Limit 117 | ------------------------------- 118 | 119 | By default, if more than 1000 records would be returned and **no limit** 120 | is provided, the query will fail. This is intentional so that we are aware 121 | of large queries. If you understand your query will be large and need 122 | more than 1000 records returned, add a :code:`limit` kwarg to your query 123 | with a value greater than the number you need returned. 124 | **This will override the default behavior** and return as many records as 125 | you requested. 126 | -------------------------------------------------------------------------------- /docs/cheat_sheet.rst: -------------------------------------------------------------------------------- 1 | Cheat Sheets 2 | ============ 3 | 4 | .. role:: python(code) 5 | :language: python 6 | 7 | Below is a list of common things you will likely want to use when querying the database. 8 | 9 | Querying 10 | -------- 11 | The table below shows a handful of useful ways to query the database. 12 | 13 | All queries can be built and expanded on like this: 14 | 15 | .. code-block:: python 16 | 17 | qry = session.query() 18 | qry = qry.filter() 19 | # Continue chaining filters 20 | 21 | .. list-table:: Querying 22 | :widths: 10 100 1000 23 | :header-rows: 1 24 | 25 | * - Technique 26 | - Usage 27 | - Description 28 | 29 | * - :python:`==`, :python:`!=`, :python:`>=`, :python:`<=`, :python:`>`, :python:`<` 30 | - :python:`qry.filter(SiteData.site_id == '1S20')` 31 | - Filter by comparing a column to a value 32 | 33 | * - :python:`.in_()` 34 | - :python:`qry.filter(PointData.date.in_([date1, date2]))` 35 | - Filter by matching a value in a list 36 | 37 | * - :python:`.is_()`, :python:`.isnot()` 38 | - :python:`qry.filter(LayerData.instrument.isnot(None))` 39 | - Filter rows where a column is/is not Null 40 | 41 | * - :python:`.contains()` 42 | - :python:`qry.filter(LayerData.comments.contains('graupel'))` 43 | - Filter by finding a substring 44 | 45 | * - :python:`.distinct()` 46 | - :python:`session.query(RasterData.surveyors).distinct()` 47 | - Reduce result to unique values 48 | 49 | * - :python:`.limit()` 50 | - :python:`session.query(PointData).limit(10)` 51 | - Limit the number of records returned, useful for testing 52 | 53 | * - :python:`.count()` 54 | - :python:`qry.count()` 55 | - Count the number of records matching the query/filtering 56 | 57 | 58 | Database Tables 59 | --------------- 60 | The table below maps the SQL table names to the Python Object Relational Mapping (ORM) classes, with descriptions of the data 61 | in them. 62 | 63 | .. list-table:: **Database Tables** 64 | :widths: 10 20 180 65 | :header-rows: 1 66 | 67 | * - SQL Table 68 | - snowexsql Class 69 | - Description 70 | 71 | * - :python:`sites` 72 | - :py:class:`snowexsql.data.SiteData` 73 | - Details describing pit sites 74 | 75 | * - :python:`points` 76 | - :py:class:`snowexsql.data.PointData` 77 | - Data with a single value and single location 78 | 79 | * - :python:`layers` 80 | - :py:class:`snowexsql.data.LayerData` 81 | - Data with a single value at a single location with a depth component 82 | 83 | * - :python:`images` 84 | - :py:class:`snowexsql.data.ImageData` 85 | - Raster Data 86 | 87 | Useful `snowexsql` Functions 88 | ---------------------------- 89 | The table below shows useful tools built with this library. 90 | 91 | .. 
list-table:: 92 | :widths: 10 20 180 93 | :header-rows: 1 94 | 95 | * - Function 96 | - Usage 97 | - Description 98 | 99 | * - :py:func:`snowexsql.db.get_db` 100 | - :python:`eng, sesh = get_db('<username>:<password>@<host>/snowex')` 101 | - Get `engine `_ / `session `_ objects to query the db 102 | 103 | * - :py:func:`snowexsql.db.get_table_attributes` 104 | - :python:`cols = get_table_attributes(PointData)` 105 | - Get table column names 106 | 107 | * - :py:func:`snowexsql.conversions.query_to_geopandas` 108 | - :python:`df = query_to_geopandas(qry, engine)` 109 | - Convert a query to a geopandas dataframe 110 | 111 | * - :py:func:`snowexsql.conversions.raster_to_rasterio` 112 | - :python:`ds = raster_to_rasterio(records)` 113 | - Convert db results to rasterio datasets 114 | 115 | Useful PostGIS Tools 116 | -------------------- 117 | The table below shows useful PostGIS tools that can be used from Python. These are accessed in two ways: 118 | 119 | 1. :python:`from sqlalchemy.sql import func` 120 | 2. :python:`import geoalchemy2.functions as gfunc` 121 | 122 | 123 | .. list-table:: 124 | :widths: 10 20 180 125 | :header-rows: 1 126 | 127 | * - Function 128 | - Usage 129 | - Description 130 | 131 | * - `ST_AsTiff `_ 132 | - :python:`session.query(func.ST_AsTiff(ImageData.raster))` 133 | - Convert binary to GeoTiff format 134 | 135 | * - `ST_Union `_ 136 | - :python:`session.query(func.ST_Union(ImageData.raster, type_=Raster))` 137 | - Merge queried tiles 138 | 139 | * - `ST_Clip `_ 140 | - :python:`session.query(func.ST_Clip(ImageData.raster, shp))` 141 | - Clip raster by polygon 142 | 143 | * - `ST_Intersects `_ 144 | - :python:`session.query(func.ST_Intersects(ImageData.raster, shp))` 145 | - Get tiles that touch polygon 146 | 147 | * - `ST_Rescale `_ 148 | - :python:`session.query(func.ST_Rescale(ImageData.raster, res, res))` 149 | - Rescale raster 150 | 151 | * - `ST_Hillshade `_ 152 | - :python:`session.query(func.ST_Hillshade(ImageData.raster))` 153 | - Get a hillshade of raster 154 | 155 | * - `ST_Envelope `_ 156 | - :python:`session.query(func.ST_Envelope(ImageData.raster))` 157 | - Get geometry outline of raster 158 | 159 | * - `ST_Centroid `_ 160 | - :python:`session.query(func.ST_Centroid(SiteData.geom))` 161 | - Get centroid of a polygon/points 162 | 163 | * - `ST_Within `_ 164 | - :python:`session.query(func.ST_Within(SiteData.geom, shp))` 165 | - Get data within polygon 166 | 167 | * - `ST_Distance `_ 168 | - :python:`session.query(func.ST_Distance(PointData.geom, shp))` 169 | - Get distances between points 170 | 171 | 172 | Common Issues 173 | ------------- 174 | 175 | Useful tools for debugging: 176 | 177 | .. list-table:: **Debugging Tools** 178 | :widths: 20 180 179 | :header-rows: 1 180 | 181 | * - `session.rollback()` 182 | - Rolls back the last query, useful for queries that fail after execution. 183 | 184 | * - `session.close()` 185 | - Closes your connection with the DB. Useful when using Jupyter notebooks. 
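186 | 187 | Putting the pieces together, a minimal sketch of opening a session and chaining 188 | filters, using the class paths from the tables above (the connection string 189 | placeholders are illustrative): 190 | 191 | .. code-block:: python 192 | 193 | from snowexsql.data import LayerData 194 | from snowexsql.db import get_db 195 | 196 | # Hypothetical credentials/host; get_db returns an engine and a session. 197 | engine, session = get_db('<username>:<password>@<host>/snowex') 198 | 199 | # Chain filters as in the querying table, then execute and clean up. 200 | qry = session.query(LayerData).filter(LayerData.type == 'density') 201 | records = qry.limit(10).all() 202 | 203 | session.close() 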
204 | -------------------------------------------------------------------------------- /docs/community/authors.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | Current Contributors 3 | ============================ 4 | .. _Micah Johnson: https://github.com/micahjohnson150 5 | .. _HP Marshall: https://github.com/hpmarshall 6 | .. _Micah Sandusky: https://github.com/micah-prime 7 | .. _Joachim Meyer: https://github.com/jomey 8 | .. _Anthony Arendt: https://github.com/aaarendt 9 | .. _Don Setiawan: https://github.com/lsetiawan 10 | 11 | Core Team 12 | --------- 13 | 14 | * `Micah Johnson`_ 15 | * `HP Marshall`_ 16 | * `Micah Sandusky`_ 17 | * `Joachim Meyer`_ 18 | * `Anthony Arendt`_ 19 | 20 | Community Contributors 21 | ---------------------- 22 | We are always looking for new members to actively contribute to this project! 23 | 24 | * `Don Setiawan`_ 25 | -------------------------------------------------------------------------------- /docs/community/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/community/how_to_contribute.rst: -------------------------------------------------------------------------------- 1 | ################## 2 | How to contribute? 3 | ################## 4 | 5 | The first step to a contribution is to set up a local development environment. 6 | Below are the steps and additional information that will get you from your idea 7 | to a code contribution, adding tests, and finally opening a pull request 8 | to the main stable code branch. 9 | 10 | Local Development Setup 11 | ======================= 12 | 13 | #. Fork the `snowexsql` repo onto your GitHub account. 14 | 15 | `Here are the official GitHub docs on how to do this `_ 16 | 17 | #. Clone your fork locally 18 | 19 | .. attention:: Make sure to replace `your_name_here` with your GitHub username 20 | 21 | .. code-block:: bash 22 | 23 | $ git clone git@github.com:your_name_here/snowexsql.git 24 | $ cd snowexsql/ 25 | 26 | #. Install your local copy into a virtualenv. 27 | 28 | Assuming you have virtualenvwrapper installed, this is how you set up your 29 | fork for local development: 30 | 31 | .. code-block:: bash 32 | 33 | $ mkvirtualenv snowexsql 34 | 35 | .. important:: The command below should be executed inside the cloned repository 36 | from the step above. 37 | 38 | .. code-block:: bash 39 | 40 | $ python setup.py develop 41 | 42 | If you are planning on running the tests or building the docs below, also run: 43 | 44 | .. code-block:: bash 45 | 46 | $ pip install -r requirements_dev.txt 47 | $ pip install -r docs/requirements.txt 48 | 49 | #. Create a branch for local development 50 | 51 | We recommend using the `GitHub flow `_ 52 | workflow that is used in many open-source projects for editing an existing 53 | code base. 54 | 55 | .. code-block:: bash 56 | 57 | $ git checkout -b name-of-your-bugfix-or-feature 58 | 59 | #. Start editing the code and implement your idea. 60 | 61 | #. Check your completed changes 62 | 63 | When you're done making changes, check that your changes pass flake8 and the 64 | tests, including testing other Python versions with tox. Also note the 65 | setup steps below to set up a `test database <#tests>`_ . 66 | 67 | .. code-block:: bash 68 | 69 | $ pytest 70 | 71 | #. Commit your changes and push your branch to GitHub 72 | 73 | .. code-block:: bash 74 | 75 | $ git add . 76 | $ git commit -m "Your detailed description of your changes." 77 | $ git push origin name-of-your-bugfix-or-feature 78 | 79 | #. Submit a pull request through the GitHub website. 80 | 81 | If you haven't done this before - have a look at 82 | `the official GitHub documentation `_ 83 | on how to do this. 84 | 85 | 86 | Tests 87 | ===== 88 | 89 | Before testing, in a separate terminal, we need to run a local instance 90 | of the database. This can be done with: 91 | 92 | .. 
code-block:: bash 93 | 94 | $ docker-compose up -d 95 | 96 | When you are finished testing, make sure to turn the Docker container off: 97 | 98 | .. code-block:: bash 99 | 100 | $ docker-compose down 101 | 102 | Quickly test your installation by running: 103 | 104 | .. code-block:: bash 105 | 106 | $ python3 -m pytest tests/ 107 | 108 | The goal of this project is to have high fidelity in data 109 | interpretation/submission to the database. To see the current 110 | test coverage run: 111 | 112 | .. code-block:: bash 113 | 114 | $ make coverage 115 | 116 | Tips 117 | ---- 118 | 119 | To run a subset of tests:: 120 | 121 | $ pytest tests/test_api.py 122 | 123 | Pull Request Guidelines 124 | ======================= 125 | 126 | Before you submit a pull request, check that it meets these guidelines: 127 | 128 | 1. The pull request should include tests. 129 | 2. If the pull request adds functionality, the docs should be updated. Put 130 | your new functionality into a function with a docstring. 131 | 3. The pull request should work for Python 3.8, 3.9, and 3.10. 132 | Check 133 | https://github.com/SnowEx/snowexsql/pulls 134 | and make sure that the tests pass for all supported Python versions. 135 | 136 | 137 | Deploying 138 | ========= 139 | 140 | A reminder for the maintainers on how to deploy. 141 | Make sure all your changes are committed. 142 | Then run:: 143 | 144 | $ bump2version patch # possible: major / minor / patch 145 | $ git push 146 | $ git push --tags 147 | 148 | Once the tag is merged, a release can be published at 149 | https://github.com/SnowEx/snowexsql/releases/new. 150 | The release name should follow the convention `snowexsql-v0.4.1`. 151 | -------------------------------------------------------------------------------- /docs/community/index.rst: -------------------------------------------------------------------------------- 1 | Community 2 | ********* 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | 7 | contributing 8 | how_to_contribute 9 | authors 10 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Auto-generated by `jupyter-book config` 3 | # If you wish to continue using _config.yml, make edits to that file and 4 | # re-generate this one. 
5 | ############################################################################### 6 | author = 'SnowExSQL Developers' 7 | bibtex_bibfiles = ['references.bib'] 8 | comments_config = {'hypothesis': False, 'utterances': False} 9 | copyright = '2023' 10 | exclude_patterns = ['**.ipynb_checkpoints', '.DS_Store', 'Thumbs.db', '_build'] 11 | extensions = ['sphinx_togglebutton', 'sphinx_copybutton', 'myst_nb', 'jupyter_book', 'sphinx_thebe', 'sphinx_comments', 'sphinx_external_toc', 'sphinx.ext.intersphinx', 'sphinx_design', 'sphinx_book_theme', 'sphinx.ext.autodoc', 'sphinxcontrib.apidoc', 'sphinx.ext.viewcode', 'sphinx_gallery.load_style', 'sphinx.ext.autosectionlabel', 'sphinxcontrib.bibtex', 'sphinx_jupyterbook_latex', 'sphinx_multitoc_numbering'] 12 | external_toc_exclude_missing = False 13 | external_toc_path = '_toc.yml' 14 | html_baseurl = '' 15 | html_favicon = '' 16 | html_js_files = ['https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js'] 17 | html_logo = 'logo.png' 18 | html_sourcelink_suffix = '' 19 | html_theme = 'sphinx_book_theme' 20 | html_theme_options = {'search_bar_text': 'Search this book...', 'launch_buttons': {'notebook_interface': 'classic', 'binderhub_url': '', 'jupyterhub_url': '', 'thebe': False, 'colab_url': ''}, 'path_to_docs': 'docs', 'repository_url': 'https://github.com/SnowEx/snowexsql', 'repository_branch': 'master', 'extra_footer': '', 'home_page_in_toc': True, 'announcement': '', 'analytics': {'google_analytics_id': ''}, 'use_repository_button': True, 'use_edit_page_button': False, 'use_issues_button': True} 21 | html_title = 'Snowexsql Documentation' 22 | latex_engine = 'pdflatex' 23 | myst_enable_extensions = ['colon_fence', 'dollarmath', 'linkify', 'substitution', 'tasklist'] 24 | myst_url_schemes = ['mailto', 'http', 'https'] 25 | nb_execution_allow_errors = False 26 | nb_execution_cache_path = '' 27 | nb_execution_excludepatterns = [] 28 | nb_execution_in_temp = False 29 | nb_execution_mode = 'off' 30 | nb_execution_timeout = 30 31 | nb_output_stderr = 'show' 32 | numfig = True 33 | pygments_style = 'sphinx' 34 | suppress_warnings = ['myst.domains'] 35 | use_jupyterbook_latex = True 36 | use_multitoc_numbering = True 37 | -------------------------------------------------------------------------------- /docs/data_notes.rst: -------------------------------------------------------------------------------- 1 | Data Notes 2 | ========== 3 | 4 | Below are various notes found or answers to questions about files or modifications 5 | to data that has been submitted to the database. This is not a complete list 6 | of variables stored in the database but only variables we felt needed notes 7 | due to decisions made along the way. 8 | 9 | 10 | General Gotchas 11 | ---------------- 12 | 13 | * In the database all data in the value column in the layers table is stored as 14 | a string to accommodate all types of data that are put into it. 15 | 16 | * Layer data coming from the database may not be returned in order, so you 17 | should sort by depth to get a sensible profile. 18 | 19 | * All raster data returned from the database is returned in Well Known Binary 20 | format. 21 | 22 | Manual Snow Depths 23 | ------------------ 24 | 25 | * Originally downloaded from the NSIDC at https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_SD.001/ 26 | * Data stored as centimeters 27 | * Named depth and can be cross-referenced by instrument (e.g. 
28 | 
29 | Snow Micropen (SMP)
30 | -------------------
31 | 
32 | * Original data is sourced from the NSIDC at https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_SMP.001/
33 | * During the GM 2020 campaign, two SMPs were used until they both broke. Identified in the data is a 3rd SMP
34 | that is a Frankenstein SMP put together from parts of the original two.
35 | * SMP measurements have an orientation assigned. These are in reference to
36 | their location relative to the pit. Measurements were recorded in crossing
37 | transects aligned with cardinal directions and centered on the pit. N1 = 50 m
38 | from the center to the north and is also the farthest out. In each cardinal direction
39 | there are typically 3-5 measurements depending on the sampling strategy.
40 | * Profiles were resampled to every 100th sample to expedite uploads.
41 | * SMP data depth in the original file is written as positive depth from the snow
42 | surface toward the ground. To avoid confusion with other profiles, which are
43 | stored in snow height format, SMP depths are written to the database as negative
44 | from the surface (snow surface datum format).
45 | * Depth data is converted to centimeters
46 | 
47 | UAVSAR
48 | ------
49 | Files are originally in a unique binary format. The tools here for maintainers
50 | convert those to GeoTIFFs, which results in a lat/long geographic coordinate system.
51 | This is then re-projected to UTM Zone 12 NAD 83. Then, on upload, the images are tiled to
52 | 500 x 500 pixels.
53 | 
54 | * Initially downloaded from HP GDRIVE
55 | 
56 | Amplitude (.amp#.grd)
57 | ~~~~~~~~~~~~~~~~~~~~~~~~
58 | 
59 | * There are two amplitude files. The int and cor files are derived products that
60 | come from two overflights. amp1 refers to the first flight and amp2 the second.
61 | * The primary date for these is the same as the time of flight mentioned in the
62 | annotation file.
63 | 
64 | Interferogram (.int.grd)
65 | ~~~~~~~~~~~~~~~~~~~~~~~~
66 | 
67 | * The data is in a complex format. Each component is 4 bytes (8 total), set in a
68 | standard real + imaginary j format. These values can be negative (e.g. int4)
69 | * Stored in the database as `insar interferogram`
70 | * The description in the database stores the flight dates
71 | * The primary date in the database is the same as the last flight
72 | * Labeled `insar interferogram real` and `insar interferogram imaginary`
73 | for each component of the data
74 | * Stored in linear power and radians
75 | 
76 | Correlation
77 | ~~~~~~~~~~~
78 | * Labeled as `insar correlation`
79 | * Stored as a scalar between 0-1
80 | 
81 | 
82 | Ground Penetrating Radar (GPR)
83 | ------------------------------
84 | * Original data downloaded from NSIDC at https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_BSU_GPR.001/
85 | * The system is made by Sensors & Software; pulse EKKO Pro (pE) is the model,
86 | a multi-polarization 1 GHz GPR
87 | * Tate Meehan was the surveyor for all BSU GPR data
88 | * Column Time is HHMMSS.sss (24-hour Zulu time of day with the colons removed)
89 | * Uploaded to the DB: two_way_travel, depth, density, and swe
90 | 
91 | SWE
92 | ~~~
93 | * Stored in millimeters
94 | 
95 | depth
96 | ~~~~~
97 | * Stored in centimeters
98 | 
99 | Two Way Travel Time
100 | ~~~~~~~~~~~~~~~~~~~
101 | 
102 | * Labeled as `twt` in the CSV and renamed to `two_way_travel` in the database
103 | * Exists as point data (e.g. a single value with lat/long and other metadata)
104 | * Stored in nanoseconds
105 | 
106 | density
107 | ~~~~~~~
108 | * Stored as `avgDensity` and renamed to `density`
109 | * Stored in kg/m^3
110 | 
111 | 
112 | Stratigraphy
113 | ------------
114 | 
115 | * Originally downloaded from the NSIDC at https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_GM_SP.001/
116 | 
117 | * Any profile that has multiple samples is averaged, and that average is used as the main value. The subsequent profiles
118 | representing a single sample are renamed from their original label to sample_ in the database, e.g.
119 | Density A --> sample_a
120 | 
121 | Known multisampled profiles are:
122 | 
123 | * Density
124 | * LWC (Dielectric Constant)
125 | 
126 | Density
127 | ~~~~~~~
128 | 
129 | * Density profiles all have multiple samples. The value assigned is the
130 | average of those samples.
131 | 
132 | LWC
133 | ~~~
134 | LWC files contain dielectric constant data
135 | 
136 | * Dielectric constants have multiple samples. The main value is the horizontal
137 | average of these samples
138 | 
139 | Specific Surface Area
140 | ---------------------
141 | 
142 | * Originally downloaded from https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_SSA.001/
143 | 
144 | 
145 | USGS Snow Off DEM
146 | ------------------
147 | 
148 | The lidar snow off data is from the USGS 1 m lidar acquisition, which covers
149 | most of the survey site.
150 | 
151 | * Sources are described in `./scripts/3DEP_sources_1m_DEM.txt`, but were found by
152 | using https://viewer.nationalmap.gov/basic/
153 | * Downloaded using `./download_snow_off.sh`
154 | * Labeled as `snow off digital elevation model`
155 | 
156 | Camera Derived Snow Depths
157 | --------------------------
158 | 
159 | Cameras were installed in front of red painted PVC pipes with yellow duct-taped bands at the top and set to take
160 | 2-3 daily timelapse images. Depths were extracted by counting the number of pixels between the top and bottom of the
161 | poles. A ratio calculated using the full length of the pole (304.8 cm), and unique to each camera, was used to convert
162 | pixels to centimeters.
163 | 
164 | * Depths are in centimeters
165 | * Instrument assigned in the db is `camera`
166 | * Equipment is assigned `camera id = < CAMERA COLUMN >`
167 | * Data is not published yet and was received via email.
168 | 
--------------------------------------------------------------------------------
/docs/database_structure.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Database Structure
3 | ==================
4 | 
5 | The database is formed of 4 tables that end users will query.
6 | 
7 | * **points**: Contains all data that has a single value with a single coordinate pair (e.g. snow depths).
8 | * **layers**: Contains all data that has a depth component associated to a single coordinate pair (e.g. density profiles).
9 | * **images**: Contains all rasters, and any query for a raster tile should be done here.
10 | * **sites**: Contains all the pit site details.
11 | 
12 | There are other tables available, but they are auto-generated to support the 4
13 | tables above. These other tables are:
14 | 
15 | * geography_columns
16 | * geometry_columns
17 | * spatial_ref_sys
18 | * raster_columns
19 | * raster_overviews
20 | 
21 | Points Table
22 | =============
23 | 
24 | The `points` table contains any data that can be described by a single
25 | coordinate pair and a single value.
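A minimal sketch of pulling a few of these records (it assumes a database named
`snowex`, as used in the usage docs, and the :class:`snowexsql.data.PointData`
mapping described below):

.. code-block:: python

    from snowexsql.data import PointData
    from snowexsql.db import get_db

    # Connect to the database (the name assumes a local install called snowex)
    engine, session = get_db('snowex')

    # Grab the first 5 snow depth records from the points table
    depths = session.query(PointData).filter(PointData.type == 'depth').limit(5).all()

    # Each record is a PointData object whose attributes match the columns below
    for d in depths:
        print(d.date, d.value, d.units)

    # Always close the session when done
    session.close()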
26 | 
27 | Examples of this type of data include:
28 | * snow depths
29 | * GPR SWE
30 | * GPR two way travel
31 | * GPR Depth
32 | 
33 | This table is mapped to python with :class:`snowexsql.data.PointData`.
34 | 
35 | At a minimum, a single entry in python would be similar to the following:
36 | 
37 | .. code-block:: python
38 | 
39 | PointData(value=52, easting=758635.01, northing=4325308.56, type='depth')
40 | 
41 | 
42 | This table has the following columns:
43 | 
44 | * date - Date data was collected
45 | * easting - UTM projected coordinate in the east direction in meters
46 | * elevation - Elevation at the site or acquisition in meters
47 | * equipment - String identifying more info about the instruments used
48 | * geom - GIS software friendly version of the coordinates of where the data was collected in UTM.
49 | * id - Unique identifier that is automatically assigned when uploaded
50 | * instrument - Name of the instrument used to collect the data
51 | * latitude - Geographic northing coordinate of the acquisition location in degrees
52 | * longitude - Geographic westing coordinate of the acquisition location in degrees
53 | * northing - Northing coordinate projected in UTM in meters
54 | * site_id - Unique identifier to pit location
55 | * site_name - Name describing the general survey area (e.g. Grand Mesa)
56 | * observers - Name of the people who acquired the data
57 | * time - Time (MST) that the data was collected
58 | * time_updated - Time this entry was last modified
59 | * type - Name of the data collected
60 | * units - Units of the data collected
61 | * utm_zone - UTM zone
62 | * value - Value of the data collected
63 | * version_number - Version number of the entry
64 | 
65 | 
66 | Layers Table
67 | ============
68 | 
69 | The `layers` table contains all data that can be described by a single
70 | coordinate pair, a depth in the snowpack, and a single value. This means that a
71 | single entry in the Layers table is a single layer from a vertical profile.
72 | 
73 | Examples of this data include:
74 | * density profiles
75 | * SMP
76 | * SSA
77 | * temperature
78 | * Hand hardness
79 | 
80 | This table is mapped to python with :class:`snowexsql.data.LayerData`.
81 | 
82 | At a minimum, a single entry would be similar to the following:
83 | 
84 | .. code-block:: python
85 | 
86 | LayerData(value='300', depth=30, easting=758635.01, northing=4325308.56, type='density')
87 | 
88 | 
89 | **NOTE**: All values in this table are stored as strings to accommodate a wide
90 | range of data.
91 | 
92 | This table contains the following columns:
93 | 
94 | * bottom_depth
95 | * comments
96 | * date - Date data was collected
97 | * depth - Depth in centimeters in the snowpack at which the data was collected
98 | * easting - UTM projected coordinate in the east direction in meters
99 | * elevation - Elevation at the site or acquisition in meters
100 | * flags - Data that was flagged, typically just pits
101 | * geom - GIS software friendly version of the coordinates of where the data was collected in UTM.
102 | * id - Unique identifier that is automatically assigned when uploaded
103 | * instrument - Name of the instrument used to collect the data
104 | * latitude - Geographic northing coordinate of the acquisition location in degrees
105 | * longitude - Geographic westing coordinate of the acquisition location in degrees
106 | * northing - Northing coordinate projected in UTM in meters
107 | * sample_a - 1 of potentially three samples that could have been taken for this measurement, e.g. density
108 | * sample_b - 1 of potentially three samples that could have been taken for this measurement, e.g. density
109 | * sample_c - 1 of potentially three samples that could have been taken for this measurement, e.g. density
110 | * site_id - Unique identifier to pit location
111 | * site_name - Name describing the general survey area (e.g. Grand Mesa)
112 | * observers - Names of the people performing the acquisition
113 | * time - Time (MST) at the beginning of acquisition
114 | * time_created - Time this entry was uploaded
115 | * time_updated - Time this entry was last modified
116 | * type - Name of the data collected
117 | * units - Units of the data collected
118 | * utm_zone - UTM Zone
119 | * value - Value of the data collected
120 | 
121 | 
122 | Images Table
123 | ============
124 | 
125 | The `images` table contains all rasters. It is not called `rasters` because
126 | `raster` is a reserved keyword in postgis.
127 | 
128 | Examples of this include:
129 | * DEMs
130 | * UAVSAR products
131 | * Lidar acquisitions
132 | 
133 | This table is mapped to python with :class:`snowexsql.data.ImageData`.
134 | 
135 | At a minimum, a single entry in python would be similar to the following:
136 | 
137 | .. code-block:: python
138 | 
139 | ImageData(raster=raster_wkb, type='depth')  # raster_wkb: raster data in WKB
140 | 
141 | This table contains the following columns:
142 | 
143 | * date - Date data was collected
144 | * description - Any notes to add
145 | * id - Unique identifier that is automatically assigned when uploaded
146 | * instrument - Name of the instrument used to collect the data
147 | * raster - Raster data in Well Known Binary Format (WKB), best generated using `raster2pgsql` in the command line
148 | * site_id - Unique identifier to pit location
149 | * site_name - Name describing the general survey area (e.g. Grand Mesa)
150 | * observers - Names of the people or organization that acquired the data
151 | * time_created - Time this entry was uploaded
152 | * time_updated - Time this entry was last modified
153 | * type - Name of the data collected
154 | * units - Units of the data collected
155 | 
156 | 
157 | Sites Table
158 | ===========
159 | 
160 | The sites table contains all the details regarding pit sites. This
161 | table is formed exclusively from the `SiteDetails.csv` files that were provided
162 | with the `stratigraphy.csv` and `density.csv` files.
163 | 
164 | This table is mapped to python with :class:`snowexsql.data.SiteData`.
165 | This table has a lot of columns. They are:
166 | 
167 | * air_temp - Air temperature in degrees C at the time of digging the pit
168 | * aspect - Slope aspect in degrees from north
169 | * date - Date data was collected
170 | * easting - UTM projected coordinate in the east direction in meters
171 | * elevation - Elevation at the site or acquisition in meters
172 | * geom - GIS software friendly version of the coordinates of where the data was collected in UTM.
173 | * ground_condition - Description of the surface below snow 174 | * ground_roughness - A description of how rough the surface below the snow is 175 | * ground_vegetation - Description of the vegetation below snow 176 | * id - Unique identifier that is automatically assigned when uploaded 177 | * latitude - Geographic northing coordinate of the acquisition location in degrees 178 | * longitude - Geographic westing coordinate of the acquisition location in degrees 179 | * northing - Northing coordinate projected in UTM in meters 180 | * precip - Description of the precip during pit digging 181 | * site_id - Unique identifier to pit location 182 | * site_name - Name describing the general survey area ( e.g. Grand Mesa) 183 | * site_notes - Any special site specific notes 184 | * sky_cover - Description of the cloud cover 185 | * slope_angle - Angle of the slope in degrees 186 | * time - Time (MST) acquisition began 187 | * time_created - Time this entry was uploaded 188 | * time_updated - Time this entry was last modified 189 | * total_depth - Snow depth at location in centimeters 190 | * tree_canopy - Description of the tree canopy at location 191 | * utm_zone - UTM zone 192 | * vegetation_height - Estimated vegetation height 193 | * weather_description - Brief description of the weather during acquisition 194 | * wind - Description of the wind during acquisition 195 | -------------------------------------------------------------------------------- /docs/gallery/card-images/camera-snow-depths.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/camera-snow-depths.png -------------------------------------------------------------------------------- /docs/gallery/card-images/compare-SSA-instruments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/compare-SSA-instruments.png -------------------------------------------------------------------------------- /docs/gallery/card-images/compare-UAVSAR-depths.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/compare-UAVSAR-depths.png -------------------------------------------------------------------------------- /docs/gallery/card-images/graupel-pits.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/graupel-pits.png -------------------------------------------------------------------------------- /docs/gallery/card-images/graupel-smp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/graupel-smp.png -------------------------------------------------------------------------------- /docs/gallery/card-images/plot-raster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/plot-raster.png 
--------------------------------------------------------------------------------
/docs/gallery/card-images/plot-swe.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/plot-swe.png
--------------------------------------------------------------------------------
/docs/gallery/card-images/raster-union-and-more.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/raster-union-and-more.png
--------------------------------------------------------------------------------
/docs/gallery/card-images/spiral.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/gallery/card-images/spiral.png
--------------------------------------------------------------------------------
/docs/gallery/getting_started_example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Getting Started With Python and PostgreSQL\n",
8 | "This Python package (snowexsql) provides easy access to the entire PostgreSQL database. Here is how you establish a connection:"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "# Import the functions we need from the python snowexsql library\n",
18 | "from snowexsql.db import get_db\n",
19 | "\n",
20 | "# If you followed the install instructions the database name should be snowex\n",
21 | "db_name = 'db.snowexdata.org/snowex'\n",
22 | "\n",
23 | "# Using the function get_db, we receive 2 ways to interact with the database\n",
24 | "engine, session = get_db(db_name, credentials='./credentials.json')"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | "### Using the Engine Object\n",
32 | "The `engine` object returned from the `get_db` function is not used much in the snowexsql library. It does allow you to use typical SQL \n",
33 | "strings to interact with the database. \n",
34 | "\n",
35 | "**Note**: Users who have used python + SQL before will likely be more familiar with this approach. Additionally, those who don't know python but know SQL will also be more comfortable here.\n"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 4,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "name": "stdout",
45 | "output_type": "stream",
46 | "text": [
47 | "8C18, 1C1, Open 6, 2S4, 2S7, 6N36, Skyway Open, 6S22, 2S6, 9S40, 9N30, 1N1, 9N47, 1N3, 9N29, 8N45, 9N39, 8C35, 1C5, 6N31, 3S33, 5N24, 3N53, 1C7, 7N40, SNB 2, 8N55, County Line Open, 6N18, 5C20, 5N10, 8N52, 2N21, 1N5, 6S32, GML, 1S1, 2C3, 8S41, JPL 1, Forest 14, 6S53, C1, 8C11, Open, 2C13, Open Flat, Joe Wright, 8N54, 7S23, 6N16, 8C31, Open 2, 5S42, Mesa West Open, 8N34, Upper, 6C37, 5S49, 9S39, 1S17, 2S35, 1C8, BA Flux Tower, 8N9, FL2A, 5S31, 8N38, 6S26, Caples Lake, Open 4, 2S46, 8S28, 8C36, 5N15, 2C33, 6N46, 3S14, 6S15, 8N35, Skyway Tree, TLSFL2A, 2N13, 3N26, 1S8, Saddle, Banner Open, 3S47, 3S52, 4N2, 2S9, 9S51, Trench 13, 6C24, Panorama Dome, 5C27, Senator Beck, 2S25, Swamp Angel, FL1B, 9N42, 1N6, JPL 2, 2S11, 2N8, 9N59, 1N7, 8C25, 3S5, 8N58, 9C28, 2S10, 2S45, 5C21, 5S24, 7S50, 2N49, 8C22, Forest 13, 2N14, 9C17, 5N19, 2C9, 5N50, 2N4, Mores Creek Summit, LDP Tree, 1C14, 2C2, CUES, SNB 1, 8S18, Michigan River, 7C15, Irwin Barn, 2S20, 1S12, 6S44, 2S48, 9C19, 9N43, 9N56, 9N44, 8S30, 8C26, 7N57, 3S38, 9C16, 5N11, 6S34, 4N27, Forest 12, 5S21, 2C12, 2N12, 9C23, 1S2, 3N22, 2N48, 2S3, 5S29, 8N25, 2C4, Forest North, 2C6, Forest, 2S37, 2S16, HQ Met Station, 1S13, Alta Collins, 4C30, County Line Tree, Bogus Upper, 2S36, 6S19, 8N37, Forest Flat, Atwater, LDP Open, 9N28, 1N23, Gothic, 6N17, 8N51, 1N20, Forest South, 5N41, 8C32, 5S43, 8N33, 5N32, 6C10, Tower 4, Banner Snotel, 2S27, Mesa West Trees, 6C34, Aspen, 8C29\n",
48 | "\n"
49 | ]
50 | }
51 | ],
52 | "source": [
53 | "# First connect to the db\n",
54 | "conn = engine.connect()\n",
55 | "\n",
56 | "# Form a typical SQL query\n",
57 | "qry = \"SELECT DISTINCT site_id FROM sites\"\n",
58 | "\n",
59 | "# Then we execute the sql command and collect the results\n",
60 | "results = conn.execute(qry)\n",
61 | "\n",
62 | "# Create a nice readable string to print the site names using python \n",
63 | "out = ', '.join((row['site_id'] for row in results))\n",
64 | "\n",
65 | "# Print it with a line return for readability\n",
66 | "print(out + '\\n')\n",
67 | "\n",
68 | "# Close your connections to avoid hanging transactions \n",
69 | "conn.close()\n"
70 | ]
71 | },
72 | {
73 | "cell_type": "markdown",
74 | "metadata": {},
75 | "source": [
76 | "### Using the Session Object\n",
77 | "The session object allows a user to interact with the database in a pure python form. This approach is called Object Relational Mapping (ORM). This is important to know because it's super handy when you're googling for help. \n",
78 | "\n",
79 | "ORM *maps* the database tables and their columns to a python class and attributes.
Here is how it works:\n", 80 | " " 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 5, 86 | "metadata": {}, 87 | "outputs": [ 88 | { 89 | "name": "stdout", 90 | "output_type": "stream", 91 | "text": [ 92 | "8C18, 1C1, Open 6, 2S4, 2S7, 6N36, Skyway Open, 6S22, 2S6, 9S40, 9N30, 1N1, 9N47, 1N3, 9N29, 8N45, 9N39, 8C35, 1C5, 6N31, 3S33, 5N24, 3N53, 1C7, 7N40, SNB 2, 8N55, County Line Open, 6N18, 5C20, 5N10, 8N52, 2N21, 1N5, 6S32, GML, 1S1, 2C3, 8S41, JPL 1, Forest 14, 6S53, C1, 8C11, Open, 2C13, Open Flat, Joe Wright, 8N54, 7S23, 6N16, 8C31, Open 2, 5S42, Mesa West Open, 8N34, Upper, 6C37, 5S49, 9S39, 1S17, 2S35, 1C8, BA Flux Tower, 8N9, FL2A, 5S31, 8N38, 6S26, Caples Lake, Open 4, 2S46, 8S28, 8C36, 5N15, 2C33, 6N46, 3S14, 6S15, 8N35, Skyway Tree, TLSFL2A, 2N13, 3N26, 1S8, Saddle, Banner Open, 3S47, 3S52, 4N2, 2S9, 9S51, Trench 13, 6C24, Panorama Dome, 5C27, Senator Beck, 2S25, Swamp Angel, FL1B, 9N42, 1N6, JPL 2, 2S11, 2N8, 9N59, 1N7, 8C25, 3S5, 8N58, 9C28, 2S10, 2S45, 5C21, 5S24, 7S50, 2N49, 8C22, Forest 13, 2N14, 9C17, 5N19, 2C9, 5N50, 2N4, Mores Creek Summit, LDP Tree, 1C14, 2C2, CUES, SNB 1, 8S18, Michigan River, 7C15, Irwin Barn, 2S20, 1S12, 6S44, 2S48, 9C19, 9N43, 9N56, 9N44, 8S30, 8C26, 7N57, 3S38, 9C16, 5N11, 6S34, 4N27, Forest 12, 5S21, 2C12, 2N12, 9C23, 1S2, 3N22, 2N48, 2S3, 5S29, 8N25, 2C4, Forest North, 2C6, Forest, 2S37, 2S16, HQ Met Station, 1S13, Alta Collins, 4C30, County Line Tree, Bogus Upper, 2S36, 6S19, 8N37, Forest Flat, Atwater, LDP Open, 9N28, 1N23, Gothic, 6N17, 8N51, 1N20, Forest South, 5N41, 8C32, 5S43, 8N33, 5N32, 6C10, Tower 4, Banner Snotel, 2S27, Mesa West Trees, 6C34, Aspen, 8C29\n", 93 | "\n" 94 | ] 95 | } 96 | ], 97 | "source": [ 98 | "# Import the table classes from our data module which is where our ORM classes are defined \n", 99 | "from snowexsql.data import SiteData, PointData, LayerData, ImageData\n", 100 | "\n", 101 | "# Form the query to receive all the site_id from the sites table\n", 102 | "qry = session.query(SiteData.site_id).distinct()\n", 103 | "\n", 104 | "# Execute the query and collect the result\n", 105 | "results = qry.all()\n", 106 | "\n", 107 | "# Form a nice string for readability\n", 108 | "out = ', '.join([row[0] for row in list(results)])\n", 109 | "\n", 110 | "# Print it with a line return for readability\n", 111 | "print(out + '\\n')\n" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 6, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "# Close your session to avoid hanging transactions\n", 121 | "session.close()" 122 | ] 123 | } 124 | ], 125 | "metadata": { 126 | "kernelspec": { 127 | "display_name": "Python 3 (ipykernel)", 128 | "language": "python", 129 | "name": "python3" 130 | }, 131 | "language_info": { 132 | "codemirror_mode": { 133 | "name": "ipython", 134 | "version": 3 135 | }, 136 | "file_extension": ".py", 137 | "mimetype": "text/x-python", 138 | "name": "python", 139 | "nbconvert_exporter": "python", 140 | "pygments_lexer": "ipython3", 141 | "version": "3.8.10" 142 | } 143 | }, 144 | "nbformat": 4, 145 | "nbformat_minor": 4 146 | } 147 | -------------------------------------------------------------------------------- /docs/gallery/index.md: -------------------------------------------------------------------------------- 1 | # Gallery 2 | 3 | This is a gallery of examples that you can use to get started with `snowexsql`. 4 | 5 | ::::{grid} 6 | :gutter: 3 7 | 8 | :::{grid-item-card} What is in the database? 
9 | :link: what_is_in_the_db_example.html
10 | :::
11 | 
12 | :::{grid-item-card} Overview
13 | :link: overview_example.html
14 | :::
15 | 
16 | :::{grid-item-card} Getting Started
17 | :link: getting_started_example.html
18 | :::
19 | ::::
20 | 
21 | ::::{grid}
22 | :gutter: 3
23 | 
24 | :::{grid-item-card} Raster Union and More
25 | :link: raster_union_and_more_example.html
26 | 
27 | ![Raster union](card-images/raster-union-and-more.png)
28 | :::
29 | 
30 | :::{grid-item-card} Plot Raster
31 | :link: plot_raster_example.html
32 | 
33 | ![Raster Plot](card-images/plot-raster.png)
34 | :::
35 | 
36 | :::{grid-item-card} Plot Pit SWE
37 | :link: plot_pit_swe_example.html
38 | 
39 | ![Plot SWE](card-images/plot-swe.png)
40 | :::
41 | ::::
42 | 
43 | ::::{grid}
44 | :gutter: 3
45 | 
46 | :::{grid-item-card} Graupel SMP
47 | :link: graupel_smp_example.html
48 | 
49 | ![Graupel SMP](card-images/graupel-smp.png)
50 | :::
51 | 
52 | :::{grid-item-card} Graupel Pits
53 | :link: graupel_pits_example.html
54 | 
55 | ![Graupel Pits](card-images/graupel-pits.png)
56 | :::
57 | 
58 | :::{grid-item-card} Get Spiral
59 | :link: get_spiral_example.html
60 | 
61 | ![Spiral](card-images/spiral.png)
62 | :::
63 | ::::
64 | 
65 | ::::{grid}
66 | :gutter: 3
67 | 
68 | :::{grid-item-card} Compare UAVSAR to Depths
69 | :link: compare_UAVSAR_to_depths_example.html
70 | 
71 | ![UAVSAR depths](card-images/compare-UAVSAR-depths.png)
72 | :::
73 | 
74 | :::{grid-item-card} Compare SSA Instruments
75 | :link: compare_SSA_instruments_example.html
76 | 
77 | ![compare SSA](card-images/compare-SSA-instruments.png)
78 | :::
79 | 
80 | :::{grid-item-card} Camera Derived Snow Depths
81 | :link: camera_derived_snow_depths_example.html
82 | 
83 | ![Camera snow depths](card-images/camera-snow-depths.png)
84 | :::
85 | ::::
86 | 
87 | ::::{grid}
88 | :gutter: 3
89 | 
90 | :::{grid-item-card} API Intro Example
91 | :link: api_intro_example.html
92 | :::
93 | 
94 | :::{grid-item-card} API Plot Pit Density
95 | :link: api_plot_pit_density_example.html
96 | :::
97 | 
98 | ::::
99 | 
--------------------------------------------------------------------------------
/docs/gallery/overview_example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Make a Survey Overview\n",
8 | "\n",
9 | "**Goal**: Make a nice overview map of all the data on a nice DEM \n",
10 | "\n",
11 | "**Approach**: \n",
12 | "\n",
13 | "1. Create an overview DEM with hillshade\n",
14 | "2. Grab locations of single location type data\n",
15 | "3. Grab centroids of all the raster tiles\n",
16 | "4. Plot it all!\n",
17 | "\n",
18 | "## Process:\n",
19 | "\n",
20 | "### Step 1. Create an Overview Raster with HillShade"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": null,
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "from snowexsql.db import get_db\n",
30 | "from snowexsql.data import ImageData, LayerData, PointData, SiteData \n",
31 | "from snowexsql.conversions import raster_to_rasterio\n",
32 | "from rasterio.plot import show\n",
33 | "from sqlalchemy.sql import func\n",
34 | "from geoalchemy2.types import Raster\n",
35 | "import geoalchemy2.functions as gfunc\n",
36 | "import matplotlib.pyplot as plt \n",
37 | "import geopandas as gpd\n",
38 | "from shapely.geometry import Polygon\n",
39 | "from geoalchemy2.shape import from_shape, to_shape\n",
40 | "\n",
41 | "# Connect to the database we made.\n",
42 | "db_name = 'db.snowexdata.org/snowex'\n",
43 | "engine, session = get_db(db_name, credentials='./credentials.json')\n",
44 | "\n",
45 | "# DEM data name and observers\n",
46 | "data_name = 'DEM'\n",
47 | "observers = 'USGS'\n",
48 | "\n",
49 | "# Resolution to make our DEM/Hillshade\n",
50 | "res = 20\n",
51 | "\n",
52 | "# Form a polygon to clip at the max extent (figured out in advance)\n",
53 | "x1 = 735200.0\n",
54 | "x2 = 760000.0\n",
55 | "y1 = 4319989.0\n",
56 | "y2 = 4329803.0\n",
57 | "\n",
58 | "extent = Polygon([[x1, y1], [x1, y2], [x2, y2], [x2, y1]]) # Construct it using shapely\n",
59 | "\n",
60 | "# Make the polygon usable to the db\n",
61 | "extent_ewkt = from_shape(extent, srid=26912).ST_AsEWKT()\n",
62 | "\n",
63 | "# Make polygon plottable for later\n",
64 | "extent_df = gpd.GeoDataFrame({'geometry':[extent]}) "
65 | ]
66 | },
67 | {
68 | "cell_type": "code",
69 | "execution_count": null,
70 | "metadata": {},
71 | "outputs": [],
72 | "source": [
73 | "# Define a function to reduce the same code used for getting the dem and hillshade\n",
74 | "def filter_and_return(session, base, data_name, observers, extent):\n",
75 | "    '''\n",
76 | "    Small function to apply redundant filters and raster making\n",
77 | "    '''\n",
78 | "    # Save our query as a Tiff and clip it along the extent polygon\n",
79 | "    q = session.query(func.ST_AsTiff(func.ST_Clip(base, extent)))\n",
80 | "    \n",
81 | "    # Filter by data name and observers\n",
82 | "    q = q.filter(ImageData.type == data_name)\n",
83 | "    q = q.filter(ImageData.observers == observers)\n",
84 | "    \n",
85 | "    # Execute the query\n",
86 | "    rasters = q.all()\n",
87 | "    \n",
88 | "    # Convert the dataset from the DB to rasterio\n",
89 | "    dataset = raster_to_rasterio(session, rasters)[0]\n",
90 | "    return dataset"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": null,
96 | "metadata": {},
97 | "outputs": [],
98 | "source": [
99 | "# Create the base query to reduce code: Collect rasters and rescale them to our resolution using bilinear interpolation \n",
100 | "bq = func.ST_Rescale(ImageData.raster, res, -1 * res, 'Bilinear')\n",
101 | "\n",
102 | "# Retrieve the dem, join all the tiles retrieved\n",
103 | "base = gfunc.ST_Union(bq, type_=Raster) # Is NOT EXECUTED until query is executed.\n",
104 | "dem = filter_and_return(session, base, data_name, observers, extent_ewkt) \n",
105 | "\n",
106 | "# Merge all the tiles retrieved, then make the hillshade.\n",
107 | "base = func.ST_Hillshade(gfunc.ST_Union(bq, type_=Raster)) # Is NOT EXECUTED until query is executed.\n",
108 | "hillshade = filter_and_return(session, base, data_name, observers, extent_ewkt)"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "### Step 2. Grab locations of single location type data"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "metadata": {},
122 | "outputs": [],
123 | "source": [
124 | "# Write a function to retrieve positional points of data\n",
125 | "def grab_points(session, TableClass, data_name, distinct=False, instrument=None, downsample=None):\n",
126 | "    \"\"\"\n",
127 | "    Returns a dataframe of geometric points of requested data. Use distinct to avoid collecting \n",
128 | "    identical locations from layer data. Use instrument to isolate snow depths\n",
129 | "    \n",
130 | "    Returns: df: Geopandas dataframe with one column geometry for plotting\n",
131 | "    \"\"\"\n",
132 | "    q = session.query(TableClass.geom).filter(TableClass.type == data_name)\n",
133 | "    \n",
134 | "    if instrument is not None:\n",
135 | "        \n",
136 | "        # Filter by what instruments are in a list provided\n",
137 | "        if isinstance(instrument, list):\n",
138 | "            q = q.filter(TableClass.instrument.in_(instrument))\n",
139 | "        else:\n",
140 | "            q = q.filter(TableClass.instrument == instrument)\n",
141 | "    \n",
142 | "    if downsample is not None:\n",
143 | "        # Only sample some of the data \n",
144 | "        q = q.filter(TableClass.id % downsample == 0)\n",
145 | "    if distinct:\n",
146 | "        q = q.distinct()\n",
147 | "    \n",
148 | "    points = q.all()\n",
149 | "    \n",
150 | "    # Make the points into a geopandas dataframe so we can easily plot it\n",
151 | "    df = gpd.GeoDataFrame({'geometry':[to_shape(p[0]) for p in points]})\n",
152 | "    print('Found {} points for {}...'.format(len(df.index), data_name))\n",
153 | "    return df"
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": null,
159 | "metadata": {},
160 | "outputs": [],
161 | "source": [
162 | "\n",
163 | "# Show all instruments used to gather snow depths\n",
164 | "tools = session.query(PointData.instrument).filter(PointData.type == 'depth').distinct().all()\n",
165 | "\n",
166 | "# Grab our pits by selecting hand hardness profiles\n",
167 | "data = {}\n",
168 | "\n",
169 | "# Use distinct locations from hand hardness profiles to get pit locations\n",
170 | "pits = grab_points(session, LayerData, 'hand_hardness', distinct=True)\n",
171 | "\n",
172 | "# Get distinct locations of smp profiles\n",
173 | "smp = grab_points(session, LayerData, 'force', distinct=True)\n",
174 | "\n",
175 | "# Grab all snow depths collected with a magnaprobe or a mesa\n",
176 | "depths = grab_points(session, PointData, 'depth', instrument=['magnaprobe', 'mesa'])\n",
177 | "\n",
178 | "# Grab all the GPR point data\n",
179 | "gpr = grab_points(session, PointData, 'two_way_travel', downsample=100)\n",
180 | "\n",
181 | "# Grab all the camera locations point data\n",
182 | "cameras = grab_points(session, PointData, 'depth', instrument=['camera'], distinct=True)\n"
183 | ]
184 | },
185 | {
186 | "cell_type": "markdown",
187 | "metadata": {},
188 | "source": [
189 | "### Step 3. Grab centroids of all the raster tiles"
190 | ]
191 | },
192 | {
193 | "cell_type": "code",
194 | "execution_count": null,
195 | "metadata": {},
196 | "outputs": [],
197 | "source": [
198 | "# Define a function to grab the center of each raster tile\n",
199 | "def get_tile_centers(session, data_name, observers=None):\n",
200 | "    '''\n",
201 | "    Simple function to grab the center of each tile given a data type and optionally an observers name\n",
202 | "    '''\n",
203 | "    # Use database to grab the centroid of each tile outline (envelope) filtering on type.\n",
204 | "    q = session.query(func.ST_Centroid(func.ST_Envelope(ImageData.raster))).filter(ImageData.type == data_name)\n",
205 | "    \n",
206 | "    # If observers is provided, filter on that too\n",
207 | "    if observers is not None:\n",
208 | "        q = q.filter(ImageData.observers == observers)\n",
209 | "    \n",
210 | "    centers = q.all()\n",
211 | "    \n",
212 | "    # Form the data into plottable geopandas dataframe with only a geometry column\n",
213 | "    df = gpd.GeoDataFrame({'geometry':[to_shape(p[0]) for p in centers]})\n",
214 | "    print('Found {} tiles for {}...'.format(len(df.index), data_name))\n",
215 | "    return df"
216 | ]
217 | },
218 | {
219 | "cell_type": "code",
220 | "execution_count": null,
221 | "metadata": {},
222 | "outputs": [],
223 | "source": [
224 | "# Grab all the names of the raster data so we know what to reference\n",
225 | "names = session.query(ImageData.type).distinct().all()\n",
226 | "\n",
227 | "# Use the get_tile_centers function to grab the dataframe containing the tile centroids and assign a color and marker to it for plotting\n",
228 | "tiles = {}\n",
229 | "\n",
230 | "# Grab all the ASO DEM centers, assign color as steel blue and use squares for symbols\n",
231 | "tiles['ASO Depths'] = (get_tile_centers(session, 'depth', observers='ASO Inc.'), 'steelblue', 's')\n",
232 | "# tiles['ASO SWE'] = (get_tile_centers(session, 'swe', observers='ASO Inc.'), 'plum', 's')\n",
233 | "\n",
234 | "# Grab all the USGS DEM centers, assign color as plum and use pentagons for symbols\n",
235 | "tiles['USGS DEM'] = (get_tile_centers(session, 'DEM', observers='USGS'), 'plum','p')\n",
236 | "\n",
237 | "# Grab all the insar data centers, assign color as some shade of orange and use diamonds for symbols\n",
238 | "tiles['INSAR Amplitudes'] = (get_tile_centers(session, 'insar amplitude'), 'gold', 'D')\n",
239 | "tiles['INSAR Derived'] = (get_tile_centers(session, 'insar interferogram real'), 'goldenrod', 'D') # Since imaginary and real are in the same location we only need one of them\n",
240 | "# tiles['INSAR Correlation.'] = (get_tile_centers(session, 'insar correlation'), 'bisque', 'D')"
241 | ]
242 | },
243 | {
244 | "cell_type": "code",
245 | "execution_count": null,
246 | "metadata": {},
247 | "outputs": [],
248 | "source": [
249 | "metadata = {'instrument': [], 'date':[],'doi':[]}\n",
250 | "\n",
251 | "for k in metadata.keys():\n",
252 | "    for tbl in [PointData, LayerData, ImageData]:\n",
253 | "        print(f'Counting {k} in {tbl.__name__}...')\n",
254 | "        \n",
255 | "        metadata[k] += [v[0] for v in session.query(getattr(tbl,k)).filter(getattr(tbl,k).isnot(None)).distinct()]\n",
256 | "\n",
257 | "    metadata[k] = list(set(metadata[k]))\n",
258 | "\n",
259 | "# Find all observers (Temporarily not working because the observers upload is incorrect)\n",
260 | "metadata['observers'] = []\n",
261 | "\n",
262 | "for tbl in [PointData, LayerData, ImageData]:\n",
263 | "    print(f'Counting observers in {tbl.__name__}...')\n",
264 | "    \n",
265 | "    # Manage multiple names in the observers field\n",
266 | "    observers = session.query(tbl.observers).filter(tbl.observers.isnot(None)).distinct().all()\n",
267 | "    for name in observers:\n",
268 | "        if ',' in name[0] and 'JPL' not in name[0]:\n",
269 | "            metadata['observers'] += name[0].split(',')\n",
270 | "        elif '&' in name[0]:\n",
271 | "            metadata['observers'] += name[0].split('&')\n",
272 | "        else:\n",
273 | "            metadata['observers'] += [name[0]]\n",
274 | "\n",
275 | "metadata['observers'] = len(list(set(metadata['observers'])))\n",
276 | "metadata['date'] = (min(metadata['date']).strftime('%Y-%m-%d'), max(metadata['date']).strftime('%Y-%m-%d'))\n",
"metadata['date'] = (min(metadata['date']).strftime('%Y-%m-%d'), max(metadata['date']).strftime('%Y-%m-%d'))\n", 277 | "metadata['instrument'] = len(metadata['instrument'])\n", 278 | "metadata['doi'] = len(metadata['doi'])\n" 279 | ] 280 | }, 281 | { 282 | "cell_type": "markdown", 283 | "metadata": {}, 284 | "source": [ 285 | "### Step 4. Plot it all!" 286 | ] 287 | }, 288 | { 289 | "cell_type": "code", 290 | "execution_count": null, 291 | "metadata": { 292 | "tags": [ 293 | "nbsphinx-thumbnail", 294 | "nbsphinx-gallery" 295 | ] 296 | }, 297 | "outputs": [], 298 | "source": [ 299 | "# Create a figure with one subplot. Give it size\n", 300 | "fig, ax = plt.subplots(1,1, figsize=(32, 16))\n", 301 | "\n", 302 | "# Plot the hillshade raster in gray\n", 303 | "show(hillshade, ax=ax, cmap='gray', transform=hillshade.transform)\n", 304 | "\n", 305 | "# Plot the DEM with partial transparency so the hillshade can be seen\n", 306 | "show(dem, ax=ax, alpha=0.5, cmap='terrain', transform=dem.transform)\n", 307 | "\n", 308 | "# Plot raster centers \n", 309 | "for n, d in tiles.items():\n", 310 | " df = d[0]\n", 311 | " color = d[1]\n", 312 | " marker = d[2]\n", 313 | " \n", 314 | " # Filter the data only to our map extent defined in the beginning\n", 315 | " ind = df.geometry.within(extent)\n", 316 | " \n", 317 | " # Plot with a name that shows the total tiles not just the tiles in the extent.\n", 318 | " df.loc[ind].plot(ax=ax, marker=marker, markersize=30, edgecolor='black', alpha=0.5, color=color, label='{} tiles of {}'.format(len(df.index), n))\n", 319 | "\n", 320 | "# Plot the GRP points as orange pixels\n", 321 | "gpr.plot(ax=ax, marker=',' , color='orange', markersize=1, label='{} GPR Points'.format(len(gpr.index)))\n", 322 | "\n", 323 | "# Plot the snow depths using aqua colored pixels\n", 324 | "depths.plot(ax=ax, marker=',' , color='aqua', markersize=1, label='{} Manual Depths'.format(len(depths.index)))\n", 325 | "\n", 326 | "# Plot the pits as magenta triangles \n", 327 | "pits.plot(ax=ax, marker='^' , color='magenta', edgecolor='black', markersize=70, label=' {} pits'.format(len(pits.index)))\n", 328 | "\n", 329 | "# Plot the SMP positions with a red plus\n", 330 | "smp.plot(ax=ax, marker='+' , color='lightgreen', markersize=5, label='{} SMP Profiles'.format(len(smp.index)))\n", 331 | "\n", 332 | "# Plot the pits as magenta triangles \n", 333 | "cameras.plot(ax=ax, marker='s' , color='lime', edgecolor='black', markersize=70, label=' {} Camera Traps (not all shown)'.format(len(cameras.index)))\n", 334 | "\n", 335 | "# Don't use scientific notation on the axis ticks\n", 336 | "ax.ticklabel_format(style='plain', useOffset=False)\n", 337 | "\n", 338 | "# Add some labeling\n", 339 | "ax.legend(loc='lower right', fontsize='xx-large', framealpha=1.0)\n", 340 | "ax.set_title('Overview of Measurements in the SnowEx Database for Grand Mesa', size=20)\n", 341 | "ax.set_xlabel('Easting [m]', size=20)\n", 342 | "ax.set_ylabel('Northing [m]', size=20)\n", 343 | "\n", 344 | "ax.set_xlim([extent.bounds[0], extent.bounds[2]])\n", 345 | "ax.set_ylim([extent.bounds[1], extent.bounds[3]])\n", 346 | "\n", 347 | "\n", 348 | "# Add a block showing off details \n", 349 | "\n", 350 | "\n", 351 | "textstr = '\\n'.join([f\"No. observers: {metadata['observers']}\",\n", 352 | " f\"No. 
Instruments: {metadata['instrument']}\", \n", 353 | " f\"Temporal Extent: {metadata['date'][0]} - {metadata['date'][1]}\",\n", 354 | " f\"Published Datasets: {metadata['doi']}\"])\n", 355 | " \n", 356 | "props = dict(boxstyle='round', facecolor='white', alpha=0.9)\n", 357 | "\n", 358 | "# place a text box in upper left in axes coords\n", 359 | "s = ax.text(0.78, 0.95, textstr, transform=ax.transAxes, fontsize='xx-large',\n", 360 | " verticalalignment='top', bbox=props)\n", 361 | "\n", 362 | "\n", 363 | "# Save the figure if you want\n", 364 | "#plt.savefig('/SOME/LOCATION/')\n" 365 | ] 366 | }, 367 | { 368 | "cell_type": "code", 369 | "execution_count": null, 370 | "metadata": {}, 371 | "outputs": [], 372 | "source": [ 373 | "session.close()" 374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": null, 379 | "metadata": {}, 380 | "outputs": [], 381 | "source": [] 382 | } 383 | ], 384 | "metadata": { 385 | "kernelspec": { 386 | "display_name": "Python 3 (ipykernel)", 387 | "language": "python", 388 | "name": "python3" 389 | }, 390 | "language_info": { 391 | "codemirror_mode": { 392 | "name": "ipython", 393 | "version": 3 394 | }, 395 | "file_extension": ".py", 396 | "mimetype": "text/x-python", 397 | "name": "python", 398 | "nbconvert_exporter": "python", 399 | "pygments_lexer": "ipython3", 400 | "version": "3.11.0" 401 | } 402 | }, 403 | "nbformat": 4, 404 | "nbformat_minor": 4 405 | } 406 | -------------------------------------------------------------------------------- /docs/gallery/what_is_in_the_db_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "39453dd1", 6 | "metadata": {}, 7 | "source": [ 8 | "# What's in the Database?\n", 9 | "\n", 10 | "Below is a simple count of what is in the database. \n" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 6, 16 | "id": "a3d24b6c", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "# Import all the python db table reflections \n", 21 | "from snowexsql.data import SiteData, PointData, LayerData, ImageData\n", 22 | "\n", 23 | "# Import the function to get access\n", 24 | "from snowexsql.db import get_db\n", 25 | "\n", 26 | "\n", 27 | "def get_all_distinct(attribute):\n", 28 | " \"\"\"\n", 29 | " Function to count all the unique & valid entries of a column \n", 30 | " in a table. 
\n", 31 | " \"\"\"\n", 32 | " final = {}\n", 33 | " final['Total'] = 0\n", 34 | "\n", 35 | " # Loop over all the tables\n", 36 | " tables = [PointData, LayerData, ImageData]\n", 37 | " for tbl_cls in tables:\n", 38 | " \n", 39 | " # Grab the column from the table class\n", 40 | " obj_att = getattr(tbl_cls, attribute)\n", 41 | " \n", 42 | " # Count all the distinct and non-None values\n", 43 | " result = session.query(obj_att).filter(obj_att != None).distinct().count()\n", 44 | " \n", 45 | " # Save the result and keep track of the total\n", 46 | " final[tbl_cls.__name__] = result\n", 47 | " final['Total'] += result \n", 48 | "\n", 49 | " return final \n", 50 | "\n", 51 | "\n", 52 | "def print_result(count_dict):\n", 53 | " \"\"\"\n", 54 | " Simple function to print out the counts nicely\n", 55 | " \"\"\"\n", 56 | " \n", 57 | " print('\\nDB Table Breakdown:')\n", 58 | " for k in ['PointData', 'LayerData', 'ImageData']:\n", 59 | " print(f'\\t* {k} = {count_dict[k]}')\n", 60 | " " 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 7, 66 | "id": "f597e21d", 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "# Get connection to the DB\n", 71 | "db_name = 'db.snowexdata.org/snowex'\n", 72 | "engine, session = get_db(db_name, credentials='./credentials.json')" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "id": "c512d63f", 78 | "metadata": {}, 79 | "source": [ 80 | "## DOIS" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 8, 86 | "id": "350ad9b0", 87 | "metadata": {}, 88 | "outputs": [ 89 | { 90 | "name": "stdout", 91 | "output_type": "stream", 92 | "text": [ 93 | "Published Datasets: 6\n", 94 | "\n", 95 | "DB Table Breakdown:\n", 96 | "\t* PointData = 2\n", 97 | "\t* LayerData = 2\n", 98 | "\t* ImageData = 2\n" 99 | ] 100 | } 101 | ], 102 | "source": [ 103 | "dois = get_all_distinct('doi')\n", 104 | "print(f'Published Datasets: {dois[\"Total\"]}')\n", 105 | "print_result(dois)" 106 | ] 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "id": "02057c62", 111 | "metadata": {}, 112 | "source": [ 113 | "## Datasets" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 9, 119 | "id": "7ba72325", 120 | "metadata": {}, 121 | "outputs": [ 122 | { 123 | "name": "stdout", 124 | "output_type": "stream", 125 | "text": [ 126 | "Unique Data types: 19\n", 127 | "\n", 128 | "DB Table Breakdown:\n", 129 | "\t* PointData = 3\n", 130 | "\t* LayerData = 9\n", 131 | "\t* ImageData = 7\n" 132 | ] 133 | } 134 | ], 135 | "source": [ 136 | "data_types = get_all_distinct('type')\n", 137 | "print(f'Unique Data types: {data_types[\"Total\"]}')\n", 138 | "print_result(data_types)" 139 | ] 140 | }, 141 | { 142 | "cell_type": "markdown", 143 | "id": "f85f6c8e", 144 | "metadata": {}, 145 | "source": [ 146 | "## Instruments" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 10, 152 | "id": "94502078", 153 | "metadata": {}, 154 | "outputs": [ 155 | { 156 | "name": "stdout", 157 | "output_type": "stream", 158 | "text": [ 159 | "Unique Instruments: 8\n", 160 | "\n", 161 | "DB Table Breakdown:\n", 162 | "\t* PointData = 5\n", 163 | "\t* LayerData = 1\n", 164 | "\t* ImageData = 2\n" 165 | ] 166 | } 167 | ], 168 | "source": [ 169 | "instruments = get_all_distinct('instrument')\n", 170 | "print(f'Unique Instruments: {instruments[\"Total\"]}')\n", 171 | "print_result(instruments)" 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "id": "22b847d7", 177 | "metadata": {}, 178 | "source": [ 179 | "## Pits" 
180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": 13, 185 | "id": "1ed2a027", 186 | "metadata": {}, 187 | "outputs": [ 188 | { 189 | "name": "stdout", 190 | "output_type": "stream", 191 | "text": [ 192 | "Unique Pits: 155\n" 193 | ] 194 | } 195 | ], 196 | "source": [ 197 | "pits = session.query(SiteData.site_id, SiteData.date).distinct().count()\n", 198 | "print(f'Unique Pits: {pits}')" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 5, 204 | "id": "ad860ec0", 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "session.close()" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": null, 214 | "id": "96d735a8", 215 | "metadata": {}, 216 | "outputs": [], 217 | "source": [] 218 | } 219 | ], 220 | "metadata": { 221 | "kernelspec": { 222 | "display_name": "Python 3", 223 | "language": "python", 224 | "name": "python3" 225 | }, 226 | "language_info": { 227 | "codemirror_mode": { 228 | "name": "ipython", 229 | "version": 3 230 | }, 231 | "file_extension": ".py", 232 | "mimetype": "text/x-python", 233 | "name": "python", 234 | "nbconvert_exporter": "python", 235 | "pygments_lexer": "ipython3", 236 | "version": "3.8.10" 237 | } 238 | }, 239 | "nbformat": 4, 240 | "nbformat_minor": 5 241 | } 242 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGELOG.rst 2 | -------------------------------------------------------------------------------- /docs/images/aso_depths_with_sites_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/images/aso_depths_with_sites_example.png -------------------------------------------------------------------------------- /docs/images/connect_db_qgis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/images/connect_db_qgis.png -------------------------------------------------------------------------------- /docs/images/gpr_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/images/gpr_example.png -------------------------------------------------------------------------------- /docs/images/pit_spiral.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/images/pit_spiral.png -------------------------------------------------------------------------------- /docs/images/qgis_db_setup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/images/qgis_db_setup.png -------------------------------------------------------------------------------- /docs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/docs/logo.png -------------------------------------------------------------------------------- /docs/make.bat: 
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | pushd %~dp0
4 | 
5 | REM Command file for Sphinx documentation
6 | 
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=python -msphinx
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=snowexsql
13 | 
14 | if "%1" == "" goto help
15 | 
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed,
20 | echo.then set the SPHINXBUILD environment variable to point to the full
21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the
22 | echo.Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 | 
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 | 
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 | 
35 | :end
36 | popd
37 | 
--------------------------------------------------------------------------------
/docs/qgis.rst:
--------------------------------------------------------------------------------
1 | =====
2 | QGIS
3 | =====
4 | 
5 | These instructions were created using QGIS 3.16.8-Hannover
6 | 
7 | Connecting for the First Time
8 | -----------------------------
9 | 1. Open QGIS
10 | 2. Click on the tool bar `Layer > Add Layer > Add PostGIS Layers`
11 | 3. Click `New` to open the Datasource manager.
12 | 4. Fill out the info as in the picture below, then click Ok. This should bring up a login screen.
13 | 5. Enter `snow` for the user and the hack week password.
14 | 6. Click Connect and expand the `public` tab that appears after it loads.
15 | 7. You should now have sites, points, layers, and images tables showing.
16 | 
17 | 
18 | 
19 | Forming Queries
20 | ---------------
21 | You can use just about anything that SQL and PostGIS has to offer to form your queries.
22 | 
23 | **You should always use a filter to avoid crashing QGIS. Each table does have a significant amount of data.**
24 | 
25 | **Warning: DOUBLE clicking on any table will attempt to add it all! This may crash QGIS**
26 | 
27 | 1. Click on the tool bar `Layer > Add Layer > Add PostGIS Layers`
28 | 2A. If not already connected, select `snowex` in the dropdown and select `Connect`
29 | 2B. Single click on a table
30 | 3. Click `set filter` in the bottom right
31 | 4. Fill out a query and select `test` to see how many records you will get back.
32 | 5. Select `Ok` and then select `Add`
33 | 
34 | Examples
35 | ~~~~~~~~
36 | 
37 | For each of the examples, follow the instructions and add these code snippets in your filters; for multiple
38 | snippets, repeat the process for each code block.
39 | 
40 | 1. There are a ton of GPR derived depths. Here we query them by finding GPR in the instrument name and selecting
41 | every 200th row.
42 | 
43 | .. code-block:: sql
44 | 
45 | instrument like '%GPR%' and
46 | type = 'depth' and
47 | id % 200 = 0
48 | 
49 | .. image:: images/gpr_example.png
50 | :width: 400
51 | :alt: GPR QGIS Example
52 | :class: with-border
53 | 
54 | 
55 | 
56 | 2. Show the ASO depths with sites locations overlaid.
57 | Add ASO data:
58 | 
59 | .. code-block:: sql
60 | 
61 | surveyors = 'ASO Inc.' and
62 | type = 'depth' and
63 | date = '2-2-2020'
64 | 
65 | 
66 | Add the sites table without filtering.
67 | 
68 | .. image:: images/aso_depths_with_sites_example.png
69 | :width: 400
70 | :alt: ASO Depth and Sites QGIS Example
71 | :class: with-border
72 | 
73 | 3. Use PostGIS `ST_Within` and `ST_Buffer` on a site to plot a depth spiral.
74 | Add the snow depth points:
75 | 
76 | .. code-block:: sql
77 | 
78 | ST_Within(geom, ST_Buffer(ST_GeomFromText('POINT(744913 4324095)', 26912), 200))
79 | and instrument = 'magnaprobe'
80 | 
81 | Add the site of interest:
82 | 
83 | .. code-block:: sql
84 | 
85 | site_id = '8N34'
86 | 
87 | 
88 | .. image:: images/pit_spiral.png
89 | :width: 400
90 | :class: with-border
91 | :alt: Pit and Depth Spirals
92 | 
--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 | 
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | ipython>7.0,<9.0
2 | nbconvert>=6.4.3,<6.5.0
3 | nbsphinx==0.9.4
4 | pandoc==1.0.2
5 | plotly==5.22.0
6 | sphinx-gallery==0.9.0
7 | sphinx>=7.1,<7.4
8 | sphinxcontrib-apidoc==0.3.0
9 | jupyter-book>=1.0.2,<1.1
10 | pyyaml<6.1
11 | 
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | =====
2 | Usage
3 | =====
4 | 
5 | PostGres + PostGIS
6 | ------------------
7 | The database that's created with this project is a postgres database with PostGIS
8 | abilities, sometimes just called a PostGIS database. As a result, the database
9 | can be used like a normal postgres database, for which there are hundreds of
10 | online resources. For brevity, this page won't go into constructing those queries,
11 | since it would likely be inferior to many of the other resources.
12 | 
13 | * PostGIS_
14 | * PostGres_
15 | 
16 | .. _PostGIS: https://postgis.net/docs/manual-3.0/
17 | .. _PostGres: https://www.postgresql.org/docs/10/index.html
18 | 
19 | 
20 | Python + GeoAlchemy2
21 | --------------------
22 | The real power of the PostGIS database coupled with python is the simplicity
23 | and readability of forming queries. GeoAlchemy2 provides object relational mapper (ORM)
24 | abilities, which simply means we can have python classes that represent things in the
25 | database.
26 | 
27 | The SnowEx Database currently is formed of 4 tables.
28 | 
29 | 1. **points** - Holds any data which has no other dimension beyond its value.
30 | E.g. Snow Depths, Federal Samplers, etc.
31 | 2. **layers** - Holds any data that was collected at a single point
32 | but has a vertical snow component. Each entry is at a single location with an
33 | assigned depth. E.g. Hand Hardness, Density profiles, SMP, etc.
34 | 3. **images** - Holds all raster data.
35 | 4. **sites** - Holds all site details data.
36 | 
37 | Every query will need a session and access to a database via name::
38 | 
39 | from snowexsql.db import get_db
40 | 
41 | # Connect to the database we made. This may not be named snowex.
42 | db_name = 'snowex'
43 | 
44 | # Get an engine and session object for our db
45 | engine, session = get_db(db_name)
46 | 
47 | 
48 | Each table has a class already built in snowexsql. At a minimum you need at
49 | least one of those classes to interact with it using this library. To grab
To grab
50 | all points in the table::
51 | 
52 |     from snowexsql.tables import PointData, LayerData, ImageData, SiteData
53 | 
54 |     # Grab the first 10 records from the points table
55 |     points = session.query(PointData).limit(10).all()
56 | 
57 |     # Close the session
58 |     session.close()
59 | 
60 | This approach can be used with any of the other tables as well.
61 | 
62 | To grab all the layers associated with a single pit::
63 | 
64 |     # Break up queries by splitting them like the below
65 |     q = session.query(LayerData)
66 | 
67 |     # Filter our query to only the records associated with pit 5S31
68 |     layers = q.filter(LayerData.site_id == '5S31').all()
69 | 
70 | In the ORM examples shown above, class attributes become column names in the
71 | database. For instance, there is a column named `site_id` in our
72 | layers table (represented here as LayerData).
73 | 
74 | Each record returned from the database using the ORM is an object with
75 | the associated attributes of the class used in the query. The query above
76 | returns a list of objects where each record has the attributes that match
77 | each column in the table.
78 | 
79 | Functions
80 | ~~~~~~~~~
81 | 
82 | PostGIS offers a ton of very useful functions, and running them in the database instead of
83 | computing locally in Python can often save time and memory.
84 | 
85 | With the ORM style of database interaction, there are 3
86 | ways we can use these functions:
87 | 
88 | 1. Calling them via `sqlalchemy.functions`
89 | 2. Calling them via `geoalchemy2.functions`
90 | 3. Calling a few directly from `snowexsql.functions`
91 | 
92 | All functions available in postgres are available via option #1. They are, however,
93 | unaware of the types and the object mapping that is occurring in geoalchemy2.
94 | Therefore, especially when dealing with rasters, geoalchemy2 can be quite useful
95 | for prepping data right away. Not all functions are mapped in geoalchemy2 though, and sometimes it's
96 | convenient to map them ourselves, which is what lives in snowexsql.functions.
97 | 
98 | 
99 | Check out our :ref:`Examples` for more detailed looks at queries with Python, and see the short sketch below for the three options in action.
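
As a rough illustration of the three options (a minimal sketch, not a recipe: the
functions chosen here are arbitrary examples, and the database is assumed to be
the `snowex` one from earlier)::

    import geoalchemy2.functions as gfunc
    from sqlalchemy import func

    from snowexsql.db import get_db
    from snowexsql.tables import PointData

    engine, session = get_db('snowex')

    # Option 1: sqlalchemy's func builds any SQL function by name,
    # e.g. count the records in the points table
    n_points = session.query(func.count(PointData.id)).scalar()

    # Option 2: geoalchemy2.functions is aware of PostGIS types,
    # e.g. render a geometry column as well known text (WKT)
    first_wkt = session.query(gfunc.ST_AsText(PointData.geom)).first()

    # Option 3: snowexsql.functions holds a few extra mappings,
    # e.g. ST_Clip for cropping rasters (see the raster examples)
    from snowexsql.functions import ST_Clip

    session.close()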
100 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "snowexsql" 7 | dynamic = ["version"] 8 | description = "SQL Database software for SnowEx data" 9 | keywords = ["snowex", "sql", "database", "snow"] 10 | readme = "README.rst" 11 | requires-python = ">=3.8" 12 | classifiers = [ 13 | 'Development Status :: 2 - Pre-Alpha', 14 | 'Intended Audience :: Developers', 15 | 'Natural Language :: English', 16 | 'Programming Language :: Python :: 3.8', 17 | 'Programming Language :: Python :: 3.9', 18 | 'Programming Language :: Python :: 3.10' 19 | ] 20 | dependencies = [ 21 | "utm>=0.5.0,<1.0", 22 | "geoalchemy2>=0.6,<1.0", 23 | "geopandas>=0.7,<2.0", 24 | "psycopg2-binary>=2.9.0,<2.10.0", 25 | "rasterio>=1.1.5", 26 | "SQLAlchemy >= 2.0.0", 27 | ] 28 | 29 | [project.optional-dependencies] 30 | dev = [ 31 | "pytest", 32 | "pytest-cov", 33 | "sphinx-autobuild<=2024.5", 34 | ] 35 | docs = [ 36 | "ipython>7.0,<9.0", 37 | "nbconvert>=6.4.3,<6.5.0", 38 | "nbsphinx==0.9.4", 39 | "pandoc==1.0.2", 40 | "plotly==5.22.0", 41 | "sphinx-gallery==0.9.0", 42 | "sphinx>=7.1,<7.4", 43 | "sphinxcontrib-apidoc==0.3.0", 44 | "jupyter-book>=1.0.2,<1.1", 45 | "pyyaml<6.1" 46 | ] 47 | all = ["snowexsql[dev,docs]"] 48 | 49 | [project.license] 50 | file = "LICENSE" 51 | 52 | [project.urls] 53 | Homepage = "https://github.com/SnowEx/snowexsql" 54 | Documentation = "https://snowexsql.readthedocs.io" 55 | Repository = "https://github.com/SnowEx/snowexsql.git" 56 | Issues = "https://github.com/SnowEx/snowexsql/issues" 57 | 58 | [tool.hatch.version] 59 | source = "vcs" 60 | 61 | [tool.hatch.build.hooks.vcs] 62 | version-file = "snowexsql/_version.py" 63 | 64 | [tool.hatch.version.raw-options] 65 | local_scheme = "no-local-version" 66 | 67 | [tool.hatch.build.targets.sdist] 68 | exclude = ["/tests"] 69 | -------------------------------------------------------------------------------- /snowexsql/__init__.py: -------------------------------------------------------------------------------- 1 | """Top-level package for snowexsql.""" 2 | 3 | from ._version import __version__ # noqa 4 | 5 | __author__ = """SnowEx SQL Development Team""" 6 | __version__ = __version__ 7 | -------------------------------------------------------------------------------- /snowexsql/conversions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module contains all conversions used for manipulating data. This includes: 3 | filetypes, datatypes, etc. Many tools here will be useful for most end users 4 | of the database. 
5 | """ 6 | 7 | import geopandas as gpd 8 | import pandas as pd 9 | from geoalchemy2.shape import to_shape 10 | from rasterio import MemoryFile 11 | from sqlalchemy.dialects import postgresql 12 | 13 | from snowexsql.tables import PointData 14 | 15 | 16 | def points_to_geopandas(results): 17 | """ 18 | Converts a successful query list into a geopandas data frame 19 | 20 | Args: 21 | results: List of PointData objects 22 | 23 | Returns: 24 | df: geopandas.GeoDataFrame instance 25 | """ 26 | # grab all the attributes of the class to assign 27 | if isinstance(results[0], PointData): 28 | data = {a: [] for a in dir(PointData) if a[0:1] != '__'} 29 | 30 | for r in results: 31 | for k in data.keys(): 32 | v = getattr(r, k) 33 | 34 | if k == 'geom': 35 | v = to_shape(v) 36 | data[k].append(v) 37 | 38 | df = gpd.GeoDataFrame(data, geometry=data['geom']) 39 | return df 40 | 41 | 42 | def query_to_geopandas(query, engine, **kwargs): 43 | """ 44 | Convert a GeoAlchemy2 Query meant for postgis to a geopandas dataframe. 45 | Requires that a geometry column is included 46 | 47 | Args: 48 | query: GeoAlchemy2.Query Object 49 | engine: sqlalchemy engine 50 | 51 | Returns: 52 | df: geopandas.GeoDataFrame instance 53 | """ 54 | # Fill out the variables in the query 55 | sql = query.statement.compile(dialect=postgresql.dialect()) 56 | 57 | # Get dataframe from geopandas using the query and engine 58 | df = gpd.GeoDataFrame.from_postgis(sql, engine, **kwargs) 59 | 60 | return df 61 | 62 | 63 | def query_to_pandas(query, engine, **kwargs): 64 | """ 65 | Convert a GeoAlchemy2 Query meant for postgis to a pandas dataframe. 66 | 67 | Args: 68 | query: Query Object 69 | engine: sqlalchemy engine 70 | 71 | Returns: 72 | df: pandas.DataFrame instance 73 | """ 74 | # Fill out the variables in the query 75 | sql = query.statement.compile(dialect=postgresql.dialect()) 76 | 77 | # Get dataframe from geopandas using the query and engine 78 | df = pd.read_sql(sql, engine, **kwargs) 79 | 80 | return df 81 | 82 | 83 | def raster_to_rasterio(rasters): 84 | """ 85 | Retrieve the numpy array of a raster by converting to a temporary file 86 | 87 | Args: 88 | raster: list of :py:class:`geoalchemy2.types.Raster` 89 | 90 | Returns: 91 | dataset: list of rasterio datasets 92 | 93 | """ 94 | datasets = [] 95 | for r in rasters: 96 | if r[0] is not None: 97 | bdata = bytes(r[0]) 98 | 99 | with MemoryFile() as tmpfile: 100 | tmpfile.write(bdata) 101 | datasets.append(tmpfile.open()) 102 | return datasets 103 | -------------------------------------------------------------------------------- /snowexsql/db.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains tool used directly regarding the database. This includes 3 | getting a session, initializing the database, getting table attributes, etc. 
4 | """ 5 | 6 | import json 7 | 8 | from sqlalchemy import MetaData, create_engine 9 | from sqlalchemy.orm import sessionmaker 10 | 11 | from snowexsql.tables.base import Base 12 | 13 | 14 | def initialize(engine): 15 | """ 16 | Creates the original database from scratch, currently only for 17 | point data 18 | 19 | """ 20 | meta = Base.metadata 21 | meta.drop_all(bind=engine) 22 | meta.create_all(bind=engine) 23 | 24 | 25 | def get_db(db_str, credentials=None, return_metadata=False): 26 | """ 27 | Returns the DB engine, MetaData, and session object 28 | 29 | Args: 30 | db_str: Just the name of the database 31 | credentials: Path to a json file containing username and password for the database 32 | return_metadata: Boolean indicating whether the metadata object is 33 | being returned, useful only for developers 34 | 35 | Returns: 36 | tuple: **engine** - sqlalchemy Engine object for directly sending 37 | querys to the DB 38 | **session** - sqlalchemy Session Object for using object 39 | relational mapping (ORM) 40 | **metadata** (optional) - sqlalchemy MetaData object for 41 | modifying the database 42 | """ 43 | 44 | # This library requires a postgres dialect and the psycopg2 driver 45 | prefix = f'postgresql+psycopg2://' 46 | 47 | if credentials is not None: 48 | # Read in the credentials 49 | with open(credentials) as fp: 50 | creds = json.load(fp) 51 | username = creds['username'] 52 | password = creds['password'] 53 | 54 | db = f"{prefix}{username}:{password}@{db_str}" 55 | else: 56 | db = f"{prefix}{db_str}" 57 | 58 | # Always create a Session in UTC time 59 | engine = create_engine( 60 | db, echo=False, connect_args={ 61 | "options": "-c timezone=UTC"}) 62 | 63 | Session = sessionmaker(bind=engine) 64 | metadata = MetaData() 65 | session = Session(expire_on_commit=False) 66 | 67 | if return_metadata: 68 | result = (engine, session, metadata) 69 | 70 | else: 71 | result = (engine, session) 72 | 73 | return result 74 | 75 | 76 | def get_table_attributes(DataCls): 77 | """ 78 | Returns a list of all the table columns to be used for each entry 79 | """ 80 | 81 | valid_attributes = [att for att in dir(DataCls) if att[0] != '_'] 82 | 83 | # Drop ID as it is (should) never provided 84 | valid_attributes = [v for v in valid_attributes if v != 'id'] 85 | return valid_attributes 86 | -------------------------------------------------------------------------------- /snowexsql/functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module holds all PostGIS functions that have not been mapped yet for use 3 | with ORM. 
Many functions already exist in the geoalchemy2.functions module.
4 | """
5 | import geoalchemy2.functions as gfunc
6 | from geoalchemy2.types import CompositeType, Geometry, Raster
7 | from sqlalchemy.dialects import postgresql
8 | from sqlalchemy.types import Float, Integer
9 | 
10 | 
11 | class ST_PixelAsPoint(gfunc.GenericFunction):
12 |     name = 'ST_PixelAsPoint'
13 |     type = Geometry
14 | 
15 | 
16 | class ST_PixelAsPoints(gfunc.GenericFunction):
17 |     name = 'ST_PixelAsPoints'
18 |     type = CompositeType
19 |     typemap = {
20 |         'x': postgresql.ARRAY(Integer),
21 |         'y': postgresql.ARRAY(Integer),
22 |         'val': postgresql.ARRAY(Float),
23 |         'geom': postgresql.ARRAY(Geometry)}
24 | 
25 | 
26 | class ST_RasterToWorldCoord(gfunc.GenericFunction):
27 |     name = 'ST_RasterToWorldCoord'
28 |     type = Geometry
29 |     # typemap = {'geom': Geometry, 'val': float}
30 | 
31 | 
32 | class ST_Clip(gfunc.GenericFunction):
33 |     name = 'ST_Clip'
34 |     type = Raster
35 | 
36 | 
37 | class ST_Count(gfunc.GenericFunction):
38 |     name = 'ST_Count'
39 |     type = Integer
40 | -------------------------------------------------------------------------------- /snowexsql/tables/__init__.py: --------------------------------------------------------------------------------
1 | from .image_data import ImageData
2 | from .layer_data import LayerData
3 | from .point_data import PointData
4 | from .site_data import SiteData
5 | 
6 | __all__ = [
7 |     'ImageData',
8 |     'LayerData',
9 |     'PointData',
10 |     'SiteData',
11 | ]
12 | -------------------------------------------------------------------------------- /snowexsql/tables/base.py: --------------------------------------------------------------------------------
1 | """
2 | Module contains all the data models for the database. Classes here actually
3 | represent tables where columns are mapped as attributes. Any class inheriting
4 | from Base is a real table in the database. This is called Object Relational
5 | Mapping, or ORM, in SQLAlchemy. 
6 | """ 7 | 8 | from geoalchemy2 import Geometry 9 | from sqlalchemy import Column, Date, DateTime, Float, Integer, String, Time 10 | from sqlalchemy.orm import DeclarativeBase 11 | from sqlalchemy.sql import func 12 | 13 | 14 | class Base(DeclarativeBase): 15 | """ 16 | Base class for which all data will have these attributes 17 | """ 18 | # SQL Alchemy 19 | __table_args__ = {"schema": "public"} 20 | 21 | # Primary Key 22 | id = Column(Integer, primary_key=True) 23 | 24 | # Standard table columns 25 | time_created = Column(DateTime(timezone=True), server_default=func.now()) 26 | time_updated = Column(DateTime(timezone=True), onupdate=func.now()) 27 | 28 | date_accessed = Column(Date) 29 | site_name = Column(String(250)) 30 | date = Column(Date) 31 | doi = Column(String(50)) 32 | 33 | 34 | class SingleLocationData: 35 | """ 36 | Base class for points and profiles 37 | """ 38 | elevation = Column(Float) 39 | geom = Column(Geometry("POINT")) 40 | time = Column(Time(timezone=True)) 41 | site_id = Column(String(50)) 42 | 43 | 44 | class Measurement(object): 45 | """ 46 | Base Class providing attributes required for a measurement of any kind 47 | """ 48 | instrument = Column(String(50)) 49 | type = Column(String(50)) 50 | units = Column(String(50)) 51 | observers = Column(String(100)) 52 | -------------------------------------------------------------------------------- /snowexsql/tables/image_data.py: -------------------------------------------------------------------------------- 1 | from geoalchemy2 import Raster 2 | from sqlalchemy import Column, String 3 | 4 | from .base import Base, Measurement 5 | 6 | 7 | class ImageData(Base, Measurement): 8 | """ 9 | Class representing the images table. This table holds all images/rasters 10 | """ 11 | __tablename__ = 'images' 12 | raster = Column(Raster) 13 | description = Column(String(1000)) 14 | -------------------------------------------------------------------------------- /snowexsql/tables/layer_data.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Float, String 2 | 3 | from .base import Base, Measurement, SingleLocationData 4 | 5 | 6 | class LayerData(SingleLocationData, Measurement, Base): 7 | """ 8 | Class representing the layers table. This table holds all layers or 9 | profile data. Here a single data entry is a single value at depth in the 10 | snowpack and a single coordinate pair. e.g. SMP profiles, Hand hardness, 11 | temperature etc... 12 | """ 13 | __tablename__ = 'layers' 14 | 15 | depth = Column(Float) 16 | site_id = Column(String(50)) 17 | pit_id = Column(String(50)) 18 | bottom_depth = Column(Float) 19 | comments = Column(String(1000)) 20 | sample_a = Column(String(20)) 21 | sample_b = Column(String(20)) 22 | sample_c = Column(String(20)) 23 | value = Column(String(50)) 24 | flags = Column(String(20)) 25 | -------------------------------------------------------------------------------- /snowexsql/tables/point_data.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Float, Integer, String 2 | 3 | from .base import Base, Measurement, SingleLocationData 4 | 5 | 6 | class PointData(SingleLocationData, Measurement, Base): 7 | """ 8 | Class representing the points table. This table holds all point data. 9 | Here a single data entry is a single coordinate pair with a single value 10 | e.g. 
snow depths
11 |     """
12 |     __tablename__ = 'points'
13 | 
14 |     version_number = Column(Integer)
15 |     equipment = Column(String(50))
16 |     value = Column(Float)
17 | -------------------------------------------------------------------------------- /snowexsql/tables/site_data.py: --------------------------------------------------------------------------------
1 | from sqlalchemy import Column, Float, String
2 | 
3 | from .base import Base, SingleLocationData
4 | 
5 | 
6 | class SiteData(SingleLocationData, Base):
7 |     """
8 |     Table for storing pit site metadata. This table doesn't represent any
9 |     main data record, only supporting data for each site.
10 |     """
11 |     __tablename__ = 'sites'
12 |     __table_args__ = {"schema": "public"}
13 | 
14 |     pit_id = Column(String(50))
15 |     slope_angle = Column(Float)
16 |     aspect = Column(Float)
17 |     air_temp = Column(Float)
18 |     total_depth = Column(Float)
19 |     weather_description = Column(String(500))
20 |     precip = Column(String(100))
21 |     sky_cover = Column(String(100))
22 |     wind = Column(String(100))
23 |     ground_condition = Column(String(100))
24 |     ground_roughness = Column(String(100))
25 |     ground_vegetation = Column(String(100))
26 |     vegetation_height = Column(String(100))
27 |     tree_canopy = Column(String(100))
28 |     site_notes = Column(String(1000))
29 | -------------------------------------------------------------------------------- /snowexsql/utilities.py: --------------------------------------------------------------------------------
1 | """
2 | Module for storing miscellaneous functions that don't warrant a separate module
3 | but provide some use across the code base.
4 | """
5 | import logging
6 | 
7 | 
8 | def get_logger(name, debug=True, ext_logger=None):
9 |     """
10 |     Retrieve a logger and assign a custom name to it.
11 | 
12 |     Args:
13 |         name: Name of the logger
14 |         debug: Boolean for whether to show debug statements
15 |         ext_logger: Receives a logger object to configure instead of creating a new one. 
16 |     Returns:
17 |         log: Configured logger object
18 |     """
19 | 
20 |     fmt = '%(name)s %(levelname)s %(message)s'
21 |     if ext_logger is None:
22 |         log = logging.getLogger(name)
23 |     else:
24 |         log = ext_logger
25 |     if debug:
26 |         level = 'DEBUG'
27 |     else:
28 |         level = 'INFO'
29 | 
30 |     # Apply the message format and the requested verbosity
31 |     logging.basicConfig(format=fmt)
32 |     log.setLevel(level)
33 |     return log
34 | -------------------------------------------------------------------------------- /tests/__init__.py: --------------------------------------------------------------------------------
1 | """ Test package for snowexsql."""
2 | -------------------------------------------------------------------------------- /tests/credentials.json: --------------------------------------------------------------------------------
1 | {"username":"builder",
2 | "password": "db_builder"}
3 | -------------------------------------------------------------------------------- /tests/data/LWC.csv: --------------------------------------------------------------------------------
1 | # Location,Grand Mesa
2 | # Site,1N20
3 | # PitID,COGM1N20_20200205
4 | # Date/Time,2020-02-05-13:30
5 | # UTM Zone,12N
6 | # Easting,743281
7 | # Northing,4324005
8 | # top (cm),bottom (cm),dielectric constant A,dielectric constant B
9 | 37,27,1.328,1.317
10 | 27,17,1.372,1.35
11 | 17,7,1.384,1.354
12 | 12,2,NaN,NaN
13 | -------------------------------------------------------------------------------- /tests/data/LWC2.csv: --------------------------------------------------------------------------------
1 | # Location,Grand Mesa
2 | # Site,Skyway Tree
3 | # PitID,COGMST_20200312
4 | # Date/Local Time,2020-03-12T14:45
5 | # UTM Zone,12N
6 | # Easting,754173
7 | # Northing,4325871
8 | # Top (cm),Bottom (cm),Avg Density (kg/m3),Permittivity A,Permittivity B,LWC-vol A (%),LWC-vol B (%)
9 | 83.0,73.0,164.5,1.335,1.291,0.6,0.3
10 | 73.0,63.0,257.0,1.507,1.521,0.7,0.8
11 | 63.0,53.0,268.5,1.33,1.38,0.0,0.0
12 | 53.0,43.0,283.0,1.435,1.362,0.1,0.0
13 | 43.0,33.0,266.5,1.379,1.486,0.0,0.5
14 | 33.0,23.0,283.0,1.417,1.414,0.0,0.0
15 | 23.0,13.0,298.0,1.44,1.458,0.0,0.1
16 | 15.0,5.0,276.5,1.427,1.414,0.1,0.0
17 | -------------------------------------------------------------------------------- /tests/data/README.txt: --------------------------------------------------------------------------------
1 | WARNING! This data, while originally taken from the SNOWEX Data repo, has
2 | been modified to serve testing and is no longer valid experimental
3 | results. 
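
For a quick look at the subsampled SMP files in this folder, whose metadata
lines start with '#', the headers can be skipped with pandas (a small sketch;
pandas is already pulled in by this project's dependencies, and the path
assumes you run from the repository root):

    import pandas as pd

    # Lines starting with '#' hold acquisition metadata; comment='#' drops
    # them so the first uncommented row becomes the column header
    df = pd.read_csv('tests/data/S06M0874_2N12_20200131.CSV', comment='#')
    print(df.head())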
4 | -------------------------------------------------------------------------------- /tests/data/S06M0874_2N12_20200131.CSV: -------------------------------------------------------------------------------- 1 | # Date: 2020-01-31 2 | # Time (UTC): 22:42:14 3 | # Lat: 39.03013229370117 4 | # Lon: -108.16268920898438 5 | # SMP Serial Number: 6 6 | # Original Total Samples: 153720 7 | # Data Subsampled To: Every 1000th 8 | Original Index,Depth (mm),Force (N) 9 | 999,4.0,0.11 10 | 1999,7.9,0.104 11 | 2999,11.9,0.11 12 | 3999,15.9,0.136 13 | 4999,19.8,0.143 14 | 5999,23.8,0.143 15 | 6999,27.8,0.13 16 | 7999,31.7,0.13 17 | 8999,35.7,0.117 18 | 9999,39.7,0.13 19 | 10999,43.6,0.123 20 | 11999,47.6,0.13 21 | 12999,51.6,0.13 22 | 13999,55.6,0.13 23 | 14999,59.5,0.123 24 | 15999,63.5,0.117 25 | 16999,67.5,0.117 26 | 17999,71.4,0.123 27 | 18999,75.4,0.162 28 | 19999,79.4,0.136 29 | 20999,83.3,0.149 30 | 21999,87.3,0.136 31 | 22999,91.3,0.149 32 | 23999,95.2,0.117 33 | 24999,99.2,0.143 34 | 25999,103.2,0.156 35 | 26999,107.1,0.156 36 | 27999,111.1,0.169 37 | 28999,115.1,0.149 38 | 29999,119.0,0.143 39 | 30999,123.0,0.156 40 | 31999,127.0,0.149 41 | 32999,130.9,0.13 42 | 33999,134.9,0.156 43 | 34999,138.9,0.156 44 | 35999,142.9,0.156 45 | 36999,146.8,0.162 46 | 37999,150.8,0.162 47 | 38999,154.8,0.169 48 | 39999,158.7,0.13 49 | 40999,162.7,0.13 50 | 41999,166.7,0.162 51 | 42999,170.6,0.162 52 | 43999,174.6,0.156 53 | 44999,178.6,0.156 54 | 45999,182.5,0.195 55 | 46999,186.5,0.182 56 | 47999,190.5,0.188 57 | 48999,194.4,0.195 58 | 49999,198.4,0.195 59 | 50999,202.4,0.234 60 | 51999,206.3,0.247 61 | 52999,210.3,0.162 62 | 53999,214.3,0.182 63 | 54999,218.3,0.195 64 | 55999,222.2,0.169 65 | 56999,226.2,0.182 66 | 57999,230.2,0.208 67 | 58999,234.1,0.221 68 | 59999,238.1,0.175 69 | 60999,242.1,0.247 70 | 61999,246.0,0.351 71 | 62999,250.0,0.357 72 | 63999,254.0,0.305 73 | 64999,257.9,0.312 74 | 65999,261.9,0.364 75 | 66999,265.9,0.331 76 | 67999,269.8,0.435 77 | 68999,273.8,0.409 78 | 69999,277.8,0.305 79 | 70999,281.7,0.481 80 | 71999,285.7,0.416 81 | 72999,289.7,0.422 82 | 73999,293.6,0.636 83 | 74999,297.6,1.0 84 | 75999,301.6,0.922 85 | 76999,305.6,0.558 86 | 77999,309.5,1.045 87 | 78999,313.5,0.974 88 | 79999,317.5,0.844 89 | 80999,321.4,0.643 90 | 81999,325.4,0.753 91 | 82999,329.4,1.117 92 | 83999,333.3,0.87 93 | 84999,337.3,0.656 94 | 85999,341.3,0.571 95 | 86999,345.2,0.675 96 | 87999,349.2,0.649 97 | 88999,353.2,0.532 98 | 89999,357.1,0.506 99 | 90999,361.1,0.519 100 | 91999,365.1,0.539 101 | 92999,369.0,0.519 102 | 93999,373.0,0.565 103 | 94999,377.0,0.481 104 | 95999,380.9,0.318 105 | 96999,384.9,0.506 106 | 97999,388.9,0.409 107 | 98999,392.9,0.162 108 | 99999,396.8,0.156 109 | 100999,400.8,0.156 110 | 101999,404.8,0.266 111 | 102999,408.7,0.266 112 | 103999,412.7,0.273 113 | 104999,416.7,0.208 114 | 105999,420.6,0.201 115 | 106999,424.6,0.24 116 | 107999,428.6,0.227 117 | 108999,432.5,0.143 118 | 109999,436.5,0.123 119 | 110999,440.5,0.143 120 | 111999,444.4,0.221 121 | 112999,448.4,0.13 122 | 113999,452.4,0.136 123 | 114999,456.3,0.156 124 | 115999,460.3,0.143 125 | 116999,464.3,0.156 126 | 117999,468.3,0.123 127 | 118999,472.2,0.097 128 | 119999,476.2,0.403 129 | 120999,480.2,0.286 130 | 121999,484.1,0.162 131 | 122999,488.1,0.299 132 | 123999,492.1,0.214 133 | 124999,496.0,0.24 134 | 125999,500.0,0.26 135 | 126999,504.0,0.331 136 | 127999,507.9,0.377 137 | 128999,511.9,0.305 138 | 129999,515.9,0.364 139 | 130999,519.8,0.416 140 | 131999,523.8,0.422 141 | 132999,527.8,0.24 142 | 
133999,531.7,0.331 143 | 134999,535.7,0.104 144 | 135999,539.7,0.286 145 | 136999,543.6,0.331 146 | 137999,547.6,0.273 147 | 138999,551.6,0.019 148 | 139999,555.6,0.325 149 | 140999,559.5,0.071 150 | 141999,563.5,0.104 151 | 142999,567.5,0.058 152 | 143999,571.4,0.149 153 | 144999,575.4,0.091 154 | 145999,579.4,0.019 155 | 146999,583.3,0.039 156 | 147999,587.3,0.039 157 | 148999,591.3,0.026 158 | 149999,595.2,0.006 159 | 150999,599.2,0.045 160 | 151999,603.2,-0.013 161 | 152999,607.1,0.117 162 | 153719,610.0,-0.032 163 | -------------------------------------------------------------------------------- /tests/data/S19M1013_5S21_20200201.CSV: -------------------------------------------------------------------------------- 1 | # Date: 2020-02-01 2 | # Time (UTC): 23:16:49 3 | # Lat: 39.01906204223633 4 | # Lon: -108.17510986328125 5 | # SMP Serial Number: 19 6 | # Original Total Samples: 242000 7 | # Data Subsampled To: Every 1000th 8 | Original Index,Depth (mm),Force (N) 9 | 999,4.1,0.053 10 | 1999,8.3,0.053 11 | 2999,12.4,0.055 12 | 3999,16.5,0.053 13 | 4999,20.7,0.056 14 | 5999,24.8,0.055 15 | 6999,28.9,0.053 16 | 7999,33.1,0.053 17 | 8999,37.2,0.056 18 | 9999,41.3,0.055 19 | 10999,45.5,0.056 20 | 11999,49.6,0.055 21 | 12999,53.7,0.055 22 | 13999,57.8,0.053 23 | 14999,62.0,0.056 24 | 15999,66.1,0.614 25 | 16999,70.2,0.838 26 | 17999,74.4,0.811 27 | 18999,78.5,0.309 28 | 19999,82.6,0.211 29 | 20999,86.8,0.179 30 | 21999,90.9,0.167 31 | 22999,95.0,0.219 32 | 23999,99.2,0.151 33 | 24999,103.3,0.117 34 | 25999,107.4,0.104 35 | 26999,111.6,0.095 36 | 27999,115.7,0.104 37 | 28999,119.8,0.107 38 | 29999,124.0,0.108 39 | 30999,128.1,0.126 40 | 31999,132.2,0.127 41 | 32999,136.4,0.137 42 | 33999,140.5,0.118 43 | 34999,144.6,0.132 44 | 35999,148.8,0.121 45 | 36999,152.9,0.12 46 | 37999,157.0,0.112 47 | 38999,161.2,0.127 48 | 39999,165.3,0.125 49 | 40999,169.4,0.113 50 | 41999,173.5,0.135 51 | 42999,177.7,0.156 52 | 43999,181.8,0.169 53 | 44999,185.9,0.149 54 | 45999,190.1,0.188 55 | 46999,194.2,0.15 56 | 47999,198.3,0.134 57 | 48999,202.5,0.146 58 | 49999,206.6,0.155 59 | 50999,210.7,0.153 60 | 51999,214.9,0.168 61 | 52999,219.0,0.197 62 | 53999,223.1,0.168 63 | 54999,227.3,0.161 64 | 55999,231.4,0.169 65 | 56999,235.5,0.175 66 | 57999,239.7,0.215 67 | 58999,243.8,0.228 68 | 59999,247.9,0.206 69 | 60999,252.1,0.205 70 | 61999,256.2,0.228 71 | 62999,260.3,0.243 72 | 63999,264.5,0.23 73 | 64999,268.6,0.21 74 | 65999,272.7,0.191 75 | 66999,276.9,0.242 76 | 67999,281.0,0.268 77 | 68999,285.1,0.282 78 | 69999,289.3,0.238 79 | 70999,293.4,0.263 80 | 71999,297.5,0.295 81 | 72999,301.6,0.323 82 | 73999,305.8,0.334 83 | 74999,309.9,0.356 84 | 75999,314.0,0.376 85 | 76999,318.2,0.373 86 | 77999,322.3,0.345 87 | 78999,326.4,0.383 88 | 79999,330.6,0.351 89 | 80999,334.7,0.364 90 | 81999,338.8,0.374 91 | 82999,343.0,0.384 92 | 83999,347.1,0.365 93 | 84999,351.2,0.375 94 | 85999,355.4,0.385 95 | 86999,359.5,0.434 96 | 87999,363.6,0.483 97 | 88999,367.8,0.483 98 | 89999,371.9,0.484 99 | 90999,376.0,0.435 100 | 91999,380.2,0.465 101 | 92999,384.3,0.538 102 | 93999,388.4,0.629 103 | 94999,392.6,0.66 104 | 95999,396.7,0.682 105 | 96999,400.8,0.724 106 | 97999,405.0,0.759 107 | 98999,409.1,0.837 108 | 99999,413.2,0.698 109 | 100999,417.4,1.01 110 | 101999,421.5,1.125 111 | 102999,425.6,1.219 112 | 103999,429.7,1.143 113 | 104999,433.9,1.105 114 | 105999,438.0,1.245 115 | 106999,442.1,0.81 116 | 107999,446.3,0.71 117 | 108999,450.4,0.701 118 | 109999,454.5,0.647 119 | 110999,458.7,0.739 120 | 111999,462.8,1.216 121 | 
112999,466.9,1.245 122 | 113999,471.1,0.927 123 | 114999,475.2,0.893 124 | 115999,479.3,0.922 125 | 116999,483.5,0.908 126 | 117999,487.6,0.944 127 | 118999,491.7,1.041 128 | 119999,495.9,1.078 129 | 120999,500.0,0.991 130 | 121999,504.1,1.001 131 | 122999,508.3,1.063 132 | 123999,512.4,1.165 133 | 124999,516.5,1.11 134 | 125999,520.7,1.432 135 | 126999,524.8,1.297 136 | 127999,528.9,1.33 137 | 128999,533.1,1.25 138 | 129999,537.2,1.167 139 | 130999,541.3,1.123 140 | 131999,545.5,1.27 141 | 132999,549.6,1.151 142 | 133999,553.7,1.254 143 | 134999,557.8,1.385 144 | 135999,562.0,1.512 145 | 136999,566.1,2.347 146 | 137999,570.2,2.711 147 | 138999,574.4,1.516 148 | 139999,578.5,1.5 149 | 140999,582.6,1.175 150 | 141999,586.8,1.328 151 | 142999,590.9,1.806 152 | 143999,595.0,0.327 153 | 144999,599.2,0.261 154 | 145999,603.3,0.599 155 | 146999,607.4,1.531 156 | 147999,611.6,1.322 157 | 148999,615.7,2.091 158 | 149999,619.8,2.135 159 | 150999,624.0,2.22 160 | 151999,628.1,1.962 161 | 152999,632.2,2.276 162 | 153999,636.4,2.817 163 | 154999,640.5,4.194 164 | 155999,644.6,4.501 165 | 156999,648.8,4.228 166 | 157999,652.9,4.048 167 | 158999,657.0,3.82 168 | 159999,661.2,3.737 169 | 160999,665.3,3.37 170 | 161999,669.4,3.36 171 | 162999,673.5,2.902 172 | 163999,677.7,3.709 173 | 164999,681.8,3.53 174 | 165999,685.9,3.352 175 | 166999,690.1,2.887 176 | 167999,694.2,3.301 177 | 168999,698.3,2.761 178 | 169999,702.5,3.176 179 | 170999,706.6,2.639 180 | 171999,710.7,2.566 181 | 172999,714.9,2.532 182 | 173999,719.0,2.556 183 | 174999,723.1,2.419 184 | 175999,727.3,2.11 185 | 176999,731.4,2.098 186 | 177999,735.5,1.536 187 | 178999,739.7,2.197 188 | 179999,743.8,2.731 189 | 180999,747.9,2.781 190 | 181999,752.1,2.945 191 | 182999,756.2,3.038 192 | 183999,760.3,2.744 193 | 184999,764.5,2.036 194 | 185999,768.6,2.767 195 | 186999,772.7,3.688 196 | 187999,776.9,2.537 197 | 188999,781.0,2.672 198 | 189999,785.1,2.569 199 | 190999,789.3,1.918 200 | 191999,793.4,1.764 201 | 192999,797.5,1.853 202 | 193999,801.6,1.601 203 | 194999,805.8,1.773 204 | 195999,809.9,1.714 205 | 196999,814.0,1.816 206 | 197999,818.2,2.214 207 | 198999,822.3,2.656 208 | 199999,826.4,1.357 209 | 200999,830.6,0.773 210 | 201999,834.7,0.273 211 | 202999,838.8,0.572 212 | 203999,843.0,0.354 213 | 204999,847.1,0.376 214 | 205999,851.2,0.619 215 | 206999,855.4,0.872 216 | 207999,859.5,0.844 217 | 208999,863.6,0.618 218 | 209999,867.8,0.431 219 | 210999,871.9,0.66 220 | 211999,876.0,0.805 221 | 212999,880.2,0.753 222 | 213999,884.3,0.876 223 | 214999,888.4,0.881 224 | 215999,892.6,0.56 225 | 216999,896.7,0.225 226 | 217999,900.8,0.36 227 | 218999,905.0,0.289 228 | 219999,909.1,0.196 229 | 220999,913.2,0.113 230 | 221999,917.4,0.153 231 | 222999,921.5,0.112 232 | 223999,925.6,0.229 233 | 224999,929.7,0.113 234 | 225999,933.9,0.28 235 | 226999,938.0,0.145 236 | 227999,942.1,0.184 237 | 228999,946.3,0.39 238 | 229999,950.4,0.214 239 | 230999,954.5,0.237 240 | 231999,958.7,0.328 241 | 232999,962.8,0.973 242 | 233999,966.9,1.296 243 | 234999,971.1,0.72 244 | 235999,975.2,0.718 245 | 236999,979.3,1.471 246 | 237999,983.5,1.425 247 | 238999,987.6,1.066 248 | 239999,991.7,0.766 249 | 240999,995.9,0.843 250 | 241999,1000.0,0.423 251 | -------------------------------------------------------------------------------- /tests/data/SSA.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,1N20 3 | # PitID,COGM1N20_20200205 4 | # Date/Time,2020-02-05-13:40 5 | # UTM Zone,12N 6 | # Easting 
[m],743281 7 | # Northing [m],4324005 8 | # Instrument,IS3-SP-11-01F 9 | # Profile ID,N/A 10 | # Operator,Juha Lemmetyinen 11 | # Timing,N/A 12 | # Notes, layer at 15 and 20 cm had exact same SSA 13 | # Total snow depth (cm), 80 14 | # 15 | # Sample signal (mV), Reflectance (%), Specific surface area (m^2/kg), Sample Top Height (cm), Deq (mm), Comments 16 | 485.5,54.95,62.10,80,0.1054 17 | 346.5,41.49,29.70,75,0.2203 18 | 360.8,43.00,32.20,70,0.2032 19 | 334.2,40.16,27.60,65,0.2371 20 | 335.7,40.32,27.80,60,0.2354 21 | 328.4,39.52,26.70,55,0.2451 22 | 292.5,35.47,21.40,50,0.3058 23 | 234.1,28.37,14.70,45,0.4451 24 | 232.0,28.10,14.50,40,0.4512 25 | 195.2,23.26,11.20,35,0.5842 26 | 190.4,22.60,10.70,30,0.6115 27 | 205.1,24.59,12.00,25,0.5453 28 | 201.1,24.05,11.70,20,0.5592 29 | 201.1,24.05,11.70,15,0.5592 30 | 186.9,22.12,10.40,10,0.6291 31 | 186.4,22.05,10.40,5,0.6291, brush 32 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/..aux.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 3063.669921875 5 | 3058.0052068735 6 | 3049.5700683594 7 | 2.7221675109354 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/dblbnd.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0287/dblbnd.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/hdr.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0287/hdr.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/metadata.xml: -------------------------------------------------------------------------------- 1 | 1.0FGDC CSDGM MetadataFALSEbe_gm1_0001file://Z:\2020_Regional_SnowEx2020COID_034542\07_Deliverables\00_Deliverables\Grand_Mesa_1\Rasters\Bare_Earth_Digital_Elevation_Models\be_gm1_0001Local Area Network002750000.000000750500.0000004319563.5000004320000.0000001ProjectedGCS_NAD_1983_2011Linear Unit: Meter (1.000000)NAD_1983_2011_UTM_Zone_12N<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' 
xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.3'><WKT>PROJCS[&quot;NAD_1983_2011_UTM_Zone_12N&quot;,GEOGCS[&quot;GCS_NAD_1983_2011&quot;,DATUM[&quot;D_NAD_1983_2011&quot;,SPHEROID[&quot;GRS_1980&quot;,6378137.0,298.257222101]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Transverse_Mercator&quot;],PARAMETER[&quot;False_Easting&quot;,500000.0],PARAMETER[&quot;False_Northing&quot;,0.0],PARAMETER[&quot;Central_Meridian&quot;,-111.0],PARAMETER[&quot;Scale_Factor&quot;,0.9996],PARAMETER[&quot;Latitude_Of_Origin&quot;,0.0],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,6341]]</WKT><XOrigin>-5120900</XOrigin><YOrigin>-9998100</YOrigin><XYScale>450445547.3910538</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>102059</WKID><LatestWKID>6341</LatestWKID></ProjectedCoordinateSystem>32FALSENone1GRIDTRUEcontinuousfloating point-3.4028235e+038202005051139420020200505113942001500000005000FGDC Version 6.2 (Build 9200) ; Esri ArcGIS 10.3.1.4959be_gm1_0001Quantum Spatial1100 Circle Blvd. Suite 126CorvallisOR97330US541-752-1204http://www.quantumspatial.comQuantum Spatial2020-05-051-108.113577-108.10765138.99332238.989251SnowEx Grand Mesa Acquisition 1 Lidar2020-02-012020-02-02Provide high resolution terrain elevation data from the SnowEx Grand Mesa Acquisition 1 dataset. <DIV STYLE="text-align:Left;"><DIV><DIV><P><SPAN>The bare earth digital elevation model (DEM) represents the earth's surface with all vegetation and anthropogenic features removed. It is derived from NIR Lidar data using TIN processing of the ground point returns. Some elevation values have been interpolated across areas in the ground model where there is no elevation data (e.g. over water, under dense vegetation). The horizontal datum for this dataset is NAD83(2011), the vertical datum is NAVD88, Geoid 12B, and the data is projected in UTM 12 North. Units are in Meters. Quantum Spatial collected the SnowEx Grand Mesa Acquisition 1 Lidar data for Boise State University between 02/01/20 and 02/02/20.</SPAN></P></DIV></DIV></DIV>Boise State UniversityLidarLight Detection and Rangingelevation datatopographybare earthDEMdigital elevation modelGrand MesaColoradoMesaPalisadeCedaredgeLidar, Light Detection and Ranging, high-resolution, elevation data, topography, bare earth, DEM, digital elevation modelGrand MesaColoradoMesa, Palisade, Cedaredge0.5This data is assembled by 500 m x 500 m and projected in UTM 12 North.Hans-Peter MarshallBoise State University6563 W Summer Hill Dr.BoiseIdaho83714hpmarshall@boisestate.eduUS(303) 859-3106Hans-Peter MarshallPlease contact Boise State University for information regarding the use of this data.<DIV STYLE="text-align:Left;"><DIV><DIV><P><SPAN>In some areas of heavy vegetation or forest cover, there may be relatively few ground points in the Lidar data. TINing the points produces large triangles and hence the elevations may be less accurate within such areas. 
Elevation values for open water surfaces should be considered approximate due to the increase of laser noise seen over water, frequent lack of returns, and the overall dynamic nature of water.</SPAN></P></DIV></DIV></DIV>Raster Dataset1Hans-Peter MarshallBoise State University6563 W Summer Hill Dr.BoiseIdaho83714hpmarshall@boisestate.eduUS(303) 859-3106datasetEPSG8.6.2210750000.000000 4319563.500000750000.000000 4320000.000000750500.000000 4320000.000000750500.000000 4319563.500000750250.000000 4319781.75000010000.5000008730.500000Band_13045.3798833017.14990232mBand_1ElevationDigital Elevation ModelFalseFalseFalseFalseFalse20200505Quantum Spatial1100 Circle Blvd. Suite 126CorvallisOR97330US541-752-1204http://www.quantumspatial.comQuantum SpatialQuantum SpatialLidar data has been collected and processed for all areas within the project study area.Flight plans are designed with sufficient sidelap to ensure there are no gaps between flightlines. Shaded relief images have been visually inspected for gaps.Shaded relief images have been visually inspected for data errors such as pits, border artifacts, and shifting. Lidar flightlines have been examined to ensure consistent elevation values across overlapping flightlines. The Root Mean Square Error (RMSE) of line to line relative accuracy for this dataset is 0.022 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.Data was examined at a 1:2000 scale. Relative accuracy of the flightlines was assessed in Microstation using TerraMatch. RMSEm0.022The Non-vegetated Vertical Accuracy (NVA) of this dataset, tested at 95% confidence level is 0.1 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.Non-vegetated Vertical Accuracy was assessed using 1 ground check point. This check point was not used in the calibration or post processing of the Lidar point cloud data. NVAm0.1Acquisition. Quantum Spatial collected the SnowEx Grand Mesa Acquisition 1 Lidar data between 02/01/20 and 02/02/20. The survey used a Reigl VQ-1560i laser system mounted in a Beechcraft King Air. Ground level GPS and aircraft IMU were collected during the flight. 2 | 3 | Sensor: Reigl VQ-1560i 4 | Maximum returns: Unlimited 5 | Nominal pulse density: 20 pulses/m^2 6 | Nominal pulse spacing: 0.22 m 7 | AGL: 1578 m 8 | Speed: 140 knots 9 | FOV: 58.5° 10 | Scan frequency: 31.2 hz 11 | Pulse rate: 161 kHz 12 | Pulse duration: 3 ns 13 | Pulse width: 28 cm 14 | Wavelength: 1064 nm 15 | Pulses in air mode: Multiple Times Around (MTA) 16 | Beam divergence: 0.18 mrads 17 | Swath width: 1767.4 m 18 | Overlap: 60% 19 | 2020-02-021. Flightlines and data were reviewed to ensure complete coverage of the study area and positional accuracy of the laser points. 20 | 2. Laser point return coordinates were computed using PosPac 8.3 and RiProcess 1.8.5 software based on independent data from the Lidar system, IMU, and aircraft. 21 | 3. The raw Lidar file was assembled into flightlines per return with each point having an associated x, y, and z coordinate. 22 | 4. Visual inspection of swath to swath laser point consistencies within the study area were used to perform manual refinements of system alignment. 23 | 5. Custom algorithms were designed to evaluate points between adjacent flightlines. Automated system alignment was computed based upon randomly selected swath to swath accuracy measurements that consider elevation, slope, and intensities. 
Specifically, refinement in the combination of system pitch, roll, and yaw offset parameters optimize internal consistency. 24 | 6. Noise (e.g., pits and birds) was filtered using post-processing software, based on known elevation ranges and included the removal of any cycle slips. 25 | 7. Using TerraScan and Microstation, ground classifications utilized custom settings appropriate to the study area. 26 | 8. The corrected and filtered return points were compared to the ground survey points collected to verify the vertical accuracy. 27 | 9. TIN processing of the ground point returns was used to create this bare earth DEM. 2020-05-05Vertical accuracy was also assessed using ground control points that were used in the calibration and post processing of the Lidar point cloud as they still serve as a good indication of the overall accuracy of the Lidar dataset. The Root Mean Square Error (RMSE) of the vertical accuracy of the Lidar dataset as compared to ground control points is 0.029 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.29 ground control points were collected and utilized in the calibration and post processing of the Lidar data point cloud.RMSE0.029m 28 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/prj.adf: -------------------------------------------------------------------------------- 1 | Projection UTM 2 | Zone 12 3 | Spheroid GRS80 4 | Units METERS 5 | Zunits NO 6 | Parameters 7 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/sta.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0287/sta.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/w001001.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0287/w001001.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0287/w001001x.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0287/w001001x.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/dblbnd.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0328/dblbnd.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/hdr.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0328/hdr.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/metadata.xml: -------------------------------------------------------------------------------- 1 | 1.0FGDC CSDGM MetadataFALSEbe_gm1_0001file://Z:\2020_Regional_SnowEx2020COID_034542\07_Deliverables\00_Deliverables\Grand_Mesa_1\Rasters\Bare_Earth_Digital_Elevation_Models\be_gm1_0001Local Area 
Network002750000.000000750500.0000004319563.5000004320000.0000001ProjectedGCS_NAD_1983_2011Linear Unit: Meter (1.000000)NAD_1983_2011_UTM_Zone_12N<ProjectedCoordinateSystem xsi:type='typens:ProjectedCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.3'><WKT>PROJCS[&quot;NAD_1983_2011_UTM_Zone_12N&quot;,GEOGCS[&quot;GCS_NAD_1983_2011&quot;,DATUM[&quot;D_NAD_1983_2011&quot;,SPHEROID[&quot;GRS_1980&quot;,6378137.0,298.257222101]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433]],PROJECTION[&quot;Transverse_Mercator&quot;],PARAMETER[&quot;False_Easting&quot;,500000.0],PARAMETER[&quot;False_Northing&quot;,0.0],PARAMETER[&quot;Central_Meridian&quot;,-111.0],PARAMETER[&quot;Scale_Factor&quot;,0.9996],PARAMETER[&quot;Latitude_Of_Origin&quot;,0.0],UNIT[&quot;Meter&quot;,1.0],AUTHORITY[&quot;EPSG&quot;,6341]]</WKT><XOrigin>-5120900</XOrigin><YOrigin>-9998100</YOrigin><XYScale>450445547.3910538</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>0.001</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><WKID>102059</WKID><LatestWKID>6341</LatestWKID></ProjectedCoordinateSystem>32FALSENone1GRIDTRUEcontinuousfloating point-3.4028235e+038202005051139420020200505113942001500000005000FGDC Version 6.2 (Build 9200) ; Esri ArcGIS 10.3.1.4959be_gm1_0001Quantum Spatial1100 Circle Blvd. Suite 126CorvallisOR97330US541-752-1204http://www.quantumspatial.comQuantum Spatial2020-05-051-108.113577-108.10765138.99332238.989251SnowEx Grand Mesa Acquisition 1 Lidar2020-02-012020-02-02Provide high resolution terrain elevation data from the SnowEx Grand Mesa Acquisition 1 dataset. <DIV STYLE="text-align:Left;"><DIV><DIV><P><SPAN>The bare earth digital elevation model (DEM) represents the earth's surface with all vegetation and anthropogenic features removed. It is derived from NIR Lidar data using TIN processing of the ground point returns. Some elevation values have been interpolated across areas in the ground model where there is no elevation data (e.g. over water, under dense vegetation). The horizontal datum for this dataset is NAD83(2011), the vertical datum is NAVD88, Geoid 12B, and the data is projected in UTM 12 North. Units are in Meters. Quantum Spatial collected the SnowEx Grand Mesa Acquisition 1 Lidar data for Boise State University between 02/01/20 and 02/02/20.</SPAN></P></DIV></DIV></DIV>Boise State UniversityLidarLight Detection and Rangingelevation datatopographybare earthDEMdigital elevation modelGrand MesaColoradoMesaPalisadeCedaredgeLidar, Light Detection and Ranging, high-resolution, elevation data, topography, bare earth, DEM, digital elevation modelGrand MesaColoradoMesa, Palisade, Cedaredge0.5This data is assembled by 500 m x 500 m and projected in UTM 12 North.Hans-Peter MarshallBoise State University6563 W Summer Hill Dr.BoiseIdaho83714hpmarshall@boisestate.eduUS(303) 859-3106Hans-Peter MarshallPlease contact Boise State University for information regarding the use of this data.<DIV STYLE="text-align:Left;"><DIV><DIV><P><SPAN>In some areas of heavy vegetation or forest cover, there may be relatively few ground points in the Lidar data. TINing the points produces large triangles and hence the elevations may be less accurate within such areas. 
Elevation values for open water surfaces should be considered approximate due to the increase of laser noise seen over water, frequent lack of returns, and the overall dynamic nature of water.</SPAN></P></DIV></DIV></DIV>Raster Dataset1Hans-Peter MarshallBoise State University6563 W Summer Hill Dr.BoiseIdaho83714hpmarshall@boisestate.eduUS(303) 859-3106datasetEPSG8.6.2210750000.000000 4319563.500000750000.000000 4320000.000000750500.000000 4320000.000000750500.000000 4319563.500000750250.000000 4319781.75000010000.5000008730.500000Band_13045.3798833017.14990232mBand_1ElevationDigital Elevation ModelFalseFalseFalseFalseFalse20200505Quantum Spatial1100 Circle Blvd. Suite 126CorvallisOR97330US541-752-1204http://www.quantumspatial.comQuantum SpatialQuantum SpatialLidar data has been collected and processed for all areas within the project study area.Flight plans are designed with sufficient sidelap to ensure there are no gaps between flightlines. Shaded relief images have been visually inspected for gaps.Shaded relief images have been visually inspected for data errors such as pits, border artifacts, and shifting. Lidar flightlines have been examined to ensure consistent elevation values across overlapping flightlines. The Root Mean Square Error (RMSE) of line to line relative accuracy for this dataset is 0.022 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.Data was examined at a 1:2000 scale. Relative accuracy of the flightlines was assessed in Microstation using TerraMatch. RMSEm0.022The Non-vegetated Vertical Accuracy (NVA) of this dataset, tested at 95% confidence level is 0.1 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.Non-vegetated Vertical Accuracy was assessed using 1 ground check point. This check point was not used in the calibration or post processing of the Lidar point cloud data. NVAm0.1Acquisition. Quantum Spatial collected the SnowEx Grand Mesa Acquisition 1 Lidar data between 02/01/20 and 02/02/20. The survey used a Reigl VQ-1560i laser system mounted in a Beechcraft King Air. Ground level GPS and aircraft IMU were collected during the flight. 2 | 3 | Sensor: Reigl VQ-1560i 4 | Maximum returns: Unlimited 5 | Nominal pulse density: 20 pulses/m^2 6 | Nominal pulse spacing: 0.22 m 7 | AGL: 1578 m 8 | Speed: 140 knots 9 | FOV: 58.5° 10 | Scan frequency: 31.2 hz 11 | Pulse rate: 161 kHz 12 | Pulse duration: 3 ns 13 | Pulse width: 28 cm 14 | Wavelength: 1064 nm 15 | Pulses in air mode: Multiple Times Around (MTA) 16 | Beam divergence: 0.18 mrads 17 | Swath width: 1767.4 m 18 | Overlap: 60% 19 | 2020-02-021. Flightlines and data were reviewed to ensure complete coverage of the study area and positional accuracy of the laser points. 20 | 2. Laser point return coordinates were computed using PosPac 8.3 and RiProcess 1.8.5 software based on independent data from the Lidar system, IMU, and aircraft. 21 | 3. The raw Lidar file was assembled into flightlines per return with each point having an associated x, y, and z coordinate. 22 | 4. Visual inspection of swath to swath laser point consistencies within the study area were used to perform manual refinements of system alignment. 23 | 5. Custom algorithms were designed to evaluate points between adjacent flightlines. Automated system alignment was computed based upon randomly selected swath to swath accuracy measurements that consider elevation, slope, and intensities. 
Specifically, refinement in the combination of system pitch, roll, and yaw offset parameters optimize internal consistency. 24 | 6. Noise (e.g., pits and birds) was filtered using post-processing software, based on known elevation ranges and included the removal of any cycle slips. 25 | 7. Using TerraScan and Microstation, ground classifications utilized custom settings appropriate to the study area. 26 | 8. The corrected and filtered return points were compared to the ground survey points collected to verify the vertical accuracy. 27 | 9. TIN processing of the ground point returns was used to create this bare earth DEM. 2020-05-05Vertical accuracy was also assessed using ground control points that were used in the calibration and post processing of the Lidar point cloud as they still serve as a good indication of the overall accuracy of the Lidar dataset. The Root Mean Square Error (RMSE) of the vertical accuracy of the Lidar dataset as compared to ground control points is 0.029 m. Please see the Lidar data report for a discussion of the statistics related to this dataset.29 ground control points were collected and utilized in the calibration and post processing of the Lidar data point cloud.RMSE0.029m 28 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/prj.adf: -------------------------------------------------------------------------------- 1 | Projection UTM 2 | Zone 12 3 | Spheroid GRS80 4 | Units METERS 5 | Zunits NO 6 | Parameters 7 | -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/sta.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0328/sta.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/w001001.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0328/w001001.adf -------------------------------------------------------------------------------- /tests/data/be_gm1_0328/w001001x.adf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/be_gm1_0328/w001001x.adf -------------------------------------------------------------------------------- /tests/data/density.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,1N20 3 | # PitID,COGM1N20_20200205 4 | # Date/Time,2020-02-05-13:30 5 | # UTM Zone,12N 6 | # Easting,743281 7 | # Northing,4324005 8 | # top (cm),bottom (cm),density A (kg/m3),density B (kg/m3),density C (kg/m3) 9 | 35.0,25.0,190.0,245.0,NaN 10 | 25.0,15.0,228.0,241.0,NaN 11 | 15.0,5.0,217.0,253.0,NaN 12 | 12.0,2.0,236.0,NaN,NaN 13 | -------------------------------------------------------------------------------- /tests/data/depths.csv: -------------------------------------------------------------------------------- 1 | Measurement Tool (MP = Magnaprobe; M2 = Mesa 2; PR = Pit Ruler),ID,Date (yyyymmdd),"Time (hh:mm, local, MST)",Longitude,Latitude,Easting,Northing,Depth (cm),elevation (m),equipment,Version Number 2 | MP,100000,20200128,11:48,-108.13515,39.03045,747987.62,4324061.71,94,3148.20,CRREL_B,1 3 | 
MP,100001,20200128,11:48,-108.13516,39.03045,747986.75,4324061.68,74,3148.30,CRREL_B,1 4 | MP,100002,20200128,11:48,-108.13517,39.03045,747985.89,4324061.65,90,3148.20,CRREL_B,1 5 | MP,100003,20200128,11:48,-108.13519,39.03044,747984.19,4324060.49,87,3148.60,CRREL_B,1 6 | M2,201659,20200204,12:50,-108.14591,39.03157,747051.92,4324156.86,87,3134.17,Mesa2_1,1 7 | M2,201660,20200204,12:51,-108.14595,39.03156,747048.84,4324155.24,117,3133.88,Mesa2_1,1 8 | M2,201661,20200204,12:51,-108.14596,39.03154,747047.57,4324153.53,110,3134.00,Mesa2_1,1 9 | PR,300058,20200211,15:50,-108.18637,39.03184,743548.00,4324077.00,68,3062.959961,none,1 10 | PR,300059,20200130,13:30,-108.17499,39.02768,744548.00,4323646.00,72,3086.75,none,1 11 | PR,300060,20200205,13:45,-108.20413,39.05398,741935.00,4326488.00,89,3053.629883,none,1 12 | -------------------------------------------------------------------------------- /tests/data/gpr.csv: -------------------------------------------------------------------------------- 1 | UTCyear,UTCdoy,UTCtod,UTMzone,Easting,Northing,Elevation,TWT,avgVelocity,avgDensity,Depth,SWE 2 | 2019,28,161549.562,12S,743148.424234231,4324346.71484848,3057.18763636364,8.3,0.247379540774491,250.786035454008,102.662509421414,257.463237275561 3 | 2019,28,175318.388,12S,743508.438767731,4324227.13721936,3064.12378234139,10.0058518216919,0.242286048901651,280.938399439763,121.213915188656,340.536433229281 4 | 2019,29,180124.855,12S,742233.2186624,4322621.43895685,3037.97615180445,6.80619139048427,0.240565215764285,291.413916241337,81.8666450192435,238.570796345971 5 | 2019,35,213751.755,12S,745780.897105061,4321985.71147198,3087.98749783817,8.9,0.243455055947779,273.906577593421,108.337499896762,296.743538217496 6 | 2019,35,214021.205,12S,745376.686559302,4321807.15509872,3085.6150619139,9.2,0.244394265827249,268.305768389162,112.421362280535,301.632999900353 7 | 2019,35,214317.222,12S,744773.055741657,4321793.77659148,3079.53574947537,8.2,0.244220114940834,269.341032556241,100.130247125742,269.691841509589 8 | 2019,35,215155.305,12S,745532.360011645,4321816.1288482,3086.392,7,0.244370514453528,268.446874835432,85.5296800587348,229.601753174417 9 | 2019,35,223154.172,12S,745108.077926342,4322203.70940671,3087.21917969897,7.2,0.244348759074589,268.576147186575,87.965553266852,236.254493815466 10 | 2019,35,224611.005,12S,745274.664851329,4322559.47539613,3095.23918168282,7.3,0.243105715700231,276.000847209488,88.7335862305843,244.905449755774 11 | 2019,35,230353.759,12S,745251.754528696,4322583.32103968,3095.7947931184,8.6,0.243651116937365,272.733837098185,104.769980283067,285.743187353021 12 | -------------------------------------------------------------------------------- /tests/data/pole_depths.csv: -------------------------------------------------------------------------------- 1 | Camera,Date&Time,Latitude,Longitude,UTM WGS84 Northing (meters),UTM WGS84 Easting (meters),Depth (cm) 2 | W1B,1/27/2020 11:00,39.008078,-108.184794,4321444.155,743766.4795,57.01036 3 | W1B,3/14/2020 12:00,39.008078,-108.184794,4321444.155,743766.4795,40.50772 4 | W1B,5/3/2020 11:00,39.008078,-108.184794,4321444.155,743766.4795,-4.49948 5 | E9B,11/29/2019 11:00,39.100639,-107.900614,4332519.266,768027.6621,30.11831 6 | E9B,2/27/2020 13:00,39.100639,-107.900614,4332519.266,768027.6621,67.18007 7 | E9B,4/7/2020 13:00,39.100639,-107.900614,4332519.266,768027.6621,89.18549 8 | E9B,5/22/2020 13:00,39.100639,-107.900614,4332519.266,768027.6621,-2.31073 9 | E8A,10/28/2019 
11:00,39.097329,-107.887477,4332190.721,769176.5498,12.16149 10 | E8A,11/28/2019 13:00,39.097329,-107.887477,4332190.721,769176.5498,38.26381 11 | E8A,12/14/2019 11:00,39.097329,-107.887477,4332190.721,769176.5498,54.91529 12 | E6A,11/27/2019 12:00,39.097464,-107.862476,4332280.166,771338.607,37.7048 13 | E6A,12/7/2019 11:00,39.097464,-107.862476,4332280.166,771338.607,47.3496 14 | E6A,12/31/2019 11:00,39.097464,-107.862476,4332280.166,771338.607,78.0376 15 | E6A,2/1/2020 13:00,39.097464,-107.862476,4332280.166,771338.607,101.2728 16 | -------------------------------------------------------------------------------- /tests/data/site_5S21.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,5S21 3 | # PitID,COGM5S21_20200201 4 | # Date/Time,2020-02-01-14:30 5 | # UTM Zone,12N 6 | # Easting [m],744561 7 | # Northing [m],4322721 8 | # Slope [deg],0 9 | # Aspect [deg],N 10 | # Air Temp [deg C],1.5 11 | # Total Depth [cm],110 12 | # Surveyors,"Carrie Vuyovich, Glen Liston" 13 | # Weather,Thin overcast 14 | # Precip,None 15 | # Sky,Overcast(complete cover) 16 | # Wind,Light 17 | # Ground Condition,Frozen 18 | # Ground Roughness,Smooth 19 | # Ground Vegetation,['Grass'] 20 | # Vegetation Height,"10,nan" 21 | # Tree Canopy,No Trees 22 | # Comments:,"surface temp = 14:44, bottom temp = 14:56" 23 | -------------------------------------------------------------------------------- /tests/data/site_details.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,1N20 3 | # PitID,COGM1N20_20200205 4 | # Date/Time,2020-02-05-13:30 5 | # UTM Zone,12N 6 | # Easting [m],743281 7 | # Northing [m],4324005 8 | # Slope [deg],5° 9 | # Aspect [deg],S 10 | # Air Temp [deg C],NaN 11 | # Total Depth [cm],35 12 | # Surveyors,"Chris Hiemstra, Hans Lievens" 13 | # Weather,"Sunny, cold, gusts 14 | # Ground roughness, rough, rocks in places" 15 | # Precip,None 16 | # Sky,Few (< 1/4 of sky) 17 | # Wind,Moderate 18 | # Ground Condition,Frozen 19 | # Ground Vegetation,['Grass'] 20 | # Vegetation Height,"5,nan" 21 | # Tree Canopy,No Trees 22 | # Comments:,"Start temperature measurements (top): 13:48 End temperature 23 | measurements (bottom): 13:53 LWC sampler broke, no measurements were 24 | possible" 25 | -------------------------------------------------------------------------------- /tests/data/smp_log.csv: -------------------------------------------------------------------------------- 1 | #SNOWEX IOP GRAND MESA,,,,,,,,, 2 | #SMP MEASUREMENTS,,,,,,,,, 3 | #Level_1=omitted files removed from level_0,,,,,,,,, 4 | #OBSERVER:,"Megan Mason (MM), HP Marshall (HP), Ioanna Merkouriadi (IM)",,,,,,,, 5 | "#P=pitwall measurement, P_top=top portion of pit, P_mid=mid portion, P_bot=bottom portion",,,,,,,,, 6 | #SMP instrument number: ,,"06=SMP06 (short), 19=SMP19 (1.75m, longest), 19b=SMP19re-born (parts harvested from SMP06)",,,,,,, 7 | #Orientation:,"ex, N5,N4,N3,N2,N1=10,20,30,40,50m, C=Center (transect crossing point)",,,,,,,, 8 | "#CK=check when plotting, something doesn't align with notes",,,,,,,,, 9 | #NA=measurement not taken or not applicable,,,,,,,,, 10 | Date,Pit ID,SMP instrument # ,Fname sufix,Orientation,Snow depth,Flag,Observer,Comments, 11 | 1/31/20,2N12,06,0874,S2,NA,,IM,started 1-2 cm below surface, 12 | 1/31/20,2N12,06,0875,S1,NA,,IM,started 1-2 cm below surface, 13 | 2/1/20,5S21,19b,1013,S3,NA,,HP,, 14 | 2/1/20,5S21,19b,1014,S2,NA,,HP,, 15 | 
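The pit CSVs above (density.csv, site_5S21.csv, site_details.csv, smp_log.csv) share one convention: '#'-prefixed key,value metadata lines followed by the data rows; in the layer files the last '#' line carries the column names. A minimal parsing sketch, not part of this package, with the hypothetical helper name read_pit_csv, showing how that header block could be split from the rows:

import csv

def read_pit_csv(path):
    # Hypothetical helper (illustration only): split a SnowEx pit CSV into a
    # metadata dict and its data rows. In the layer files the final '#' line
    # is the column-header row and will land in meta alongside the site keys.
    meta, rows = {}, []
    with open(path, newline='') as f:
        for record in csv.reader(f):
            if not record:
                continue
            if record[0].startswith('#'):
                # Header lines look like '# Easting [m],744561'
                key = record[0].lstrip('#').strip().rstrip(':')
                meta[key] = ','.join(record[1:])
            else:
                rows.append(record)
    return meta, rows

meta, rows = read_pit_csv('tests/data/site_5S21.csv')
print(meta.get('PitID'))  # COGM5S21_20200201

Quoted fields that span multiple lines, like the Weather entry in site_details.csv above, are handled correctly by csv.reader, which is why the sketch avoids naive per-line splitting.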
-------------------------------------------------------------------------------- /tests/data/stratigraphy.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,1N20 3 | # PitID,COGM1N20_20200205 4 | # Date/Time,2020-02-05-13:30 5 | # UTM Zone,12N 6 | # Easting,743281 7 | # Northing,4324005 8 | # Top [cm],Bottom [cm],Grain Size [mm],Grain Type,Hand Hardness,Manual Wetness,Comments 9 | 35.0,33.0,< 1 mm,DF,F,D,NaN 10 | 33.0,30.0,< 1 mm,DF,4F,D,NaN 11 | 30.0,17.0,2-4 mm,FC,4F,D,NaN 12 | 17.0,7.0,2-4 mm,FC,4F,D,Cups 13 | 7.0,0.0,2-4 mm,FC,1F,D,NaN 14 | -------------------------------------------------------------------------------- /tests/data/temperature.csv: -------------------------------------------------------------------------------- 1 | # Location,Grand Mesa 2 | # Site,1N20 3 | # PitID,COGM1N20_20200205 4 | # Date/Time,2020-02-05-13:30 5 | # UTM Zone,12N 6 | # Easting,743281 7 | # Northing,4324005 8 | # Height [cm],Temperature [deg C] 9 | 35,-18.1 10 | 30,-11.8 11 | 20,-9.3 12 | 10,-5.9 13 | 0,-3.3 14 | -------------------------------------------------------------------------------- /tests/data/uavsar/uavsar_utm.amp1.real.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar/uavsar_utm.amp1.real.tif -------------------------------------------------------------------------------- /tests/data/uavsar/uavsar_utm.amp2.real.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar/uavsar_utm.amp2.real.tif -------------------------------------------------------------------------------- /tests/data/uavsar/uavsar_utm.cor.real.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar/uavsar_utm.cor.real.tif -------------------------------------------------------------------------------- /tests/data/uavsar/uavsar_utm.int.imaginary.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar/uavsar_utm.int.imaginary.tif -------------------------------------------------------------------------------- /tests/data/uavsar/uavsar_utm.int.real.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar/uavsar_utm.int.real.tif -------------------------------------------------------------------------------- /tests/data/uavsar_latlon.amp1.grd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar_latlon.amp1.grd -------------------------------------------------------------------------------- /tests/data/uavsar_latlon.amp1.real.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar_latlon.amp1.real.tif -------------------------------------------------------------------------------- 
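The UAVSAR interferogram in the test data is stored as separate real and imaginary GeoTIFFs (uavsar_utm.int.real.tif and uavsar_utm.int.imaginary.tif). A short sketch, an illustration rather than repository code, of how the pair could be recombined into a complex interferogram with rasterio:

import numpy as np
import rasterio

# Read the two components and recombine them into complex pixel values
with rasterio.open('tests/data/uavsar/uavsar_utm.int.real.tif') as real_src, \
        rasterio.open('tests/data/uavsar/uavsar_utm.int.imaginary.tif') as imag_src:
    igram = real_src.read(1) + 1j * imag_src.read(1)

phase = np.angle(igram)  # interferometric phase, radians
mag = np.abs(igram)      # interferogram magnitude

Splitting the complex product into two real-valued rasters keeps each file readable by tools that do not handle complex pixel types.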
-------------------------------------------------------------------------------- /tests/data/uavsar_latlon.amp2.grd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar_latlon.amp2.grd -------------------------------------------------------------------------------- /tests/data/uavsar_latlon.cor.grd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar_latlon.cor.grd -------------------------------------------------------------------------------- /tests/data/uavsar_latlon.int.grd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SnowEx/snowexsql/fb71fa2a37ab13607e9a64c689646516d439e18d/tests/data/uavsar_latlon.int.grd -------------------------------------------------------------------------------- /tests/map.html: --------------------------------------------------------------------------------
1 | <!-- HTML body not rendered in this dump: map.html is the interactive map written by tests/scratch.py via bx.explore().save("map.html") -->
-------------------------------------------------------------------------------- /tests/scratch.py: --------------------------------------------------------------------------------
1 | from metloom.pointdata import SnotelPointData
2 | 
3 | import geopandas as gpd
4 | 
5 | from snowexsql.api import PointMeasurements
6 | 
7 | 
8 | def test_stuff():
9 |     # Use a SNOTEL station as a known location to anchor spatial queries
10 |     sntl_point = SnotelPointData("622:CO:SNTL", "dummy name")
11 |     geom = sntl_point.metadata
12 |     geom = gpd.GeoSeries(geom).set_crs(4326).to_crs(26912).geometry.values[0]
13 | 
14 |     # Buffer the station location and build a query box from its bounds
15 |     shp1 = gpd.GeoSeries(
16 |         sntl_point.metadata
17 |     ).set_crs(4326).buffer(.1).total_bounds
18 |     bx = PointMeasurements.build_box(
19 |         *list(shp1),
20 |         4326
21 |     )
22 |     bx = bx.to_crs(26912)
23 |     bx.explore().save("map.html")
24 | 
25 |     # Query by polygon, then by buffered point; the second call overwrites df
26 |     df = PointMeasurements.from_area(
27 |         shp=bx.geometry.iloc[0], limit=30
28 |     )
29 |     df = PointMeasurements.from_area(
30 |         pt=geom, buffer=10000, instrument="magnaprobe", limit=250
31 |     )
32 |     # df = PointMeasurements.from_filter(
33 |     #     instrument="magnaprobe", limit=20
34 |     # )
35 |     print(df)
-------------------------------------------------------------------------------- /tests/sql_test_base.py: --------------------------------------------------------------------------------
1 | from os.path import dirname, join
2 | 
3 | from numpy.testing import assert_almost_equal
4 | from sqlalchemy import asc
5 | 
6 | from snowexsql.db import get_db, initialize
7 | 
8 | 
9 | def pytest_generate_tests(metafunc):
10 |     """
11 |     Function used to parametrize functions. If the function is in the
12 |     params keys then run it. Otherwise run all the tests normally.
13 |     """
14 |     # Were params provided?
15 |     if hasattr(metafunc.cls, 'params'):
16 |         if metafunc.function.__name__ in metafunc.cls.params.keys():
17 |             funcarglist = metafunc.cls.params[metafunc.function.__name__]
18 |             argnames = sorted(funcarglist[0])
19 |             metafunc.parametrize(
20 |                 argnames, [[funcargs[name] for name in argnames] for funcargs in funcarglist]
21 |             )
22 | 
23 | 
24 | class DBSetup:
25 |     """
26 |     Base class for all our tests.
Ensures that we clean up after every class that's run
27 |     """
28 | 
29 |     @classmethod
30 |     def setup_class(self):
31 |         """
32 |         Set up the database one time for testing
33 |         """
34 |         self.db = 'localhost/test'
35 |         self.data_dir = join(dirname(__file__), 'data')
36 |         creds = join(dirname(__file__), 'credentials.json')
37 | 
38 |         self.engine, self.session, self.metadata = get_db(self.db, credentials=creds, return_metadata=True)
39 | 
40 |         initialize(self.engine)
41 | 
42 |     @classmethod
43 |     def teardown_class(self):
44 |         """
45 |         Remove the database
46 |         """
47 |         self.metadata.drop_all(bind=self.engine)
48 |         self.session.close()  # optional, depends on use case
49 | 
50 |     def teardown(self):
51 |         self.session.flush()
52 |         self.session.rollback()
53 | 
54 | 
55 | class TableTestBase(DBSetup):
56 |     """
57 |     Test any table by picking an uploader, a table class, and the expected values
58 |     """
59 |     # Class to use to upload the data
60 |     UploaderClass = None
61 | 
62 |     # Positional arguments to pass to the uploader class
63 |     args = []
64 | 
65 |     # Keyword args to pass to the uploader class
66 |     kwargs = {}
67 | 
68 |     # Always define this using a table class from snowexsql.tables; it is used for the ORM
69 |     TableClass = None
70 | 
71 |     # First filter to be applied is count_attribute == data_name
72 |     count_attribute = 'type'
73 | 
74 |     # Define params which is a dictionary of test names and their args
75 |     params = {
76 |         'test_count': [dict(data_name=None, expected_count=None)],
77 |         'test_value': [
78 |             dict(data_name=None, attribute_to_check=None, filter_attribute=None, filter_value=None, expected=None)],
79 |         'test_unique_count': [dict(data_name=None, attribute_to_count=None, expected_count=None)]
80 |     }
81 | 
82 |     @classmethod
83 |     def setup_class(self):
84 |         """
85 |         Set up the database one time for testing
86 |         """
87 |         super().setup_class()
88 | 
89 |         # Batches always provide a list of files
90 |         if isinstance(self.args[0], list):
91 |             self.args[0] = [join(self.data_dir, f) for f in self.args[0]]
92 |         # Single uploaders only upload a single file
93 |         else:
94 |             self.args[0] = join(self.data_dir, self.args[0])
95 | 
96 |         # In case we have a smp_log file make it point to the data folder too
97 |         if 'smp_log_f' in self.kwargs:
98 |             if self.kwargs['smp_log_f'] is not None:
99 |                 self.kwargs['smp_log_f'] = join(self.data_dir, self.kwargs['smp_log_f'])
100 | 
101 |         self.kwargs['db_name'] = self.db
102 |         self.kwargs['credentials'] = join(dirname(__file__), 'credentials.json')
103 |         u = self.UploaderClass(*self.args, **self.kwargs)
104 | 
105 |         # Allow for batches and single upload
106 |         if 'batch' in self.UploaderClass.__name__.lower():
107 |             u.push()
108 |         else:
109 |             u.submit(self.session)
110 | 
111 |     def get_query(self, filter_attribute, filter_value, query=None):
112 |         """
113 |         Return the base query, filtering an attribute by the value it is
114 |         supposed to be
115 | 
116 |         Args:
117 |             filter_attribute: Name of attribute to search for
118 |             filter_value: Value that attribute should be to filter db search
119 |             query: If we're extending a query, use it instead of forming a new one
120 |         Return:
121 |             q: Uncompiled SQLAlchemy Query object
122 |         """
123 | 
124 |         if query is None:
125 |             query = self.session.query(self.TableClass)
126 | 
127 |         fa = getattr(self.TableClass, filter_attribute)
128 |         q = query.filter(fa == filter_value).order_by(asc(fa))
129 |         return q
130 | 
131 |     def test_count(self, data_name, expected_count):
132 |         """
133 |         Test the record count of a data type
134 |         """
135 |         q = self.get_query(self.count_attribute, data_name)
136 |         records = q.all()
137 |         assert len(records) == expected_count
138 | 
139 |     def test_value(self, data_name, attribute_to_check, filter_attribute, filter_value, expected):
140 |         """
141 |         Test that the first value in a filtered record search is as expected
142 |         """
143 |         # Filter to the data type we're querying
144 |         q = self.get_query(self.count_attribute, data_name)
145 | 
146 |         # Add another filter by some attribute
147 |         q = self.get_query(filter_attribute, filter_value, query=q)
148 | 
149 |         records = q.all()
150 |         received = getattr(records[0], attribute_to_check)
151 | 
152 |         try:
153 |             received = float(received)
154 |         except (TypeError, ValueError):
155 |             pass
156 | 
157 |         if isinstance(received, float):
158 |             assert_almost_equal(received, expected, 6)
159 |         else:
160 |             assert received == expected
161 | 
162 |     def test_unique_count(self, data_name, attribute_to_count, expected_count):
163 |         """
164 |         Test that the number of unique values in a given attribute is as expected
165 |         """
166 |         # Add another filter by some attribute
167 |         q = self.get_query(self.count_attribute, data_name)
168 |         records = q.all()
169 |         received = len(set([getattr(r, attribute_to_count) for r in records]))
170 |         assert received == expected_count
-------------------------------------------------------------------------------- /tests/test_api.py: --------------------------------------------------------------------------------
1 | from os.path import join, dirname
2 | import geopandas as gpd
3 | import numpy as np
4 | import pytest
5 | from datetime import date
6 | 
7 | from snowexsql.api import (
8 |     PointMeasurements, LargeQueryCheckException, LayerMeasurements
9 | )
10 | from snowexsql.db import get_db, initialize
11 | 
12 | 
13 | @pytest.fixture(scope="session")
14 | def data_dir():
15 |     return join(dirname(__file__), 'data')
16 | 
17 | 
18 | @pytest.fixture(scope="session")
19 | def creds(data_dir):
20 |     return join(dirname(__file__), 'credentials.json')
21 | 
22 | 
23 | @pytest.fixture(scope="session")
24 | def db_url():
25 |     return 'localhost/test'
26 | 
27 | 
28 | class DBConnection:
29 |     """
30 |     Base class for connecting to the test database and overwriting the URL
31 |     so that we stay connected to our local testing DB
32 |     """
33 |     CLZ = PointMeasurements
34 | 
35 |     @pytest.fixture(scope="class")
36 |     def db(self, creds, db_url):
37 |         engine, session, metadata = get_db(
38 |             db_url, credentials=creds, return_metadata=True)
39 | 
40 |         initialize(engine)
41 |         yield engine
42 |         # cleanup
43 |         session.flush()
44 |         session.rollback()
45 |         metadata.drop_all(bind=engine)
46 |         session.close()
47 | 
48 |     @pytest.fixture(scope="class")
49 |     def clz(self, db, db_url):
50 |         """
51 |         Extend the class and overwrite the database name
52 |         """
53 |         url = db.url
54 |         class Extended(self.CLZ):
55 |             DB_NAME = f"{url.username}:{url.password}@{url.host}/{url.database}"
56 | 
57 |         yield Extended
58 | 
59 | 
60 | def unsorted_list_tuple_compare(l1, l2):
61 |     # turn lists into sets, but get rid of any Nones
62 |     l1 = set([l[0] for l in l1 if l[0] is not None])
63 |     l2 = set([l[0] for l in l2 if l[0] is not None])
64 |     # compare the sets
65 |     return l1 == l2
66 | 
67 | 
68 | class TestPointMeasurements(DBConnection):
69 |     """
70 |     Test the Point Measurement class
71 |     """
72 |     CLZ = PointMeasurements
73 | 
74 |     def test_all_types(self, clz):
75 |         result = clz().all_types
76 |         assert unsorted_list_tuple_compare(
77 |             result,
78 |             []
79 |         )
80 | 
81 |     def test_all_site_names(self, clz):
82 |         result = clz().all_site_names
83 |         assert unsorted_list_tuple_compare(
84 |             result, []
85 |         )
86 | 
87 |     def
test_all_dates(self, clz): 88 | result = clz().all_dates 89 | assert len(result) == 0 90 | 91 | def test_all_observers(self, clz): 92 | result = clz().all_observers 93 | assert unsorted_list_tuple_compare( 94 | result, [] 95 | ) 96 | 97 | def test_all_instruments(self, clz): 98 | result = clz().all_instruments 99 | assert unsorted_list_tuple_compare( 100 | result, [] 101 | ) 102 | 103 | @pytest.mark.parametrize( 104 | "kwargs, expected_length, mean_value", [ 105 | ({ 106 | "date": date(2020, 5, 28), 107 | "instrument": 'camera' 108 | }, 0, np.nan), 109 | ({"instrument": "magnaprobe", "limit": 10}, 0, np.nan), # limit works 110 | ({ 111 | "date": date(2020, 5, 28), 112 | "instrument": 'pit ruler' 113 | }, 0, np.nan), 114 | ({ 115 | "date_less_equal": date(2019, 10, 1), 116 | }, 0, np.nan), 117 | ({ 118 | "date_greater_equal": date(2020, 6, 7), 119 | }, 0, np.nan), 120 | ] 121 | ) 122 | def test_from_filter(self, clz, kwargs, expected_length, mean_value): 123 | result = clz.from_filter(**kwargs) 124 | assert len(result) == expected_length 125 | if expected_length > 0: 126 | assert pytest.approx(result["value"].mean()) == mean_value 127 | 128 | @pytest.mark.parametrize( 129 | "kwargs, expected_error", [ 130 | ({"notakey": "value"}, ValueError), 131 | # ({"instrument": "magnaprobe"}, LargeQueryCheckException), 132 | ({"date": [date(2020, 5, 28), date(2019, 10, 3)]}, ValueError), 133 | ] 134 | ) 135 | def test_from_filter_fails(self, clz, kwargs, expected_error): 136 | """ 137 | Test failure on not-allowed key and too many returns 138 | """ 139 | with pytest.raises(expected_error): 140 | clz.from_filter(**kwargs) 141 | 142 | def test_from_area(self, clz): 143 | shp = gpd.points_from_xy( 144 | [743766.4794971556], [4321444.154620216], crs="epsg:26912" 145 | ).buffer(10)[0] 146 | result = clz.from_area( 147 | shp=shp, 148 | date=date(2019, 10, 30) 149 | ) 150 | assert len(result) == 0 151 | 152 | def test_from_area_point(self, clz): 153 | pts = gpd.points_from_xy([743766.4794971556], [4321444.154620216]) 154 | crs = "26912" 155 | result = clz.from_area( 156 | pt=pts[0], buffer=10, crs=crs, 157 | date=date(2019, 10, 30) 158 | ) 159 | assert len(result) == 0 160 | 161 | 162 | class TestLayerMeasurements(DBConnection): 163 | """ 164 | Test the Layer Measurement class 165 | """ 166 | CLZ = LayerMeasurements 167 | 168 | def test_all_types(self, clz): 169 | result = clz().all_types 170 | assert result == [] 171 | 172 | def test_all_site_names(self, clz): 173 | result = clz().all_site_names 174 | assert result == [] 175 | 176 | def test_all_dates(self, clz): 177 | result = clz().all_dates 178 | assert len(result) == 0 179 | 180 | def test_all_observers(self, clz): 181 | result = clz().all_observers 182 | assert unsorted_list_tuple_compare(result, []) 183 | 184 | def test_all_instruments(self, clz): 185 | result = clz().all_instruments 186 | assert unsorted_list_tuple_compare(result, []) 187 | 188 | @pytest.mark.parametrize( 189 | "kwargs, expected_length, mean_value", [ 190 | ({ 191 | "date": date(2020, 3, 12), "type": "density", 192 | "pit_id": "COERIB_20200312_0938" 193 | }, 0, np.nan), # filter to 1 pit 194 | ({"instrument": "IRIS", "limit": 10}, 0, np.nan), # limit works 195 | ({ 196 | "date": date(2020, 5, 28), 197 | "instrument": 'IRIS' 198 | }, 0, np.nan), # nothing returned 199 | ({ 200 | "date_less_equal": date(2019, 12, 15), 201 | "type": 'density' 202 | }, 0, np.nan), 203 | ({ 204 | "date_greater_equal": date(2020, 5, 13), 205 | "type": 'density' 206 | }, 0, np.nan), 207 | ] 208 | ) 209 | def 
test_from_filter(self, clz, kwargs, expected_length, mean_value): 210 | result = clz.from_filter(**kwargs) 211 | assert len(result) == expected_length 212 | if expected_length > 0: 213 | assert pytest.approx( 214 | result["value"].astype("float").mean() 215 | ) == mean_value 216 | 217 | @pytest.mark.parametrize( 218 | "kwargs, expected_error", [ 219 | ({"notakey": "value"}, ValueError), 220 | # ({"date": date(2020, 3, 12)}, LargeQueryCheckException), 221 | ({"date": [date(2020, 5, 28), date(2019, 10, 3)]}, ValueError), 222 | ] 223 | ) 224 | def test_from_filter_fails(self, clz, kwargs, expected_error): 225 | """ 226 | Test failure on not-allowed key and too many returns 227 | """ 228 | with pytest.raises(expected_error): 229 | clz.from_filter(**kwargs) 230 | 231 | def test_from_area(self, clz): 232 | df = gpd.GeoDataFrame( 233 | geometry=gpd.points_from_xy( 234 | [743766.4794971556], [4321444.154620216], crs="epsg:26912" 235 | ).buffer(1000.0) 236 | ).set_crs("epsg:26912") 237 | result = clz.from_area( 238 | type="density", 239 | shp=df.iloc[0].geometry, 240 | ) 241 | assert len(result) == 0 242 | 243 | def test_from_area_point(self, clz): 244 | pts = gpd.points_from_xy([743766.4794971556], [4321444.154620216]) 245 | crs = "26912" 246 | result = clz.from_area( 247 | pt=pts[0], buffer=1000, crs=crs, 248 | type="density", 249 | ) 250 | assert len(result) == 0 251 | -------------------------------------------------------------------------------- /tests/test_conversions.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | from os.path import isdir, join 4 | 5 | import pytest 6 | from sqlalchemy import func 7 | 8 | from snowexsql.conversions import * 9 | from .sql_test_base import DBSetup 10 | 11 | 12 | @pytest.mark.skip('Need to determine how to setup db for testing post splitting') 13 | class TestConversionsOnDB(DBSetup): 14 | """ 15 | Test any conversions that require a database 16 | """ 17 | 18 | def setup_class(self): 19 | """ 20 | Setup the database one time for testing 21 | """ 22 | super().setup_class() 23 | 24 | # Upload one raster 25 | raster_f = join(self.data_dir, 'be_gm1_0287', 'w001001x.adf') 26 | u = UploadRaster(filename=raster_f, epsg=26912, use_s3=False) 27 | u.submit(self.session) 28 | 29 | # Upload some point data 30 | fname = join(self.data_dir, 'depths.csv') 31 | csv = PointDataCSV(fname, depth_is_metadata=False, units='cm', site_name='Grand Mesa', 32 | epsg=26912) 33 | csv.submit(self.session) 34 | 35 | def test_points_to_geopandas(self): 36 | """ 37 | Test converting records of points to geopandas df 38 | """ 39 | records = self.session.query(PointData).all() 40 | df = points_to_geopandas(records) 41 | 42 | # Confirm the type 43 | assert isinstance(df, gpd.GeoDataFrame) 44 | 45 | # Confirm we have geometry 46 | assert 'geom' in df.columns 47 | 48 | # Confirm value count 49 | assert df['value'].count() == 10 50 | 51 | def test_query_to_geopandas_w_geom(self): 52 | """ 53 | Test converting a sqlalchemy query of points to geopandas df 54 | """ 55 | qry = self.session.query(PointData) 56 | df = query_to_geopandas(qry, self.engine) 57 | 58 | # Confirm the type 59 | assert isinstance(df, gpd.GeoDataFrame) 60 | 61 | # Confirm value count 62 | assert df['value'].count() == 10 63 | 64 | def test_query_to_geopandas_wo_geom(self): 65 | """ 66 | Test converting a sqlalchemy query of points to geopandas df where the geom column is not == 'geom' 67 | """ 68 | # Query the centroids of all the raster tiles and use that 
as the geometry column in geopandas
69 |         qry = self.session.query(func.ST_Centroid(func.ST_Envelope(ImageData.raster)))
70 |         df = query_to_geopandas(qry, self.engine, geom_col='ST_Centroid_1')
71 | 
72 |         # Confirm the type
73 |         assert isinstance(df, gpd.GeoDataFrame)
74 | 
75 |         # Confirm value count
76 |         assert df['ST_Centroid_1'].count() == 16
77 | 
78 |     def test_query_to_pandas(self):
79 |         """
80 |         Test converting a query to a dataframe using ImageData, which has no geom column
81 |         """
82 |         qry = self.session.query(ImageData.id, ImageData.date)
83 |         df = query_to_pandas(qry, self.engine)
84 | 
85 |         # Confirm the type
86 |         assert isinstance(df, pd.DataFrame)
87 | 
88 |         # Confirm value count
89 |         assert df['id'].count() == 16
90 | 
91 | 
92 |     def test_raster_to_rasterio(self):
93 |         """
94 |         Test retrieval of a raster as a numpy array via rasterio
95 |         """
96 |         rasters = self.session.query(func.ST_AsTiff(ImageData.raster)).all()
97 |         dataset = raster_to_rasterio(self.session, rasters)[0]
98 | 
99 |         arr = dataset.read(1)
100 | 
101 |         v = np.mean(arr)
102 | 
103 |         # Mean pulled from gdalinfo -stats be_gm1_0287/w001001x.adf
104 |         np.testing.assert_approx_equal(v, 3058.005, significant=3)
105 | 
106 | 
-------------------------------------------------------------------------------- /tests/test_db.py: --------------------------------------------------------------------------------
1 | from os.path import join
2 | 
3 | import pytest
4 | from sqlalchemy import Table
5 | 
6 | from snowexsql.db import get_db, get_table_attributes
7 | from snowexsql.tables import ImageData, LayerData, PointData, SiteData
8 | from .sql_test_base import DBSetup
9 | 
10 | 
11 | class TestDB(DBSetup):
12 |     base_atts = ['site_name', 'date', 'site_id']
13 |     single_loc_atts = ['elevation', 'geom', 'time']
14 | 
15 |     meas_atts = ['instrument', 'type', 'units', 'observers']
16 | 
17 |     site_atts = base_atts + single_loc_atts + \
18 |         ['slope_angle', 'aspect', 'air_temp', 'total_depth',
19 |          'weather_description', 'precip', 'sky_cover', 'wind',
20 |          'ground_condition', 'ground_roughness',
21 |          'ground_vegetation', 'vegetation_height',
22 |          'tree_canopy', 'site_notes']
23 | 
24 |     point_atts = single_loc_atts + meas_atts + \
25 |         ['version_number', 'equipment', 'value']
26 | 
27 |     layer_atts = single_loc_atts + meas_atts + \
28 |         ['depth', 'value', 'bottom_depth', 'comments', 'sample_a',
29 |          'sample_b', 'sample_c']
30 |     raster_atts = meas_atts + ['raster', 'description']
31 | 
32 |     def setup_class(self):
33 |         """
34 |         Set up the database one time for testing
35 |         """
36 |         super().setup_class()
37 |         site_fname = join(self.data_dir, 'site_details.csv')
38 |         # only reflect the tables we will use
39 |         self.metadata.reflect(self.engine, only=['points', 'layers'])
40 | 
41 |     def test_point_structure(self):
42 |         """
43 |         Tests our tables are in the database
44 |         """
45 |         t = Table("points", self.metadata, autoload=True)
46 |         columns = [m.key for m in t.columns]
47 | 
48 |         for c in self.point_atts:
49 |             assert c in columns
50 | 
51 |     def test_layer_structure(self):
52 |         """
53 |         Tests our tables are in the database
54 | """ 55 | t = Table("layers", self.metadata, autoload=True) 56 | columns = [m.key for m in t.columns] 57 | 58 | for c in self.layer_atts: 59 | assert c in columns 60 | 61 | @pytest.mark.parametrize("DataCls,attributes", [ 62 | (SiteData, site_atts), 63 | (PointData, point_atts), 64 | (LayerData, layer_atts), 65 | (ImageData, raster_atts)]) 66 | def test_get_table_attributes(self, DataCls, attributes): 67 | """ 68 | Test we return a correct list of table columns from db.py 69 | """ 70 | atts = get_table_attributes(DataCls) 71 | 72 | for c in attributes: 73 | assert c in atts 74 | 75 | 76 | # Independent Tests 77 | @pytest.mark.parametrize("return_metadata, expected_objs", [ 78 | (False, 2), 79 | (True, 3)]) 80 | def test_getting_db(return_metadata, expected_objs): 81 | """ 82 | Test we can receive a connection and opt out of getting the metadata 83 | """ 84 | 85 | result = get_db('builder:db_builder@localhost/test', return_metadata=return_metadata) 86 | assert len(result) == expected_objs 87 | -------------------------------------------------------------------------------- /tests/test_functions.py: -------------------------------------------------------------------------------- 1 | from os.path import join 2 | 3 | from geoalchemy2.elements import WKBElement 4 | from geoalchemy2.shape import to_shape 5 | from shapely.geometry import Point 6 | 7 | from snowexsql.functions import * 8 | from .sql_test_base import DBSetup 9 | import pytest 10 | 11 | @pytest.mark.skip('Need to figure out how to upload a raster for testing') 12 | class TestFunctions(DBSetup): 13 | 14 | def setup_class(self): 15 | """ 16 | Setup the database one time for testing 17 | """ 18 | super().setup_class() 19 | 20 | self.raster_f = join(self.data_dir, 'be_gm1_0328', 'w001001x.adf') 21 | u = UploadRaster(filename=self.raster_f, epsg=26912, use_s3=False) 22 | u.submit(self.session) 23 | 24 | def test_pixel_as_point(self): 25 | """ 26 | Test coordinate retrieval of a single pixel 27 | """ 28 | 29 | # Get the first pixel as a point 30 | records = self.session.query(ST_PixelAsPoint(ImageData.raster, 1, 1)).limit(1).scalar() 31 | 32 | # Get the Geometry from the Well known binary format 33 | q = self.session.scalar(records.ST_GeomFromEWKB()) 34 | 35 | # Check that its the correct type 36 | assert isinstance(q, WKBElement) 37 | 38 | # Convert geom to shapely object and compare 39 | assert to_shape(q) == Point(743000, 4324500) 40 | 41 | # def test_pixel_as_points(self): 42 | # ''' 43 | # Test coordinate retrieval of a single pixel 44 | # ''' 45 | # 46 | # # Get the first pixel as a point 47 | # records = self.session.query(ST_PixelAsPoints(ImageData.raster)).limit(5).all() 48 | # 49 | # # Check that its the correct type 50 | # for r in records: 51 | # # Get the Geometry from the Well known binary format 52 | # q = self.session.scalar(r[0].ST_GeomFromEWKB()) 53 | # 54 | # assert isinstance(q, WKBElement) 55 | 56 | # Convert geom to shapely object and compare 57 | # assert to_shape(q) == Point(743000, 4324500) 58 | --------------------------------------------------------------------------------
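Taken together, the API tests above exercise snowexsql's two query entry points: attribute filters via from_filter and spatial filters via from_area. A closing usage sketch, hedged in that it assumes a populated database rather than the empty test one, mirroring the calls made in tests/test_api.py and tests/scratch.py:

from datetime import date

import geopandas as gpd

from snowexsql.api import PointMeasurements

# Attribute filtering: unknown keys and lists of dates raise ValueError,
# and 'limit' caps the number of returned records
df = PointMeasurements.from_filter(instrument='magnaprobe', limit=100)

# Spatial filtering around a point; the crs is passed separately, as in the tests
pt = gpd.points_from_xy([743766.48], [4321444.15])[0]
df = PointMeasurements.from_area(pt=pt, buffer=1000, crs='26912',
                                 date=date(2020, 2, 5))
print(df.head())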