├── .coveragerc ├── .deepsource.toml ├── .gitignore ├── .landscape.yml ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── make.bat └── source │ ├── conf.py │ ├── drive.rst │ ├── granule.png │ ├── index.rst │ ├── introduction.rst │ ├── l2ss.rst │ ├── mcc.rst │ ├── oceancolor.rst │ ├── podaac_logo.png │ ├── quickstart.rst │ ├── utilities.rst │ └── webservices.rst ├── examples ├── 2017_PO.DAAC_Citation_cleaned_and_verified.xlsx ├── ASCAT_geometry.jpg ├── Calculate Standard Deviation from Cross-Calibrated Multi-Platform Ocean Surface Wind Vector L3.0 First-Look Analyses.ipynb ├── README.rst ├── Using Podaacpy to retrieve CYGNSS Level 2 Science Data.ipynb ├── Using Podaacpy to retrieve CYGNSS Level 3 Science Data.ipynb ├── Using podaacpy to interact with PO.DAAC Drive.ipynb ├── podaac.ini ├── podaac_metrics.ipynb ├── podaacpy_getting_started_tutorial.ipynb └── subset.json ├── podaac ├── __init__.py ├── drive.py ├── l2ss.py ├── mcc.py ├── oceancolor.py ├── podaac.ini ├── podaac.py ├── podaac_utils.py └── tests │ ├── __init__.py │ ├── ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc │ ├── drive_test.py │ ├── l2ss_tests.py │ ├── mcc_test.py │ ├── oceancolor_test.py │ ├── podaac.ini │ ├── podaac_test.py │ └── test.json ├── setup.cfg └── setup.py /.coveragerc: -------------------------------------------------------------------------------- 1 | # Copyright 2017 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | [run] 16 | source = podaac.podaac,podaac.mcc,podaac.oceancolor,podaac.l2ss,podaac.podaac_utils 17 | [report] 18 | -------------------------------------------------------------------------------- /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | test_patterns = [ 4 | 'podaac/tests/**' 5 | ] 6 | 7 | exclude_patterns = [ 8 | 'docs/**', 9 | 'examples/**' 10 | ] 11 | 12 | [[ analyzers ]] 13 | name = 'python' 14 | enabled = true 15 | [analyzers.meta] 16 | max_line_length = 100 17 | skip_doc_coverage = ["module", "magic", "class"] 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | #Ipython Notebook 62 | .ipynb_checkpoints 63 | 64 | # Test artifacts 65 | *.png 66 | 67 | # Release and installation generated files 68 | doc.txt 69 | 70 | # Eclipse Files 71 | .project 72 | .pydevproject 73 | .settings/ 74 | 75 | # Example files 76 | examples/*.nc 77 | 78 | # Test drive data 79 | podaac/tests/allData 80 | podaac/*.zip 81 | -------------------------------------------------------------------------------- /.landscape.yml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | strictness : medium 19 | pylint: 20 | run: false 21 | python-targets: 22 | - 2 23 | test-warnings: no 24 | pep8: 25 | full: true 26 | ignore-paths: 27 | - docs/ 28 | requirements: 29 | - setup.py 30 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 Unless 6 | # 7 | # required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" 9 | # BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 10 | # express or implied. See the License for the specific language 11 | # governing permissions and limitations under the License. 12 | dist: xenial # required for Python >= 3.7 13 | language: python 14 | python: 15 | - "2.7" 16 | - "3.6" 17 | - "3.7" 18 | #- "3.7-dev" # 3.7 development branch 19 | #- "3.8-dev" # 3.8 development branch 20 | notifications: 21 | email: false 22 | cache: 23 | - pip 24 | install: 25 | - sudo apt-get update 26 | - sudo apt-get -y install python-dev python-pip 27 | - python setup.py install 28 | - pip install requests bs4 coveralls 29 | script: 30 | - nosetests --with-coverage --cover-package=podaac -v 31 | after_success: # Generate coverage reports only on success 32 | - coveralls 33 | deploy: # Runs on every commit to master branch 34 | # test pypi 35 | - provider: pypi 36 | server: https://test.pypi.org/legacy/ 37 | user: lewismc 38 | password: 39 | secure: 
Q4BwpJPlAR4WSeAmS4Q20dy35RHdfdzA6nQHC+5f76zDfKiebhFc7JZ6dl5Rsqpq0yJtmhoaLlj7qBbFtk1JjEkJhj7/M7jinYdaY0v3JMDuZPx/ReLmCtWtOcre9qb0x/XoGDO4hJfklvqesmIk9Ai9YVJq46BlRRLwM2M5ys08EGobr/3++B2lyMsexjhBMWzWzYGFFq26iHdEHKGskxjNkNCkTcZWnfCsMAPoG0ZxyCwhO0TGT8W7kloqN40q/+/l6DwrrAmYWvckrFy0ItHbtCVDNK1jxlijr1IrbXpMtpzyaCCsHc+21AbsJyMG9oyuMJw/e6YfzIoA3gAnUv99iFTAk5LQh6DIYeGRjggSjX9H4oelEc1c9LC3mfI0LqahIUDZQPiWGWBlHqRSUzrT9zhwiBs/5fBsQPLWSz8TvlqwSixYTwN9xm+D9Bczwftr8h4R3ucxuBbntGtmvMGBEHE14IrJcrleUBkZ1aWNEXkE/sos0bardMukM1FRlBIFTk9NCsAVBKOUvcZvdGQlVu1lVJ+nxeSROTMIFvRRs3n2L8Tpr4/uMslVW2zq66+gNES9f0B+kW+V77RVeii+B8sB9HFS1i56rV85rVURI15QPlx9L+5wbFTofnJNzdNCRr7WIlT133oMzn3cjRCS9mztSen+ywyPI7ZDmHI= 40 | on: 41 | branch: master 42 | tags: false 43 | # Runs only on tagged commits to master branch 44 | - provider: pypi 45 | user: lewismc 46 | password: 47 | secure: Q4BwpJPlAR4WSeAmS4Q20dy35RHdfdzA6nQHC+5f76zDfKiebhFc7JZ6dl5Rsqpq0yJtmhoaLlj7qBbFtk1JjEkJhj7/M7jinYdaY0v3JMDuZPx/ReLmCtWtOcre9qb0x/XoGDO4hJfklvqesmIk9Ai9YVJq46BlRRLwM2M5ys08EGobr/3++B2lyMsexjhBMWzWzYGFFq26iHdEHKGskxjNkNCkTcZWnfCsMAPoG0ZxyCwhO0TGT8W7kloqN40q/+/l6DwrrAmYWvckrFy0ItHbtCVDNK1jxlijr1IrbXpMtpzyaCCsHc+21AbsJyMG9oyuMJw/e6YfzIoA3gAnUv99iFTAk5LQh6DIYeGRjggSjX9H4oelEc1c9LC3mfI0LqahIUDZQPiWGWBlHqRSUzrT9zhwiBs/5fBsQPLWSz8TvlqwSixYTwN9xm+D9Bczwftr8h4R3ucxuBbntGtmvMGBEHE14IrJcrleUBkZ1aWNEXkE/sos0bardMukM1FRlBIFTk9NCsAVBKOUvcZvdGQlVu1lVJ+nxeSROTMIFvRRs3n2L8Tpr4/uMslVW2zq66+gNES9f0B+kW+V77RVeii+B8sB9HFS1i56rV85rVURI15QPlx9L+5wbFTofnJNzdNCRr7WIlT133oMzn3cjRCS9mztSen+ywyPI7ZDmHI= 48 | distributions: sdist bdist_wheel 49 | on: 50 | tags: true # Deploy only on tagged commits 51 | branch: master 52 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. 
Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.rst 3 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | podaacpy 2 | ======== 3 | 4 | podaacpy has been retired please use https://github.com/podaac/data-subscriber 5 | ------------------------------------------------------------------------------ 6 | 7 | |DOI| |license| |PyPI| |documentation| |Travis| |Coveralls| |Requirements Status| |Anaconda-Server Version| |Anaconda-Server Downloads| 8 | 9 | |DeepSource| 10 | 11 | |image7| 12 | 13 | A python utility library for interacting with NASA JPL's 14 | `PO.DAAC `__ 15 | 16 | 17 | Software DOI 18 | ------------ 19 | 20 | If you are using Podaacpy in your research, please consider citing the software |DOI|. This DOI represents all versions, and will always resolve to the latest one. 
If you wish to reference actual versions, then please find the appropriate DOI's over at Zenodo. 21 | 22 | 23 | What is PO.DAAC? 24 | ---------------- 25 | 26 | | The Physical Oceanography Distributed Active Archive Center (PO.DAAC) 27 | is an element of the 28 | | Earth Observing System Data and Information System 29 | (`EOSDIS `__). 30 | | The EOSDIS provides science data to a wide community of users for 31 | NASA's Science Mission Directorate. 32 | 33 | What does podaacpy offer? 34 | ------------------------- 35 | 36 | The library provides a Python toolkit for interacting with all 37 | `PO.DAAC Web Services v3.2.2 APIs `__, namely 38 | 39 | - `PO.DAAC Web Services `__: services 40 | include 41 | - `Dataset 42 | Metadata `__ 43 | - retrieves the metadata of a dataset 44 | - `Granule 45 | Metadata `__ 46 | - retrieves the metadata of a granule 47 | - `Search 48 | Dataset `__ 49 | - searches PO.DAAC's dataset catalog, over Level 2, Level 3, and 50 | Level 4 datasets 51 | - `Search 52 | Granule `__ 53 | - does granule searching on PO.DAAC level 2 swath datasets 54 | (individual orbits of a satellite), and level 3 & 4 gridded datasets 55 | (time averaged to span the globe) 56 | - `Image 57 | Granule `__ - 58 | renders granules in the PO.DAAC's catalog to images such as jpeg 59 | and/or png 60 | - `Extract 61 | Granule `__ 62 | - subsets a granule in PO.DAAC catalog and produces either netcdf3 or 63 | hdf4 files 64 | 65 | - | `Metadata Compliance 66 | Checker `__: an online tool and 67 | web 68 | | service designed to check and validate the contents of netCDF and 69 | HDF granules for the 70 | | Climate and Forecast (CF) and Attribute Convention for Dataset 71 | Discovery (ACDD) metadata conventions. 72 | 73 | - | `Level 2 Subsetting 74 | `__: allows users to subset 75 | and download popular PO.DAAC level 2 (swath) datasets. 76 | 77 | - | `PO.DAAC Drive `__: an HTTP based 78 | data access service. 
PO.DAAC Drive replicates much of the functionality 79 | of FTP while addressing many of its issues. 80 | 81 | Additionally, Podaacpy provides the following ocean-related data services 82 | 83 | - `NASA OceanColor Web `_: 84 | 85 | - `File Search `_ - locate publicly available files within the NASA Ocean Data Processing System (ODPS) 86 | - `Bulk data downloads via HTTP `_ - mimic FTP bulk data downloads using the `HTTP-based data distribution server `_. 87 | 88 | Installation 89 | ------------ 90 | 91 | From the cheeseshop 92 | 93 | :: 94 | 95 | pip3 install podaacpy 96 | 97 | or from conda 98 | 99 | :: 100 | 101 | conda install -c conda-forge podaacpy 102 | 103 | or from source 104 | 105 | :: 106 | 107 | git clone https://github.com/nasa/podaacpy.git && cd podaacpy 108 | python3 setup.py install 109 | 110 | Quickstart 111 | ---------- 112 | Check out the **examples** directory for our Jupyter notebook examples. 113 | 114 | Tests 115 | ----- 116 | 117 | | podaacpy uses the popular 118 | `nose `__ testing suite for 119 | unit tests. 120 | | You can run the podaacpy tests simply by running 121 | 122 | :: 123 | 124 | nosetests 125 | 126 | Additionally, click on the build sticker at the top of this readme to be 127 | directed to the most recent build on 128 | `travis-ci `__. 129 | 130 | Documentation 131 | ------------- 132 | 133 | You can view the documentation online at 134 | 135 | http://podaacpy.readthedocs.org/en/latest/ 136 | 137 | Alternatively, you can build the documentation manually as follows 138 | 139 | :: 140 | 141 | cd docs && make html 142 | 143 | Documentation is then available in docs/build/html/ 144 | 145 | Community, Support and Development 146 | ---------------------------------- 147 | 148 | | Please open a ticket in the `issue 149 | tracker `__. 150 | | Please use 151 | `labels `__ 152 | to 153 | | classify your issue. 154 | 155 | License 156 | ------- 157 | 158 | | podaacpy is licensed permissively under the `Apache License 159 | v2.0 `__. 
160 | | A copy of that license is distributed with this software. 161 | 162 | Copyright and Export Classification 163 | ----------------------------------- 164 | 165 | :: 166 | 167 | Copyright 2016-2019, by the California Institute of Technology. ALL RIGHTS RESERVED. 168 | United States Government Sponsorship acknowledged. Any commercial use must be 169 | negotiated with the Office of Technology Transfer at the California Institute 170 | of Technology. 171 | This software may be subject to U.S. export control laws. By accepting this software, 172 | the user agrees to comply with all applicable U.S. export laws and regulations. 173 | User has the responsibility to obtain export licenses, or other export authority 174 | as may be required before exporting such information to foreign countries or 175 | providing access to foreign persons. 176 | 177 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.1751972.svg 178 | :target: https://doi.org/10.5281/zenodo.1751972 179 | .. |license| image:: https://img.shields.io/github/license/nasa/podaacpy.svg?maxAge=2592000 180 | :target: http://www.apache.org/licenses/LICENSE-2.0 181 | .. |PyPI| image:: https://img.shields.io/pypi/v/podaacpy.svg?maxAge=2592000?style=plastic 182 | :target: https://pypi.python.org/pypi/podaacpy 183 | .. |documentation| image:: https://readthedocs.org/projects/podaacpy/badge/?version=latest 184 | :target: http://podaacpy.readthedocs.org/en/latest/ 185 | .. |Travis| image:: https://img.shields.io/travis/nasa/podaacpy.svg?maxAge=2592000?style=plastic 186 | :target: https://travis-ci.org/nasa/podaacpy 187 | .. |Coveralls| image:: https://coveralls.io/repos/github/nasa/podaacpy/badge.svg?branch=master 188 | :target: https://coveralls.io/github/nasa/podaacpy?branch=master 189 | .. |Requirements Status| image:: https://requires.io/github/nasa/podaacpy/requirements.svg?branch=master 190 | :target: https://requires.io/github/nasa/podaacpy/requirements/?branch=master 191 | .. 
|Anaconda-Server Version| image:: https://anaconda.org/conda-forge/podaacpy/badges/version.svg 192 | :target: https://anaconda.org/conda-forge/podaacpy 193 | .. |Anaconda-Server Downloads| image:: https://anaconda.org/conda-forge/podaacpy/badges/downloads.svg 194 | :target: https://anaconda.org/conda-forge/podaacpy 195 | .. |image7| image:: https://podaac.jpl.nasa.gov/sites/default/files/image/custom_thumbs/podaac_logo.png 196 | .. |DeepSource| image:: https://static.deepsource.io/deepsource-badge-light.svg 197 | :target: https://deepsource.io/gh/nasa/podaacpy/?ref=repository-badge 198 | 199 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Makefile for Sphinx documentation 16 | # 17 | 18 | # You can set these variables from the command line. 19 | SPHINXOPTS = 20 | SPHINXBUILD = sphinx-build 21 | PAPER = 22 | BUILDDIR = build 23 | 24 | # User-friendly check for sphinx-build 25 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 26 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 27 | endif 28 | 29 | # Internal variables. 30 | PAPEROPT_a4 = -D latex_paper_size=a4 31 | PAPEROPT_letter = -D latex_paper_size=letter 32 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 33 | # the i18n builder cannot share the environment and doctrees with the others 34 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 35 | 36 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 37 | 38 | help: 39 | @echo "Please use \`make ' where is one of" 40 | @echo " html to make standalone HTML files" 41 | @echo " dirhtml to make HTML files named index.html in directories" 42 | @echo " singlehtml to make a single large HTML file" 43 | @echo " pickle to make pickle files" 44 | @echo " json to make JSON files" 45 | @echo " htmlhelp to make HTML files and a HTML help project" 46 | @echo " qthelp to make HTML files and a qthelp project" 47 | @echo " devhelp to make HTML files and a Devhelp project" 48 | @echo " epub to make an epub" 49 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 50 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 51 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 52 | @echo " text to make text files" 53 | @echo " man to make manual pages" 54 | @echo " texinfo to make Texinfo files" 55 | @echo " info to make Texinfo files and run them through makeinfo" 56 | @echo " gettext to make PO message catalogs" 57 | @echo " changes to make an overview of all changed/added/deprecated items" 58 | @echo " xml to make Docutils-native XML files" 59 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 60 | @echo " linkcheck to check all external links for integrity" 61 | @echo " doctest to run all 
doctests embedded in the documentation (if enabled)" 62 | 63 | clean: 64 | rm -rf $(BUILDDIR)/* 65 | 66 | html: 67 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 68 | @echo 69 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 70 | 71 | dirhtml: 72 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 73 | @echo 74 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 75 | 76 | singlehtml: 77 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 78 | @echo 79 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 80 | 81 | pickle: 82 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 83 | @echo 84 | @echo "Build finished; now you can process the pickle files." 85 | 86 | json: 87 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 88 | @echo 89 | @echo "Build finished; now you can process the JSON files." 90 | 91 | htmlhelp: 92 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 93 | @echo 94 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 95 | ".hhp project file in $(BUILDDIR)/htmlhelp." 96 | 97 | qthelp: 98 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 99 | @echo 100 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 101 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 102 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/podaacpy.qhcp" 103 | @echo "To view the help file:" 104 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/podaacpy.qhc" 105 | 106 | devhelp: 107 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 108 | @echo 109 | @echo "Build finished." 
110 | @echo "To view the help file:" 111 | @echo "# mkdir -p $$HOME/.local/share/devhelp/podaacpy" 112 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/podaacpy" 113 | @echo "# devhelp" 114 | 115 | epub: 116 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 117 | @echo 118 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 119 | 120 | latex: 121 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 122 | @echo 123 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 124 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 125 | "(use \`make latexpdf' here to do that automatically)." 126 | 127 | latexpdf: 128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 129 | @echo "Running LaTeX files through pdflatex..." 130 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 131 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 132 | 133 | latexpdfja: 134 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 135 | @echo "Running LaTeX files through platex and dvipdfmx..." 136 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 137 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 138 | 139 | text: 140 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 141 | @echo 142 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 143 | 144 | man: 145 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 146 | @echo 147 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 148 | 149 | texinfo: 150 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 151 | @echo 152 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 153 | @echo "Run \`make' in that directory to run these through makeinfo" \ 154 | "(use \`make info' here to do that automatically)." 155 | 156 | info: 157 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 158 | @echo "Running Texinfo files through makeinfo..." 
159 | make -C $(BUILDDIR)/texinfo info 160 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 161 | 162 | gettext: 163 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 164 | @echo 165 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 166 | 167 | changes: 168 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 169 | @echo 170 | @echo "The overview file is in $(BUILDDIR)/changes." 171 | 172 | linkcheck: 173 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 174 | @echo 175 | @echo "Link check complete; look for any errors in the above output " \ 176 | "or in $(BUILDDIR)/linkcheck/output.txt." 177 | 178 | doctest: 179 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 180 | @echo "Testing of doctests in the sources finished, look at the " \ 181 | "results in $(BUILDDIR)/doctest/output.txt." 182 | 183 | xml: 184 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 185 | @echo 186 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 187 | 188 | pseudoxml: 189 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 190 | @echo 191 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
192 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | set I18NSPHINXOPTS=%SPHINXOPTS% source 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. 
doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 
107 | 	echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | 	goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | 	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | 	if errorlevel 1 exit /b 1 115 | 	echo. 116 | 	echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | 	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\podaacpy.qhcp 119 | 	echo.To view the help file: 120 | 	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\podaacpy.qhc 121 | 	goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | 	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | 	if errorlevel 1 exit /b 1 127 | 	echo. 128 | 	echo.Build finished. 129 | 	goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | 	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | 	if errorlevel 1 exit /b 1 135 | 	echo. 136 | 	echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | 	goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | 	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | 	if errorlevel 1 exit /b 1 143 | 	echo. 144 | 	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | 	goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | 	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | 	cd %BUILDDIR%/latex 151 | 	make all-pdf 152 | 	cd %BUILDDIR%/.. 153 | 	echo. 154 | 	echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | 	goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | 	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | 	cd %BUILDDIR%/latex 161 | 	make all-pdf-ja 162 | 	cd %BUILDDIR%/.. 163 | 	echo. 164 | 	echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | 	goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | 	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | 	if errorlevel 1 exit /b 1 171 | 	echo. 172 | 	echo.Build finished. 
The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. 
The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | # podaacpy documentation build configuration file, created by 17 | # sphinx-quickstart on Wed Feb 17 22:41:22 2016. 18 | # 19 | # This file is execfile()d with the current directory set to its 20 | # containing dir. 21 | # 22 | # Note that not all possible configuration values are present in this 23 | # autogenerated file. 24 | # 25 | # All configuration values have a default; values that are commented out 26 | # serve to show the default. 27 | 28 | import sys 29 | import os 30 | import sphinx_rtd_theme 31 | 32 | # If extensions (or modules to document with autodoc) are in another directory, 33 | # add these directories to sys.path here. If the directory is relative to the 34 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
35 | #sys.path.insert(0, os.path.abspath('.')) 36 | # Assuming a fresh checkout of master branch, this will point us to the 37 | # package(s) 38 | sys.path.insert(0, os.path.abspath('../../podaac')) 39 | 40 | # -- General configuration ------------------------------------------------ 41 | 42 | # If your documentation needs a minimal Sphinx version, state it here. 43 | #needs_sphinx = '1.0' 44 | 45 | # Add any Sphinx extension module names here, as strings. They can be 46 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 47 | # ones. 48 | extensions = [ 49 | 'sphinx.ext.autodoc', 50 | 'sphinx.ext.doctest', 51 | ] 52 | 53 | # Add any paths that contain templates here, relative to this directory. 54 | templates_path = ['_templates'] 55 | 56 | # The suffix of source filenames. 57 | source_suffix = '.rst' 58 | 59 | # The encoding of source files. 60 | #source_encoding = 'utf-8-sig' 61 | 62 | # The master toctree document. 63 | master_doc = 'index' 64 | 65 | # General information about the project. 66 | project = u'podaacpy' 67 | copyright = u'2018, by the California Institute of Technology' 68 | 69 | # The version info for the project you're documenting, acts as replacement for 70 | # |version| and |release|, also used in various other places throughout the 71 | # built documents. 72 | # 73 | # The short X.Y version. 74 | version = '2.4' 75 | # The full version, including alpha/beta/rc tags. 76 | release = '2.4.0' 77 | 78 | # The language for content autogenerated by Sphinx. Refer to documentation 79 | # for a list of supported languages. 80 | #language = None 81 | 82 | # There are two options for replacing |today|: either, you set today to some 83 | # non-false value, then it is used: 84 | #today = '' 85 | # Else, today_fmt is used as the format for a strftime call. 86 | #today_fmt = '%B %d, %Y' 87 | 88 | # List of patterns, relative to source directory, that match files and 89 | # directories to ignore when looking for source files. 
90 | exclude_patterns = [] 91 | 92 | # The reST default role (used for this markup: `text`) to use for all 93 | # documents. 94 | #default_role = None 95 | 96 | # If true, '()' will be appended to :func: etc. cross-reference text. 97 | #add_function_parentheses = True 98 | 99 | # If true, the current module name will be prepended to all description 100 | # unit titles (such as .. function::). 101 | #add_module_names = True 102 | 103 | # If true, sectionauthor and moduleauthor directives will be shown in the 104 | # output. They are ignored by default. 105 | #show_authors = False 106 | 107 | # The name of the Pygments (syntax highlighting) style to use. 108 | pygments_style = 'sphinx' 109 | 110 | # A list of ignored prefixes for module index sorting. 111 | #modindex_common_prefix = [] 112 | 113 | # If true, keep warnings as "system message" paragraphs in the built documents. 114 | #keep_warnings = False 115 | 116 | 117 | # -- Options for HTML output ---------------------------------------------- 118 | 119 | # The theme to use for HTML and HTML Help pages. See the documentation for 120 | # a list of builtin themes. 121 | html_theme = 'sphinx_rtd_theme' 122 | 123 | # Theme options are theme-specific and customize the look and feel of a theme 124 | # further. For a list of options available for each theme, see the 125 | # documentation. 126 | #html_theme_options = {} 127 | 128 | # Add any paths that contain custom themes here, relative to this directory. 129 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 130 | 131 | # The name for this set of Sphinx documents. If None, it defaults to 132 | # " v documentation". 133 | #html_title = None 134 | 135 | # A shorter title for the navigation bar. Default is the same as html_title. 136 | #html_short_title = None 137 | 138 | # The name of an image file (relative to this directory) to place at the top 139 | # of the sidebar. 
140 | html_logo = os.path.join('.', 'podaac_logo.png') 141 | 142 | # The name of an image file (within the static path) to use as favicon of the 143 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 144 | # pixels large. 145 | #html_favicon = None 146 | 147 | # Add any paths that contain custom static files (such as style sheets) here, 148 | # relative to this directory. They are copied after the builtin static files, 149 | # so a file named "default.css" will overwrite the builtin "default.css". 150 | html_static_path = ['_static'] 151 | 152 | # Add any extra paths that contain custom files (such as robots.txt or 153 | # .htaccess) here, relative to this directory. These files are copied 154 | # directly to the root of the documentation. 155 | #html_extra_path = [] 156 | 157 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 158 | # using the given strftime format. 159 | #html_last_updated_fmt = '%b %d, %Y' 160 | 161 | # If true, SmartyPants will be used to convert quotes and dashes to 162 | # typographically correct entities. 163 | #html_use_smartypants = True 164 | 165 | # Custom sidebar templates, maps document names to template names. 166 | #html_sidebars = {} 167 | 168 | # Additional templates that should be rendered to pages, maps page names to 169 | # template names. 170 | #html_additional_pages = {} 171 | 172 | # If false, no module index is generated. 173 | #html_domain_indices = True 174 | 175 | # If false, no index is generated. 176 | #html_use_index = True 177 | 178 | # If true, the index is split into individual pages for each letter. 179 | #html_split_index = False 180 | 181 | # If true, links to the reST sources are added to the pages. 182 | #html_show_sourcelink = True 183 | 184 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 185 | #html_show_sphinx = True 186 | 187 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
188 | #html_show_copyright = True 189 | 190 | # If true, an OpenSearch description file will be output, and all pages will 191 | # contain a tag referring to it. The value of this option must be the 192 | # base URL from which the finished HTML is served. 193 | #html_use_opensearch = '' 194 | 195 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 196 | #html_file_suffix = None 197 | 198 | # Output file base name for HTML help builder. 199 | htmlhelp_basename = 'podaacpydoc' 200 | 201 | 202 | # -- Options for LaTeX output --------------------------------------------- 203 | 204 | latex_elements = { 205 | # The paper size ('letterpaper' or 'a4paper'). 206 | #'papersize': 'letterpaper', 207 | 208 | # The font size ('10pt', '11pt' or '12pt'). 209 | #'pointsize': '10pt', 210 | 211 | # Additional stuff for the LaTeX preamble. 212 | #'preamble': '', 213 | } 214 | 215 | # Grouping the document tree into LaTeX files. List of tuples 216 | # (source start file, target name, title, 217 | # author, documentclass [howto, manual, or own class]). 218 | latex_documents = [ 219 | ('index', 'podaacpy.tex', u'podaacpy Documentation', 220 | u'Lewis John McGibbney', 'manual'), 221 | ] 222 | 223 | # The name of an image file (relative to this directory) to place at the top of 224 | # the title page. 225 | #latex_logo = None 226 | 227 | # For "manual" documents, if this is true, then toplevel headings are parts, 228 | # not chapters. 229 | #latex_use_parts = False 230 | 231 | # If true, show page references after internal links. 232 | #latex_show_pagerefs = False 233 | 234 | # If true, show URL addresses after external links. 235 | #latex_show_urls = False 236 | 237 | # Documents to append as an appendix to all manuals. 238 | #latex_appendices = [] 239 | 240 | # If false, no module index is generated. 241 | #latex_domain_indices = True 242 | 243 | 244 | # -- Options for manual page output --------------------------------------- 245 | 246 | # One entry per manual page. 
List of tuples 247 | # (source start file, name, description, authors, manual section). 248 | man_pages = [ 249 | ('index', 'podaacpy', u'podaacpy Documentation', 250 | [u'Lewis John McGibbney'], 1) 251 | ] 252 | 253 | # If true, show URL addresses after external links. 254 | #man_show_urls = False 255 | 256 | 257 | # -- Options for Texinfo output ------------------------------------------- 258 | 259 | # Grouping the document tree into Texinfo files. List of tuples 260 | # (source start file, target name, title, author, 261 | # dir menu entry, description, category) 262 | texinfo_documents = [ 263 | ('index', 'podaacpy', u'podaacpy Documentation', 264 | u'Lewis John McGibbney', 'podaacpy', 'One line description of project.', 265 | 'Miscellaneous'), 266 | ] 267 | 268 | # Documents to append as an appendix to all manuals. 269 | #texinfo_appendices = [] 270 | 271 | # If false, no module index is generated. 272 | #texinfo_domain_indices = True 273 | 274 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 275 | #texinfo_show_urls = 'footnote' 276 | 277 | # If true, do not generate a @detailmenu in the "Top" node's menu. 278 | #texinfo_no_detailmenu = False 279 | -------------------------------------------------------------------------------- /docs/source/drive.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | podaacpy drive API 17 | ****************** 18 | 19 | .. autoclass:: drive.Drive 20 | :members: 21 | -------------------------------------------------------------------------------- /docs/source/granule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/docs/source/granule.png -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | Welcome to podaacpy's documentation! 17 | ========================================= 18 | 19 | Contents: 20 | 21 | .. 
toctree:: 22 | :maxdepth: 4 23 | 24 | introduction 25 | quickstart 26 | utilities 27 | webservices 28 | mcc 29 | l2ss 30 | oceancolor 31 | drive 32 | 33 | 34 | 35 | Indices and tables 36 | ================== 37 | 38 | * :ref:`genindex` 39 | * :ref:`modindex` 40 | * :ref:`search` 41 | 42 | -------------------------------------------------------------------------------- /docs/source/introduction.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | Introduction to podaacpy 17 | ************************ 18 | 19 | ============ 20 | Introduction 21 | ============ 22 | podaacpy is a python utility library for interacting with `NASA JPL's PO.DAAC `_ 23 | 24 | ================ 25 | What is PO.DAAC? 26 | ================ 27 | The Physical Oceanography Distributed Active Archive Center (PO.DAAC) is an element of the Earth Observing System Data and Information System (`EOSDIS `_). The EOSDIS provides science data to a wide community of users for NASA's Science Mission Directorate. 28 | 29 | ========================= 30 | What does podaacpy offer? 
31 | ========================= 32 | The library provides a Python toolkit for interacting with all of PO.DAACs API's, namely 33 | 34 | * `PO.DAAC Web Services `_: services include 35 | 36 | * `Dataset Metadata `_ - retrieves the metadata of a dataset 37 | * `Dataset Search `_ - searches PO.DAAC's dataset catalog, over Level 2, Level 3, and Level 4 datasets 38 | * `Dataset Variables `_ - provides list of dataset variables for the dataset 39 | * `Granule Metadata `_ - retrieves the metadata of a granule 40 | * `Granule Search `_ - does granule searching on PO.DAAC level 2 swath datasets (individual orbits of a satellite), and level 3 & 4 gridded datasets (time averaged to span the globe) 41 | * `Granule Preview `_ - the PODAAC preview Image service retrieves pre-generated preview images for selected granules 42 | * `Granule Subset `_ - Subset Granule service allows users to submit subset jobs 43 | * `Subset Status `_ - Subset Granule Status service allows users to check the status of submitted subset job 44 | 45 | * `Metadata Compliance Checker `_: an online tool and web service designed to check and validate the contents of netCDF and HDF granules for the Climate and Forecast (CF) and Attribute Convention for Dataset Discovery (ACDD) metadata conventions. 46 | * `PO.DAAC Drive `_: an HTTP based data access service. PO.DAAC Drive replicates much of the functionality of FTP while addressing many of its issues. 47 | 48 | Additionally, Podaacpy provides the following ocean-related data services 49 | * `NASA OceanColor Web `_: 50 | 51 | * `File Search `_ - locate publically available files within the NASA Ocean Data Processing System (ODPS) 52 | * `Bulk data downloads via HTTP `_ - mimic FTP bulk data downloads using the `HTTP-based data distribution server `_. 53 | -------------------------------------------------------------------------------- /docs/source/l2ss.rst: -------------------------------------------------------------------------------- 1 | .. 
# encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | podaacpy Level 2 Subsetting (L2SS) API 17 | ************************************** 18 | 19 | .. autoclass:: l2ss.L2SS 20 | :members: 21 | -------------------------------------------------------------------------------- /docs/source/mcc.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | podaacpy metadata compliance checker (mcc) API 17 | ********************************************** 18 | 19 | .. 
autoclass:: mcc.MCC 20 | :members: 21 | -------------------------------------------------------------------------------- /docs/source/oceancolor.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | NASA OceanColor API 17 | ******************* 18 | 19 | .. autoclass:: oceancolor.OceanColor 20 | :members: 21 | -------------------------------------------------------------------------------- /docs/source/podaac_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/docs/source/podaac_logo.png -------------------------------------------------------------------------------- /docs/source/quickstart.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | Quickstart 17 | ********** 18 | 19 | Purpose 20 | ------- 21 | The following document explains how to quickly get up and running with podaac. It explains how to execute the key commands and explains (at a high level) what those commands are doing e.g. what input and output we can expect. More detail on expressive use of the various API's including function level API documentation can be found in subsequent pages of this documentation guide. 22 | 23 | .. _data: 24 | 25 | Working with Data Webservices 26 | ----------------------------- 27 | 28 | Importing podaac 29 | ^^^^^^^^^^^^^^^^^^ 30 | This is very simple... :: 31 | 32 | # import the podaac package 33 | import podaac.podaac as podaac 34 | 35 | # then create an instance of the Podaac class 36 | p = podaac.Podaac() 37 | 38 | More on using Podaac functions later... first lets look at some convenience functionality. 39 | 40 | Convenience Functions 41 | ^^^^^^^^^^^^^^^^^^^^^ 42 | There are a number of convenience functions which aid various types of search. These help decypher the rather cryptic dataset id's, dataset short names, etc. present within PO.DAAC. These functions accept no parameters. They do however account for the fact that availability of certain datasets within PO.DAAC is not constant. Additionally some services are only available for certain datasets. The functions encapsulate those underlying variables and always return current, available results which can be interpreted and used within the other functions in this file. 
43 | 44 | First lets define the relevant import :: 45 | 46 | # import the podaac_utils package 47 | import podaac.podaac_utils as utils 48 | 49 | # then create an instance of the PodaacUtils class 50 | u = utils.PodaacUtils() 51 | 52 | The convenience functions are; :: 53 | 54 | result = u.list_available_granule_search_level2_dataset_ids() 55 | 56 | result = u.list_available_granule_search_level2_dataset_short_names() 57 | 58 | result = u.list_available_granule_search_dataset_ids() 59 | 60 | result = u.list_available_granule_search_dataset_short_names() 61 | 62 | result = u.list_available_extract_granule_dataset_ids() 63 | 64 | result = u.list_available_extract_granule_datasetShortNames() 65 | 66 | For all of the above, the variable **result** now contains a Python List containing comma-separated values which can be processed appropriately. 67 | For more information on these functions, see :doc:`utilities` 68 | 69 | Retrieving Dataset Metadata 70 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 71 | `Dataset Metadata `_ - retrieves the metadata of a dataset. In the following code snippet lets retrieve dataset metadata for GHRSST Level 2P Atlantic Regional Skin Sea Surface Temperature from the Spinning Enhanced Visible and InfraRed Imager (SEVIRI) on the Meteosat Second Generation (MSG-2) satellite e.g. dataset id **PODAAC-GHMG2-2PO01** :: 72 | 73 | result = p.dataset_metadata(dataset_id='PODAAC-GHMG2-2PO01') 74 | 75 | The variable **result** now contains an XML response which can be processed appropriately. 76 | For more information on this function, see :doc:`webservices` 77 | 78 | Retrieving Granule Metadata 79 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 80 | `Granule Metadata `_ - retrieves the metadata of a granule. In the following code snippet we retrieve granule metadata for the above dataset e.g. 
granule_name **20120912-MSG02-OSDPD-L2P-MSG02_0200Z-v01.nc** :: 81 | 82 | result = p.granule_metadata(dataset_id='PODAAC-GHMG2-2PO01', granule_name='20120912-MSG02-OSDPD-L2P-MSG02_0200Z-v01.nc') 83 | 84 | The variable **result** now contains an XML response which can be processed appropriately. 85 | For more information on this function, see :doc:`webservices` 86 | 87 | Additionally, we can search metadata for list of granules archived within the last 24 hours in `Datacasting `_ format. :: 88 | 89 | result = p.last24hours_datacasting_granule_md(dataset_id='PODAAC-GHMG2-2PO01') 90 | 91 | The variable **result** now contains an XML response containing a list of data granules which can be processed appropriately. 92 | For more information on this function, see :doc:`webservices` 93 | 94 | Retrieving Dataset Variables 95 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 96 | `Dataset Variables `_ - provides a list of variables for the dataset. In the following code snippet we retrieve the dataset variables for the dataset id **PODAAC-ASOP2-25X01** :: 97 | 98 | result = p.dataset_variables(dataset_id='PODAAC-ASOP2-25X01') 99 | 100 | The variable **result** now contains a dictionary of variables of the respective dataset. 101 | For more information on this function, see :doc:`webservices` 102 | 103 | Searching for Datasets 104 | ^^^^^^^^^^^^^^^^^^^^^^ 105 | `Search Dataset `_ - searches PO.DAAC's dataset catalog, over Level 2, Level 3, and Level 4 datasets. In the following code snippet we will search using a keyword e.g. **modis** :: 106 | 107 | result = p.dataset_search(keyword='modis') 108 | 109 | The variable **result** now contains an XML response containing a list of datasets which can be processed appropriately. 
110 | For more information on this function, see :doc:`webservices` 111 | 112 | Searching for Granules 113 | ^^^^^^^^^^^^^^^^^^^^^^^ 114 | `Search Granule `_ - does granule searching on PO.DAAC level 2 swath datasets (individual orbits of a satellite), and level 3 & 4 gridded datasets (time averaged to span the globe). In the following code snippet we will search for granules within a specific dataset e.g. **PODAAC-ASOP2-25X01** :: 115 | 116 | result = p.granule_search(dataset_id='PODAAC-ASOP2-25X01', bbox='0,0,180,90',start_time='2013-01-01T01:30:00Z',end_time='2014-01-01T00:00:00Z',start_index='1') 117 | 118 | The variable **result** now contains an XML response containing a list of granules for the given dataset which can be processed appropriately. 119 | For more information on this function, see :doc:`webservices` 120 | 121 | Retrieve granule images 122 | ^^^^^^^^^^^^^^^^^^^^^^^ 123 | `Granule Preview `_ - renders granules in the PO.DAAC's catalog to images such as jpeg and/or png. In the following code snippet we display a request using the dataset id **PODAAC-ASOP2-25X01** and image variable of the dataset **wind_speed** :: 124 | 125 | result = p.granule_preview(dataset_id='PODAAC-ASOP2-25X01', image_variable='wind_speed') 126 | 127 | The above request downloads us a nice image shown below 128 | 129 | .. image:: granule.png 130 | 131 | For more information on this function, see :doc:`webservices` 132 | 133 | Subsetting Granules 134 | ^^^^^^^^^^^^^^^^^^^ 135 | `Granule Subset `_ - the Granule Subset web service sets up a granule subsetting job using HTTP POST request. Upon a successful request, a token is returned which can be used to check the status of the subsetting job. 
In the following code snippet we will subset a granule using an input.json file which contains :: 136 | 137 | query={ 138 | "email":"abc@abcd.com", 139 | "query": [ 140 | { 141 | "compact":false, 142 | "datasetId":"PODAAC-ASOP2-25X01", 143 | "bbox":"-180,-90,0,90", 144 | "variables" : ["lat" , "lon","time","wind_speed" ], 145 | "granuleIds": ["ascat_20140520_005700_metopa_39344_eps_o_250_2300_ovw.l2.nc","ascat_20140411_175700_metopa_38800_eps_o_250_2300_ovw.l2.nc"] 146 | } 147 | ] 148 | } 149 | 150 | result = p.granule_subset(input_file_path='/path/to/input.json') 151 | 152 | The variable **result** contains a token on successful request reception. This can be further used to check the status of the request. 153 | For more information on this function, see :doc:`webservices` 154 | 155 | Subset Status 156 | ^^^^^^^^^^^^^ 157 | `Subset Status `_ - the subset status checks the status on the existing job. In the following code snippet we check the status using the token received from PO.DAAC when we submitted a job for subsetting :: 158 | 159 | result = p.subset_status(token=token) 160 | 161 | The variable **result** contains the status of the subset request. 162 | For more information on this function, see :doc:`webservices` 163 | 164 | Extract level4 granule 165 | ^^^^^^^^^^^^^^^^^^^^^^ 166 | Right now the Extract Granule service supports only level 2 granules. Extract l4 granule is an add-on over extract granule to extract level 4 gridded datasets from the PODAAC data source. 
In the following code snippet we extract a level4 granule with Dataset ID = **PODAAC-CCF30-01XXX**, short_name of **CCMP_MEASURES_ATLAS_L4_OW_L3_0_WIND_VECTORS_FLK** and provide a path to the directory you want to have it saved as **netcdf** :: 167 | 168 | result = p.extract_l4_granule(dataset_id='PODAAC-CCF30-01XXX', short_name='CCMP_MEASURES_ATLAS_L4_OW_L3_0_WIND_VECTORS_FLK', path='path/to/the/destination/directory') 169 | 170 | The above request downloads the relevant .netcdf file. For more information on this function, see :doc:`webservices` 171 | 172 | .. _mcc: 173 | 174 | Working with Metadata Compliance Webservices (mcc) 175 | -------------------------------------------------- 176 | 177 | Importing mcc 178 | ^^^^^^^^^^^^^ 179 | This is very simple... :: 180 | 181 | # import the mcc package 182 | import podaac.mcc as mcc 183 | 184 | # then create an instance of the MCC class 185 | m = mcc.MCC() 186 | 187 | Compliance Check a Local File 188 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 189 | The following example displays how to use the MCC to check and validate the contents of a local granule (netCDF or HDF) given the relevant input parameters. :: 190 | 191 | result = m.check_local_file(acdd_version='1.3', gds2_parameters='L4', file_upload='someLocalFile.nc', response='json') 192 | 193 | The result variable contains a JSON encoded report response which can be used for compliance checking activities. For more information on this function, see :doc:`mcc` 194 | 195 | Compliance Check a Remote File 196 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 197 | The following example displays how to use the MCC to check and validate the contents of a remote granule (netCDF or HDF) given the relevant input parameters. 
:: 198 | 199 | result = m.check_remote_file(checkers='CF', url_upload='http://test.opendap.org/opendap/data/ncml/agg/dated/CG2006158_120000h_usfc.nc', response='json') 200 | 201 | The result variable contains a JSON encoded report response which can be used for compliance checking activities. For more information on this function, see :doc:`mcc` 202 | 203 | .. _concl: 204 | 205 | Conclusion 206 | ---------- 207 | That concludes the quick start. Hopefully this has been helpful in providing an overview of the main podaacpy features. If you have any issues with this document then please register them at the `issue tracker `_. Please use `labels `_ to classify your issue. 208 | -------------------------------------------------------------------------------- /docs/source/utilities.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | podaacpy utilities API 17 | ********************** 18 | 19 | .. autoclass:: podaac_utils.PodaacUtils 20 | :members: 21 | -------------------------------------------------------------------------------- /docs/source/webservices.rst: -------------------------------------------------------------------------------- 1 | .. # encoding: utf-8 2 | # Copyright 2016-2019 California Institute of Technology. 
3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | podaacpy webservices API 17 | ************************ 18 | 19 | .. autoclass:: podaac.Podaac 20 | :members: 21 | -------------------------------------------------------------------------------- /examples/2017_PO.DAAC_Citation_cleaned_and_verified.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/examples/2017_PO.DAAC_Citation_cleaned_and_verified.xlsx -------------------------------------------------------------------------------- /examples/ASCAT_geometry.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/examples/ASCAT_geometry.jpg -------------------------------------------------------------------------------- /examples/Calculate Standard Deviation from Cross-Calibrated Multi-Platform Ocean Surface Wind Vector L3.0 First-Look Analyses.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Licensed to the Apache Software Foundation (ASF) under one\n", 10 | "# or more contributor license agreements. 
See the NOTICE file\n", 11 | "# distributed with this work for additional information\n", 12 | "# regarding copyright ownership. The ASF licenses this file\n", 13 | "# to you under the Apache License, Version 2.0 (the\n", 14 | "# \"License\"); you may not use this file except in compliance\n", 15 | "# with the License. You may obtain a copy of the License at\n", 16 | "#\n", 17 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 18 | "#\n", 19 | "# Unless required by applicable law or agreed to in writing,\n", 20 | "# software distributed under the License is distributed on an\n", 21 | "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n", 22 | "# KIND, either express or implied. See the License for the\n", 23 | "# specific language governing permissions and limitations\n", 24 | "# under the License." 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "#\n", 34 | "# Podaacpy integration example\n", 35 | "# This example integrates Podaacpy within Apache Open Climate Workbench (OCW) to download \n", 36 | "# a PODACC dataset, execute some climate model evaluation and then plot a contour map.\n", 37 | "# \n", 38 | "# In this example:\n", 39 | "# 1. Download a remote PO.DAAC (https://podaac.jpl.nasa.gov/) dataset\n", 40 | "# and read it into an OCW dataset object. In this case we demonstrate the source dataset\n", 41 | "# 'Cross-Calibrated Multi-Platform Ocean Surface Wind Vector L3.0 First-Look Analyses'\n", 42 | "# which can be found at \n", 43 | "# https://podaac.jpl.nasa.gov/dataset/CCMP_MEASURES_ATLAS_L4_OW_L3_0_WIND_VECTORS_FLK\n", 44 | "# 2. Create a temporal Standard Deviation metric using one of the OCW standard metrics.\n", 45 | "# 3. Evaluate the dataset against the metric and plot a contour map.\n", 46 | "# OCW modules demonstrated:\n", 47 | "# 1. datasource/podaac_datasource\n", 48 | "# 2. metrics\n", 49 | "# 3. evaluation\n", 50 | "# 4. 
plotter" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "# imports \n", 60 | "# N.B. you NEED to have Apache OCW installed locally before attemtping the following imports,\n", 61 | "# you can do this as follows \n", 62 | "\n", 63 | "# $ conda install -c conda-forge ocw \n", 64 | "\n", 65 | "from __future__ import print_function\n", 66 | "\n", 67 | "import ocw.data_source.podaac_datasource as podaac\n", 68 | "import ocw.evaluation as evaluation\n", 69 | "import ocw.metrics as metrics\n", 70 | "import ocw.plotter as plotter" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": null, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "# Define some constants\n", 80 | "datasetId = 'PODAAC-CCF30-01XXX'\n", 81 | "variable = 'uwnd'\n", 82 | "name = 'Cross-Calibrated Multi-Platform Ocean Surface Wind Vector L3.0 First-Look Analyses'\n", 83 | "output_plot = \"ccmp_temporal_standard_deviation\"\n", 84 | "download_path = \"/tmp\"" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "# Step 1: Download remote PO.DAAC Dataset and read it into an OCW Dataset Object.\n", 94 | "print(\"Available Level4 PO.DAAC Granules: %s\" % (podaac.list_available_extract_granule_dataset_ids()))\n", 95 | "print(\"Attempting to download Level4 Granule from Collection '%s' - '%s'.\" % (datasetId, name))\n", 96 | "ccmp_dataset = podaac.extract_l4_granule(variable=variable, dataset_id=datasetId, name=name, path=download_path)\n", 97 | "print(\"Granule details: '%s'\" % (ccmp_dataset))\n", 98 | "#print(\"CCMP_Dataset.values shape: (times, lats, lons) - %s \\n\" % (ccmp_dataset.values.shape))\n", 99 | "\n", 100 | "# Acessing latittudes and longitudes of netCDF file\n", 101 | "lats = ccmp_dataset.lats\n", 102 | "lons = ccmp_dataset.lons" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 
| "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "# Step 2: Build a Metric to use for Evaluation - Temporal Standard Deviation for this example.\n", 112 | "# You can build your own metrics, but OCW also ships with some common metrics\n", 113 | "print(\"Setting up a Temporal standard deviation metric to use for evaluation...\")\n", 114 | "std = metrics.TemporalStdDev()" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [ 123 | "# Step 3: Create an Evaluation Object using Datasets and our Metric.\n", 124 | "# The Evaluation Class Signature is:\n", 125 | "# Evaluation(reference, targets, metrics, subregions=None)\n", 126 | "# Evaluation can take in multiple targets and metrics, so we need to convert\n", 127 | "# our examples into Python lists. Evaluation will iterate over the lists\n", 128 | "print(\"Making the Evaluation definition...\")\n", 129 | "# Temporal STD Metric gets one target dataset then reference dataset\n", 130 | "# should be None\n", 131 | "std_evaluation = evaluation.Evaluation(None, [ccmp_dataset], [std])\n", 132 | "print(\"Executing the Evaluation using the object's run() method...\")\n", 133 | "std_evaluation.run()\n", 134 | "print(\"Completed model evaluation.\")" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "# Step 4: Make a Plot from the Evaluation.results.\n", 144 | "# The Evaluation.results are a set of nested lists to support many different\n", 145 | "# possible Evaluation scenarios.\n", 146 | "#\n", 147 | "# The Evaluation results docs say:\n", 148 | "# The shape of results is (num_metrics, num_target_datasets) if no subregion\n", 149 | "# Accessing the actual results when we have used 1 metric and 1 dataset is\n", 150 | "# done this way:\n", 151 | "print(\"Accessing the Results of the Evaluation 
run\")\n", 152 | "results = std_evaluation.unary_results[0][0]\n", 153 | "print(\"The results are of type: %s\" % type(results))\n", 154 | "print(\"Generating a contour map using ocw.plotter.draw_contour_map()\")\n", 155 | "\n", 156 | "fname = output_plot\n", 157 | "gridshape = (4, 5)\n", 158 | "plot_title = name + \" Temporal Standard Deviation\"\n", 159 | "sub_titles = range(2002, 2010, 1)\n", 160 | "\n", 161 | "plotter.draw_contour_map(results, lats, lons, fname,\n", 162 | " gridshape=gridshape, ptitle=plot_title,\n", 163 | "subtitles=sub_titles)" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "# Finally, view the image\n", 173 | "from IPython.display import Image\n", 174 | "Image(filename=output_plot + '.png') " 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": null, 180 | "metadata": {}, 181 | "outputs": [], 182 | "source": [] 183 | } 184 | ], 185 | "metadata": { 186 | "kernelspec": { 187 | "display_name": "Python 3", 188 | "language": "python", 189 | "name": "python3" 190 | }, 191 | "language_info": { 192 | "codemirror_mode": { 193 | "name": "ipython", 194 | "version": 3 195 | }, 196 | "file_extension": ".py", 197 | "mimetype": "text/x-python", 198 | "name": "python", 199 | "nbconvert_exporter": "python", 200 | "pygments_lexer": "ipython3", 201 | "version": "3.6.5" 202 | } 203 | }, 204 | "nbformat": 4, 205 | "nbformat_minor": 2 206 | } 207 | -------------------------------------------------------------------------------- /examples/README.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ======== 3 | 4 | You've arrived at our quickstart and examples. All of them are delivered as `Jupyter Notebooks `__. This means you need to have Jupyter installed before you start, please check out the `installation documentation `__. 
5 | 6 | You can run the notebooks as follows 7 | 8 | :: 9 | 10 | $ cd examples 11 | $ jupyter notebook 12 | 13 | Quickstart 14 | ---------- 15 | Once the Jupyter server is running on http://localhost:8888/ you can access the quickstart notebook **podaacpy_getting_started_tutorial.ipynb**. 16 | 17 | Other Examples 18 | -------------- 19 | Over time we will add more examples involving Podaacpy. If you have some examples you would like to share, please open a `pull request `_. 20 | -------------------------------------------------------------------------------- /examples/Using Podaacpy to retrieve CYGNSS Level 2 Science Data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 10 | "# you may not use this file except in compliance with the License.\n", 11 | "# You may obtain a copy of the License at\n", 12 | "#\n", 13 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 14 | "#\n", 15 | "# Unless required by applicable law or agreed to in writing, software\n", 16 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 17 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", 18 | "# See the License for the specific language governing permissions and\n", 19 | "# limitations under the License." 
20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "#First lets import the libraries we require\n", 29 | "from pprint import pprint\n", 30 | "import podaac.podaac as podaac\n", 31 | "import podaac.podaac_utils as utils" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "#Then we can create instances of the classes we will use\n", 41 | "p = podaac.Podaac()\n", 42 | "u = utils.PodaacUtils()" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "#Print a list of CYGNSS dataset id's\n", 52 | "print('\\nHeres list_all_available_granule_search_dataset_ids()')\n", 53 | "result = u.list_all_available_granule_search_dataset_ids()\n", 54 | "dsetId = [i for i in result if 'CYG' in i]\n", 55 | "pprint(dsetId)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "#Print a list of CYGNSS dataset short names\n", 65 | "print('\\nHeres list_all_available_granule_search_dataset_short_names()')\n", 66 | "result = u.list_all_available_granule_search_dataset_short_names()\n", 67 | "dsetShortName = [i for i in result if 'CYG' in i]\n", 68 | "pprint(dsetShortName)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "#Perform a search on dataset\n", 78 | "#NOTE: dataset_id=dsetID pulled up nothing, had to use short_name=\n", 79 | "print('\\nHeres p.dataset_search()')\n", 80 | "result = p.dataset_search(short_name=dsetShortName[0])\n", 81 | "\n", 82 | "#Cache the dataset landing page URL\n", 83 | "searchStr = 'http://podaac.jpl.nasa.gov/dataset/'\n", 84 | "dataset_landing_page = [ str(i) for i in result.strip().split() if searchStr in i ][0]\n", 85 | 
"\n", 86 | "pprint(result)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "#Print total number of GYGNSS Level 2 granules\n", 96 | "print('\\nHeres total results using p.granule_search()')\n", 97 | "maxResultsPerPage = '400'\n", 98 | "result = p.granule_search(dataset_id=dsetId[1],items_per_page=maxResultsPerPage)\n", 99 | "searchStr = 'totalResults'\n", 100 | "numResultsStr = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 101 | "print(numResultsStr)" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "#print('\\nHeres the length of file listing: '+str(len(fileStrL))+'\\n')\n", 111 | "searchStr = 'cyg'\n", 112 | "fileStrL = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 113 | "podaacL3 = [ i.replace('<title>','').replace('','') for i in fileStrL ]\n", 114 | "pprint(podaacL3)" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [ 123 | "#Lets execute a search for specific granules from the following dataset\n", 124 | "# MetOp-A ASCAT Level 2 Ocean Surface Wind Vectors Optimized for Coastal Ocean\n", 125 | "# https://podaac.jpl.nasa.gov/dataset/ASCATA-L2-Coastal\n", 126 | "# ...based upon temporal (start and end) and spatial contraints. 
\n", 127 | "#note that no subsetting is available for these granules as of yet.\n", 128 | "result = p.granule_search(dataset_id='PODAAC-ASOP2-12C01', \n", 129 | " start_time='2017-06-01T00:00:00Z', \n", 130 | " end_time='2017-10-31T00:00:00Z', \n", 131 | " bbox='-105,5,-10,50', \n", 132 | " sort_by='timeAsc', \n", 133 | " format='atom', \n", 134 | " pretty='true')\n", 135 | "searchStr = 'totalResults'\n", 136 | "numResultsStr = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 137 | "print(numResultsStr)" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": null, 143 | "metadata": {}, 144 | "outputs": [], 145 | "source": [ 146 | "#Now lets subset and download one of those granules\n", 147 | "import os\n", 148 | "print(os.path.dirname(os.path.abspath('__file__')) + \"/subset.json\")\n", 149 | "print(os.path.dirname(os.path.abspath('__file__')))\n", 150 | "result = p.granule_subset(os.path.dirname(os.path.abspath('__file__')) + \"/subset.json\", path=os.path.dirname(os.path.abspath('__file__')))" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": {}, 157 | "outputs": [], 158 | "source": [ 159 | "#Again, using Elsevier's Scopus Search, lets see other CYGNSS resources we can retreive.\n", 160 | "url = 'https://api.elsevier.com/content/search/scopus?query=ALL:cygnss&APIKey=715b412c00f0b95e918a3e7abe6e6ee4'\n", 161 | "import requests\n", 162 | "try:\n", 163 | " metadata = requests.get(url)\n", 164 | " status_codes = [404, 400, 503, 408]\n", 165 | " if metadata.status_code in status_codes:\n", 166 | " metadata.raise_for_status()\n", 167 | "except requests.exceptions.HTTPError as error:\n", 168 | " print(error)\n", 169 | " raise\n", 170 | "pprint(metadata.text)" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "metadata": {}, 177 | "outputs": [], 178 | "source": [] 179 | } 180 | ], 181 | "metadata": { 182 | "kernelspec": { 183 | "display_name": 
"Python 3", 184 | "language": "python", 185 | "name": "python3" 186 | }, 187 | "language_info": { 188 | "codemirror_mode": { 189 | "name": "ipython", 190 | "version": 3 191 | }, 192 | "file_extension": ".py", 193 | "mimetype": "text/x-python", 194 | "name": "python", 195 | "nbconvert_exporter": "python", 196 | "pygments_lexer": "ipython3", 197 | "version": "3.6.5" 198 | } 199 | }, 200 | "nbformat": 4, 201 | "nbformat_minor": 2 202 | } 203 | -------------------------------------------------------------------------------- /examples/Using Podaacpy to retrieve CYGNSS Level 3 Science Data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 10 | "# you may not use this file except in compliance with the License.\n", 11 | "# You may obtain a copy of the License at\n", 12 | "#\n", 13 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 14 | "#\n", 15 | "# Unless required by applicable law or agreed to in writing, software\n", 16 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 17 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", 18 | "# See the License for the specific language governing permissions and\n", 19 | "# limitations under the License." 
20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "#First lets import the libraries we require\n", 29 | "from pprint import pprint\n", 30 | "import podaac.podaac as podaac\n", 31 | "import podaac.podaac_utils as utils" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "#Then we can create instances of the classes we will use\n", 41 | "p = podaac.Podaac()\n", 42 | "u = utils.PodaacUtils()" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "#Print a list of CYGNSS dataset id's\n", 52 | "print('\\nHeres list_all_available_granule_search_dataset_ids()')\n", 53 | "result = u.list_all_available_granule_search_dataset_ids()\n", 54 | "dsetId = [i for i in result if 'CYG' in i]\n", 55 | "print(dsetId)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "#Print a list of CYGNSS dataset short names\n", 65 | "print('\\nHeres list_all_available_granule_search_dataset_short_names()')\n", 66 | "result = u.list_all_available_granule_search_dataset_short_names()\n", 67 | "dsetShortName = [i for i in result if 'CYG' in i]\n", 68 | "print(dsetShortName)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "# Some dataset metadata in GCMD response format\n", 78 | "print('\\nHeres p.dataset_metadata()')\n", 79 | "print(p.dataset_metadata(dataset_id=dsetId[0], format='gcmd'))" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "#Perform a search on dataset\n", 89 | "#NOTE: dataset_id=dsetID pulled up nothing, had to use short_name=\n", 90 | 
"print('\\nHeres p.dataset_search()')\n", 91 | "result = p.dataset_search(short_name=dsetShortName[0])\n", 92 | "\n", 93 | "#Cache the dataset landing page URL\n", 94 | "searchStr = 'http://podaac.jpl.nasa.gov/dataset/'\n", 95 | "dataset_landing_page = [ str(i) for i in result.strip().split() if searchStr in i ][0]\n", 96 | "\n", 97 | "print(result)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "#Print total number of GYGNSS granules\n", 107 | "print('\\nHeres total results using p.granule_search()')\n", 108 | "maxResultsPerPage = '400'\n", 109 | "result = p.granule_search(dataset_id=dsetId[0],items_per_page=maxResultsPerPage)\n", 110 | "print(result)\n", 111 | "searchStr = 'totalResults'\n", 112 | "numResultsStr = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 113 | "print(numResultsStr)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "#print('\\nHeres the length of file listing: '+str(len(fileStrL))+'\\n')\n", 123 | "searchStr = 'cyg'\n", 124 | "fileStrL = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 125 | "podaacL3 = [ i.replace('<title>','').replace('','') for i in fileStrL ]\n", 126 | "pprint(podaacL3)" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "#Using Elsevier's Scopus Search, lets see if we can \n", 136 | "#retreieve any information from the above dataset landing page\n", 137 | "url = 'https://api.elsevier.com/content/search/scopus?query=ALL:' + dataset_landing_page + '&APIKey=715b412c00f0b95e918a3e7abe6e6ee4'\n", 138 | "import requests\n", 139 | "try:\n", 140 | " metadata = requests.get(url)\n", 141 | " status_codes = [404, 400, 503, 408]\n", 142 | " if metadata.status_code in status_codes:\n", 143 | " 
metadata.raise_for_status()\n", 144 | "except requests.exceptions.HTTPError as error:\n", 145 | " print(error)\n", 146 | " raise\n", 147 | "\n", 148 | "pprint(metadata.text)" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "#Again, using Elsevier's Scopus Search, lets see other CYGNSS resource we can retreive.\n", 158 | "url = 'https://api.elsevier.com/content/search/scopus?query=ALL:cygnss&APIKey=715b412c00f0b95e918a3e7abe6e6ee4'\n", 159 | "import requests\n", 160 | "try:\n", 161 | " metadata = requests.get(url)\n", 162 | " status_codes = [404, 400, 503, 408]\n", 163 | " if metadata.status_code in status_codes:\n", 164 | " metadata.raise_for_status()\n", 165 | "except requests.exceptions.HTTPError as error:\n", 166 | " print(error)\n", 167 | " raise\n", 168 | "\n", 169 | "pprint(metadata.text)" 170 | ] 171 | } 172 | ], 173 | "metadata": { 174 | "kernelspec": { 175 | "display_name": "Python 3", 176 | "language": "python", 177 | "name": "python3" 178 | }, 179 | "language_info": { 180 | "codemirror_mode": { 181 | "name": "ipython", 182 | "version": 3 183 | }, 184 | "file_extension": ".py", 185 | "mimetype": "text/x-python", 186 | "name": "python", 187 | "nbconvert_exporter": "python", 188 | "pygments_lexer": "ipython3", 189 | "version": "3.6.5" 190 | } 191 | }, 192 | "nbformat": 4, 193 | "nbformat_minor": 2 194 | } 195 | -------------------------------------------------------------------------------- /examples/Using podaacpy to interact with PO.DAAC Drive.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 10 | "# you may not use this file except in compliance with the License.\n", 11 | "# You may obtain a copy of the 
License at\n", 12 | "#\n", 13 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 14 | "#\n", 15 | "# Unless required by applicable law or agreed to in writing, software\n", 16 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 17 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", 18 | "# See the License for the specific language governing permissions and\n", 19 | "# limitations under the License." 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "# In this example we replicate the following workflow \n", 29 | "# * Define temporal and spatial characteristics of a given event \n", 30 | "# e.g. Hurricane Florence which was a very recent major hurricane \n", 31 | "# to impact Southeastern US\n", 32 | "# * Search for all available (whole orbit) granules for that event and \n", 33 | "# download them from PO.DAAC Drive to the local machine\n", 34 | "# * View files in the Panoply data viewer\n", 35 | "# more information on Panoply can be found at\n", 36 | "# https://www.giss.nasa.gov/tools/panoply/\n", 37 | "# * Streamline the above data acquisition task using PO.DAAC's\n", 38 | "# Level 2 Subsetting software to download a regional data subset for the\n", 39 | "# event at the given space and time.\n", 40 | "# * Finally, view the subsetted granule again in Panoply\n", 41 | "\n", 42 | "# In this example we has chosen the MetOp-A ASCAT Level 2 Ocean \n", 43 | "# Surface Wind Vectors Optimized for Coastal Ocean collection as our \n", 44 | "# subject matter.\n", 45 | "# This dataset contains operational near-real-time Level 2 coastal \n", 46 | "# ocean surface wind vector retrievals from the Advanced Scatterometer \n", 47 | "# (ASCAT) on MetOp-A at 12.5 km sampling resolution \n", 48 | "# (note: the effective resolution is 25 km). 
It is a product of \n", 49 | "# the European Organization for the Exploitation of Meteorological \n", 50 | "# Satellites (EUMETSAT) Ocean and Sea Ice Satellite Application \n", 51 | "# Facility (OSI SAF) provided through the Royal Netherlands \n", 52 | "# Meteorological Institute (KNMI)\n", 53 | "\n", 54 | "# More information on the MetOp mission specification can \n", 55 | "# be found at https://podaac.jpl.nasa.gov/MetOp" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "from IPython.display import Image\n", 65 | "Image(filename='ASCAT_geometry.jpg') " 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "#First lets import the libraries we require\n", 75 | "from pprint import pprint\n", 76 | "from podaac import podaac as podaac\n", 77 | "from podaac import podaac_utils as utils\n", 78 | "from podaac import drive as drive" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [ 87 | "#Then we can create instances of the classes we will use\n", 88 | "p = podaac.Podaac()\n", 89 | "u = utils.PodaacUtils()\n", 90 | "d = drive.Drive('podaac.ini', None, None)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "# Let's discover PO.DAAC Wind data relating to Hurricane Florence, which\n", 100 | "# was a very recent major hurricane to impact Southeastern US\n", 101 | "# https://en.wikipedia.org/wiki/Hurricane_Florence\n", 102 | "# Using specific parameters to confine the discovery space, we opt for the full \n", 103 | "# metadata record in atom format\n", 104 | "ds_result = p.dataset_search(keyword='ASCAT', \n", 105 | " start_time='2018-09-12T00:00:01Z', \n", 106 | " end_time='2018-09-14T11:59:59Z', \n", 107 | " 
short_name='ASCATA-L2-Coastal', \n", 108 | " process_level='2', \n", 109 | " bbox='-81,28,-67,40', \n", 110 | " pretty='True', \n", 111 | " _format='atom', \n", 112 | " full='True')\n", 113 | "print(ds_result)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "#Because we requested the Full response, we can actually extract the \n", 123 | "# PO.DAAC Drive URL for all granules contained within this dataset.\n", 124 | "search_str = 'https://podaac-tools.jpl.nasa.gov/drive/files/'\n", 125 | "drive_path = [ str(i) for i in ds_result.strip().split() if search_str in i ][0]\n", 126 | "print(drive_path[5:])" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "#Next, lets search for Granules of interest relating to the above discovery operation\n", 136 | "#Lets execute a search for specific granules from the following dataset\n", 137 | "# MetOp-A ASCAT Level 2 Ocean Surface Wind Vectors Optimized for Coastal Ocean\n", 138 | "# https://podaac.jpl.nasa.gov/dataset/ASCATA-L2-Coastal\n", 139 | "# ...based upon temporal (start and end) and spatial contraints. 
\n", 140 | "result = p.granule_search(dataset_id='PODAAC-ASOP2-12C01',\n", 141 | " start_time='2018-09-12T00:00:01Z',\n", 142 | " end_time='2018-09-14T11:59:59Z',\n", 143 | " bbox='-81,28,-67,40',\n", 144 | " sort_by='timeAsc',\n", 145 | " items_per_page='400',\n", 146 | " _format='atom')\n", 147 | "#print(result)\n", 148 | "searchStr = 'totalResults'\n", 149 | "numResultsStr = [ str(i) for i in result.strip().split() if searchStr in i ]\n", 150 | "print(numResultsStr)" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": {}, 157 | "outputs": [], 158 | "source": [ 159 | "#Here's the actual granule names\n", 160 | "pprint(u.mine_granules_from_granule_search(granule_search_response=str(result)))" 161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "metadata": {}, 167 | "outputs": [], 168 | "source": [ 169 | "#Now we simply need to reproduce the Drive URL's for the above granules.\n", 170 | "granules = d.mine_drive_urls_from_granule_search(granule_search_response=(str(result)))\n", 171 | "pprint(granules)" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": null, 177 | "metadata": {}, 178 | "outputs": [], 179 | "source": [ 180 | "#Let's retrieve these granules from PO.DAAC Drive.\n", 181 | "#Note that the download_granules function actually decompresses\n", 182 | "#and removes the compressed archive files locally for us.\n", 183 | "d.download_granules(granule_collection=granules, path='./dummy')" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "#Let's merge the files together\n", 193 | "import glob, os, subprocess, shlex\n", 194 | "nc_files = []\n", 195 | "for file in glob.glob(\"*.nc\"):\n", 196 | " nc_files.append(os.path.abspath(file))\n", 197 | "str_nc_files = ' '.join(nc_files)\n", 198 | "# Let's open the granules within Panoply - 
https://www.giss.nasa.gov/tools/panoply/\n", 199 | "# which is a netCDF, HDF and GRIB Data Viewer\n", 200 | "# developed by NASA's Goddard Institute for Space Studies\n", 201 | "args = shlex.split('/Applications/Panoply.app/Contents/MacOS/Panoply ' + str_nc_files)\n", 202 | "subprocess.Popen(args)" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "metadata": {}, 209 | "outputs": [], 210 | "source": [ 211 | "#Finally, let's subset the granule using L2SS\n", 212 | "#and download only the area of interest.\n", 213 | "from podaac import l2ss as l2ss\n", 214 | "l = l2ss.L2SS()\n", 215 | "granule_id = 'ascat_20180913_134800_metopa_61756_eps_o_coa_2401_ovw.l2.nc'\n", 216 | "query = {\n", 217 | " \"email\": \"your_email@here.com\",\n", 218 | " \"query\":\n", 219 | " [\n", 220 | " {\n", 221 | " \"compact\": \"true\",\n", 222 | " \"datasetId\": \"PODAAC-ASOP2-12C01\",\n", 223 | " \"bbox\": \"-81,28,-67,40\",\n", 224 | " \"variables\": [\"lat\", \"lon\", \"time\", \"wind_speed\"],\n", 225 | " \"granuleIds\": [\"ascat_20180913_134800_metopa_61756_eps_o_coa_2401_ovw.l2.nc\"]\n", 226 | " }\n", 227 | " ]\n", 228 | " }\n", 229 | "l.granule_download(query_string=query)\n", 230 | "ss_granule = os.path.abspath('subsetted-' + granule_id)\n", 231 | "print(ss_granule)\n", 232 | "# Finally let's make a call to Panoply to open the subsetted granule.\n", 233 | "args = shlex.split('/Applications/Panoply.app/Contents/MacOS/Panoply ' + ss_granule)\n", 234 | "subprocess.Popen(args)" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": null, 240 | "metadata": {}, 241 | "outputs": [], 242 | "source": [ 243 | "# A final comment and some food for thought, if you were\n", 244 | "# to write the above python script from scratch, you would\n", 245 | "# have to write around 400 or so lines of code.\n", 246 | "# Less the print statements, we've achieved it above in less\n", 247 | "# than 30 lines of code!\n", 248 | "# What is more, the code 
we have used has been tested by users,\n", 249 | "# as well as by our rich unit testing suite. Every function\n", 250 | "# in Podaacpy has an accompanying test!\n", 251 | "\n", 252 | "# Please report any issues with the above notebook at \n", 253 | "# https://github.com/nasa/podaacpy/issues" 254 | ] 255 | } 256 | ], 257 | "metadata": { 258 | "kernelspec": { 259 | "display_name": "Python 3", 260 | "language": "python", 261 | "name": "python3" 262 | }, 263 | "language_info": { 264 | "codemirror_mode": { 265 | "name": "ipython", 266 | "version": 3 267 | }, 268 | "file_extension": ".py", 269 | "mimetype": "text/x-python", 270 | "name": "python", 271 | "nbconvert_exporter": "python", 272 | "pygments_lexer": "ipython3", 273 | "version": "3.7.4" 274 | } 275 | }, 276 | "nbformat": 4, 277 | "nbformat_minor": 2 278 | } 279 | -------------------------------------------------------------------------------- /examples/podaac.ini: -------------------------------------------------------------------------------- 1 | [drive] 2 | urs_username = podaacpy 3 | urs_password = hZHYQ17yuag25zivK8F 4 | webdav_url = https://podaac-tools.jpl.nasa.gov/drive/files 5 | -------------------------------------------------------------------------------- /examples/podaacpy_getting_started_tutorial.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", 10 | "# you may not use this file except in compliance with the License.\n", 11 | "# You may obtain a copy of the License at\n", 12 | "#\n", 13 | "# http://www.apache.org/licenses/LICENSE-2.0\n", 14 | "#\n", 15 | "# Unless required by applicable law or agreed to in writing, software\n", 16 | "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", 17 | "# WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express or implied.\n", 18 | "# See the License for the specific language governing permissions and\n", 19 | "# limitations under the License." 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "import sys\n", 29 | "print(sys.version)" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "##################\n", 39 | "# Imports #\n", 40 | "##################\n", 41 | "# import the podaac package\n", 42 | "import podaac.podaac as podaac\n", 43 | "# import the podaac_utils package\n", 44 | "import podaac.podaac_utils as utils\n", 45 | "# import the mcc package\n", 46 | "import podaac.mcc as mcc\n", 47 | "\n", 48 | "#######################\n", 49 | "# Class instantiation #\n", 50 | "#######################\n", 51 | "# then create an instance of the Podaac class\n", 52 | "p = podaac.Podaac()\n", 53 | "# then create an instance of the PodaacUtils class\n", 54 | "u = utils.PodaacUtils()\n", 55 | "# then create an instance of the MCC class\n", 56 | "m = mcc.MCC()" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "###########################################\n", 66 | "# Lets look at some convenience functions #\n", 67 | "###########################################\n", 68 | "print(u.list_all_available_extract_granule_dataset_ids())" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "print(u.list_all_available_extract_granule_dataset_short_names())" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "print(u.list_all_available_granule_search_dataset_ids())" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | 
"execution_count": null, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "print(u.list_all_available_granule_search_dataset_short_names())" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "print(u.list_available_granule_search_level2_dataset_ids())" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": {}, 111 | "outputs": [], 112 | "source": [ 113 | "print(u.list_available_granule_search_level2_dataset_short_names())" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": { 120 | "scrolled": false 121 | }, 122 | "outputs": [], 123 | "source": [ 124 | "# Now lets take a look at using the results from above to interact with the PO.DAAC Webservices\n", 125 | "\n", 126 | "########################\n", 127 | "# PO.DAAC Web Services #\n", 128 | "########################\n", 129 | "\n", 130 | "# First lets retrieve dataset metadata\n", 131 | "print(p.dataset_metadata(dataset_id='PODAAC-GHMG2-2PO01'))" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": null, 137 | "metadata": { 138 | "scrolled": false 139 | }, 140 | "outputs": [], 141 | "source": [ 142 | "# Lets try searching for datasets\n", 143 | "print(p.dataset_search(keyword='modis'))" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | "# Now retrieve dataset variables\n", 153 | "print(p.dataset_variables(dataset_id='PODAAC-GHMDA-2PJ02'))" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": {}, 160 | "outputs": [], 161 | "source": [ 162 | "# Now extracting an individual granule\n", 163 | "print(p.extract_l4_granule(dataset_id='PODAAC-AQR50-3YVAS'))" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | 
"metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "# Now retrieving granule metadata\n", 173 | "print(p.granule_metadata(dataset_id='PODAAC-GHMG2-2PO01'), granule_name='20120912-MSG02-OSDPD-L2P-MSG02_0200Z-v01.nc')" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "from IPython.display import Image\n", 183 | "from IPython.core.display import HTML \n", 184 | "result = p.granule_preview(dataset_id='PODAAC-ASOP2-25X01')" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": null, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "# Additionally, we can search metadata for list of granules archived within the last 24 hours in Datacasting format.\n", 194 | "print(p.last24hours_datacasting_granule_md(dataset_id='PODAAC-AQR50-3YVAS'))" 195 | ] 196 | }, 197 | { 198 | "cell_type": "code", 199 | "execution_count": null, 200 | "metadata": {}, 201 | "outputs": [], 202 | "source": [ 203 | "# Now Searching for Granules\n", 204 | "print(p.granule_search(dataset_id='PODAAC-ASOP2-25X01',bbox='0,0,180,90',start_time='2013-01-01T01:30:00Z',end_time='2014-01-01T00:00:00Z',start_index='1', pretty='True'))" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": null, 210 | "metadata": {}, 211 | "outputs": [], 212 | "source": [ 213 | "######################################################\n", 214 | "# Working with Metadata Compliance Webservices (mcc) #\n", 215 | "######################################################\n", 216 | "\n", 217 | "# Compliance Check a Local File\n", 218 | "print(m.check_local_file(acdd_version='1.3', gds2_parameters='L4', file_upload='../podaac/tests/ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc', response='json'))" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "# 
Compliance Check a Remote File\n", 228 | "print(m.check_remote_file(checkers='CF', url_upload='http://test.opendap.org/opendap/data/ncml/agg/dated/CG2006158_120000h_usfc.nc', response='json'))" 229 | ] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "metadata": {}, 235 | "outputs": [], 236 | "source": [ 237 | "# Thank you for trying out podaacpy\n", 238 | "# That concludes the quick start. Hopefully this has been helpful in providing an overview \n", 239 | "# of the main podaacpy features. If you have any issues with this document then please register \n", 240 | "# them at the issue tracker - https://github.com/nasa/podaacpy/issues\n", 241 | "# Please use labels to classify your issue.\n", 242 | "\n", 243 | "# Thanks, \n", 244 | "# Lewis John McGibbney" 245 | ] 246 | } 247 | ], 248 | "metadata": { 249 | "kernelspec": { 250 | "display_name": "Python 3", 251 | "language": "python", 252 | "name": "python3" 253 | }, 254 | "language_info": { 255 | "codemirror_mode": { 256 | "name": "ipython", 257 | "version": 3 258 | }, 259 | "file_extension": ".py", 260 | "mimetype": "text/x-python", 261 | "name": "python", 262 | "nbconvert_exporter": "python", 263 | "pygments_lexer": "ipython3", 264 | "version": "3.7.4" 265 | } 266 | }, 267 | "nbformat": 4, 268 | "nbformat_minor": 1 269 | } 270 | -------------------------------------------------------------------------------- /examples/subset.json: -------------------------------------------------------------------------------- 1 | { 2 | "email":"lewis.j.mcgibbney@jpl.nasa.gov", 3 | "query":[ 4 | { 5 | "datasetId":"PODAAC-ASOP2-12C01", 6 | "granuleIds":["ascat_20170601_035400_metopa_55087_eps_o_coa_2401_ovw.l2.nc"], 7 | "bbox":"-105,5,-10,50", 8 | "variables":["wvc_quality_flag","wind_speed","wind_dir","lat","lon","time"], 9 | "compact":false 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /podaac/__init__.py: 
class Drive:
    ''' Client for retrieving granules from the PO.DAAC Drive WebDAV service.

    Configuration is resolved in order of precedence: explicit ``username`` /
    ``password`` / ``webdav_url`` arguments first, then values read from the
    ``[drive]`` section of the given config ``file`` (``podaac.ini`` style),
    then the default WebDAV URL.
    '''

    # Fallback endpoint used when neither an explicit argument nor a config
    # file supplies a WebDAV URL.
    DEFAULT_WEBDAV_URL = 'https://podaac-tools.jpl.nasa.gov/drive/files'

    def __init__(self, file, username, password, webdav_url=None):
        ''' In order to access PO.DAAC Drive, all users are required to be registered \
        with NASA Earthdata system. Users can log in to PO.DAAC Drive using the \
        following link https://podaac-tools.jpl.nasa.gov/drive/. \
        Once you have authenticated, you will be able to view, retrieve and change \
        your encrypted password. N.B. The encrypted password must then either be entered \
        into `podaac.ini` and passed as an argument to `file`, or alternatively provided \
        via the `username`/`password` parameters.

        :param file: name of a config file (resolved relative to this module) with a \
        ``[drive]`` section containing ``urs_username``, ``urs_password`` and \
        ``webdav_url``; pass a falsy value to skip config-file loading.
        :type file: :mod:`string`

        :param username: Earthdata username; overrides the config-file value.
        :type username: :mod:`string`

        :param password: encrypted Drive password; overrides the config-file value.
        :type password: :mod:`string`

        :param webdav_url: PO.DAAC Drive WebDAV endpoint; overrides the config-file \
        value. Defaults to ``None`` (NOT the URL itself) so that a URL present in the \
        config file is honored; previously the truthy string default silently \
        clobbered the config-file setting on every call.
        :type webdav_url: :mod:`string`
        '''
        # Start from None so the attributes always exist, even when no config
        # file and no explicit credentials are supplied.
        self.USERNAME = None
        self.PASSWORD = None
        self.URL = None
        if file:
            config = configparser.ConfigParser()
            config_file_path = os.path.join(os.path.dirname(__file__), file)
            # 'with' guarantees the handle is closed; the previous
            # config.read_file(open(...)) leaked it.
            with open(config_file_path, 'r') as config_file:
                config.read_file(config_file)
            self.USERNAME = config['drive']['urs_username']
            self.PASSWORD = config['drive']['urs_password']
            self.URL = config['drive']['webdav_url']
        # Explicit arguments take precedence over config-file values.
        if username:
            self.USERNAME = username
        if password:
            self.PASSWORD = password
        if webdav_url:
            self.URL = webdav_url
        if self.URL is None:
            self.URL = self.DEFAULT_WEBDAV_URL

    def mine_drive_urls_from_granule_search(self, granule_search_response=''):
        ''' Convenience function which extracts the PO.DAAC Drive URLs from \
        a given granule search obtained using podaac.granule_search(). \
        The response of this function is an array of strings denoting the \
        PO.DAAC Drive URLs to the granules.

        :param granule_search_response: the output response of a podaac.granule_search()
        :type granule_search_response: :mod:`string`

        :returns: a list of PO.DAAC Drive URLs.
        '''
        # Imported lazily so the class stays usable without bs4 installed
        # as long as this helper is not called.
        from bs4 import BeautifulSoup
        soup = BeautifulSoup(granule_search_response, 'html.parser')
        drive_list = []
        for drive_link in soup.find_all('link'):
            href = drive_link.get('href')
            # Only keep links that point at the configured Drive endpoint.
            if self.URL in href:
                drive_list.append(href)
        return drive_list

    def download_granules(self, granule_collection=None, path=''):
        ''' Granule download service downloads a granule collection \
        from PO.DAAC Drive to the users' local machine at the given path. Note, as \
        of https://github.com/nasa/podaacpy/issues/131 we now maintain the PO.DAAC \
        Drive directory structure. This is to say, if the Drive URL was \
        https://podaac-tools.jpl.nasa.gov/drive/files/allData/ghrsst/data/GDS2/L2P/AVHRR19_L/NAVO/v1/2019/088/20190329001403-NAVO-L2P_GHRSST-SST1m-AVHRR19_L-v02.0-fv01.0.nc \
        then a directory structure would be created as follows \
        allData/ghrsst/data/GDS2/L2P/AVHRR19_L/NAVO/v1/2019/088/20190329001403-NAVO-L2P_GHRSST-SST1m-AVHRR19_L-v02.0-fv01.0.nc

        :param granule_collection: a populated collection of PO.DAAC Drive Granule URLs. \
        These can be obtained by using the drive.mine_drive_urls_from_granule_search() \
        function which itself merely wraps a podaac.granule_search() request.
        :type granule_collection: :mod:`list`

        :param path: path to a directory where you want the data to be stored. \
        Defaults to this module's directory.
        :type path: :mod:`string`

        :raises PermissionError: if any granule download does not return HTTP 200, \
        typically indicating misconfigured authentication.

        :returns: downloaded granules extracted in the destination \
        directory path provided (``.gz`` archives are decompressed and removed).
        '''
        if granule_collection is None:
            granule_collection = []

        # Strip the configured Drive URL prefix (plus its trailing slash) to
        # recover the Drive-relative directory structure we mirror locally.
        # Previously this was a hard-coded [46:], which was only correct for
        # the default WebDAV URL.
        prefix_length = len(self.URL) + 1
        for granule_url in granule_collection:
            directory_structure, granule = os.path.split(granule_url[prefix_length:])
            granule_name = os.path.splitext(granule)[0]
            if path == '':
                granule_path = os.path.join(os.path.dirname(__file__), directory_structure)
            else:
                granule_path = os.path.join(path, directory_structure)
            r = requests.get(granule_url, auth=HTTPBasicAuth(self.USERNAME, self.PASSWORD), stream=True)
            if r.status_code != 200:
                raise PermissionError(
                    "Granule: '%s' not downloaded. Please check authentication configuration and try again." % (granule))
            try:
                from pathlib import Path
            except ImportError:
                from pathlib2 import Path  # python 2 backport
            Path(granule_path).mkdir(parents=True, exist_ok=True)
            local_granule = os.path.join(granule_path, granule)
            # Stream the response to disk chunk by chunk to avoid holding the
            # whole granule in memory.
            with open(local_granule, 'wb') as output_granule:
                for chunk in r:
                    output_granule.write(chunk)

            if granule.endswith('.gz'):
                # Decompress alongside the archive, then drop the archive;
                # context managers replace the manual open/close pairs.
                with gzip.open(local_granule, 'rb') as gzip_granule, \
                        open(os.path.join(granule_path, granule_name), 'wb') as uncompressed_granule:
                    uncompressed_granule.write(gzip_granule.read())
                os.remove(local_granule)
12 | 13 | import requests 14 | import os 15 | import json 16 | import time 17 | import zipfile 18 | from future.moves.urllib.request import urlopen, urlretrieve 19 | from future.moves.urllib.parse import urlencode 20 | from future.moves.http.client import HTTPSConnection 21 | 22 | 23 | class L2SS: 24 | 25 | def __init__(self): 26 | self.URL = 'https://podaac-tools.jpl.nasa.gov/l2ss-services/l2ss/' 27 | 28 | def dataset_search(self, dataset_id='', variable=None, sensor=None, provider=None, 29 | start_time='', end_time='', start_index='', items_per_page='50'): 30 | ''' Dataset search service lists available datasets and returns them. 31 | 32 | :param dataset_id: Search dataset belong to given PODAAC Dataset persistent ID. 33 | :type dataset_id: :mod:`string` 34 | 35 | :param variable: Search for datasets with variable name. For multi-value input, \ 36 | this input is taken as a list. Example: [ 'Sea Surface Temperature', 'Surface Wind'] 37 | :type variable: :mod:`list` 38 | 39 | :param sensor: Search for datasets with sensor. For multi-value input, \ 40 | this input is taken as a list. 41 | :type sensor: :mod:`list` 42 | 43 | :param provider: Search for datasets with provider. For multi-value input, \ 44 | this input is taken as a list. 45 | :type provider: :mod:`list` 46 | 47 | :param start_time: Lower time bound. If not specified, lower time bound of \ 48 | the dataset will be used. Example: '2011-12-31T23:59:59-06:00Z' 49 | :type start_time: :mod:`string` 50 | 51 | :param end_time: Upper time bound. If not specified, upper time bound of \ 52 | the dataset will be used. Example: 2019-12-31T23:59:59-06:00Z 53 | :type end_time: :mod:`string` 54 | 55 | :param items_per_page: number of results to return. Defaults to 50. 56 | :type items_per_page: :mod:`string` 57 | 58 | :param start_index: start index of result. 59 | :type start_index: :mod:`string` 60 | 61 | :returns: a json response containing the datasets. 
62 | ''' 63 | 64 | if variable is None: 65 | variable = [] 66 | 67 | if sensor is None: 68 | sensor = [] 69 | 70 | if provider is None: 71 | provider = [] 72 | 73 | try: 74 | url = self.URL + 'dataset/search?' 75 | if dataset_id: 76 | url = url + 'datasetId=' + dataset_id 77 | if variable: 78 | for var in variable: 79 | url = url + '&variable=' + var 80 | if sensor: 81 | for item in sensor: 82 | url = url + '&sensor=' + item 83 | if provider: 84 | for item in provider: 85 | url = url + '&provider=' + item 86 | if start_time: 87 | url = url + '&startTime=' + start_time 88 | if end_time: 89 | url = url + '&endTime=' + end_time 90 | if start_index: 91 | url = url + '&startIndex=' + start_index 92 | if items_per_page: 93 | url = url + '&itemsPerPage=' + items_per_page 94 | 95 | datasets = requests.get(url) 96 | status_codes = [404, 400, 503, 408] 97 | if datasets.status_code in status_codes: 98 | datasets.raise_for_status() 99 | 100 | except requests.exceptions.HTTPError as error: 101 | print(error) 102 | raise 103 | 104 | return datasets.text 105 | 106 | def dataset_variables(self, dataset_id): 107 | ''' Dataset Variable retrieves dataset configuration information including variables. 108 | 109 | :param dataset_id: datasetId for the configuration information. 110 | :type dataset_id: :mod:`string` 111 | 112 | :returns: a json response containing the dataset variables. 
113 | ''' 114 | try: 115 | url = self.URL + '/dataset/variable?datasetId=' + dataset_id 116 | variables = requests.get(url) 117 | status_codes = [404, 400, 503, 408] 118 | if variables.status_code in status_codes: 119 | variables.raise_for_status() 120 | 121 | except requests.exceptions.HTTPError as error: 122 | print(error) 123 | raise 124 | 125 | return variables.text 126 | 127 | def granule_search(self, dataset_id='', bbox='', start_time='', end_time='', 128 | name='', sort='', start_index='', items_per_page='50'): 129 | ''' Granule Search retrieves all base granule information (datasetId, start time, end time) \ 130 | matching the specified datasetId, date, and region. This approach may change if \ 131 | the data/querying turns out to be too expensive. Response is structured in a minimalistic\ 132 | way to cut down on the file size. 133 | 134 | :param dataset_id: Search granules belong to given PODAAC Dataset persistent ID. 135 | :type dataset_id: :mod:`string` 136 | 137 | :param bbox: Search granules with Bounding box Ex: '-180,-90,180,90' 138 | :type bbox: :mod:`string` 139 | 140 | :param start_time: Lower time bound. If not specified, lower time bound of \ 141 | the dataset will be used. Example: '2011-12-31T23:59:59-06:00Z' 142 | :type start_time: :mod:`string` 143 | 144 | :param end_time: Upper time bound. If not specified, upper time bound of \ 145 | the dataset will be used. Example: 2019-12-31T23:59:59-06:00Z 146 | :type end_time: :mod:`string` 147 | 148 | :param name : Search granules with exact name or name pattern using wildcard\ 149 | search Example: ascat* this matches name that starts with "ascat" 150 | :type name: :mod:`string` 151 | 152 | :param sort: Sort output. There are two strings delimited by space.\ 153 | The first string is the field name, and the second string is 'asc' or 'desc'\ 154 | Example: sort='Granule-Name asc' 155 | :type sort: :mod:`string` 156 | 157 | :param items_per_page: number of results to return. Default to 50. 
158 | :type items_per_page: :mod:`string` 159 | 160 | :param start_index: start index of result. 161 | :type start_index: :mod:`string` 162 | 163 | :returns: a json response containing the dataset granules. 164 | ''' 165 | try: 166 | url = self.URL + 'granule/search?' 167 | if dataset_id: 168 | url = url + 'datasetId=' + dataset_id 169 | if bbox: 170 | url = url + '&bbox=' + bbox 171 | if start_time: 172 | url = url + '&startTime=' + start_time 173 | if end_time: 174 | url = url + '&endTime=' + end_time 175 | if start_index: 176 | url = url + '&startIndex=' + start_index 177 | if items_per_page: 178 | url = url + '&itemsPerPage=' + items_per_page 179 | if name: 180 | url = url + '&name=' + name 181 | if sort: 182 | url = url + '&sort=' + sort 183 | 184 | granules = requests.get(url) 185 | status_codes = [404, 400, 503, 408] 186 | if granules.status_code in status_codes: 187 | granules.raise_for_status() 188 | 189 | except requests.exceptions.HTTPError as error: 190 | print(error) 191 | raise 192 | 193 | return granules.text 194 | 195 | def granules_availability(self, dataset_id='', start_time='', end_time='', gap='', bbox=''): 196 | ''' Granules Availability calculates granule counts per day or month from given date range. 197 | 198 | :param dataset_id: Search granules belong to given PODAAC Dataset persistent ID. 199 | :type dataset_id: :mod:`string` 200 | 201 | :param start_time: Lower time bound. If not specified, lower time bound of \ 202 | the dataset will be used. Example: '2011-12-31T23:59:59-06:00Z' 203 | :type start_time: :mod:`string` 204 | 205 | :param end_time: Upper time bound. If not specified, upper time bound of \ 206 | the dataset will be used. Example: 2019-12-31T23:59:59-06:00Z 207 | :type end_time: :mod:`string` 208 | 209 | :param gap: The size of each date range expressed as an interval to be added\ 210 | to the lower bound. 
Example: 'DAY', 'MONTHS' 211 | :type gap: :mod:`string` 212 | 213 | :param bbox: Search granules with Bounding box Ex: '-180,-90,180,90' 214 | :type bbox: :mod:`string` 215 | 216 | :returns: a json response containing the granule count and other relevant information. 217 | ''' 218 | try: 219 | url = self.URL + 'granule/availability?' 220 | url = url + 'datasetId=' + dataset_id + '&startTime=' + \ 221 | start_time + '&endTime=' + end_time + '&gap=' + gap 222 | if bbox: 223 | url = url + '&bbox=' + bbox 224 | 225 | granule_availability = requests.get(url) 226 | status_codes = [404, 400, 503, 408] 227 | if granule_availability.status_code in status_codes: 228 | granule_availability.raise_for_status() 229 | 230 | except requests.exceptions.HTTPError as error: 231 | print(error) 232 | raise 233 | 234 | return granule_availability.text 235 | 236 | def granule_preview_image(self, dataset_id, granule, year, day, variable, path=''): 237 | ''' Granule Preview Image Service provides thumbnail image of selected variable for\ 238 | selected granule. 239 | 240 | :param dataset_id: Search granules belong to given PODAAC Dataset persistent ID. 241 | :type dataset_id: :mod:`string` 242 | 243 | :param granule: string granule name. 244 | :type granule: :mod:`string` 245 | 246 | :param year: year in 4 digits. Example= '2014' 247 | :type year: :mod:`string` 248 | 249 | :param day: day of year in 3 digits. Example= '140' 250 | :type day: :mod:`string` 251 | 252 | :param variable_id: Variable id described in dataset variable service. 253 | :type variable_id: :mod:`string` 254 | 255 | :returns: returns thumbnail image of selected variable for selected granule. 
256 | ''' 257 | try: 258 | url = self.URL + 'preview/' + dataset_id + '/' + year + \ 259 | '/' + day + '/' + granule + '/' + variable + '.png' 260 | if path: 261 | path = path + '/' + dataset_id + '.png' 262 | else: 263 | path = os.path.join(os.path.dirname( 264 | __file__), dataset_id + '.png') 265 | with open(path, 'wb') as image_file: 266 | image = urlopen(url) 267 | image_file.write(image.read()) 268 | 269 | except Exception as e: 270 | print(e) 271 | raise 272 | 273 | return image 274 | 275 | def image_palette(self, palette_name): 276 | ''' Image Palette service retrieves palette descriptor in json format 277 | 278 | :param palette_name: palette_name whose palette descriptor we want to\ 279 | retrieve. 280 | :type palette_name: :mod:`string` 281 | 282 | :returns: returns palette descriptor in json format. 283 | ''' 284 | try: 285 | url = self.URL + 'palettes/' + palette_name + '.json' 286 | image_palette = requests.get(url) 287 | status_codes = [404, 400, 503, 408] 288 | if image_palette.status_code in status_codes: 289 | image_palette.raise_for_status() 290 | 291 | except requests.exceptions.HTTPError as error: 292 | print(error) 293 | raise 294 | 295 | return image_palette.text 296 | 297 | def granule_download(self, query_string, path=''): 298 | ''' Granule Download service submits a job to subset and download. Upon a successful request,\ 299 | token will be returned which can be used to check status. 300 | 301 | :param query_string: data collection query json as a string. 302 | :type query_string: :mod:`string` 303 | 304 | :param path: path to a directory where you want the subsetted \ 305 | dataset to be stored. 306 | :type path: :mod:`string` 307 | 308 | :returns: a zip file downloaded and extracted in the destination\ 309 | directory path provided. 
310 | ''' 311 | params = urlencode({'query': query_string}) 312 | headers = { 313 | "Content-type": "application/x-www-form-urlencoded", "Accept": "*"} 314 | connection = HTTPSConnection("podaac-tools.jpl.nasa.gov") 315 | connection.request("POST", "/l2ss-services/l2ss/subset/submit", 316 | params, headers) 317 | response = connection.getresponse() 318 | data = response.read().decode('utf-8') 319 | result = json.loads(data) 320 | token = result['token'] 321 | connection.close() 322 | 323 | flag = 0 324 | while flag == 0: 325 | url = url = self.URL + "subset/status?token=" + token 326 | subset_response = requests.get(url).text 327 | subset_response_json = json.loads(subset_response) 328 | status = subset_response_json['status'] 329 | if status == "done": 330 | flag = 1 331 | if status == "error": 332 | raise Exception( 333 | "Unexpected error occured for the subset job you have requested") 334 | if status == "partial error": 335 | raise Exception( 336 | "The job was done but with some errors, please submit the job again") 337 | time.sleep(1) 338 | 339 | print("Done! downloading the dataset zip .....") 340 | download_url = subset_response_json['resultURLs'][0] 341 | split = download_url.split('/') 342 | length = len(split) 343 | zip_file_name = split[length - 1] 344 | if path == '': 345 | path = os.path.join(os.path.dirname(__file__), zip_file_name) 346 | else: 347 | path = path + zip_file_name 348 | response = urlretrieve(download_url, path) 349 | zip_content = zipfile.ZipFile(path) 350 | zip_content.extractall() 351 | os.remove(path) 352 | 353 | def subset_status(self, token): 354 | ''' Subset Status service check status on existing download job. 355 | The possible status that it returns include the following.. :: 356 | 357 | * "queued" 358 | * "processing" 359 | * "partial error" 360 | * "done" 361 | * "error" 362 | 363 | :param token: job token. job token is provided when submitting the job. 
364 | :type token: :mod:`string` 365 | 366 | :returns: the status of the subset request. 367 | ''' 368 | try: 369 | url = self.URL + 'subset/status?token=' + token 370 | response = requests.get(url) 371 | response_json = json.loads(response.text) 372 | status = response_json['status'] 373 | if status == "unknown": 374 | raise Exception("Invalid Token : Please check your token") 375 | 376 | except Exception as e: 377 | print(e) 378 | raise 379 | -------------------------------------------------------------------------------- /podaac/mcc.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import os 16 | import requests 17 | 18 | class MCC: 19 | 20 | 21 | def __init__(self): 22 | self.URL = 'https://podaac-tools.jpl.nasa.gov/mcc/check' 23 | 24 | def check_remote_file(self, checkers, url_upload, response='json'): 25 | '''GET a remote file e.g. from an OPeNDAP URL and compliance \ 26 | check it against the endpoint at https://podaac-uat.jpl.nasa.gov/mcc/check. 27 | 28 | :param checkers: Must specify at least one test. Multiple tests are \ 29 | delimited by commas. Possible values include 'ACDD-x.x', \ 30 | 'CF' and 'GDS2' which also requires 'GDS2-parameters:levelAvailable'. \ 31 | Available levels are 'L2P', 'L3', and 'L4'. 
32 | :type checkers: :mod:`string` 33 | 34 | :param url_upload: A valid url to a netCDF file; maximum 5.00 GB 35 | :type url_upload: :mod:`string` 36 | 37 | :param response: (Optional) Specify 'html', 'json', or 'pdf' result output. 38 | :type response: :mod:`string` 39 | 40 | :returns: one of 'html', 'json', or 'pdf'. 41 | 42 | :raises ValueError: If no dataset can be found for the supplied url_upload \ 43 | or if the requested dataset is a multi-file dataset. 44 | 45 | ''' 46 | 47 | try: 48 | url = self.URL + '?checkers=' + checkers + \ 49 | '&url-upload=' + url_upload + '&response=' + response 50 | result = requests.get(url) 51 | if result.status_code == 404 or result.status_code == 400 or result.status_code == 503 or result.status_code == 408: 52 | result.raise_for_status() 53 | 54 | except requests.exceptions.HTTPError as error: 55 | print(error) 56 | raise 57 | 58 | return result.text 59 | 60 | def check_local_file(self, acdd_version, gds2_parameters, file_upload, response='json'): 61 | '''POST a local file to the metadata compliance checker \ 62 | endpoint at https://podaac-uat.jpl.nasa.gov/mcc/check 63 | 64 | :param acdd_version: Must be present and and set to either 1.1 or 1.3. \ 65 | 'acdd' tag must also be present and must be set to 'on'. 66 | :type acdd_version: :mod:`string` 67 | 68 | :param gds2_parameters: Must be present and set to either 'L2P', 'L3', 'L4'. 69 | :type gds2_parameters: :mod:`string` 70 | 71 | :param file_upload: A valid location of a netCDF file; maximum 5.00 GB. 72 | :type file_upload: :mod:`string` 73 | 74 | :param response: Specify 'html', 'json', or 'pdf' result output. \ 75 | Default is 'json'. 76 | :type response: :mod:`string` 77 | 78 | :returns: one of 'html', 'json', or 'pdf'. 79 | 80 | :raises ValueError: If no dataset can be found for the supplied url_upload \ 81 | or if the requested dataset is a multi-file dataset. 
82 | 83 | ''' 84 | 85 | try: 86 | if not os.path.exists(file_upload): 87 | raise Exception( 88 | "The file you are trying to upload does not exist in the local machine.") 89 | 90 | files = {'file-upload': open(file_upload, 'rb+')} 91 | data = {'CF': 'on', 'ACDD': 'on', 'ACDD-version': acdd_version, 92 | 'GDS2': 'on', 'GDS2-parameters': gds2_parameters, 'response': response} 93 | result = requests.post(self.URL, files=files, data=data) 94 | if result.status_code == 404 or result.status_code == 400 or result.status_code == 503 or result.status_code == 408: 95 | result.raise_for_status() 96 | 97 | except requests.exceptions.HTTPError as error: 98 | print(error) 99 | raise 100 | 101 | except Exception as e: 102 | print(e) 103 | raise 104 | 105 | return result.text 106 | -------------------------------------------------------------------------------- /podaac/oceancolor.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import requests 16 | from future.moves.urllib.request import urlretrieve 17 | from future.moves.urllib.parse import urlparse 18 | import os 19 | 20 | SEARCH_URL = 'https://oceandata.sci.gsfc.nasa.gov/api/file_search?' 
# GET_URL = 'https://oceandata.sci.gsfc.nasa.gov/cgi/getfile/'
HEADERS = {
    'User-Agent': 'Podaacpy Python Library'
}


class OceanColor:
    '''Client for the NASA Ocean Data Processing System file search and \
    HTTP-based data distribution services.'''

    def __init__(self):
        """Sets the Ocean Color file-search endpoint."""
        self.SEARCH_URL = 'https://oceandata.sci.gsfc.nasa.gov/api/file_search?'
        # self.GET_URL = 'https://oceandata.sci.gsfc.nasa.gov/cgi/getfile/'

    def file_search(self, sensor='', sdate='', edate='', dtype='', add_url='1', results_as_file='1',
                    search='', sub_id='', std_only='1', cksum='', output_format='json'):
        '''File search service retrieves publically available files within the \
        NASA Ocean Data Processing System.

        :param sensor: mission name. valid options include: aquarius, seawifs, \
        aqua, terra, meris, octs, czcs, hico, viirs
        :type sensor: :mod:`string`

        :param sdate: start date for a search
        :type sdate: :mod:`string`

        :param edate: end date for a search
        :type edate: :mod:`string`

        :param dtype: data type (i.e. level). valid options: L0, L1, L2, L3b \
        (for binned data), L3m (for mapped data), MET (for ancillary \
        data), misc (for sundry products)
        :type dtype: :mod:`string`

        :param add_url: include full url in search result (boolean, 1=yes, 0=no)
        :type add_url: :mod:`string`

        :param results_as_file: return results as a test file listing \
        (boolean, 1=yes, 0=no, thus returns and HTML page)
        :type results_as_file: :mod:`string`

        :param search: text string search
        :type search: :mod:`string`

        :param sub_id: non-extracted subscription ID to search
        :type sub_id: :mod:`string`

        :param std_only: restrict results to standard products \
        (i.e. ignore extracts, regional processings, etc.; boolean)
        :type std_only: :mod:`string`

        :param cksum: return a checksum file for search results \
        (boolean; sha1sums except for Aquarius soil moisture \
        products which are md5sums; forces results_as_file; ignores addurl)
        :type cksum: :mod:`string`

        :param output_format: valid options are: 'json', 'txt' and 'html'
        :type output_format: :mod:`string`

        :returns: by default a json response based on the requested 'output_format'. \
        Options are 'json, 'txt' and 'html'.

        '''
        try:
            url = SEARCH_URL
            # 'sensor' and one of 'search'/'sub_id' are mandatory; everything
            # else is appended only when supplied.
            if sensor:
                url = url + 'sensor=' + sensor
            else:
                raise Exception("'sensor' parameter is required!")
            if sdate:
                url = url + '&sdate=' + sdate
            if edate:
                url = url + '&edate=' + edate
            if dtype:
                url = url + '&dtype=' + dtype
            url = url + '&addurl=' + str(add_url)
            url = url + '&results_as_file=' + str(results_as_file)
            if search:
                url = url + '&search=' + search
            elif sub_id:
                url = url + '&subID=' + sub_id
            else:
                raise Exception("Either 'search' or 'sub_id' parameter is required!")
            url = url + '&std_only=' + str(std_only)
            if cksum:
                url = url + '&cksum=' + cksum
            url = url + '&format=' + output_format

            response = requests.post(url, headers=HEADERS)
            status_codes = [404, 400, 503, 408]
            if response.status_code in status_codes:
                response.raise_for_status()

        except requests.exceptions.HTTPError as error:
            print(error)
            raise

        return str(response.text)

    def get_file(self, url='', path=''):
        '''It is possible to mimic FTP bulk data downloads using the \
        HTTP-based data distribution server at https://oceandata.sci.gsfc.nasa.gov.

        :param url: a single file name which can be obtained by calling #file_search() \
        an example would be \
        https://oceandata.sci.gsfc.nasa.gov/cgi/getfile/O1997001.L3b_DAY_CHL.nc
        :type url: :mod:`string`

        :param path: Destination directory into which the granule \
        needs to be downloaded.
        :type path: :mod:`string`

        :returns: the basename of the file downloaded from the \
        HTTP-based data distribution server at https://oceandata.sci.gsfc.nasa.gov.

        '''
        try:
            # Removed the redundant "url = url" self-assignment; simply
            # validate that a url was supplied.
            if not url:
                raise Exception("'file' parameter is required!")
            file = os.path.basename(urlparse(url).path)
            if path == '':
                path = os.path.join(os.path.dirname(__file__), file)
            else:
                # os.path.join instead of manual '/' concatenation.
                path = os.path.join(path, file)
            urlretrieve(url, path)
            print("Downloaded '%s' to '%s'" % (file, path))
            return file

        except Exception as e:
            print(e)
            raise

# ---------------------------------------------------------------------------
# /podaac/podaac.ini
# ---------------------------------------------------------------------------

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
12 | 13 | [drive] 14 | urs_username = 15 | urs_password = 16 | webdav_url = https://podaac-tools.jpl.nasa.gov/drive/files -------------------------------------------------------------------------------- /podaac/podaac_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from bs4 import BeautifulSoup 16 | import requests 17 | import defusedxml.ElementTree as ET 18 | try: 19 | from . import podaac as p 20 | except: 21 | import podaac as p 22 | 23 | class PodaacUtils: 24 | 25 | def __init__(self): 26 | """Sets the base WebServices URL to https://podaac.jpl.nasa.gov/ws/""" 27 | self.URL = 'https://podaac.jpl.nasa.gov/ws/' 28 | 29 | def list_all_available_extract_granule_dataset_ids(self): 30 | '''Convenience function which returns an up-to-date \ 31 | list of all available granule dataset id's which can be \ 32 | used in the granule extraction service. 33 | 34 | :returns: a comma-seperated list of granule dataset id's. 
35 | 36 | ''' 37 | dataset_ids = [] 38 | html = requests.get(self.URL + 'extract/granule/index.html') 39 | soup = BeautifulSoup(html.text, 'html.parser') 40 | 41 | table = soup.find("table", {"id": "tblDataset"}) 42 | rows = table.find_all('tr') 43 | rows.remove(rows[0]) 44 | 45 | for row in rows: 46 | x = row.find_all('td') 47 | dataset_ids.append(x[0].text.encode('utf-8')) 48 | 49 | return dataset_ids 50 | 51 | def list_all_available_extract_granule_dataset_short_names(self): 52 | '''Convenience function which returns an up-to-date \ 53 | list of all available granule dataset short names which can be \ 54 | used in the granule extraction service. 55 | 56 | :returns: a comma-seperated list of granule dataset short names. 57 | 58 | ''' 59 | dataset_short_names = [] 60 | html = requests.get(self.URL + 'extract/granule/index.html') 61 | soup = BeautifulSoup(html.text, 'html.parser') 62 | 63 | table = soup.find("table", {"id": "tblDataset"}) 64 | rows = table.find_all('tr') 65 | rows.remove(rows[0]) 66 | 67 | for row in rows: 68 | x = row.find_all('td') 69 | dataset_short_names.append(x[1].text.encode('utf-8')) 70 | 71 | return dataset_short_names 72 | 73 | def list_all_available_granule_search_dataset_ids(self): 74 | '''Convenience function which returns an up-to-date \ 75 | list of available all granule dataset id's. 
76 | 77 | :returns: a comma-seperated list of granule dataset id's 78 | 79 | ''' 80 | data_part1 = requests.get( 81 | self.URL + 'search/dataset/?format=atom&itemsPerPage=400').text 82 | data_part2 = requests.get( 83 | self.URL + 'search/dataset?startIndex=400&itemsPerPage=400&format=atom').text 84 | root1 = ET.fromstring(data_part1.encode('utf-8')) 85 | root2 = ET.fromstring(data_part2.encode('utf-8')) 86 | 87 | dataset_ids = [] 88 | for entry in root1.findall('{http://www.w3.org/2005/Atom}entry'): 89 | dataset_id = entry.find( 90 | '{https://podaac.jpl.nasa.gov/opensearch/}datasetId').text 91 | dataset_ids.append(dataset_id) 92 | 93 | for entry in root2.findall('{http://www.w3.org/2005/Atom}entry'): 94 | dataset_id = entry.find( 95 | '{https://podaac.jpl.nasa.gov/opensearch/}datasetId').text 96 | dataset_ids.append(dataset_id) 97 | 98 | dataset_ids_level1 = [] 99 | dataset_ids_level2 = self.list_available_granule_search_level2_dataset_ids() 100 | dataset_ids_level1 = list(set(dataset_ids) - set(dataset_ids_level2)) 101 | 102 | return dataset_ids_level1 103 | 104 | def list_all_available_granule_search_dataset_short_names(self): 105 | '''Convenience function which returns an up-to-date \ 106 | list of available granule dataset short names. 107 | 108 | :returns: a comma-seperated list of granule dataset short names. 
109 | 110 | ''' 111 | data_part1 = requests.get( 112 | self.URL + 'search/dataset/?format=atom&itemsPerPage=400').text 113 | data_part2 = requests.get( 114 | self.URL + 'search/dataset?startIndex=400&itemsPerPage=400&format=atom').text 115 | root1 = ET.fromstring(data_part1.encode('utf-8')) 116 | root2 = ET.fromstring(data_part2.encode('utf-8')) 117 | 118 | dataset_short_names = [] 119 | for entry in root1.findall('{http://www.w3.org/2005/Atom}entry'): 120 | name = entry.find( 121 | '{https://podaac.jpl.nasa.gov/opensearch/}shortName').text 122 | dataset_short_names.append(name) 123 | 124 | for entry in root2.findall('{http://www.w3.org/2005/Atom}entry'): 125 | name = entry.find( 126 | '{https://podaac.jpl.nasa.gov/opensearch/}shortName').text 127 | dataset_short_names.append(name) 128 | 129 | # dataset_short_names_level1 = [] 130 | dataset_short_names_level2 = \ 131 | self.list_available_granule_search_level2_dataset_short_names() 132 | dataset_short_names_level1 = list( 133 | set(dataset_short_names) - set(dataset_short_names_level2)) 134 | 135 | return dataset_short_names_level1 136 | 137 | def list_available_granule_search_level2_dataset_ids(self): 138 | '''Convenience function which returns an up-to-date \ 139 | list of available level2 granule dataset id's. 140 | 141 | :returns: a comma-seperated list of granule dataset id's 142 | 143 | ''' 144 | dataset_ids = [] 145 | url = 'https://podaac.jpl.nasa.gov/l2ssIngest/datasets' 146 | response = requests.get(url) 147 | data = response.json() 148 | 149 | for item in data["datasets"]: 150 | dataset_ids.append(item["persistentId"]) 151 | 152 | return dataset_ids 153 | 154 | def list_available_granule_search_level2_dataset_short_names(self): 155 | '''Convenience function which returns an up-to-date \ 156 | list of available level2 granule dataset short names. 157 | 158 | :returns: a comma-seperated list of granule dataset short names. 
159 | 160 | ''' 161 | dataset_ids = [] 162 | url = 'https://podaac.jpl.nasa.gov/l2ssIngest/datasets' 163 | response = requests.get(url) 164 | data = response.json() 165 | 166 | for item in data["datasets"]: 167 | dataset_ids.append(item["shortName"]) 168 | 169 | return dataset_ids 170 | 171 | def list_level4_dataset_ids(self): 172 | '''Convenience function which returns an up-to-date \ 173 | list of level4 dataset id's. 174 | 175 | :returns: a comma-seperated list of level4 dataset id's 176 | 177 | ''' 178 | podaac = p.Podaac() 179 | data = podaac.dataset_search(process_level='4', items_per_page='400') 180 | root = ET.fromstring(data.encode('utf-8')) 181 | 182 | dataset_ids = [] 183 | for entry in root.findall('{http://www.w3.org/2005/Atom}entry'): 184 | dataset_id = entry.find( 185 | '{https://podaac.jpl.nasa.gov/opensearch/}datasetId').text 186 | dataset_ids.append(dataset_id) 187 | 188 | return dataset_ids 189 | 190 | def list_level4_dataset_short_names(self): 191 | '''Convenience function which returns an up-to-date \ 192 | list of level4 dataset short names. 193 | 194 | :returns: a comma-seperated list of level4 dataset short names. 195 | 196 | ''' 197 | podaac = p.Podaac() 198 | data = podaac.dataset_search(process_level='4', items_per_page='400') 199 | l4_dataset_short_names = [] 200 | root = ET.fromstring(data.encode('utf-8')) 201 | 202 | for entry in root.findall('{http://www.w3.org/2005/Atom}entry'): 203 | l4_dataset_short_name = entry.find( 204 | '{https://podaac.jpl.nasa.gov/opensearch/}shortName').text 205 | l4_dataset_short_names.append(l4_dataset_short_name) 206 | 207 | return l4_dataset_short_names 208 | 209 | def mine_granules_from_granule_search(self, granule_search_response=''): 210 | '''Convenience function which extracts the granule names for \ 211 | a given granule search obtained using podaac.granule_search(). \ 212 | The response of this function is an array of strings denoting the \ 213 | granule names for the granule search. 
214 | 215 | :param granule_search_response: the output response of a podaac.granule_search() 216 | :type path: :mod:`string` 217 | 218 | :returns: prints an array of granule names. 219 | 220 | ''' 221 | search_str = '' 222 | granule_list = \ 223 | [str(i) for i in granule_search_response.strip().split() 224 | if search_str in i and 'PO.DAAC' not in i] 225 | strp_granule_list = \ 226 | [i.replace('<title>','').replace('','') for i in granule_list] 227 | return strp_granule_list 228 | 229 | @staticmethod 230 | def mine_opendap_urls_from_granule_search(granule_search_response=''): 231 | """ Convenience function which extracts the PO.DAAC OPeNDAP URLs from 232 | a given granule search obtained using podaac.granule_search(). 233 | The response of this function is an array of strings denoting the 234 | PO.DAAC OPeNDAP URLs to the granules. 235 | 236 | :param granule_search_response: the output response of a podaac.granule_search() 237 | :type path: :mod:`string` 238 | 239 | :returns: prints an array of PO.DAAC OPeNDAP URLs. 
240 | """ 241 | soup = BeautifulSoup(granule_search_response, 'html.parser') 242 | opendap_list = [] 243 | for opendap_link in soup.find_all('link'): 244 | href = opendap_link.get('href') 245 | if 'opendap.jpl.nasa.gov/opendap/' in href: 246 | opendap_list.append(href) 247 | return opendap_list 248 | -------------------------------------------------------------------------------- /podaac/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/podaac/tests/__init__.py -------------------------------------------------------------------------------- /podaac/tests/ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nasa/podaacpy/4812ef96557b79a35f6566f22f9e1243856314d2/podaac/tests/ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc -------------------------------------------------------------------------------- /podaac/tests/drive_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
from ..drive import Drive
import os
from glob import glob
import unittest

class TestDrive(unittest.TestCase):
    """Integration tests for the Drive client (PO.DAAC Drive access).

    NOTE(review): these tests hit the live PO.DAAC services and read
    credentials from a checked-in podaac.ini — the ini path is relative to
    the current working directory, so they assume pytest/nose is run from
    the package root. TODO confirm; also confirm whether the checked-in
    credentials are intentionally public test credentials.
    """


    def test_init_with_file(self):
        # Credentials supplied via config file; username/password args are None.
        drive = Drive('./tests/podaac.ini', None, None)
        # Expected values mirror podaac/tests/podaac.ini exactly.
        self.assertEqual(drive.USERNAME, 'podaacpy')
        self.assertEqual(drive.PASSWORD, 'hZHYQ17yuag25zivK8F')
        self.assertEqual(drive.URL, 'https://podaac-tools.jpl.nasa.gov/drive/files')

    def test_init_with_username_password_url(self):
        # Explicit credentials override the (absent) config file.
        drive = Drive(None, 'podaac', 'ZAnTpYo', 'https://podaac-tools.jpl.nasa.gov/...')
        self.assertEqual(drive.USERNAME, 'podaac')
        self.assertEqual(drive.PASSWORD, 'ZAnTpYo')
        self.assertEqual(drive.URL, 'https://podaac-tools.jpl.nasa.gov/...')

    def test_mine_drive_urls_from_granule_search_and_download(self):
        # Canned granule-search response containing exactly 6 granule entries.
        # NOTE(review): this literal appears to have lost its XML markup in a
        # copy/paste — compare against a live granule_search response; the
        # mining code presumably parses tags that are no longer present.
        granule_search_result = ' PO.DAAC Granule Search Results 2019-08-05T02:27:07.779386Z tag:podaac.jpl.nasa.gov,2019-08-05 PO.DAAC Granule Search Service 6 0 400 ascat_20180913_020000_metopa_61749_eps_o_coa_2401_ovw.l2.nc 2018-09-13T02:00:00Z PODAAC-ASOP2-12C01:ascat_20180913_020000_metopa_61749_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41408000000001 0.01425 89.30676000000001 359.99573000000004 0.01425 -89.41408000000001 359.99573000000004 89.30676000000001 2018-09-13T02:00:00Z 2018-09-13T03:41:58Z PODAAC-ASOP2-12C01:ascat_20180913_020000_metopa_61749_eps_o_coa_2401_ovw.l2.nc 2018-09-13T02:00:00Z/2018-09-13T03:41:58Z ascat_20180912_003900_metopa_61734_eps_o_coa_2401_ovw.l2.nc 2018-09-12T00:39:00Z PODAAC-ASOP2-12C01:ascat_20180912_003900_metopa_61734_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41189000000001 0.0015600000000000002 89.30624 359.98615 0.0015600000000000002 -89.41189000000001 359.98615 89.30624 2018-09-12T00:39:00Z 2018-09-12T02:20:58Z PODAAC-ASOP2-12C01:ascat_20180912_003900_metopa_61734_eps_o_coa_2401_ovw.l2.nc 2018-09-12T00:39:00Z/2018-09-12T02:20:58Z ascat_20180912_022100_metopa_61735_eps_o_coa_2401_ovw.l2.nc 2018-09-12T02:21:00Z PODAAC-ASOP2-12C01:ascat_20180912_022100_metopa_61735_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41475000000001 0.00415 89.30636000000001 359.99426000000005 0.00415 -89.41475000000001 359.99426000000005 89.30636000000001 2018-09-12T02:21:00Z 2018-09-12T04:02:58Z PODAAC-ASOP2-12C01:ascat_20180912_022100_metopa_61735_eps_o_coa_2401_ovw.l2.nc 2018-09-12T02:21:00Z/2018-09-12T04:02:58Z ascat_20180913_134800_metopa_61756_eps_o_coa_2401_ovw.l2.nc 2018-09-13T13:48:00Z PODAAC-ASOP2-12C01:ascat_20180913_134800_metopa_61756_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41489000000001 0.00041000000000000005 89.30873000000001 359.99274 0.00041000000000000005 -89.41489000000001 359.99274 89.30873000000001 2018-09-13T13:48:00Z 2018-09-13T15:29:58Z PODAAC-ASOP2-12C01:ascat_20180913_134800_metopa_61756_eps_o_coa_2401_ovw.l2.nc 2018-09-13T13:48:00Z/2018-09-13T15:29:58Z ascat_20180914_013900_metopa_61763_eps_o_coa_2401_ovw.l2.nc 2018-09-14T01:39:00Z PODAAC-ASOP2-12C01:ascat_20180914_013900_metopa_61763_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41182 0.0064600000000000005 89.3054 359.9968 0.0064600000000000005 -89.41182 359.9968 89.3054 2018-09-14T01:39:00Z 2018-09-14T03:20:58Z PODAAC-ASOP2-12C01:ascat_20180914_013900_metopa_61763_eps_o_coa_2401_ovw.l2.nc 2018-09-14T01:39:00Z/2018-09-14T03:20:58Z ascat_20180912_140900_metopa_61742_eps_o_coa_2401_ovw.l2.nc 2018-09-12T14:09:00Z PODAAC-ASOP2-12C01:ascat_20180912_140900_metopa_61742_eps_o_coa_2401_ovw.l2.nc PODAAC-ASOP2-12C01 ASCATA-L2-Coastal -89.41476 0.008650000000000001 89.30516 359.99994000000004 0.008650000000000001 -89.41476 359.99994000000004 89.30516 2018-09-12T14:09:00Z 2018-09-12T15:50:58Z PODAAC-ASOP2-12C01:ascat_20180912_140900_metopa_61742_eps_o_coa_2401_ovw.l2.nc 2018-09-12T14:09:00Z/2018-09-12T15:50:58Z '
        drive = Drive('./tests/podaac.ini', None, None)
        # Six granule entries in the canned response => six Drive URLs mined.
        drive_urls = drive.mine_drive_urls_from_granule_search(granule_search_response=(str(granule_search_result)))
        self.assertEqual(6, len(drive_urls))
        # Download all six granules under ./podaac/tests/ (network I/O);
        # downloads land in an allData/ subtree, mirrored from Drive.
        drive.download_granules(granule_collection=drive_urls, path='./podaac/tests/')
        # Recursively count downloaded *.nc files and expect one per URL.
        # NOTE(review): downloaded files are not cleaned up afterwards — re-runs
        # rely on overwrite; confirm this is intended.
        self.assertEqual(6, len([y for x in os.walk('./podaac/tests/allData') for y in glob(os.path.join(x[0], '*.nc'))]))
import unittest
import json
import requests
import os
from future.moves.urllib.error import HTTPError
from ..l2ss import L2SS
from nose.tools import assert_raises


class TestL2SS(unittest.TestCase):
    """Integration tests for the L2SS (Level 2 Subsetting Service) client.

    These tests exercise the live PO.DAAC L2SS web service, so they require
    network access and a reachable service endpoint.
    """

    @classmethod
    def setUp(cls):
        # A fresh client per test; L2SS is stateless apart from its base URL.
        cls.l2ss = L2SS()

    # test case for the function dataset_search()
    def test_dataset_search(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        variable = ['Surface Winds']
        sensor = ['Advanced Scatterometer']
        provider = ['KNMI']
        start_time = '2016-12-4T22:39:52Z'
        start_index = '0'
        items_per_page = '7'
        dataset = self.l2ss.dataset_search(dataset_id=dataset_id, variable=variable, start_time=start_time, sensor=sensor,
                                           provider=provider, start_index=start_index, items_per_page=items_per_page)
        dataset_json = json.loads(dataset)

        # The first hit must be the dataset we filtered on.
        assert dataset_json['response']['docs'][0][
            'Dataset-PersistentId'] == dataset_id

    # test case for the function dataset_variables()
    def test_dataset_variables(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        variables = json.loads(
            self.l2ss.dataset_variables(dataset_id=dataset_id))
        variables_data = variables['imgVariables']

        assert len(variables_data) != 0
        # A malformed dataset id must surface as an HTTP error, not be swallowed.
        assert_raises(requests.exceptions.HTTPError,
                      self.l2ss.dataset_variables, dataset_id='PODAAC')

    # test case for the function granule_search()
    def test_granule_search(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        bbox = '-180,-90,180,90'
        start_time = '2016-07-16T04:18:00Z'
        end_time = '2016-07-16T05:56:56Z'
        items_per_page = '7'
        start_index = '0'
        name = 'ascat_20160716_041800_metopa_50541_eps_o_250_2401_ovw.l2.nc'
        sort = 'Granule-Name asc'
        granules = self.l2ss.granule_search(dataset_id=dataset_id, bbox=bbox, start_time=start_time, end_time=end_time,
                                            items_per_page=items_per_page, start_index=start_index, name=name, sort=sort)
        granules_json = json.loads(granules)

        assert granules_json['response']['docs'][0][
            'Granule-DatasetId'] == dataset_id

    # test case for the function granules_availability()
    def test_granules_availability(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        bbox = '-180,-90,180,90'
        start_time = '2014-10-12T11:42:00Z'
        end_time = '2016-10-12T11:42:00Z'
        gap = 'DAY'
        granule_availability = json.loads(self.l2ss.granules_availability(
            dataset_id=dataset_id, start_time=start_time, end_time=end_time, bbox=bbox, gap=gap))
        availability_data = granule_availability[
            'facet_counts']['facet_dates']['Granule-StartTime']

        assert len(availability_data) != 0
        # Missing time range/bbox must raise rather than return empty data.
        assert_raises(requests.exceptions.HTTPError,
                      self.l2ss.granules_availability, dataset_id=dataset_id)

    # test case for the function granule_preview_image()
    def test_granule_preview_image(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        granule = 'ascat_20140520_005700_metopa_39344_eps_o_250_2300_ovw.l2.nc'
        year = '2014'
        day = '140'
        variable = 'wind_speed'
        path = os.path.join(os.path.dirname(__file__))
        image = self.l2ss.granule_preview_image(
            dataset_id=dataset_id, granule=granule, year=year, day=day, variable=variable, path=path)

        assert image is not None
        # Clean up the PNG the call wrote next to this test module.
        path = os.path.join(os.path.dirname(__file__),
                            dataset_id + '.png')
        os.remove(path)
        # An unknown variable name must raise an HTTPError from urllib.
        assert_raises(HTTPError,
                      self.l2ss.granule_preview_image, dataset_id=dataset_id, granule=granule, year=year, day=day, variable='wind_spee')

    # test case for the function image_palette()
    def test_image_palette(self):
        palette_name = 'paletteMedspirationIndexed'
        test_palette_name = 'medspiration'
        palette = json.loads(
            self.l2ss.image_palette(palette_name=palette_name))
        palette_name = palette['Palette'][
            'attributes']['attribute'][0]['value']

        assert palette_name == test_palette_name
        assert_raises(requests.exceptions.HTTPError,
                      self.l2ss.image_palette, palette_name='SomeUnknownPalette')

    # test case for the function granule_download()
    def test_granule_download(self):
        # Minimal subsetting job: one granule, four variables, global bbox.
        query = {
            "email": "unknown@unknown.com",
            "query":
            [
                {
                    "compact": "true",
                    "datasetId": "PODAAC-ASOP2-25X01",
                    "bbox": "-180,-90,180,90",
                    "variables": ["lat", "lon", "time", "wind_speed"],
                    "granuleIds": ["ascat_20140520_005700_metopa_39344_eps_o_250_2300_ovw.l2.nc"]
                }
            ]
        }
        self.l2ss.granule_download(query_string=query)
        # The subsetted granule is written to the current working directory.
        assert os.path.isfile(
            './subsetted-ascat_20140520_005700_metopa_39344_eps_o_250_2300_ovw.l2.nc')
        os.remove(
            './subsetted-ascat_20140520_005700_metopa_39344_eps_o_250_2300_ovw.l2.nc')

    # test case for the function subset_status()
    def test_subset_status(self):
        # A bogus token must be rejected by the service.
        token = 'FakeToken'
        assert_raises(Exception, self.l2ss.subset_status, token=token)
from ..mcc import MCC
import os
import requests
import json
from nose.tools import assert_raises
import unittest

class TestMCC(unittest.TestCase):
    """Integration tests for the MCC (Metadata Compliance Checker) client.

    Tests exercise the live MCC web service with both remote URLs and the
    bundled sample netCDF granule, covering the json/html/pdf response formats.
    """

    def setUp(self):
        # Fresh client per test.
        self.mcc = MCC()

    def test_check_remote_file(self):
        # A known-good netCDF file hosted by the IOOS compliance-checker project.
        url_upload = "https://github.com/ioos/compliance-checker/raw/master/compliance_checker/tests/data/test_cdl_nc_file.nc"
        data = self.mcc.check_remote_file('CF', url_upload)
        data_json = json.loads(data)

        assert data is not None
        assert data_json["model"] == "NETCDF3_CLASSIC"
        assert data_json["fn"] == "test_cdl_nc_file.nc"
        # A non-resolvable URL must raise rather than return a report.
        assert_raises(requests.exceptions.HTTPError, self.mcc.check_remote_file,
                      checkers='CF', url_upload='abc.xyz.com')

    # Tests mcc.check_local_file with the default JSON response format
    def test_check_local_file_json_response(self):
        file_upload = "ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc"
        path = os.path.join(os.path.dirname(__file__), file_upload)
        data = self.mcc.check_local_file(1.1, 'L2P', path, response='json')
        data_json = json.loads(data)

        assert data is not None
        assert data_json["model"] == "NETCDF3_CLASSIC"
        assert data_json["fn"] == file_upload
        # A blank path must raise.
        assert_raises(Exception, self.mcc.check_local_file, 1.1, 'L2P', " ")

    # Tests mcc.check_local_file with the html response format
    def test_check_local_file_html_response(self):
        file_upload = "ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc"
        path = os.path.join(os.path.dirname(__file__), file_upload)
        data = self.mcc.check_local_file(1.1, 'L2P', path, response='html')

        # NOTE(review): the original asserted startswith('') which is vacuously
        # true (the intended prefix literal appears to have been lost); any
        # HTML payload begins with '<' once stripped, so assert that instead.
        self.assertTrue(data.strip().startswith('<'))
        assert_raises(Exception, self.mcc.check_local_file, 1.1, 'L2P', " ")

    # Tests mcc.check_local_file with the pdf response format
    def test_check_local_file_pdf_response(self):
        file_upload = "ascat_20130719_230600_metopa_35024_eps_o_250_2200_ovw.l2_subsetted_.nc"
        path = os.path.join(os.path.dirname(__file__), file_upload)
        data = self.mcc.check_local_file(1.1, 'L2P', path, response='pdf')

        assert data is not None
        # PDF responses carry the standard PDF magic header.
        self.assertTrue(data.strip().startswith('%PDF-1.4'))
        assert_raises(Exception, self.mcc.check_local_file, 1.1, 'L2P', " ")
14 | 15 | from ..oceancolor import OceanColor 16 | import os 17 | from nose.tools import assert_raises 18 | import unittest 19 | 20 | 21 | class TestOceanColor(unittest.TestCase): 22 | 23 | @classmethod 24 | def setUp(cls): 25 | cls.oceancolor = OceanColor() 26 | 27 | # test case for the function file_search() 28 | def test_file_search(self): 29 | data = self.oceancolor.file_search(sensor='octs', sdate='1996-11-01', edate='1997-01-01', 30 | dtype='L3b', add_url='1', results_as_file='1', search='*DAY_CHL*') 31 | 32 | assert data != None 33 | assert isinstance(data, str) 34 | assert len(data) != 0 35 | 36 | # must have a valid sensor AND either 'search' OR 'sub-id' 37 | data2 = self.oceancolor.file_search(sensor='octs', sub_id='2218') 38 | assert data2 != None 39 | 40 | assert_raises(Exception, self.oceancolor.file_search, sensor='random') 41 | assert_raises(Exception, self.oceancolor.file_search, sdate='1996-11-01', edate='1997-01-01', 42 | dtype='L3b', add_url='1', results_as_file='1', search='*DAY_CHL*') 43 | 44 | # test case for the function get_file(() 45 | def test_get_file(self): 46 | url = 'https://oceandata.sci.gsfc.nasa.gov/cgi/getfile/O1996307.L3b_DAY_CHL.nc' 47 | path = os.path.dirname(os.path.abspath(__file__)) 48 | granule_name = self.oceancolor.get_file(url, path) 49 | 50 | assert granule_name != None 51 | assert_raises(Exception, self.oceancolor.get_file, 52 | url='ABCDEF') 53 | 54 | path = os.path.join(os.path.dirname(__file__), granule_name) 55 | os.remove(path) 56 | -------------------------------------------------------------------------------- /podaac/tests/podaac.ini: -------------------------------------------------------------------------------- 1 | [drive] 2 | urs_username = podaacpy 3 | urs_password = hZHYQ17yuag25zivK8F 4 | webdav_url = https://podaac-tools.jpl.nasa.gov/drive/files 5 | -------------------------------------------------------------------------------- /podaac/tests/podaac_test.py: 
from ..podaac import Podaac
from ..podaac_utils import PodaacUtils
import os
import requests
import defusedxml.ElementTree as ET
from nose.tools import assert_raises
import unittest
from future.moves.urllib.error import HTTPError


class TestPodaac(unittest.TestCase):
    """Integration tests for the Podaac web-service client and PodaacUtils.

    All tests call the live PO.DAAC web services; network access is required.
    XML responses are parsed with defusedxml and asserted via positional
    child access, so they are sensitive to the service's response layout.
    """

    @classmethod
    def setUp(cls):
        # Fresh clients per test; both are stateless wrappers over HTTP calls.
        cls.podaac = Podaac()
        cls.podaac_utils = PodaacUtils()

    # test case for the function dataset_metadata()
    def test_dataset_metadata(self):
        dataset_id = 'PODAAC-CCF35-01AD5'
        dataset_short_name = 'CCMP_MEASURES_ATLAS_L4_OW_L3_5A_5DAY_WIND_VECTORS_FLK'
        dataset_md = self.podaac.dataset_metadata(
            dataset_id, dataset_short_name)
        # root[1][0] is the element carrying the shortName id attribute.
        root = ET.fromstring(dataset_md.encode('utf-8'))
        short_name = root[1][0].attrib

        assert dataset_md is not None
        assert str(short_name['id']) == dataset_short_name
        # 'is' is not a valid _format value; the service must reject it.
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.dataset_metadata,
                      'PODAAC-CCF35-01AD5',
                      'CCMP_MEASURES_ATLAS_L4_OW_L3_5A_5DAY_WIND_VECTORS_FLK',
                      'is')
        assert_raises(Exception, self.podaac.dataset_metadata,
                      short_name='CCMP_MEASURES_ATLAS_L4_OW_L3_5A_5DAY_WIND_VECTORS_FLK')

    # test case for the function granule_metadata()
    def test_granule_metadata(self):
        dataset_id = 'PODAAC-GHK10-41N01'
        dataset_short_name = 'NAVO-L4HR1m-GLOB-K10_SST'
        granule_name = '20180312-NAVO-L4HR1m-GLOB-v01-fv01_0-K10_SST.nc'

        granule_md = self.podaac.granule_metadata(
            dataset_id=dataset_id, short_name=dataset_short_name, granule_name=granule_name)
        root = ET.fromstring(granule_md.encode('utf-8'))
        short_name = root[1][0].attrib

        assert granule_md is not None
        assert str(short_name['id']) == dataset_short_name
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.granule_metadata, dataset_id='PODAAC', _format='is')
        assert_raises(Exception,
                      self.podaac.granule_metadata, _format='is')

    # test case for the function last24hours_datacasting_granule_md()
    def test_last24hours_datacasting_granule_md(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        dataset_short_name = 'ASCATA-L2-25km'
        _format = 'datacasting'
        items_per_page = 10
        granule_md = self.podaac.last24hours_datacasting_granule_md(
            dataset_id, dataset_short_name, _format, items_per_page)
        root = ET.fromstring(granule_md.encode('utf-8'))
        # root[0][3] is the channel element echoing the dataset id.
        dataset_id_ = root[0][3].text

        assert granule_md is not None
        assert dataset_id_ == dataset_id
        # 'iso' is not supported for this endpoint.
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.last24hours_datacasting_granule_md,
                      'PODAAC-ASOP2-25X01', 'ASCATA-L2-25km', _format='iso')
        assert_raises(Exception, self.podaac.last24hours_datacasting_granule_md,
                      short_name='ASCATA-L2-25km', _format='iso')

    # test case for the function dataset_variables()
    def test_dataset_variable(self):
        # A malformed dataset id must surface as an HTTP error.
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.dataset_variables, dataset_id='PODAAC')

    # test case for the function dataset_search()
    def test_dataset_search(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        short_name = 'ASCATA-L2-25km'
        start_time = '2000-01-01T01:30:00Z'
        end_time = '2012-02-01T01:30:00Z'
        start_index = '0'
        keyword = 'modis'
        instrument = 'MODIS'
        satellite = 'AQUA'
        file_format = 'NetCDF'
        status = 'OPEN'
        process_level = '2'
        sort_by = 'timeAsc'
        bbox = '-45,-45,45,45'
        datasets = self.podaac.dataset_search(
            dataset_id=dataset_id, short_name=short_name,
            start_time=start_time, end_time=end_time,
            start_index=start_index, keyword=keyword,
            instrument=instrument, satellite=satellite,
            file_format=file_format, status=status,
            process_level=process_level, sort_by=sort_by, bbox=bbox)
        root = ET.fromstring(datasets.encode('utf-8'))
        # root[3][0] holds the service name in the Atom feed.
        service_name = "PO.DAAC Dataset Search Service"
        test_service_name = root[3][0].text

        assert datasets is not None
        assert test_service_name == service_name
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.dataset_search, _format='iso')

    # test case for the function granule_search()
    def test_granule_search(self):
        test_dataset_id = 'PODAAC-ASOP2-25X01'
        start_time = '2013-01-01T01:30:00Z'
        end_time = '2014-01-01T00:00:00Z'
        bbox = '-45,-45,45,45'
        start_index = '1'
        _format = 'atom'
        granules = self.podaac.granule_search(
            dataset_id=test_dataset_id,
            start_time=start_time,
            end_time=end_time,
            bbox=bbox,
            start_index=start_index,
            _format=_format)
        root = ET.fromstring(granules.encode('utf-8'))
        # First entry's datasetId is '<id>.<version>'; keep the id part only.
        dataset_id = root.find('{http://www.w3.org/2005/Atom}entry').find(
            '{https://podaac.jpl.nasa.gov/opensearch/}datasetId').text.rsplit('.')[0]

        assert granules is not None
        assert test_dataset_id == dataset_id
        assert_raises(requests.exceptions.HTTPError,
                      self.podaac.granule_search, dataset_id='PODAAC', _format='html')
        assert_raises(Exception,
                      self.podaac.granule_search, _format='html')

    # test case for the function granule_preview()
    def test_granule_preview(self):
        dataset_id = 'PODAAC-ASOP2-25X01'
        image_variable = 'wind_speed'
        path = os.path.dirname(os.path.abspath(__file__))
        image_data = self.podaac.granule_preview(
            dataset_id=dataset_id,
            image_variable=image_variable,
            path=path)

        assert image_data is not None

        # Remove the PNG the call wrote next to this test module.
        path = os.path.join(os.path.dirname(__file__),
                            dataset_id + '.png')
        os.remove(path)
        assert_raises(Exception,
                      self.podaac.granule_preview, image_variable='hello')
        assert_raises(HTTPError,
                      self.podaac.granule_preview,
                      dataset_id='PODAAC-ASOP2-25X01', image_variable='hello')

    # test case for the function granule_subset()
    def test_granule_subset(self):
        # test.json describes a single-granule subsetting job (see tests dir).
        path1 = os.path.dirname(os.path.abspath(__file__)) + "/test.json"
        path2 = os.path.dirname(__file__)
        self.podaac.granule_subset(input_file_path=path1, path=path2)
        granulename = '/subsetted-ascat_20160409_113000_metopa_49153_eps_o_250_2401_ovw.l2.nc'
        # Best-effort cleanup; the subset job may legitimately produce nothing.
        if os.path.isfile(path2 + granulename):
            os.remove(path2 + granulename)

    # test case for the function subset_status()
    def test_subset_status(self):
        # Unrecognised tokens are reported as status "unknown".
        test_status = "unknown"
        token_1 = "a"
        status_1 = self.podaac.subset_status(token=token_1)
        token_2 = "012"
        status_2 = self.podaac.subset_status(token=token_2)

        assert test_status == status_1
        assert test_status == status_2

    # test case for the function extract_l4_granule()
    def test_extract_l4_granule(self):
        dataset_id = 'PODAAC-GHCMC-4FM02'
        path = os.path.dirname(os.path.abspath(__file__))
        granule_name = self.podaac.extract_l4_granule(
            dataset_id, path)

        assert granule_name is not None
        assert_raises(Exception, self.podaac.extract_l4_granule,
                      dataset_id='ABCDEF')

        # Clean up the downloaded granule.
        path = os.path.join(os.path.dirname(__file__), granule_name)
        os.remove(path)

    # test case for the function list_all_available_granule_search_dataset_ids()
    def test_list_available_granule_search_dataset_ids(self):
        data = self.podaac_utils.list_all_available_granule_search_dataset_ids()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_all_available_granule_search_dataset_short_names()
    def test_list_available_granule_search_dataset_short_names(self):
        data = self.podaac_utils.list_all_available_granule_search_dataset_short_names()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_available_granule_search_level2_dataset_ids()
    def test_list_available_granule_search_level2_dataset_ids(self):
        data = self.podaac_utils.list_available_granule_search_level2_dataset_ids()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_available_granule_search_level2_dataset_short_names()
    def test_list_available_granule_search_level2_dataset_short_names(self):
        data = self.podaac_utils.list_available_granule_search_level2_dataset_short_names()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function list_all_available_extract_granule_dataset_ids()
    def test_list_available_extract_granule_dataset_ids(self):
        data = self.podaac_utils.list_all_available_extract_granule_dataset_ids()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_all_available_extract_granule_dataset_short_names()
    def test_list_available_extract_granule_dataset_short_names(self):
        data = self.podaac_utils.list_all_available_extract_granule_dataset_short_names()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_level4_dataset_ids()
    def test_list_level4_dataset_ids(self):
        data = self.podaac_utils.list_level4_dataset_ids()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # list_level4_dataset_short_names()
    def test_list_level4_dataset_short_names(self):
        data = self.podaac_utils.list_level4_dataset_short_names()

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0

    # test case for the function
    # mine_opendap_urls_from_granule_search()
    def test_mine_opendap_urls_from_granule_search(self):
        # Obtain a live granule-search response, then mine OPeNDAP URLs from it.
        test_dataset_id = 'PODAAC-ASOP2-25X01'
        start_time = '2013-01-01T01:30:00Z'
        end_time = '2014-01-01T00:00:00Z'
        bbox = '-45,-45,45,45'
        start_index = '1'
        _format = 'atom'
        granules = self.podaac.granule_search(
            dataset_id=test_dataset_id,
            start_time=start_time,
            end_time=end_time,
            bbox=bbox,
            start_index=start_index,
            _format=_format)
        data = self.podaac_utils.mine_opendap_urls_from_granule_search(
            granule_search_response=granules)

        assert data is not None
        assert isinstance(data, list)
        assert len(data) != 0
"bbox":"-180,-90,180,90", 8 | "variables":["wvc_index","model_speed","model_dir","ice_prob","ice_age","wvc_quality_flag","wind_speed","wind_dir","bs_distance","lat","lon","time"], 9 | "compact":false 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE.txt file distributed with 3 | # this work for additional information regarding copyright ownership. The ASF 4 | # licenses this file to you under the Apache License, Version 2.0 (the 5 | # "License"); you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 15 | 16 | [metadata] 17 | description-file = README.rst 18 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016-2019 California Institute of Technology. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
import os.path
from setuptools import find_packages, setup

# Package data
# ------------
_author = 'Lewis John McGibbney'
_author_email = 'lewis.j.mcgibbney@jpl.nasa.gov'
_classifiers = [
    'Environment :: Console',
    'Intended Audience :: Developers',
    'Intended Audience :: Information Technology',
    'Intended Audience :: Science/Research',
    'Topic :: Scientific/Engineering',
    'Development Status :: 5 - Production/Stable',
    'License :: OSI Approved :: Apache Software License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Internet :: WWW/HTTP',
    'Topic :: Software Development :: Libraries :: Python Modules',
]
_description = 'PO.DAAC Python API'
_download_url = 'http://pypi.python.org/pypi/podaacpy/'
_requirements = ["beautifulsoup4", "configparser", "defusedxml", "future", "pathlib2", "requests"]
_keywords = ['dataset', 'granule', 'compliance', 'nasa', 'jpl', 'podaac']
_license = 'Apache License, Version 2.0'
_long_description = 'A python utility library for interacting with NASA JPLs PO.DAAC'
_name = 'podaacpy'
_namespaces = []
_test_suite = 'podaac.tests'
_url = 'https://github.com/nasa/podaacpy'
_version = '2.4.0'
_zip_safe = False

# Setup Metadata
# --------------


def _read(*rnames):
    """Return the text of a file located relative to this setup.py.

    Uses a context manager so the handle is closed deterministically
    (the original left it to the garbage collector).
    """
    with open(os.path.join(os.path.dirname(__file__), *rnames)) as handle:
        return handle.read()


# Build a reST-style title banner followed by the README contents.
_header = '*' * len(_name) + '\n' + _name + '\n' + '*' * len(_name)
_longDescription = '\n\n'.join([
    _header,
    _read('README.rst')
])
# NOTE(review): doc.txt receives the full README-based description while
# setup() below is passed the short _long_description — confirm this split
# is intentional.
with open('doc.txt', 'w') as doc_file:
    doc_file.write(_longDescription)


setup(
    author=_author,
    author_email=_author_email,
    classifiers=_classifiers,
    description=_description,
    download_url=_download_url,
    include_package_data=True,
    install_requires=_requirements,
    keywords=_keywords,
    license=_license,
    long_description=_long_description,
    name=_name,
    namespace_packages=_namespaces,
    packages=find_packages(),
    test_suite=_test_suite,
    url=_url,
    version=_version,
    zip_safe=_zip_safe,
)