├── .github ├── ISSUE_TEMPLATE │ ├── add-a-new-data-layer-to-a-dashboard--high-level-steps-.md │ ├── add-new-dataset-simple.md │ ├── identify-arco-dataset-for-dashboard.md │ └── story.md └── workflows │ ├── cicd.yml │ ├── deploy.yml │ ├── deploy_dev.yml │ ├── deploy_staging.yml │ ├── dispatch_deploy.yml │ ├── test_docker_lambda.yml │ └── test_python_lambda.yml ├── .gitignore ├── ARCHITECTURE.md ├── LICENSE ├── OPERATING.md ├── README.md ├── data ├── collections │ ├── CMIP245-winter-median-pr.json │ ├── CMIP245-winter-median-ta.json │ ├── CMIP585-winter-median-pr.json │ ├── CMIP585-winter-median-ta.json │ ├── HLSL30.002.json │ ├── HLSS30.002.json │ ├── IS2SITMOGR4-cog.json │ ├── OMI_trno2-COG.json │ ├── OMSO2PCA-COG.json │ ├── bangladesh-landcover-2001-2020.json │ ├── black_marble_hd.json │ ├── caldor-fire-behavior.json │ ├── caldor-fire-burn-severity.json │ ├── co2-diff.json │ ├── co2-mean.json │ ├── ecco-surface-height-change.json │ ├── epa-emissions-2012 │ │ ├── annual │ │ │ ├── EPA-annual-emissions_1A_Combustion_Mobile.json │ │ │ ├── EPA-annual-emissions_1A_Combustion_Stationary.json │ │ │ ├── EPA-annual-emissions_1B1a_Abandoned_Coal.json │ │ │ ├── EPA-annual-emissions_1B1a_Coal_Mining_Surface.json │ │ │ ├── EPA-annual-emissions_1B1a_Coal_Mining_Underground.json │ │ │ ├── EPA-annual-emissions_1B2a_Petroleum.json │ │ │ ├── EPA-annual-emissions_1B2b_Natural_Gas_Distribution.json │ │ │ ├── EPA-annual-emissions_1B2b_Natural_Gas_Processing.json │ │ │ ├── EPA-annual-emissions_1B2b_Natural_Gas_Production.json │ │ │ ├── EPA-annual-emissions_1B2b_Natural_Gas_Transmission.json │ │ │ ├── EPA-annual-emissions_2B5_Petrochemical_Production.json │ │ │ ├── EPA-annual-emissions_2C2_Ferroalloy_Production.json │ │ │ ├── EPA-annual-emissions_4A_Enteric_Fermentation.json │ │ │ ├── EPA-annual-emissions_4B_Manure_Management.json │ │ │ ├── EPA-annual-emissions_4C_Rice_Cultivation.json │ │ │ ├── EPA-annual-emissions_4F_Field_Burning.json │ │ │ ├── 
EPA-annual-emissions_5_Forest_Fires.json │ │ │ ├── EPA-annual-emissions_6A_Landfills_Industrial.json │ │ │ ├── EPA-annual-emissions_6A_Landfills_Municipal.json │ │ │ ├── EPA-annual-emissions_6B_Wastewater_Treatment_Domestic.json │ │ │ ├── EPA-annual-emissions_6B_Wastewater_Treatment_Industrial.json │ │ │ └── EPA-annual-emissions_6D_Composting.json │ │ ├── daily │ │ │ └── EPA-daily-emissions_5_Forest_Fires.json │ │ └── monthly │ │ │ ├── EPA-monthly-emissions_1A_Combustion_Stationary.json │ │ │ ├── EPA-monthly-emissions_1B2a_Petroleum.json │ │ │ ├── EPA-monthly-emissions_1B2b_Natural_Gas_Production.json │ │ │ ├── EPA-monthly-emissions_4B_Manure_Management.json │ │ │ ├── EPA-monthly-emissions_4C_Rice_Cultivation.json │ │ │ └── EPA-monthly-emissions_4F_Field_Burning.json │ ├── facebook-population-density.json │ ├── geoglam.json │ ├── grdi-vnl-slope-raster.json │ ├── hurricane_blue_tarps.json │ ├── hurricane_planetscope_images.json │ ├── lis-tws-anomaly.json │ ├── lis-tws-nonstationarity-index.json │ ├── lis-tws-trend.json │ ├── modis-annual-lai-2003-2020.json │ ├── mtbs-burn-severity.json │ ├── nceo-africa-2017.json │ ├── nightlights-500m-daily.json │ ├── nightlights-hd-1band.json │ ├── nightlights-hd-monthly.json │ ├── nightlights_3bands.json │ ├── no2-monthly-diff.json │ ├── no2-monthly.json │ ├── snow-projections-diff-245.json │ ├── snow-projections-diff-585.json │ ├── snow-projections-median-245.json │ ├── snow-projections-median-585.json │ ├── social-vulnerability-index-household-nopop.json │ ├── social-vulnerability-index-household.json │ ├── social-vulnerability-index-housing-nopop.json │ ├── social-vulnerability-index-housing.json │ ├── social-vulnerability-index-minority-nopop.json │ ├── social-vulnerability-index-minority.json │ ├── social-vulnerability-index-overall-nopop.json │ ├── social-vulnerability-index-overall.json │ ├── social-vulnerability-index-socioeconomic-nopop.json │ └── social-vulnerability-index-socioeconomic.json └── step_function_inputs │ 
├── CMIP245-winter-median-pr.json │ ├── CMIP245-winter-median-ta.json │ ├── CMIP585-winter-median-pr.json │ ├── CMIP585-winter-median-ta.json │ ├── HLSL30.002-ida.json │ ├── HLSL30.002-maria.json │ ├── HLSS30.002-ida.json │ ├── HLSS30.002-maria.json │ ├── IS2SITMOGR4-cog.json │ ├── MO_NPP_npp_vgpm.json │ ├── OMI_trno2-COG.json │ ├── OMSO2PCA-cog.json │ ├── bangladesh-landcover-2001-2020.json │ ├── blue-tarp-detection.json │ ├── blue-tarp-planetscope.json │ ├── caldor-fire-behavior.json │ ├── caldor-fire-burn-severity.json │ ├── co2-diff.json │ ├── co2-mean.json │ ├── ecco-surface-height-change.json │ ├── epa-emissions-2012-annual.json │ ├── epa-emissions-2012-daily.json │ ├── epa-emissions-2012-monthly.json │ ├── epa-emissions-test.json │ ├── facebook-population-density.json │ ├── geoglam.json │ ├── grdi.json │ ├── lis-tws-anomaly.json │ ├── lis-tws-nonstationarity-index.json │ ├── lis-tws-trend.json │ ├── modis-annual-lai-2003-2020.json │ ├── mtbs-burn-severity.json │ ├── nceo-africa-2017.json │ ├── nightlights-500m-daily.json │ ├── nightlights-hd-1band.json │ ├── nightlights-hd-3bands.json │ ├── nightlights-hd-monthly-blackmarble.json │ ├── nightlights-hd-monthly.json │ ├── no2-monthly-diff.json │ ├── no2-monthly-orig.json │ ├── snow-projections-diff-245.json │ ├── snow-projections-diff-585.json │ ├── snow-projections-median-245.json │ ├── snow-projections-median-585.json │ └── social-vulnerability-index.json ├── deploy ├── .gitignore ├── README.md ├── app.py ├── cdk.json ├── cdk │ ├── __init__.py │ ├── lambda_stack.py │ ├── queue_stack.py │ └── step_function_stack.py ├── config.py └── requirements.txt ├── env.sample.sh ├── lambdas ├── build-stac │ ├── Dockerfile │ ├── README.md │ ├── handler.py │ ├── requirements-test.txt │ ├── requirements.txt │ ├── tests │ │ ├── __init__.py │ │ ├── conftest.py │ │ ├── test_handler.py │ │ └── test_regex.py │ └── utils │ │ ├── __init__.py │ │ ├── events.py │ │ ├── regex.py │ │ ├── role.py │ │ └── stac.py ├── cmr-query │ ├── 
Dockerfile │ ├── README.md │ ├── handler.py │ └── requirements.txt ├── cogify │ ├── Dockerfile │ ├── ERA5 │ │ └── fetch.py │ ├── README.md │ ├── example.ini │ ├── handler.py │ └── requirements.txt ├── data-transfer │ ├── conftest.py │ ├── handler.py │ ├── requirements-test.txt │ ├── requirements.txt │ └── tests │ │ ├── __init__.py │ │ └── test_handler.py ├── proxy │ ├── handler.py │ └── requirements.txt ├── s3-discovery │ ├── Dockerfile │ ├── README.md │ ├── handler.py │ └── requirements.txt └── submit-stac │ ├── Dockerfile │ ├── README.md │ ├── handler.py │ └── requirements.txt ├── poetry.lock ├── pyproject.toml ├── scripts ├── __init__.py ├── cdk.py ├── collection.py ├── item.py └── utils.py └── veda-data_ingest_pipeline.png /.github/ISSUE_TEMPLATE/add-a-new-data-layer-to-a-dashboard--high-level-steps-.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Add a new dataset to the API (high-level steps) 3 | about: All steps for adding a new dataset or indicator to the VEDA STAC API 4 | title: Add a new dataset to the API (high-level steps) 5 | labels: 'dataset' 6 | assignees: '' 7 | 8 | --- 9 | 10 | # Research + prepare processing for the dataset: Identify the dataset and what the processing needs are 11 | 12 | 1. Identify dataset and where it will be accessed from (HTTP from DAAC vs S3, for example). Check it's a good source with science team. Ask about specific variables and required spatial and temporal extent. Note many datasets will require back processing (e.g. generating cloud-optimized data for historical data). 13 | 14 | [Future: 2. If the dataset is ongoing (i.e. new files are continuously added and should be included in the dashboard), design and construct the scheduling + forward-processing workflow.] 15 | 16 | 2. 
If necessary, create COG or any other conversion / processing code and verify the COG output with a data product expert (for example, someone at the DAAC which hosts the native format) by sharing in a visual interface. 17 | 18 | 3. Identify the point of contact and ensure someone is providing them updates! 19 | 20 | # Design the metadata and publish to the Dev API 21 | 22 | 1. If not already familiar with these conventions for generating STAC collection and item metadata: 23 | - Collections: https://github.com/NASA-IMPACT/delta-backend/issues/29 and STAC version 1.0 specification for collections 24 | - Items: https://github.com/NASA-IMPACT/delta-backend/issues/28 and STAC version 1.0 specification for items 25 | - NOTE: The delta-backend instructions are specific to datasets for the climate dashboard, however not all datasets are going to be a part of the visual layers for the dashboard so you can ignore the instructions that are specific to "dashboard" extension, "item_assets" in the collection and "cog_default" asset type in the item. 26 | 27 | A collection will need the following fields, some of which may be self-evident through the filename or an about page for the product, however there are many cases in which we may need to reach out to product owners to define the right values for these fields: 28 | 29 | - temporal interval 30 | - license 31 | - id 32 | - title 33 | - description 34 | - whether it is periodic or not on the dashboard 35 | - the dashboard time density 36 | 37 | 4. 
Review and follow https://github.com/NASA-IMPACT/cloud-optimized-data-pipelines/blob/main/OPERATING.md 38 | 39 | 40 | ## Publish to the Staging API 41 | 42 | Once the PR is approved, we can merge and publish those datasets to the Staging API 43 | 44 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/add-new-dataset-simple.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Add a new dataset to the API (simple) 3 | about: Gathering information for dataset ingestion 4 | title: Add 5 | labels: 'dataset' 6 | assignees: '' 7 | 8 | --- 9 | 10 | - [ ] Identify the point of contact and ensure someone is providing them updates: 11 | - [ ] Data provider has read [guidelines on data preparation](https://github.com/NASA-IMPACT/veda-workflows-api/blob/main/how-to.md#prepare-the-data) 12 | - [ ] Identify data location: 13 | - [ ] Number of items: 14 | - [ ] Verify that files are valid COGs (e.g. with [`rio cogeo validate`](https://cogeotiff.github.io/rio-cogeo/Is_it_a_COG/#3-cog-validation)) 15 | - [ ] Gather STAC collection metadata 16 | 17 | - id: 18 | - title: 19 | - description: 20 | - license: 21 | - provider(s): (producer, processor, licensor) 22 | - temporal interval: 23 | - whether it is periodic on the dashboard (periodic = regular time series of layers without gaps): 24 | - the dashboard time density: 25 | 26 | - [ ] Review and follow https://github.com/NASA-IMPACT/cloud-optimized-data-pipelines/blob/main/OPERATING.md 27 | - [ ] Open PR for publishing those datasets to the Staging API: 28 | - [ ] Notify QA / move ticket to QA state 29 | - [ ] Once approved, merge and close. 
30 | 31 | ## Resources on metadata 32 | 33 | If not already familiar with these conventions for generating STAC collection and item metadata: 34 | - Collections: https://github.com/NASA-IMPACT/delta-backend/issues/29 and STAC version 1.0 specification for collections 35 | - Items: https://github.com/NASA-IMPACT/delta-backend/issues/28 and STAC version 1.0 specification for items 36 | - NOTE: The delta-backend instructions are specific to datasets for the climate dashboard, however not all datasets are going to be a part of the visual layers for the dashboard so you can ignore the instructions that are specific to "dashboard" extension, "item_assets" in the collection and "cog_default" asset type in the item. 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/identify-arco-dataset-for-dashboard.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Identify ARCO dataset for dashboard 3 | about: Early identification of ARCO dataset and expert 4 | title: Identify ARCO datasets for thematic area of climate dashboard 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | [Direction given in Trilateral Dataset list](https://docs.google.com/spreadsheets/d/1WvwifHCN44fntlUKPDBMkfzpvrGQcbyMsBCFtn7Pu1M/edit#gid=1098798450) 11 | 12 | - [ ] Propose initial dataset(s) for thematic area climate dashboard 13 | - [ ] Check with Manil and team 14 | - [ ] Identify data experts who will validate COG for those datasets 15 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/story.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Story 3 | about: Detailed overview of a task 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## Epic 11 | 12 | 13 | ## Description 14 | 15 | 16 | ## Examples 17 | 18 | 19 | ### Concept Diagrams (If applicable) 20 | 21 | 22 | ### 
Acceptance Criteria: 23 | 24 | - [ ] Criteria 1 25 | - [ ] Criteria 2 26 | 27 | ## Checklist: 28 | 29 | 30 | - [ ] Epic Link 31 | - [ ] Detailed description 32 | - [ ] Concept diagrams 33 | - [ ] Assignee 34 | -------------------------------------------------------------------------------- /.github/workflows/cicd.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | 3 | on: 4 | push: 5 | 6 | jobs: 7 | lint: 8 | name: Lint shell scripts 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | 14 | - name: ShellCheck 15 | uses: ludeeus/action-shellcheck@master 16 | 17 | format: 18 | name: Format code 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | - uses: psf/black@stable 24 | 25 | test_build-stac: 26 | name: Test lambdas/build-stac 27 | uses: ./.github/workflows/test_docker_lambda.yml 28 | with: 29 | path_to_lambda: lambdas/build-stac 30 | 31 | test_data-transfer: 32 | name: Test lambdas/data-transfer 33 | uses: ./.github/workflows/test_python_lambda.yml 34 | with: 35 | path_to_lambda: lambdas/data-transfer 36 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_call: 3 | inputs: 4 | deploy_env: 5 | required: true 6 | type: string 7 | secrets: 8 | aws_access_key_id: 9 | required: true 10 | aws_secret_access_key: 11 | required: true 12 | cognito_app_secret: 13 | required: true 14 | stac_ingestor_url: 15 | required: true 16 | external_role_arn: 17 | required: true 18 | 19 | jobs: 20 | deploy: 21 | runs-on: ubuntu-latest 22 | environment: ${{ inputs.deploy_env }} 23 | steps: 24 | - name: Check out repo 25 | uses: actions/checkout@v3 26 | 27 | - name: Set up Python, caching dependencies 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: '3.8' 31 | cache: 'pip' 32 | 33 | - name: 
Manually install requirements.txt 34 | run: pip install -r deploy/requirements.txt 35 | 36 | - name: Install node and related deps 37 | uses: actions/setup-node@v3 38 | with: 39 | node-version: 17.3.0 40 | 41 | - name: Install AWS CDK 42 | run: npm install -g aws-cdk 43 | 44 | - name: Configure awscli 45 | uses: aws-actions/configure-aws-credentials@v1 46 | with: 47 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 48 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 49 | aws-region: us-west-2 50 | 51 | - name: deploy 52 | run: cd deploy && cdk deploy --all --require-approval never 53 | env: 54 | ENV: ${{ inputs.deploy_env }} 55 | COGNITO_APP_SECRET: ${{ secrets.COGNITO_APP_SECRET }} 56 | STAC_INGESTOR_URL: ${{ secrets.STAC_INGESTOR_URL }} 57 | APP_NAME: "veda-data-pipelines" 58 | EXTERNAL_ROLE_ARN: ${{ secrets.EXTERNAL_ROLE_ARN }} 59 | -------------------------------------------------------------------------------- /.github/workflows/deploy_dev.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: [develop] 4 | 5 | jobs: 6 | deploy: 7 | name: Deploy to dev environment 8 | uses: ./.github/workflows/deploy.yml 9 | with: 10 | deploy_env: dev 11 | secrets: 12 | cognito_app_secret: ${{ secrets.cognito_app_secret }} 13 | aws_access_key_id: ${{ secrets.aws_access_key_id }} 14 | aws_secret_access_key: ${{ secrets.aws_secret_access_key }} 15 | stac_ingestor_url: ${{ secrets.stac_ingestor_url }} 16 | external_role_arn: ${{ secrets.external_role_arn }} 17 | 18 | -------------------------------------------------------------------------------- /.github/workflows/deploy_staging.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: [main] 4 | 5 | jobs: 6 | deploy: 7 | name: Deploy to staging environment 8 | uses: ./.github/workflows/deploy.yml 9 | with: 10 | deploy_env: staging 11 | secrets: 12 | cognito_app_secret: ${{ 
secrets.cognito_app_secret }} 13 | aws_access_key_id: ${{ secrets.aws_access_key_id }} 14 | aws_secret_access_key: ${{ secrets.aws_secret_access_key }} 15 | stac_ingestor_url: ${{ secrets.stac_ingestor_url }} 16 | external_role_arn: ${{ secrets.external_role_arn }} 17 | -------------------------------------------------------------------------------- /.github/workflows/dispatch_deploy.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | inputs: 4 | deploy_env: 5 | description: 'Deploy environment' 6 | required: true 7 | default: 'dev' 8 | type: choice 9 | options: 10 | - dev 11 | - staging 12 | 13 | jobs: 14 | deploy: 15 | uses: ./.github/workflows/deploy.yml 16 | with: 17 | deploy_env: ${{ inputs.deploy_env }} 18 | secrets: 19 | cognito_app_secret: ${{ secrets.cognito_app_secret }} 20 | aws_access_key_id: ${{ secrets.aws_access_key_id }} 21 | aws_secret_access_key: ${{ secrets.aws_secret_access_key }} 22 | stac_ingestor_url: ${{ secrets.stac_ingestor_url }} 23 | external_role_arn: ${{ secrets.external_role_arn }} 24 | -------------------------------------------------------------------------------- /.github/workflows/test_docker_lambda.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_call: 3 | inputs: 4 | path_to_lambda: 5 | description: Path to lambda directory, relative to root of repo 6 | required: true 7 | type: string 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | 15 | - uses: satackey/action-docker-layer-caching@v0.0.11 16 | continue-on-error: true 17 | 18 | - name: Build docker images 19 | run: | 20 | docker build \ 21 | --platform=linux/amd64 \ 22 | --target test \ 23 | -t local \ 24 | ${{ inputs.path_to_lambda }} 25 | 26 | - name: Run tests 27 | run: docker run local 28 | -------------------------------------------------------------------------------- 
/.github/workflows/test_python_lambda.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_call: 3 | inputs: 4 | path_to_lambda: 5 | description: Path to lambda directory, relative to root of repo 6 | required: true 7 | type: string 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | 15 | - name: Install poetry 16 | run: pipx install poetry 17 | 18 | - uses: actions/setup-python@v4 19 | with: 20 | python-version: "3.8" # NOTE: Should match Lambda runtime specified in CDK code 21 | cache: "poetry" 22 | cache-dependency-path: | 23 | poetry.lock 24 | ${{ inputs.path_to_lambda }}/requirements**.txt 25 | 26 | - name: Install requirements 27 | run: | 28 | poetry install 29 | poetry add $(cat ${{ inputs.path_to_lambda }}/requirements**.txt ) 30 | 31 | - name: Run tests 32 | run: poetry run pytest ${{ inputs.path_to_lambda }} 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore files downloaded when installing HDF4 libraries 2 | HDF-4.2.15-Darwin/ 3 | hdf-4.2.15-osx1013_64-clang.tar.gz 4 | pyhdf* 5 | 6 | # Ignore MacOSX desktop services store 7 | *.DS_Store 8 | 9 | # Ignore pycache 10 | __pycache__ 11 | .python-version 12 | 13 | # Ignore data files which are downloaded for local testing 14 | *.xml 15 | *.vrt 16 | *.he5 17 | *.img 18 | *.img.hdr 19 | *.hdf 20 | *.hdf5 21 | *.tif 22 | *.nc 23 | *.HDF5 24 | *.h5 25 | *.he5 26 | sample-files/ 27 | cdk.out/ 28 | cdk.context.json 29 | 30 | # Vim 31 | *.swo 32 | *cdk.context.json 33 | 34 | # Virtual Environments 35 | *env/ 36 | 37 | # Avoid checking in filled out env file 38 | env.sh 39 | .env* 40 | 41 | .hypothesis 42 | Makefile 43 | -------------------------------------------------------------------------------- /ARCHITECTURE.md: 
-------------------------------------------------------------------------------- 1 | # Architecture 2 | 3 | ## Architecture Diagram 4 | 5 | The following architecture diagram shows the **data transformation/ingestion pipeline**. Data can be discovered from CMR or from AWS S3. If the data needs to be converted into a cloud optimized geotiff, it's done in the cogification step. Then, it's uploaded to the official VEDA S3 bucket (if needed) and published to the STAC database and API. 6 | ![image](veda-data_ingest_pipeline.png) 7 | 8 | 9 | ## The architecture as step functions 10 | 11 | The architecture defined above has been implemented as step functions (+ other resources in AWS) and the pictures below show how they look in AWS console step function graph view. 12 | 13 | ### Discovery 14 | 15 | ![image](https://user-images.githubusercontent.com/7830949/171733787-f088b7a1-3741-491e-afc1-90e8de95185f.png) 16 | 17 | ### Cogification 18 | 19 | ![image](https://user-images.githubusercontent.com/7830949/171733963-7eabfb61-c5cc-4610-8f16-40ad3e998f8e.png) 20 | 21 | ### Ingest and Publish 22 | 23 | ![image](https://user-images.githubusercontent.com/7830949/171734056-85b194b9-659c-4a57-814c-f91ffc37fdd2.png) 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2022 NASA 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # veda-data-pipelines 2 | 3 | **NOTE**: This repository is being sunsetted. 4 | Please do not open any new issues or PRs on this repository; use instead. 5 | Once all issues are transferred from this repository to **veda-data**, this repository will be archived. 6 | 7 | This repo houses function code and deployment code for producing cloud-optimized 8 | data products and STAC metadata for interfaces such as https://github.com/NASA-IMPACT/delta-ui. 9 | 10 | ## Requirements 11 | 12 | ### Docker 13 | 14 | See [get-docker](https://docs.docker.com/get-docker/) 15 | 16 | ### AWS CDK 17 | 18 | See [cdk-getting-started](https://docs.aws.amazon.com/cdk/v2/guide/getting_started.html) 19 | 20 | ```bash 21 | nvm install 17.3.0 22 | nvm use 17.3.0 23 | npm install -g aws-cdk 24 | ``` 25 | 26 | ### Poetry 27 | 28 | See [poetry-landing-page](https://pypi.org/project/poetry/) 29 | 30 | ```bash 31 | pip install poetry 32 | ``` 33 | 34 | ## Deployment 35 | 36 | This project uses AWS CDK to deploy AWS resources to the cloud. 37 | 38 | ### Make sure the following environment variables are set 39 | 40 | ```bash 41 | ENV="" 42 | COGNITO_APP_SECRET="" 43 | APP_NAME="veda-data-pipelines" 44 | STAC_INGESTOR_URL="" 45 | EXTERNAL_ROLE_ARN="" 46 | ``` 47 | 48 | **Note:** You can use the handy `env.sample.sh` script to set these variables. Just rename the file to `env.sh` and populate it with appropriate values. Then run the following commands: 49 | 50 | ```bash 51 | chmod +x env.sh 52 | source env.sh 53 | ``` 54 | 55 | > If anything other than dev/stage is provided as the env, the dev credentials are used (for now). 56 | 57 | ## To deploy 58 | 59 | ### Using poetry 60 | 61 | ```bash 62 | # deploy 63 | poetry run deploy 64 | 65 | # destroy 66 | poetry run destroy 67 | ``` 68 | 69 | ### Else 70 | 71 | 1. 
Go to `deploy/` directory 72 | 2. Create a virtual environment with `python -m venv venv` 73 | 3. Activate the virtual environment with `source venv/bin/activate` 74 | 4. Install the requirements with `pip install -r requirements.txt` 75 | 5. Run `cdk deploy --all` 76 | 6. Useful: `cdk destroy --all` to destroy the infrastructure 77 | 78 | # License 79 | This project is licensed under **Apache 2**, see the [LICENSE](LICENSE) file for more details. 80 | 81 | -------------------------------------------------------------------------------- /data/collections/CMIP245-winter-median-pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "CMIP245-winter-median-pr", 3 | "type": "Collection", 4 | "links":[], 5 | "title":"Projected changes to winter (January, February, and March) cumulative daily precipitation", 6 | "extent":{ 7 | "spatial":{ 8 | "bbox":[ 9 | [ 10 | -127, 11 | 29, 12 | -103, 13 | 52 14 | ] 15 | ] 16 | }, 17 | "temporal":{ 18 | "interval":[ 19 | [ 20 | "1995-01-01T00:00:00Z", 21 | "2095-03-31T00:00:00Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license":"MIT", 27 | "description": "Differences in winter (January, February, and March) cumulative daily precipitation between a historical period (1995 - 2014) and multiple 20-year periods from an ensemble of CMIP6 climate projections (SSP2-4.5) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": null, 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | 44 | -------------------------------------------------------------------------------- /data/collections/CMIP245-winter-median-ta.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "CMIP245-winter-median-ta", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Projected changes to winter (January, February, and March) average daily air temperature", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-127, 29, -103, 52]] 9 | }, 10 | "temporal": { 11 | "interval": [["1995-01-01T00:00:00Z", "2095-03-31T00:00:00Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "Differences in winter (January, February, and March) average daily air temperature between a historical period (1995 - 2014) and multiple 20-year periods from an ensemble of CMIP6 climate projections (SSP2-4.5) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": false, 18 | "dashboard:time_density": null, 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/CMIP585-winter-median-pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "CMIP585-winter-median-pr", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Projected changes to winter (January, February, and March) cumulative daily precipitation", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-127, 29, -103, 52]] 9 | }, 10 | "temporal": { 11 | "interval": [["1995-01-01T00:00:00Z", "2095-03-31T00:00:00Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "Differences in winter (January, February, and March) cumulative daily precipitation between a historical period (1995 - 2014) and multiple 20-year periods from an ensemble of CMIP6 climate projections (SSP5-8.5) downscaled by NASA 
Earth Exchange (NEX-GDDP-CMIP6)", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": false, 18 | "dashboard:time_density": null, 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/CMIP585-winter-median-ta.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "CMIP585-winter-median-ta", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Projected changes to winter (January, February, and March) average daily air temperature", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-127, 29, -103, 52]] 9 | }, 10 | "temporal": { 11 | "interval": [["1995-01-01T00:00:00Z", "2095-03-31T00:00:00Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "Differences in winter (January, February, and March) average daily air temperature between a historical period (1995 - 2014) and multiple 20-year periods from an ensemble of CMIP6 climate projections (SSP5-8.5) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": false, 18 | "dashboard:time_density": null, 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/HLSL30.002.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "HLSL30.002", 3 | "type": "Collection", 4 | "title": "HLSL30.002", 5 | "extent": { 6 | 
"spatial": { 7 | "bbox": [ 8 | [ 9 | -180, 10 | -90, 11 | 180, 12 | 90 13 | ] 14 | ] 15 | }, 16 | "temporal": { 17 | "interval": [ 18 | [ 19 | "2013-04-11T00:00:00Z", 20 | null 21 | ] 22 | ] 23 | } 24 | }, 25 | "license": "MIT", 26 | "description": "Read more on the NASA CMR Landing page: https://cmr.earthdata.nasa.gov/search/concepts/C2021957657-LPCLOUD.html", 27 | "provider": { 28 | "name": "Land Processes Distributed Active Archive Center (LP DAAC)", 29 | "roles": [ 30 | "processor" 31 | ], 32 | "url": "https://lpdaac.usgs.gov/products/hlsl30v002/" 33 | }, 34 | "links": [ 35 | { 36 | "rel": "external", 37 | "title": "NASA Common Metadata Repository Record for this Dataset", 38 | "href": "https://cmr.earthdata.nasa.gov/search/concepts/C2021957657-LPCLOUD.html", 39 | "type": "text/html" 40 | } 41 | ], 42 | "dashboard:is_periodic": true, 43 | "dashboard:time_density": "day", 44 | "item_assets": { 45 | "cog_default": { 46 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 47 | "roles": [ 48 | "data", 49 | "layer" 50 | ], 51 | "title": "Default COG Layer", 52 | "description": "Cloud optimized default layer to display on map" 53 | } 54 | }, 55 | "stac_version": "1.0.0" 56 | } 57 | -------------------------------------------------------------------------------- /data/collections/HLSS30.002.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "HLSS30.002", 3 | "type": "Collection", 4 | "title": "HLSS30.002", 5 | "extent": { 6 | "spatial": { 7 | "bbox": [ 8 | [ 9 | -180, 10 | -90, 11 | 180, 12 | 90 13 | ] 14 | ] 15 | }, 16 | "temporal": { 17 | "interval": [ 18 | [ 19 | "2015-12-01T00:00:00Z", 20 | null 21 | ] 22 | ] 23 | } 24 | }, 25 | "license": "MIT", 26 | "description": "Read more on the NASA CMR Landing page: https://cmr.earthdata.nasa.gov/search/concepts/C2021957295-LPCLOUD.html", 27 | "provider": { 28 | "name": "Land Processes Distributed Active Archive Center (LP DAAC)", 29 | "roles": [ 30 | 
"processor" 31 | ], 32 | "url": "https://lpdaac.usgs.gov/products/hlss30v002/" 33 | }, 34 | "links": [ 35 | { 36 | "rel": "external", 37 | "title": "NASA Common Metadata Repository Record for this Dataset", 38 | "href": "https://cmr.earthdata.nasa.gov/search/concepts/C2021957295-LPCLOUD.html", 39 | "type": "text/html" 40 | } 41 | ], 42 | "dashboard:is_periodic": true, 43 | "dashboard:time_density": "day", 44 | "item_assets": { 45 | "cog_default": { 46 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 47 | "roles": [ 48 | "data", 49 | "layer" 50 | ], 51 | "title": "Default COG Layer", 52 | "description": "Cloud optimized default layer to display on map" 53 | } 54 | }, 55 | "stac_version": "1.0.0" 56 | } 57 | -------------------------------------------------------------------------------- /data/collections/IS2SITMOGR4-cog.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "IS2SITMOGR4-cog", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "ICESat-2 L4 Monthly Gridded Sea Ice Thickness (COGs)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | 30, 12 | 180, 13 | 89 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2018-11-01T00:00:00Z", 21 | "2021-04-30T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "ICESat-2 L4 Monthly Gridded Sea Ice Thickness (COGs)", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/OMI_trno2-COG.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "OMI_trno2-COG", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "OMI_trno2 - 0.10 x 0.10 Annual as Cloud-Optimized GeoTIFFs (COGs)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2005-01-01T00:00:00Z", 21 | "2021-01-01T00:00:00Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "OMI_trno2 - 0.10 x 0.10 Annual as Cloud-Optimized GeoTIFFs (COGs)", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/OMSO2PCA-COG.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "OMSO2PCA-COG", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "OMI/Aura Sulfur Dioxide (SO2) Total Column L3 1 day Best Pixel in 0.25 degree x 0.25 degree V3 as Cloud-Optimized GeoTIFFs (COGs)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2005-01-01T00:00:00Z", 21 | "2021-01-01T00:00:00Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "OMI/Aura Sulfur Dioxide (SO2) Total Column L3 1 day Best Pixel in 0.25 degree x 0.25 degree V3 as Cloud-Optimized GeoTIFFs (COGs)", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | 
"type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/bangladesh-landcover-2001-2020.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "bangladesh-landcover-2001-2020", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Annual land cover maps for 2001 and 2020", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | 88.02591469087191, 11 | 20.742099910319755, 12 | 92.68367943903164, 13 | 26.63504817414382 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2001-01-01T00:00:00Z", 21 | "2020-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0-1.0", 27 | "description": "The annual land cover maps of 2001 and 2020 were captured using combined Moderate Resolution Imaging Spectroradiometer (MODIS) Annual Land Cover Type dataset (MCD12Q1 V6, dataset link: https://lpdaac.usgs.gov/products/mcd12q1v006/). The actual data product provides global land cover types at yearly intervals (2001-2020) at 500 meters with six different types of land cover classification. 
Among six different schemes, the International Geosphere–Biosphere Programme (IGBP) land cover classification was selected and further simplified to dominant land cover classes (water, urban, cropland, native vegetation) for two different years to illustrate the changes in land use and land cover of the country.", 28 | "item_assets": { 29 | "cog_default": { 30 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 31 | "roles": [ 32 | "data", 33 | "layer" 34 | ], 35 | "title": "Default COG Layer", 36 | "description": "Cloud optimized default layer to display on map" 37 | } 38 | }, 39 | "stac_version": "1.0.0", 40 | "dashboard:is_periodic": false, 41 | "dashboard:time_density": "year" 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/black_marble_hd.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nightlights-hd-monthly", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Black Marble High Definition Nightlights Monthly Dataset", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -180, 12 | -90, 13 | 180, 14 | 90 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2017-07-21T00:00:00Z", 22 | "2021-09-30T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"public-domain", 28 | "description": "The High Definition Nightlights dataset is processed to eliminate light sources, including moonlight reflectance and other interferences. Darker colors indicate fewer night lights and less activity. 
Lighter colors indicate more night lights and more activity.", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": "month", 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /data/collections/caldor-fire-behavior.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "caldor-fire-behavior", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Caldor Fire Behavior", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -180, 12 | -90, 13 | 180, 14 | 90 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2021-08-14T00:00:00Z", 22 | "2021-10-21T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0", 28 | "description": "`.geojson` and `tif` files describing the progression and active fire behavior of the 2021 Caldor Fire in California via the algorithm detailed in https://www.nature.com/articles/s41597-022-01343-0. 
This includes an extra `.tif` file detailing the soil burn severity (SBS) conditions provided by the [Burned Area Emergency Response](https://burnseverity.cr.usgs.gov/baer/) team.", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /data/collections/caldor-fire-burn-severity.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "caldor-fire-burn-severity", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Caldor Fire Burn Severity", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -180, 12 | -90, 13 | 180, 14 | 90 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2021-08-14T00:00:00Z", 22 | "2021-10-21T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0", 28 | "description": "`.geojson` and `tif` files describing the progression and active fire behavior of the 2021 Caldor Fire in California via the algorithm detailed in https://www.nature.com/articles/s41597-022-01343-0. 
This includes an extra `.tif` file detailing the soil burn severity (SBS) conditions provided by the [Burned Area Emergency Response](https://burnseverity.cr.usgs.gov/baer/) team.", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /data/collections/co2-diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "co2-diff", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "CO₂ (Diff)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-180, -90, 180, 90]] 9 | }, 10 | "temporal": { 11 | "interval": [["2015-01-01T00:00:00Z", "2022-02-13T00:00:00Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "The changes in carbon dioxide (CO₂) levels in our atmosphere versus previous years.", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": true, 18 | "dashboard:time_density": "day", 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/co2-mean.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "co2-mean", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "CO₂ (Avg)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-180, -90, 180, 90]] 9 | }, 10 | "temporal": { 11 | "interval": [["2015-01-01T00:00:00Z", 
"2022-02-13T00:00:00Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "The average background concentration of carbon dioxide (CO₂) in our atmosphere.", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": true, 18 | "dashboard:time_density": "day", 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/ecco-surface-height-change.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "ecco-surface-height-change", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"ECCO sea-surface height change from 1992 to 2017", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -180, 12 | -90, 13 | 180, 14 | 90 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "1992-01-01T00:00:00Z", 22 | "2017-12-31T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"MIT", 28 | "description": "Gridded global sea-surface height change from 1992 to 2017 from the Estimating the Circulation and Climate of the Ocean (ECCO) ocean state estimate. 
The dataset was calculated as the difference between the annual means over 2017 and 1992, from the 0.5 degree, gridded monthly mean data product available on PO.DAAC (https://podaac.jpl.nasa.gov/dataset/ECCO_L4_SSH_05DEG_MONTHLY_V4R4).", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1A_Combustion_Mobile.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1A_Combustion_Mobile", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Mobile Combustion", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Mobile emissions from sector 1A, including on-road and non-road vehicles, waterborne, rail, and air.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- 
/data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1A_Combustion_Stationary.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1A_Combustion_Stationary", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Stationary Combustion", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Stationary (non-mobile) emissions from sector 1A, including boilers, heaters, furnaces, kilns, ovens, flares, thermal oxidizers, dryers, and any other equipment or machinery that combusts carbon bearing fuels or waste stream materials.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B1a_Abandoned_Coal.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B1a_Abandoned_Coal", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Abandoned Coal Mines", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 
| ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 1B1a from abandoned coal mines.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B1a_Coal_Mining_Surface.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B1a_Coal_Mining_Surface", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Surface Coal Mines", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 1B1a from surface coal mining.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B1a_Coal_Mining_Underground.json: -------------------------------------------------------------------------------- 1 | 
{ 2 | "id": "EPA-annual-emissions_1B1a_Coal_Mining_Underground", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Underground Coal Mines", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 1B1a from underground coal mining.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B2a_Petroleum.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B2a_Petroleum", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Petroleum", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2a for petroleum systems, including production, transportation, and refining.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; 
application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B2b_Natural_Gas_Distribution.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B2b_Natural_Gas_Distribution", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Natural Gas Distribution", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2b for natural gas distribution.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B2b_Natural_Gas_Processing.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B2b_Natural_Gas_Processing", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Natural Gas Processing", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 
24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2b for natural gas processing.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B2b_Natural_Gas_Production.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B2b_Natural_Gas_Production", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Natural Gas Production", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2b for natural gas production.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | 
-------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_1B2b_Natural_Gas_Transmission.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_1B2b_Natural_Gas_Transmission", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Natural Gas Transmission", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2b for natural gas transmission.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_2B5_Petrochemical_Production.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_2B5_Petrochemical_Production", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Petrochemical Production", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | 
"description": "Emissions from sector 2B5 from petrochemical production.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_2C2_Ferroalloy_Production.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_2C2_Ferroalloy_Production", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Ferroalloy Production", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 2C2 from ferroalloy production.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_4A_Enteric_Fermentation.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 
"EPA-annual-emissions_4A_Enteric_Fermentation", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Enteric Fermentation", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4A from enteric fermentation (fermentation that takes place in the digestive systems of animals).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_4B_Manure_Management.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_4B_Manure_Management", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Manure Management", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4B from manure management.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; 
profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_4C_Rice_Cultivation.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_4C_Rice_Cultivation", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Rice Cultivation", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4C from rice cultivation.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_4F_Field_Burning.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_4F_Field_Burning", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Field Burning", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 
19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4F from agricultural field burning.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_5_Forest_Fires.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_5_Forest_Fires", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Forest Fires", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 5 from forest fires.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_6A_Landfills_Industrial.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_6A_Landfills_Industrial", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Industrial Landfills", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 6A from non-municipal solid waste landfills used to dispose of industrial solid waste.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_6A_Landfills_Municipal.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_6A_Landfills_Municipal", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Municipal Landfills", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 6A from municipal solid waste landfills receiving household waste.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic":
true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_6B_Wastewater_Treatment_Domestic.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_6B_Wastewater_Treatment_Domestic", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Domestic Wastewater Treatment", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 6B from wastewater treatment of domestic sewage.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_6B_Wastewater_Treatment_Industrial.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_6B_Wastewater_Treatment_Industrial", 3 | "type": "Collection", 4 | "links": [], 5 | "title": 
"Gridded 2012 EPA Methane Emissions - Industrial Wastewater Treatment", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 6B from wastewater treatment of industrial and commercial sources.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/annual/EPA-annual-emissions_6D_Composting.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-annual-emissions_6D_Composting", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Composting", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 6D from composting.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized 
default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/daily/EPA-daily-emissions_5_Forest_Fires.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-daily-emissions_5_Forest_Fires", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Forest Fires (daily)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 5 from forest fires (daily).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "day", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_1A_Combustion_Stationary.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-monthly-emissions_1A_Combustion_Stationary", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Stationary Combustion (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 
| "description": "Stationary (non-mobile) emissions from sector 1A, including boilers, heaters, furnaces, kilns, ovens, flares, thermal oxidizers, dryers, and any other equipment or machinery that combusts carbon bearing fuels or waste stream materials.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_1B2a_Petroleum.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-monthly-emissions_1B2a_Petroleum", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Petroleum (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2a for petroleum systems, including production, transportation, and refining (monthly).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | 
-------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_1B2b_Natural_Gas_Production.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-monthly-emissions_1B2b_Natural_Gas_Production", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Natural Gas Production (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Non-combustion emissions from sector 1B2b for natural gas production (monthly).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_4B_Manure_Management.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-monthly-emissions_4B_Manure_Management", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Manure Management (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 
27 | "description": "Emissions from sector 4B from manure management (monthly).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_4C_Rice_Cultivation.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "EPA-monthly-emissions_4C_Rice_Cultivation", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Rice Cultivation (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4C from rice cultivation (monthly).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/epa-emissions-2012/monthly/EPA-monthly-emissions_4F_Field_Burning.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": 
"EPA-monthly-emissions_4F_Field_Burning", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Gridded 2012 EPA Methane Emissions - Field Burning (monthly)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -124.848974, 11 | 24.396308, 12 | -66.885444, 13 | 49.384358 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2012-01-01T00:00:00Z", 21 | "2012-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0", 27 | "description": "Emissions from sector 4F from agricultural field burning (monthly).", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/facebook-population-density.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "facebook_population_density", 3 | "type": "Collection", 4 | "links": [ 5 | { 6 | "rel": "external", 7 | "href": "https://arxiv.org/pdf/1712.05839.pdf", 8 | "type": "application/pdf", 9 | "title": "Mapping the world population one building at a time" 10 | } 11 | ], 12 | "title": "Population Density Maps using satellite imagery built by Meta", 13 | "extent": { 14 | "spatial": { 15 | "bbox": [ 16 | [ 17 | -180.00041666666667, -55.985972222324634, 179.82041666695605, 18 | 71.33069444444445 19 | ] 20 | ] 21 | }, 22 | "temporal": { 23 | "interval": [["2015-01-01T00:00:00Z", "2022-06-08T00:00:00Z"]] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "Facebook high-resolution population density: Darker areas indicate higher population density areas and lighter areas indicate lower population density areas, 
with a 30m² resolution.", 28 | "item_assets": { 29 | "cog_default": { 30 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 31 | "roles": ["data", "layer"], 32 | "title": "Default COG Layer", 33 | "description": "Cloud optimized default layer to display on map" 34 | } 35 | }, 36 | "stac_version": "1.0.0", 37 | "dashboard:is_periodic": false, 38 | "dashboard:time_density": null 39 | } 40 | -------------------------------------------------------------------------------- /data/collections/geoglam.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "geoglam", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "GEOGLAM Crop Monitor", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [[-180, -90, 180, 90]] 9 | }, 10 | "temporal": { 11 | "interval": [["2020-01-01T00:00:00Z", "2022-04-30T23:59:59Z"]] 12 | } 13 | }, 14 | "license": "MIT", 15 | "description": "The Crop Monitors were designed to provide a public good of open, timely, science-driven information on crop conditions in support of market transparency for the G20 Agricultural Market Information System (AMIS). Reflecting an international, multi-source, consensus assessment of crop growing conditions, status, and agro-climatic factors likely to impact global production, focusing on the major producing and trading countries for the four primary crops monitored by AMIS (wheat, maize, rice, and soybeans). The Crop Monitor for AMIS brings together over 40 partners from national, regional (i.e. sub-continental), and global monitoring systems, space agencies, agriculture organizations and universities. 
Read more: https://cropmonitor.org/index.php/about/aboutus/", 16 | "stac_version": "1.0.0", 17 | "dashboard:is_periodic": false, 18 | "dashboard:time_density": "month", 19 | "item_assets": { 20 | "cog_default": { 21 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 22 | "roles": ["data", "layer"], 23 | "title": "Default COG Layer", 24 | "description": "Cloud optimized default layer to display on map" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /data/collections/grdi-vnl-slope-raster.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "grdi-vnl-slope-raster", 3 | "type": "Collection", 4 | "title": "GRDI VNL Slope Constituent Raster", 5 | "description": "Global Gridded Relative Deprivation Index (GRDI) VIIRS Night Lights (VNL) Slope Constituent raster", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -180.0, 14 | -56.0, 15 | 180, 16 | 82.18 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2012-01-01T00:00:00Z", 24 | "2020-12-31T23:59:59Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": null, 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/hurricane_blue_tarps.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "blue-tarp-detection", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Hurricane Ida - Detected Blue Tarps", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | 
-90.300691019583, 12 | 29.791754950316868, 13 | -89.86300184384689, 14 | 30.099979027371006 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2021-08-23T00:00:00Z", 22 | "2022-02-12T00:00:00Z" 23 | ] 24 | 25 | ] 26 | } 27 | }, 28 | "license":"MIT", 29 | "description":"Blue tarps were detected in the aftermath of Hurricane Ida using Planet Imagery. The detection algorithm involved segmenting out blue pixels from the buildings in the affected region.", 30 | "stac_version":"1.0.0", 31 | "dashboard:is_periodic": false, 32 | "dashboard:time_density": "day", 33 | "item_assets": { 34 | "cog_default": { 35 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 36 | "roles": [ 37 | "data", 38 | "layer" 39 | ], 40 | "title": "Default COG Layer", 41 | "description": "Cloud optimized default blue-tarp detection layer to display on map" 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /data/collections/hurricane_planetscope_images.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "blue-tarp-planetscope", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Hurricane Ida - Blue Tarps PlanetScope Image", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -90.300691019583, 12 | 29.791754950316868, 13 | -89.86300184384689, 14 | 30.099979027371006 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2021-08-23T00:00:00Z", 22 | "2022-02-12T00:00:00Z" 23 | ] 24 | 25 | ] 26 | } 27 | }, 28 | "license":"MIT", 29 | "description":"Blue tarps were detected in the aftermath of Hurricane Ida using Planet Imagery. The detection algorithm involved segmenting out blue pixels from the buildings in the affected region. 
This collection contains PlanetScope images used for detection.", 30 | "stac_version":"1.0.0", 31 | "dashboard:is_periodic": false, 32 | "dashboard:time_density": "day", 33 | "item_assets": { 34 | "cog_default": { 35 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 36 | "roles": [ 37 | "data", 38 | "layer" 39 | ], 40 | "title": "Default COG Layer", 41 | "description": "Cloud optimized default base PlanetScope image (used for blue-tarp detection) layer to display on map" 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /data/collections/lis-tws-anomaly.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "lis-tws-anomaly", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Terrestrial Water Storage (TWS) Anomalies", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, -90, 180, 90 11 | ] 12 | ] 13 | }, 14 | "temporal": { 15 | "interval": [ 16 | [ 17 | "2002-09-01T00:00:00Z", 18 | "2021-12-01T00:00:00Z" 19 | ] 20 | ] 21 | } 22 | }, 23 | "license": "MIT", 24 | "description": "Anomalies in Terrestrial Water Storage (TWS) modeled using LIS framework by assimilating GRACE TWS, soil moisture, and leaf area index over 2001-2021.", 25 | "item_assets": { 26 | "cog_default": { 27 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 28 | "roles": [ 29 | "data", 30 | "layer" 31 | ], 32 | "title": "Default COG Layer", 33 | "description": "Cloud optimized default layer to display on map" 34 | } 35 | }, 36 | "stac_version": "1.0.0", 37 | "dashboard:is_periodic": true, 38 | "dashboard:time_density": "day" 39 | } 40 | -------------------------------------------------------------------------------- /data/collections/lis-tws-nonstationarity-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "lis-tws-nonstationarity-index", 3 | "type": "Collection", 4 | "links": [], 5 
| "title": "Global TWS Non-Stationarity Index", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, -90, 180, 90 11 | ] 12 | ] 13 | }, 14 | "temporal": { 15 | "interval": [ 16 | [ 17 | "2003-01-01T00:00:00Z", 18 | "2020-01-01T00:00:00Z" 19 | ] 20 | ] 21 | } 22 | }, 23 | "license": "Creative Commons Zero (CC0-1.0)", 24 | "description": "The global Terrestrial Water Storage (TWS) non-stationarity index integrates the trend, seasonal shifts, and variability change of TWS for the period of 2003 - 2020. TWS is derived by jointly assimilating the MODIS Leaf Area Index, the ESA CCI surface soil moisture, and the GSFC GRACE mascon-based TWS anomalies into the Noah-MP land surface model within the NASA Land Information System (LIS) at 10 km spatial resolution forced by the combination of MERRA2 and IMERG meteorological fields. The smaller the non-stationarity index is, the more the water cycle is under a non-stationary process. Glaciers and Greenland are excluded from the analysis.", 25 | "item_assets": { 26 | "cog_default": { 27 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 28 | "roles": [ 29 | "data", 30 | "layer" 31 | ], 32 | "title": "Default COG Layer", 33 | "description": "Cloud optimized default layer to display on map" 34 | } 35 | }, 36 | "stac_version": "1.0.0", 37 | "dashboard:is_periodic": false, 38 | "dashboard:time_density": null 39 | } 40 | -------------------------------------------------------------------------------- /data/collections/lis-tws-trend.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "lis-tws-trend", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Trend in Terrestrial Water Storage (TWS) Anomalies", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, -90, 180, 90 11 | ] 12 | ] 13 | }, 14 | "temporal": { 15 | "interval": [ 16 | [ 17 | "2001-01-01T00:00:00Z", 18 | "2021-12-31T23:59:59Z" 19 | ] 20 | ] 21 | } 22 | }, 23 | "license": "MIT", 
24 | "description": "Trends in anomalies of Terrestrial Water Storage (TWS) modeled using LIS framework by assimilating GRACE TWS, soil moisture, and leaf area index over 2001-2021", 25 | "item_assets": { 26 | "cog_default": { 27 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 28 | "roles": [ 29 | "data", 30 | "layer" 31 | ], 32 | "title": "Default COG Layer", 33 | "description": "Cloud optimized default layer to display on map" 34 | } 35 | }, 36 | "stac_version": "1.0.0", 37 | "dashboard:is_periodic": false, 38 | "dashboard:time_density": null 39 | } 40 | -------------------------------------------------------------------------------- /data/collections/modis-annual-lai-2003-2020.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "modis-annual-lai-2003-2020", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Annual LAI maps for 2003 and 2021 (Bangladesh)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | 88.02591469087191, 11 | 20.742099910319755, 12 | 92.68367943903164, 13 | 26.63504817414382 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2003-01-01T00:00:00Z", 21 | "2020-12-31T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "CC0-1.0", 27 | "description": "The annual median Leaf Area Index (LAI) maps of 2003 and 2021 were captured using combined Moderate Resolution Imaging Spectroradiometer (MODIS) Level 4 dataset (MCD15A3H Version 6.1, dataset link: https://modis.gsfc.nasa.gov/data/dataprod/mod15.php). The actual dataset represents one-sided green leaf area per unit ground area at 500 meters spatial resolution and provides information at every 4 days. 
Annual median of the LAI datasets were calculated for both the years of 2003 and 2021 to illustrate the difference in vegetation cover.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": null, 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/mtbs-burn-severity.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "mtbs-burn-severity", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"MTBS Burn Severity", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -126.49459612498832, 12 | 24.0478678762251, 13 | -71.50752568733597, 14 | 50.55916724898132 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2016-01-01T00:00:00Z", 22 | "2020-12-31T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"MIT", 28 | "description": "Burn severities and extents of fires from Monitoring Trends in Burn Severity (MTBS) program during the years 2016-2020 over Western US", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": true, 31 | "dashboard:time_density": "year", 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /data/collections/nceo-africa-2017.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nceo_africa_2017", 3 |
"type": "Collection", 4 | "links": [ 5 | { 6 | "rel": "external", 7 | "href": "https://ceos.org/gst/africa-biomass.html", 8 | "type": "text/html", 9 | "title": "NCEO Africa Aboveground Woody Biomass 2017 (CEOS Website)" 10 | } 11 | ], 12 | "title": "NCEO Africa Aboveground Woody Biomass 2017", 13 | "extent": { 14 | "spatial": { 15 | " bbox": [[-18.2735295, -35.054059, 51.8642329, 37.7310386]] 16 | }, 17 | "temporal": { 18 | "interval": [["2017-01-01T00:00:00Z", "2018-01-01T00:00:00Z"]] 19 | } 20 | }, 21 | "license": "MIT", 22 | "summaries": { 23 | "datetime": ["2017-01-01T00:00:00Z"], 24 | "cog_default": { 25 | "max": 429, 26 | "min": 0 27 | } 28 | }, 29 | "description": "The NCEO Africa Aboveground Woody Biomass (AGB) map for the year 2017 at 100 m spatial resolution was developed using a combination of LiDAR, Synthetic Aperture Radar (SAR) and optical based data. This product was developed by the UK’s National Centre for Earth Observation (NCEO) through the Carbon Cycle and Official Development Assistance (ODA) programmes.", 30 | "item_assets": { 31 | "cog_default": { 32 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 33 | "roles": ["data", "layer"], 34 | "title": "Default COG Layer", 35 | "description": "Cloud optimized default layer to display on map" 36 | } 37 | }, 38 | "stac_version": "1.0.0", 39 | "dashboard:is_periodic": false, 40 | "dashboard:time_density": null 41 | } 42 | -------------------------------------------------------------------------------- /data/collections/nightlights-500m-daily.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nightlights-500m-daily", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Black Marble 500m Nightlights Daily Dataset", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2020-01-01T00:00:00Z", 21 | "2021-03-01T23:59:59Z" 
22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "Darker colors indicate fewer night lights and less activity. Lighter colors indicate more night lights and more activity. Check out the HD dataset to see a light-corrected version of this dataset.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "day", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/nightlights-hd-1band.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nightlights-hd-1band", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Black Marble High Definition Nightlights 1 band Dataset", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2017-07-21T00:00:00Z", 21 | "2021-09-30T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "The High Definition Nightlights dataset is processed to eliminate light sources, including moonlight reflectance and other interferences. Darker colors indicate fewer night lights and less activity. 
Lighter colors indicate more night lights and more activity.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": null, 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/nightlights-hd-monthly.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nightlights-hd-monthly", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "Black Marble High Definition Nightlights Monthly Dataset", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2017-07-21T00:00:00Z", 21 | "2021-09-30T23:59:59Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "The High Definition Nightlights dataset is processed to eliminate light sources, including moonlight reflectance and other interferences. Darker colors indicate fewer night lights and less activity. 
Lighter colors indicate more night lights and more activity.", 28 | "stac_version": "1.0.0", 29 | "dashboard:is_periodic": true, 30 | "dashboard:time_density": "month", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/nightlights_3bands.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "nightlights-hd-3bands", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Black Marble High Definition Nightlights 3 bands Dataset", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -180, 12 | -90, 13 | 180, 14 | 90 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "2017-07-21T00:00:00Z", 22 | "2021-09-30T23:59:59Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"public-domain", 28 | "description": "The High Definition Nightlights dataset is processed to eliminate light sources, including moonlight reflectance and other interferences. Darker colors indicate fewer night lights and less activity. 
Lighter colors indicate more night lights and more activity.", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /data/collections/no2-monthly-diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "no2-monthly-diff", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "NO₂ (Diff)", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2015-01-01T00:00:00Z", 21 | "2022-01-01T00:00:00Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "This layer shows changes in nitrogen dioxide (NO₂) levels. Redder colors indicate increases in NO₂. Bluer colors indicate lower levels of NO₂. 
Missing pixels indicate areas of no data most likely associated with cloud cover or snow.", 28 | "item_assets": { 29 | "cog_default": { 30 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 31 | "roles": [ 32 | "data", 33 | "layer" 34 | ], 35 | "title": "Default COG Layer", 36 | "description": "Cloud optimized default layer to display on map" 37 | } 38 | }, 39 | "stac_version": "1.0.0", 40 | "stac_extensions": [ 41 | "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json" 42 | ], 43 | "dashboard:is_periodic": true, 44 | "dashboard:time_density": "month" 45 | } 46 | -------------------------------------------------------------------------------- /data/collections/no2-monthly.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "no2-monthly", 3 | "type": "Collection", 4 | "links": [], 5 | "title": "NO₂", 6 | "extent": { 7 | "spatial": { 8 | "bbox": [ 9 | [ 10 | -180, 11 | -90, 12 | 180, 13 | 90 14 | ] 15 | ] 16 | }, 17 | "temporal": { 18 | "interval": [ 19 | [ 20 | "2016-01-01T00:00:00Z", 21 | "2022-01-01T00:00:00Z" 22 | ] 23 | ] 24 | } 25 | }, 26 | "license": "MIT", 27 | "description": "Darker colors indicate higher nitrogen dioxide (NO₂) levels and more activity. Lighter colors indicate lower levels of NO₂ and less activity. 
Missing pixels indicate areas of no data most likely associated with cloud cover or snow.", 28 | "item_assets": { 29 | "cog_default": { 30 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 31 | "roles": [ 32 | "data", 33 | "layer" 34 | ], 35 | "title": "Default COG Layer", 36 | "description": "Cloud optimized default layer to display on map" 37 | } 38 | }, 39 | "stac_version": "1.0.0", 40 | "stac_extensions": [ 41 | "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json" 42 | ], 43 | "dashboard:is_periodic": true, 44 | "dashboard:time_density": "month" 45 | } 46 | -------------------------------------------------------------------------------- /data/collections/snow-projections-diff-245.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "snow-projections-diff-245", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Projections of Snow Water Equivalent (SWE) Losses - SSP2-4.5", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -125, 12 | 34, 13 | -104, 14 | 50 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "1995-04-01T00:00:00Z", 22 | "2095-05-01T00:00:00Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0-1.0", 28 | "description": "Percent-change to future SWE, relative to the historical period (1995 - 2014), simulated using the LIS framework and CMIP6 climate projections (SSP2-4.5 scenario) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- 
/data/collections/snow-projections-diff-585.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "snow-projections-diff-585", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Projections of Snow Water Equivalent (SWE) Losses - SSP5-8.5", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -125, 12 | 34, 13 | -104, 14 | 50 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "1995-04-01T00:00:00Z", 22 | "2095-05-01T00:00:00Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0-1.0", 28 | "description": "Percent-change to future SWE, relative to the historical period (1995 - 2014), simulated using the LIS framework and CMIP6 climate projections (SSP5-8.5 scenario) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- /data/collections/snow-projections-median-245.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "snow-projections-median-245", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Projections of Snow Water Equivalent (SWE) - SSP2-4.5", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -125, 12 | 34, 13 | -104, 14 | 50 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "1995-04-01T00:00:00Z", 22 | "2095-05-01T00:00:00Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0-1.0", 28 | "description": "Historical (1995 - 2014) and future SWE simulated using the LIS framework and CMIP6 climate projections (SSP2-4.5 scenario) 
downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- /data/collections/snow-projections-median-585.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "snow-projections-median-585", 3 | "type": "Collection", 4 | "links":[ 5 | ], 6 | "title":"Projections of Snow Water Equivalent (SWE) - SSP5-8.5", 7 | "extent":{ 8 | "spatial":{ 9 | "bbox":[ 10 | [ 11 | -125, 12 | 34, 13 | -104, 14 | 50 15 | ] 16 | ] 17 | }, 18 | "temporal":{ 19 | "interval":[ 20 | [ 21 | "1995-04-01T00:00:00Z", 22 | "2095-05-01T00:00:00Z" 23 | ] 24 | ] 25 | } 26 | }, 27 | "license":"CC0-1.0", 28 | "description": "Historical (1995 - 2014) and future SWE simulated using the LIS framework and CMIP6 climate projections (SSP5-8.5 scenario) downscaled by NASA Earth Exchange (NEX-GDDP-CMIP6)", 29 | "stac_version": "1.0.0", 30 | "dashboard:is_periodic": false, 31 | "dashboard:time_density": null, 32 | "item_assets": { 33 | "cog_default": { 34 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 35 | "roles": [ 36 | "data", 37 | "layer" 38 | ], 39 | "title": "Default COG Layer", 40 | "description": "Cloud optimized default layer to display on map" 41 | } 42 | } 43 | } 44 | 45 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-household-nopop.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-household-nopop", 
3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Household) (Masked)", 5 | "description": "Household Composition & Disability (Aged 65 or Older, Aged 17 or Younger, Civilian with a Disability, Single-Parent Households) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-household.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-household", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Household)", 5 | "description": "Household Composition & Disability (Aged 65 or Older, Aged 17 or Younger, Civilian with a Disability, Single-Parent Households) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | 
"cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-housing-nopop.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-housing-nopop", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Housing) (Masked)", 5 | "description": "Housing Type & Transportation (Multi-Unit Structures, Mobile Homes, Crowding, No Vehicle, Group Quarters) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-housing.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-housing", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Housing)", 5 | "description": "Housing Type & Transportation (Multi-Unit Structures, Mobile Homes, Crowding, No Vehicle, Group 
Quarters) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-minority-nopop.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-minority-nopop", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Minority) (Masked)", 5 | "description": "Minority Status & Language (Minority, Speaks English “Less than Well”) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | 
-------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-minority.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-minority", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Minority)", 5 | "description": "Minority Status & Language (Minority, Speaks English “Less than Well”) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-overall-nopop.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-overall-nopop", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Overall) (Masked)", 5 | "description": "Overall Social Vulnerability Index - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 
| ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-overall.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-overall", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (Overall)", 5 | "description": "Overall Social Vulnerability Index - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-socioeconomic-nopop.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-socioeconomic-nopop", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (SocioEconomic) (Masked)", 5 | "description": "Socioeconomic Status (Below 
Poverty, Unemployed, Income, No High School Diploma) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/collections/social-vulnerability-index-socioeconomic.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "social-vulnerability-index-socioeconomic", 3 | "type": "Collection", 4 | "title": "Social Vulnerability Index (SocioEconomic)", 5 | "description": "Socioeconomic Status (Below Poverty, Unemployed, Income, No High School Diploma) - Percentile ranking", 6 | "stac_version": "1.0.0", 7 | "license": "MIT", 8 | "links": [], 9 | "extent": { 10 | "spatial": { 11 | "bbox": [ 12 | [ 13 | -178.23333334, 14 | 18.908332897999998, 15 | -66.958333785, 16 | 71.383332688 17 | ] 18 | ] 19 | }, 20 | "temporal": { 21 | "interval": [ 22 | [ 23 | "2000-01-01T00:00:00Z", 24 | "2018-01-01T00:00:00Z" 25 | ] 26 | ] 27 | } 28 | }, 29 | "dashboard:is_periodic": false, 30 | "dashboard:time_density": "year", 31 | "item_assets": { 32 | "cog_default": { 33 | "type": "image/tiff; application=geotiff; profile=cloud-optimized", 34 | "roles": [ 35 | "data", 36 | "layer" 37 | ], 38 | "title": "Default COG Layer", 39 | "description": "Cloud optimized default layer to 
display on map" 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /data/step_function_inputs/CMIP245-winter-median-pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "CMIP245-winter-median-pr", 3 | "prefix": "EIS/NEX-GDDP-CMIP6/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)_pr(.*)ssp245(.*).cog.tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/CMIP245-winter-median-ta.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "CMIP245-winter-median-ta", 3 | "prefix": "EIS/NEX-GDDP-CMIP6/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)_tas(.*)ssp245(.*).cog.tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/CMIP585-winter-median-pr.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "CMIP585-winter-median-pr", 3 | "prefix": "EIS/NEX-GDDP-CMIP6/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)_pr(.*)ssp585(.*).cog.tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/CMIP585-winter-median-ta.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "CMIP585-winter-median-ta", 3 | "prefix": "EIS/NEX-GDDP-CMIP6/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)_tas(.*)ssp585(.*).cog.tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- 
/data/step_function_inputs/HLSL30.002-ida.json: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "stac", 3 | "queue_messages": "true", 4 | "collection": "HLSL30", 5 | "version": "2.0", 6 | "temporal": ["2021-08-23T00:00:00Z","2022-02-12T00:00:00Z"], 7 | "bounding_box": [-90.300691019583,29.791754950316868,-89.86300184384689,30.099979027371006] 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/HLSL30.002-maria.json: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "stac", 3 | "queue_messages": "true", 4 | "collection": "HLSL30", 5 | "version": "2.0", 6 | "bounding_box": [-67.2716765, 17.9121390, -65.5747876, 18.5156946], 7 | "temporal": ["2017-07-21T00:00:00Z","2018-03-20T23:59:59Z"] 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/HLSS30.002-ida.json: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "stac", 3 | "queue_messages": "true", 4 | "collection": "HLSS30", 5 | "version": "2.0", 6 | "temporal": ["2021-08-23T00:00:00Z","2022-02-12T00:00:00Z"], 7 | "bounding_box": [-90.300691019583,29.791754950316868,-89.86300184384689,30.099979027371006] 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/HLSS30.002-maria.json: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "stac", 3 | "queue_messages": "true", 4 | "collection": "HLSS30", 5 | "version": "2.0", 6 | "bounding_box": [-67.2716765, 17.9121390, -65.5747876, 18.5156946], 7 | "temporal": ["2017-07-21T00:00:00Z","2018-03-20T23:59:59Z"] 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/IS2SITMOGR4-cog.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "collection": "IS2SITMOGR4-cog", 3 | "prefix": "IS2SITMOGR4-cog/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/MO_NPP_npp_vgpm.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "MO_NPP_npp_vgpm", 3 | "prefix": "MO_NPP_npp_vgpm/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month", 8 | "upload": false 9 | } 10 | -------------------------------------------------------------------------------- /data/step_function_inputs/OMI_trno2-COG.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "OMI_trno2-COG", 3 | "prefix": "OMI_trno2-COG/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "year" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/OMSO2PCA-cog.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "OMSO2PCA-COG", 3 | "prefix": "OMSO2PCA-COG/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "year" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/bangladesh-landcover-2001-2020.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "bangladesh-landcover-2001-2020", 4 | "prefix": "EIS/COG/coastal-flooding-and-slr/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": 
"^(.*)MODIS_LC_2001_BD_v2.cog.tif$", 7 | "discovery": "s3", 8 | "start_datetime": "2001-01-01T00:00:00Z", 9 | "end_datetime": "2001-12-31T23:59:59Z", 10 | "upload": false 11 | }, 12 | { 13 | "collection": "bangladesh-landcover-2001-2020", 14 | "prefix": "EIS/COG/coastal-flooding-and-slr/", 15 | "bucket": "veda-data-store-staging", 16 | "filename_regex": "^(.*)MODIS_LC_2020_BD.cog.tif$", 17 | "discovery": "s3", 18 | "start_datetime": "2020-01-01T00:00:00Z", 19 | "end_datetime": "2020-12-31T23:59:59Z", 20 | "upload": false 21 | } 22 | ] 23 | -------------------------------------------------------------------------------- /data/step_function_inputs/blue-tarp-detection.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "blue-tarp-detection", 3 | "prefix": "blue-tarp-detection/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/blue-tarp-planetscope.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "blue-tarp-planetscope", 3 | "prefix": "blue-tarp-planetscope/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/caldor-fire-behavior.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "caldor-fire-behavior", 3 | "prefix": "EIS/COG/Fire-Hydro/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)frp_to_save.tif$", 6 | "discovery": "s3", 7 | "upload": false, 8 | "start_datetime": "2021-08-15T00:00:00Z", 9 | "end_datetime": "2021-10-21T12:00:00Z" 10 | } 11 | 
-------------------------------------------------------------------------------- /data/step_function_inputs/caldor-fire-burn-severity.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "caldor-fire-burn-severity", 3 | "prefix": "EIS/COG/Fire-Hydro/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)bs_to_save.tif$", 6 | "discovery": "s3", 7 | "upload": false, 8 | "start_datetime": "2021-08-15T00:00:00Z", 9 | "end_datetime": "2021-10-21T12:00:00Z" 10 | } 11 | -------------------------------------------------------------------------------- /data/step_function_inputs/co2-diff.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "co2-diff", 4 | "prefix": "co2-diff/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)2015.*.tif$", 7 | "discovery": "s3" 8 | }, 9 | { 10 | "collection": "co2-diff", 11 | "prefix": "co2-diff/", 12 | "bucket": "veda-data-store-staging", 13 | "filename_regex": "^(.*)2016.*.tif$", 14 | "discovery": "s3" 15 | }, 16 | { 17 | "collection": "co2-diff", 18 | "prefix": "co2-diff/", 19 | "bucket": "veda-data-store-staging", 20 | "filename_regex": "^(.*)2017.*.tif$", 21 | "discovery": "s3" 22 | }, 23 | { 24 | "collection": "co2-diff", 25 | "prefix": "co2-diff/", 26 | "bucket": "veda-data-store-staging", 27 | "filename_regex": "^(.*)2018.*.tif$", 28 | "discovery": "s3" 29 | }, 30 | { 31 | "collection": "co2-diff", 32 | "prefix": "co2-diff/", 33 | "bucket": "veda-data-store-staging", 34 | "filename_regex": "^(.*)2019.*.tif$", 35 | "discovery": "s3" 36 | }, 37 | { 38 | "collection": "co2-diff", 39 | "prefix": "co2-diff/", 40 | "bucket": "veda-data-store-staging", 41 | "filename_regex": "^(.*)2020.*.tif$", 42 | "discovery": "s3" 43 | }, 44 | { 45 | "collection": "co2-diff", 46 | "prefix": "co2-diff/", 47 | "bucket": "veda-data-store-staging", 48 | "filename_regex": "^(.*)2021.*.tif$", 49 | 
"discovery": "s3" 50 | }, 51 | { 52 | "collection": "co2-diff", 53 | "prefix": "co2-diff/", 54 | "bucket": "veda-data-store-staging", 55 | "filename_regex": "^(.*)2022.*.tif$", 56 | "discovery": "s3" 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- /data/step_function_inputs/co2-mean.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "co2-mean", 4 | "prefix": "co2-mean/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)2015.*.tif$", 7 | "discovery": "s3" 8 | }, 9 | { 10 | "collection": "co2-mean", 11 | "prefix": "co2-mean/", 12 | "bucket": "veda-data-store-staging", 13 | "filename_regex": "^(.*)2016.*.tif$", 14 | "discovery": "s3" 15 | }, 16 | { 17 | "collection": "co2-mean", 18 | "prefix": "co2-mean/", 19 | "bucket": "veda-data-store-staging", 20 | "filename_regex": "^(.*)2017.*.tif$", 21 | "discovery": "s3" 22 | }, 23 | { 24 | "collection": "co2-mean", 25 | "prefix": "co2-mean/", 26 | "bucket": "veda-data-store-staging", 27 | "filename_regex": "^(.*)2018.*.tif$", 28 | "discovery": "s3" 29 | }, 30 | { 31 | "collection": "co2-mean", 32 | "prefix": "co2-mean/", 33 | "bucket": "veda-data-store-staging", 34 | "filename_regex": "^(.*)2019.*.tif$", 35 | "discovery": "s3" 36 | }, 37 | { 38 | "collection": "co2-mean", 39 | "prefix": "co2-mean/", 40 | "bucket": "veda-data-store-staging", 41 | "filename_regex": "^(.*)2020.*.tif$", 42 | "discovery": "s3" 43 | }, 44 | { 45 | "collection": "co2-mean", 46 | "prefix": "co2-mean/", 47 | "bucket": "veda-data-store-staging", 48 | "filename_regex": "^(.*)2021.*.tif$", 49 | "discovery": "s3" 50 | }, 51 | { 52 | "collection": "co2-mean", 53 | "prefix": "co2-mean/", 54 | "bucket": "veda-data-store-staging", 55 | "filename_regex": "^(.*)2022.*.tif$", 56 | "discovery": "s3" 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- 
/data/step_function_inputs/ecco-surface-height-change.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "ecco-surface-height-change", 3 | "discovery": "s3", 4 | "prefix": "EIS/COG/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)2017_minus_1992.cog.tif$", 7 | "start_datetime": "1992-01-01T00:00:00Z", 8 | "end_datetime": "2017-12-31T23:59:59Z", 9 | "cogify": "false" 10 | } 11 | -------------------------------------------------------------------------------- /data/step_function_inputs/epa-emissions-2012-daily.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "EPA-daily-emissions_5_Forest_Fires", 3 | "prefix": "EIS/cog/EPA-inventory-2012/daily/emissions_5_Forest_Fires/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)Forest_Fires_(.*).tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/epa-emissions-2012-monthly.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "EPA-monthly-emissions_1A_Combustion_Stationary", 4 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)Combustion_Stationary_(.*).tif$", 7 | "discovery": "s3", 8 | "upload": false, 9 | "datetime_range": "month" 10 | }, 11 | { 12 | "collection": "EPA-monthly-emissions_1B2a_Petroleum", 13 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 14 | "bucket": "veda-data-store-staging", 15 | "filename_regex": "^(.*)Petroleum_(.*).tif$", 16 | "discovery": "s3", 17 | "upload": false, 18 | "datetime_range": "month" 19 | }, 20 | { 21 | "collection": "EPA-monthly-emissions_1B2b_Natural_Gas_Production", 22 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 23 | "bucket": "veda-data-store-staging", 24 | 
"filename_regex": "^(.*)Natural_Gas_Production_(.*).tif$", 25 | "discovery": "s3", 26 | "upload": false, 27 | "datetime_range": "month" 28 | }, 29 | { 30 | "collection": "EPA-monthly-emissions_4C_Rice_Cultivation", 31 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 32 | "bucket": "veda-data-store-staging", 33 | "filename_regex": "^(.*)Rice_Cultivation_(.*).tif$", 34 | "discovery": "s3", 35 | "upload": false, 36 | "datetime_range": "month" 37 | }, 38 | { 39 | "collection": "EPA-monthly-emissions_4F_Field_Burning", 40 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 41 | "bucket": "veda-data-store-staging", 42 | "filename_regex": "^(.*)Field_Burning_(.*).tif$", 43 | "discovery": "s3", 44 | "upload": false, 45 | "datetime_range": "month" 46 | }, 47 | { 48 | "collection": "EPA-monthly-emissions_4B_Manure_Management", 49 | "prefix": "EIS/cog/EPA-inventory-2012/monthly/", 50 | "bucket": "veda-data-store-staging", 51 | "filename_regex": "^(.*)Manure_Management_(.*).tif$", 52 | "discovery": "s3", 53 | "upload": false, 54 | "datetime_range": "month" 55 | } 56 | ] 57 | -------------------------------------------------------------------------------- /data/step_function_inputs/epa-emissions-test.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "EPA-annual-emissions_1B1a_Abandoned_Coal", 4 | "prefix": "EIS/cog/EPA-inventory-2012/annual/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)Abandoned_Coal.tif$", 7 | "discovery": "s3", 8 | "start_datetime": "2012-01-01T00:00:00Z", 9 | "end_datetime": "2012-12-31T23:59:59Z", 10 | "upload": false, 11 | "cogify": false 12 | } 13 | ] 14 | -------------------------------------------------------------------------------- /data/step_function_inputs/facebook-population-density.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "facebook_population_density", 3 | "prefix": 
"facebook_population_density/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "upload": false 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/geoglam.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "geoglam", 3 | "prefix": "geoglam/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month", 8 | "upload": false 9 | } 10 | -------------------------------------------------------------------------------- /data/step_function_inputs/grdi.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "grdi-v1-raster", 4 | "prefix": "grdi-v1-raster/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)v1_2010(.*).tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | }, 10 | { 11 | "collection": "grdi-v1-built", 12 | "prefix": "grdi-v1-built/", 13 | "bucket": "veda-data-store-staging", 14 | "filename_regex": "^(.*)v1_BUILT(.*).tif$", 15 | "discovery": "s3", 16 | "upload": false 17 | }, 18 | { 19 | "collection": "grdi-imr-raster", 20 | "prefix": "grdi-imr-raster/", 21 | "bucket": "veda-data-store-staging", 22 | "filename_regex": "^(.*)IMR(.*).tif$", 23 | "discovery": "s3", 24 | "upload": false 25 | }, 26 | { 27 | "collection": "grdi-shdi-raster", 28 | "prefix": "grdi-shdi-raster/", 29 | "bucket": "veda-data-store-staging", 30 | "filename_regex": "^(.*)SHDI(.*).tif$", 31 | "discovery": "s3", 32 | "upload": false 33 | }, 34 | { 35 | "collection": "grdi-vnl-raster", 36 | "prefix": "grdi-vnl-raster/", 37 | "bucket": "veda-data-store-staging", 38 | "filename_regex": "^(.*)VNL_(.*).tif$", 39 | "discovery": "s3", 40 | "upload": false 41 | }, 42 | { 43 | "collection": "grdi-vnl-slope-raster", 44 | "prefix": "grdi-vnl-slope-raster/", 45 | "bucket": 
"veda-data-store-staging", 46 | "filename_regex": "^(.*)_VNL-slope_(.*).tif$", 47 | "discovery": "s3", 48 | "upload": false 49 | }, 50 | { 51 | "collection": "grdi-cdr-raster", 52 | "prefix": "grdi-cdr-raster/", 53 | "bucket": "veda-data-store-staging", 54 | "filename_regex": "^(.*)CDR(.*).tif$", 55 | "discovery": "s3", 56 | "upload": false 57 | }, 58 | { 59 | "collection": "grdi-filled-missing-values-count", 60 | "prefix": "grdi-filled-missing-values-count/", 61 | "bucket": "veda-data-store-staging", 62 | "filename_regex": "^(.*)FilledMissingValues(.*).tif$", 63 | "discovery": "s3", 64 | "upload": false 65 | } 66 | ] 67 | -------------------------------------------------------------------------------- /data/step_function_inputs/lis-tws-nonstationarity-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "lis-tws-nonstationarity-index", 3 | "prefix": "EIS/Global_TWS_data/DATWS_nonstationarity_index_v2.cog.tif", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "start_datetime": "2003-01-01T00:00:00Z", 8 | "end_datetime": "2020-01-01T00:00:00Z", 9 | "upload": false 10 | } 11 | -------------------------------------------------------------------------------- /data/step_function_inputs/lis-tws-trend.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "lis-tws-trend", 3 | "prefix": "EIS/COG/LIS_TWS_TREND/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "start_datetime": "2001-01-01T00:00:00Z", 8 | "end_datetime": "2021-12-31T23:59:59Z", 9 | "upload": false 10 | } 11 | -------------------------------------------------------------------------------- /data/step_function_inputs/modis-annual-lai-2003-2020.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": 
"modis-annual-lai-2003-2020", 4 | "prefix": "EIS/COG/coastal-flooding-and-slr/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)MODIS_LAI_2003_BD.cog.tif$", 7 | "discovery": "s3", 8 | "upload": false, 9 | "start_datetime": "2003-01-01T00:00:00Z", 10 | "end_datetime": "2003-12-31T23:59:00Z" 11 | }, 12 | { 13 | "collection": "modis-annual-lai-2003-2020", 14 | "prefix": "EIS/COG/coastal-flooding-and-slr/", 15 | "bucket": "veda-data-store-staging", 16 | "filename_regex": "^(.*)MODIS_LAI_2020_BD.cog.tif$", 17 | "discovery": "s3", 18 | "upload": false, 19 | "start_datetime": "2020-01-01T00:00:00Z", 20 | "end_datetime": "2020-12-31T23:59:00Z" 21 | } 22 | ] 23 | -------------------------------------------------------------------------------- /data/step_function_inputs/mtbs-burn-severity.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "mtbs-burn-severity", 4 | "discovery": "s3", 5 | "prefix": "EIS/COG/Fire-Hydro/", 6 | "bucket": "veda-data-store-staging", 7 | "filename_regex": "mtbs_CONUS_2016.cog.tif", 8 | "start_datetime": "2016-01-01T00:00:00Z", 9 | "end_datetime": "2016-12-31T23:59:59Z" 10 | }, 11 | { 12 | "collection": "mtbs-burn-severity", 13 | "discovery": "s3", 14 | "prefix": "EIS/COG/Fire-Hydro/", 15 | "bucket": "veda-data-store-staging", 16 | "filename_regex": "mtbs_CONUS_2017.cog.tif", 17 | "start_datetime": "2017-01-01T00:00:00Z", 18 | "end_datetime": "2017-12-31T23:59:59Z" 19 | }, 20 | { 21 | "collection": "mtbs-burn-severity", 22 | "discovery": "s3", 23 | "prefix": "EIS/COG/Fire-Hydro/", 24 | "bucket": "veda-data-store-staging", 25 | "filename_regex": "mtbs_CONUS_2018.cog.tif", 26 | "start_datetime": "2018-01-01T00:00:00Z", 27 | "end_datetime": "2018-12-31T23:59:59Z" 28 | }, 29 | { 30 | "collection": "mtbs-burn-severity", 31 | "discovery": "s3", 32 | "prefix": "EIS/COG/Fire-Hydro/", 33 | "bucket": "veda-data-store-staging", 34 | "filename_regex": 
"mtbs_CONUS_2019.cog.tif", 35 | "start_datetime": "2019-01-01T00:00:00Z", 36 | "end_datetime": "2019-12-31T23:59:59Z" 37 | }, 38 | { 39 | "collection": "mtbs-burn-severity", 40 | "discovery": "s3", 41 | "prefix": "EIS/COG/Fire-Hydro/", 42 | "bucket": "veda-data-store-staging", 43 | "filename_regex": "mtbs_CONUS_2020.cog.tif", 44 | "start_datetime": "2020-01-01T00:00:00Z", 45 | "end_datetime": "2020-12-31T23:59:59Z" 46 | } 47 | ] 48 | -------------------------------------------------------------------------------- /data/step_function_inputs/nceo-africa-2017.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "nceo_africa_2017", 3 | "prefix": "file-staging/nasa-map/nceo-africa-2017/", 4 | "bucket": "nasa-maap-data-store", 5 | "filename_regex": "^(.*)AGB_map_2017v0m_COG.tif$", 6 | "discovery": "s3", 7 | "datetime_range": "year", 8 | "upload": false 9 | } 10 | -------------------------------------------------------------------------------- /data/step_function_inputs/nightlights-500m-daily.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "nightlights-500m-daily", 4 | "prefix": "nightlights-500m-daily/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)V011_be_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 7 | "discovery": "s3", 8 | "upload": true 9 | }, 10 | { 11 | "collection": "nightlights-500m-daily", 12 | "prefix": "nightlights-500m-daily/", 13 | "bucket": "veda-data-store-staging", 14 | "filename_regex": "^(.*)V011_ca_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 15 | "discovery": "s3", 16 | "upload": true 17 | }, 18 | { 19 | "collection": "nightlights-500m-daily", 20 | "prefix": "nightlights-500m-daily/", 21 | "bucket": "veda-data-store-staging", 22 | "filename_regex": "^(.*)V011_la_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 23 | "discovery": "s3", 24 | "upload": true 25 | }, 26 | { 27 | "collection": "nightlights-500m-daily", 28 | "prefix": 
"nightlights-500m-daily/", 29 | "bucket": "veda-data-store-staging", 30 | "filename_regex": "^(.*)V011_ny_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 31 | "discovery": "s3", 32 | "upload": true 33 | }, 34 | { 35 | "collection": "nightlights-500m-daily", 36 | "prefix": "nightlights-500m-daily/", 37 | "bucket": "veda-data-store-staging", 38 | "filename_regex": "^(.*)V011_sf_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 39 | "discovery": "s3", 40 | "upload": true 41 | }, 42 | { 43 | "collection": "nightlights-500m-daily", 44 | "prefix": "nightlights-500m-daily/", 45 | "bucket": "veda-data-store-staging", 46 | "filename_regex": "^(.*)EUPorts_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 47 | "discovery": "s3", 48 | "upload": true 49 | }, 50 | { 51 | "collection": "nightlights-500m-daily", 52 | "prefix": "nightlights-500m-daily/", 53 | "bucket": "veda-data-store-staging", 54 | "filename_regex": "^(.*)V011_tk_\\d{4}-\\d{2}-\\d{2}(.*).tif$", 55 | "discovery": "s3", 56 | "upload": true 57 | } 58 | ] 59 | -------------------------------------------------------------------------------- /data/step_function_inputs/nightlights-hd-1band.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "nightlights-hd-1band", 4 | "prefix": "nightlights-hd-1band/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)BMHD_Ida(.*)1band.tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | }, 10 | { 11 | "collection": "nightlights-hd-1band", 12 | "prefix": "nightlights-hd-1band/", 13 | "bucket": "veda-data-store-staging", 14 | "filename_regex": "^(.*)Maria(.*)1band.tif$", 15 | "discovery": "s3", 16 | "upload": false 17 | } 18 | ] 19 | -------------------------------------------------------------------------------- /data/step_function_inputs/nightlights-hd-3bands.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "nightlights-hd-3bands", 3 | "prefix": "delivery/FinalBMHD_Ida2021/", 4 | "bucket": 
"climatedashboard-data", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "year", 8 | "upload": true 9 | } 10 | -------------------------------------------------------------------------------- /data/step_function_inputs/nightlights-hd-monthly-blackmarble.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "nightlights-hd-monthly", 3 | "prefix": "nightlights-hd-monthly/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*)BMHD_VNP46A2(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/no2-monthly-diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "no2-monthly-diff", 3 | "prefix": "no2-monthly-diff/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/no2-monthly-orig.json: -------------------------------------------------------------------------------- 1 | { 2 | "collection": "no2-monthly", 3 | "prefix": "no2-monthly/", 4 | "bucket": "veda-data-store-staging", 5 | "filename_regex": "^(.*).tif$", 6 | "discovery": "s3", 7 | "datetime_range": "month" 8 | } 9 | -------------------------------------------------------------------------------- /data/step_function_inputs/snow-projections-diff-245.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "snow-projections-diff-245", 4 | "prefix": "EIS/snowProjections/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)ssp245(.*)percChange.cog.tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | } 10 | ] 11 | 
-------------------------------------------------------------------------------- /data/step_function_inputs/snow-projections-diff-585.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "snow-projections-diff-585", 4 | "prefix": "EIS/snowProjections/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)ssp585(.*)percChange.cog.tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | } 10 | ] -------------------------------------------------------------------------------- /data/step_function_inputs/snow-projections-median-245.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "snow-projections-median-245", 4 | "prefix": "EIS/snowProjections/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)ssp245(.*)median.cog.tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | } 10 | ] -------------------------------------------------------------------------------- /data/step_function_inputs/snow-projections-median-585.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "snow-projections-median-585", 4 | "prefix": "EIS/snowProjections/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)ssp585(.*)median.cog.tif$", 7 | "discovery": "s3", 8 | "upload": false 9 | } 10 | ] -------------------------------------------------------------------------------- /data/step_function_inputs/social-vulnerability-index.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "collection": "social-vulnerability-index-household", 4 | "prefix": "social-vulnerability-index-household/", 5 | "bucket": "veda-data-store-staging", 6 | "filename_regex": "^(.*)_household_wgs84_cog.tif$", 7 | "discovery": "s3", 8 | "datetime_range": "year" 9 | }, 10 | { 11 | "collection": "social-vulnerability-index-household-nopop", 
12 | "prefix": "social-vulnerability-index-household-nopop/", 13 | "bucket": "veda-data-store-staging", 14 | "filename_regex": "^(.*)_household_wgs84_nopop_cog.tif$", 15 | "discovery": "s3", 16 | "datetime_range": "year" 17 | }, 18 | { 19 | "collection": "social-vulnerability-index-housing", 20 | "prefix": "social-vulnerability-index-housing/", 21 | "bucket": "veda-data-store-staging", 22 | "filename_regex": "^(.*)_housing_wgs84_cog.tif$", 23 | "discovery": "s3", 24 | "datetime_range": "year" 25 | }, 26 | { 27 | "collection": "social-vulnerability-index-housing-nopop", 28 | "prefix": "social-vulnerability-index-housing-nopop/", 29 | "bucket": "veda-data-store-staging", 30 | "filename_regex": "^(.*)_housing_wgs84_nopop_cog.tif$", 31 | "discovery": "s3", 32 | "datetime_range": "year" 33 | }, 34 | { 35 | "collection": "social-vulnerability-index-minority", 36 | "prefix": "social-vulnerability-index-minority/", 37 | "bucket": "veda-data-store-staging", 38 | "filename_regex": "^(.*)_minority_wgs84_cog.tif$", 39 | "discovery": "s3", 40 | "datetime_range": "year" 41 | }, 42 | { 43 | "collection": "social-vulnerability-index-minority-nopop", 44 | "prefix": "social-vulnerability-index-minority-nopop/", 45 | "bucket": "veda-data-store-staging", 46 | "filename_regex": "^(.*)_minority_wgs84_nopop_cog.tif$", 47 | "discovery": "s3", 48 | "datetime_range": "year" 49 | }, 50 | { 51 | "collection": "social-vulnerability-index-overall", 52 | "prefix": "social-vulnerability-index-overall/", 53 | "bucket": "veda-data-store-staging", 54 | "filename_regex": "^(.*)_overall_wgs84_cog.tif$", 55 | "discovery": "s3", 56 | "datetime_range": "year" 57 | }, 58 | { 59 | "collection": "social-vulnerability-index-overall-nopop", 60 | "prefix": "social-vulnerability-index-overall-nopop/", 61 | "bucket": "veda-data-store-staging", 62 | "filename_regex": "^(.*)_overall_wgs84_nopop_cog.tif$", 63 | "discovery": "s3", 64 | "datetime_range": "year" 65 | }, 66 | { 67 | "collection": 
"social-vulnerability-index-socioeconomic", 68 | "prefix": "social-vulnerability-index-socioeconomic/", 69 | "bucket": "veda-data-store-staging", 70 | "filename_regex": "^(.*)_socioeconomic_wgs84_cog.tif$", 71 | "discovery": "s3", 72 | "datetime_range": "year" 73 | }, 74 | { 75 | "collection": "social-vulnerability-index-socioeconomic-nopop", 76 | "prefix": "social-vulnerability-index-socioeconomic-nopop/", 77 | "bucket": "veda-data-store-staging", 78 | "filename_regex": "^(.*)_socioeconomic_wgs84_nopop_cog.tif$", 79 | "discovery": "s3", 80 | "datetime_range": "year" 81 | } 82 | ] 83 | -------------------------------------------------------------------------------- /deploy/.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | package-lock.json 3 | __pycache__ 4 | .pytest_cache 5 | .env 6 | .venv 7 | *.egg-info 8 | 9 | # CDK asset staging directory 10 | .cdk.staging 11 | cdk.out 12 | -------------------------------------------------------------------------------- /deploy/README.md: -------------------------------------------------------------------------------- 1 | 2 | # CDK for COG and STAC generation pipelines 3 | 4 | The `cdk.json` file tells the CDK Toolkit how to execute your app (see root of this repo) 5 | 6 | The CDK code in this repository currently deploys state machines and the tasks they depend on to discover data, transform that data (into cloud-optimized forms) and publish metadata to a STAC database. 7 | 8 | Current tasks included are: 9 | 10 | * CMR Query (Discovery Task) -> Outputs a list of `.he5` files, each one becomes input for a Map iterator. 11 | * Inputs to the Map iterator are submitted to: 12 | * Generate COG: Creates and writes COG to S3, pass granule ID and S3 location to Stac Generation task 13 | * STAC Generation: Creates STAC item from COG and posts to STAC database. Credentials are provided to the CDK workflow via environment variables. See `../stac-gen/README.txt` for more details. 
14 | 15 | To have dates assigned as the temporal extent(s) in the STAC item metadata for a given file, use the following conventions for including a datetime in the filename: 16 | 17 | * Date string/s in filename (following the yyyy-mm-dd, yyyy, yyyymm, yyyymmdd format): You can supply dates in any part of the filename followed by `_` in the formats `yyyy-mm-dd`, `yyyy`, `yyyymm`, `yyyymmdd`. Ideally, the dates will be towards the end of the filename. Eg: `1234_BeforeMaria_Stage0_2017-09-19_2017-07-21.tif` will extract start date as `2017-07-21` and end date as `2017-09-19` while disregarding `1234`. 18 | * `datetime_range`: If the `datetime_range` is not provided we just set the `datetime` field in the metadata using the provided date. However, if `datetime_range` is provided (choice of `month` or `year`) we calculate the `start` and `end` date we need to ingest for the metadata. Eg: `1234_BeforeMaria_Stage0_2017.tif` is the filename, the date is set to `2017-01-01` if `datetime_range` is not provided. If it is set to `month`, `start_datetime` is set to `2017-01-01T00:00:00Z` and `end_datetime` is set to `2017-01-31T00:00:00Z` while also setting `date_time` as null. If `datetime_range` is set to `year`, `start_datetime` is set to `2017-01-01T00:00:00Z` and `end_datetime` is set to `2017-12-31T00:00:00Z` while also setting `date_time` as null. 19 | 20 | ## Useful commands 21 | 22 | * `cdk ls` list all stacks in the app 23 | * `cdk synth` emits the synthesized CloudFormation template 24 | * `cdk deploy` deploy this stack to your default AWS account/region 25 | * `cdk diff` compare deployed stack with current state 26 | * `cdk docs` open CDK documentation 27 | 28 | Enjoy! 
29 | -------------------------------------------------------------------------------- /deploy/app.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | 4 | from aws_cdk import core 5 | 6 | from cdk.lambda_stack import LambdaStack 7 | from cdk.step_function_stack import StepFunctionStack 8 | from cdk.queue_stack import QueueStack 9 | 10 | import config 11 | 12 | app = core.App() 13 | 14 | env_details = core.Environment( 15 | region=os.environ["CDK_DEFAULT_REGION"], 16 | account=os.environ["CDK_DEFAULT_ACCOUNT"], 17 | ) 18 | 19 | lambda_stack = LambdaStack( 20 | app, 21 | f"{config.APP_NAME}-{config.ENV}-lambda", 22 | env=env_details, 23 | ) 24 | 25 | queue_stack = QueueStack( 26 | app, 27 | f"{config.APP_NAME}-{config.ENV}-queue", 28 | lambda_stack, 29 | env=env_details, 30 | ) 31 | 32 | step_function_stack = StepFunctionStack( 33 | app, 34 | f"{config.APP_NAME}-{config.ENV}-stepfunction", 35 | lambda_stack, 36 | queue_stack, 37 | env=env_details, 38 | ) 39 | 40 | # Need to build arn manually otherwise it'll result in cyclic dependency 41 | cogify_arn = step_function_stack.build_arn(env_details, "cogify") 42 | pub_arn = step_function_stack.build_arn(env_details, "publication") 43 | 44 | lambda_stack.grant_execution_privileges( 45 | lambda_function=lambda_stack.trigger_cogify_lambda, 46 | workflow_arn=cogify_arn, 47 | ) 48 | lambda_stack.grant_execution_privileges( 49 | lambda_function=lambda_stack.trigger_ingest_lambda, 50 | workflow_arn=pub_arn, 51 | ) 52 | 53 | app.synth() 54 | -------------------------------------------------------------------------------- /deploy/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "python3 app.py", 3 | "context": { 4 | "@aws-cdk/core:enableStackNameDuplicates": "true", 5 | "aws-cdk:enableDiffNoFail": "true", 6 | "@aws-cdk/core:stackRelativeExports": "true", 7 | 
"@aws-cdk/aws-ecr-assets:dockerIgnoreSupport": true, 8 | "@aws-cdk/aws-secretsmanager:parseOwnedSecretName": true, 9 | "@aws-cdk/aws-kms:defaultKeyPolicies": true, 10 | "@aws-cdk/aws-s3:grantWriteWithoutAcl": true 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /deploy/cdk/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NASA-IMPACT/veda-data-pipelines/41e1413d47c8685c7685fb9562d4c396b7a21d3c/deploy/cdk/__init__.py -------------------------------------------------------------------------------- /deploy/cdk/queue_stack.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | from aws_cdk import ( 3 | core, 4 | aws_sqs as sqs, 5 | aws_lambda_event_sources as lambda_event_sources, 6 | ) 7 | 8 | if TYPE_CHECKING: 9 | from .lambda_stack import LambdaStack 10 | 11 | 12 | class QueueStack(core.Stack): 13 | def __init__( 14 | self, 15 | app, 16 | construct_id: str, 17 | lambda_stack: "LambdaStack", 18 | **kwargs, 19 | ) -> None: 20 | super().__init__(app, construct_id, **kwargs) 21 | 22 | self.cogify_queue = self._queue( 23 | f"{construct_id}-cogify-queue", 24 | visibility_timeout=900, 25 | dead_letter_queue=sqs.DeadLetterQueue( 26 | max_receive_count=5, 27 | queue=self._queue(f"{construct_id}-cogify-dlq"), 28 | ), 29 | ) 30 | 31 | lambda_stack.trigger_cogify_lambda.add_event_source( 32 | lambda_event_sources.SqsEventSource( 33 | self.cogify_queue, 34 | batch_size=10, 35 | max_batching_window=core.Duration.seconds(20), 36 | report_batch_item_failures=True, 37 | ) 38 | ) 39 | 40 | self.stac_ready_queue = self._queue( 41 | f"{construct_id}-stac-ready-queue", 42 | visibility_timeout=900, 43 | dead_letter_queue=sqs.DeadLetterQueue( 44 | max_receive_count=3, 45 | queue=self._queue(f"{construct_id}-stac-ready-dlq", retention_days=14), 46 | ), 47 | ) 48 | 
self.stac_ready_queue.grant_send_messages(lambda_stack.cogify_lambda.role) 49 | 50 | lambda_stack.trigger_ingest_lambda.add_event_source( 51 | lambda_event_sources.SqsEventSource( 52 | self.stac_ready_queue, 53 | batch_size=10, 54 | max_batching_window=core.Duration.seconds(30), 55 | report_batch_item_failures=True, 56 | ) 57 | ) 58 | 59 | def _queue( 60 | self, name, visibility_timeout=30, dead_letter_queue=None, retention_days=4 61 | ): 62 | return sqs.Queue( 63 | self, 64 | name, 65 | queue_name=name, 66 | visibility_timeout=core.Duration.seconds(visibility_timeout), 67 | dead_letter_queue=dead_letter_queue, 68 | retention_period=core.Duration.days(retention_days), 69 | ) 70 | -------------------------------------------------------------------------------- /deploy/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | ENV = os.environ.get("ENV") 5 | 6 | COGNITO_APP_SECRET = os.environ["COGNITO_APP_SECRET"] 7 | STAC_INGESTOR_URL = os.environ["STAC_INGESTOR_URL"] 8 | 9 | EARTHDATA_USERNAME = os.environ.get("EARTHDATA_USERNAME", "XXXX") 10 | EARTHDATA_PASSWORD = os.environ.get("EARTHDATA_PASSWORD", "XXXX") 11 | 12 | APP_NAME = "veda-data-pipelines" 13 | VEDA_DATA_BUCKET = "climatedashboard-data" 14 | VEDA_EXTERNAL_BUCKETS = ["nasa-maap-data-store", "covid-eo-blackmarble"] 15 | MCP_BUCKETS = { 16 | "prod": "veda-data-store", 17 | "stage": "veda-data-store-staging", 18 | } 19 | 20 | # This should throw if it is not provided 21 | EXTERNAL_ROLE_ARN = os.environ["EXTERNAL_ROLE_ARN"] 22 | -------------------------------------------------------------------------------- /deploy/requirements.txt: -------------------------------------------------------------------------------- 1 | aws_cdk.core==1.151.0 2 | aws_cdk.aws_iam==1.151.0 3 | aws_cdk.custom_resources==1.151.0 4 | aws_cdk.aws_stepfunctions==1.151.0 5 | aws_cdk.aws_events==1.151.0 6 | aws_cdk.aws_events_targets==1.151.0 7 | 
#!/bin/bash
# Script to populate the environment variables for CDK deployment/pgstac database ingestion

# Usage: source env.sh <environment>
# Valid environments: dev, staging (for now)

#===== Needed temporarily to load collections =====#
devCognitoAppSecret=xxxx
stageCognitoAppSecret=xxxx

devStacIngestorUrl=https://6r8ht9b123.execute-api.us-west-2.amazonaws.com/dev
stageStacIngestorUrl=xxx


if [[ -z $1 ]]
then
  echo "please provide an environment as the first argument"
elif [[ $1 != 'dev' && $1 != 'staging' ]]
then
  # Previously any unrecognized argument (e.g. a typo like "prod") silently
  # fell through to the dev configuration; fail loudly instead.
  echo "unknown environment '$1'; valid environments are: dev, staging"
else
  if [[ $1 = 'staging' ]]
  then
    cognitoAppSecret=$stageCognitoAppSecret
    stacIngestorUrl=$stageStacIngestorUrl
  else
    cognitoAppSecret=$devCognitoAppSecret
    stacIngestorUrl=$devStacIngestorUrl
  fi

  export EXTERNAL_ROLE_ARN="arn:aws:iam::xxxxxx:role/xxxxx"
  export EARTHDATA_USERNAME=XXXX
  export EARTHDATA_PASSWORD=XXXX

  export COGNITO_APP_SECRET=$cognitoAppSecret
  export ENV=$1
  export APP_NAME="veda-data-pipelines"

  export STAC_INGESTOR_URL=$stacIngestorUrl

  echo "$1 environment set"
fi
import json
import os
from typing import Any, Dict, TypedDict, Union
from uuid import uuid4

import smart_open

from utils import stac, events

# Responses larger than this are written to S3 and returned by reference;
# 256 KiB matches the AWS Step Functions / SQS payload quota.
MAX_RESPONSE_BYTES = 256 * 1024


class S3LinkOutput(TypedDict):
    # S3 URL of a JSON file containing the generated STAC item.
    stac_file_url: str


class StacItemOutput(TypedDict):
    # The generated STAC item, inline.
    stac_item: Dict[str, Any]


def handler(event: Dict[str, Any], context) -> Union[S3LinkOutput, StacItemOutput]:
    """
    Lambda handler for STAC Collection Item generation

    Arguments:
    event - object with event parameters to be provided in one of 2 formats.
        Format option 1 (with Granule ID defined to retrieve all metadata from CMR):
        {
            "collection": "OMDOAO3e",
            "s3_filename": "s3://climatedashboard-data/OMDOAO3e/OMI-Aura_L3-OMDOAO3e_2022m0120_v003-2022m0122t021759.he5.tif",
            "granule_id": "G2205784904-GES_DISC",
        }
        Format option 2 (with regex provided to parse datetime from the filename:
        {
            "collection": "OMDOAO3e",
            "s3_filename": "s3://climatedashboard-data/OMSO2PCA/OMSO2PCA_LUT_SCD_2005.tif",
        }

    Returns either the STAC item inline, or (when too large for a Step
    Functions payload) the S3 URL of a JSON file holding the item.
    """
    # Events carrying a granule_id are resolved via CMR; everything else is
    # treated as a regex-style event.
    EventType = events.CmrEvent if event.get("granule_id") else events.RegexEvent
    parsed_event = EventType.parse_obj(event)
    stac_item = stac.generate_stac(parsed_event).to_dict()

    output: StacItemOutput = {"stac_item": stac_item}

    # Measure the actual serialized payload size. The previous
    # sys.getsizeof(json.dumps(...)) reported the Python str object's
    # in-memory footprint (object header included, multi-byte characters
    # undercounted), not the number of bytes sent over the wire.
    serialized = json.dumps(output)
    if len(serialized.encode("utf-8")) < MAX_RESPONSE_BYTES:
        # Return STAC Item directly
        return output

    # Too large to return inline: stash the item in S3 and return its URL.
    key = f"s3://{os.environ['BUCKET']}/{uuid4()}.json"
    with smart_open.open(key, "w") as file:
        file.write(json.dumps(stac_item))

    return {"stac_file_url": key}


if __name__ == "__main__":
    sample_event = {
        "collection": "nightlights-hd-monthly",
        "s3_filename": "s3://climatedashboard-data/delivery/BMHD_Maria_Stages/BeforeMaria_Stage0_2017-07-21_2017-09-19.tif",
        "granule_id": None,
        "datetime_range": None,
        "start_datetime": None,
        "end_datetime": None,
    }
    print(json.dumps(handler(sample_event, {}), indent=2))
import os
from unittest import mock

import pytest
import boto3
from moto import mock_s3

from mypy_boto3_s3.service_resource import S3ServiceResource, Bucket

# Region used by every mocked AWS client/resource in this suite.
_TEST_REGION = "us-east-1"


@pytest.fixture(scope="session", autouse=True)
def mock_environment():
    """Patch BUCKET into the environment for the whole test session."""
    with mock.patch.dict(os.environ, {"BUCKET": "test-bucket"}):
        yield os.environ


@pytest.fixture
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    fake_credentials = {
        "AWS_ACCESS_KEY_ID": "testing",
        "AWS_SECRET_ACCESS_KEY": "testing",
        "AWS_SECURITY_TOKEN": "testing",
        "AWS_SESSION_TOKEN": "testing",
        "AWS_DEFAULT_REGION": _TEST_REGION,
    }
    os.environ.update(fake_credentials)


@pytest.fixture
def s3_client(aws_credentials):
    """Yield a moto-backed low-level S3 client."""
    with mock_s3():
        yield boto3.client("s3", region_name=_TEST_REGION)


@pytest.fixture
def s3_resource(aws_credentials) -> S3ServiceResource:
    """Yield a moto-backed high-level S3 resource."""
    with mock_s3():
        yield boto3.resource("s3", region_name=_TEST_REGION)


@pytest.fixture
def s3_created_bucket(s3_resource, mock_environment) -> Bucket:
    """Create and yield the bucket named by the BUCKET environment variable."""
    bucket = s3_resource.Bucket(mock_environment["BUCKET"])
    bucket.create()
    yield bucket
import contextlib
from typing import TYPE_CHECKING, Any, Dict, Type
from unittest.mock import MagicMock, Mock
from pydantic import ValidationError

import pytest
from pystac import Item

import handler
from utils import stac, events

if TYPE_CHECKING:
    # Private typeshed alias for a @singledispatch-decorated callable; used
    # only in the string annotation on override_registry below.
    from functools import _SingleDispatchCallable


def build_mock_stac_item(item: Dict[str, Any]) -> MagicMock:
    """
    Build a mocked STAC Item from a dict
    """
    # spec=Item restricts the mock's attribute surface to a real pystac Item.
    expected_stac_item = MagicMock(spec=Item)
    expected_stac_item.to_dict.return_value = item
    return expected_stac_item


@contextlib.contextmanager
def override_registry(
    dispatch_callable: "_SingleDispatchCallable[Any]", cls: Type, mock: Mock
):
    """
    Helper to override a singledispatch function with a mock for testing.
    """
    # Save the implementation currently registered for `cls` so it can be
    # restored even when the with-body raises.
    original = dispatch_callable.registry[cls]
    dispatch_callable.register(cls, mock)
    try:
        yield mock
    finally:
        dispatch_callable.register(cls, original)


def test_routing_regex_event():
    """
    Ensure that the system properly identifies, classifies, and routes regex-style events.
    """
    # granule_id is None, so the handler should dispatch on RegexEvent.
    regex_event = {
        "collection": "test-collection",
        "s3_filename": "s3://test-bucket/delivery/BMHD_Maria_Stages/70001_BeforeMaria_Stage0_2017-07-21.tif",
        "granule_id": None,
        "datetime_range": None,
        "start_datetime": None,
        "end_datetime": None,
    }

    with override_registry(
        stac.generate_stac,
        events.RegexEvent,
        MagicMock(return_value=build_mock_stac_item({"mock": "STAC Item 1"})),
    ) as called_mock, override_registry(
        stac.generate_stac,
        events.CmrEvent,
        MagicMock(),
    ) as not_called_mock:
        handler.handler(regex_event, None)

    # Only the RegexEvent implementation should have been invoked.
    called_mock.assert_called_once_with(events.RegexEvent.parse_obj(regex_event))
    assert not not_called_mock.call_count


def test_routing_cmr_event():
    """
    Ensure that the system properly identifies, classifies, and routes CMR-style events.
    """
    # A truthy granule_id should route the event to the CmrEvent branch.
    cmr_event = {
        "collection": "test-collection",
        "s3_filename": "s3://test-bucket/delivery/BMHD_Maria_Stages/70001_BeforeMaria_Stage0_2017-07-21.tif",
        "granule_id": "test-granule",
    }

    with override_registry(
        stac.generate_stac,
        events.CmrEvent,
        MagicMock(return_value=build_mock_stac_item({"mock": "STAC Item 1"})),
    ) as called_mock, override_registry(
        stac.generate_stac,
        events.RegexEvent,
        MagicMock(),
    ) as not_called_mock:
        handler.handler(cmr_event, None)

    # Only the CmrEvent implementation should have been invoked.
    called_mock.assert_called_once_with(events.CmrEvent.parse_obj(cmr_event))
    assert not not_called_mock.call_count


@pytest.mark.parametrize(
    "bad_event",
    [{"collection": "test-collection"}],
)
def test_routing_unexpected_event(bad_event):
    """
    Ensure that a malformatted event raises a validation error
    """
    # Missing required fields (e.g. s3_filename) must fail pydantic parsing.
    with pytest.raises(ValidationError):
        handler.handler(bad_event, None)
"""Pydantic models describing the event payloads accepted by build-stac."""
from datetime import datetime
from typing import Dict, List, Literal, Optional, Union
from pathlib import Path
import re

from pydantic import BaseModel, Field
import pystac


# Granularity options for expanding a single parsed date into a date range.
INTERVAL = Literal["month", "year"]


class BaseEvent(BaseModel, frozen=True):
    """Fields shared by every STAC-item-build event."""

    collection: str
    s3_filename: str

    id_regex: Optional[str] = None
    asset_name: Optional[str] = None
    asset_roles: Optional[List[str]] = None
    asset_media_type: Optional[Union[str, pystac.MediaType]] = None

    def item_id(self: "BaseEvent") -> str:
        """Derive the STAC item id from the s3 filename.

        When `id_regex` is set it must match `s3_filename` exactly once; the
        captured group(s) are joined with "-" to form the id. Otherwise the
        file's stem (basename without extension) is used.

        Raises:
            ValueError: if `id_regex` does not match exactly once.
        """
        if self.id_regex:
            id_components = re.findall(self.id_regex, self.s3_filename)
            # Raise explicitly instead of `assert`, which is stripped when
            # Python runs with optimizations enabled (-O).
            if len(id_components) != 1:
                raise ValueError(
                    f"id_regex {self.id_regex!r} matched "
                    f"{len(id_components)} times in {self.s3_filename!r}; "
                    "expected exactly one match"
                )
            (component,) = id_components
            # re.findall yields plain strings for single-group patterns and
            # tuples for multi-group patterns; joining a plain string would
            # interleave "-" between its characters.
            if isinstance(component, tuple):
                id_str = "-".join(component)
            else:
                id_str = component
        else:
            id_str = Path(self.s3_filename).stem
        return id_str


class CmrEvent(BaseEvent):
    """Event whose metadata is resolved from CMR via a granule id."""

    granule_id: str


class RegexEvent(BaseEvent):
    """Event whose datetime metadata is parsed from the filename."""

    filename_regex: Optional[str]

    start_datetime: Optional[datetime] = None
    end_datetime: Optional[datetime] = None
    single_datetime: Optional[datetime] = None

    properties: Optional[Dict] = Field(default_factory=dict)
    datetime_range: Optional[INTERVAL] = None


SupportedEvent = Union[RegexEvent, CmrEvent]
"""Helpers for deriving STAC item datetimes from date patterns in filenames."""
import calendar
import re
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Callable, Dict, Tuple, Union

if TYPE_CHECKING:
    # Annotation-only import; avoids a runtime import of the sibling module
    # (and any circular-import risk).
    from . import events


# (start, end) pair bounding an interval.
DATERANGE = Tuple[datetime, datetime]


def _calculate_year_range(datetime_obj: datetime) -> DATERANGE:
    """Expand a date to the first/last calendar day of its year."""
    start_datetime = datetime_obj.replace(month=1, day=1)
    end_datetime = datetime_obj.replace(month=12, day=31)
    return start_datetime, end_datetime


def _calculate_month_range(datetime_obj: datetime) -> DATERANGE:
    """Expand a date to the first/last calendar day of its month."""
    start_datetime = datetime_obj.replace(day=1)
    # calendar.monthrange yields the last day of the month (leap years
    # included) — replaces the previous third-party dateutil dependency
    # (relativedelta(day=31)) with the standard library.
    _, last_day = calendar.monthrange(datetime_obj.year, datetime_obj.month)
    end_datetime = datetime_obj.replace(day=last_day)
    return start_datetime, end_datetime


DATETIME_RANGE_METHODS: Dict["events.INTERVAL", Callable[[datetime], DATERANGE]] = {
    "month": _calculate_month_range,
    "year": _calculate_year_range,
}


def extract_dates(
    filename: str, datetime_range: "events.INTERVAL"
) -> Union[Tuple[datetime, datetime, None], Tuple[None, None, datetime]]:
    """
    Extracts start & end or single date string from filename.

    Returns a 3-tuple: (start, end, None) when a range was found or derived
    from `datetime_range`, or (None, None, single) for a lone date. All
    returned datetimes are UTC-aware.

    Raises:
        ValueError: if no recognizable date is present in the filename.
    """
    # Ordered most-specific first; the first pattern that matches wins.
    DATE_REGEX_STRATEGIES = [
        (r"_(\d{4}-\d{2}-\d{2})", "%Y-%m-%d"),
        (r"_(\d{8})", "%Y%m%d"),
        (r"_(\d{6})", "%Y%m"),
        (r"_(\d{4})", "%Y"),
    ]

    # Find dates in filename
    dates = []
    for pattern, dateformat in DATE_REGEX_STRATEGIES:
        dates_found = re.compile(pattern).findall(filename)
        if not dates_found:
            continue

        for date_str in dates_found:
            date = datetime.strptime(date_str, dateformat)
            date_tz = date.replace(tzinfo=timezone.utc)
            dates.append(date_tz)

        break

    num_dates_found = len(dates)

    # No dates found. ValueError replaces the previous bare Exception: more
    # precise, and still caught by any existing `except Exception` handler.
    if not num_dates_found:
        raise ValueError(
            f"No dates provided in {filename=}. "
            "At least one date in format yyyy-mm-dd is required."
        )

    # Many dates found: earliest/latest become the range bounds.
    if num_dates_found > 1:
        dates.sort()
        start_datetime, *_, end_datetime = dates
        return start_datetime, end_datetime, None

    # Single date found
    single_datetime = dates[0]

    # Convert single date to range
    if datetime_range:
        start_datetime, end_datetime = DATETIME_RANGE_METHODS[datetime_range](
            single_datetime
        )
        return start_datetime, end_datetime, None

    # Return single date
    return None, None, single_datetime
import re

import datetime as dt

from cmr import GranuleQuery

# Open-ended default window: effectively "all time".
DEFAULT_TEMPORAL = ["1000-01-01T00:00:00Z", "3000-01-01T23:59:59Z"]
# CMR link rel identifying direct data-download links.
CMR_DATA_REL = "http://esipfed.org/ns/fedsearch/1.1/data#"


def handler(event, context):
    """
    Lambda handler that queries CMR for granules of a collection/version.

    Event keys:
        collection (str, required) and version (str, required);
        temporal (optional [start, end] "%Y-%m-%dT%H:%M:%SZ" strings);
        bounding_box (optional [west, south, east, north]);
        include (optional regex — keeps only matching data hrefs);
        mode (optional; "stac" returns links to granule stac.json files);
        cogify (optional; echoed back in the response).

    Returns {"cogify": bool, "objects": [...]}.
    """
    collection = event["collection"]
    version = event["version"]

    temporal = event.get("temporal", DEFAULT_TEMPORAL)
    startdate = dt.datetime.strptime(temporal[0], "%Y-%m-%dT%H:%M:%SZ")
    enddate = dt.datetime.strptime(temporal[1], "%Y-%m-%dT%H:%M:%SZ")
    print(f"Querying for {collection} granules from {startdate} to {enddate}")

    api = GranuleQuery()
    granules = (
        api.short_name(collection)
        .version(version)
        .temporal(startdate, enddate)
        .bounding_box(*event.get("bounding_box", [-180, -90, 180, 90]))
        .get_all()
    )

    mode = event.get("mode")
    # "include" is optional: the previous event["include"] lookup raised
    # KeyError when callers omitted it. Compile once, outside the loop.
    include_pattern = re.compile(event["include"]) if event.get("include") else None

    urls = []
    for granule in granules:
        for link in granule["links"]:
            if mode == "stac":
                # Keep only https links to the granule's STAC representation.
                if link["href"].endswith("stac.json") and link["href"].startswith("https"):
                    urls.append(link)
            elif link["rel"] == CMR_DATA_REL:
                href = link["href"]
                if include_pattern and not include_pattern.match(href):
                    continue
                urls.append(
                    {
                        "collection": collection,
                        "href": href,
                        "granule_id": granule["id"],
                        "id": granule["id"],
                        "mode": mode,
                    }
                )

    print(f"Returning {len(urls)} urls")
    return {"cogify": event.get("cogify", False), "objects": urls}


if __name__ == "__main__":
    sample_event = {
        # "mode": "stac",
        "collection": "IS2SITMOGR4",
        "version": "1",
        "include": "^.+nc$",
        "temporal": ["2018-01-21T00:00:00Z", "2018-04-20T23:59:59Z"],
        "bounding_box": [-180, -90, 180, 90],
    }
    handler(sample_event, {})
"""Fetch ERA5 cloud-base-height reanalysis data via the CDS API.

Writes the result to ``download.nc`` in the current directory. Requires a
configured CDS API key (see https://cds.climate.copernicus.eu/api-how-to).
"""
import cdsapi


def main() -> None:
    """Download one day of ERA5 cloud_base_height to download.nc."""
    client = cdsapi.Client()
    client.retrieve(
        "reanalysis-era5-single-levels",
        {
            "product_type": "reanalysis",
            "format": "netcdf",
            "variable": "cloud_base_height",
            "year": "2021",
            "month": "08",
            "day": ["01"],
            "time": ["00:00"],
        },
        "download.nc",
    )


if __name__ == "__main__":
    # Guarded so importing this module no longer triggers a network download
    # as a side effect; running `python3 ERA5/fetch.py` behaves as before.
    main()
Example Input:
```
{
    "collection": "OMDOAO3e",
    "href": "https://acdisc.gesdisc.eosdis.nasa.gov/data//Aura_OMI_Level3/OMDOAO3e.003/2022/OMI-Aura_L3-OMDOAO3e_2022m0120_v003-2022m0122t021759.he5",
    "upload": true,
    "granule_id": "G2205784904-GES_DISC"
}
```
This is because this Cogify lambda needs to access an S3 Bucket to write COG files 73 | -------------------------------------------------------------------------------- /lambdas/cogify/example.ini: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | output_bucket = climatedashboard-data 3 | output_dir = OMDOAO3e_003 4 | 5 | [GPM_3IMERGM] 6 | group = Grid 7 | variable_name = precipitation 8 | 9 | [ERA5] 10 | variable_name = cbh 11 | 12 | [NISAR] # WIP 13 | variable_name = science/LSAR/GCOV/grids/frequencyA/HHHH 14 | src_crs = +proj=utm +zone=32S +datum=WGS84 15 | x_variable = science/LSAR/GCOV/metadata/radarGrid/xCoordinates 16 | y_variable = science/LSAR/GCOV/metadata/radarGrid/yCoordinates 17 | 18 | [OMNO2d] 19 | variable_name = HDFEOS/GRIDS/ColumnAmountNO2/Data Fields/ColumnAmountNO2TropCloudScreened 20 | affine_transformation = (xmin, xres, 0, ymax, 0, -yres) 21 | 22 | [OMDOAO3e] 23 | variable_name=HDFEOS/GRIDS/ColumnAmountO3/Data Fields/ColumnAmountO3 24 | affine_transformation = (xmin, xres, 0, ymax, 0, -yres) 25 | -------------------------------------------------------------------------------- /lambdas/cogify/requirements.txt: -------------------------------------------------------------------------------- 1 | awslambdaric 2 | rasterio 3 | netCDF4 4 | rio-cogeo 5 | h5py 6 | boto3 7 | # just for era5 config 8 | cdsapi -------------------------------------------------------------------------------- /lambdas/data-transfer/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest import mock 3 | 4 | import pytest 5 | import boto3 6 | from moto import mock_s3 7 | 8 | from mypy_boto3_s3.service_resource import S3ServiceResource, Bucket 9 | 10 | 11 | @pytest.fixture(scope="session", autouse=True) 12 | def mock_environment(): 13 | with mock.patch.dict(os.environ, {"BUCKET": "dst-bucket"}): 14 | yield os.environ 15 | 16 | 17 | @pytest.fixture 18 | def 
import os
import urllib.parse
import tempfile

import boto3
from botocore.errorfactory import ClientError


def assume_role(role_arn, session_name):
    """Return temporary credentials for `role_arn` via STS AssumeRole."""
    sts = boto3.client("sts")
    creds = sts.assume_role(
        RoleArn=role_arn,
        RoleSessionName=session_name,
    )
    return creds["Credentials"]


def handler(event, context):
    """
    Copy each flagged object in `event` into the target bucket ($BUCKET).

    `event` is a list of dicts with at least `s3_filename` (an s3:// URL) and
    `collection`; entries without a truthy `upload` flag are skipped. Each
    copied object lands at `<collection>/<filename>` in the target bucket and
    the entry's `s3_filename` is rewritten to point at the copy. Returns the
    (mutated) event list.
    """
    TARGET_BUCKET = os.environ["BUCKET"]

    # When EXTERNAL_ROLE_ARN is set, write to the target bucket using
    # credentials assumed from that role (cross-account transfer).
    kwargs = {}
    if role_arn := os.environ.get("EXTERNAL_ROLE_ARN"):
        creds = assume_role(role_arn, "veda-data-pipelines_data-transfer")
        kwargs = {
            "aws_access_key_id": creds["AccessKeyId"],
            "aws_secret_access_key": creds["SecretAccessKey"],
            "aws_session_token": creds["SessionToken"],
        }
    source_s3 = boto3.client("s3")
    target_s3 = boto3.client("s3", **kwargs)

    for item in event:  # renamed from `object`, which shadowed the builtin
        if not item.get("upload"):
            continue

        url = urllib.parse.urlparse(item["s3_filename"])
        src_bucket = url.hostname
        src_key = url.path.strip("/")
        filename = src_key.split("/")[-1]

        target_key = f"{item.get('collection')}/{filename}"
        target_url = f"s3://{TARGET_BUCKET}/{target_key}"

        # Check if the corresponding object exists in the target bucket
        try:
            target_s3.head_object(Bucket=TARGET_BUCKET, Key=target_key)
        except ClientError:
            try:
                # Not found: download locally, then upload to the target.
                with tempfile.TemporaryDirectory() as tmp_dir:
                    tmp_filename = f"{tmp_dir}/{filename}"
                    source_s3.download_file(src_bucket, src_key, tmp_filename)
                    target_s3.upload_file(tmp_filename, TARGET_BUCKET, target_key)
            except Exception:  # narrowed from a bare `except:`; still re-raises
                print(
                    "Failed while trying to upload file from "
                    f"s3://{src_bucket}/{src_key} to s3://{TARGET_BUCKET}/{target_key}."
                )
                raise

        item["s3_filename"] = target_url

    return event
def handler(event, context):
    """Forward a batch of SQS records to the ingestion Step Function.

    Decodes each record body in the SQS event as JSON and starts a single
    state-machine execution (ARN from the ``STEP_FUNCTION_ARN`` env var)
    whose input is the JSON-encoded list of decoded bodies.
    """
    state_machine_arn = os.environ["STEP_FUNCTION_ARN"]

    payload = []
    for record in event["Records"]:
        payload.append(json.loads(record["body"]))

    boto3.client("stepfunctions").start_execution(
        stateMachineArn=state_machine_arn,
        input=json.dumps(payload),
    )
    return
-type f -name '*.pyc' | while read f; do n=$(echo $f | sed 's/__pycache__\///' | sed 's/.cpython-[2-3][0-9]//'); cp $f $n; done; 10 | RUN find . -type d -a -name '__pycache__' -print0 | xargs -0 rm -rf 11 | RUN find . -type f -a -name '*.py' -print0 | xargs -0 rm -f 12 | RUN find . -type d -a -name 'tests' -print0 | xargs -0 rm -rf 13 | RUN echo "Remove lambda python packages" 14 | RUN rm -rdf ./numpy/doc/ 15 | RUN rm -rdf ./stack 16 | RUN rm -rdf ./docutils* 17 | 18 | COPY handler.py handler.py 19 | -------------------------------------------------------------------------------- /lambdas/s3-discovery/README.md: -------------------------------------------------------------------------------- 1 | Module to query an `s3` bucket to discover COGs 2 | ```bash 3 | docker build -t s3-discovery . 4 | # Currently runs an example for OMI Ozone 5 | docker run s3-discovery python -m handler 6 | ``` 7 | 8 | To run this locally, you may need to pass your AWS credentials to the module: `docker run -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY s3-discovery python -m handler` 9 | 10 | AWS Provisioning 11 | This Lambda needs to list the contents of an S3 bucket in order to discover files.
import os
import re

import boto3


def assume_role(role_arn, session_name):
    """Assume *role_arn* via STS and return the temporary credentials dict."""
    sts = boto3.client("sts")
    creds = sts.assume_role(
        RoleArn=role_arn,
        RoleSessionName=session_name,
    )
    return creds["Credentials"]


def list_bucket(bucket, prefix, filename_regex):
    """List object keys in *bucket* under *prefix*, optionally regex-filtered.

    When the ``EXTERNAL_ROLE_ARN`` env var is set, the listing is performed
    with credentials assumed from that role. Returns a list of matching keys;
    re-raises any listing failure after logging it.
    """
    kwargs = {}
    if role_arn := os.environ.get("EXTERNAL_ROLE_ARN"):
        creds = assume_role(role_arn, "veda-data-pipelines_s3-discovery")
        kwargs = {
            "aws_access_key_id": creds["AccessKeyId"],
            "aws_secret_access_key": creds["SecretAccessKey"],
            "aws_session_token": creds["SessionToken"],
        }
    s3 = boto3.resource("s3", **kwargs)
    try:
        files = []
        for obj in s3.Bucket(bucket).objects.filter(Prefix=prefix):
            # No regex means "keep everything"; otherwise match the full key.
            if not filename_regex or re.match(filename_regex, obj.key):
                files.append(obj.key)
        return files
    except Exception:
        # Narrowed from a bare `except:`; still re-raised after logging.
        print("Failed during s3 item/asset discovery")
        raise


def handler(event, context):
    """Discover files in an S3 bucket and emit one ingestion object per file.

    Pops ``bucket``, ``prefix``, ``filename_regex`` and ``cogify`` from
    *event*; all remaining keys are propagated onto every emitted object.
    ``collection`` defaults to the prefix with its trailing slash removed.

    Returns ``{"cogify": <bool>, "objects": [<per-file dict>, ...]}``.
    """
    bucket = event.pop("bucket")
    prefix = event.pop("prefix", "")

    filenames = list_bucket(
        bucket=bucket, prefix=prefix, filename_regex=event.pop("filename_regex", None)
    )

    cogify = event.pop("cogify", False)
    collection = event.get("collection", prefix.rstrip("/"))
    files_objs = [
        {
            **event,
            "collection": collection,
            # Full S3 URL of the discovered object.
            "s3_filename": f"s3://{bucket}/{filename}",
            "upload": event.get("upload", False),
        }
        for filename in filenames
    ]
    return {
        "cogify": cogify,
        "objects": files_objs,
    }


if __name__ == "__main__":
    sample_event = {
        "bucket": "climatedashboard-data",
        "prefix": "social_vulnerability_index/",
        "filename_regex": "^(.*)_housing_(.*)$",
        "collection": "social-vulnerability-index-housing",
        "upload": True,
        "cogify": False,
    }

    handler(sample_event, {})
14 | export COGNITO_APP_SECRET=veda-auth-stack-dev/data-pipelines 15 | # TODO (aimee): This isn't currently working because we are getting an import error for boto3 16 | # docker run --env "COGNITO_APP_SECRET=$COGNITO_APP_SECRET" python -m handler 17 | ``` 18 | -------------------------------------------------------------------------------- /lambdas/submit-stac/requirements.txt: -------------------------------------------------------------------------------- 1 | awslambdaric 2 | boto3 3 | aws-lambda-powertools 4 | requests 5 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "veda-optimized-data-pipelines" 3 | version = "0.1.0" 4 | description = "Cloud pipelines for ingesting cloud optimized data" 5 | authors = ["IMPACT", "DevSeed"] 6 | license = "MIT" 7 | packages = [ 8 | { include = "scripts" } 9 | ] 10 | 11 | [tool.poetry.dependencies] 12 | python = "^3.8" 13 | argparse = "^1.4.0" 14 | boto3 = "^1.21.43" 15 | moto = {extras = ["s3"], version = "^3.1.16"} 16 | pytest = "^7.1.2" 17 | pydantic = "^1.9.1" 18 | exceptiongroup = "1.0.0rc9" 19 | python-dotenv = "^1.0.0" 20 | 21 | [tool.poetry.dev-dependencies] 22 | "aws_cdk.core" = "^1.151.0" 23 | "aws_cdk.aws_iam" = "^1.151.0" 24 | "aws_cdk.custom_resources" = "^1.151.0" 25 | "aws_cdk.aws_stepfunctions" = "^1.151.0" 26 | "aws_cdk.aws_events" = "^1.151.0" 27 | "aws_cdk.aws_events_targets" = "^1.151.0" 28 | "aws_cdk.aws_secretsmanager" = "^1.151.0" 29 | "aws_cdk.aws_lambda" = "^1.151.0" 30 | "aws_cdk.aws_stepfunctions_tasks" = "^1.151.0" 31 | "aws_cdk.aws_ec2" = "^1.151.0" 32 | "aws_cdk.aws_sqs" = "^1.151.0" 33 | "aws_cdk.aws_s3" = "^1.151.0" 34 | "aws_cdk.aws_lambda_event_sources" = "^1.151.0" 35 | "aws_cdk.aws_lambda_python" = "^1.151.0" 36 | boto3-stubs = {extras = ["s3"], version = "^1.24.45"} 37 | black = "^22.8.0" 38 | 39 | [build-system] 40 | requires = 
import os
import subprocess


def _run_cdk(command):
    """Run a CDK CLI *command* from the ``deploy`` directory.

    Echoes the captured output and re-raises when the command fails, so CI
    logs show what went wrong.
    """
    os.chdir("deploy")
    try:
        subprocess.check_output(
            command,
            stderr=subprocess.STDOUT,
            shell=True,
        )
    except subprocess.CalledProcessError as cpe:
        print(cpe.output.decode())
        raise


def deploy():
    """Deploy all CDK stacks without interactive approval."""
    _run_cdk("cdk deploy --all --require-approval never")


def destroy():
    """Destroy all CDK stacks without interactive approval."""
    _run_cdk("cdk destroy --all --require-approval never")
def insert_collections(files):
    """POST each collection JSON file in *files* to the STAC ingestor API.

    Authenticates against Cognito using the secret named by the
    ``COGNITO_APP_SECRET`` env var, then inserts every file via
    ``$STAC_INGESTOR_URL/collections``. Raises on the first failure.
    """
    print("Authenticating")
    cognito_details = get_secret(os.environ.get("COGNITO_APP_SECRET"))
    credentials = get_app_credentials(**cognito_details)
    bearer_token = credentials["access_token"]

    print("Inserting collections:")
    base_url = os.environ.get("STAC_INGESTOR_URL")
    with requests.Session() as s:
        for file in files:
            print(file)
            try:
                with open(file) as fd:
                    response = s.post(
                        f"{base_url.rstrip('/')}/collections",
                        json=json.load(fd),
                        headers={"Authorization": f"Bearer {bearer_token}"},
                        # Avoid hanging forever on an unresponsive ingestor.
                        timeout=30,
                    )
                    response.raise_for_status()
                    print(response.text)
            except Exception:
                # Narrowed from a bare `except:` (which would also intercept
                # KeyboardInterrupt); still re-raised after logging.
                print("Error inserting collection.")
                raise
def arguments():
    """Return the CLI arguments after the program name, or None when absent."""
    if len(argv) <= 1:
        print("No collection provided")
        return None
    return argv[1:]


def args_handler(func):
    """Decorator that calls *func* with the CLI arguments instead of call-site args.

    If no CLI arguments were supplied, the wrapped function is not called at
    all: previously ``func(None)`` was invoked, which crashed downstream
    glob-based file lookups.
    """

    @functools.wraps(func)
    def prep_args(*args, **kwargs):
        internal_args = arguments()
        if internal_args is None:
            # Nothing to process; `arguments()` has already printed a message.
            return
        func(internal_args)

    return prep_args
def get_sf_ingestion_arn():
    """Return the ARN of the ``<app>-<env>-stepfunction-discover`` state machine.

    The account id is looked up via STS; region, app name and environment come
    from the ``AWS_REGION``, ``APP_NAME`` and ``ENV`` env vars (the first and
    last have defaults). ``APP_NAME`` is required: previously a missing value
    silently produced an ARN containing the literal string "None".
    """
    sts = boto3.client("sts")
    account_id = sts.get_caller_identity().get("Account")
    region = os.environ.get("AWS_REGION", "us-east-1")
    env = os.environ.get("ENV", "dev")
    # Fail fast (KeyError) rather than embed "None" in an invalid ARN.
    app_name = os.environ["APP_NAME"]
    return (
        f"arn:aws:states:{region}:{account_id}:"
        f"stateMachine:{app_name}-{env}-stepfunction-discover"
    )