├── pyproject.toml
├── 02-raw-data
└── vector
│ └── south-korea
│ ├── gadm36_south_korea.cpg
│ ├── gadm36_south_korea.dbf
│ ├── gadm36_south_korea.shp
│ ├── gadm36_south_korea.shx
│ ├── gadm36_south_korea.prj
│ └── license.txt
├── .pre-commit-config.yaml
├── environment.yml
├── Makefile
├── LICENSE
├── README.md
├── 05-papers-writings
└── vnp46a1-workflow.md
├── .gitignore
└── 01-code-scripts
├── preprocess_vnp46a1.py
├── clip_vnp46a1.py
├── preprocess_vnp46a2.py
├── concatenate_vnp46a1.py
├── download_laads_order.py
├── clip_vnp46a1.ipynb
├── concatenate_vnp46a1.ipynb
├── download_laads_order.ipynb
├── preprocess_vnp46a2.ipynb
├── preprocess_vnp46a1.ipynb
├── explore-quality-bands-vnp46a1.ipynb
├── explore-quality-bands-vnp46a2.ipynb
├── calculate_baseline.ipynb
└── viirs.py
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 79
3 |
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/gadm36_south_korea.cpg:
--------------------------------------------------------------------------------
1 | UTF-8
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/gadm36_south_korea.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/calekochenour/nighttime-radiance/HEAD/02-raw-data/vector/south-korea/gadm36_south_korea.dbf
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/gadm36_south_korea.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/calekochenour/nighttime-radiance/HEAD/02-raw-data/vector/south-korea/gadm36_south_korea.shp
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/gadm36_south_korea.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/calekochenour/nighttime-radiance/HEAD/02-raw-data/vector/south-korea/gadm36_south_korea.shx
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/gadm36_south_korea.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/psf/black
3 |     rev: 19.10b0  # pinned release; the mutable 'stable' ref was removed upstream and is rejected by pre-commit
4 | hooks:
5 | - id: black
6 | language_version: python3.7
7 | - repo: https://gitlab.com/pycqa/flake8
8 | rev: 3.7.6
9 | hooks:
10 | - id: flake8
11 | language: python_venv
12 |
--------------------------------------------------------------------------------
/02-raw-data/vector/south-korea/license.txt:
--------------------------------------------------------------------------------
1 | These data were extracted from the GADM database (www.gadm.org), version 3.4, April 2018. They can be used for non-commercial purposes only. It is not allowed to redistribute these data, or use them for commercial purposes, without prior consent. See the website (www.gadm.org) for more information.
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: nighttime-radiance
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | # Main Python
7 | - python=3.7.6
8 | # Scientific computing
9 | - numpy
10 | - pandas
11 | # Spatial packages
12 | - geopandas
13 | - rasterio
14 | - earthpy
15 | # Jupyter environment
16 | - jupyterlab
17 | - notebook
18 | - jupyter_contrib_nbextensions
19 | - nbclean
20 | - autopep8
21 | - nb_black
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Set phony targets (targets that do not correspond to output files)
.PHONY: all download preprocess concatenate clip clean

# Download, preprocess, concatenate, and clip
all: download preprocess concatenate clip

# Download a LAADS order
download: 01-code-scripts/download_laads_order.py
	python 01-code-scripts/download_laads_order.py

# Preprocess VNP46A1 HDF5 files
preprocess: 01-code-scripts/preprocess_vnp46a1.py
	python 01-code-scripts/preprocess_vnp46a1.py

# Concatenate adjacent VNP46A1 GeoTiff files
concatenate: 01-code-scripts/concatenate_vnp46a1.py
	python 01-code-scripts/concatenate_vnp46a1.py

# Clip VNP46A1 concatenated GeoTiff files
clip: 01-code-scripts/clip_vnp46a1.py
	python 01-code-scripts/clip_vnp46a1.py

# Delete raw HDF5, preprocessed GeoTiff, and unclipped GeoTiff files
# (clipped outputs in vnp46a1-clipped/ are intentionally preserved)
clean:
	rm -f 02-raw-data/hdf/south-korea/*.h5
	rm -f 03-processed-data/raster/south-korea/vnp46a1-grid/*.tif
	rm -f 03-processed-data/raster/south-korea/vnp46a1-grid-concatenated/*.tif
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2020, Cale Kochenour
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Nighttime Radiance
2 | Python scripts and Jupyter Notebooks for working with nighttime radiance data.
3 |
4 | ## Contents
5 |
6 | The project contains folders for all stages of the project workflow as well as other files necessary to run the code.
7 |
8 | ### `01-code-scripts/`
9 |
10 | Contains all Python and Jupyter Notebook files.
11 |
12 | ### `02-raw-data/`
13 |
14 | Contains all original/unprocessed data.
15 |
16 | ### `03-processed-data/`
17 |
18 | Contains all created/processed data.
19 |
20 | ### `04-graphics-outputs/`
21 |
22 | Contains all figures and plots.
23 |
24 | ### `05-papers-writings/`
25 |
26 | Contains all written content.
27 |
28 | ### `Makefile`
29 |
30 | Contains instructions to run the Python scripts in the `01-code-scripts/` folder.
31 |
32 | ### `environment.yml`
33 |
34 | Contains all information to create the Conda environment required to run the Python and Jupyter Notebook files in the `01-code-scripts/` folder.
35 |
36 | ## Prerequisites
37 |
38 | To run the Python and Jupyter Notebook files in the `01-code-scripts/` folder, you will need:
39 |
40 | * Conda ([Miniconda](https://docs.conda.io/en/latest/miniconda.html) or [Anaconda](https://docs.anaconda.com/anaconda/install/))
41 |
42 | ## Local Setup Instructions
43 |
44 | The instructions expect you to have a local copy of this GitHub repository.
45 |
46 | ### Create and Activate Conda Environment
47 |
48 | From the terminal, you can create and activate the Conda environment.
49 |
50 | Create environment:
51 |
52 | ```bash
53 | $ conda env create -f environment.yml
54 | ```
55 |
56 | Activate environment:
57 |
58 | ```bash
59 | $ conda activate nighttime-radiance
60 | ```
61 |
62 | ### Open Jupyter Notebook
63 |
64 | Once you activate the Conda environment, you can work with the Jupyter Notebook files.
65 |
66 | Open Jupyter Notebook:
67 |
68 | ```bash
69 | $ jupyter notebook
70 | ```
71 |
--------------------------------------------------------------------------------
/05-papers-writings/vnp46a1-workflow.md:
--------------------------------------------------------------------------------
1 | # VNP46A1 Data Workflow
2 |
3 | This document specifies how to use the code in the `01-code-scripts/` folder to download and preprocess VNP46A1 data.
4 |
5 | The project `Makefile` centralizes the execution of code for this repository. Navigate to the repository root to run the `Makefile` commands:
6 |
7 | ```bash
8 | $ cd ~/nighttime-radiance
9 | ```
10 |
11 | ## Download data
12 |
13 | Downloading data requires an order to have been placed at NASA LAADS. Once the order has completed and you have changed the variables (as necessary) in the user-defined variables section of the `01-code-scripts/download_laads_order.py` file, run the following command:
14 |
15 | ```bash
16 | $ make download
17 | ```
18 |
19 | ## Preprocess Data
20 |
21 | Preprocessing data requires VNP46A1 HDF5 (.h5) files to have been downloaded. Once files are downloaded and you have changed the variables (as necessary) in the user-defined variables section of the `01-code-scripts/preprocess_vnp46a1.py` file, run the following command:
22 |
23 | ```bash
24 | $ make preprocess
25 | ```
26 |
27 | ## Concatenate Data (Optional)
28 |
29 | Concatenating data is necessary when the study area crosses into multiple VNP46A1 images. The workflow handles concatenating horizontally-adjacent images at this time. This step requires preprocessed GeoTiff files. Once files are preprocessed into GeoTiffs and you have changed the variables (as necessary) in the user-defined variables section of the `01-code-scripts/concatenate_vnp46a1.py` file, run the following command:
30 |
31 | ```bash
32 | $ make concatenate
33 | ```
34 |
35 | ## Clip Data
36 |
37 | Clipping data requires preprocessed (and optionally concatenated) GeoTiff files. Once files are preprocessed into GeoTiffs and you have changed the variables (as necessary) in the user-defined variables section of the `01-code-scripts/clip_vnp46a1.py` file, run the following command:
38 |
39 | ```bash
40 | $ make clip
41 | ```
42 |
43 | ## Full Workflow
44 |
45 | To run the full workflow in succession (download, preprocess, concatenate, clip), ensure all user-defined variables in all scripts are set correctly and run the following command:
46 |
47 | ```bash
48 | $ make all
49 | ```
50 |
51 | Note that this command includes data concatenation. The `Makefile` contents will have to be changed if the concatenation script is not required for the study area.
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
--------------------------------------------------------------------------------
/01-code-scripts/preprocess_vnp46a1.py:
--------------------------------------------------------------------------------
1 | """
2 | -------------------------------------------------------------------------------
3 | Preprocesses NASA VNP46A1 HDF5 files. This script takes raw .h5 files and
4 | completes the following preprocessing tasks:
5 |
6 | - Extracts radiance and qualify flag bands;
7 | - Masks radiance for fill values, clouds, and sensor problems;
8 | - Fills masked data with NaN values;
9 | - Creates a georeferencing transform;
10 | - Creates export metadata; and,
11 | - Exports radiance data to GeoTiff format.
12 |
13 | This script uses the following folder structure:
14 |
15 | ├── 01-code-scripts
16 | │ ├── clip_vnp46a1.ipynb
17 | │ ├── clip_vnp46a1.py
18 | │ ├── concatenate_vnp46a1.ipynb
19 | │ ├── concatenate_vnp46a1.py
20 | │ ├── download_laads_order.ipynb
21 | │ ├── download_laads_order.py
22 | │ ├── preprocess_vnp46a1.ipynb
23 | │ ├── preprocess_vnp46a1.py
24 | │ └── viirs.py
25 | ├── 02-raw-data
26 | ├── 03-processed-data
27 | ├── 04-graphics-outputs
28 | └── 05-papers-writings
29 |
30 | Running the script from the 'nighttime-radiance/' folder works by default. If
31 | the script runs from a different folder, the paths in the environment setup
32 | section may have to be changed.
33 | -------------------------------------------------------------------------------
34 | """
35 | # -------------------------ENVIRONMENT SETUP--------------------------------- #
36 | import os
37 | import warnings
38 | import glob
39 | import viirs
40 |
41 | # Set options
42 | warnings.simplefilter("ignore")
43 |
44 | # -------------------------USER-DEFINED VARIABLES---------------------------- #
45 | # Define path folder containing input VNP46A1 HDF5 files
46 | hdf5_input_folder = os.path.join("02-raw-data", "hdf", "south-korea")
47 |
48 | # Defne path to output folder to store exported GeoTiff files
49 | geotiff_output_folder = os.path.join(
50 | "03-processed-data", "raster", "south-korea", "vnp46a1-grid"
51 | )
52 |
53 | # -------------------------DATA PREPROCESSING-------------------------------- #
54 | # Preprocess each HDF5 file (extract bands, mask for fill values, clouds, and
55 | # sensor problems, fill masked values with NaN, export to GeoTiff)
56 | hdf5_files = glob.glob(os.path.join(hdf5_input_folder, "*.h5"))
57 | processed_files = 0
58 | total_files = len(hdf5_files)
59 | for hdf5 in hdf5_files:
60 | viirs.preprocess_vnp46a1(
61 | hdf5_path=hdf5, output_folder=geotiff_output_folder
62 | )
63 | processed_files += 1
64 | print(f"Preprocessed file: {processed_files} of {total_files}\n\n")
65 |
66 | # -------------------------SCRIPT COMPLETION--------------------------------- #
67 | print("\n")
68 | print("-" * (18 + len(os.path.basename(__file__))))
69 | print(f"Completed script: {os.path.basename(__file__)}")
70 | print("-" * (18 + len(os.path.basename(__file__))))
71 |
--------------------------------------------------------------------------------
/01-code-scripts/clip_vnp46a1.py:
--------------------------------------------------------------------------------
1 | """
2 | -------------------------------------------------------------------------------
3 | Clips already concatenated (and already-preprocessed) VNP46A1 GeoTiff
4 | files to a specified country bounding box.
5 |
6 | This script uses the following folder structure:
7 |
8 | ├── 01-code-scripts
9 | │ ├── clip_vnp46a1.ipynb
10 | │ ├── clip_vnp46a1.py
11 | │ ├── concatenate_vnp46a1.ipynb
12 | │ ├── concatenate_vnp46a1.py
13 | │ ├── download_laads_order.ipynb
14 | │ ├── download_laads_order.py
15 | │ ├── preprocess_vnp46a1.ipynb
16 | │ ├── preprocess_vnp46a1.py
17 | │ └── viirs.py
18 | ├── 02-raw-data
19 | ├── 03-processed-data
20 | ├── 04-graphics-outputs
21 | └── 05-papers-writings
22 |
23 | Running the script from the 'nighttime-radiance/' folder works by default. If
24 | the script runs from a different folder, the paths in the environment setup
25 | section may have to be changed.
26 | -------------------------------------------------------------------------------
27 | """
28 | # -------------------------ENVIRONMENT SETUP--------------------------------- #
29 | # Import packages
30 | import os
31 | import warnings
32 | import glob
33 | import geopandas as gpd
34 | import viirs
35 |
36 | # Set options
37 | warnings.simplefilter("ignore")
38 |
39 | # -------------------------USER-DEFINED VARIABLES---------------------------- #
40 | # Set path to folder containing concateanted preprocessed VNP46A1 files
41 | geotiff_input_folder = os.path.join(
42 | "03-processed-data", "raster", "south-korea", "vnp46a1-grid-concatenated"
43 | )
44 |
45 | # Set path to output folder to store clipped, exported files
46 | geotiff_output_folder = os.path.join(
47 | "03-processed-data", "raster", "south-korea", "vnp46a1-clipped"
48 | )
49 |
50 | # Set path to shapefile for clipping GeoTiff files
51 | shapefile_path = os.path.join(
52 | "02-raw-data", "vector", "south-korea", "gadm36_south_korea.shp"
53 | )
54 |
55 | # Set country name for clipping (for file export name)
56 | clip_country = "South Korea"
57 |
58 | # -------------------------DATA PREPROCESSING-------------------------------- #
59 | # Clip images to bounding box and export clipped images to GeoTiff files
60 | geotiff_files = glob.glob(os.path.join(geotiff_input_folder, "*.tif"))
61 | clipped_files = 0
62 | total_files = len(geotiff_files)
63 | for file in geotiff_files:
64 | viirs.clip_vnp46a1(
65 | geotiff_path=file,
66 | clip_boundary=gpd.read_file(shapefile_path),
67 | clip_country=clip_country,
68 | output_folder=geotiff_output_folder,
69 | )
70 | clipped_files += 1
71 | print(f"Clipped file: {clipped_files} of {total_files}\n\n")
72 |
73 | # -------------------------SCRIPT COMPLETION--------------------------------- #
74 | print("\n")
75 | print("-" * (18 + len(os.path.basename(__file__))))
76 | print(f"Completed script: {os.path.basename(__file__)}")
77 | print("-" * (18 + len(os.path.basename(__file__))))
78 |
--------------------------------------------------------------------------------
/01-code-scripts/preprocess_vnp46a2.py:
--------------------------------------------------------------------------------
1 | """
2 | -------------------------------------------------------------------------------
3 | Preprocesses NASA VNP46A2 HDF5 files. This Notebook takes raw `.h5` files
4 | and completes the following preprocessing tasks:
5 |
6 | - Extracts radiance and quality flag bands;
7 | - Masks radiance for fill values, clouds, and sea water;
8 | - Fills masked data with NaN values;
9 | - Creates a georeferencing transform;
10 | - Creates export metadata; and,
11 | - Exports radiance data to GeoTiff format.
12 |
13 | This script uses the following folder structure:
14 |
15 | ├── 01-code-scripts
16 | │ ├── clip_vnp46a1.ipynb
17 | │ ├── clip_vnp46a1.py
18 | │ ├── concatenate_vnp46a1.ipynb
19 | │ ├── concatenate_vnp46a1.py
20 | │ ├── download_laads_order.ipynb
21 | │ ├── download_laads_order.py
22 | │ ├── preprocess_vnp46a1.ipynb
23 | │ ├── preprocess_vnp46a1.py
24 | │ ├── preprocess_vnp46a2.ipynb
25 | │ ├── preprocess_vnp46a2.py
26 | │ └── viirs.py
27 | ├── 02-raw-data
28 | ├── 03-processed-data
29 | ├── 04-graphics-outputs
30 | └── 05-papers-writings
31 |
32 | Running this script from the `01-code-scripts/` folder works by default. If
33 | the script runs from a different folder, the paths in the environment setup
34 | section may have to be changed.
35 | -------------------------------------------------------------------------------
36 | """
37 | # -------------------------ENVIRONMENT SETUP--------------------------------- #
38 | # Import packages
39 | import os
40 | import warnings
41 | import glob
42 | import viirs
43 |
44 | # Set options
45 | warnings.simplefilter("ignore")
46 |
47 | # -------------------------USER-DEFINED VARIABLES---------------------------- #
48 | # Define path to folder containing input VNP46A2 HDF5 files
49 | hdf5_input_folder = os.path.join(
50 | "02-raw-data", "hdf", "south-korea", "vnp46a2"
51 | )
52 |
53 | # Defne path to output folder to store exported GeoTiff files
54 | geotiff_output_folder = os.path.join(
55 | "03-processed-data", "raster", "south-korea", "vnp46a2-grid"
56 | )
57 |
58 | # -------------------------DATA PREPROCESSING-------------------------------- #
59 | # Preprocess each HDF5 file (extract bands, mask for fill values,
60 | # poor-quality, no retrieval, clouds, sea water, fill masked values
61 | # with NaN, export to GeoTiff)
62 | hdf5_files = glob.glob(os.path.join(hdf5_input_folder, "*.h5"))
63 | processed_files = 0
64 | total_files = len(hdf5_files)
65 | for hdf5 in hdf5_files:
66 | viirs.preprocess_vnp46a2(
67 | hdf5_path=hdf5, output_folder=geotiff_output_folder
68 | )
69 | processed_files += 1
70 | print(f"Preprocessed file: {processed_files} of {total_files}\n\n")
71 |
72 | # -------------------------SCRIPT COMPLETION--------------------------------- #
73 | print("\n")
74 | print("-" * (18 + len(os.path.basename(__file__))))
75 | print(f"Completed script: {os.path.basename(__file__)}")
76 | print("-" * (18 + len(os.path.basename(__file__))))
77 |
--------------------------------------------------------------------------------
/01-code-scripts/concatenate_vnp46a1.py:
--------------------------------------------------------------------------------
1 | """
2 | -------------------------------------------------------------------------------
3 | Concatenates already-preprocessed VNP46A1 GeoTiff files that are spatially
4 | adjacent in the longitudinal direction and exports single GeoTiff files
5 | containing the concatenated data. Used in cases when a study area bounding
6 | box intersects two VNP46A1 grid cells.
7 |
8 | This script uses the following folder structure:
9 |
10 | ├── 01-code-scripts
11 | │ ├── clip_vnp46a1.ipynb
12 | │ ├── clip_vnp46a1.py
13 | │ ├── concatenate_vnp46a1.ipynb
14 | │ ├── concatenate_vnp46a1.py
15 | │ ├── download_laads_order.ipynb
16 | │ ├── download_laads_order.py
17 | │ ├── preprocess_vnp46a1.ipynb
18 | │ ├── preprocess_vnp46a1.py
19 | │ └── viirs.py
20 | ├── 02-raw-data
21 | ├── 03-processed-data
22 | ├── 04-graphics-outputs
23 | └── 05-papers-writings
24 |
25 | Running the script from the 'nighttime-radiance/' folder works by default. If
26 | the script runs from a different folder, the paths in the environment setup
27 | section may have to be changed.
28 | -------------------------------------------------------------------------------
29 | """
30 | # -------------------------ENVIRONMENT SETUP--------------------------------- #
31 | # Import packages
32 | import os
33 | import warnings
34 | import glob
35 | import viirs
36 |
37 | # Set options
38 | warnings.simplefilter("ignore")
39 |
40 | # -------------------------USER-DEFINED VARIABLES---------------------------- #
41 | # Define path to folder containing preprocessed VNP46A1 GeoTiff files
42 | geotiff_input_folder = os.path.join(
43 | "03-processed-data", "raster", "south-korea", "vnp46a1-grid"
44 | )
45 |
46 | # Defne path to output folder to store concatenated, exported GeoTiff files
47 | geotiff_output_folder = os.path.join(
48 | "03-processed-data", "raster", "south-korea", "vnp46a1-grid-concatenated"
49 | )
50 |
51 | # Set start date and end date for processing
52 | start_date, end_date = "2020-04-20", "2020-04-29"
53 |
54 | # -------------------------DATA PREPROCESSING-------------------------------- #
55 | # Concatenate and export adjacent images that have the same acquisition date
56 | dates = viirs.create_date_range(start_date=start_date, end_date=end_date)
57 | geotiff_files = glob.glob(os.path.join(geotiff_input_folder, "*.tif"))
58 | concatenated_dates = 0
59 | skipped_dates = 0
60 | processed_dates = 0
61 | total_dates = len(dates)
62 | for date in dates:
63 | adjacent_images = []
64 | for file in geotiff_files:
65 | if date in viirs.extract_date_vnp46a1(geotiff_path=file):
66 | adjacent_images.append(file)
67 | adjacent_images_sorted = sorted(adjacent_images)
68 | if len(adjacent_images_sorted) == 2:
69 | viirs.concatenate_preprocessed_vnp46a1(
70 | west_geotiff_path=adjacent_images_sorted[0],
71 | east_geotiff_path=adjacent_images_sorted[1],
72 | output_folder=geotiff_output_folder,
73 | )
74 | concatenated_dates += 1
75 | else:
76 | skipped_dates += 1
77 | processed_dates += 1
78 | print(f"Processed dates: {processed_dates} of {total_dates}\n\n")
79 |
80 | print(
81 | f"Concatenated dates: {concatenated_dates}, Skipped dates: {skipped_dates}"
82 | )
83 |
84 | # -------------------------SCRIPT COMPLETION--------------------------------- #
85 | print("\n")
86 | print("-" * (18 + len(os.path.basename(__file__))))
87 | print(f"Completed script: {os.path.basename(__file__)}")
88 | print("-" * (18 + len(os.path.basename(__file__))))
89 |
--------------------------------------------------------------------------------
/01-code-scripts/download_laads_order.py:
--------------------------------------------------------------------------------
1 | """
2 | -------------------------------------------------------------------------------
3 | Downloads a LAADS Web Order. The script will download files to the directory
4 | specified in the 'download_directory' variable. Any folders not existing in
5 | the specified path will be created during the download.
6 |
7 | Level-1 and Atmosphere Archive & Distribution System (LAADS) home:
8 |
9 | - https://ladsweb.modaps.eosdis.nasa.gov/about/purpose/
10 |
11 | Files can be searched for and data orders can be placed here:
12 |
13 | - https://ladsweb.modaps.eosdis.nasa.gov/search/
14 |
15 | User accounts (needed to obtain a token) can be created here:
16 |
17 | - https://urs.earthdata.nasa.gov/
18 |
19 | Download parameters will accompany the LAADS order completion email:
20 |
21 | -e robots=off : Bypass the robots.txt file, to allow access to all files in
22 | the order
23 |
24 | -m : Enable mirroring options (-r -N -l inf) for recursive
25 | download, timestamping & unlimited depth
26 |
27 | -np : Do not recurse into the parent location
28 |
29 | -R .html,.tmp : Reject (do not save) any .html or .tmp files (which are
30 | extraneous to the order)
31 |
32 | -nH : Do not create a subdirectory with the Host name
33 | (ladsweb.modaps.eosdis.nasa.gov)
34 |
35 | --cut-dirs=3 : Do not create subdirectories for the first 3 levels
36 | (archive/orders/{ORDER_ID})
37 |
38 | --header : Adds the header with your appKey (which is encrypted via
39 | SSL)
40 |
41 | -P : Specify the directory prefix (may be relative or absolute)
42 |
43 | This script uses the following folder structure:
44 |
45 | ├── 01-code-scripts
46 | │ ├── clip_vnp46a1.ipynb
47 | │ ├── clip_vnp46a1.py
48 | │ ├── concatenate_vnp46a1.ipynb
49 | │ ├── concatenate_vnp46a1.py
50 | │ ├── download_laads_order.ipynb
51 | │ ├── download_laads_order.py
52 | │ ├── preprocess_vnp46a1.ipynb
53 | │ ├── preprocess_vnp46a1.py
54 | │ └── viirs.py
55 | ├── 02-raw-data
56 | ├── 03-processed-data
57 | ├── 04-graphics-outputs
58 | └── 05-papers-writings
59 |
60 | Running the script from the 'nighttime-radiance/' folder works by default. If
61 | the script runs from a different folder, the paths in the environment setup
62 | section may have to be changed.
63 | -------------------------------------------------------------------------------
64 | """
65 | # -------------------------ENVIRONMENT SETUP--------------------------------- #
66 | # Import packages
67 | import os
68 |
69 | # -------------------------USER-DEFINED VARIABLES---------------------------- #
70 | # Set LAADS token (specific to user account)
71 | token = os.environ.get("LAADS_TOKEN")
72 |
73 | # Set path to file containing order ID
74 | order_id_file_path = os.path.join("05-papers-writings", "laads-order.txt")
75 |
76 | # Set location for data downloaded (for test and real data)
77 | test_directory = "05-papers-writings"
78 | data_directory = os.path.join("02-raw-data", "hdf", "south-korea")
79 |
80 | # Test the script by downloading LAADS README
81 | # Set True for README download, False for LAADS data order download
82 | test_download = True
83 |
84 | # -------------------------DATA ACQUISITION---------------------------------- #
85 | # Get order ID from file
86 | with open(order_id_file_path, mode="r") as file:
87 | order_id = int(file.readline())
88 |
89 | # Set wget download string
90 | download_str = (
91 | (
92 | "wget -e robots=off -m -np -R .html,.tmp -nH --cut-dirs=3 "
93 | '"https://ladsweb.modaps.eosdis.nasa.gov/archive/README" '
94 | f'--header "Authorization: Bearer {token}" -P {test_directory}'
95 | )
96 | if test_download
97 | else (
98 | "wget -e robots=off -m -np -R .html,.tmp -nH --cut-dirs=3 "
99 | f'"https://ladsweb.modaps.eosdis.nasa.gov/archive/orders/{order_id}/" '
100 | f'--header "Authorization: Bearer {token}" -P {data_directory}'
101 | )
102 | )
103 |
104 | # Download data
105 | os.system(download_str)
106 |
107 | # -------------------------SCRIPT COMPLETION--------------------------------- #
108 | print("\n")
109 | print("-" * (18 + len(os.path.basename(__file__))))
110 | print(f"Completed script: {os.path.basename(__file__)}")
111 | print("-" * (18 + len(os.path.basename(__file__))))
112 |
--------------------------------------------------------------------------------
/01-code-scripts/clip_vnp46a1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction\n",
8 | "\n",
9 | "Clips already concatenated (and already-preprocessed) VNP46A1 GeoTiff files to a specified country bounding box.\n",
10 | "\n",
11 | "This Notebook uses the following folder structure:\n",
12 | "\n",
13 | "```\n",
14 | "├── 01-code-scripts\n",
15 | "│ ├── clip_vnp46a1.ipynb\n",
16 | "│ ├── clip_vnp46a1.py\n",
17 | "│ ├── concatenate_vnp46a1.ipynb\n",
18 | "│ ├── concatenate_vnp46a1.py\n",
19 | "│ ├── download_laads_order.ipynb\n",
20 | "│ ├── download_laads_order.py\n",
21 | "│ ├── preprocess_vnp46a1.ipynb\n",
22 | "│ ├── preprocess_vnp46a1.py\n",
23 | "│ └── viirs.py\n",
24 | "├── 02-raw-data\n",
25 | "├── 03-processed-data\n",
26 | "├── 04-graphics-outputs\n",
27 | "└── 05-papers-writings\n",
28 | "```\n",
29 | "\n",
30 | "Running the Notebook from the `01-code-scripts/` folder works by default. If the Notebook runs from a different folder, the paths in the environment setup section may have to be changed."
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "metadata": {},
36 | "source": [
37 | "# Environment Setup"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {
44 | "ExecuteTime": {
45 | "end_time": "2020-10-28T15:47:00.145720Z",
46 | "start_time": "2020-10-28T15:47:00.003086Z"
47 | }
48 | },
49 | "outputs": [],
50 | "source": [
51 | "# Load Notebook formatter\n",
52 | "%load_ext nb_black\n",
53 | "# %reload_ext nb_black"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {
60 | "ExecuteTime": {
61 | "end_time": "2020-10-28T15:47:01.401715Z",
62 | "start_time": "2020-10-28T15:47:00.147698Z"
63 | }
64 | },
65 | "outputs": [],
66 | "source": [
67 | "# Import packages\n",
68 | "import os\n",
69 | "import warnings\n",
70 | "import glob\n",
71 | "import geopandas as gpd\n",
72 | "import viirs"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": null,
78 | "metadata": {
79 | "ExecuteTime": {
80 | "end_time": "2020-10-28T15:47:01.415679Z",
81 | "start_time": "2020-10-28T15:47:01.404709Z"
82 | }
83 | },
84 | "outputs": [],
85 | "source": [
86 | "# Set options\n",
87 | "warnings.simplefilter(\"ignore\")"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": null,
93 | "metadata": {
94 | "ExecuteTime": {
95 | "end_time": "2020-10-28T15:47:01.426650Z",
96 | "start_time": "2020-10-28T15:47:01.417674Z"
97 | }
98 | },
99 | "outputs": [],
100 | "source": [
101 | "# Set working directory\n",
102 | "os.chdir(\"..\")"
103 | ]
104 | },
105 | {
106 | "cell_type": "markdown",
107 | "metadata": {},
108 | "source": [
109 | "# User-Defined Variables"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {
116 | "ExecuteTime": {
117 | "end_time": "2020-10-28T15:47:01.447593Z",
118 | "start_time": "2020-10-28T15:47:01.429641Z"
119 | }
120 | },
121 | "outputs": [],
122 | "source": [
   123 |     "# Set path to folder containing concatenated preprocessed VNP46A1 files\n",
124 | "geotiff_input_folder = os.path.join(\n",
125 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a1-grid-concatenated\"\n",
126 | ")\n",
127 | "\n",
128 | "# Set path to output folder to store clipped, exported files\n",
129 | "geotiff_output_folder = os.path.join(\n",
130 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a1-clipped\"\n",
131 | ")\n",
132 | "\n",
133 | "# Set path to shapefile for clipping GeoTiff files\n",
134 | "shapefile_path = os.path.join(\n",
135 | " \"02-raw-data\", \"vector\", \"south-korea\", \"gadm36_south_korea.shp\"\n",
136 | ")\n",
137 | "\n",
138 | "# Set country name for clipping (for file export name)\n",
139 | "clip_country = \"South Korea\""
140 | ]
141 | },
142 | {
143 | "cell_type": "markdown",
144 | "metadata": {},
145 | "source": [
146 | "# Data Preprocessing"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "metadata": {
153 | "ExecuteTime": {
154 | "end_time": "2020-10-28T15:47:03.366919Z",
155 | "start_time": "2020-10-28T15:47:01.449587Z"
156 | }
157 | },
158 | "outputs": [],
159 | "source": [
160 | "# Clip images to bounding box and export clipped images to GeoTiff files\n",
161 | "geotiff_files = glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))\n",
162 | "clipped_files = 0\n",
163 | "total_files = len(geotiff_files)\n",
164 | "for file in geotiff_files:\n",
165 | " viirs.clip_vnp46a1(\n",
166 | " geotiff_path=file,\n",
167 | " clip_boundary=gpd.read_file(shapefile_path),\n",
168 | " clip_country=clip_country,\n",
169 | " output_folder=geotiff_output_folder,\n",
170 | " )\n",
171 | " clipped_files += 1\n",
172 | " print(f\"Clipped file: {clipped_files} of {total_files}\\n\\n\")"
173 | ]
174 | }
175 | ],
176 | "metadata": {
177 | "hide_input": false,
178 | "kernelspec": {
179 | "display_name": "Python 3",
180 | "language": "python",
181 | "name": "python3"
182 | },
183 | "language_info": {
184 | "codemirror_mode": {
185 | "name": "ipython",
186 | "version": 3
187 | },
188 | "file_extension": ".py",
189 | "mimetype": "text/x-python",
190 | "name": "python",
191 | "nbconvert_exporter": "python",
192 | "pygments_lexer": "ipython3",
193 | "version": "3.7.6"
194 | },
195 | "toc": {
196 | "base_numbering": 1,
197 | "nav_menu": {},
198 | "number_sections": true,
199 | "sideBar": true,
200 | "skip_h1_title": false,
201 | "title_cell": "Table of Contents",
202 | "title_sidebar": "Contents",
203 | "toc_cell": false,
204 | "toc_position": {
205 | "height": "calc(100% - 180px)",
206 | "left": "10px",
207 | "top": "150px",
208 | "width": "307.2px"
209 | },
210 | "toc_section_display": true,
211 | "toc_window_display": false
212 | },
213 | "varInspector": {
214 | "cols": {
215 | "lenName": 16,
216 | "lenType": 16,
217 | "lenVar": 40
218 | },
219 | "kernels_config": {
220 | "python": {
221 | "delete_cmd_postfix": "",
222 | "delete_cmd_prefix": "del ",
223 | "library": "var_list.py",
224 | "varRefreshCmd": "print(var_dic_list())"
225 | },
226 | "r": {
227 | "delete_cmd_postfix": ") ",
228 | "delete_cmd_prefix": "rm(",
229 | "library": "var_list.r",
230 | "varRefreshCmd": "cat(var_dic_list()) "
231 | }
232 | },
233 | "types_to_exclude": [
234 | "module",
235 | "function",
236 | "builtin_function_or_method",
237 | "instance",
238 | "_Feature"
239 | ],
240 | "window_display": false
241 | }
242 | },
243 | "nbformat": 4,
244 | "nbformat_minor": 4
245 | }
246 |
--------------------------------------------------------------------------------
/01-code-scripts/concatenate_vnp46a1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction\n",
8 | "\n",
     9 |     "Clips already concatenated (and already-preprocessed) VNP46A1 GeoTiff files to a specified country bounding box.\n",
10 | "\n",
11 | "This Notebook uses the following folder structure:\n",
12 | "\n",
13 | "```\n",
14 | "├── 01-code-scripts\n",
15 | "│ ├── clip_vnp46a1.ipynb\n",
16 | "│ ├── clip_vnp46a1.py\n",
17 | "│ ├── concatenate_vnp46a1.ipynb\n",
18 | "│ ├── concatenate_vnp46a1.py\n",
19 | "│ ├── download_laads_order.ipynb\n",
20 | "│ ├── download_laads_order.py\n",
21 | "│ ├── preprocess_vnp46a1.ipynb\n",
22 | "│ ├── preprocess_vnp46a1.py\n",
23 | "│ └── viirs.py\n",
24 | "├── 02-raw-data\n",
25 | "├── 03-processed-data\n",
26 | "├── 04-graphics-outputs\n",
27 | "└── 05-papers-writings\n",
28 | "```\n",
29 | "\n",
30 | "Running the Notebook from the `01-code-scripts/` folder works by default. If the Notebook runs from a different folder, the paths in the environment setup section may have to be changed.\n",
31 | "\n",
    32 |     "This notebook uses files that have already been preprocessed and saved to GeoTiff files."
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "# Environment Setup"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": null,
45 | "metadata": {
46 | "ExecuteTime": {
47 | "end_time": "2020-10-28T15:46:27.953816Z",
48 | "start_time": "2020-10-28T15:46:27.798980Z"
49 | }
50 | },
51 | "outputs": [],
52 | "source": [
53 | "# Load Notebook formatter\n",
54 | "%load_ext nb_black\n",
55 | "# %reload_ext nb_black"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {
62 | "ExecuteTime": {
63 | "end_time": "2020-10-28T15:46:29.204612Z",
64 | "start_time": "2020-10-28T15:46:27.956803Z"
65 | }
66 | },
67 | "outputs": [],
68 | "source": [
69 | "# Import packages\n",
70 | "import os\n",
71 | "import warnings\n",
72 | "import glob\n",
73 | "import viirs"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {
80 | "ExecuteTime": {
81 | "end_time": "2020-10-28T15:46:29.217552Z",
82 | "start_time": "2020-10-28T15:46:29.207583Z"
83 | }
84 | },
85 | "outputs": [],
86 | "source": [
87 | "# Set options\n",
88 | "warnings.simplefilter(\"ignore\")"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": null,
94 | "metadata": {
95 | "ExecuteTime": {
96 | "end_time": "2020-10-28T15:46:29.235511Z",
97 | "start_time": "2020-10-28T15:46:29.219535Z"
98 | }
99 | },
100 | "outputs": [],
101 | "source": [
102 | "# Set working directory\n",
103 | "os.chdir(\"..\")"
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "metadata": {},
109 | "source": [
110 | "# User-Defined Variables"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": null,
116 | "metadata": {
117 | "ExecuteTime": {
118 | "end_time": "2020-10-28T15:46:29.252449Z",
119 | "start_time": "2020-10-28T15:46:29.237487Z"
120 | }
121 | },
122 | "outputs": [],
123 | "source": [
124 | "# Define path to folder containing preprocessed VNP46A1 GeoTiff files\n",
125 | "geotiff_input_folder = os.path.join(\n",
126 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a1-grid\"\n",
127 | ")\n",
128 | "\n",
   129 |     "# Define path to output folder to store concatenated, exported GeoTiff files\n",
130 | "geotiff_output_folder = os.path.join(\n",
131 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a1-grid-concatenated\"\n",
132 | ")\n",
133 | "\n",
134 | "# Set start date and end date for processing\n",
135 | "start_date, end_date = \"2020-01-01\", \"2020-04-09\""
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 | "# Data Preprocessing"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {
149 | "ExecuteTime": {
150 | "end_time": "2020-10-28T15:46:31.895246Z",
151 | "start_time": "2020-10-28T15:46:29.255439Z"
152 | }
153 | },
154 | "outputs": [],
155 | "source": [
156 | "# Concatenate and export adjacent images that have the same acquisition date\n",
157 | "dates = viirs.create_date_range(start_date=start_date, end_date=end_date)\n",
158 | "geotiff_files = glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))\n",
159 | "concatenated_dates = 0\n",
160 | "skipped_dates = 0\n",
161 | "processed_dates = 0\n",
162 | "total_dates = len(dates)\n",
163 | "for date in dates:\n",
164 | " adjacent_images = []\n",
165 | " for file in geotiff_files:\n",
166 | " if date in viirs.extract_date_vnp46a1(geotiff_path=file):\n",
167 | " adjacent_images.append(file)\n",
168 | " adjacent_images_sorted = sorted(adjacent_images)\n",
169 | " if len(adjacent_images_sorted) == 2:\n",
170 | " viirs.concatenate_preprocessed_vnp46a1(\n",
171 | " west_geotiff_path=adjacent_images_sorted[0],\n",
172 | " east_geotiff_path=adjacent_images_sorted[1],\n",
173 | " output_folder=geotiff_output_folder,\n",
174 | " )\n",
175 | " concatenated_dates += 1\n",
176 | " else:\n",
177 | " skipped_dates += 1\n",
178 | " processed_dates += 1\n",
179 | " print(f\"Processed dates: {processed_dates} of {total_dates}\\n\\n\")\n",
180 | "\n",
181 | "print(\n",
182 | " f\"Concatenated dates: {concatenated_dates}, Skipped dates: {skipped_dates}\"\n",
183 | ")"
184 | ]
185 | }
186 | ],
187 | "metadata": {
188 | "hide_input": false,
189 | "kernelspec": {
190 | "display_name": "Python 3",
191 | "language": "python",
192 | "name": "python3"
193 | },
194 | "language_info": {
195 | "codemirror_mode": {
196 | "name": "ipython",
197 | "version": 3
198 | },
199 | "file_extension": ".py",
200 | "mimetype": "text/x-python",
201 | "name": "python",
202 | "nbconvert_exporter": "python",
203 | "pygments_lexer": "ipython3",
204 | "version": "3.7.6"
205 | },
206 | "toc": {
207 | "base_numbering": 1,
208 | "nav_menu": {},
209 | "number_sections": true,
210 | "sideBar": true,
211 | "skip_h1_title": false,
212 | "title_cell": "Table of Contents",
213 | "title_sidebar": "Contents",
214 | "toc_cell": false,
215 | "toc_position": {
216 | "height": "calc(100% - 180px)",
217 | "left": "10px",
218 | "top": "150px",
219 | "width": "307.2px"
220 | },
221 | "toc_section_display": true,
222 | "toc_window_display": false
223 | },
224 | "varInspector": {
225 | "cols": {
226 | "lenName": 16,
227 | "lenType": 16,
228 | "lenVar": 40
229 | },
230 | "kernels_config": {
231 | "python": {
232 | "delete_cmd_postfix": "",
233 | "delete_cmd_prefix": "del ",
234 | "library": "var_list.py",
235 | "varRefreshCmd": "print(var_dic_list())"
236 | },
237 | "r": {
238 | "delete_cmd_postfix": ") ",
239 | "delete_cmd_prefix": "rm(",
240 | "library": "var_list.r",
241 | "varRefreshCmd": "cat(var_dic_list()) "
242 | }
243 | },
244 | "types_to_exclude": [
245 | "module",
246 | "function",
247 | "builtin_function_or_method",
248 | "instance",
249 | "_Feature"
250 | ],
251 | "window_display": false
252 | }
253 | },
254 | "nbformat": 4,
255 | "nbformat_minor": 4
256 | }
257 |
--------------------------------------------------------------------------------
/01-code-scripts/download_laads_order.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction \n",
8 | "\n",
9 | "Downloads a LAADS Web Order. The script will download files to the directory specified in the `download_directory` variable. Any folders not existing in the specified path will be created during the download.\n",
10 | "\n",
11 | "Level-1 and Atmosphere Archive & Distribution System (LAADS) home:\n",
12 | "\n",
13 | "* https://ladsweb.modaps.eosdis.nasa.gov/about/purpose/\n",
14 | "\n",
15 | "Files can be searched for and data orders can be placed here:\n",
16 | "\n",
17 | "* https://ladsweb.modaps.eosdis.nasa.gov/search/\n",
18 | "\n",
19 | "User accounts (needed to obtain a token) can be created here:\n",
20 | "\n",
21 | "* https://urs.earthdata.nasa.gov/\n",
22 | "\n",
23 | "Download parameters will accompany the LAADS order completion email:\n",
24 | "\n",
25 | "* `-e robots=off` : Bypass the robots.txt file, to allow access to all files in the order\n",
26 | "\n",
27 | "\n",
28 | "* `-m` : Enable mirroring options (-r -N -l inf) for recursive download, timestamping & unlimited depth\n",
29 | "\n",
30 | "\n",
31 | "* `-np` : Do not recurse into the parent location\n",
32 | "\n",
33 | "\n",
34 | "* `-R .html,.tmp` : Reject (do not save) any .html or .tmp files (which are extraneous to the order)\n",
35 | "\n",
36 | "\n",
37 | "* `-nH` : Do not create a subdirectory with the Host name (ladsweb.modaps.eosdis.nasa.gov)\n",
38 | "\n",
39 | "\n",
40 | "* `--cut-dirs=3` : Do not create subdirectories for the first 3 levels (archive/orders/{ORDER_ID})\n",
41 | "\n",
42 | "\n",
43 | "* `--header` : Adds the header with your appKey (which is encrypted via SSL)\n",
44 | "\n",
45 | "\n",
46 | "* `-P` : Specify the directory prefix (may be relative or absolute)\n",
47 | "\n",
48 | "This Notebook uses the following folder structure:\n",
49 | "\n",
50 | "```\n",
51 | "├── 01-code-scripts\n",
52 | "│ ├── clip_vnp46a1.ipynb\n",
53 | "│ ├── clip_vnp46a1.py\n",
54 | "│ ├── concatenate_vnp46a1.ipynb\n",
55 | "│ ├── concatenate_vnp46a1.py\n",
56 | "│ ├── download_laads_order.ipynb\n",
57 | "│ ├── download_laads_order.py\n",
58 | "│ ├── preprocess_vnp46a1.ipynb\n",
59 | "│ ├── preprocess_vnp46a1.py\n",
60 | "│ └── viirs.py\n",
61 | "├── 02-raw-data\n",
62 | "├── 03-processed-data\n",
63 | "├── 04-graphics-outputs\n",
64 | "└── 05-papers-writings\n",
65 | "```\n",
66 | "\n",
67 | "Running the Notebook from the `01-code-scripts/` folder works by default. If the Notebook runs from a different folder, the paths in the environment setup section may have to be changed."
68 | ]
69 | },
70 | {
71 | "cell_type": "markdown",
72 | "metadata": {},
73 | "source": [
74 | "# Environment Setup"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {
81 | "ExecuteTime": {
82 | "end_time": "2020-10-28T17:21:21.900127Z",
83 | "start_time": "2020-10-28T17:21:21.755509Z"
84 | }
85 | },
86 | "outputs": [],
87 | "source": [
88 | "# Load Notebook formatter\n",
89 | "%load_ext nb_black\n",
90 | "# %reload_ext nb_black"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": null,
96 | "metadata": {
97 | "ExecuteTime": {
98 | "end_time": "2020-10-28T17:21:21.994873Z",
99 | "start_time": "2020-10-28T17:21:21.987892Z"
100 | }
101 | },
102 | "outputs": [],
103 | "source": [
104 | "# Import packages\n",
105 | "import os"
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": null,
111 | "metadata": {
112 | "ExecuteTime": {
113 | "end_time": "2020-10-28T17:21:25.699687Z",
114 | "start_time": "2020-10-28T17:21:25.688718Z"
115 | }
116 | },
117 | "outputs": [],
118 | "source": [
119 | "# Set working directory\n",
120 | "os.chdir(\"..\")"
121 | ]
122 | },
123 | {
124 | "cell_type": "markdown",
125 | "metadata": {},
126 | "source": [
127 | "# User-Defined Variables"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "metadata": {
134 | "ExecuteTime": {
135 | "end_time": "2020-10-28T17:21:33.200338Z",
136 | "start_time": "2020-10-28T17:21:33.185379Z"
137 | }
138 | },
139 | "outputs": [],
140 | "source": [
141 | "# Set LAADS token (specific to user account)\n",
142 | "token = os.environ.get(\"LAADS_TOKEN\")\n",
143 | "\n",
144 | "# Set path to file containing order ID\n",
145 | "order_id_file_path = os.path.join(\"05-papers-writings\", \"laads-order.txt\")\n",
146 | "\n",
147 | "# Set location for data downloaded (for test and real data)\n",
148 | "test_directory = \"05-papers-writings\"\n",
149 | "data_directory = os.path.join(\"02-raw-data\", \"hdf\", \"south-korea\")\n",
150 | "\n",
151 | "# Test the script by downloading LAADS README\n",
152 | "# Set True for README download, False for LAADS data order download\n",
153 | "test_download = True"
154 | ]
155 | },
156 | {
157 | "cell_type": "markdown",
158 | "metadata": {},
159 | "source": [
   160 |     "# Data Acquisition"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "metadata": {
167 | "ExecuteTime": {
168 | "end_time": "2020-10-28T17:26:07.211661Z",
169 | "start_time": "2020-10-28T17:26:06.370445Z"
170 | }
171 | },
172 | "outputs": [],
173 | "source": [
174 | "# Get order ID from file\n",
175 | "with open(order_id_file_path, mode=\"r\") as file:\n",
176 | " order_id = int(file.readline())\n",
177 | "\n",
178 | "# Set wget download string\n",
179 | "download_str = (\n",
180 | " (\n",
181 | " \"wget -e robots=off -m -np -R .html,.tmp -nH --cut-dirs=3 \"\n",
182 | " '\"https://ladsweb.modaps.eosdis.nasa.gov/archive/README\" '\n",
183 | " f'--header \"Authorization: Bearer {token}\" -P {test_directory}'\n",
184 | " )\n",
185 | " if test_download\n",
186 | " else (\n",
187 | " \"wget -e robots=off -m -np -R .html,.tmp -nH --cut-dirs=3 \"\n",
188 | " f'\"https://ladsweb.modaps.eosdis.nasa.gov/archive/orders/{order_id}/\" '\n",
189 | " f'--header \"Authorization: Bearer {token}\" -P {data_directory}'\n",
190 | " )\n",
191 | ")\n",
192 | "\n",
193 | "# Download data\n",
194 | "result = os.system(download_str)\n",
195 | "if result == 0:\n",
196 | " print(\"Downloaded data.\")\n",
197 | "else:\n",
198 | " print(\"Failed to download data.\")"
199 | ]
200 | }
201 | ],
202 | "metadata": {
203 | "hide_input": false,
204 | "kernelspec": {
205 | "display_name": "Python 3",
206 | "language": "python",
207 | "name": "python3"
208 | },
209 | "language_info": {
210 | "codemirror_mode": {
211 | "name": "ipython",
212 | "version": 3
213 | },
214 | "file_extension": ".py",
215 | "mimetype": "text/x-python",
216 | "name": "python",
217 | "nbconvert_exporter": "python",
218 | "pygments_lexer": "ipython3",
219 | "version": "3.7.6"
220 | },
221 | "toc": {
222 | "base_numbering": 1,
223 | "nav_menu": {},
224 | "number_sections": true,
225 | "sideBar": true,
226 | "skip_h1_title": false,
227 | "title_cell": "Table of Contents",
228 | "title_sidebar": "Contents",
229 | "toc_cell": false,
230 | "toc_position": {},
231 | "toc_section_display": true,
232 | "toc_window_display": true
233 | },
234 | "varInspector": {
235 | "cols": {
236 | "lenName": 16,
237 | "lenType": 16,
238 | "lenVar": 40
239 | },
240 | "kernels_config": {
241 | "python": {
242 | "delete_cmd_postfix": "",
243 | "delete_cmd_prefix": "del ",
244 | "library": "var_list.py",
245 | "varRefreshCmd": "print(var_dic_list())"
246 | },
247 | "r": {
248 | "delete_cmd_postfix": ") ",
249 | "delete_cmd_prefix": "rm(",
250 | "library": "var_list.r",
251 | "varRefreshCmd": "cat(var_dic_list()) "
252 | }
253 | },
254 | "types_to_exclude": [
255 | "module",
256 | "function",
257 | "builtin_function_or_method",
258 | "instance",
259 | "_Feature"
260 | ],
261 | "window_display": false
262 | }
263 | },
264 | "nbformat": 4,
265 | "nbformat_minor": 4
266 | }
267 |
--------------------------------------------------------------------------------
/01-code-scripts/preprocess_vnp46a2.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction\n",
8 | "\n",
9 | "Preprocesses NASA VNP46A2 HDF5 files. This Notebook takes raw `.h5` files and completes the following preprocessing tasks:\n",
10 | "\n",
11 | "* Extracts radiance and quality flag bands;\n",
12 | "* Masks radiance for fill values, clouds, and sea water;\n",
13 | "* Fills masked data with NaN values;\n",
14 | "* Creates a georeferencing transform;\n",
15 | "* Creates export metadata; and,\n",
16 | "* Exports radiance data to GeoTiff format.\n",
17 | "\n",
18 | "This Notebook uses the following folder structure:\n",
19 | "\n",
20 | "```\n",
21 | "├── 01-code-scripts\n",
22 | "│ ├── clip_vnp46a1.ipynb\n",
23 | "│ ├── clip_vnp46a1.py\n",
24 | "│ ├── concatenate_vnp46a1.ipynb\n",
25 | "│ ├── concatenate_vnp46a1.py\n",
26 | "│ ├── download_laads_order.ipynb\n",
27 | "│ ├── download_laads_order.py\n",
28 | "│ ├── preprocess_vnp46a1.ipynb\n",
29 | "│ ├── preprocess_vnp46a1.py\n",
30 | "│ ├── preprocess_vnp46a2.ipynb\n",
31 | "│ ├── preprocess_vnp46a2.py\n",
32 | "│ └── viirs.py\n",
33 | "├── 02-raw-data\n",
34 | "├── 03-processed-data\n",
35 | "├── 04-graphics-outputs\n",
36 | "└── 05-papers-writings\n",
37 | "```\n",
38 | "\n",
39 | "Running the Notebook from the `01-code-scripts/` folder works by default. If the Notebook runs from a different folder, the paths in the environment setup section may have to be changed."
40 | ]
41 | },
42 | {
43 | "cell_type": "markdown",
44 | "metadata": {},
45 | "source": [
46 | "# Environment Setup"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "metadata": {
53 | "ExecuteTime": {
54 | "end_time": "2020-12-03T15:03:41.974665Z",
55 | "start_time": "2020-12-03T15:03:41.768217Z"
56 | }
57 | },
58 | "outputs": [],
59 | "source": [
60 | "# Load Notebook formatter\n",
61 | "%load_ext nb_black\n",
62 | "# %reload_ext nb_black"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": null,
68 | "metadata": {
69 | "ExecuteTime": {
70 | "end_time": "2020-12-03T15:03:46.104743Z",
71 | "start_time": "2020-12-03T15:03:41.977658Z"
72 | }
73 | },
74 | "outputs": [],
75 | "source": [
76 | "# Import packages\n",
77 | "import os\n",
78 | "import warnings\n",
79 | "import glob\n",
80 | "import viirs"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": null,
86 | "metadata": {
87 | "ExecuteTime": {
88 | "end_time": "2020-12-03T15:03:46.116722Z",
89 | "start_time": "2020-12-03T15:03:46.106714Z"
90 | }
91 | },
92 | "outputs": [],
93 | "source": [
94 | "# Set options\n",
95 | "warnings.simplefilter(\"ignore\")"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": null,
101 | "metadata": {
102 | "ExecuteTime": {
103 | "end_time": "2020-12-03T15:03:46.128655Z",
104 | "start_time": "2020-12-03T15:03:46.119717Z"
105 | }
106 | },
107 | "outputs": [],
108 | "source": [
109 | "# Set working directory\n",
110 | "os.chdir(\"..\")"
111 | ]
112 | },
113 | {
114 | "cell_type": "markdown",
115 | "metadata": {},
116 | "source": [
117 | "# User-Defined Variables"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": null,
123 | "metadata": {
124 | "ExecuteTime": {
125 | "end_time": "2020-12-03T15:03:46.143647Z",
126 | "start_time": "2020-12-03T15:03:46.130652Z"
127 | }
128 | },
129 | "outputs": [],
130 | "source": [
131 | "# Define path to folder containing input VNP46A2 HDF5 files\n",
132 | "hdf5_input_folder = os.path.join(\n",
133 | " \"02-raw-data\", \"hdf\", \"south-korea\", \"vnp46a2\"\n",
134 | ")\n",
135 | "\n",
   136 |     "# Define path to output folder to store exported GeoTiff files\n",
137 | "geotiff_output_folder = os.path.join(\n",
138 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a2-grid\"\n",
139 | ")"
140 | ]
141 | },
142 | {
143 | "cell_type": "markdown",
144 | "metadata": {},
145 | "source": [
146 | "# Data Preprocessing"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "metadata": {
153 | "ExecuteTime": {
154 | "end_time": "2020-12-03T15:06:08.600412Z",
155 | "start_time": "2020-12-03T15:03:46.145611Z"
156 | },
157 | "scrolled": true
158 | },
159 | "outputs": [],
160 | "source": [
161 | "# Preprocess each HDF5 file (extract bands, mask for fill values,\n",
162 | "# poor-quality, no retrieval, clouds, sea water, fill masked values\n",
163 | "# with NaN, export to GeoTiff)\n",
164 | "hdf5_files = glob.glob(os.path.join(hdf5_input_folder, \"*.h5\"))\n",
165 | "processed_files = 0\n",
166 | "total_files = len(hdf5_files)\n",
167 | "for hdf5 in hdf5_files:\n",
168 | " viirs.preprocess_vnp46a2(\n",
169 | " hdf5_path=hdf5, output_folder=geotiff_output_folder\n",
170 | " )\n",
171 | " processed_files += 1\n",
172 | " print(f\"Preprocessed file: {processed_files} of {total_files}\\n\\n\")"
173 | ]
174 | },
175 | {
176 | "cell_type": "markdown",
177 | "metadata": {},
178 | "source": [
179 | "# Notes and References"
180 | ]
181 | },
182 | {
183 | "cell_type": "markdown",
184 | "metadata": {},
185 | "source": [
186 | "**File download:**\n",
187 | "\n",
188 | "VNP46A2 HDF5 files were first downloaded using the `01-code-scripts/download_laads_order.py` script. This script requires a user to have a valid [NASA Earthdata](https://urs.earthdata.nasa.gov/) account and have placed an order for files.\n",
189 | "\n",
190 | "
\n",
191 | "\n",
192 | "**Useful links:**\n",
193 | "\n",
194 | "* [VNP46A2 Product Information](https://ladsweb.modaps.eosdis.nasa.gov/missions-and-measurements/products/VNP46A2/)\n",
195 | "* [VIIRS Black Marble User Guide](https://viirsland.gsfc.nasa.gov/PDF/VIIRS_BlackMarble_UserGuide.pdf)\n",
196 | "* [NASA Earthdata Scripts](https://git.earthdata.nasa.gov/projects/LPDUR/repos/nasa-viirs/browse/scripts)\n",
197 | "\n",
198 | "
\n",
199 | "\n",
200 | "**File naming convention:**\n",
201 | "\n",
202 | "VNP46A2.AYYYYDDD.hXXvYY.CCC.YYYYDDDHHMMSS.h5\n",
203 | "\n",
204 | "* VNP46A2 = Short-name\n",
205 | "* AYYYYDDD = Acquisition Year and Day of Year\n",
206 | "* hXXvYY = Tile Identifier (horizontalXXverticalYY)\n",
207 | "* CCC = Collection Version\n",
208 | "* YYYYDDDHHMMSS = Production Date – Year, Day, Hour, Minute, Second\n",
209 | "* h5 = Data Format (HDF5)\n",
210 | "\n",
211 | "
\n",
212 | "\n",
213 | "**Bands of interest (User Guide pp. 12-13):**\n",
214 | "\n",
215 | "| Scientific Dataset | Units | Description | Bit Types | Fill Value | Valid Range | Scale Factor | Offset |\n",
216 | "|:-----------------------------|:-------------------|:------------------------|:-------------------------|:------------|:-------------|:--------------|:--------|\n",
217 | "| DNB_BRDF-Corrected_NTL | nW_per_cm2_per_sr | BRDF corrected DNB NTL | 16-bit unsigned integer | 65,535 | 0 - 65,534 | 0.1 | 0.0 |\n",
218 | "| Mandatory Quality Flag | Unitless | Mandatory quality flag | 8-bit unsigned integer | 255 | 0 - 3 | N/A | N/A |\n",
219 | "| QF_Cloud_Mask | Unitless | Quality flag for cloud mask | 16-bit unsigned integer | 65,535 | 0 - 65,534 | N/A | N/A |\n",
220 | "| Snow_Flag | Unitless | Flag for snow cover | 8-bit unsigned integer | 255 | 0 - 1 | N/A | N/A |\n",
221 | "\n",
222 | "
\n",
223 | "\n",
224 | "**Masking Criteria/Workflow:**\n",
225 | "\n",
226 | "* mask where `dnb_brdf_corrected_ntl == 65535` (Fill Value)\n",
227 | "* mask where `mandatory_quality_flag == 2` (Poor Quality)\n",
228 | "* mask where `mandatory_quality_flag == 255` (No Retrieval)\n",
229 | "* mask where `cloud_detection_bitmask == 2` (Probably Cloudy)\n",
230 | "* mask where `cloud_detection_bitmask == 3` (Confident Cloudy)\n",
231 | "* mask where `land_water_bitmask == 3` (Sea Water)\n",
232 | "\n",
233 | "
\n",
234 | "\n",
235 | "**Preprocessing Workflow:**\n",
236 | "\n",
237 | "* Extract bands\n",
238 | "* Apply scale factor\n",
239 | "* Mask for fill values\n",
240 | "* Mask for poor quality and no retrieval\n",
241 | "* Mask for clouds\n",
242 | "* Mask for sea water\n",
243 | "* Fill masked values\n",
244 | "* Create transform\n",
245 | "* Create metadata\n",
246 | "* Export array to GeoTiff\n",
247 | "\n",
248 | "
\n",
249 | "\n",
250 | "**QF_Cloud_Mask (base-10) (Adapted from User Guide p. 14):**\n",
251 | "\n",
252 | "| Bit | Flag Description Key | Interpretation |\n",
253 | "|:-----|:-----------------------------------------------|:-------------------------------------------------------------------------------------------|\n",
254 | "| 0 | Day/Night | 0 = Night
1 = Day |\n",
255 | "| 1-3 | Land/Water Background | 0 = Land & Desert
1 = Land no Desert
2 = Inland Water
3 = Sea Water
5 = Coastal |\n",
256 | "| 4-5 | Cloud Mask Quality | 0 = Poor
1 = Low
2 = Medium
3 = High |\n",
257 | "| 6-7 | Cloud Detection Results & Confidence Indicator | 0 = Confident Clear
1 = Probably Clear
2 = Probably Cloudy
3 = Confident Cloudy |\n",
258 | "| 8 | Shadow Detected | 0 = No
1 = Yes |\n",
259 | "| 9 | Cirrus Detection (IR) (BTM15 –BTM16) | 0 = No Cloud
1 = Cloud |\n",
260 | "| 10 | Snow/Ice Surface | 0 = No Snow/Ice
1 = Snow/Ice |\n",
261 | "\n",
262 | "
\n",
263 | "\n",
264 | "**Mandatory_Cloud_Flag (base-10) (User Guide p. 16):**\n",
265 | "\n",
266 | "| Value | Retrieval Quality | Algorithm Instance |\n",
267 | "|:-------|:-------------------|:-------------------------------------------------------------------------|\n",
268 | "| 0 | High-quality | Main algorithm (Persistent nighttime lights) |\n",
269 | "| 1 | High-quality | Main algorithm (Ephemeral Nighttime Lights) |\n",
270 | "| 2 | Poor-quality | Main algorithm (Outlier, potential cloud contamination or other issues) |\n",
271 | "| 255 | No retrieval | Fill value |\n",
272 | "\n",
273 | "\n",
274 | "\n",
275 | "\n",
276 | "**Snow_Flag (base-10) (User Guide p. 16)**:\n",
277 | "\n",
278 | "| Flag Description Key | Value | Interpretation |\n",
279 | "|:----------------------|:---------------|:---------------------------------------|\n",
280 | "| Snow/Ice Surface | 0
1
255 | No Snow/Ice
Snow/Ice
Fill Value |"
281 | ]
282 | }
283 | ],
284 | "metadata": {
285 | "hide_input": false,
286 | "jupytext": {
287 | "encoding": "# -*- coding: utf-8 -*-"
288 | },
289 | "kernelspec": {
290 | "display_name": "Python 3",
291 | "language": "python",
292 | "name": "python3"
293 | },
294 | "language_info": {
295 | "codemirror_mode": {
296 | "name": "ipython",
297 | "version": 3
298 | },
299 | "file_extension": ".py",
300 | "mimetype": "text/x-python",
301 | "name": "python",
302 | "nbconvert_exporter": "python",
303 | "pygments_lexer": "ipython3",
304 | "version": "3.7.6"
305 | },
306 | "toc": {
307 | "base_numbering": 1,
308 | "nav_menu": {},
309 | "number_sections": true,
310 | "sideBar": true,
311 | "skip_h1_title": false,
312 | "title_cell": "Table of Contents",
313 | "title_sidebar": "Contents",
314 | "toc_cell": false,
315 | "toc_position": {
316 | "height": "calc(100% - 180px)",
317 | "left": "10px",
318 | "top": "150px",
319 | "width": "307.2px"
320 | },
321 | "toc_section_display": true,
322 | "toc_window_display": false
323 | },
324 | "varInspector": {
325 | "cols": {
326 | "lenName": 16,
327 | "lenType": 16,
328 | "lenVar": 40
329 | },
330 | "kernels_config": {
331 | "python": {
332 | "delete_cmd_postfix": "",
333 | "delete_cmd_prefix": "del ",
334 | "library": "var_list.py",
335 | "varRefreshCmd": "print(var_dic_list())"
336 | },
337 | "r": {
338 | "delete_cmd_postfix": ") ",
339 | "delete_cmd_prefix": "rm(",
340 | "library": "var_list.r",
341 | "varRefreshCmd": "cat(var_dic_list()) "
342 | }
343 | },
344 | "types_to_exclude": [
345 | "module",
346 | "function",
347 | "builtin_function_or_method",
348 | "instance",
349 | "_Feature"
350 | ],
351 | "window_display": false
352 | }
353 | },
354 | "nbformat": 4,
355 | "nbformat_minor": 4
356 | }
357 |
--------------------------------------------------------------------------------
/01-code-scripts/preprocess_vnp46a1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction\n",
8 | "\n",
9 | "Preprocesses NASA VNP46A1 HDF5 files. This Notebook takes raw `.h5` files and completes the following preprocessing tasks:\n",
10 | "\n",
11 | "* Extracts radiance and quality flag bands;\n",
12 | "* Masks radiance for fill values, clouds, and sensor problems;\n",
13 | "* Fills masked data with NaN values;\n",
14 | "* Creates a georeferencing transform;\n",
15 | "* Creates export metadata; and,\n",
16 | "* Exports radiance data to GeoTiff format.\n",
17 | "\n",
18 | "This Notebook uses the following folder structure:\n",
19 | "\n",
20 | "```\n",
21 | "├── 01-code-scripts\n",
22 | "│ ├── clip_vnp46a1.ipynb\n",
23 | "│ ├── clip_vnp46a1.py\n",
24 | "│ ├── concatenate_vnp46a1.ipynb\n",
25 | "│ ├── concatenate_vnp46a1.py\n",
26 | "│ ├── download_laads_order.ipynb\n",
27 | "│ ├── download_laads_order.py\n",
28 | "│ ├── preprocess_vnp46a1.ipynb\n",
29 | "│ ├── preprocess_vnp46a1.py\n",
30 | "│ └── viirs.py\n",
31 | "├── 02-raw-data\n",
32 | "├── 03-processed-data\n",
33 | "├── 04-graphics-outputs\n",
34 | "└── 05-papers-writings\n",
35 | "```\n",
36 | "\n",
37 | "Running the Notebook from the `01-code-scripts/` folder works by default. If the Notebook runs from a different folder, the paths in the environment setup section may have to be changed."
38 | ]
39 | },
40 | {
41 | "cell_type": "markdown",
42 | "metadata": {},
43 | "source": [
44 | "# Environment Setup"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {
51 | "ExecuteTime": {
52 | "end_time": "2020-10-24T22:04:45.870198Z",
53 | "start_time": "2020-10-24T22:04:45.723579Z"
54 | }
55 | },
56 | "outputs": [],
57 | "source": [
58 | "# Load Notebook formatter\n",
59 | "%load_ext nb_black\n",
60 | "# %reload_ext nb_black"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": null,
66 | "metadata": {
67 | "ExecuteTime": {
68 | "end_time": "2020-10-24T22:04:46.084876Z",
69 | "start_time": "2020-10-24T22:04:45.873191Z"
70 | }
71 | },
72 | "outputs": [],
73 | "source": [
74 | "# Import packages\n",
75 | "import os\n",
76 | "import warnings\n",
77 | "import glob\n",
78 | "import viirs"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": null,
84 | "metadata": {
85 | "ExecuteTime": {
86 | "end_time": "2020-10-24T22:04:46.095830Z",
87 | "start_time": "2020-10-24T22:04:46.086839Z"
88 | }
89 | },
90 | "outputs": [],
91 | "source": [
92 | "# Set options\n",
93 | "warnings.simplefilter(\"ignore\")"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": null,
99 | "metadata": {
100 | "ExecuteTime": {
101 | "end_time": "2020-10-24T22:04:46.108795Z",
102 | "start_time": "2020-10-24T22:04:46.098807Z"
103 | }
104 | },
105 | "outputs": [],
106 | "source": [
107 | "# Set working directory\n",
108 | "os.chdir(\"..\")"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "# User-Defined Variables"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "metadata": {
122 | "ExecuteTime": {
123 | "end_time": "2020-10-24T22:04:46.122743Z",
124 | "start_time": "2020-10-24T22:04:46.110776Z"
125 | }
126 | },
127 | "outputs": [],
128 | "source": [
129 | "# Define path to folder containing input VNP46A1 HDF5 files\n",
130 | "hdf5_input_folder = os.path.join(\"02-raw-data\", \"hdf\", \"south-korea\")\n",
131 | "\n",
132 | "# Define path to output folder to store exported GeoTiff files\n",
133 | "geotiff_output_folder = os.path.join(\n",
134 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a1-grid\"\n",
135 | ")"
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 | "# Data Preprocessing"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {
149 | "ExecuteTime": {
150 | "end_time": "2020-10-24T22:04:53.533901Z",
151 | "start_time": "2020-10-24T22:04:46.124736Z"
152 | },
153 | "scrolled": true
154 | },
155 | "outputs": [],
156 | "source": [
157 | "# Preprocess each HDF5 file (extract bands, mask for fill values, clouds, and\n",
158 | "# sensor problems, fill masked values with NaN, export to GeoTiff)\n",
159 | "hdf5_files = glob.glob(os.path.join(hdf5_input_folder, \"*.h5\"))\n",
160 | "processed_files = 0\n",
161 | "total_files = len(hdf5_files)\n",
162 | "for hdf5 in hdf5_files:\n",
163 | " viirs.preprocess_vnp46a1(\n",
164 | " hdf5_path=hdf5, output_folder=geotiff_output_folder\n",
165 | " )\n",
166 | " processed_files += 1\n",
167 | " print(f\"Preprocessed file: {processed_files} of {total_files}\\n\\n\")"
168 | ]
169 | },
170 | {
171 | "cell_type": "markdown",
172 | "metadata": {},
173 | "source": [
174 | "# Notes and References"
175 | ]
176 | },
177 | {
178 | "cell_type": "markdown",
179 | "metadata": {},
180 | "source": [
181 | "**File download:**\n",
182 | "\n",
183 | "VNP46A1 HDF5 files were first downloaded using the `01-code-scripts/download_laads_order.py` script. This script requires a user to have a valid [NASA Earthdata](https://urs.earthdata.nasa.gov/) account and have placed an order for files.\n",
184 | "\n",
185 | "
\n",
186 | "\n",
187 | "**Useful links:**\n",
188 | "\n",
189 | "* [VNP46A1 Product Information](https://ladsweb.modaps.eosdis.nasa.gov/missions-and-measurements/products/VNP46A1/)\n",
190 | "* [VIIRS Black Marble User Guide](https://viirsland.gsfc.nasa.gov/PDF/VIIRS_BlackMarble_UserGuide.pdf)\n",
191 | "* [NASA Earthdata Scripts](https://git.earthdata.nasa.gov/projects/LPDUR/repos/nasa-viirs/browse/scripts)\n",
192 | "\n",
193 | "
\n",
194 | "\n",
195 | "**File naming convention:**\n",
196 | "\n",
197 | "VNP46A1.AYYYYDDD.hXXvYY.CCC.YYYYDDDHHMMSS.h5\n",
198 | "\n",
199 | "* VNP46A1 = Short-name\n",
200 | "* AYYYYDDD = Acquisition Year and Day of Year\n",
201 | "* hXXvYY = Tile Identifier (horizontalXXverticalYY)\n",
202 | "* CCC = Collection Version\n",
203 | "* YYYYDDDHHMMSS = Production Date – Year, Day, Hour, Minute, Second\n",
204 | "* h5 = Data Format (HDF5)\n",
205 | "\n",
206 | "
\n",
207 | "\n",
208 | "**Bands of interest (User Guide pp. 12-13):**\n",
209 | "\n",
210 | "| Scientific Dataset | Units | Description | Bit Types | Fill Value | Valid Range | Scale Factor | Offset |\n",
211 | "|:-----------------------------|:-------------------|:------------------------|:-------------------------|:------------|:-------------|:--------------|:--------|\n",
212 | "| DNB_At_Sensor_Radiance_500m | nW_per_cm2_per_sr | At-sensor DNB radiance | 16-bit unsigned integer | 65535 | 0 - 65534 | 0.1 | 0.0 |\n",
213 | "| QF_Cloud_Mask | Unitless | Cloud mask status | 16-bit unsigned integer | 65535 | 0 - 65534 | N/A | N/A |\n",
214 | "| QF_DNB | Unitless | DNB_quality flag | 16-bit unsigned integer | 65535 | 0 - 65534 | N/A | N/A |\n",
215 | "| UTC_Time | Decimal hours | UTC Time | 32-bit floating point | -999.9 | 0 - 24 | 1.0 | 0.0 |\n",
216 | "\n",
217 | "
\n",
218 | "\n",
219 | "**Masking Criteria/Workflow:**\n",
220 | "\n",
221 | "* mask where DNB_At_Sensor_Radiance_500m == 65535\n",
222 | "* mask where QF_Cloud_Mask == 2 (Probably Cloudy)\n",
223 | "* mask where QF_Cloud_Mask == 3 (Confident Cloudy)\n",
224 | "* mask where QF_DNB != 0 (0 = no problems, any other number means some kind of issue)\n",
225 | "\n",
226 | "
\n",
227 | "\n",
228 | "**Preprocessing Workflow:**\n",
229 | "\n",
230 | "* Extract bands\n",
231 | "* Mask for fill values\n",
232 | "* Mask for clouds\n",
233 | "* Mask for sensor problems\n",
234 | "* Fill masked values\n",
235 | "* Create transform\n",
236 | "* Create metadata\n",
237 | "* Export array to GeoTiff\n",
238 | "\n",
239 | "
\n",
240 | "\n",
241 | "**QF_Cloud_Mask (base-2) (User Guide p. 14):**\n",
242 | "\n",
243 | "| Bit | Flag Description Key | Interpretation |\n",
244 | "|:-----|:-----------------------------------------------|:-------------------------------------------------------------------------------------------|\n",
245 | "| 0 | Day/Night | 0 = Night
1 = Day |\n",
246 | "| 1-3 | Land/Water Background | 000 = Land & Desert
001 = Land no Desert
010 = Inland Water
011 = Sea Water
101 = Coastal |\n",
247 | "| 4-5 | Cloud Mask Quality | 00 = Poor
01 = Low
10 = Medium
11 = High |\n",
248 | "| 6-7 | Cloud Detection Results & Confidence Indicator | 00 = Confident Clear
01 = Probably Clear
10 = Probably Cloudy
11 = Confident Cloudy |\n",
249 | "| 8 | Shadow Detected | 1 = Yes
0 = No |\n",
250 | "| 9 | Cirrus Detection (IR) (BTM15 –BTM16) | 1 = Cloud
0 = No Cloud |\n",
251 | "| 10 | Snow/Ice Surface | 1 = Snow/Ice
0 = No Snow/Ice |\n",
252 | "\n",
253 | "
\n",
254 | "\n",
255 | "**QF_Cloud_Mask (base-10) (Adapted from User Guide p. 14):**\n",
256 | "\n",
257 | "| Bit | Flag Description Key | Interpretation |\n",
258 | "|:-----|:-----------------------------------------------|:-------------------------------------------------------------------------------------------|\n",
259 | "| 0 | Day/Night | 0 = Night
1 = Day |\n",
260 | "| 1-3 | Land/Water Background | 0 = Land & Desert
1 = Land no Desert
2 = Inland Water
3 = Sea Water
5 = Coastal |\n",
261 | "| 4-5 | Cloud Mask Quality | 0 = Poor
1 = Low
2 = Medium
3 = High |\n",
262 | "| 6-7 | Cloud Detection Results & Confidence Indicator | 0 = Confident Clear
1 = Probably Clear
2 = Probably Cloudy
3 = Confident Cloudy |\n",
263 | "| 8 | Shadow Detected | 1 = Yes
0 = No |\n",
264 | "| 9 | Cirrus Detection (IR) (BTM15 –BTM16) | 1 = Cloud
0 = No Cloud |\n",
265 | "| 10 | Snow/Ice Surface | 1 = Snow/Ice
0 = No Snow/Ice |\n",
266 | "\n",
267 | "
\n",
268 | "\n",
269 | "**QF_DNB (base-10) (User Guide pp. 14-15)**:\n",
270 | "\n",
271 | "| Science Data Set | Flag Mask Values and Descriptions|\n",
272 | "|:-----------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n",
273 | "| QF_DNB | 1 = Substitute_Cal
2 = Out_of_Range
4 = Saturation
8 = Temp_not_Nominal
16 = Stray_Light
256 = Bowtie_Deleted/Range_Bit
512 = Missing_EV
1024 = Cal_Fail
2048 = Dead_Detector |"
274 | ]
275 | }
276 | ],
277 | "metadata": {
278 | "hide_input": false,
279 | "kernelspec": {
280 | "display_name": "Python 3",
281 | "language": "python",
282 | "name": "python3"
283 | },
284 | "language_info": {
285 | "codemirror_mode": {
286 | "name": "ipython",
287 | "version": 3
288 | },
289 | "file_extension": ".py",
290 | "mimetype": "text/x-python",
291 | "name": "python",
292 | "nbconvert_exporter": "python",
293 | "pygments_lexer": "ipython3",
294 | "version": "3.7.6"
295 | },
296 | "toc": {
297 | "base_numbering": 1,
298 | "nav_menu": {},
299 | "number_sections": true,
300 | "sideBar": true,
301 | "skip_h1_title": false,
302 | "title_cell": "Table of Contents",
303 | "title_sidebar": "Contents",
304 | "toc_cell": false,
305 | "toc_position": {
306 | "height": "calc(100% - 180px)",
307 | "left": "10px",
308 | "top": "150px",
309 | "width": "307.2px"
310 | },
311 | "toc_section_display": true,
312 | "toc_window_display": true
313 | },
314 | "varInspector": {
315 | "cols": {
316 | "lenName": 16,
317 | "lenType": 16,
318 | "lenVar": 40
319 | },
320 | "kernels_config": {
321 | "python": {
322 | "delete_cmd_postfix": "",
323 | "delete_cmd_prefix": "del ",
324 | "library": "var_list.py",
325 | "varRefreshCmd": "print(var_dic_list())"
326 | },
327 | "r": {
328 | "delete_cmd_postfix": ") ",
329 | "delete_cmd_prefix": "rm(",
330 | "library": "var_list.r",
331 | "varRefreshCmd": "cat(var_dic_list()) "
332 | }
333 | },
334 | "types_to_exclude": [
335 | "module",
336 | "function",
337 | "builtin_function_or_method",
338 | "instance",
339 | "_Feature"
340 | ],
341 | "window_display": false
342 | }
343 | },
344 | "nbformat": 4,
345 | "nbformat_minor": 4
346 | }
347 |
--------------------------------------------------------------------------------
/01-code-scripts/explore-quality-bands-vnp46a1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# VNP46A1 Quality Flag Bands Exploration\n",
8 | "\n",
9 | "Explores the QF_Cloud_Mask and QF_DNB bands in a VNP46A1 image.\n",
10 | "\n",
11 | "The tables in this Notebook are found in the [Black Marble User Guide Version 1.0](https://viirsland.gsfc.nasa.gov/PDF/VIIRS_BlackMarble_UserGuide.pdf).\n",
12 | "\n",
13 | "**QF_Cloud_Mask (base-10) (Adapted from User Guide p. 14):**\n",
14 | "\n",
15 | "| Bit | Flag Description Key | Interpretation |\n",
16 | "|:-----|:-----------------------------------------------|:-------------------------------------------------------------------------------------------|\n",
17 | "| 0 | Day/Night | 0 = Night
1 = Day |\n",
18 | "| 1-3 | Land/Water Background | 0 = Land & Desert
1 = Land no Desert
2 = Inland Water
3 = Sea Water
5 = Coastal |\n",
19 | "| 4-5 | Cloud Mask Quality | 0 = Poor
1 = Low
2 = Medium
3 = High |\n",
20 | "| 6-7 | Cloud Detection Results & Confidence Indicator | 0 = Confident Clear
1 = Probably Clear
2 = Probably Cloudy
3 = Confident Cloudy |\n",
21 | "| 8 | Shadow Detected | 0 = No
1 = Yes |\n",
22 | "| 9 | Cirrus Detection (IR) (BTM15 –BTM16) | 0 = No Cloud
1 = Cloud |\n",
23 | "| 10 | Snow/Ice Surface | 0 = No Snow/Ice
1 = Snow/Ice |\n",
24 | "
\n",
25 | "\n",
26 | "**QF_DNB (base-10) (User Guide pp. 14-15)**:\n",
27 | "\n",
28 | "| Science Data Set | Flag Mask Value and Description|\n",
29 | "|:-----------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n",
30 | "| QF_DNB | 1 = Substitute_Cal
2 = Out_of_Range
4 = Saturation
8 = Temp_not_Nominal
16 = Stray_Light
256 = Bowtie_Deleted/Range_Bit
512 = Missing_EV
1024 = Cal_Fail
2048 = Dead_Detector |"
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "metadata": {},
36 | "source": [
37 | "# Environment Setup"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {
44 | "ExecuteTime": {
45 | "end_time": "2020-12-03T16:37:17.814149Z",
46 | "start_time": "2020-12-03T16:37:17.657570Z"
47 | }
48 | },
49 | "outputs": [],
50 | "source": [
51 | "# Load Notebook formatter\n",
52 | "%load_ext nb_black\n",
53 | "# %reload_ext nb_black"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": null,
59 | "metadata": {
60 | "ExecuteTime": {
61 | "end_time": "2020-12-03T16:37:19.316446Z",
62 | "start_time": "2020-12-03T16:37:17.817143Z"
63 | }
64 | },
65 | "outputs": [],
66 | "source": [
67 | "# Import packages\n",
68 | "import os\n",
69 | "import warnings\n",
70 | "import numpy as np\n",
71 | "import earthpy.plot as ep\n",
72 | "import viirs"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": null,
78 | "metadata": {
79 | "ExecuteTime": {
80 | "end_time": "2020-12-03T16:37:19.328898Z",
81 | "start_time": "2020-12-03T16:37:19.319409Z"
82 | }
83 | },
84 | "outputs": [],
85 | "source": [
86 | "# Set options\n",
87 | "warnings.filterwarnings(\"ignore\")"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": null,
93 | "metadata": {
94 | "ExecuteTime": {
95 | "end_time": "2020-12-03T16:37:19.345850Z",
96 | "start_time": "2020-12-03T16:37:19.331890Z"
97 | }
98 | },
99 | "outputs": [],
100 | "source": [
101 | "# Set working directory\n",
102 | "os.chdir(\"..\")\n",
103 | "print(f\"Working directory: {os.getcwd()}\")"
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "metadata": {},
109 | "source": [
110 | "# Data Acquisition and Preprocessing"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": null,
116 | "metadata": {
117 | "ExecuteTime": {
118 | "end_time": "2020-12-03T16:37:19.359847Z",
119 | "start_time": "2020-12-03T16:37:19.348843Z"
120 | }
121 | },
122 | "outputs": [],
123 | "source": [
124 | "# Set path to VNP46A1 test image (Jan 6, 2020, majority cloudy)\n",
125 | "hdf5_path = os.path.join(\n",
126 | " \"02-raw-data\",\n",
127 | " \"hdf\",\n",
128 | " \"south-korea\",\n",
129 | " \"VNP46A1.A2020006.h30v05.001.2020029061058.h5\",\n",
130 | ")"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {
137 | "ExecuteTime": {
138 | "end_time": "2020-12-03T16:37:19.649042Z",
139 | "start_time": "2020-12-03T16:37:19.362805Z"
140 | }
141 | },
142 | "outputs": [],
143 | "source": [
144 | "# Extract DNB At-Sensor Radiance, QF Cloud Mask, and QF DNB bands\n",
145 | "dnb_at_sensor_radiance = viirs.extract_band_vnp46a1(\n",
146 | " hdf5_path=hdf5_path, band_name=\"DNB_At_Sensor_Radiance_500m\"\n",
147 | ")\n",
148 | "qf_cloud_mask = viirs.extract_band_vnp46a1(\n",
149 | " hdf5_path=hdf5_path, band_name=\"QF_Cloud_Mask\"\n",
150 | ")\n",
151 | "qf_dnb = viirs.extract_band_vnp46a1(hdf5_path=hdf5_path, band_name=\"QF_DNB\")"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "metadata": {
158 | "ExecuteTime": {
159 | "end_time": "2020-12-03T16:37:19.751779Z",
160 | "start_time": "2020-12-03T16:37:19.653029Z"
161 | }
162 | },
163 | "outputs": [],
164 | "source": [
165 | "# Show QF DNB bitmask unique values (within the single image)\n",
166 | "print(f\"QF DNB: {np.unique(qf_dnb)}\")"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "metadata": {
173 | "ExecuteTime": {
174 | "end_time": "2020-12-03T16:37:19.958230Z",
175 | "start_time": "2020-12-03T16:37:19.755755Z"
176 | }
177 | },
178 | "outputs": [],
179 | "source": [
180 | "# Extract QF Cloud Mask bitmasks\n",
181 | "day_night = viirs.extract_qa_bits(qf_cloud_mask, 0, 0)\n",
182 | "land_water_background = viirs.extract_qa_bits(qf_cloud_mask, 1, 3)\n",
183 | "cloud_mask_quality = viirs.extract_qa_bits(qf_cloud_mask, 4, 5)\n",
184 | "cloud_detection = viirs.extract_qa_bits(qf_cloud_mask, 6, 7)\n",
185 | "shadow_detected = viirs.extract_qa_bits(qf_cloud_mask, 8, 8)\n",
186 | "cirrus_detection = viirs.extract_qa_bits(qf_cloud_mask, 9, 9)\n",
187 | "snow_ice_surface = viirs.extract_qa_bits(qf_cloud_mask, 10, 10)"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": null,
193 | "metadata": {
194 | "ExecuteTime": {
195 | "end_time": "2020-12-03T16:37:20.599568Z",
196 | "start_time": "2020-12-03T16:37:19.961205Z"
197 | }
198 | },
199 | "outputs": [],
200 | "source": [
201 | "# Show QF Cloud Mask bitmask unique values (within the single image)\n",
202 | "print(f\"Day/Night: {np.unique(day_night)}\")\n",
203 | "print(f\"Land/Water Background: {np.unique(land_water_background)}\")\n",
204 | "print(f\"Cloud Mask Quality: {np.unique(cloud_mask_quality)}\")\n",
205 | "print(f\"Coud Detection Results: {np.unique(cloud_detection)}\")\n",
206 | "print(f\"Shadow Detected: {np.unique(shadow_detected)}\")\n",
207 | "print(f\"Cirrus Detection: {np.unique(cirrus_detection)}\")\n",
208 | "print(f\"Snow/Ice Surface: {np.unique(snow_ice_surface)}\")"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "metadata": {
215 | "ExecuteTime": {
216 | "end_time": "2020-12-03T16:37:20.989044Z",
217 | "start_time": "2020-12-03T16:37:20.601564Z"
218 | }
219 | },
220 | "outputs": [],
221 | "source": [
222 | "# Create quality flag stack\n",
223 | "quality_flag_stack = viirs.stack_quality_flags_vnp46a1(vnp46a1_path=hdf5_path)\n",
224 | "print(\n",
225 | " f\"Quality stack shape (bands, rows, columns): {quality_flag_stack.shape}\"\n",
226 | ")"
227 | ]
228 | },
229 | {
230 | "cell_type": "markdown",
231 | "metadata": {},
232 | "source": [
233 | "# Data Processing"
234 | ]
235 | },
236 | {
237 | "cell_type": "code",
238 | "execution_count": null,
239 | "metadata": {},
240 | "outputs": [],
241 | "source": []
242 | },
243 | {
244 | "cell_type": "markdown",
245 | "metadata": {},
246 | "source": [
247 | "# Data Postprocessing"
248 | ]
249 | },
250 | {
251 | "cell_type": "markdown",
252 | "metadata": {},
253 | "source": [
254 | "# Data Visualization"
255 | ]
256 | },
257 | {
258 | "cell_type": "markdown",
259 | "metadata": {},
260 | "source": [
261 | "## Radiance Image"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": null,
267 | "metadata": {
268 | "ExecuteTime": {
269 | "end_time": "2020-12-03T16:37:21.877183Z",
270 | "start_time": "2020-12-03T16:37:20.991038Z"
271 | }
272 | },
273 | "outputs": [],
274 | "source": [
275 | "# Plot raw at-sensor radiance image (before preprocessing)\n",
276 | "radiance = ep.plot_bands(dnb_at_sensor_radiance, vmax=100)"
277 | ]
278 | },
279 | {
280 | "cell_type": "markdown",
281 | "metadata": {},
282 | "source": [
283 | "## Single QA Bands"
284 | ]
285 | },
286 | {
287 | "cell_type": "code",
288 | "execution_count": null,
289 | "metadata": {
290 | "ExecuteTime": {
291 | "end_time": "2020-12-03T16:37:22.505759Z",
292 | "start_time": "2020-12-03T16:37:21.879177Z"
293 | }
294 | },
295 | "outputs": [],
296 | "source": [
297 | "# Plot day/night bitmask\n",
298 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
299 | " bitmask_array=day_night, bitmask_name=\"Day/Night\"\n",
300 | ")"
301 | ]
302 | },
303 | {
304 | "cell_type": "code",
305 | "execution_count": null,
306 | "metadata": {
307 | "ExecuteTime": {
308 | "end_time": "2020-12-03T16:37:23.130596Z",
309 | "start_time": "2020-12-03T16:37:22.508783Z"
310 | }
311 | },
312 | "outputs": [],
313 | "source": [
314 | "# Plot land/water background bitmask\n",
315 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
316 | " bitmask_array=land_water_background, bitmask_name=\"Land/Water Background\"\n",
317 | ")"
318 | ]
319 | },
320 | {
321 | "cell_type": "code",
322 | "execution_count": null,
323 | "metadata": {
324 | "ExecuteTime": {
325 | "end_time": "2020-12-03T16:37:23.760460Z",
326 | "start_time": "2020-12-03T16:37:23.133589Z"
327 | }
328 | },
329 | "outputs": [],
330 | "source": [
331 | "# Plot cloud mask quality bitmask\n",
332 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
333 | " bitmask_array=cloud_mask_quality, bitmask_name=\"Cloud Mask Quality\"\n",
334 | ")"
335 | ]
336 | },
337 | {
338 | "cell_type": "code",
339 | "execution_count": null,
340 | "metadata": {
341 | "ExecuteTime": {
342 | "end_time": "2020-12-03T16:37:24.416923Z",
343 | "start_time": "2020-12-03T16:37:23.763453Z"
344 | }
345 | },
346 | "outputs": [],
347 | "source": [
348 | "# Plot cloud detection bitmask\n",
349 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
350 | " bitmask_array=cloud_detection, bitmask_name=\"Cloud Detection\"\n",
351 | ")"
352 | ]
353 | },
354 | {
355 | "cell_type": "code",
356 | "execution_count": null,
357 | "metadata": {
358 | "ExecuteTime": {
359 | "end_time": "2020-12-03T16:37:25.062995Z",
360 | "start_time": "2020-12-03T16:37:24.418885Z"
361 | }
362 | },
363 | "outputs": [],
364 | "source": [
365 | "# Plot shadow detected bitmask\n",
366 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
367 | " bitmask_array=shadow_detected, bitmask_name=\"Shadow Detected\"\n",
368 | ")"
369 | ]
370 | },
371 | {
372 | "cell_type": "code",
373 | "execution_count": null,
374 | "metadata": {
375 | "ExecuteTime": {
376 | "end_time": "2020-12-03T16:37:25.677377Z",
377 | "start_time": "2020-12-03T16:37:25.066018Z"
378 | }
379 | },
380 | "outputs": [],
381 | "source": [
382 | "# Plot cirrus detection bitmask\n",
383 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
384 | " bitmask_array=cirrus_detection, bitmask_name=\"Cirrus Detection\"\n",
385 | ")"
386 | ]
387 | },
388 | {
389 | "cell_type": "code",
390 | "execution_count": null,
391 | "metadata": {
392 | "ExecuteTime": {
393 | "end_time": "2020-12-03T16:37:26.288757Z",
394 | "start_time": "2020-12-03T16:37:25.679403Z"
395 | },
396 | "scrolled": false
397 | },
398 | "outputs": [],
399 | "source": [
400 | "# Plot snow/ice surface bitmask\n",
401 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
402 | " bitmask_array=snow_ice_surface, bitmask_name=\"Snow/Ice Surface\"\n",
403 | ")"
404 | ]
405 | },
406 | {
407 | "cell_type": "code",
408 | "execution_count": null,
409 | "metadata": {
410 | "ExecuteTime": {
411 | "end_time": "2020-12-03T16:37:26.965656Z",
412 | "start_time": "2020-12-03T16:37:26.291735Z"
413 | },
414 | "scrolled": false
415 | },
416 | "outputs": [],
417 | "source": [
418 | "# Plot QF DNB bitmask\n",
419 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band(\n",
420 | " bitmask_array=qf_dnb, bitmask_name=\"QF DNB\"\n",
421 | ")"
422 | ]
423 | },
424 | {
425 | "cell_type": "markdown",
426 | "metadata": {},
427 | "source": [
428 | "## All QA Bands"
429 | ]
430 | },
431 | {
432 | "cell_type": "code",
433 | "execution_count": null,
434 | "metadata": {
435 | "ExecuteTime": {
436 | "end_time": "2020-12-03T16:37:31.664783Z",
437 | "start_time": "2020-12-03T16:37:26.967651Z"
438 | }
439 | },
440 | "outputs": [],
441 | "source": [
442 | "# Plot all quality flags\n",
443 | "fig, ax = viirs.plot_quality_flags_vnp46a1(quality_flag_stack)"
444 | ]
445 | },
446 | {
447 | "cell_type": "markdown",
448 | "metadata": {},
449 | "source": [
450 | "# Data Export"
451 | ]
452 | },
453 | {
454 | "cell_type": "code",
455 | "execution_count": null,
456 | "metadata": {},
457 | "outputs": [],
458 | "source": []
459 | }
460 | ],
461 | "metadata": {
462 | "hide_input": false,
463 | "kernelspec": {
464 | "display_name": "Python 3",
465 | "language": "python",
466 | "name": "python3"
467 | },
468 | "language_info": {
469 | "codemirror_mode": {
470 | "name": "ipython",
471 | "version": 3
472 | },
473 | "file_extension": ".py",
474 | "mimetype": "text/x-python",
475 | "name": "python",
476 | "nbconvert_exporter": "python",
477 | "pygments_lexer": "ipython3",
478 | "version": "3.7.6"
479 | },
480 | "toc": {
481 | "base_numbering": 1,
482 | "nav_menu": {},
483 | "number_sections": true,
484 | "sideBar": true,
485 | "skip_h1_title": false,
486 | "title_cell": "Table of Contents",
487 | "title_sidebar": "Contents",
488 | "toc_cell": false,
489 | "toc_position": {},
490 | "toc_section_display": true,
491 | "toc_window_display": true
492 | },
493 | "varInspector": {
494 | "cols": {
495 | "lenName": 16,
496 | "lenType": 16,
497 | "lenVar": 40
498 | },
499 | "kernels_config": {
500 | "python": {
501 | "delete_cmd_postfix": "",
502 | "delete_cmd_prefix": "del ",
503 | "library": "var_list.py",
504 | "varRefreshCmd": "print(var_dic_list())"
505 | },
506 | "r": {
507 | "delete_cmd_postfix": ") ",
508 | "delete_cmd_prefix": "rm(",
509 | "library": "var_list.r",
510 | "varRefreshCmd": "cat(var_dic_list()) "
511 | }
512 | },
513 | "types_to_exclude": [
514 | "module",
515 | "function",
516 | "builtin_function_or_method",
517 | "instance",
518 | "_Feature"
519 | ],
520 | "window_display": false
521 | }
522 | },
523 | "nbformat": 4,
524 | "nbformat_minor": 4
525 | }
526 |
--------------------------------------------------------------------------------
/01-code-scripts/explore-quality-bands-vnp46a2.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# VNP46A2 Quality Flag Bands Exploration\n",
8 | "\n",
9 | "Explores the Mandatory_Quality_Flag, Snow_Flag, and QF_Cloud_Mask bands in a VNP46A2 image.\n",
10 | "\n",
11 | "The tables in this Notebook are found in the [Black Marble User Guide Version 1.0](https://viirsland.gsfc.nasa.gov/PDF/VIIRS_BlackMarble_UserGuide.pdf).\n",
12 | "\n",
13 | "**QF_Cloud_Mask (base-10) (Adapted from User Guide p. 14):**\n",
14 | "\n",
15 | "| Bit | Flag Description Key | Interpretation |\n",
16 | "|:-----|:-----------------------------------------------|:-------------------------------------------------------------------------------------------|\n",
17 | "| 0 | Day/Night | 0 = Night
1 = Day |\n",
18 | "| 1-3 | Land/Water Background | 0 = Land & Desert
1 = Land no Desert
2 = Inland Water
3 = Sea Water
5 = Coastal |\n",
19 | "| 4-5 | Cloud Mask Quality | 0 = Poor
1 = Low
2 = Medium
3 = High |\n",
20 | "| 6-7 | Cloud Detection Results & Confidence Indicator | 0 = Confident Clear
1 = Probably Clear
2 = Probably Cloudy
3 = Confident Cloudy |\n",
21 | "| 8 | Shadow Detected | 0 = No
1 = Yes |\n",
22 | "| 9 | Cirrus Detection (IR) (BTM15 –BTM16) | 0 = No Cloud
1 = Cloud |\n",
23 | "| 10 | Snow/Ice Surface | 0 = No Snow/Ice
1 = Snow/Ice |\n",
24 | "\n",
25 | "
\n",
26 | "\n",
27 | "**Mandatory_Quality_Flag (base-10) (User Guide p. 16):**\n",
28 | "\n",
29 | "| Value | Retrieval Quality | Algorithm Instance |\n",
30 | "|:-------|:-------------------|:-------------------------------------------------------------------------|\n",
31 | "| 0 | High-quality | Main algorithm (Persistent nighttime lights) |\n",
32 | "| 1 | High-quality | Main algorithm (Ephemeral Nighttime Lights) |\n",
33 | "| 2 | Poor-quality | Main algorithm (Outlier, potential cloud contamination or other issues) |\n",
34 | "| 255 | No retrieval | Fill value |\n",
35 | "\n",
36 | "\n",
37 | "\n",
38 | "\n",
39 | "**Snow_Flag (base-10) (User Guide p. 16)**:\n",
40 | "\n",
41 | "| Flag Description Key | Value | Interpretation |\n",
42 | "|:----------------------|:---------------|:---------------------------------------|\n",
43 | "| Snow/Ice Surface | 0
1
255 | No Snow/Ice
Snow/Ice
Fill Value |"
44 | ]
45 | },
46 | {
47 | "cell_type": "markdown",
48 | "metadata": {},
49 | "source": [
50 | "# Environment Setup"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": null,
56 | "metadata": {
57 | "ExecuteTime": {
58 | "end_time": "2020-12-03T16:38:07.534008Z",
59 | "start_time": "2020-12-03T16:38:07.361967Z"
60 | }
61 | },
62 | "outputs": [],
63 | "source": [
64 | "# Load Notebook formatter\n",
65 | "%load_ext nb_black\n",
66 | "# %reload_ext nb_black"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "metadata": {
73 | "ExecuteTime": {
74 | "end_time": "2020-12-03T16:38:10.286025Z",
75 | "start_time": "2020-12-03T16:38:07.537999Z"
76 | }
77 | },
78 | "outputs": [],
79 | "source": [
80 | "# Import packages\n",
81 | "import os\n",
82 | "import warnings\n",
83 | "import numpy as np\n",
84 | "import earthpy.plot as ep\n",
85 | "import viirs"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {
92 | "ExecuteTime": {
93 | "end_time": "2020-12-03T16:38:10.298005Z",
94 | "start_time": "2020-12-03T16:38:10.288055Z"
95 | }
96 | },
97 | "outputs": [],
98 | "source": [
99 | "# Set options\n",
100 | "warnings.filterwarnings(\"ignore\")"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": null,
106 | "metadata": {
107 | "ExecuteTime": {
108 | "end_time": "2020-12-03T16:38:10.313961Z",
109 | "start_time": "2020-12-03T16:38:10.299988Z"
110 | }
111 | },
112 | "outputs": [],
113 | "source": [
114 | "# Set working directory\n",
115 | "os.chdir(\"..\")\n",
116 | "print(f\"Working directory: {os.getcwd()}\")"
117 | ]
118 | },
119 | {
120 | "cell_type": "markdown",
121 | "metadata": {},
122 | "source": [
123 | "# Data Acquisition and Preprocessing"
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": null,
129 | "metadata": {
130 | "ExecuteTime": {
131 | "end_time": "2020-12-03T16:38:10.328935Z",
132 | "start_time": "2020-12-03T16:38:10.316942Z"
133 | }
134 | },
135 | "outputs": [],
136 | "source": [
137 | "# Set path to VNP46A2 test image (Jun 6, 2016)\n",
138 | "hdf5_path = os.path.join(\n",
139 | " \"02-raw-data\",\n",
140 | " \"hdf\",\n",
141 | " \"south-korea\",\n",
142 | " \"vnp46a2\",\n",
143 | " \"VNP46A2.A2016153.h30v05.001.2020267141459.h5\",\n",
144 | ")"
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": null,
150 | "metadata": {
151 | "ExecuteTime": {
152 | "end_time": "2020-12-03T16:38:10.777147Z",
153 | "start_time": "2020-12-03T16:38:10.330905Z"
154 | }
155 | },
156 | "outputs": [],
157 | "source": [
158 | "# Extract all VNP46A2 bands\n",
159 | "dnb_brdf_corrected_ntl = viirs.extract_band_vnp46a2(\n",
160 | " hdf5_path=hdf5_path, band_name=\"DNB_BRDF-Corrected_NTL\"\n",
161 | ")\n",
162 | "dnb_lunar_irradiance = viirs.extract_band_vnp46a2(\n",
163 | " hdf5_path=hdf5_path, band_name=\"DNB_Lunar_Irradiance\"\n",
164 | ")\n",
165 | "gap_filled_dnb_brdf_corrected_ntl = viirs.extract_band_vnp46a2(\n",
166 | " hdf5_path=hdf5_path, band_name=\"Gap_Filled_DNB_BRDF-Corrected_NTL\"\n",
167 | ")\n",
168 | "latest_high_quality_retrieval = viirs.extract_band_vnp46a2(\n",
169 | " hdf5_path=hdf5_path, band_name=\"Latest_High_Quality_Retrieval\"\n",
170 | ")\n",
171 | "mandatory_quality_flag = viirs.extract_band_vnp46a2(\n",
172 | " hdf5_path=hdf5_path, band_name=\"Mandatory_Quality_Flag\"\n",
173 | ")\n",
174 | "qf_cloud_mask = viirs.extract_band_vnp46a2(\n",
175 | " hdf5_path=hdf5_path, band_name=\"QF_Cloud_Mask\"\n",
176 | ")\n",
177 | "snow_flag = viirs.extract_band_vnp46a2(\n",
178 | " hdf5_path=hdf5_path, band_name=\"Snow_Flag\"\n",
179 | ")"
180 | ]
181 | },
182 | {
183 | "cell_type": "code",
184 | "execution_count": null,
185 | "metadata": {
186 | "ExecuteTime": {
187 | "end_time": "2020-12-03T16:38:11.059012Z",
188 | "start_time": "2020-12-03T16:38:10.781107Z"
189 | }
190 | },
191 | "outputs": [],
192 | "source": [
193 | "# Show main bands unique values (within the single image)\n",
194 | "print(\n",
195 | " (\n",
196 | " \"Latest High Quality Retrieval: \"\n",
197 | " f\"{viirs.get_unique_values(latest_high_quality_retrieval)}\"\n",
198 | " )\n",
199 | ")\n",
200 | "print(\n",
201 | " (\n",
202 | " f\"Mandatory Quality Flag: \"\n",
203 | " f\"{viirs.get_unique_values(mandatory_quality_flag)}\"\n",
204 | " )\n",
205 | ")\n",
206 | "print(f\"Snow Flag: {viirs.get_unique_values(snow_flag)}\")"
207 | ]
208 | },
209 | {
210 | "cell_type": "code",
211 | "execution_count": null,
212 | "metadata": {
213 | "ExecuteTime": {
214 | "end_time": "2020-12-03T16:38:11.265076Z",
215 | "start_time": "2020-12-03T16:38:11.062002Z"
216 | }
217 | },
218 | "outputs": [],
219 | "source": [
220 | "# Extract QF Cloud Mask bitmasks\n",
221 | "day_night = viirs.extract_qa_bits(qf_cloud_mask, 0, 0)\n",
222 | "land_water_background = viirs.extract_qa_bits(qf_cloud_mask, 1, 3)\n",
223 | "cloud_mask_quality = viirs.extract_qa_bits(qf_cloud_mask, 4, 5)\n",
224 | "cloud_detection = viirs.extract_qa_bits(qf_cloud_mask, 6, 7)\n",
225 | "shadow_detected = viirs.extract_qa_bits(qf_cloud_mask, 8, 8)\n",
226 | "cirrus_detection = viirs.extract_qa_bits(qf_cloud_mask, 9, 9)\n",
227 | "snow_ice_surface = viirs.extract_qa_bits(qf_cloud_mask, 10, 10)"
228 | ]
229 | },
230 | {
231 | "cell_type": "code",
232 | "execution_count": null,
233 | "metadata": {
234 | "ExecuteTime": {
235 | "end_time": "2020-12-03T16:38:12.004120Z",
236 | "start_time": "2020-12-03T16:38:11.268069Z"
237 | }
238 | },
239 | "outputs": [],
240 | "source": [
241 | "# Show QF Cloud Mask bitmask unique values (within the single image)\n",
242 | "print(f\"Day/Night: {viirs.get_unique_values(day_night)}\")\n",
243 | "print(\n",
244 | " f\"Land/Water Background: {viirs.get_unique_values(land_water_background)}\"\n",
245 | ")\n",
246 | "print(f\"Cloud Mask Quality: {viirs.get_unique_values(cloud_mask_quality)}\")\n",
247 | "print(f\"Cloud Detection Results: {viirs.get_unique_values(cloud_detection)}\")\n",
248 | "print(f\"Shadow Detected: {viirs.get_unique_values(shadow_detected)}\")\n",
249 | "print(f\"Cirrus Detection: {viirs.get_unique_values(cirrus_detection)}\")\n",
250 | "print(f\"Snow/Ice Surface: {viirs.get_unique_values(snow_ice_surface)}\")"
251 | ]
252 | },
253 | {
254 | "cell_type": "code",
255 | "execution_count": null,
256 | "metadata": {
257 | "ExecuteTime": {
258 | "end_time": "2020-12-03T16:38:12.397653Z",
259 | "start_time": "2020-12-03T16:38:12.007094Z"
260 | }
261 | },
262 | "outputs": [],
263 | "source": [
264 | "# Create quality flag stack\n",
265 | "quality_flag_stack = viirs.stack_quality_flags_vnp46a2(vnp46a2_path=hdf5_path)\n",
266 | "print(\n",
267 | " f\"Quality stack shape (bands, rows, columns): {quality_flag_stack.shape}\"\n",
268 | ")"
269 | ]
270 | },
271 | {
272 | "cell_type": "markdown",
273 | "metadata": {},
274 | "source": [
275 | "# Data Processing"
276 | ]
277 | },
278 | {
279 | "cell_type": "code",
280 | "execution_count": null,
281 | "metadata": {},
282 | "outputs": [],
283 | "source": []
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {},
288 | "source": [
289 | "# Data Postprocessing"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": null,
295 | "metadata": {},
296 | "outputs": [],
297 | "source": []
298 | },
299 | {
300 | "cell_type": "markdown",
301 | "metadata": {},
302 | "source": [
303 | "# Data Visualization"
304 | ]
305 | },
306 | {
307 | "cell_type": "markdown",
308 | "metadata": {},
309 | "source": [
310 | "## Radiance Image"
311 | ]
312 | },
313 | {
314 | "cell_type": "code",
315 | "execution_count": null,
316 | "metadata": {
317 | "ExecuteTime": {
318 | "end_time": "2020-12-03T16:38:13.289766Z",
319 | "start_time": "2020-12-03T16:38:12.400648Z"
320 | }
321 | },
322 | "outputs": [],
323 | "source": [
324 | "# Plot raw DNB_BRDF-Corrected_NTL (before preprocessing)\n",
325 | "radiance = ep.plot_bands(dnb_brdf_corrected_ntl, vmax=100)"
326 | ]
327 | },
328 | {
329 | "cell_type": "markdown",
330 | "metadata": {},
331 | "source": [
332 | "## Single QA Bands"
333 | ]
334 | },
335 | {
336 | "cell_type": "code",
337 | "execution_count": null,
338 | "metadata": {
339 | "ExecuteTime": {
340 | "end_time": "2020-12-03T16:38:14.004154Z",
341 | "start_time": "2020-12-03T16:38:13.291751Z"
342 | }
343 | },
344 | "outputs": [],
345 | "source": [
346 | "# Plot mandatory quality flag\n",
347 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
348 | " bitmask_array=mandatory_quality_flag, bitmask_name=\"Mandatory Quality Flag\"\n",
349 | ")"
350 | ]
351 | },
352 | {
353 | "cell_type": "code",
354 | "execution_count": null,
355 | "metadata": {
356 | "ExecuteTime": {
357 | "end_time": "2020-12-03T16:38:14.681806Z",
358 | "start_time": "2020-12-03T16:38:14.006149Z"
359 | }
360 | },
361 | "outputs": [],
362 | "source": [
363 | "# Plot snow flag\n",
364 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
365 | " bitmask_array=snow_flag, bitmask_name=\"Snow Flag\"\n",
366 | ")"
367 | ]
368 | },
369 | {
370 | "cell_type": "code",
371 | "execution_count": null,
372 | "metadata": {
373 | "ExecuteTime": {
374 | "end_time": "2020-12-03T16:38:15.343007Z",
375 | "start_time": "2020-12-03T16:38:14.684767Z"
376 | }
377 | },
378 | "outputs": [],
379 | "source": [
380 | "# Plot day/night band\n",
381 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
382 | " bitmask_array=day_night, bitmask_name=\"Day/Night\"\n",
383 | ")"
384 | ]
385 | },
386 | {
387 | "cell_type": "code",
388 | "execution_count": null,
389 | "metadata": {
390 | "ExecuteTime": {
391 | "end_time": "2020-12-03T16:38:16.074124Z",
392 | "start_time": "2020-12-03T16:38:15.345999Z"
393 | }
394 | },
395 | "outputs": [],
396 | "source": [
397 | "# Plot land/water background\n",
398 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
399 | " bitmask_array=land_water_background, bitmask_name=\"Land/Water Background\"\n",
400 | ")"
401 | ]
402 | },
403 | {
404 | "cell_type": "code",
405 | "execution_count": null,
406 | "metadata": {
407 | "ExecuteTime": {
408 | "end_time": "2020-12-03T16:38:16.822144Z",
409 | "start_time": "2020-12-03T16:38:16.077115Z"
410 | }
411 | },
412 | "outputs": [],
413 | "source": [
414 | "# Plot cloud mask quality\n",
415 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
416 | " bitmask_array=cloud_mask_quality, bitmask_name=\"Cloud Mask Quality\"\n",
417 | ")"
418 | ]
419 | },
420 | {
421 | "cell_type": "code",
422 | "execution_count": null,
423 | "metadata": {
424 | "ExecuteTime": {
425 | "end_time": "2020-12-03T16:38:17.512474Z",
426 | "start_time": "2020-12-03T16:38:16.825138Z"
427 | }
428 | },
429 | "outputs": [],
430 | "source": [
431 | "# Plot cloud detection results\n",
432 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
433 | " bitmask_array=cloud_detection, bitmask_name=\"Cloud Detection\"\n",
434 | ")"
435 | ]
436 | },
437 | {
438 | "cell_type": "code",
439 | "execution_count": null,
440 | "metadata": {
441 | "ExecuteTime": {
442 | "end_time": "2020-12-03T16:38:18.117831Z",
443 | "start_time": "2020-12-03T16:38:17.514438Z"
444 | }
445 | },
446 | "outputs": [],
447 | "source": [
448 | "# Plot shadow detected\n",
449 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
450 | " bitmask_array=shadow_detected, bitmask_name=\"Shadow Detected\"\n",
451 | ")"
452 | ]
453 | },
454 | {
455 | "cell_type": "code",
456 | "execution_count": null,
457 | "metadata": {
458 | "ExecuteTime": {
459 | "end_time": "2020-12-03T16:38:18.735098Z",
460 | "start_time": "2020-12-03T16:38:18.119822Z"
461 | }
462 | },
463 | "outputs": [],
464 | "source": [
465 | "# Plot cirrus detection\n",
466 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
467 | "    bitmask_array=cirrus_detection, bitmask_name=\"Cirrus Detection\"\n",
468 | ")"
469 | ]
470 | },
471 | {
472 | "cell_type": "code",
473 | "execution_count": null,
474 | "metadata": {
475 | "ExecuteTime": {
476 | "end_time": "2020-12-03T16:38:19.355744Z",
477 | "start_time": "2020-12-03T16:38:18.738067Z"
478 | }
479 | },
480 | "outputs": [],
481 | "source": [
482 | "# Plot snow/ice surface\n",
483 | "fig, ax = viirs.plot_quality_flag_bitmask_single_band_vnp46a2(\n",
484 | "    bitmask_array=snow_ice_surface, bitmask_name=\"Snow/Ice Surface\"\n",
485 | ")"
486 | ]
487 | },
488 | {
489 | "cell_type": "markdown",
490 | "metadata": {},
491 | "source": [
492 | "## All QA Bands"
493 | ]
494 | },
495 | {
496 | "cell_type": "code",
497 | "execution_count": null,
498 | "metadata": {
499 | "ExecuteTime": {
500 | "end_time": "2020-12-03T16:38:24.025633Z",
501 | "start_time": "2020-12-03T16:38:19.358714Z"
502 | }
503 | },
504 | "outputs": [],
505 | "source": [
506 | "# Plot all QA bands\n",
507 | "fig, ax = viirs.plot_quality_flags_vnp46a2(\n",
508 | " vnp46a2_quality_stack=quality_flag_stack, data_source=\"NASA\"\n",
509 | ")"
510 | ]
511 | },
512 | {
513 | "cell_type": "markdown",
514 | "metadata": {},
515 | "source": [
516 | "# Data Export"
517 | ]
518 | },
519 | {
520 | "cell_type": "code",
521 | "execution_count": null,
522 | "metadata": {},
523 | "outputs": [],
524 | "source": []
525 | }
526 | ],
527 | "metadata": {
528 | "hide_input": false,
529 | "kernelspec": {
530 | "display_name": "Python 3",
531 | "language": "python",
532 | "name": "python3"
533 | },
534 | "language_info": {
535 | "codemirror_mode": {
536 | "name": "ipython",
537 | "version": 3
538 | },
539 | "file_extension": ".py",
540 | "mimetype": "text/x-python",
541 | "name": "python",
542 | "nbconvert_exporter": "python",
543 | "pygments_lexer": "ipython3",
544 | "version": "3.7.6"
545 | },
546 | "toc": {
547 | "base_numbering": 1,
548 | "nav_menu": {},
549 | "number_sections": true,
550 | "sideBar": true,
551 | "skip_h1_title": false,
552 | "title_cell": "Table of Contents",
553 | "title_sidebar": "Contents",
554 | "toc_cell": false,
555 | "toc_position": {
556 | "height": "calc(100% - 180px)",
557 | "left": "10px",
558 | "top": "150px",
559 | "width": "320px"
560 | },
561 | "toc_section_display": true,
562 | "toc_window_display": true
563 | },
564 | "varInspector": {
565 | "cols": {
566 | "lenName": 16,
567 | "lenType": 16,
568 | "lenVar": 40
569 | },
570 | "kernels_config": {
571 | "python": {
572 | "delete_cmd_postfix": "",
573 | "delete_cmd_prefix": "del ",
574 | "library": "var_list.py",
575 | "varRefreshCmd": "print(var_dic_list())"
576 | },
577 | "r": {
578 | "delete_cmd_postfix": ") ",
579 | "delete_cmd_prefix": "rm(",
580 | "library": "var_list.r",
581 | "varRefreshCmd": "cat(var_dic_list()) "
582 | }
583 | },
584 | "types_to_exclude": [
585 | "module",
586 | "function",
587 | "builtin_function_or_method",
588 | "instance",
589 | "_Feature"
590 | ],
591 | "window_display": false
592 | }
593 | },
594 | "nbformat": 4,
595 | "nbformat_minor": 4
596 | }
597 |
--------------------------------------------------------------------------------
/01-code-scripts/calculate_baseline.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Calculate Baseline\n",
8 | "\n",
9 | "Creates baseline mean, standard deviation, variance, and median GeoTiffs for each specified month and for specified date ranges.\n",
10 | "\n",
11 | "Baseline years:\n",
12 | "\n",
13 | "* 2012-2019 (Jan-Dec)\n",
14 | "\n",
15 | "Comparison Year:\n",
16 | "\n",
17 | "* 2020 (Jan-Dec)"
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "# Environment Setup"
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "execution_count": null,
30 | "metadata": {
31 | "ExecuteTime": {
32 | "end_time": "2020-12-16T13:57:26.596137Z",
33 | "start_time": "2020-12-16T13:57:26.335802Z"
34 | }
35 | },
36 | "outputs": [],
37 | "source": [
38 | "# Load Notebook formatter\n",
39 | "%load_ext nb_black\n",
40 | "# %reload_ext nb_black"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": null,
46 | "metadata": {
47 | "ExecuteTime": {
48 | "end_time": "2020-12-16T13:57:32.602808Z",
49 | "start_time": "2020-12-16T13:57:26.620073Z"
50 | }
51 | },
52 | "outputs": [],
53 | "source": [
54 | "# Import packages\n",
55 | "import os\n",
56 | "import glob\n",
57 | "import re\n",
58 | "import warnings\n",
59 | "import viirs"
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": null,
65 | "metadata": {
66 | "ExecuteTime": {
67 | "end_time": "2020-12-16T13:57:32.648684Z",
68 | "start_time": "2020-12-16T13:57:32.635720Z"
69 | }
70 | },
71 | "outputs": [],
72 | "source": [
73 | "# Set Options\n",
74 | "warnings.filterwarnings(\"ignore\")\n",
75 | "# sns.set(font_scale=1.5, style=\"whitegrid\")\n",
76 | "# sns.set(font_scale=1.5)\n",
77 | "# pd.set_option(\"display.max_columns\", None)\n",
78 | "# pd.set_option(\"display.max_rows\", None)\n",
79 | "# pd.set_option(\"precision\", 15)"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": null,
85 | "metadata": {
86 | "ExecuteTime": {
87 | "end_time": "2020-12-16T13:57:32.690598Z",
88 | "start_time": "2020-12-16T13:57:32.679602Z"
89 | }
90 | },
91 | "outputs": [],
92 | "source": [
93 | "# Set working directory\n",
94 | "os.chdir(\"..\")\n",
95 | "print(f\"Working directory: {os.getcwd()}\")"
96 | ]
97 | },
98 | {
99 | "cell_type": "markdown",
100 | "metadata": {},
101 | "source": [
102 | "# User-Defined Variables"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {
109 | "ExecuteTime": {
110 | "end_time": "2020-12-16T13:57:37.262836Z",
111 | "start_time": "2020-12-16T13:57:37.247843Z"
112 | }
113 | },
114 | "outputs": [],
115 | "source": [
116 | "# Set paths\n",
117 | "geotiff_input_folder = os.path.join(\n",
118 | " \"03-processed-data\", \"raster\", \"south-korea\", \"vnp46a2-clipped\"\n",
119 | ")\n",
120 | "\n",
121 | "statistics_output_folder = os.path.join(\n",
122 | " \"03-processed-data\", \"raster\", \"south-korea\", \"statistics\", \"vnp46a2\"\n",
123 | ")"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "# Data Acquisition and Preprocessing"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {
137 | "ExecuteTime": {
138 | "end_time": "2020-12-15T15:07:02.939193Z",
139 | "start_time": "2020-12-15T15:07:02.930217Z"
140 | }
141 | },
142 | "outputs": [],
143 | "source": [
144 | "# Get list of radiance rasters\n",
145 | "# radiance_geotiffs = glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))\n",
146 | "# print(f\"Found {len(radiance_geotiffs)} files\")"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "metadata": {
153 | "ExecuteTime": {
154 | "end_time": "2020-12-15T15:06:36.442946Z",
155 | "start_time": "2020-12-15T15:06:36.388058Z"
156 | }
157 | },
158 | "outputs": [],
159 | "source": [
160 | "# # Get export metadata (for exporting statistics, same for all files)\n",
161 | "# metadata = viirs.extract_geotiff_metadata(\n",
162 | "# glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))[0]\n",
163 | "# )\n",
164 | "# metadata"
165 | ]
166 | },
167 | {
168 | "cell_type": "markdown",
169 | "metadata": {},
170 | "source": [
171 | "# Data Processing"
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {},
177 | "source": [
178 | "## Setup"
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "execution_count": null,
184 | "metadata": {
185 | "ExecuteTime": {
186 | "end_time": "2020-12-16T14:07:00.717718Z",
187 | "start_time": "2020-12-16T14:07:00.558132Z"
188 | }
189 | },
190 | "outputs": [],
191 | "source": [
192 | "# Get export metadata (for exporting statistics, same for all files)\n",
193 | "metadata = viirs.extract_geotiff_metadata(\n",
194 | " glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))[0]\n",
195 | ")\n",
196 | "metadata"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "metadata": {
203 | "ExecuteTime": {
204 | "end_time": "2020-12-16T14:24:46.159640Z",
205 | "start_time": "2020-12-16T14:24:46.139694Z"
206 | }
207 | },
208 | "outputs": [],
209 | "source": [
210 | "# Set month numbers (for filtering data) and abbreviations (for output name)\n",
211 | "month_numbers = [\n",
212 | " \"01\",\n",
213 | " \"02\",\n",
214 | " \"03\",\n",
215 | " \"04\",\n",
216 | " \"05\",\n",
217 | " \"06\",\n",
218 | " \"07\",\n",
219 | " \"08\",\n",
220 | " \"09\",\n",
221 | " \"10\",\n",
222 | " \"11\",\n",
223 | " \"12\",\n",
224 | "]\n",
225 | "\n",
226 | "month_abbreviations = [\n",
227 | " \"jan\",\n",
228 | " \"feb\",\n",
229 | " \"mar\",\n",
230 | " \"apr\",\n",
231 | " \"may\",\n",
232 | " \"jun\",\n",
233 | " \"jul\",\n",
234 | " \"aug\",\n",
235 | " \"sep\",\n",
236 | " \"oct\",\n",
237 | " \"nov\",\n",
238 | " \"dec\",\n",
239 | "]"
240 | ]
241 | },
242 | {
243 | "cell_type": "code",
244 | "execution_count": null,
245 | "metadata": {},
246 | "outputs": [],
247 | "source": [
248 | "# Set date ranges of interest (use leap year to include 2/29)\n",
249 | "date_ranges = [\n",
250 | " [\n",
251 | " date[4:]\n",
252 | " for date in viirs.create_date_range(\n",
253 | " start_date=\"2020-03-15\", end_date=\"2020-04-14\"\n",
254 | " )\n",
255 | " ],\n",
256 | " [\n",
257 | " date[4:]\n",
258 | " for date in viirs.create_date_range(\n",
259 | " start_date=\"2020-04-15\", end_date=\"2020-05-14\"\n",
260 | " )\n",
261 | " ],\n",
262 | " [\n",
263 | " date[4:]\n",
264 | " for date in viirs.create_date_range(\n",
265 | " start_date=\"2020-05-15\", end_date=\"2020-06-14\"\n",
266 | " )\n",
267 | " ],\n",
268 | "]\n",
269 | "date_ranges"
270 | ]
271 | },
272 | {
273 | "cell_type": "markdown",
274 | "metadata": {},
275 | "source": [
276 | "## Baseline Data (2012-2019)"
277 | ]
278 | },
279 | {
280 | "cell_type": "markdown",
281 | "metadata": {},
282 | "source": [
283 | "### Monthly"
284 | ]
285 | },
286 | {
287 | "cell_type": "code",
288 | "execution_count": null,
289 | "metadata": {
290 | "ExecuteTime": {
291 | "end_time": "2020-12-16T14:42:33.245877Z",
292 | "start_time": "2020-12-16T14:26:42.692237Z"
293 | }
294 | },
295 | "outputs": [],
296 | "source": [
297 | "# Calculate statistics for all months over the baseline years\n",
298 | "for index, baseline_month in enumerate(month_numbers):\n",
299 | " print(f\"Started month: {month_abbreviations[index].capitalize()}\")\n",
300 | " print(\"Gathering GeoTiffs within range...\")\n",
301 | "    # Get list of arrays for acquisition dates that match the month\n",
302 | " array_list = [\n",
303 | " viirs.read_geotiff_into_array(geotiff)\n",
304 | " for geotiff in glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))\n",
305 | " if re.compile(f\"^{baseline_month}$\").match(\n",
306 | " # Acquisition month\n",
307 | " os.path.basename(geotiff)[12:14]\n",
308 | " )\n",
309 | " # Acquisition year\n",
310 | " and os.path.basename(geotiff)[8:12] != \"2020\"\n",
311 | " ]\n",
312 | "\n",
313 | " print(f\"Number of arrays: {len(array_list)}\")\n",
314 | " print(\"Calculating statistics...\")\n",
315 | " # Calculate mean, variance, standard deviation, and median\n",
316 | " statistics = {\n",
317 | " \"mean\": {\n",
318 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"mean\"),\n",
319 | " \"file\": (\n",
320 | " \"vnp46a2-south-korea-2012-2019-mean-\"\n",
321 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
322 | " ),\n",
323 | " },\n",
324 | " \"variance\": {\n",
325 | " \"array\": viirs.calculate_statistic(\n",
326 | " array_list, statistic=\"variance\"\n",
327 | " ),\n",
328 | " \"file\": (\n",
329 | " \"vnp46a2-south-korea-2012-2019-variance-\"\n",
330 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
331 | " ),\n",
332 | " },\n",
333 | " \"deviation\": {\n",
334 | " \"array\": viirs.calculate_statistic(\n",
335 | " array_list, statistic=\"deviation\"\n",
336 | " ),\n",
337 | " \"file\": (\n",
338 | " \"vnp46a2-south-korea-2012-2019-deviation-\"\n",
339 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
340 | " ),\n",
341 | " },\n",
342 | " \"median\": {\n",
343 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"median\"),\n",
344 | " \"file\": (\n",
345 | " \"vnp46a2-south-korea-2012-2019-median-\"\n",
346 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
347 | " ),\n",
348 | " },\n",
349 | " }\n",
350 | "\n",
351 | " print(\"Exporting statistics to GeoTiffs...\")\n",
352 | "    # Export statistic arrays to GeoTiff\n",
353 | " for statistic in statistics.keys():\n",
354 | " try:\n",
355 | " viirs.export_array(\n",
356 | " array=statistics.get(statistic).get(\"array\"),\n",
357 | " output_path=os.path.join(\n",
358 | " statistics_output_folder,\n",
359 | " statistics.get(statistic).get(\"file\"),\n",
360 | " ),\n",
361 | " metadata=metadata,\n",
362 | " )\n",
363 | " except Exception as error:\n",
364 | " print(error)\n",
365 | "\n",
366 | "    # Output completion message\n",
367 | " print(f\"Completed month: {month_abbreviations[index].capitalize()}\\n\")"
368 | ]
369 | },
370 | {
371 | "cell_type": "markdown",
372 | "metadata": {},
373 | "source": [
374 | "### Inter-Month Ranges"
375 | ]
376 | },
377 | {
378 | "cell_type": "code",
379 | "execution_count": null,
380 | "metadata": {
381 | "ExecuteTime": {
382 | "end_time": "2020-12-16T14:15:26.541902Z",
383 | "start_time": "2020-12-16T14:11:18.250661Z"
384 | }
385 | },
386 | "outputs": [],
387 | "source": [
388 | "# Complete for all defined date ranges\n",
389 | "for date_range in date_ranges:\n",
390 | " print(f\"Started date range: {date_range[0]}-{date_range[-1]}\")\n",
391 | " # Initialize list for storing arrays\n",
392 | " array_list = []\n",
393 | "\n",
394 | " print(\"Gathering GeoTiffs within range...\")\n",
395 | " # Get data within date range over the baseline years\n",
396 | " for geotiff in glob.glob(os.path.join(geotiff_input_folder, \"*.tif\")):\n",
397 | " for month_day in date_range:\n",
398 | " month = month_day[:2]\n",
399 | " day = month_day[2:]\n",
400 | "\n",
401 | " # Get arrays from GeoTiffs within the month-day range\n",
402 | " if (\n",
403 | " # Acquisition month\n",
404 | " re.compile(f\"^{month}$\").match(\n",
405 | " os.path.basename(geotiff)[12:14]\n",
406 | " )\n",
407 | " # Acquisition day\n",
408 | " and re.compile(f\"^{day}$\").match(\n",
409 | " os.path.basename(geotiff)[14:16]\n",
410 | " )\n",
411 | " # Acquisition year\n",
412 | " and os.path.basename(geotiff)[8:12] != \"2020\"\n",
413 | " ):\n",
414 | " array_list.append(viirs.read_geotiff_into_array(geotiff))\n",
415 | "\n",
416 | " print(f\"Number of arrays: {len(array_list)}\")\n",
417 | " print(\"Calculating statistics...\")\n",
418 | " # Calculate mean, variance, standard deviation, and median\n",
419 | " statistics = {\n",
420 | " \"mean\": {\n",
421 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"mean\"),\n",
422 | " \"file\": (\n",
423 | " \"vnp46a2-south-korea-2012-2019-mean-\"\n",
424 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
425 | " ),\n",
426 | " },\n",
427 | " \"variance\": {\n",
428 | " \"array\": viirs.calculate_statistic(\n",
429 | " array_list, statistic=\"variance\"\n",
430 | " ),\n",
431 | " \"file\": (\n",
432 | " \"vnp46a2-south-korea-2012-2019-variance-\"\n",
433 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
434 | " ),\n",
435 | " },\n",
436 | " \"deviation\": {\n",
437 | " \"array\": viirs.calculate_statistic(\n",
438 | " array_list, statistic=\"deviation\"\n",
439 | " ),\n",
440 | " \"file\": (\n",
441 | " \"vnp46a2-south-korea-2012-2019-deviation-\"\n",
442 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
443 | " ),\n",
444 | " },\n",
445 | " \"median\": {\n",
446 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"median\"),\n",
447 | " \"file\": (\n",
448 | " \"vnp46a2-south-korea-2012-2019-median-\"\n",
449 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
450 | " ),\n",
451 | " },\n",
452 | " }\n",
453 | "\n",
454 | " print(\"Exporting statistics to GeoTiffs...\")\n",
455 | "    # Export statistic arrays to GeoTiff\n",
456 | " for statistic in statistics.keys():\n",
457 | " try:\n",
458 | " viirs.export_array(\n",
459 | " array=statistics.get(statistic).get(\"array\"),\n",
460 | " output_path=os.path.join(\n",
461 | " statistics_output_folder,\n",
462 | " statistics.get(statistic).get(\"file\"),\n",
463 | " ),\n",
464 | " metadata=metadata,\n",
465 | " )\n",
466 | " except Exception as error:\n",
467 | " print(error)\n",
468 | "\n",
469 | " print(f\"Completed date range: {date_range[0]}-{date_range[-1]}\\n\")"
470 | ]
471 | },
472 | {
473 | "cell_type": "markdown",
474 | "metadata": {},
475 | "source": [
476 | "## Comparison Data (2020)"
477 | ]
478 | },
479 | {
480 | "cell_type": "markdown",
481 | "metadata": {},
482 | "source": [
483 | "### Monthly"
484 | ]
485 | },
486 | {
487 | "cell_type": "code",
488 | "execution_count": null,
489 | "metadata": {
490 | "ExecuteTime": {
491 | "end_time": "2020-12-16T14:26:07.638557Z",
492 | "start_time": "2020-12-16T14:26:07.266442Z"
493 | }
494 | },
495 | "outputs": [],
496 | "source": [
497 | "# Calculate statistics for all months over the comparison year (2020)\n",
498 | "for index, baseline_month in enumerate(month_numbers):\n",
499 | " print(f\"Started month: {month_abbreviations[index].capitalize()}\")\n",
500 | " print(\"Gathering GeoTiffs within range...\")\n",
501 | "    # Get list of arrays for acquisition dates that match the month\n",
502 | " array_list = [\n",
503 | " viirs.read_geotiff_into_array(geotiff)\n",
504 | " for geotiff in glob.glob(os.path.join(geotiff_input_folder, \"*.tif\"))\n",
505 | " if re.compile(f\"^{baseline_month}$\").match(\n",
506 | " # Acquisition month\n",
507 | " os.path.basename(geotiff)[12:14]\n",
508 | " )\n",
509 | " # Acquisition year\n",
510 | " and os.path.basename(geotiff)[8:12] == \"2020\"\n",
511 | " ]\n",
512 | "\n",
513 | " print(f\"Number of arrays: {len(array_list)}\")\n",
514 | " print(\"Calculating statistics...\")\n",
515 | " # Calculate mean, variance, standard deviation, and median\n",
516 | " statistics = {\n",
517 | " \"mean\": {\n",
518 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"mean\"),\n",
519 | " \"file\": (\n",
520 | " \"vnp46a2-south-korea-2020-mean-\"\n",
521 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
522 | " ),\n",
523 | " },\n",
524 | " \"variance\": {\n",
525 | " \"array\": viirs.calculate_statistic(\n",
526 | " array_list, statistic=\"variance\"\n",
527 | " ),\n",
528 | " \"file\": (\n",
529 | " \"vnp46a2-south-korea-2020-variance-\"\n",
530 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
531 | " ),\n",
532 | " },\n",
533 | " \"deviation\": {\n",
534 | " \"array\": viirs.calculate_statistic(\n",
535 | " array_list, statistic=\"deviation\"\n",
536 | " ),\n",
537 | " \"file\": (\n",
538 | " \"vnp46a2-south-korea-2020-deviation-\"\n",
539 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
540 | " ),\n",
541 | " },\n",
542 | " \"median\": {\n",
543 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"median\"),\n",
544 | " \"file\": (\n",
545 | " \"vnp46a2-south-korea-2020-median-\"\n",
546 | " f\"{baseline_month}-{month_abbreviations[index]}.tif\"\n",
547 | " ),\n",
548 | " },\n",
549 | " }\n",
550 | "\n",
551 | " print(\"Exporting statistics to GeoTiffs...\")\n",
552 | "    # Export statistic arrays to GeoTiff\n",
553 | " for statistic in statistics.keys():\n",
554 | " try:\n",
555 | " viirs.export_array(\n",
556 | " array=statistics.get(statistic).get(\"array\"),\n",
557 | " output_path=os.path.join(\n",
558 | " statistics_output_folder,\n",
559 | " statistics.get(statistic).get(\"file\"),\n",
560 | " ),\n",
561 | " metadata=metadata,\n",
562 | " )\n",
563 | " except Exception as error:\n",
564 | " print(error)\n",
565 | "\n",
566 | "    # Output completion message\n",
567 | " print(f\"Completed month: {month_abbreviations[index].capitalize()}\\n\")"
568 | ]
569 | },
570 | {
571 | "cell_type": "markdown",
572 | "metadata": {},
573 | "source": [
574 | "### Inter-Month Ranges"
575 | ]
576 | },
577 | {
578 | "cell_type": "code",
579 | "execution_count": null,
580 | "metadata": {
581 | "ExecuteTime": {
582 | "end_time": "2020-12-15T16:21:30.693256Z",
583 | "start_time": "2020-12-15T16:21:29.388440Z"
584 | }
585 | },
586 | "outputs": [],
587 | "source": [
588 | "# Complete for all defined date ranges\n",
589 | "for date_range in date_ranges:\n",
590 | " print(f\"Started date range: {date_range[0]}-{date_range[-1]}\")\n",
591 | " # Initialize list for storing arrays\n",
592 | " array_list = []\n",
593 | "\n",
594 | " print(\"Gathering GeoTiffs within range...\")\n",
595 | " # Get data within date range over the baseline years\n",
596 | " for geotiff in glob.glob(os.path.join(geotiff_input_folder, \"*.tif\")):\n",
597 | " for month_day in date_range:\n",
598 | " month = month_day[:2]\n",
599 | " day = month_day[2:]\n",
600 | "\n",
601 | " # Get arrays from GeoTiffs within the month-day range\n",
602 | " if (\n",
603 | " # Acquisition month\n",
604 | " re.compile(f\"^{month}$\").match(\n",
605 | " os.path.basename(geotiff)[12:14]\n",
606 | " )\n",
607 | " # Acquisition day\n",
608 | " and re.compile(f\"^{day}$\").match(\n",
609 | " os.path.basename(geotiff)[14:16]\n",
610 | " )\n",
611 | " # Acquisition year\n",
612 | " and os.path.basename(geotiff)[8:12] == \"2020\"\n",
613 | " ):\n",
614 | " array_list.append(viirs.read_geotiff_into_array(geotiff))\n",
615 | "\n",
616 | " print(f\"Number of arrays: {len(array_list)}\")\n",
617 | " print(\"Calculating statistics...\")\n",
618 | " # Calculate mean, variance, standard deviation, and median\n",
619 | " statistics = {\n",
620 | " \"mean\": {\n",
621 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"mean\"),\n",
622 | " \"file\": (\n",
623 | " \"vnp46a2-south-korea-2020-mean-\"\n",
624 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
625 | " ),\n",
626 | " },\n",
627 | " \"variance\": {\n",
628 | " \"array\": viirs.calculate_statistic(\n",
629 | " array_list, statistic=\"variance\"\n",
630 | " ),\n",
631 | " \"file\": (\n",
632 | " \"vnp46a2-south-korea-2020-variance-\"\n",
633 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
634 | " ),\n",
635 | " },\n",
636 | " \"deviation\": {\n",
637 | " \"array\": viirs.calculate_statistic(\n",
638 | " array_list, statistic=\"deviation\"\n",
639 | " ),\n",
640 | " \"file\": (\n",
641 | " \"vnp46a2-south-korea-2020-deviation-\"\n",
642 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
643 | " ),\n",
644 | " },\n",
645 | " \"median\": {\n",
646 | " \"array\": viirs.calculate_statistic(array_list, statistic=\"median\"),\n",
647 | " \"file\": (\n",
648 | " \"vnp46a2-south-korea-2020-median-\"\n",
649 | " f\"{date_range[0]}-{date_range[-1]}.tif\"\n",
650 | " ),\n",
651 | " },\n",
652 | " }\n",
653 | "\n",
654 | " print(\"Exporting statistics to GeoTiffs...\")\n",
    # Export statistic arrays to GeoTiff\n",
656 | " for statistic in statistics.keys():\n",
657 | " try:\n",
658 | " viirs.export_array(\n",
659 | " array=statistics.get(statistic).get(\"array\"),\n",
660 | " output_path=os.path.join(\n",
661 | " statistics_output_folder,\n",
662 | " statistics.get(statistic).get(\"file\"),\n",
663 | " ),\n",
664 | " metadata=metadata,\n",
665 | " )\n",
666 | " except Exception as error:\n",
667 | " print(error)\n",
668 | "\n",
669 | " print(f\"Completed date range: {date_range[0]}-{date_range[-1]}\\n\")"
670 | ]
671 | }
672 | ],
673 | "metadata": {
674 | "hide_input": false,
675 | "jupytext": {
676 | "formats": "ipynb,py:light"
677 | },
678 | "kernelspec": {
679 | "display_name": "Python 3",
680 | "language": "python",
681 | "name": "python3"
682 | },
683 | "language_info": {
684 | "codemirror_mode": {
685 | "name": "ipython",
686 | "version": 3
687 | },
688 | "file_extension": ".py",
689 | "mimetype": "text/x-python",
690 | "name": "python",
691 | "nbconvert_exporter": "python",
692 | "pygments_lexer": "ipython3",
693 | "version": "3.7.6"
694 | },
695 | "toc": {
696 | "base_numbering": 1,
697 | "nav_menu": {},
698 | "number_sections": true,
699 | "sideBar": true,
700 | "skip_h1_title": false,
701 | "title_cell": "Table of Contents",
702 | "title_sidebar": "Contents",
703 | "toc_cell": false,
704 | "toc_position": {},
705 | "toc_section_display": true,
706 | "toc_window_display": true
707 | },
708 | "varInspector": {
709 | "cols": {
710 | "lenName": 16,
711 | "lenType": 16,
712 | "lenVar": 40
713 | },
714 | "kernels_config": {
715 | "python": {
716 | "delete_cmd_postfix": "",
717 | "delete_cmd_prefix": "del ",
718 | "library": "var_list.py",
719 | "varRefreshCmd": "print(var_dic_list())"
720 | },
721 | "r": {
722 | "delete_cmd_postfix": ") ",
723 | "delete_cmd_prefix": "rm(",
724 | "library": "var_list.r",
725 | "varRefreshCmd": "cat(var_dic_list()) "
726 | }
727 | },
728 | "types_to_exclude": [
729 | "module",
730 | "function",
731 | "builtin_function_or_method",
732 | "instance",
733 | "_Feature"
734 | ],
735 | "window_display": false
736 | }
737 | },
738 | "nbformat": 4,
739 | "nbformat_minor": 4
740 | }
741 |
--------------------------------------------------------------------------------
/01-code-scripts/viirs.py:
--------------------------------------------------------------------------------
1 | """ Module to work with NASA VIIRS DNB data """
2 |
3 | import os
4 | import re
5 | import datetime as dt
6 | import matplotlib.pyplot as plt
7 | from matplotlib import colors
8 | import numpy as np
9 | import numpy.ma as ma
10 | import pandas as pd
11 | import rasterio as rio
12 | from rasterio.transform import from_origin
13 | import earthpy.plot as ep
14 | import earthpy.spatial as es
15 |
16 |
def calculate_statistic(data, statistic="mean"):
    """Calculates the specified statistic over input arrays covering
    the same geographic area.

    Parameters
    ----------
    data : list of numpy arrays
        List of arrays containing the data. Individual arrays can
        contain NaN values.

    statistic : str (optional)
        Statistic to be calculated over the arrays in the
        list. Default value is 'mean'. Function supports
        'mean', 'variance', 'deviation', and 'median'.

    Returns
    -------
    data_statistic : numpy array
        Array containing the statistic value for each pixel, computed
        over the number of arrays in the input list.

    Example
    -------
    >>> import numpy as np
    >>> arrays = [np.array([1.0, 2.0]), np.array([3.0, 4.0])]
    >>> calculate_statistic(arrays)
    array([2., 3.])
    >>> calculate_statistic(arrays, statistic="median")
    array([2., 3.])
    """
    # Raise errors
    if not isinstance(data, list):
        raise TypeError("Input data must be of type list.")

    # Map each supported statistic to its NaN-aware NumPy reducer so
    # pixels with missing data in some arrays are still summarized
    reducers = {
        "mean": np.nanmean,
        "variance": np.nanvar,
        "deviation": np.nanstd,
        "median": np.nanmedian,
    }
    if statistic not in reducers:
        raise ValueError(
            "Invalid statistic. Function supports "
            "'mean', 'variance', 'deviation', or 'median'."
        )

    # Stack the arrays into a (num_arrays, rows, columns) array and
    # reduce along the stacking axis (per-pixel statistic)
    data_statistic = reducers[statistic](np.stack(data), axis=0)

    return data_statistic
65 |
66 |
def clip_vnp46a1(geotiff_path, clip_boundary, clip_country, output_folder):
    """Clips an image to a bounding box and exports the clipped image to
    a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff image to be clipped.

    clip_boundary : geopandas geodataframe
        Geodataframe containing the boundary used for clipping.

    clip_country : str
        Name of the country the data is being clipped to. The country
        name is used in the name of the exported file. E.g. 'South Korea'.
        Spaces and capital letters are acceptable and handled within the
        function.

    output_folder : str
        Path to the folder where the clipped file will be exported to.

    Returns
    -------
    message : str
        Indication of clipping completion status (success or failure).

    Example
    -------
    >>> clip_vnp46a1(
    ...     geotiff_path="VNP46A1.A2020001.h30v05.001.2020004003738.tif",
    ...     clip_boundary=south_korea_boundary,
    ...     clip_country="South Korea",
    ...     output_folder="03-processed-data",
    ... )
    """
    # Clip VNP46A1 file
    print(
        f"Started clipping: Clip {os.path.basename(geotiff_path)} "
        f"to {clip_country} boundary"
    )
    try:
        print("Clipping image...")
        # Clip image (return clipped array and new metadata)
        with rio.open(geotiff_path) as src:
            cropped_image, cropped_metadata = es.crop_image(
                raster=src, geoms=clip_boundary
            )

        print("Setting export name...")
        # Set export name
        export_name = create_clipped_export_name(
            image_path=geotiff_path, country_name=clip_country
        )

        print("Exporting to GeoTiff...")
        # Export file
        export_array(
            array=cropped_image[0],
            output_path=os.path.join(output_folder, export_name),
            metadata=cropped_metadata,
        )
    except Exception as error:
        # Best-effort workflow: report failure without re-raising
        message = f"Clipping failed: {error}\n"
    else:
        message = (
            f"Completed clipping: Clip {os.path.basename(geotiff_path)} "
            f"to {clip_country} boundary\n"
        )
    print(message)

    # BUG FIX: previously returned the result of print() (always None)
    # even though the docstring promises a str; return the message text
    return message
136 |
137 |
def clip_vnp46a2(geotiff_path, clip_boundary, clip_country, output_folder):
    """Clips an image to a bounding box and exports the clipped image to
    a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff image to be clipped.

    clip_boundary : geopandas geodataframe
        Geodataframe containing the boundary used for clipping.

    clip_country : str
        Name of the country the data is being clipped to. The country
        name is used in the name of the exported file. E.g. 'South Korea'.
        Spaces and capital letters are acceptable and handled within the
        function.

    output_folder : str
        Path to the folder where the clipped file will be exported to.

    Returns
    -------
    message : str
        Indication of clipping completion status (success or failure).

    Example
    -------
    >>> clip_vnp46a2(
    ...     geotiff_path="VNP46A2.A2016153.h30v05.001.2020267141459.tif",
    ...     clip_boundary=south_korea_boundary,
    ...     clip_country="South Korea",
    ...     output_folder="03-processed-data",
    ... )
    """
    # Clip VNP46A2 file
    print(
        f"Started clipping: Clip {os.path.basename(geotiff_path)} "
        f"to {clip_country} boundary"
    )
    try:
        print("Clipping image...")
        # Clip image (return clipped array and new metadata)
        with rio.open(geotiff_path) as src:
            cropped_image, cropped_metadata = es.crop_image(
                raster=src, geoms=clip_boundary
            )

        print("Setting export name...")
        # Set export name
        export_name = create_clipped_export_name(
            image_path=geotiff_path, country_name=clip_country
        )

        print("Exporting to GeoTiff...")
        # Export file
        export_array(
            array=cropped_image[0],
            output_path=os.path.join(output_folder, export_name),
            metadata=cropped_metadata,
        )
    except Exception as error:
        # Best-effort workflow: report failure without re-raising
        message = f"Clipping failed: {error}\n"
    else:
        message = (
            f"Completed clipping: Clip {os.path.basename(geotiff_path)} "
            f"to {clip_country} boundary\n"
        )
    print(message)

    # BUG FIX: previously returned the result of print() (always None)
    # even though the docstring promises a str; return the message text
    return message
207 |
208 |
def concatenate_preprocessed_vnp46a1(
    west_geotiff_path, east_geotiff_path, output_folder
):
    """Concatenates horizontally-adjacent preprocessed VNP46A1 GeoTiff
    files and exports the concatenated array to a single GeoTiff.

    Parameters
    ----------
    west_geotiff_path : str
        Path to the West-most GeoTiff.

    east_geotiff_path : str
        Path to the East-most GeoTiff.

    output_folder : str
        Path to the folder where the concatenated file will be
        exported to.

    Returns
    -------
    message : str
        Indication of concatenation completion status (success
        or failure).

    Example
    -------
    >>> concatenate_preprocessed_vnp46a1(
    ...     "vnp46a1-20200101-h29v05.tif",
    ...     "vnp46a1-20200101-h30v05.tif",
    ...     "03-processed-data",
    ... )
    """
    # Concatenate adjacent VNP46A1 GeoTiff files
    print(
        (
            f"Started concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}"
        )
    )

    try:
        print("Concatenating West and East arrays...")
        # Concatenate West and East images along the 1-axis (columns)
        concatenated = np.concatenate(
            (
                read_geotiff_into_array(geotiff_path=west_geotiff_path),
                read_geotiff_into_array(geotiff_path=east_geotiff_path),
            ),
            axis=1,
        )

        print("Getting bounding box information...")
        # Read each bounding box once; the West image supplies the
        # left/top/bottom edges, the East image supplies the right edge
        west_bounds = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        )
        east_bounds = extract_geotiff_bounding_box(
            geotiff_path=east_geotiff_path
        )
        longitude_min = west_bounds.left
        longitude_max = east_bounds.right
        latitude_min = west_bounds.bottom
        latitude_max = west_bounds.top

        print("Creating transform...")
        # Set transform (west bound, north bound, x cell size, y cell size)
        concatenated_transform = from_origin(
            longitude_min,
            latitude_max,
            (longitude_max - longitude_min) / concatenated.shape[1],
            (latitude_max - latitude_min) / concatenated.shape[0],
        )

        print("Creating metadata...")
        # Create metadata for GeoTiff export
        metadata = create_metadata(
            array=concatenated,
            transform=concatenated_transform,
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )

        print("Setting file export name...")
        # Get name for the exported file
        export_name = create_concatenated_export_name(
            west_image_path=west_geotiff_path,
            east_image_path=east_geotiff_path,
        )

        print("Exporting to GeoTiff...")
        # Export concatenated array
        export_array(
            array=concatenated,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # Best-effort workflow: report failure without re-raising
        message = f"Concatenating failed: {error}\n"
    else:
        message = (
            f"Completed concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}\n"
        )
    print(message)

    # BUG FIX: previously returned the result of print() (always None)
    # even though the docstring promises a str; return the message text
    return message
322 |
323 |
def concatenate_preprocessed_vnp46a2(
    west_geotiff_path, east_geotiff_path, output_folder
):
    """Concatenates horizontally-adjacent preprocessed VNP46A2 GeoTiff
    files and exports the concatenated array to a single GeoTiff.

    Parameters
    ----------
    west_geotiff_path : str
        Path to the West-most GeoTiff.

    east_geotiff_path : str
        Path to the East-most GeoTiff.

    output_folder : str
        Path to the folder where the concatenated file will be
        exported to.

    Returns
    -------
    message : str
        Indication of concatenation completion status (success
        or failure).

    Example
    -------
    >>> concatenate_preprocessed_vnp46a2(
    ...     "vnp46a2-20200101-h29v05.tif",
    ...     "vnp46a2-20200101-h30v05.tif",
    ...     "03-processed-data",
    ... )
    """
    # Concatenate adjacent VNP46A2 GeoTiff files (comment previously
    # said VNP46A1 by copy/paste)
    print(
        (
            f"Started concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}"
        )
    )

    try:
        print("Concatenating West and East arrays...")
        # Concatenate West and East images along the 1-axis (columns)
        concatenated = np.concatenate(
            (
                read_geotiff_into_array(geotiff_path=west_geotiff_path),
                read_geotiff_into_array(geotiff_path=east_geotiff_path),
            ),
            axis=1,
        )

        print("Getting bounding box information...")
        # Read each bounding box once; the West image supplies the
        # left/top/bottom edges, the East image supplies the right edge
        west_bounds = extract_geotiff_bounding_box(
            geotiff_path=west_geotiff_path
        )
        east_bounds = extract_geotiff_bounding_box(
            geotiff_path=east_geotiff_path
        )
        longitude_min = west_bounds.left
        longitude_max = east_bounds.right
        latitude_min = west_bounds.bottom
        latitude_max = west_bounds.top

        print("Creating transform...")
        # Set transform (west bound, north bound, x cell size, y cell size)
        concatenated_transform = from_origin(
            longitude_min,
            latitude_max,
            (longitude_max - longitude_min) / concatenated.shape[1],
            (latitude_max - latitude_min) / concatenated.shape[0],
        )

        print("Creating metadata...")
        # Create metadata for GeoTiff export
        metadata = create_metadata(
            array=concatenated,
            transform=concatenated_transform,
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )

        print("Setting file export name...")
        # Get name for the exported file
        export_name = create_concatenated_export_name(
            west_image_path=west_geotiff_path,
            east_image_path=east_geotiff_path,
        )

        print("Exporting to GeoTiff...")
        # Export concatenated array
        export_array(
            array=concatenated,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        # Best-effort workflow: report failure without re-raising
        message = f"Concatenating failed: {error}\n"
    else:
        message = (
            f"Completed concatenating:\n "
            f"{os.path.basename(west_geotiff_path)}\n "
            f"{os.path.basename(east_geotiff_path)}\n"
        )
    print(message)

    # BUG FIX: previously returned the result of print() (always None)
    # even though the docstring promises a str; return the message text
    return message
437 |
438 |
def create_clipped_export_name(image_path, country_name):
    """Creates a file name indicating a clipped file.

    Parameters
    ----------
    image_path : str
        Path to the original (unclipped) image. The first 7 characters
        of the file name identify the data source (e.g. 'VNP46A1') and
        characters 9-15 hold the acquisition date as YYYYJJJ.

    country_name : str
        Name of the country the image was clipped to. Spaces are
        replaced with hyphens and letters lowercased for the export
        name. E.g. 'South Korea' -> 'south-korea'.

    Returns
    -------
    export_name : str
        New file name for export, indicating clipping.

    Example
    -------
    >>> create_clipped_export_name(
    ...     "VNP46A1.A2020001.h30v05.001.2020004003738.h5",
    ...     "South Korea",
    ... )
    'VNP46A1-20200101-clipped-south-korea.tif'
    """
    # Set export name from the source identifier, the acquisition date
    # (converted to YYYYMMDD), and the normalized country name
    image_source = os.path.basename(image_path)[:7]
    image_date = extract_date_vnp46a1(image_path)
    image_country = country_name.replace(" ", "-").lower()
    export_name = f"{image_source}-{image_date}-clipped-{image_country}.tif"

    return export_name
466 |
467 |
def create_concatenated_export_name(west_image_path, east_image_path):
    """Creates a file name indicating the concatenation of two
    horizontally-adjacent files.

    Parameters
    ----------
    west_image_path : str
        Path to the West-most image.

    east_image_path : str
        Path to the East-most image.

    Returns
    -------
    export_name : str
        New file name for export, indicating concatenation.

    Example
    -------
    >>> create_concatenated_export_name(
    ...     "vnp46a1-20200101-h29v05.tif",
    ...     "vnp46a1-20200101-h30v05.tif",
    ... )
    'vnp46a1-20200101-h2930v05.tif'
    """
    # Extract the horizontal grid numbers from the West and East images
    # (characters 18-19 of 'vnp46a1-YYYYMMDD-hXXvYY.tif' style names)
    west_horizontal_grid_number = os.path.basename(west_image_path)[18:20]
    east_horizontal_grid_number = os.path.basename(east_image_path)[18:20]

    # Keep the data source and date; combine both horizontal grid
    # numbers with the (shared) vertical grid number
    data_source_and_date = os.path.basename(west_image_path)[:16]
    vertical_grid_number = os.path.basename(west_image_path)[21:23]
    export_name = (
        f"{data_source_and_date}-h{west_horizontal_grid_number}"
        f"{east_horizontal_grid_number}v{vertical_grid_number}.tif"
    )

    return export_name
507 |
508 |
def create_date_range(start_date, end_date):
    """Creates a list of dates between a specified start and end date.

    Parameters
    ----------
    start_date : str
        Start date, formatted as 'YYYY-MM-DD'.

    end_date : str
        End date, formatted as 'YYYY-MM-DD'.

    Returns
    -------
    date_range : list (of str)
        List of dates between and including the start and end dates,
        with each date formatted as 'YYYYMMDD'.

    Example
    -------
    >>> create_date_range("2020-01-01", "2020-01-03")
    ['20200101', '20200102', '20200103']
    """
    # Expand the inclusive date range and format each date as YYYYMMDD
    # (variable renamed to match the documented return value)
    date_range = [
        dt.datetime.strftime(date, "%Y%m%d")
        for date in pd.date_range(start=start_date, end=end_date)
    ]

    return date_range
540 |
541 |
def create_metadata(
    array, transform, driver="GTiff", nodata=0, count=1, crs="epsg:4326"
):
    """Creates export metadata, for use with exporting an array to
    raster format.

    Parameters
    ----------
    array : numpy array
        Array containing data for export.

    transform : rasterio.transform affine object
        Affine transformation for the georeferenced array.

    driver : str
        File type/format for export. Defaults to GeoTiff ('GTiff').

    nodata : int or float
        Value in the array indicating no data. Defaults to 0.

    count : int
        Number of bands in the array for export. Defaults to 1.

    crs : str
        Coordinate reference system for the georeferenced
        array. Defaults to EPSG 4326 ('epsg:4326').

    Returns
    -------
    metadata : dict
        Dictionary containing the export metadata.

    Example
    -------
    >>> import numpy as np
    >>> from rasterio.transform import from_origin
    >>> arr = np.array([[1, 2], [3, 4]])
    >>> transform = from_origin(-73.0, 43.0, 0.5, 0.5)
    >>> meta = create_metadata(arr, transform)
    >>> meta["width"], meta["height"], meta["driver"]
    (2, 2, 'GTiff')
    """
    # Raster height/width follow the (rows, columns) array layout
    num_rows, num_columns = array.shape[0], array.shape[1]

    return {
        "driver": driver,
        "dtype": array.dtype,
        "nodata": nodata,
        "width": num_columns,
        "height": num_rows,
        "count": count,
        "crs": crs,
        "transform": transform,
    }
608 |
609 |
def create_transform_vnp46a1(hdf5):
    """Creates a geographic transform for a VNP46A1 HDF5 file,
    based on longitude bounds, latitude bounds, and cell size.

    Parameters
    ----------
    hdf5 : str
        Path to an existing VNP46A1 HDF5 file.

    Returns
    -------
    transform : affine.Affine object
        Affine transformation for the georeferenced array.
    """
    # Read the tile bounding box from the top-level dataset tags
    with rio.open(hdf5) as dataset:
        tags = dataset.tags()
        longitude_min = int(
            tags["HDFEOS_GRIDS_VNP_Grid_DNB_WestBoundingCoord"]
        )
        longitude_max = int(
            tags["HDFEOS_GRIDS_VNP_Grid_DNB_EastBoundingCoord"]
        )
        latitude_min = int(
            tags["HDFEOS_GRIDS_VNP_Grid_DNB_SouthBoundingCoord"]
        )
        latitude_max = int(
            tags["HDFEOS_GRIDS_VNP_Grid_DNB_NorthBoundingCoord"]
        )

    # Row/column counts come from the first Science Data Set
    # (subdataset/band)
    with rio.open(dataset.subdatasets[0]) as science_data_set:
        num_rows = science_data_set.meta.get("height")
        num_columns = science_data_set.meta.get("width")

    # Build the transform from the upper-left corner and the per-pixel
    # cell sizes implied by the bounds and the raster dimensions
    transform = from_origin(
        longitude_min,
        latitude_max,
        (longitude_max - longitude_min) / num_columns,
        (latitude_max - latitude_min) / num_rows,
    )

    return transform
663 |
664 |
def create_transform_vnp46a2(hdf5):
    """Creates a geographic transform for a VNP46A2 HDF5 file,
    based on longitude bounds, latitude bounds, and cell size.

    Parameters
    ----------
    hdf5 : str
        Path to an existing VNP46A2 HDF5 file.
        (Docstring previously said VNP46A1 by copy/paste.)

    Returns
    -------
    transform : affine.Affine object
        Affine transformation for the georeferenced array.
    """
    # Extract bounding box from top-level dataset tags (VNP46A2 files
    # use short tag names, unlike VNP46A1)
    with rio.open(hdf5) as dataset:
        longitude_min = int(dataset.tags()["WestBoundingCoord"])
        longitude_max = int(dataset.tags()["EastBoundingCoord"])
        latitude_min = int(dataset.tags()["SouthBoundingCoord"])
        latitude_max = int(dataset.tags()["NorthBoundingCoord"])

    # Extract number of rows and columns from the first
    # Science Data Set (subdataset/band)
    with rio.open(dataset.subdatasets[0]) as band:
        num_rows, num_columns = (
            band.meta.get("height"),
            band.meta.get("width"),
        )

    # Define transform (top-left corner, cell size)
    transform = from_origin(
        longitude_min,
        latitude_max,
        (longitude_max - longitude_min) / num_columns,
        (latitude_max - latitude_min) / num_rows,
    )

    return transform
710 |
711 |
def export_array(array, output_path, metadata):
    """Exports a numpy array to a GeoTiff.

    Parameters
    ----------
    array : numpy array
        Numpy array to be exported to GeoTiff.

    output_path : str
        Path to the output file (including filename).

    metadata : dict
        Dictionary containing the metadata required
        for export.

    Returns
    -------
    output_message : str
        Message indicating success or failure of export.

    Example
    -------
    >>> # Define export metadata (see create_metadata())
    >>> export_metadata = create_metadata(radiance_mean, transform)
    >>> # Export mean radiance
    >>> export_array(
    ...     array=radiance_mean,
    ...     output_path="radiance-mean.tif",
    ...     metadata=export_metadata,
    ... )
    Exported: radiance-mean.tif
    'Exported: radiance-mean.tif'
    """
    # Write numpy array to GeoTiff (band 1)
    try:
        with rio.open(output_path, "w", **metadata) as dst:
            dst.write(array, 1)
    except Exception as error:
        # Best-effort export: report failure without re-raising
        output_message = f"ERROR: {error}"
    else:
        output_message = f"Exported: {os.path.split(output_path)[-1]}"
    print(output_message)

    # BUG FIX: previously returned the result of print() (always None)
    # even though the docstring promises a str; return the message text
    return output_message
772 |
773 |
def extract_acquisition_date_vnp46a1(hdf5_path):
    """Returns the acquisition date of a VNP46A1 HDF5 file.

    Parameters
    ----------
    hdf5_path : str
        Path to a VNP46A1 HDF5 file.

    Returns
    -------
    acquisition_date : str
        Acquisition date of the image, formatted as 'YYYY-MM-DD'.

    Example
    -------
    >>> hdf5_file = "VNP46A1.A2020001.h30v05.001.2020004003738.h5"
    >>> extract_acquisition_date_vnp46a1(hdf5_file)
    '2020-01-01'
    """
    # Read the file-level metadata tags; the range-beginning date tag
    # holds the acquisition date
    with rio.open(hdf5_path) as dataset:
        file_tags = dataset.tags()

    return file_tags["HDFEOS_GRIDS_VNP_Grid_DNB_RangeBeginningDate"]
800 |
801 |
def extract_band_vnp46a1(hdf5_path, band_name):
    """Extracts the specified band (Science Data Set) from a NASA VNP46A1
    HDF5 file.

    Available Science Data Sets include:

    BrightnessTemperature_M12
    Moon_Illumination_Fraction
    Moon_Phase_Angle
    QF_Cloud_Mask
    QF_DNB
    QF_VIIRS_M10
    QF_VIIRS_M11
    QF_VIIRS_M12
    QF_VIIRS_M13
    QF_VIIRS_M15
    QF_VIIRS_M16
    BrightnessTemperature_M13
    Radiance_M10
    Radiance_M11
    Sensor_Azimuth
    Sensor_Zenith
    Solar_Azimuth
    Solar_Zenith
    UTC_Time
    BrightnessTemperature_M15
    BrightnessTemperature_M16
    DNB_At_Sensor_Radiance_500m
    Glint_Angle
    Granule
    Lunar_Azimuth
    Lunar_Zenith

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A1 HDF5 (.h5) file.

    band_name : str
        Name of the band (Science Data Set) to be extracted. Must be an exact
        match to an available Science Data Set.

    Returns
    -------
    band : numpy array
        Array containing the data for the specified band (Science Data Set).

    Example
    -------
    >>> qf_cloud_mask = extract_band_vnp46a1(
    ...     hdf5='VNP46A1.A2020001.h30v05.001.2020004003738.h5',
    ...     band='QF_Cloud_Mask'
    ... )
    >>> type(qf_cloud_mask)
    numpy.ndarray
    """
    # Raise error for invalid band name
    band_names = [
        "BrightnessTemperature_M12",
        "Moon_Illumination_Fraction",
        "Moon_Phase_Angle",
        "QF_Cloud_Mask",
        "QF_DNB",
        "QF_VIIRS_M10",
        "QF_VIIRS_M11",
        "QF_VIIRS_M12",
        "QF_VIIRS_M13",
        "QF_VIIRS_M15",
        "QF_VIIRS_M16",
        "BrightnessTemperature_M13",
        "Radiance_M10",
        "Radiance_M11",
        "Sensor_Azimuth",
        "Sensor_Zenith",
        "Solar_Azimuth",
        "Solar_Zenith",
        "UTC_Time",
        "BrightnessTemperature_M15",
        "BrightnessTemperature_M16",
        "DNB_At_Sensor_Radiance_500m",
        "Glint_Angle",
        "Granule",
        "Lunar_Azimuth",
        "Lunar_Zenith",
    ]
    if band_name not in band_names:
        raise ValueError(
            f"Invalid band name. Must be one of the following: {band_names}"
        )

    # Open top-level dataset, loop through Science Data Sets (subdatasets),
    # and extract the specified band. Subdataset names end with the band
    # name, so endswith() matches the same suffix the old regex did.
    band = None
    with rio.open(hdf5_path) as dataset:
        for science_data_set in dataset.subdatasets:
            if science_data_set.endswith(band_name):
                with rio.open(science_data_set) as src:
                    band = src.read(1)

    # BUG FIX: a valid band name missing from the file previously caused
    # an UnboundLocalError on return; raise an informative error instead
    if band is None:
        raise ValueError(f"Band {band_name} not found in file: {hdf5_path}")

    return band
901 |
902 |
def extract_band_vnp46a2(hdf5_path, band_name):
    """Extracts the specified band (Science Data Set) from a NASA VNP46A2
    HDF5 file.

    Available Science Data Sets include:

    DNB_BRDF-Corrected_NTL
    DNB_Lunar_Irradiance
    Gap_Filled_DNB_BRDF-Corrected_NTL
    Latest_High_Quality_Retrieval
    Mandatory_Quality_Flag
    QF_Cloud_Mask
    Snow_Flag

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A2 HDF5 (.h5) file.

    band_name : str
        Name of the band (Science Data Set) to be extracted. Must be an exact
        match to an available Science Data Set.

    Returns
    -------
    band : numpy array
        Array containing the data for the specified band (Science Data Set).

    Example
    -------
    >>> qf_cloud_mask = extract_band_vnp46a2(
    ...     hdf5='VNP46A2.A2016153.h30v05.001.2020267141459.h5',
    ...     band='QF_Cloud_Mask'
    ... )
    >>> type(qf_cloud_mask)
    numpy.ndarray
    """
    # Raise error for invalid band name
    band_names = [
        "DNB_BRDF-Corrected_NTL",
        "DNB_Lunar_Irradiance",
        "Gap_Filled_DNB_BRDF-Corrected_NTL",
        "Latest_High_Quality_Retrieval",
        "Mandatory_Quality_Flag",
        "QF_Cloud_Mask",
        "Snow_Flag",
    ]
    if band_name not in band_names:
        raise ValueError(
            f"Invalid band name. Must be one of the following: {band_names}"
        )

    # Open top-level dataset, loop through Science Data Sets (subdatasets),
    # and extract the specified band. Subdataset names end with the band
    # name, so endswith() matches the same suffix the old regex did (and
    # avoids the '-' characters in some band names being regex-interpreted).
    band = None
    with rio.open(hdf5_path) as dataset:
        for science_data_set in dataset.subdatasets:
            if science_data_set.endswith(band_name):
                with rio.open(science_data_set) as src:
                    band = src.read(1)

    # BUG FIX: a valid band name missing from the file previously caused
    # an UnboundLocalError on return; raise an informative error instead
    if band is None:
        raise ValueError(f"Band {band_name} not found in file: {hdf5_path}")

    return band
964 |
965 |
def extract_date_vnp46a1(geotiff_path):
    """Extracts the acquisition date from a preprocessed VNP46A1 GeoTiff.

    Assumes the preprocessed naming convention
    'vnp46a1-aYYYYJJJ-...', i.e. characters 9-15 of the base file name
    hold the acquisition date as year and day-of-year (YYYYJJJ).

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    date : str
        Acquisition date (YYYYMMDD) of the preprocessed VNP46A1 GeoTiff.

    Example
    -------
    >>> extract_date_vnp46a1(
    ...     'vnp46a1-a2020153-h30v05-001-2020154124812.tif'
    ... )
    '20200601'
    """
    # Get date (convert YYYYJJJ day-of-year to YYYYMMDD calendar date)
    date = dt.datetime.strptime(
        os.path.basename(geotiff_path)[9:16], "%Y%j"
    ).strftime("%Y%m%d")

    return date
992 |
993 |
def extract_date_vnp46a2(geotiff_path):
    """Extracts the acquisition date from a preprocessed VNP46A2 GeoTiff.

    Assumes the preprocessed naming convention
    'vnp46a2-aYYYYJJJ-...', i.e. characters 9-15 of the base file name
    hold the acquisition date as year and day-of-year (YYYYJJJ).

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    date : str
        Acquisition date (YYYYMMDD) of the preprocessed VNP46A2 GeoTiff.

    Example
    -------
    >>> extract_date_vnp46a2(
    ...     'vnp46a2-a2016153-h30v05-001-2020267141459.tif'
    ... )
    '20160601'
    """
    # Get date (convert YYYYJJJ day-of-year to YYYYMMDD calendar date)
    date = dt.datetime.strptime(
        os.path.basename(geotiff_path)[9:16], "%Y%j"
    ).strftime("%Y%m%d")

    return date
1020 |
1021 |
def extract_geotiff_bounding_box(geotiff_path):
    """Returns the bounding box of a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    bounding_box : rasterio.coords.BoundingBox
        Bounding box for the GeoTiff.

    Example
    -------
    >>> bounds = extract_geotiff_bounding_box('radiance.tif')
    """
    # Read the bounds straight from the raster's georeferencing metadata
    with rio.open(geotiff_path) as source:
        return source.bounds
1047 |
1048 |
def extract_geotiff_metadata(geotiff_path):
    """Returns the metadata dictionary of a GeoTiff file.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    Returns
    -------
    metadata : dict
        Dictionary containing the metadata (driver, dtype, nodata,
        dimensions, count, crs, transform).

    Example
    -------
    >>> meta = extract_geotiff_metadata('radiance.tif')
    """
    # Pull the profile metadata without reading any pixel data
    with rio.open(geotiff_path) as source:
        return source.meta
1074 |
1075 |
def extract_qa_bits(qa_band, start_bit, end_bit):
    """Extracts the QA bitmask values for a specified bitmask (starting
    and ending bit).

    Parameters
    ----------
    qa_band : numpy array
        Array containing the raw QA values (base-2) for all bitmasks.

    start_bit : int
        First bit in the bitmask.

    end_bit : int
        Last bit in the bitmask.

    Returns
    -------
    qa_values : numpy array
        Array containing the extracted QA values (base-10) for the
        bitmask.

    Example
    -------
    >>> import numpy as np
    >>> extract_qa_bits(np.array([0b11000000]), start_bit=6, end_bit=7)
    array([3])
    """
    # Initialize QA bit string/pattern to check QA band against
    qa_bits = 0

    # Build a mask with a 1 in every bit position of the bitmask: each
    # position contributes 2**bit (1 << bit).
    # BUG FIX: the previous code added `bit ** 2`, which is wrong for
    # every bit except bit 2 (e.g. bits 6-7 gave 85 instead of 192, and
    # bit 0 gave 0 instead of 1).
    for bit in range(start_bit, end_bit + 1):
        qa_bits += 1 << bit

    # Check QA band against specified QA bits to see what
    # QA flag values are set
    qa_flags_set = qa_band & qa_bits

    # Get base-10 value that matches bitmask documentation
    # (0-1 for single bit, 0-3 for 2 bits, or 0-(2^N - 1) for N bits)
    qa_values = qa_flags_set >> start_bit

    return qa_values
1121 |
1122 |
def get_masking_details(array):
    """Prints and returns information about how many pixels are masked
    in a 2D masked array.

    Parameters
    ----------
    array : numpy.ma.core.MaskedArray
        2D masked array.

    Returns
    -------
    message : str
        Message providing the masking information.

    Example
    -------
    >>> import numpy.ma as ma
    >>> arr = ma.masked_array([[1, 2], [3, 4]],
    ...                       mask=[[True, False], [False, False]])
    >>> get_masking_details(arr)
    Masked: 1/4, Unmasked: 3/4
    'Masked: 1/4, Unmasked: 3/4'
    """
    # Get masking information (total assumes a 2D array)
    total = array.shape[0] * array.shape[1]
    masked = ma.count_masked(array)
    unmasked = array.count()

    # Create message, print it, and return it
    # BUG FIX: previously assigned `message = print(...)`, which always
    # returned None instead of the message string
    message = f"Masked: {masked}/{total}, Unmasked: {unmasked}/{total}"
    print(message)

    return message
1163 |
1164 |
def get_unique_values(array):
    """Returns the sorted unique values of a NumPy array as a list.

    Parameters
    ----------
    array : numpy array
        Array from which to get the unique values.

    Returns
    -------
    values : list
        List of unique values from the array, in ascending order.

    Example
    -------
    >>> import numpy as np
    >>> get_unique_values(np.array([[3, 1], [2, 1]]))
    [1, 2, 3]
    """
    # np.unique returns sorted unique values; tolist() converts the
    # NumPy scalars to native Python types
    return np.unique(array).tolist()
1189 |
1190 |
def plot_quality_flag_bitmask(bitmask_array, bitmask_name, axis):
    """Plots the discrete VNP46A1 bitmask values for an image on a
    provided axis, with a categorical legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.

    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Day/Night', 'Land/Water
        Background', 'Cloud Mask Quality', 'Cloud Detection',
        'Shadow Detected', 'Cirrus Detection', 'Snow/Ice Surface', and
        'QF DNB'.

    axis : matplotlib.axes._subplots.AxesSubplot object
        Axis the bitmask will be plotted on.

    Returns
    -------
    axis : matplotlib.axes._subplots.AxesSubplot object
        The axis object associated with the plot.

    Raises
    ------
    ValueError
        If the bitmask name is invalid.

    Example
    -------
    >>> fig, ax = plt.subplots()
    >>> plot_quality_flag_bitmask(day_night_layer, 'Day/Night', ax)
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a1_bitmasks = {
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
        "QF DNB": {
            "values": [0, 1, 2, 4, 8, 16, 256, 512, 1024, 2048],
            "labels": [
                "No Sensor Problems",
                "Substitute Calibration",
                "Out of Range",
                "Saturation",
                "Temperature not Nominal",
                "Stray Light",
                "Bowtie Deleted / Range Bit",
                "Missing EV",
                "Calibration Fail",
                "Dead Detector",
            ],
        },
    }

    # Raise errors
    if bitmask_name not in vnp46a1_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a1_bitmasks.keys())}"
        )

    # Get values and labels for bitmask
    bitmask_values = vnp46a1_bitmasks.get(bitmask_name).get("values")
    bitmask_labels = vnp46a1_bitmasks.get(bitmask_name).get("labels")

    # Create a discrete colormap with one color per bitmask value
    cmap = plt.cm.get_cmap("tab20b", len(bitmask_values))

    # Add start bin of 0 to list of bitmask values
    bins = [0] + bitmask_values

    # Normalize colormap to discrete intervals: boundaries are the
    # midpoints between consecutive bitmask values (plus one upper bound
    # extrapolated past the last value), so each discrete value falls
    # into exactly one color bin
    bounds = [((a + b) / 2) for a, b in zip(bins[:-1], bins[1::1])] + [
        2 * (bins[-1]) - bins[-2]
    ]
    norm = colors.BoundaryNorm(bounds, cmap.N)

    # Plot bitmask on axis with a legend mapping colors to labels
    bitmask = axis.imshow(bitmask_array, cmap=cmap, norm=norm)
    ep.draw_legend(
        im_ax=bitmask,
        classes=bitmask_values,
        cmap=cmap,
        titles=bitmask_labels,
    )
    axis.set_title(f"{bitmask_name}", size=16)
    axis.set_axis_off()

    return axis
1311 |
1312 |
def plot_quality_flag_bitmask_vnp46a2(bitmask_array, bitmask_name, axis):
    """Plots the discrete VNP46A2 bitmask values for an image on a
    provided axis, with a categorical legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.

    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Mandatory Quality Flag',
        'Snow Flag', 'Day/Night', 'Land/Water Background',
        'Cloud Mask Quality', 'Cloud Detection', 'Shadow Detected',
        'Cirrus Detection', and 'Snow/Ice Surface'.

    axis : matplotlib.axes._subplots.AxesSubplot object
        Axis the bitmask will be plotted on.

    Returns
    -------
    axis : matplotlib.axes._subplots.AxesSubplot object
        The axis object associated with the plot.

    Raises
    ------
    ValueError
        If the bitmask name is invalid.

    Example
    -------
    >>> fig, ax = plt.subplots()
    >>> plot_quality_flag_bitmask_vnp46a2(snow_flag_layer, 'Snow Flag', ax)
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a2_bitmasks = {
        "Mandatory Quality Flag": {
            "values": [0, 1, 2, 255],
            "labels": [
                "High-Quality (Persistent)",
                "High-Quality (Ephemeral)",
                "Poor-Quality",
                "No Retrieval",
            ],
        },
        "Snow Flag": {
            "values": [0, 1, 255],
            "labels": ["No Snow/Ice", "Snow/Ice", "Fill Value"],
        },
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5, 7],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
                "No Data / Unknown",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
    }

    # Raise errors
    if bitmask_name not in vnp46a2_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a2_bitmasks.keys())}"
        )

    # Plot bitmask on axis, scaling the default colormap to the value
    # range of the selected bitmask, with a legend mapping values to labels
    bitmask = axis.imshow(
        bitmask_array,
        # cmap="Accent",
        vmin=vnp46a2_bitmasks.get(bitmask_name).get("values")[0],
        vmax=vnp46a2_bitmasks.get(bitmask_name).get("values")[-1],
    )
    ep.draw_legend(
        im_ax=bitmask,
        classes=vnp46a2_bitmasks.get(bitmask_name).get("values"),
        titles=vnp46a2_bitmasks.get(bitmask_name).get("labels"),
    )
    axis.set_title(f"{bitmask_name}", size=16)
    axis.set_axis_off()

    return axis
1415 |
1416 |
def plot_quality_flag_bitmask_single_band(bitmask_array, bitmask_name):
    """Plots the discrete VNP46A1 bitmask values for a single image on
    its own figure, with a categorical legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.

    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Day/Night', 'Land/Water
        Background', 'Cloud Mask Quality', 'Cloud Detection',
        'Shadow Detected', 'Cirrus Detection', 'Snow/Ice Surface', and
        'QF DNB'.

    Returns
    -------
    tuple

        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.

        ax : matplotlib.axes._subplots.AxesSubplot object
            The axes object associated with the plot.

    Raises
    ------
    ValueError
        If the bitmask name is invalid.

    Example
    -------
    >>> fig, ax = plot_quality_flag_bitmask_single_band(
    ...     day_night_layer, 'Day/Night'
    ... )
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a1_bitmasks = {
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
        "QF DNB": {
            "values": [0, 1, 2, 4, 8, 16, 256, 512, 1024, 2048],
            "labels": [
                "No Sensor Problems",
                "Substitute Calibration",
                "Out of Range",
                "Saturation",
                "Temperature not Nominal",
                "Stray Light",
                "Bowtie Deleted / Range Bit",
                "Missing EV",
                "Calibration Fail",
                "Dead Detector",
            ],
        },
    }

    # Raise errors
    if bitmask_name not in vnp46a1_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a1_bitmasks.keys())}"
        )

    # Get values and labels for bitmask
    bitmask_values = vnp46a1_bitmasks.get(bitmask_name).get("values")
    bitmask_labels = vnp46a1_bitmasks.get(bitmask_name).get("labels")

    # Create a discrete colormap with one color per bitmask value
    cmap = plt.cm.get_cmap("tab20b", len(bitmask_values))

    # Add start bin of 0 to list of bitmask values
    bins = [0] + bitmask_values

    # Normalize colormap to discrete intervals: boundaries are the
    # midpoints between consecutive bitmask values (plus one upper bound
    # extrapolated past the last value), so each discrete value falls
    # into exactly one color bin
    bounds = [((a + b) / 2) for a, b in zip(bins[:-1], bins[1::1])] + [
        2 * (bins[-1]) - bins[-2]
    ]
    norm = colors.BoundaryNorm(bounds, cmap.N)

    # Plot bitmask on a new dark-background figure with a legend
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(figsize=(12, 8))
        bitmask = ax.imshow(bitmask_array, cmap=cmap, norm=norm)
        ep.draw_legend(
            im_ax=bitmask,
            classes=bitmask_values,
            cmap=cmap,
            titles=bitmask_labels,
        )
        ax.set_title(f"{bitmask_name} Bitmask", size=20)
        ax.set_axis_off()

    return fig, ax
1539 |
1540 |
def plot_quality_flag_bitmask_single_band_vnp46a2(bitmask_array, bitmask_name):
    """Plots the discrete VNP46A2 bitmask values for a single image on
    its own figure, with a categorical legend.

    Parameters
    ----------
    bitmask_array : numpy array
        Array containing the base-10 bitmask values.

    bitmask_name : str
        Name of the bitmask layer. Valid names: 'Mandatory Quality Flag',
        'Snow Flag', 'Day/Night', 'Land/Water Background',
        'Cloud Mask Quality', 'Cloud Detection', 'Shadow Detected',
        'Cirrus Detection', and 'Snow/Ice Surface'.

    Returns
    -------
    tuple

        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.

        ax : matplotlib.axes._subplots.AxesSubplot object
            The axes object associated with the plot.

    Raises
    ------
    ValueError
        If the bitmask name is invalid.

    Example
    -------
    >>> fig, ax = plot_quality_flag_bitmask_single_band_vnp46a2(
    ...     snow_flag_layer, 'Snow Flag'
    ... )
    """
    # Store possible bitmask values and titles (for plotting)
    vnp46a2_bitmasks = {
        "Mandatory Quality Flag": {
            "values": [0, 1, 2, 255],
            "labels": [
                "High-Quality (Persistent)",
                "High-Quality (Ephemeral)",
                "Poor-Quality",
                "No Retrieval",
            ],
        },
        "Snow Flag": {
            "values": [0, 1, 255],
            "labels": ["No Snow/Ice", "Snow/Ice", "Fill Value"],
        },
        "Day/Night": {"values": [0, 1], "labels": ["Night", "Day"]},
        "Land/Water Background": {
            "values": [0, 1, 2, 3, 5, 7],
            "labels": [
                "Land & Desert",
                "Land no Desert",
                "Inland Water",
                "Sea Water",
                "Coastal",
                "No Data / Unknown",
            ],
        },
        "Cloud Mask Quality": {
            "values": [0, 1, 2, 3],
            "labels": ["Poor", "Low", "Medium", "High"],
        },
        "Cloud Detection": {
            "values": [0, 1, 2, 3],
            "labels": [
                "Confident Clear",
                "Probably Clear",
                "Probably Cloudy",
                "Confident Cloudy",
            ],
        },
        "Shadow Detected": {
            "values": [0, 1],
            "labels": ["No Shadow", "Shadow"],
        },
        "Cirrus Detection": {
            "values": [0, 1],
            "labels": ["No Cirrus Cloud", "Cirrus Cloud"],
        },
        "Snow/Ice Surface": {
            "values": [0, 1],
            "labels": ["No Snow/Ice", "Snow/Ice"],
        },
    }

    # Raise errors
    if bitmask_name not in vnp46a2_bitmasks.keys():
        raise ValueError(
            f"Invalid name. Valid names are: {list(vnp46a2_bitmasks.keys())}"
        )

    # Plot bitmask on a new dark-background figure, scaling the default
    # colormap to the value range of the selected bitmask, with a legend
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(figsize=(12, 8))
        bitmask = ax.imshow(
            bitmask_array,
            # cmap="Accent",
            vmin=vnp46a2_bitmasks.get(bitmask_name).get("values")[0],
            vmax=vnp46a2_bitmasks.get(bitmask_name).get("values")[-1],
        )
        ep.draw_legend(
            im_ax=bitmask,
            classes=vnp46a2_bitmasks.get(bitmask_name).get("values"),
            titles=vnp46a2_bitmasks.get(bitmask_name).get("labels"),
        )
        ax.set_title(f"{bitmask_name}", size=16)
        ax.set_axis_off()

    return fig, ax
1645 |
1646 |
def plot_quality_flags_vnp46a1(vnp46a1_quality_stack, data_source="NASA"):
    """Plots all VIIRS VNP46A1 DNB QF Cloud Mask bitmasks and the
    QF DNB bitmask.

    Parameters
    ----------
    vnp46a1_quality_stack : numpy array
        3D array containing the quality flag bitmask layers, in the
        order listed in `bitmask_names` below.

    data_source : str, optional
        Location of the data. Default value is 'NASA'.

    Returns
    -------
    tuple

        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.

        ax : matplotlib.axes._subplots.AxesSubplot objects
            The axes objects associated with the plot.

    Example
    -------
    >>> fig, ax = plot_quality_flags_vnp46a1(quality_stack)
    """
    # Bitmask names, in the band order of the quality stack
    bitmask_names = (
        "Day/Night",
        "Land/Water Background",
        "Cloud Mask Quality",
        "Cloud Detection",
        "Shadow Detected",
        "Cirrus Detection",
        "Snow/Ice Surface",
        "QF DNB",
    )

    # Configure figure with one subplot per bitmask layer
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(nrows=4, ncols=2, figsize=(15, 20))
        plt.suptitle("VNP46A1 Quality Flag Bitmasks", size=20)
        plt.subplots_adjust(top=0.935)

        # Plot each bitmask layer on its subplot (row-major order)
        for index, name in enumerate(bitmask_names):
            plot_quality_flag_bitmask(
                bitmask_array=vnp46a1_quality_stack[index],
                bitmask_name=name,
                axis=ax[index // 2][index % 2],
            )

        # Add caption
        fig.text(
            0.5,
            0.1,
            f"Data Source: {data_source}",
            ha="center",
            fontsize=12,
        )

    return fig, ax
1749 |
1750 |
def plot_quality_flags_vnp46a2(vnp46a2_quality_stack, data_source="NASA"):
    """Plots all VIIRS VNP46A2 DNB QF Cloud Mask bitmasks, the Mandatory
    Quality Flag, and Snow Flag.

    Parameters
    ----------
    vnp46a2_quality_stack : numpy array
        3D array containing the quality flag bitmask layers, in the
        order listed in `bitmask_names` below.

    data_source : str, optional
        Location of the data. Default value is 'NASA'.

    Returns
    -------
    tuple

        fig : matplotlib.figure.Figure object
            The figure object associated with the plot.

        ax : matplotlib.axes._subplots.AxesSubplot objects
            The axes objects associated with the plot.

    Example
    -------
    >>> fig, ax = plot_quality_flags_vnp46a2(quality_stack)
    """
    # Bitmask names, in the band order of the quality stack
    bitmask_names = (
        "Mandatory Quality Flag",
        "Snow Flag",
        "Day/Night",
        "Land/Water Background",
        "Cloud Mask Quality",
        "Cloud Detection",
        "Shadow Detected",
        "Cirrus Detection",
        "Snow/Ice Surface",
    )

    # Configure figure with one subplot per bitmask layer
    with plt.style.context("dark_background"):
        fig, ax = plt.subplots(nrows=5, ncols=2, figsize=(15, 20))
        plt.suptitle("VNP46A2 Quality Flag Bitmasks", size=20)
        plt.subplots_adjust(top=0.935)

        # Plot each bitmask layer on its subplot (row-major order).
        # BUG FIX: 'Cirrus Detection' previously plotted stack layer 6
        # (the same layer as 'Shadow Detected') instead of layer 7.
        for index, name in enumerate(bitmask_names):
            plot_quality_flag_bitmask_vnp46a2(
                bitmask_array=vnp46a2_quality_stack[index],
                bitmask_name=name,
                axis=ax[index // 2][index % 2],
            )

        # Add caption
        fig.text(
            0.5,
            0.1,
            f"Data Source: {data_source}",
            ha="center",
            fontsize=12,
        )

    # Remove unused axis (nine layers fill only nine of ten subplots)
    fig.delaxes(ax[4][1])

    return fig, ax
1863 |
1864 |
def preprocess_vnp46a1(hdf5_path, output_folder):
    """Preprocesses a NASA VNP46A1 HDF5 (.h5) file.

    Preprocessing steps include applying the radiance scale factor,
    masking data for fill values, clouds, sea water, and sensor
    problems, filling masked values with NaN, and exporting the data
    to a GeoTiff.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A1 HDF5 (.h5) file to be preprocessed.

    output_folder : str
        Path to the folder where the preprocessed file will be exported to.

    Returns
    -------
    message : str
        Indication of preprocessing completion status (success or failure).

    Example
    -------
    >>> message = preprocess_vnp46a1(
    ...     hdf5_path='VNP46A1.A2020001.h30v05.001.2020004003738.h5',
    ...     output_folder='03-processed-data'
    ... )
    """
    # Preprocess VNP46A1 HDF5 file
    print(f"Started preprocessing: {os.path.basename(hdf5_path)}")
    try:
        print("Extracting bands...")
        # Extract DNB_At_Sensor_Radiance_500m, QF_Cloud_Mask, QF_DNB
        dnb_at_sensor_radiance = extract_band_vnp46a1(
            hdf5_path=hdf5_path, band_name="DNB_At_Sensor_Radiance_500m"
        )
        qf_cloud_mask = extract_band_vnp46a1(
            hdf5_path=hdf5_path, band_name="QF_Cloud_Mask"
        )
        qf_dnb = extract_band_vnp46a1(hdf5_path=hdf5_path, band_name="QF_DNB")

        print("Applying scale factor...")
        # Apply scale factor to radiance values
        dnb_at_sensor_radiance_scaled = (
            dnb_at_sensor_radiance.astype("float") * 0.1
        )

        print("Masking for fill values...")
        # Mask radiance for fill value (raw DNB_At_Sensor_Radiance_500m
        # value of 65535 becomes 6553.5 after the 0.1 scale factor)
        masked_for_fill_value = ma.masked_where(
            dnb_at_sensor_radiance_scaled == 6553.5,
            dnb_at_sensor_radiance_scaled,
            copy=True,
        )

        print("Masking for clouds...")
        # Extract QF_Cloud_Mask bits 6-7 (Cloud Detection Results &
        # Confidence Indicator)
        cloud_detection_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=6, end_bit=7
        )

        # Mask radiance for 'probably cloudy' (cloud_detection_bitmask == 2)
        masked_for_probably_cloudy = ma.masked_where(
            cloud_detection_bitmask == 2, masked_for_fill_value, copy=True
        )

        # Mask radiance for 'confident cloudy' (cloud_detection_bitmask == 3)
        masked_for_confident_cloudy = ma.masked_where(
            cloud_detection_bitmask == 3, masked_for_probably_cloudy, copy=True
        )

        print("Masking for sea water...")
        # Extract QF_Cloud_Mask bits 1-3 (Land/Water Background)
        land_water_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=1, end_bit=3
        )

        # Mask radiance for sea water (land_water_bitmask == 3)
        masked_for_sea_water = ma.masked_where(
            land_water_bitmask == 3, masked_for_confident_cloudy, copy=True
        )

        print("Masking for sensor problems...")
        # Mask radiance for sensor problems (QF_DNB != 0;
        # 0 = no problems, any number > 0 means some kind of issue)
        masked_for_sensor_problems = ma.masked_where(
            qf_dnb > 0, masked_for_sea_water, copy=True
        )

        print("Filling masked values...")
        # Set fill value to np.nan and fill masked values
        ma.set_fill_value(masked_for_sensor_problems, np.nan)
        filled_data = masked_for_sensor_problems.filled()

        print("Creating metadata...")
        # Create metadata (for export)
        metadata = create_metadata(
            array=filled_data,
            transform=create_transform_vnp46a1(hdf5_path),
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )

        print("Exporting to GeoTiff...")
        # Export masked array to GeoTiff (no data set to np.nan in export)
        export_name = (
            f"{os.path.basename(hdf5_path)[:-3].lower().replace('.', '-')}.tif"
        )
        export_array(
            array=filled_data,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        message = f"Preprocessing failed: {error}\n"
    else:
        message = f"Completed preprocessing: {os.path.basename(hdf5_path)}\n"
    # BUG FIX: previously assigned `message = print(...)`, which always
    # returned None instead of the status string the docstring promises
    print(message)

    return message
1990 |
1991 |
def preprocess_vnp46a2(hdf5_path, output_folder):
    """Preprocesses a NASA VNP46A2 HDF5 (.h5) file.

    Preprocessing steps include applying the radiance scale factor,
    masking data for fill values, poor quality, no retrieval, clouds,
    and sea water, filling masked values with NaN, and exporting the
    data to a GeoTiff.

    Parameters
    ----------
    hdf5_path : str
        Path to the VNP46A2 HDF5 (.h5) file to be preprocessed.

    output_folder : str
        Path to the folder where the preprocessed file will be exported to.

    Returns
    -------
    message : str
        Indication of preprocessing completion status (success or failure).

    Example
    -------
    >>> message = preprocess_vnp46a2(
    ...     hdf5_path='VNP46A2.A2016153.h30v05.001.2020267141459.h5',
    ...     output_folder='03-processed-data'
    ... )
    """
    # Preprocess VNP46A2 HDF5 file
    print(f"Started preprocessing: {os.path.basename(hdf5_path)}")
    try:
        print("Extracting bands...")
        # Extract DNB BRDF-Corrected radiance
        dnb_brdf_corrected_ntl = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="DNB_BRDF-Corrected_NTL"
        )

        # Extract Mandatory Quality Flag and QF Cloud Mask bands
        mandatory_quality_flag = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="Mandatory_Quality_Flag"
        )
        qf_cloud_mask = extract_band_vnp46a2(
            hdf5_path=hdf5_path, band_name="QF_Cloud_Mask"
        )

        print("Applying scale factor...")
        # Apply scale factor to radiance values
        dnb_brdf_corrected_ntl_scaled = (
            dnb_brdf_corrected_ntl.astype("float") * 0.1
        )

        print("Masking for fill values...")
        # Mask radiance for fill value (raw dnb_brdf_corrected_ntl value
        # of 65535 becomes 6553.5 after the 0.1 scale factor)
        masked_for_fill_value = ma.masked_where(
            dnb_brdf_corrected_ntl_scaled == 6553.5,
            dnb_brdf_corrected_ntl_scaled,
            copy=True,
        )

        print("Masking for poor quality and no retrieval...")
        # Mask radiance for 'poor quality' (mandatory_quality_flag == 2)
        masked_for_poor_quality = ma.masked_where(
            mandatory_quality_flag == 2, masked_for_fill_value, copy=True
        )

        # Mask radiance for 'no retrieval' (mandatory_quality_flag == 255)
        masked_for_no_retrieval = ma.masked_where(
            mandatory_quality_flag == 255, masked_for_poor_quality, copy=True
        )

        print("Masking for clouds...")
        # Extract QF_Cloud_Mask bits 6-7 (Cloud Detection Results &
        # Confidence Indicator)
        cloud_detection_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=6, end_bit=7
        )

        # Mask radiance for 'probably cloudy' (cloud_detection_bitmask == 2)
        masked_for_probably_cloudy = ma.masked_where(
            cloud_detection_bitmask == 2, masked_for_no_retrieval, copy=True
        )

        # Mask radiance for 'confident cloudy' (cloud_detection_bitmask == 3)
        masked_for_confident_cloudy = ma.masked_where(
            cloud_detection_bitmask == 3, masked_for_probably_cloudy, copy=True
        )

        print("Masking for sea water...")
        # Extract QF_Cloud_Mask bits 1-3 (Land/Water Background)
        land_water_bitmask = extract_qa_bits(
            qa_band=qf_cloud_mask, start_bit=1, end_bit=3
        )

        # Mask radiance for sea water (land_water_bitmask == 3)
        masked_for_sea_water = ma.masked_where(
            land_water_bitmask == 3, masked_for_confident_cloudy, copy=True
        )

        print("Filling masked values...")
        # Set fill value to np.nan and fill masked values
        ma.set_fill_value(masked_for_sea_water, np.nan)
        filled_data = masked_for_sea_water.filled()

        print("Creating metadata...")
        # Create metadata (for export)
        metadata = create_metadata(
            array=filled_data,
            transform=create_transform_vnp46a2(hdf5_path),
            driver="GTiff",
            nodata=np.nan,
            count=1,
            crs="epsg:4326",
        )

        print("Exporting to GeoTiff...")
        # Export masked array to GeoTiff (no data set to np.nan in export)
        export_name = (
            f"{os.path.basename(hdf5_path)[:-3].lower().replace('.', '-')}.tif"
        )
        export_array(
            array=filled_data,
            output_path=os.path.join(output_folder, export_name),
            metadata=metadata,
        )
    except Exception as error:
        message = f"Preprocessing failed: {error}\n"
    else:
        message = f"Completed preprocessing: {os.path.basename(hdf5_path)}\n"
    # BUG FIX: previously assigned `message = print(...)`, which always
    # returned None instead of the status string the docstring promises
    print(message)

    return message
2122 |
2123 |
def read_geotiff_into_array(geotiff_path, dimensions=1):
    """Reads a GeoTiff file into a NumPy array.

    Parameters
    ----------
    geotiff_path : str
        Path to the GeoTiff file.

    dimensions : int, optional
        Number of bands to read in. Default value is 1.

    Returns
    -------
    array : numpy array
        Array containing the data.

    Example
    -------
    >>> # Read the first band of a GeoTiff into an array
    >>> geotiff = os.path.join("03-processed-data", "radiance.tif")
    >>> radiance = read_geotiff_into_array(geotiff)
    """
    # Open the file within a context manager so it is closed on exit,
    # and hand back the requested band(s) as a NumPy array
    with rio.open(geotiff_path) as dataset:
        return dataset.read(dimensions)
2152 |
2153 |
def save_figure(output_path):
    """Saves the current matplotlib figure to a specified location.

    Parameters
    ----------
    output_path : str
        Path (including file name and extension)
        for the output file.

    Returns
    -------
    message : None
        Return value of the status ``print`` call (always ``None``);
        a success or failure message is printed as a side effect.

    Example
    -------
    >>> # Set output path and save figure
    >>> outpath = os.path.join("04-graphics-outputs", "figure.png")
    >>> save_figure(outpath)
    Saved plot: figure.png
    """
    # Attempt the export; on failure report the error and bail out early
    try:
        plt.savefig(
            fname=output_path, facecolor="k", dpi=300, bbox_inches="tight"
        )
    except Exception as error:
        return print(f"Failed to save plot: {error}")

    # Reached only on success; reports just the file name, not the path
    return print(f"Saved plot: {os.path.split(output_path)[-1]}")
2188 |
2189 |
def stack_quality_flags_vnp46a1(vnp46a1_path):
    """Creates a stacked (3D) NumPy array containing all of the VNP46A1
    quality flag bitmask layers.

    Parameters
    ----------
    vnp46a1_path : str
        Path to the VNP46A1 HDF5 (.h5) file.

    Returns
    -------
    quality_flag_stack : numpy array
        3D array containing the quality flag bitmask layers.

    Example
    -------
    >>> # Stack the quality flag layers for a single granule
    >>> granule = os.path.join("02-raw-data", "vnp46a1-granule.h5")
    >>> quality_stack = stack_quality_flags_vnp46a1(granule)
    """
    # Extract the QF Cloud Mask and QF DNB bands from the granule
    qf_cloud_mask = extract_band_vnp46a1(
        hdf5_path=vnp46a1_path, band_name="QF_Cloud_Mask"
    )
    qf_dnb = extract_band_vnp46a1(hdf5_path=vnp46a1_path, band_name="QF_DNB")

    # QF Cloud Mask bit ranges, in stacking order: day/night,
    # land/water background, cloud mask quality, cloud detection,
    # shadow detected, cirrus detection, snow/ice surface
    bit_ranges = [(0, 0), (1, 3), (4, 5), (6, 7), (8, 8), (9, 9), (10, 10)]

    # Decode each bitmask layer, then append the QF DNB band last
    layers = [
        extract_qa_bits(qf_cloud_mask, start_bit, end_bit)
        for start_bit, end_bit in bit_ranges
    ]
    layers.append(qf_dnb)

    # Stack all layers into a single 3D array
    quality_flag_stack = np.stack(arrays=layers)

    return quality_flag_stack
2241 |
2242 |
def stack_quality_flags_vnp46a2(vnp46a2_path):
    """Creates a stacked (3D) NumPy array containing all of the VNP46A2
    quality flag bitmask layers.

    Parameters
    ----------
    vnp46a2_path : str
        Path to the VNP46A2 HDF5 (.h5) file.

    Returns
    -------
    quality_flag_stack : numpy array
        3D array containing the quality flag bitmask layers.

    Example
    -------
    >>> # Stack the quality flag layers for a single granule
    >>> granule = os.path.join("02-raw-data", "vnp46a2-granule.h5")
    >>> quality_stack = stack_quality_flags_vnp46a2(granule)
    """
    # Extract the Mandatory Quality Flag, QF Cloud Mask, and Snow Flag
    # bands from the granule
    mandatory_quality_flag = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="Mandatory_Quality_Flag"
    )
    qf_cloud_mask = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="QF_Cloud_Mask"
    )
    snow_flag = extract_band_vnp46a2(
        hdf5_path=vnp46a2_path, band_name="Snow_Flag"
    )

    # QF Cloud Mask bit ranges, in stacking order: day/night,
    # land/water background, cloud mask quality, cloud detection,
    # shadow detected, cirrus detection, snow/ice surface
    bit_ranges = [(0, 0), (1, 3), (4, 5), (6, 7), (8, 8), (9, 9), (10, 10)]

    # Start with the whole-band flags, then append each decoded bitmask
    layers = [mandatory_quality_flag, snow_flag]
    layers.extend(
        extract_qa_bits(qf_cloud_mask, start_bit, end_bit)
        for start_bit, end_bit in bit_ranges
    )

    # Stack all layers into a single 3D array
    quality_flag_stack = np.stack(arrays=layers)

    return quality_flag_stack
2300 |
--------------------------------------------------------------------------------