├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── color_scales ├── au_clear_observations ├── au_water_observations ├── au_water_percentage ├── clear_observations_binned ├── cloud_coverage ├── default_color_scale ├── evi ├── nazeer_chlorophyll ├── nazeer_v2 ├── ndvi ├── ndvi_difference ├── ndvi_percentage_change ├── ramp ├── roygbiv ├── ryg ├── tsm_binned ├── watanabe_chlorophyll ├── water_observations_binned └── water_percentage_binned ├── data_cube_utilities ├── __init__.py ├── aggregate.py ├── build_cloud_coverage_table_landsat.py ├── clean_mask.py ├── crs.py ├── curve_fitting.py ├── dask.py ├── data_access_api.py ├── data_stats.py ├── dc_baseline.py ├── dc_ccd.py ├── dc_chunker.py ├── dc_clustering.py ├── dc_coastal_change.py ├── dc_display_map.py ├── dc_displayutil.py ├── dc_fractional_coverage_classifier.py ├── dc_load.py ├── dc_mosaic.py ├── dc_ndvi_anomaly.py ├── dc_rgb.py ├── dc_sar_utils.py ├── dc_slip.py ├── dc_time.py ├── dc_utilities.py ├── dc_water_classifier.py ├── dc_water_quality.py ├── endmembers_landsat.csv ├── import_export.py ├── models │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_blue.joblib │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_green.joblib │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_nir.joblib │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_red.joblib │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir1.joblib │ ├── LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir2.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_blue.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_green.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_nir.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_red.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir1.joblib │ ├── LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir2.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_blue.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_green.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_nir.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_red.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir1.joblib │ ├── LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir2.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_blue.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_green.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_nir.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_red.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir1.joblib │ ├── LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir2.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_blue.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_green.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_nir.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_red.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir1.joblib │ ├── LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir2.joblib │ ├── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_blue.joblib │ ├── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_green.joblib │ ├── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_nir.joblib │ ├── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_red.joblib │ ├── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir1.joblib │ └── LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir2.joblib ├── plotter_utils.py ├── plotter_utils_consts.py ├── raster_filter.py ├── scale.py ├── shapefile_mask.py ├── sort.py ├── transect │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-35.pyc │ │ └── line_scan.cpython-35.pyc │ ├── interpolate.py │ ├── line_scan.py │ ├── ordered_set.py │ ├── tests │ │ ├── .ipynb_checkpoints │ │ │ └── Pytest+exectution-checkpoint.ipynb │ │ ├── Pytest+exectution.ipynb │ │ ├── test_interpolate.py │ │ └── test_linescan.py │ └── xarraypath.py ├── trend.py ├── unique.py ├── urbanization.py ├── vegetation.py ├── 
voxel_visualizer │ ├── data │ │ ├── coords.csv │ │ ├── data.js │ │ ├── mini_lake.nc │ │ └── temp_arr.txt │ ├── imgs │ │ ├── arrow-down-icon.png │ │ ├── arrow-left-icon.png │ │ ├── arrow-right-icon.png │ │ ├── arrow-up-icon.png │ │ ├── rotate-left-icon.png │ │ ├── rotate-right-icon.png │ │ ├── zoom-in-icon.png │ │ └── zoom-out-icon.png │ ├── js │ │ ├── jsm │ │ │ ├── .ipynb_checkpoints │ │ │ │ ├── EffectComposer-checkpoint.js │ │ │ │ └── UnrealBloomPass-checkpoint.js │ │ │ ├── BloomPass.js │ │ │ ├── ConvolutionShader.js │ │ │ ├── CopyShader.js │ │ │ ├── EffectComposer.js │ │ │ ├── LuminosityHighPassShader.js │ │ │ ├── MaskPass.js │ │ │ ├── Pass.js │ │ │ ├── RenderPass.js │ │ │ ├── ShaderPass.js │ │ │ ├── UnrealBloomPass.js │ │ │ ├── dat.gui.module.js │ │ │ └── stats.module.js │ │ └── lib │ │ │ ├── potree │ │ │ ├── potree.min.js │ │ │ └── potree.module.js │ │ │ └── threejs │ │ │ ├── OrbitControls.js │ │ │ ├── three.min.js │ │ │ └── three.module.js │ ├── server.py │ ├── template.html │ └── voxel_visualizer.py ├── wasard.py └── xarray_bokeh_plotting.py ├── dea_tools ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.rst ├── dea_tools │ ├── __init__.py │ ├── __main__.py │ ├── bandindices.py │ ├── bom.py │ ├── classification.py │ ├── climate.py │ ├── coastal.py │ ├── dask.py │ ├── datahandling.py │ ├── plotting.py │ ├── segmentation.py │ ├── spatial.py │ ├── temporal.py │ └── waterbodies.py └── setup.py └── test ├── __init__.py ├── test_data_access_api.py ├── test_dc_ccd.py ├── test_dc_chunker.py ├── test_dc_clustering.py ├── test_dc_coastal_change.py ├── test_dc_fractional_coverage_classifier.py ├── test_dc_mosaic.py ├── test_dc_ndvi_anomaly.py ├── test_dc_sar_utils.py ├── test_dc_slip.py ├── test_dc_utilities.py ├── test_dc_water_classifier.py └── test_dc_water_quality.py /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | data_cube_utilities/.ipynb_checkpoints 3 | **/__pycache__ 4 | 5 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/.gitmodules -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Data Cube Utilities 2 | ================= 3 | 4 | This repository serves as a common module for our Jupyter examples and our UI. 5 | 6 | This includes: 7 | * A wrapper for the Data Cube Core API with some added functionality 8 | * Water detection (WOfS) 9 | * TSM 10 | * Coastline Classification 11 | * Coastal Change detection 12 | * Continuous Change Detection (CCD) 13 | * Spectral Unmixing (Fractional Coverage) 14 | * Various plotting utilities 15 | * Cloudfree mosaicking 16 | * SLIP 17 | * RGB display methods 18 | 19 | This repository is set up as a submodule in our data_cube_ui and data_cube_notebooks repositories but can be used as a general utility module if desired. 
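A typical notebook session wires a few of these utilities together. The following is a minimal sketch — the product name and query extents are placeholders, and the exact set of functions available depends on the revision you have checked out:

```python
import datacube
from data_cube_utilities.dask import create_local_dask_cluster
from data_cube_utilities.dc_rgb import rgb

# Start a small local Dask cluster, reserving 3 GB of memory for the notebook itself.
client = create_local_dask_cluster(spare_mem='3Gb')

# Open the Data Cube and load a small cube of surface reflectance data.
dc = datacube.Datacube(app='example')
ds = dc.load(product='my_landsat_product',  # placeholder product name
             latitude=(-1.4, -1.2), longitude=(36.7, 36.9),
             time=('2018-01-01', '2018-12-31'),
             measurements=['red', 'green', 'blue'])

# Plot a true-color composite of the first acquisition.
fig, ax = rgb(ds, time_index=0)
```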
20 |
21 | Requirements
22 | =================
23 | * Full Data Cube installation with ingested data
24 | * imagemagick (sudo apt-get install imagemagick)
25 | * jupyter
26 | * matplotlib
27 | * scipy
28 | * sklearn
29 | * lcmap-pyccd=2017.08.18
30 | * skimage
31 | * seaborn
32 | * folium
33 |
34 |
-------------------------------------------------------------------------------- /color_scales/au_clear_observations: --------------------------------------------------------------------------------
1 | 0% 255 255 255
2 | 1% 178 24 0
3 | 2.5% 255 68 0
4 | 5% 255 128 0
5 | 10% 255 162 0
6 | 15% 255 192 0
7 | 20% 255 213 0
8 | 25% 255 243 0
9 | 30% 230 255 0
10 | 35% 188 255 0
11 | 40% 137 255 0
12 | 50% 104 196 0
13 | 60% 68 196 0
14 | 70% 3 181 0
15 | 80% 3 149 0
16 | 100% 2 105 0
17 |
-------------------------------------------------------------------------------- /color_scales/au_water_observations: --------------------------------------------------------------------------------
1 | 0% 255 255 255
2 | 0.5% 137 0 0
3 | 1.25% 153 0 0
4 | 2.5% 227 132 0
5 | 6.25% 227 223 0
6 | 12.5% 166 227 0
7 | 25% 0 227 45
8 | 37.5% 0 227 200
9 | 50% 0 151 227
10 | 62.5% 0 95 227
11 | 75% 0 15 227
12 | 87.5% 0 14 169
13 | 100% 87 0 227
14 |
-------------------------------------------------------------------------------- /color_scales/au_water_percentage: --------------------------------------------------------------------------------
1 | nan 255 255 255
2 | 0.002 255 255 255
3 | 0.005 142 1 1
4 | 0.01 207 34 0
5 | 0.02 227 132 0
6 | 0.05 227 223 0
7 | 0.10 166 227 0
8 | 0.20 98 227 0
9 | 0.30 0 227 45
10 | 0.40 0 227 132
11 | 0.50 0 227 200
12 | 0.60 0 197 227
13 | 0.70 0 151 227
14 | 0.80 0 95 227
15 | 0.90 0 15 227
16 | 1.00 87 0 227
17 | 1.10 87 0 227
18 |
-------------------------------------------------------------------------------- /color_scales/clear_observations_binned: --------------------------------------------------------------------------------
1 | 0% 255 255 255
2 | 0.001% 178 24 0
3 | 1% 178 24 0
4 | 1.001% 255 68 0
5 | 2.5% 255 68 0
6 | 2.5001% 255 128 0
7 | 5% 255 128 0
8 | 5.001% 255 162 0
9 | 10% 255 162 0
10 | 10.001% 255 192 0
11 | 15% 255 192 0
12 | 15.001% 255 213 0
13 | 20% 255 213 0
14 | 20.001% 255 243 0
15 | 25% 255 243 0
16 | 25.001% 230 255 0
17 | 30% 230 255 0
18 | 30.001% 188 255 0
19 | 35% 188 255 0
20 | 35.001% 137 255 0
21 | 40% 137 255 0
22 | 40.001% 104 196 0
23 | 50% 104 196 0
24 | 50.001% 68 196 0
25 | 60% 68 196 0
26 | 60.001% 3 181 0
27 | 70% 3 181 0
28 | 70.001% 3 149 0
29 | 80% 3 149 0
30 | 80.001% 2 105 0
31 | 100% 2 105 0
32 |
-------------------------------------------------------------------------------- /color_scales/cloud_coverage: --------------------------------------------------------------------------------
1 | 0 red
2 | 1 green
3 |
-------------------------------------------------------------------------------- /color_scales/default_color_scale: --------------------------------------------------------------------------------
1 | 0% red
2 | 100% green
3 |
-------------------------------------------------------------------------------- /color_scales/evi: --------------------------------------------------------------------------------
1 | -1.000 5 24 82
2 | -0.300 5 24 82
3 | -0.180 255 255 255
4 | 0.000 255 255 255
5 | 0.025 206 197 180
6 | 0.075 191 163 124
7 | 0.125 160 134 80
8 | 0.150 143 172 56
9 | 0.175 164 194 36
10 | 0.233 137 182 4
11 | 0.266 119 176 1
12 | 0.333 99 164 2
13 | 0.366 74 151 13
14 | 0.433 41 133 9
15 | 0.466 0 114 0
16 | 0.550 0 90 0
17 | 0.650 0 76 0 18 | 0.750 0 57 0 19 | 0.850 0 32 0 20 | 0.950 1 15 2 21 | 1.000 0 0 0 22 | -------------------------------------------------------------------------------- /color_scales/nazeer_chlorophyll: -------------------------------------------------------------------------------- 1 | 0 255 255 255 2 | 5 36 245 1 3 | 10 134 247 2 4 | 20 204 247 1 5 | 30 248 218 1 6 | 40 252 140 2 7 | 60 246 42 2 8 | -------------------------------------------------------------------------------- /color_scales/nazeer_v2: -------------------------------------------------------------------------------- 1 | 0,255,255,255 2 | .01 3 96 5 3 | 5 2 112 1 4 | 10 2 131 3 5 | 15 39 146 6 6 | 20 77 168 4 7 | 25 122 184 5 8 | 30 159 205 4 9 | 35 196 223 2 10 | 40 235 246 0 11 | 41 254 247 0 12 | 42 252 222 4 13 | 43 255 196 0 14 | 44 253 171 4 15 | 45 254 150 1 16 | 46 250 126 7 17 | 47 253 97 4 18 | 48 252 67 3 19 | 49 253 33 0 20 | -------------------------------------------------------------------------------- /color_scales/ndvi: -------------------------------------------------------------------------------- 1 | -1.000 5 24 82 2 | -0.300 5 24 82 3 | -0.180 255 255 255 4 | 0.000 255 255 255 5 | 0.025 206 197 180 6 | 0.075 191 163 124 7 | 0.125 179 174 96 8 | 0.150 163 181 80 9 | 0.175 144 170 60 10 | 0.233 166 195 29 11 | 0.266 135 183 3 12 | 0.333 121 175 1 13 | 0.366 101 163 0 14 | 0.433 78 151 0 15 | 0.466 43 132 4 16 | 0.550 0 114 0 17 | 0.650 0 90 1 18 | 0.750 0 73 0 19 | 0.850 0 56 0 20 | 0.950 0 31 0 21 | 1.000 0 0 0 22 | nan 0 0 0 23 | -------------------------------------------------------------------------------- /color_scales/ndvi_difference: -------------------------------------------------------------------------------- 1 | -0.40 172 21 14 2 | -0.30 247 103 50 3 | -0.20 249 133 20 4 | -0.10 255 204 37 5 | 0.0 255 255 255 6 | 0.08 124 250 127 7 | 0.16 39 216 64 8 | 0.24 16 165 1 9 | 0.32 51 98 54 10 | 0.40 3 49 2 11 | nan 0 0 0 12 | -------------------------------------------------------------------------------- /color_scales/ndvi_percentage_change: -------------------------------------------------------------------------------- 1 | -1 red 2 | 0 yellow 3 | 1 green 4 | nan 0 0 0 5 | -9999 0 0 0 6 | -------------------------------------------------------------------------------- /color_scales/ramp: -------------------------------------------------------------------------------- 1 | 0,255,255,255,255 2 | 1,94,79,162,255 3 | 20,50,136,189,255 4 | 40,102,194,165,255 5 | 60,171,221,164,255 6 | 80,230,245,152,255 7 | 100,254,224,139,255 8 | 120,253,174,97,255 9 | 140,244,109,67,255 10 | 160,213,62,79,255 11 | 180,158,1,66,255 12 | -------------------------------------------------------------------------------- /color_scales/roygbiv: -------------------------------------------------------------------------------- 1 | 190 red 2 | 170 orange 3 | 150 yellow 4 | 130 green 5 | 100 blue 6 | 70 indigo 7 | 40 violet 8 | 0 white 9 | default white 10 | -------------------------------------------------------------------------------- /color_scales/ryg: -------------------------------------------------------------------------------- 1 | -1 red 2 | 0 yellow 3 | 1 green 4 | -------------------------------------------------------------------------------- /color_scales/tsm_binned: -------------------------------------------------------------------------------- 1 | 0,255,255,255 2 | 0.001,94,79,162 3 | 10,94,79,162 4 | 10.001,50,136,189 5 | 20,50,136,189 6 | 20.001,102,194,165 7 | 40,102,194,165 8 | 40.001,171,221,164 9 | 
60,171,221,164 10 | 60.001,230,245,152 11 | 80,230,245,152 12 | 80.001,254,224,139 13 | 100,254,224,139 14 | 100.001,253,174,97 15 | 120,253,174,97 16 | 120.001,244,109,67 17 | 140,244,109,67 18 | 140.001,213,62,79 19 | 160,213,62,79 20 | 160.001,158,1,66 21 | 180,158,1,66 22 | -------------------------------------------------------------------------------- /color_scales/watanabe_chlorophyll: -------------------------------------------------------------------------------- 1 | 0,255,255,255 2 | 1 3 96 5 3 | 50 2 112 1 4 | 100 2 131 3 5 | 150 39 146 6 6 | 200 77 168 4 7 | 250 122 184 5 8 | 300 159 205 4 9 | 350 196 223 2 10 | 400 235 246 0 11 | 410 254 247 0 12 | 420 252 222 4 13 | 430 255 196 0 14 | 440 253 171 4 15 | 450 254 150 1 16 | 460 250 126 7 17 | 470 253 97 4 18 | 480 252 67 3 19 | 490 253 33 0 20 | -------------------------------------------------------------------------------- /color_scales/water_observations_binned: -------------------------------------------------------------------------------- 1 | 0% 255 255 255 2 | 0.001% 137 0 0 3 | 0.5% 137 0 0 4 | 0.5001% 153 0 0 5 | 1.25% 153 0 0 6 | 1.25001% 227 132 0 7 | 2.5% 227 132 0 8 | 2.5001% 227 223 0 9 | 6.25% 227 223 0 10 | 6.25001% 166 227 0 11 | 12.5% 166 227 0 12 | 12.5001% 0 227 45 13 | 25% 0 227 45 14 | 25.001% 0 227 200 15 | 37.5% 0 227 200 16 | 37.5001% 0 151 227 17 | 50% 0 151 227 18 | 50.001% 0 95 227 19 | 62.5% 0 95 227 20 | 62.5001% 0 15 227 21 | 75% 0 15 227 22 | 75.001% 0 14 169 23 | 87.5% 0 14 169 24 | 87.5001% 87 0 227 25 | 100% 87 0 227 26 | -------------------------------------------------------------------------------- /color_scales/water_percentage_binned: -------------------------------------------------------------------------------- 1 | nan 255 255 255 2 | 0 255 255 255 3 | 0.001 207 34 0 4 | 0.05 207 34 0 5 | 0.05001 227 223 0 6 | 0.20 227 223 0 7 | 0.20001 98 227 0 8 | 0.30 98 227 0 9 | 0.30001 0 227 45 10 | 0.40 0 227 45 11 | 0.40001 0 227 132 12 | 0.55 0 227 132 13 | 0.55001 0 197 227 14 | 0.70 0 197 227 15 | 0.70001 0 151 227 16 | 0.85 0 151 227 17 | 0.85001 0 15 227 18 | 1.00 0 15 227 19 | 1.10 0 15 227 20 | -------------------------------------------------------------------------------- /data_cube_utilities/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/__init__.py -------------------------------------------------------------------------------- /data_cube_utilities/aggregate.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | 4 | def get_bin_intervals(data, num_bins): 5 | """ 6 | Returns bin intervals for 1D data. 7 | 8 | Parameters 9 | ---------- 10 | data: np.ndarray 11 | A 1D NumPy array of values to get bin intervals for. 12 | num_bins: int 13 | The number of bins to create. 14 | 15 | Returns 16 | ------- 17 | bin_intervals: np.ndarray of shape (num_bins, 2) 18 | A 2D NumPy array of bin intervals, with each row being one bin, 19 | with the first value being the lower bound for the bin and 20 | the second being the upper bound for the bin. 21 | """ 22 | # Transition points between bins. 
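# (The bins are equal-width in coordinate space, spanning from data[0] to data[-1], so `data` is assumed to be sorted.)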
23 | bin_trans = np.linspace(data[0], data[-1], num_bins+1, endpoint=True)
24 | bin_intervals = np.empty((num_bins, 2), dtype=data.dtype)
25 | for i in range(num_bins):
26 | bin_intervals[i, :] = [bin_trans[i], bin_trans[i+1]]
27 | return bin_intervals
28 |
29 |
30 | def xr_scale_res(dataset, x_coord='longitude', y_coord='latitude',
31 | frac_res=None, abs_res=None, val_interp_method='linear'):
32 | """
33 | Scales the resolution of an `xarray.Dataset` or `xarray.DataArray`
34 | to a fraction of its original resolution or an absolute resolution.
35 |
36 | Parameters
37 | ----------
38 | dataset: xarray.Dataset or xarray.DataArray
39 | The Dataset or DataArray to reduce the resolution of.
40 | x_coord, y_coord: str
41 | Names of the x and y coordinates in `dataset` to scale.
42 | frac_res: float
43 | The fraction of the original resolution to scale to. Must be positive.
44 | Note that this can be greater than 1.0, in which case the resolution
45 | is upsampled.
46 | abs_res: list-like
47 | A list-like of the number of pixels for the x and y axes, respectively.
48 | Overrides `frac_res` if specified.
49 | val_interp_method: str
50 | The interpolation method for the values. This is the `method` parameter
51 | input to `xarray.Dataset.interp()` after the coordinates have been interpolated.
52 | Can be one of ['nearest', 'linear'].
53 |
54 | Returns
55 | -------
56 | dataset_scaled: xarray.Dataset or xarray.DataArray
57 | The result of scaling the resolution of `dataset`.
58 |
59 | Raises
60 | ------
61 | AssertionError: If neither `frac_res` nor `abs_res` is specified.
62 | """
63 | assert frac_res is not None or abs_res is not None, \
64 | "Either frac_res or abs_res must be specified (i.e. not None)."
65 | if frac_res is not None:
66 | x_px = y_px = np.sqrt(frac_res)
67 | interp_param = 'frac'
68 | elif abs_res is not None:
69 | interp_param = 'num'
70 | x_px, y_px = abs_res
71 | return xr_interp(dataset, {x_coord: ('interp', {interp_param: x_px}), \
72 | y_coord: ('interp', {interp_param: y_px})},
73 | val_interp_method=val_interp_method)
74 |
75 |
76 | def xr_sel_time_by_bin(dataset, num_bins, time_coord='time'):
77 | """
78 | Selects time coordinates by nearest neighbors of the means of bins.
79 | This is useful for plotting data with high variance in temporal
80 | spacing between acquisitions.
81 |
82 | Parameters
83 | ----------
84 | dataset: xarray.Dataset or xarray.DataArray
85 | The Dataset or DataArray to aggregate by binning.
86 | Must have a 'time' coordinate of type `datetime64`.
87 | num_bins: int
88 | The number of bins to use.
89 | time_coord: str
90 | The name of the time coordinate to bin.
91 |
92 | Returns
93 | -------
94 | result: xarray.Dataset or xarray.DataArray
95 | The result of aggregating within bins for the binned data.
96 | """
97 | return xr_interp(dataset, {time_coord: ('bin', {'num': num_bins})})
98 |
99 |
100 | def xr_interp(dataset, interp_config, val_interp_method='nearest'):
101 | """
102 | Interpolates an `xarray.Dataset` or `xarray.DataArray`.
103 | This is often done to match dimensions between xarray objects or
104 | downsample to reduce memory consumption.
105 |
106 | First, coordinates are interpolated according to `interp_config`.
107 | Then the data values for those interpolated coordinates are obtained
108 | through interpolation.
109 |
110 | Parameters
111 | ----------
112 | dataset: xarray.Dataset or xarray.DataArray
113 | The Dataset or DataArray to interpolate.
114 | interp_config: dict
115 | Mapping of names of coordinates to 2-tuples of the interpolation types
116 | to use for those coordinates and the parameters for those interpolation types.
117 | The supported coordinate interpolation types are 'interp' for
118 | linear interpolation and 'bin' for binning.
119 | The parameters, with supported interpolation types annotated to their
120 | left, are as follows:
121 | ('interp', 'bin'): 'frac':
122 | - The fraction of the original size to use. Exclusive with 'num'.
123 | ('interp', 'bin'): 'num':
124 | - The number of points in the output. Exclusive with 'frac'.
125 | - Either 'frac' or 'num' must be in the interpolation parameters.
126 | The following is an example value:
127 | `{'latitude':('interp',{'frac':0.5}),'longitude':('interp',{'frac':0.5}),
128 | 'time':('bin',{'num':20})}`.
129 | val_interp_method: str
130 | The interpolation method for the values. This is the `method` parameter
131 | input to `xarray.Dataset.interp()` after the coordinates have been interpolated.
132 | Can be one of ['nearest', 'linear'].
133 |
134 | Returns
135 | -------
136 | interp_data: xarray.Dataset or xarray.DataArray
137 | The specified interpolation of `dataset`.
138 | """
139 | from .dc_time import _n64_datetime_to_scalar, \
140 | _scalar_to_n64_datetime
141 |
142 | # Create the new coordinates.
143 | new_coords = {}
144 | for dim, (interp_type, interp_kwargs) in interp_config.items():
145 | # Determine the number of points to use.
146 | num_pts = interp_kwargs.get('num', None)
147 | if num_pts is None:
148 | frac = interp_kwargs.get('frac', None)
149 | num_pts_orig = len(dataset[dim])
150 | num_pts = int(round(num_pts_orig * frac))
151 | dim_vals = dataset[dim].values
152 | dim_dtype = type(dim_vals[0])
153 | # Convert NumPy datetime64 objects to scalars.
154 | if dim_dtype == np.datetime64:
155 | dim_vals = np.array(list(map(_n64_datetime_to_scalar, dim_vals)))
156 | interp_vals = None
157 | # Interpolate coordinates.
158 | if interp_type == 'bin':
159 | bin_intervals = get_bin_intervals(dim_vals, num_pts)
160 | interp_vals = np.mean(bin_intervals, axis=1)
161 | if interp_type == 'interp':
162 | interp_inds = np.linspace(0, len(dim_vals) - 1, num_pts, dtype=np.int32)
163 | interp_vals = dim_vals[interp_inds]
164 | # Convert scalars to NumPy datetime64 objects.
165 | if dim_dtype == np.datetime64:
166 | interp_vals = np.array(list(map(_scalar_to_n64_datetime, interp_vals)))
167 | new_coords[dim] = interp_vals
168 | # Interpolate the data values at the new coordinates ('nearest' by default).
169 | interp_data = dataset.interp(coords=new_coords, method=val_interp_method)
170 | # xarray.Dataset.interp() converts to dtype float64, so cast back to the original dtypes.
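# (Without this cast, integer and boolean arrays would silently be promoted to float64, changing their semantics and memory footprint.)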
171 | if isinstance(dataset, xr.DataArray): 172 | interp_data = interp_data.astype(dataset.dtype) 173 | elif isinstance(dataset, xr.Dataset): 174 | for data_var_name in interp_data.data_vars: 175 | interp_data[data_var_name] = interp_data[data_var_name].astype(dataset[data_var_name].dtype) 176 | return interp_data 177 | -------------------------------------------------------------------------------- /data_cube_utilities/build_cloud_coverage_table_landsat.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | import datacube 3 | from .clean_mask import landsat_clean_mask_invalid, landsat_qa_clean_mask 4 | import numpy as np 5 | import xarray as xr 6 | import pandas as pd 7 | 8 | 9 | def build_cloud_coverage_table_landsat(product, 10 | platform, 11 | collection, 12 | level, 13 | latitude, 14 | longitude, 15 | time=None, 16 | dc=None, 17 | extra_band='green', 18 | extra_load_params={}): 19 | dc = dc if dc is not None else datacube.Datacube(app="") 20 | 21 | load_params = dict(product=product, 22 | latitude=latitude, 23 | longitude=longitude, 24 | measurements=[extra_band, 'pixel_qa'], 25 | **extra_load_params) 26 | 27 | if time is not None: 28 | load_params["time"] = time 29 | 30 | landsat_dataset = dc.load(**load_params).persist() 31 | clean_mask = landsat_qa_clean_mask(landsat_dataset, platform=platform, 32 | collection=collection, level=level) & \ 33 | landsat_clean_mask_invalid(landsat_dataset, platform, collection, level) 34 | 35 | data_mask = xr.full_like(clean_mask, True) 36 | band_no_data_values = dc.list_measurements().loc[product, 'nodata'] 37 | if band_no_data_values is not None: 38 | for data_var in landsat_dataset.values(): 39 | band_data_mask = data_var != data_var.attrs['nodata'] 40 | data_mask = data_mask & band_data_mask 41 | clean_data_mask = clean_mask & data_mask 42 | 43 | landsat_dataset = landsat_dataset.where(clean_data_mask) 44 | 45 | times = list(landsat_dataset.time.values) 46 | 47 | clean_data_mask_list = [clean_data_mask.sel(time=str(time)).values 48 | for time in clean_data_mask.time.values] 49 | # Calculate the percentage of all pixels which are not cloud. 50 | percentage_list = [clean_data_mask.mean()*100 for clean_data_mask in clean_data_mask_list] 51 | clean_pixel_count_list = list(map(np.sum, clean_data_mask_list)) 52 | 53 | data = {"times": times, 54 | "clean_percentage": percentage_list, 55 | "clean_count": clean_pixel_count_list} 56 | 57 | return (landsat_dataset, 58 | pd.DataFrame(data=data, columns=["times", "clean_percentage", "clean_count"]), 59 | clean_mask, data_mask, clean_data_mask) 60 | -------------------------------------------------------------------------------- /data_cube_utilities/crs.py: -------------------------------------------------------------------------------- 1 | # from pyproj import Transformer 2 | # from functools import partial 3 | 4 | # def xarray_change_crs(ds, out_crs, in_crs=None, x_dim=None, y_dim=None): 5 | # """ 6 | # (In development) 7 | 8 | # Changes the CRS of the x/y coordinates of an `xarray.Dataset` or `xarray.DataArray`. 9 | # This is an in-place operation. 10 | 11 | # Parameters 12 | # ---------- 13 | # ds: xarray.Dataset or xarray.DataArray 14 | # The xarray object to change coordinate values for. 15 | # out_crs, in_crs: str 16 | # The output and input CRSes. If `in_crs` is not specified, the function will 17 | # attempt to read the metadata of `ds` to determine it. If this fails, the function 18 | # will throw an error. 
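# (When inferring, the CRS is read from `ds.attrs['crs']`.)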
19 | # x_dim, y_dim: str
20 | # The string names of the x and y dimensions in `ds`. If not specified, the dimension
21 | # name will be inferred. If this fails, the function will throw an error.
22 |
23 | # Returns
24 | # -------
25 | # out_ds: xarray.Dataset or xarray.DataArray
26 | # Same as `ds` but with different coordinate values.
27 | # """
28 | # if in_crs is None:
29 | # in_crs = ds.attrs.get('crs')
30 | # assert in_crs is not None, 'Could not determine `in_crs`. Please specify this argument.'
31 | # in_crs = ds.crs[6:] # Remove the leading '+init='.
32 | # if x_dim is None:
33 | # x_dim = 'x' if 'x' in ds.dims else None
34 | # x_dim = 'longitude' if x_dim is None and 'longitude' in ds.dims else x_dim
35 | # assert x_dim is not None, 'Could not determine `x_dim`. Please specify this argument.'
36 | # if y_dim is None:
37 | # y_dim = 'y' if 'y' in ds.dims else None
38 | # y_dim = 'latitude' if y_dim is None and 'latitude' in ds.dims else y_dim
39 | # assert y_dim is not None, 'Could not determine `y_dim`. Please specify this argument.'
40 | # x_coords = ds.coords[x_dim]
41 | # y_coords = ds.coords[y_dim]
42 | # transformer = Transformer.from_crs(in_crs, out_crs)
43 | # new_x_coords, new_y_coords = [], []
44 | # for ind, (x_val, y_val) in enumerate(zip(x_coords.values, y_coords.values)):
45 | # x_val, y_val = transformer.transform(x_val, y_val)
46 | # new_x_coords.append(x_val)
47 | # new_y_coords.append(y_val)
48 | # ds = ds.assign_coords({x_dim: new_x_coords})
49 | # ds = ds.assign_coords({y_dim: new_y_coords})
50 | # return ds
-------------------------------------------------------------------------------- /data_cube_utilities/dask.py: --------------------------------------------------------------------------------
1 | import os
2 | import psutil
3 | import numpy as np
4 | import dask
5 | from datacube.utils.dask import start_local_dask
6 | from datacube.utils.rio import configure_s3_access
7 |
8 | def create_local_dask_cluster(spare_mem='3Gb',
9 | aws_unsigned= True,
10 | display_client=True,
11 | start_local_dask_kwargs=None,
12 | configure_s3_access_kwargs=None):
13 | """
14 | Using the datacube utils function 'datacube.utils.dask.start_local_dask', generate
15 | a local dask cluster.
16 |
17 | Credit belongs to Digital Earth Africa:
18 | https://github.com/digitalearthafrica/deafrica-sandbox-notebooks/blob/master/Scripts/deafrica_dask.py
19 |
20 | Example use :
21 |
22 | import sys
23 | sys.path.append("../Scripts")
24 | from deafrica_dask import create_local_dask_cluster
25 |
26 | create_local_dask_cluster(spare_mem='4Gb')
27 |
28 | Parameters
29 | ----------
30 | spare_mem : String, optional
31 | The amount of memory, in Gb, to leave for the notebook to run.
32 | This memory will not be used by the cluster. e.g '3Gb'
33 | aws_unsigned : Bool, optional
34 | This parameter determines if credentials for S3 access are required and
35 | passes them on to processing threads, either local or on dask cluster.
36 | Set to True if working with publicly available datasets, and False if
37 | working with private data. i.e if loading Landsat C2 provisional data set
38 | this to aws_unsigned=False
39 | display_client : Bool, optional
40 | An optional boolean indicating whether to display a summary of
41 | the dask client, including a link to monitor progress of the
42 | analysis. Set to False to hide this display.
43 | start_local_dask_kwargs: dict, optional
44 | Keyword arguments for the function `datacube.utils.dask.start_local_dask`, which
45 | creates the Dask client.
46 | Some settings to configure include the number of workers, number of threads per worker, and the memory limit.
47 | configure_s3_access_kwargs: dict, optional
48 | Keyword arguments for the function `datacube.utils.rio.configure_s3_access`, which
49 | configures the Dask cluster to access S3.
50 | """
51 | start_local_dask_kwargs = {} if start_local_dask_kwargs is None else start_local_dask_kwargs
52 | configure_s3_access_kwargs = {} if configure_s3_access_kwargs is None else configure_s3_access_kwargs
53 |
54 | # configure dashboard link to go over proxy
55 | dask.config.set({"distributed.dashboard.link":
56 | os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')+"proxy/{port}/status"})
57 |
58 | # start up a local cluster
59 | num_physical_cpu = psutil.cpu_count(logical=False)
60 | num_logical_cpu = psutil.cpu_count(logical=True)
61 | start_local_dask_kwargs.setdefault('n_workers', 1)
62 | start_local_dask_kwargs.setdefault('threads_per_worker', int(np.floor(num_logical_cpu/start_local_dask_kwargs['n_workers'])))
63 | client = start_local_dask(mem_safety_margin=spare_mem, **start_local_dask_kwargs)
64 |
65 | ## Configure GDAL for s3 access
66 | configure_s3_access(aws_unsigned=aws_unsigned,
67 | client=client, **configure_s3_access_kwargs)
68 |
69 | return client
-------------------------------------------------------------------------------- /data_cube_utilities/data_stats.py: --------------------------------------------------------------------------------
1 | import xarray as xr
2 |
3 | ## Data Availability ##
4 |
5 | def find_gaps(data_arr, aggregation_method):
6 | """
7 | Finds the minimum, mean, median, or maximum time difference between True values
8 | in a boolean xarray.DataArray. This implementation is vectorized with xarray, avoiding per-pixel Python loops.
9 |
10 | Parameters
11 | ----------
12 | data_arr: xarray.DataArray of bool
13 | DataArray of boolean values denoting which elements are desired.
14 | Examples of desired elements include clear views (or "non-cloud pixels").
15 | This DataArray must have a 'time' dimension.
16 | aggregation_method: str
17 | The aggregation method to use. Can be any of ['min', 'mean', 'median', 'max'].
18 |
19 | Returns
20 | -------
21 | gaps: xarray.DataArray of float64
22 | The time gaps between True values in `data_arr`. Due to limitations of the numpy.datetime64 data type,
23 | the time differences are in seconds, stored as np.float64.
24 | """
25 | from .dc_time import _n64_datetime_to_scalar
26 |
27 | # 1. Convert time from numpy.datetime64 to scalars and broadcast along latitude and longitude.
28 | time = _n64_datetime_to_scalar(data_arr.time)
29 | time, _, _ = xr.broadcast(time, data_arr.latitude, data_arr.longitude)
30 | # 2. Fill each undesired point with its previous desired point's time and find the time differences.
31 | mask = data_arr == 1
32 | time = time.where(mask)
33 | time = time.ffill(dim='time')
34 | time_diff = time.diff('time')
35 | # A time difference can only be 0 due to differencing after the forward fill, so discard those values.
36 | time_diff = time_diff.where(time_diff != 0)
37 | # 3. Calculate the desired statistic for the time differences.
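# Each statistic reduces along the 'time' dimension, yielding one value (in seconds) per pixel.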
38 | if aggregation_method == 'min': return time_diff.min('time')
39 | if aggregation_method == 'mean': return time_diff.mean('time')
40 | if aggregation_method == 'median': return time_diff.median('time')
41 | if aggregation_method == 'max': return time_diff.max('time')
42 |
43 | ## End Data Availability ##
-------------------------------------------------------------------------------- /data_cube_utilities/dc_baseline.py: --------------------------------------------------------------------------------
1 | import numpy as np
2 | import xarray
3 | import xarray.core.ops as ops
4 | import xarray as xr
5 | from itertools import islice
6 |
7 |
8 | def _window(seq, n=2):
9 | """Returns a sliding window (of width n) over data from the iterable:
10 | s -> (s0,s1,...s[n-1]), (s1,s2,...,sn), ... """
11 | it = iter(seq)
12 | result = tuple(islice(it, n))
13 | if len(result) == n:
14 | yield result
15 | for elem in it:
16 | result = result[1:] + (elem,)
17 | yield result
18 |
19 |
20 | def _composite_of_first(arrays, reverse=False, name_suffix="_composite"):
21 | #In memory of Rube Goldberg
22 | narrays = arrays.copy(deep=True)
23 | narrays.values = narrays.values[::-1] if reverse else narrays.values
24 | notnulls = [ops.notnull(array) for array in narrays]
25 | first_notnull = ops.argmax(ops.stack(notnulls), axis=0)
26 | composite = np.choose(first_notnull, narrays)
27 | return xr.DataArray(
28 | composite,
29 | coords=[narrays.latitude, narrays.longitude],
30 | dims=['latitude', 'longitude'],
31 | name="{band}{suffix}".format(band=narrays.name, suffix=name_suffix))
32 |
33 |
34 | def _mosaic(dataset, most_recent_first=False, custom_label="_composite"):
35 | return xr.merge([
36 | _composite_of_first(dataset[variable], reverse=most_recent_first, name_suffix=custom_label)
37 | for variable in dataset.data_vars
38 | ])
39 |
40 |
41 | def _composite_by_average(dataset, custom_label="_composite"):
42 | composite = dataset.mean('time')
43 | return composite
44 |
45 |
46 | ## This should be the only method called from dc baseline
47 | def generate_baseline(dataset, composite_size=5, mode="average", custom_label=""):
48 | ranges = _window(range(len(dataset.time)), n=composite_size + 1)
49 | reffs = (dataset.isel(time=list(frame)[:-1]) for frame in ranges)
50 |
51 | baselines = None
52 | if mode == "average":
53 | baselines = (_composite_by_average(ref, custom_label=custom_label) for ref in reffs)
54 | elif mode == "composite":
55 | baselines = (_mosaic(ref, most_recent_first=True, custom_label=custom_label) for ref in reffs)
56 |
57 | baseline = xr.concat(baselines, dim='time')
58 | baseline['time'] = dataset.time[composite_size:]
59 | return baseline
-------------------------------------------------------------------------------- /data_cube_utilities/dc_clustering.py: --------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import numpy as np
3 | from collections import OrderedDict
4 | import xarray as xr
5 | from xarray.ufuncs import logical_not as xr_not
6 | from xarray.ufuncs import isnan as xr_nan
7 | import matplotlib.pyplot as plt
8 |
9 |
10 | def get_frequency_counts(classification):
11 | """
12 | Get the raw and fractional class frequency counts for a classification `xarray.DataArray`.
13 | Intended to be used with outputs from the `*_cluster_dataset()` functions.
14 |
15 | Parameters
16 | ----------
17 | classification: xarray.DataArray
18 | The cluster values (per-pixel class labels).
19 | Pixels with missing data are labeled -1 and appear as their own class.
20 |
21 | Returns
22 | -------
23 | freqs: np.ndarray of np.float64
24 | A 2D NumPy array containing entries of the format
25 | [class_number, count, frequency] ordered by class number.
26 | """
27 | classifications = classification.values.flatten()
28 | class_nums, class_counts = np.unique(classifications, return_counts=True)
29 | num_classifications = len(classifications)
30 | fractional_freqs = [count / num_classifications for count in class_counts]
31 | freqs = np.array([(class_num, count, freq) for (class_num, count, freq) in
32 | zip(class_nums, class_counts, fractional_freqs)])
33 | return freqs
34 |
35 |
36 | def clustering_pre_processing(dataset_in, bands):
37 | # Determine the pixel indices which have no NaN values and remove all other pixels.
38 | dims = list(dataset_in.dims)
39 | no_nan_mask = xr_not(xr_nan(dataset_in.to_array().transpose(*dims, 'variable')).any('variable')).values
40 | array_from = []
41 | for band in bands:
42 | array_from.append(dataset_in[band].values[no_nan_mask].flatten())
43 | features = np.array(array_from)
44 | features = np.swapaxes(features, 0, 1)
45 | np.set_printoptions(suppress=True)
46 | return features, no_nan_mask
47 |
48 |
49 | def clustering_post_processing(classified, dataset_in, bands, no_nan_mask):
50 | shape = dataset_in[bands[0]].values.shape
51 |
52 | # Reshape the results to the original data's shape.
53 | # We'll use -1 as the "classification" for pixels with missing data.
54 | classification = np.full(shape, -1)
55 | classification[no_nan_mask] = classified.labels_
56 |
57 | dataset_out = xr.DataArray(classification, coords=dataset_in.coords)
58 | return dataset_out
59 |
60 |
61 | def kmeans_cluster_dataset(dataset_in, bands, n_clusters=4):
62 | """
63 | Clusters a dataset with K-means clustering.
64 |
65 | Parameters
66 | ----------
67 | dataset_in: xarray.Dataset
68 | A Dataset containing the bands listed in `bands`.
69 | bands: list of str
70 | A list of names of the bands in `dataset_in` to cluster with.
71 |
72 | Returns
73 | -------
74 | clustered: xarray.DataArray
75 | A DataArray of the same shape as `dataset_in`, containing the numeric
76 | class labels in range [0, n_clusters-1].
77 |
78 | Pixels with missing data in any band are labeled -1, since they are
79 | excluded from clustering.
80 | """
81 | features, no_nan_mask = clustering_pre_processing(dataset_in, bands)
82 | """
83 | classified = AgglomerativeClustering(n_clusters=n_clusters).fit(np_array)
84 | classified = Birch(n_clusters=n_clusters).fit(np_array)
85 | classified = DBSCAN(eps=0.005, min_samples=5).fit(np_array)
86 | """
87 | from sklearn.cluster import KMeans
88 | classified = KMeans(n_clusters=n_clusters).fit(features)
89 | return clustering_post_processing(classified, dataset_in, bands, no_nan_mask)
90 |
91 | def birch_cluster_dataset(dataset_in, bands, n_clusters=4):
92 | """
93 | Clusters a dataset with BIRCH clustering.
94 |
95 | Parameters
96 | ----------
97 | dataset_in: xarray.Dataset
98 | A Dataset containing the bands listed in `bands`.
99 | bands: list of str
100 | A list of names of the bands in `dataset_in` to cluster with.
101 |
102 | Returns
103 | -------
104 | clustered: xarray.DataArray
105 | A DataArray of the same shape as `dataset_in`, containing the numeric
106 | class labels in range [0, n_clusters-1].
107 |
108 | Pixels with missing data in any band are labeled -1, since they are
109 | excluded from clustering.
110 | """
111 | features, no_nan_mask = clustering_pre_processing(dataset_in, bands)
112 | """
113 | classified = AgglomerativeClustering(n_clusters=n_clusters).fit(np_array)
114 | classified = DBSCAN(eps=0.005, min_samples=5).fit(np_array)
115 | classified = KMeans(n_clusters=n_clusters).fit(np_array)
116 | """
117 | from sklearn.cluster import Birch
118 | classified = Birch(n_clusters=n_clusters, threshold=0.00001).fit(features)
119 | return clustering_post_processing(classified, dataset_in, bands, no_nan_mask)
120 |
121 | def plot_kmeans_next_to_mosaic(da_a, da_b):
122 | def mod_rgb(dataset,
123 | at_index = 0,
124 | bands = ['red', 'green', 'blue'],
125 | paint_on_mask = [],
126 | max_possible = 3500,
127 | width = 10
128 | ):
129 | ### < Dataset to RGB Format, needs float values between 0-1
130 | rgb = np.stack([dataset[bands[0]],
131 | dataset[bands[1]],
132 | dataset[bands[2]]], axis = -1).astype(np.int16)
133 |
134 | rgb[rgb<0] = 0
135 | rgb[rgb > max_possible] = max_possible # Filter out saturation points at arbitrarily defined max_possible value
136 |
137 | rgb = rgb.astype(float)
138 | rgb *= 1 / np.max(rgb)
139 | ### >
140 |
141 | ### < takes a T/F mask, apply a color to T areas
142 | for mask, color in paint_on_mask:
143 | rgb[mask] = np.array(color)/ 255.0
144 | ### >
145 |
146 | if 'time' in dataset:
147 | plt.imshow((rgb[at_index]))
148 | else:
149 | plt.imshow(rgb)
150 |
151 | fig = plt.figure(figsize = (15,8))
152 | a=fig.add_subplot(1,2,1)
153 | a.set_title('Kmeans')
154 | plt.imshow(da_a.values, cmap = "magma_r")
155 |
156 | b=fig.add_subplot(1,2,2)
157 | mod_rgb(da_b)
158 | b.set_title('RGB Composite')
-------------------------------------------------------------------------------- /data_cube_utilities/dc_coastal_change.py: --------------------------------------------------------------------------------
1 | # Copyright 2016 United States Government as represented by the Administrator
2 | # of the National Aeronautics and Space Administration. All Rights Reserved.
3 | #
4 | # Portion of this code is Copyright Geoscience Australia, Licensed under the
5 | # Apache License, Version 2.0 (the "License"); you may not use this file
6 | # except in compliance with the License. You may obtain a copy of the License
7 | # at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # The CEOS 2 platform is licensed under the Apache License, Version 2.0 (the
12 | # "License"); you may not use this file except in compliance with the License.
13 | # You may obtain a copy of the License at
14 | # http://www.apache.org/licenses/LICENSE-2.0.
15 | #
16 | # Unless required by applicable law or agreed to in writing, software
17 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
18 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
19 | # License for the specific language governing permissions and limitations
20 | # under the License.
21 |
22 | from datetime import datetime
23 | import numpy as np
24 |
25 |
26 | def compute_coastal_change(old_mosaic, new_mosaic, no_data = -9999):
27 | """Compute the coastal change and coastlines for two mosaics
28 |
29 | Computes the output products and appends them onto the old mosaic as
30 | coastal_change, coastline_old, coastline_new
31 |
32 | Args:
33 | old_mosaic, new_mosaic - single timeslice mosaic data.
34 | 35 | Returns: 36 | Xarray dataset containing all original data with three new variables. 37 | 38 | """ 39 | from .dc_water_classifier import wofs_classify 40 | from .dc_utilities import create_cfmask_clean_mask, create_bit_mask 41 | 42 | # Create a combined bitmask - cfmask if it exists, otherwise pixel_qa. 43 | combined_mask = create_cfmask_clean_mask(old_mosaic.cf_mask) & create_cfmask_clean_mask( 44 | new_mosaic.cf_mask) if 'cf_mask' in old_mosaic else create_bit_mask( 45 | old_mosaic.pixel_qa, [1, 2]) & create_bit_mask(new_mosaic.pixel_qa, [1, 2]) 46 | 47 | old_water = wofs_classify(old_mosaic, mosaic=True, clean_mask=combined_mask, no_data = no_data) 48 | new_water = wofs_classify(new_mosaic, mosaic=True, clean_mask=combined_mask, no_data = no_data) 49 | 50 | coastal_change = new_water - old_water 51 | 52 | coastal_change = coastal_change.where(coastal_change.wofs != 0) 53 | 54 | new_coastline = _coastline_classification_2(new_water) 55 | old_coastline = _coastline_classification_2(old_water) 56 | 57 | old_mosaic['coastal_change'] = coastal_change.wofs 58 | old_mosaic['coastline_old'] = old_coastline.coastline 59 | old_mosaic['coastline_new'] = new_coastline.coastline 60 | 61 | return old_mosaic 62 | 63 | 64 | def mask_mosaic_with_coastlines(dataset): 65 | """Mask a mosaic using old/new coastline""" 66 | 67 | required_measurements = ['red', 'green', 'blue', 'coastline_old', 'coastline_new'] 68 | assert set(required_measurements).issubset( 69 | set(dataset.data_vars)), "Please include all required bands: Red, green, blue, and coastline masks." 70 | 71 | green = _darken_color([89, 255, 61], .8) 72 | pink = [[255, 8, 74], [252, 8, 74], [230, 98, 137], [255, 147, 172], [255, 192, 205]][0] 73 | blue = [[13, 222, 255], [139, 237, 236], [0, 20, 225], [30, 144, 255]][-1] 74 | dataset_clone = dataset.copy(deep=True) 75 | # mask the new coastline in blue. 76 | dataset_clone.red.values[dataset_clone.coastline_new.values == 1] = _adjust_color(blue[0]) 77 | dataset_clone.green.values[dataset_clone.coastline_new.values == 1] = _adjust_color(blue[1]) 78 | dataset_clone.blue.values[dataset_clone.coastline_new.values == 1] = _adjust_color(blue[2]) 79 | #mask the old coastline in green. 80 | dataset_clone.red.values[dataset_clone.coastline_old.values == 1] = _adjust_color(green[0]) 81 | dataset_clone.green.values[dataset_clone.coastline_old.values == 1] = _adjust_color(green[1]) 82 | dataset_clone.blue.values[dataset_clone.coastline_old.values == 1] = _adjust_color(green[2]) 83 | 84 | return dataset_clone 85 | 86 | 87 | def mask_mosaic_with_coastal_change(dataset): 88 | """Mask a mosaic with coastal change""" 89 | 90 | required_measurements = ['red', 'green', 'blue', 'coastal_change'] 91 | assert set(required_measurements).issubset( 92 | set(dataset.data_vars)), "Please include all required bands: Red, green, blue, and coastal change." 
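# Color key: pink marks pixels that changed to water (coastal_change == 1), green marks pixels that changed to land (coastal_change == -1).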
93 | 94 | green = _darken_color([89, 255, 61], .8) 95 | pink = [[255, 8, 74], [252, 8, 74], [230, 98, 137], [255, 147, 172], [255, 192, 205]][0] 96 | blue = [[13, 222, 255], [139, 237, 236], [0, 20, 225], [30, 144, 255]][-1] 97 | dataset_clone = dataset.copy(deep=True) 98 | dataset_clone.red.values[dataset_clone.coastal_change.values == 1] = _adjust_color(pink[0]) 99 | dataset_clone.green.values[dataset_clone.coastal_change.values == 1] = _adjust_color(pink[1]) 100 | dataset_clone.blue.values[dataset_clone.coastal_change.values == 1] = _adjust_color(pink[2]) 101 | 102 | dataset_clone.red.values[dataset_clone.coastal_change.values == -1] = _adjust_color(green[0]) 103 | dataset_clone.green.values[dataset_clone.coastal_change.values == -1] = _adjust_color(green[1]) 104 | dataset_clone.blue.values[dataset_clone.coastal_change.values == -1] = _adjust_color(green[2]) 105 | 106 | return dataset_clone 107 | 108 | 109 | def _adjust_color(color, scale=4096): 110 | return int(float(color * scale) / 255.0) 111 | 112 | 113 | def _darken_color(color, scale=0.8): 114 | return [int(float(x * scale)) for x in color] 115 | 116 | 117 | def _coastline_classification(dataset, water_band='wofs'): 118 | import scipy.ndimage.filters as conv 119 | 120 | kern = np.array([[1, 1, 1], [1, 0.001, 1], [1, 1, 1]]) 121 | convolved = conv.convolve(dataset[water_band], kern, mode='constant') // 1 122 | 123 | ds = dataset.where(convolved > 0) 124 | ds = ds.where(convolved < 6) 125 | ds.wofs.values[~np.isnan(ds.wofs.values)] = 1 126 | ds.wofs.values[np.isnan(ds.wofs.values)] = 0 127 | ds = ds.rename({"wofs": "coastline"}) 128 | 129 | return ds 130 | 131 | 132 | def _coastline_classification_2(dataset, water_band='wofs'): 133 | import scipy.ndimage.filters as conv 134 | 135 | kern = np.array([[1, 1, 1], [1, 0.001, 1], [1, 1, 1]]) 136 | convolved = conv.convolve(dataset[water_band], kern, mode='constant', cval=-999) // 1 137 | 138 | ds = dataset.copy(deep=True) 139 | ds.wofs.values[(~np.isnan(ds[water_band].values)) & (ds.wofs.values == 1)] = 1 140 | ds.wofs.values[convolved < 0] = 0 141 | ds.wofs.values[convolved > 6] = 0 142 | ds = ds.rename({"wofs": "coastline"}) 143 | 144 | return ds 145 | -------------------------------------------------------------------------------- /data_cube_utilities/dc_displayutil.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | import matplotlib.pyplot as plt 4 | import datetime 5 | import math 6 | import collections 7 | 8 | 9 | def _is_list(var): 10 | return isinstance(var, (list, tuple)) 11 | 12 | 13 | def display_at_time(cubes, time = None, color = [238,17,17], width = 3, name = None, on_pixel = True, mode = None,h=4,w=10): 14 | lop = [] 15 | for x in cubes: 16 | if isinstance(x, tuple): 17 | #Overlays the not-nan values of the 'subject' on the 'canvas'. 
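# Each tuple is (canvas, subject); both are matched to the requested time by nearest-neighbor selection.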
18 | canvas = x[0].sel(time = time, method = 'nearest').squeeze()
19 | subject = x[1].sel(time = time, method = 'nearest').squeeze()
20 | overlay = _overlayer(canvas, subject, color = color,on_pixel = on_pixel, mode = mode)
21 | lop.append(overlay)
22 | else:
23 | selection = x.sel(time = time, method = 'nearest').squeeze()
24 | lop.append(_to_image(selection))
25 | _display_list_of_plottables(lop, maxwidth = width, name = name,h = h, w = w)
26 |
27 | def _to_image(cube,minval = 0, maxval = 2000, backfill = []):
28 | red, green, blue = [np.array(cube.red.values), np.array(cube.green.values), np.array(cube.blue.values)]
29 | red = np.copy(red)
30 | green = np.copy(green)
31 | blue = np.copy(blue)
32 | red[red < minval] = minval
33 | green[green < minval] = minval
34 | blue[blue < minval] = minval
35 |
36 | red[red > maxval] = maxval
37 | green[green > maxval] = maxval
38 | blue[blue > maxval] = maxval
39 |
40 | red = red/maxval
41 | green = green/maxval
42 | blue = blue/maxval
43 |
44 | red = (abs(red -1) * 255).astype(np.int16)
45 | green = (abs(green -1) * 255).astype(np.int16)
46 | blue = (abs(blue -1) * 255).astype(np.int16)
47 |
48 | rgb = np.dstack([red,green,blue])
49 | rgb = _reversedim(rgb, k = 0)
50 | return rgb
51 |
52 |
53 | def _display_list_of_plottables(plotables, maxwidth = 3, name= 'figure', h=4,w=10):
54 | assert _is_list(plotables), "the plotables argument must be a list"
55 | height = math.ceil(len(plotables)/maxwidth)
56 | plt.figure(name,figsize=(w, h))
57 | for index, item in enumerate(plotables):
58 | plt.subplot(height, maxwidth, index+1)
59 | plt.imshow(item)
60 | plt.show()
61 |
62 |
63 | def _reversedim(M,k=0):
64 | idx = tuple((slice(None,None,-1) if ii == k else slice(None)
65 | for ii in range(M.ndim)))
66 | return M[idx]
67 |
68 |
69 | def _overlayer(canvas, overlay, band = 'red',color = [238,17,17], on_pixel = True, mode = None):
70 | subject = overlay[band]
71 | if on_pixel is True:
72 | subject_indices = np.copy(np.dstack(np.where(~np.isnan(subject.values)))[0])
73 | else:
74 | subject_indices = np.copy(np.dstack(np.where(np.isnan(subject.values)))[0])
75 | rgb_canvas = np.copy(_to_image(canvas))
76 | if mode == 'blend':
77 | for x in subject_indices:
78 | rgb_canvas[x[0]][x[1]][0] = ((255 - color[0]) + rgb_canvas[x[0]][x[1]][0])/2
79 | rgb_canvas[x[0]][x[1]][1] = ((255 - color[1]) + rgb_canvas[x[0]][x[1]][1])/2
80 | rgb_canvas[x[0]][x[1]][2] = ((255 - color[2]) + rgb_canvas[x[0]][x[1]][2])/2
81 | return rgb_canvas
82 | else:
83 | for x in subject_indices:
84 | rgb_canvas[x[0]][x[1]][0] = 255 - color[0]
85 | rgb_canvas[x[0]][x[1]][1] = 255 - color[1]
86 | rgb_canvas[x[0]][x[1]][2] = 255 - color[2]
87 | return rgb_canvas
88 |
89 |
90 | def __is_iterable(value):
91 | return isinstance(value, collections.Iterable)
92 |
93 | def _np64_to_datetime(dt64):
94 | ts = (dt64 - np.datetime64('1970-01-01T00:00:00Z')) / np.timedelta64(1, 's')
95 | return datetime.datetime.utcfromtimestamp(ts)
96 |
97 |
-------------------------------------------------------------------------------- /data_cube_utilities/dc_ndvi_anomaly.py: --------------------------------------------------------------------------------
1 | import xarray as xr
2 | import numpy as np
3 | # This import is only for backwards compatibility; use vegetation.py directly in new code.
4 | from .vegetation import EVI as _EVI_orig, EVI2 as _EVI2_orig, NDVI as _NDVI_orig
5 |
6 |
7 | def EVI(*args, **kwargs):
8 | """
9 | Instead of this function, please use the EVI() function in vegetation.py.
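This thin wrapper only forwards its arguments and is kept for backwards compatibility.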
10 | """ 11 | return _EVI_orig(*args, **kwargs) 12 | 13 | 14 | def EVI2(*args, **kwargs): 15 | """ 16 | Instead of this function, please use the EVI2() function in vegetation.py. 17 | """ 18 | return _EVI2_orig(*args, **kwargs) 19 | 20 | 21 | def NDVI(*args, **kwargs): 22 | """ 23 | Instead of this function, please use the NDVI() function in vegetation.py. 24 | """ 25 | return _NDVI_orig(*args, **kwargs) 26 | 27 | 28 | def compute_ndvi_anomaly(baseline_data, 29 | scene_data, 30 | baseline_clear_mask=None, 31 | selected_scene_clear_mask=None, 32 | no_data=-9999): 33 | """Compute the scene+baseline median ndvi values and the difference 34 | 35 | Args: 36 | basleine_data: xarray dataset with dims lat, lon, t 37 | scene_data: xarray dataset with dims lat, lon - should be mosaicked already. 38 | baseline_clear_mask: boolean mask signifying clear pixels for the baseline data 39 | selected_scene_clear_mask: boolean mask signifying lcear pixels for the baseline data 40 | no_data: nodata value for the datasets 41 | 42 | Returns: 43 | xarray dataset with scene_ndvi, baseline_ndvi(median), ndvi_difference, and ndvi_percentage_change. 44 | """ 45 | from .dc_water_classifier import wofs_classify 46 | 47 | assert selected_scene_clear_mask is not None and baseline_clear_mask is not None, "Both the selected scene and baseline data must have associated clear mask data." 48 | 49 | #cloud filter + nan out all nodata. 50 | baseline_data = baseline_data.where((baseline_data != no_data) & baseline_clear_mask) 51 | 52 | baseline_ndvi = (baseline_data.nir - baseline_data.red) / (baseline_data.nir + baseline_data.red) 53 | median_ndvi = baseline_ndvi.median('time') 54 | 55 | #scene should already be mosaicked. 56 | water_class = wofs_classify(scene_data, clean_mask=selected_scene_clear_mask, mosaic=True).wofs 57 | scene_cleaned = scene_data.copy(deep=True).where((scene_data != no_data) & (water_class == 0)) 58 | scene_ndvi = (scene_cleaned.nir - scene_cleaned.red) / (scene_cleaned.nir + scene_cleaned.red) 59 | 60 | ndvi_difference = scene_ndvi - median_ndvi 61 | ndvi_percentage_change = (scene_ndvi - median_ndvi) / median_ndvi 62 | 63 | #convert to conventional nodata vals. 64 | scene_ndvi.values[~np.isfinite(scene_ndvi.values)] = no_data 65 | ndvi_difference.values[~np.isfinite(ndvi_difference.values)] = no_data 66 | ndvi_percentage_change.values[~np.isfinite(ndvi_percentage_change.values)] = no_data 67 | 68 | scene_ndvi_dataset = xr.Dataset( 69 | { 70 | 'scene_ndvi': scene_ndvi, 71 | 'baseline_ndvi': median_ndvi, 72 | 'ndvi_difference': ndvi_difference, 73 | 'ndvi_percentage_change': ndvi_percentage_change 74 | }, 75 | coords={'latitude': scene_data.latitude, 76 | 'longitude': scene_data.longitude}) 77 | 78 | return scene_ndvi_dataset 79 | -------------------------------------------------------------------------------- /data_cube_utilities/dc_rgb.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | from time import time 3 | import numpy as np 4 | 5 | 6 | # Change the bands (RGB) here if you want other false color combinations 7 | def rgb(dataset, time_index=0, x_coord='longitude', y_coord='latitude', 8 | bands=['red', 'green', 'blue'], paint_on_mask = [], 9 | min_inten=0.0, max_inten=1.0, 10 | width=10, fig=None, ax=None, imshow_kwargs=None, 11 | percentiles=(5, 95)): 12 | """ 13 | Creates a figure showing an area, using three specified bands as the rgb componenets. 
14 | 15 | Parameters 16 | ---------- 17 | dataset: xarray.Dataset 18 | A Dataset containing at least latitude and longitude coordinates and optionally time. 19 | The coordinate order should be time, latitude, and finally longitude. 20 | Must contain the data variables specified in the `bands` parameter. 21 | time_index: 22 | The time index to show data for if `dataset` is not 2D. 23 | x_coord, y_coord, time_coord: str 24 | Names of DataArrays in `dataset_in` to use as x, y, and time coordinates. 25 | bands: list-like 26 | A list-like containing 3 names of data variables in `dataset` to use as the red, green, and blue 27 | bands, respectively. 28 | paint_on_mask: tuple 29 | A 2-tuple of a boolean NumPy array (a "mask") and a list-like of 3 numeric values 30 | in the range [0, 255]. The array specifies where to "paint" over the RGB image with 31 | the RGB color specified by the second element. 32 | min_inten, max_inten: float 33 | The min and max intensities for any band. These can be in range [0,1]. 34 | These can be used to brighten or darken the image. 35 | width: int 36 | The width of the figure in inches. 37 | fig: matplotlib.figure.Figure 38 | The figure to use for the plot. 39 | If only `fig` is supplied, the Axes object used will be the first. 40 | ax: matplotlib.axes.Axes 41 | The axes to use for the plot. 42 | imshow_kwargs: dict 43 | The dictionary of keyword arguments passed to `ax.imshow()`. 44 | You can pass a colormap here with the key 'cmap'. 45 | percentiles: list-like, default (5, 95) 46 | A 2-tuple of the lower and upper percentiles of the selected bands to set the min and max intensities 47 | to. The lower or upper values are overridden by `vmin` and `vmax` in `imshow_kwargs`. 48 | The range is [0, 100]. So `percentiles=(0,100)` would scale to the min and max of the selected bands. 49 | 50 | Returns 51 | ------- 52 | fig, ax: matplotlib.figure.Figure, matplotlib.axes.Axes 53 | The figure and axes used for the plot. 54 | """ 55 | from .plotter_utils import figure_ratio, \ 56 | xarray_set_axes_labels, retrieve_or_create_fig_ax 57 | 58 | imshow_kwargs = {} if imshow_kwargs is None else imshow_kwargs 59 | vmin = imshow_kwargs.pop('vmin', None) 60 | vmax = imshow_kwargs.pop('vmax', None) 61 | 62 | ### < Dataset to RGB Format, needs float values between 0-1 63 | rgb = np.stack([dataset[bands[0]], 64 | dataset[bands[1]], 65 | dataset[bands[2]]], axis = -1) 66 | # Interpolate values to be in the range [0,1] for creating the image. 67 | if vmin is None: 68 | vmin = np.nanpercentile(rgb, percentiles[0]) 69 | if vmax is None: 70 | vmax = np.nanpercentile(rgb, percentiles[1]) 71 | rgb = np.interp(rgb, (vmin, vmax), [min_inten,max_inten]) 72 | rgb = rgb.astype(float) 73 | ### > 74 | 75 | ### < takes a T/F mask, apply a color to T areas 76 | for mask, color in paint_on_mask: 77 | rgb[mask] = np.array(color)/ 255.0 78 | ### > 79 | 80 | fig, ax = retrieve_or_create_fig_ax(fig, ax, figsize=figure_ratio(rgb.shape[:2], fixed_width = width)) 81 | 82 | xarray_set_axes_labels(dataset, ax, x_coord, y_coord) 83 | 84 | if 'time' in dataset.dims: 85 | ax.imshow(rgb[time_index], **imshow_kwargs) 86 | else: 87 | ax.imshow(rgb, vmin=vmin, vmax=vmax, **imshow_kwargs) 88 | 89 | return fig, ax -------------------------------------------------------------------------------- /data_cube_utilities/dc_sar_utils.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | 4 | 5 | #db is given by 10*log10(DN) + CF. 
for ALOS, CF is apparently -83; for Sentinel-1 it is 0. 6 | # Modifies in place. 7 | def dn_to_db(dataset_in, data_vars=['hh', 'hv'], cf=-83): 8 | for data_var in data_vars: 9 | dataset_in[data_var] = ( 10 | 10 * xr.ufuncs.log10(xr.ufuncs.square(dataset_in[data_var].astype('float64'))) + cf).astype('float32') 11 | -------------------------------------------------------------------------------- /data_cube_utilities/dc_slip.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | 5 | def compute_slip(baseline_data, target_data, dem_data, no_data=-9999): 6 | """ 7 | Compute the SLIP product for a baseline and target dataset. 8 | 9 | Parameters 10 | ---------- 11 | baseline_data: 12 | single timeslice dataset with dims lat, lon 13 | target_data: 14 | single timeslice dataset with dims lat, lon 15 | dem_data: 16 | DEM dataset for the above datasets with corresponding lat, lon indices 17 | no_data: the nodata value of the input datasets; default -9999 18 | Returns 19 | ------- 20 | out: xarray.DataArray 21 | The SLIP mask (1 where SLIP was detected, 0 elsewhere), with dtype int16 22 | """ 23 | 24 | required_measurements = ['red', 'nir', 'swir1'] 25 | assert set(required_measurements).issubset( 26 | set(baseline_data.data_vars)), "Please include all required bands: Red, NIR, SWIR1." 27 | assert set(required_measurements).issubset( 28 | set(target_data.data_vars)), "Please include all required bands: Red, NIR, SWIR1." 29 | 30 | slip_mask = xr.Dataset( 31 | { 32 | 'slip': (tuple(target_data.dims.keys()), np.ones(target_data.red.values.shape)) 33 | }, 34 | coords={'latitude': target_data.latitude, 35 | 'longitude': target_data.longitude}) 36 | 37 | filtered_baseline_data = baseline_data.where(baseline_data != no_data) 38 | 39 | ndwi_target = (target_data.nir - target_data.swir1) / (target_data.nir + target_data.swir1) 40 | ndwi_baseline = (filtered_baseline_data.nir - filtered_baseline_data.swir1) / ( 41 | filtered_baseline_data.nir + filtered_baseline_data.swir1) 42 | ndwi_change = ndwi_target - ndwi_baseline 43 | target_ndwi_filtered = slip_mask.where(abs(ndwi_change) > 0.20) 44 | 45 | red_change = (target_data.red - filtered_baseline_data.red) / (filtered_baseline_data.red) 46 | target_red_filtered = target_ndwi_filtered.where(red_change > 0.40) 47 | 48 | is_above_slope_threshold = create_slope_mask(dem_data, degree_threshold=15, resolution=30) 49 | target_red_slope_filtered = target_red_filtered.where(is_above_slope_threshold) 50 | 51 | slip_mask.slip.values[target_red_slope_filtered.isnull().slip.values] = 0 52 | return slip_mask.slip.astype('int16') 53 | 54 | 55 | def mask_mosaic_with_slip(dataset): 56 | """Using a mosaic and the slip mask, create a red masked dataset to be written to a png. 57 | """ 58 | required_measurements = ['red', 'green', 'blue', 'slip'] 59 | assert set(required_measurements).issubset( 60 | set(dataset.data_vars)), "Please include all required bands: Red, green, blue, and slip mask."
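# Paint the pixels flagged by the SLIP mask red: set the red band to a bright surface-reflectance value (4096) and zero out green and blue.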
61 | 62 | masked_dataset = dataset.copy(deep=True) 63 | masked_dataset.red.values[masked_dataset.slip.values == 1] = 4096 64 | masked_dataset.green.values[masked_dataset.slip.values == 1] = 0 65 | masked_dataset.blue.values[masked_dataset.slip.values == 1] = 0 66 | 67 | return masked_dataset 68 | 69 | 70 | def _generate_gradient(matrix, resolution=1.0, remove_border=False): 71 | north, east = np.gradient(matrix) 72 | max_gradient = np.maximum.reduce([abs(north), abs(east)]) 73 | if remove_border: 74 | max_gradient[:, 0] = np.nan 75 | max_gradient[:, -1] = np.nan 76 | max_gradient[0, :] = np.nan 77 | max_gradient[-1, :] = np.nan 78 | return (max_gradient / float(resolution)) 79 | 80 | 81 | def _generate_degree(dem_matrix, resolution=1.0): 82 | return np.rad2deg(np.arctan(_generate_gradient(dem_matrix, resolution=resolution))) 83 | 84 | 85 | def create_slope_mask(dem_data, resolution=1.0, degree_threshold=15, no_data=-9999): 86 | ## Uses values at the first DEM acquisition date 87 | target = dem_data.dem.values[0].astype(np.float32) 88 | target[target == no_data] = np.nan 89 | ## Generate the gradient per DEM pixel, then convert it to degrees per DEM pixel 90 | slopes = _generate_gradient(target, resolution=resolution) 91 | angle_of_elevation = np.rad2deg(np.arctan(slopes)) 92 | ## Create a mask of pixels steeper than `degree_threshold` degrees. Here is what 15 degrees looks like: https://i.stack.imgur.com/BIrAW.png 93 | mask = angle_of_elevation > degree_threshold 94 | return mask 95 | -------------------------------------------------------------------------------- /data_cube_utilities/dc_time.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from datetime import datetime 3 | 4 | def dt_to_str(date, fmt='%Y-%m-%d'): 5 | """ 6 | Converts a datetime object to a string. 7 | """ 8 | return date.strftime(fmt) 9 | 10 | def _n64_to_datetime(n64): 11 | """ 12 | Converts NumPy 64-bit timestamps (nanosecond units) to datetime objects. 13 | """ 14 | return datetime.utcfromtimestamp(n64.tolist() / 1e9) 15 | 16 | def _n64_datetime_to_scalar(dt64): 17 | """ 18 | Converts a NumPy datetime64 object to the number of seconds since 19 | midnight, January 1, 1970, as a NumPy float64. 20 | 21 | Returns 22 | ------- 23 | scalar: numpy.float64 24 | The number of seconds since midnight, January 1, 1970, as a NumPy float64. 25 | """ 26 | return (dt64 - np.datetime64('1970-01-01T00:00:00Z')) / np.timedelta64(1, 's') 27 | 28 | def _scalar_to_n64_datetime(scalar): 29 | """ 30 | Converts a floating point number to a NumPy datetime64 object. 31 | 32 | Returns 33 | ------- 34 | dt64: numpy.datetime64 35 | The NumPy datetime64 object representing the datetime of the scalar argument. 36 | """ 37 | return (scalar * np.timedelta64(1, 's')) + np.datetime64('1970-01-01T00:00:00Z') -------------------------------------------------------------------------------- /data_cube_utilities/dc_water_quality.py: -------------------------------------------------------------------------------- 1 | import gc 2 | import numpy as np 3 | import xarray as xr 4 | from xarray.ufuncs import sign as xr_sign 5 | 6 | from .
import dc_utilities as utilities 7 | from .dc_utilities import create_default_clean_mask 8 | from datetime import datetime 9 | import warnings 10 | 11 | def _tsmi(dataset): 12 | out = (dataset.red + dataset.green) * 0.0001 / 2 13 | return out.where(out>0, 0) 14 | 15 | def tsm(dataset_in, clean_mask=None, no_data=0): 16 | """ 17 | Calculate Total Suspended Matter (TSM) for water. 18 | 19 | Parameters 20 | ---------- 21 | dataset_in: xarray.Dataset 22 | Dataset retrieved from the Data Cube. 23 | Must have 'red' and 'green' data variables. 24 | clean_mask: np.ndarray 25 | A NumPy array with dtype boolean 26 | True for values considered clean; 27 | if no clean mask is supplied, all values will be considered clean 28 | no_data: numeric 29 | no data pixel value; default: 0 30 | 31 | Raises 32 | ------ 33 | AssertionError 34 | if dataset_in does not contain both the 'red' and 'green' data variables. 35 | """ 36 | assert 'red' in dataset_in and 'green' in dataset_in, "Red and Green bands are required for the TSM analysis." 37 | # Default to masking nothing. 38 | if clean_mask is None: 39 | clean_mask = create_default_clean_mask(dataset_in) 40 | 41 | tsm = 3983 * _tsmi(dataset_in)**1.6246 42 | tsm = tsm.where(clean_mask, no_data) 43 | 44 | # Create xarray of data 45 | _coords = { key:dataset_in[key] for key in dataset_in.dims.keys()} 46 | dataset_out = xr.Dataset({'tsm': tsm}, coords=_coords) 47 | return dataset_out 48 | 49 | 50 | def mask_water_quality(dataset_in, wofs): 51 | import scipy.ndimage as conv 52 | 53 | wofs_criteria = wofs.where(wofs > 0.8) 54 | wofs_criteria.values[wofs_criteria.values > 0] = 0 55 | kernel = np.array([[1, 1, 1], [1, 1, 1], [1, 1, 1]]) 56 | 57 | mask = conv.convolve(wofs_criteria.values, kernel, mode='constant') 58 | mask = mask.astype(np.float32) 59 | 60 | dataset_out = dataset_in.copy(deep=True) 61 | for var in dataset_out.data_vars: 62 | dataset_out[var].values += mask 63 | dataset_out = dataset_out.fillna(0) 64 | 65 | return dataset_out 66 | 67 | 68 | def watanabe_chlorophyll(dataset_in, clean_mask=None, no_data=0): 69 | assert 'red' in dataset_in and 'nir' in dataset_in, "Red and NIR bands are required for the Watanabe Chlorophyll analysis." 70 | # Default to masking nothing. 71 | if clean_mask is None: 72 | clean_mask = create_default_clean_mask(dataset_in) 73 | 74 | chl_a = 925.001 * (dataset_in.nir.astype('float64') / dataset_in.red.astype('float64')) - 77.16 75 | chl_a.values[np.invert(clean_mask)] = no_data # Contains data for clear pixels 76 | 77 | # Create xarray of data 78 | time = dataset_in.time 79 | latitude = dataset_in.latitude 80 | longitude = dataset_in.longitude 81 | dataset_out = xr.Dataset( 82 | { 83 | 'watanabe_chlorophyll': chl_a 84 | }, coords={'time': time, 85 | 'latitude': latitude, 86 | 'longitude': longitude}) 87 | return dataset_out 88 | 89 | 90 | def nazeer_chlorophyll(dataset_in, clean_mask=None, no_data=0): 91 | # Default to masking nothing.
92 | if clean_mask is None: 93 | clean_mask = create_default_clean_mask(dataset_in) 94 | 95 | chl_a = (0.57 * (dataset_in.red.astype('float64') * 0.0001) / 96 | (dataset_in.blue.astype('float64') * 0.0001)**2) - 2.61 97 | chl_a.values[np.invert(clean_mask)] = no_data # Contains data for clear pixels 98 | 99 | # Create xarray of data 100 | time = dataset_in.time 101 | latitude = dataset_in.latitude 102 | longitude = dataset_in.longitude 103 | dataset_out = xr.Dataset( 104 | { 105 | 'nazeer_chlorophyll': chl_a 106 | }, coords={'time': time, 107 | 'latitude': latitude, 108 | 'longitude': longitude}) 109 | return dataset_out 110 | -------------------------------------------------------------------------------- /data_cube_utilities/endmembers_landsat.csv: -------------------------------------------------------------------------------- 1 | 0.221278,0.194657,0.185084 2 | 0.158646,0.124426,0.133194 3 | 0.318769,0.353266,0.351759 4 | 0.27837,0.205394,0.219051 5 | 0.110084,0.207552,0.195807 6 | 0.240117,0.313152,0.372525 7 | -0.00350365,-0.0136971,-0.0252406 8 | -0.45829,-0.489322,-0.451449 9 | 0.00336074,0.0565252,0.026707 10 | -0.102095,-0.214333,-0.191102 11 | -0.431983,-0.343912,-0.3986 12 | -0.120435,-0.186504,-0.130198 13 | -0.356991,-0.349285,-0.34865 14 | -0.273501,-0.258459,-0.264982 15 | -0.369887,-0.35342,-0.332965 16 | -0.308914,-0.294818,-0.285217 17 | -0.41083,-0.276556,-0.243608 18 | -0.47814,-0.319201,-0.259104 19 | 0.0471296,0.00541363,-0.0144294 20 | 0.0672315,0.0330578,0.0134346 21 | 0.0961602,0.0463529,0.0250543 22 | 0.0553179,0.0345365,0.014565 23 | 0.0537376,0.0266596,0.0184105 24 | 0.0513707,0.0216845,0.00962467 25 | 0.0734568,0.0192625,0.00656426 26 | 0.00159656,-0.00577743,-0.0140936 27 | 0.0118977,-0.00509826,0.00269739 28 | 0.14005,0.107032,0.0912866 29 | 0.0297587,0.0760549,0.0730475 30 | 0.0366517,0.0648199,0.0852012 31 | 0.0796892,0.0702537,0.0586763 32 | 0.0983765,0.0713539,0.085325 33 | -0.0920991,0.0392889,0.0796964 34 | -0.00286206,-0.0310567,-0.0308064 35 | 0.00332116,0.0176412,0.00479461 36 | -0.0183138,-0.0417001,-0.0202645 37 | -0.0826381,-0.0597683,-0.0815997 38 | 0.0910667,0.114301,0.123672 39 | -0.0413266,-0.00853333,-0.0110451 40 | -0.266485,-0.243911,-0.208223 41 | -0.11121,-0.0795158,-0.0885339 42 | 0.0248843,-0.0103428,0.00376874 43 | 0.413122,0.41799,0.362451 44 | -0.20743,-0.193644,-0.19598 45 | 0.125396,0.0725202,0.0962061 46 | 0.153951,0.153932,0.175212 47 | -0.213856,-0.271798,-0.269369 48 | -0.0451829,-0.0389898,-0.0567644 49 | -0.204545,-0.21806,-0.196447 50 | 0.028604,0.0448428,0.0379768 51 | 0.100117,0.0171461,0.0262317 52 | -0.181644,-0.133052,-0.169625 53 | -0.100321,-0.152294,-0.0979239 54 | 0.218034,0.251028,0.218154 55 | 0.263396,0.205769,0.192002 56 | 0.0144514,0.0765301,0.00908153 57 | 0.121607,0.0628187,0.0826527 58 | 0.0858304,-0.0312874,-0.00785044 59 | -0.27968,-0.290508,-0.304775 60 | -0.119071,-0.218026,-0.159731 61 | -0.133257,-0.0817596,-0.132507 62 | 0.112391,0.0352191,0.0184495 63 | 0.217303,0.131567,0.181877 64 | -------------------------------------------------------------------------------- /data_cube_utilities/import_export.py: -------------------------------------------------------------------------------- 1 | import time 2 | import numpy as np 3 | import xarray as xr 4 | 5 | from . import dc_utilities 6 | import datacube 7 | import rasterio 8 | 9 | ## Export ## 10 | 11 | def export_xarray_to_netcdf(data, path): 12 | """ 13 | Exports an xarray object as a single NetCDF file. 
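Attributes that cannot be serialized to NetCDF are removed for the export (the Data Cube CRS object, if present, is converted to a string) and restored afterward.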
14 | 15 | Parameters 16 | ---------- 17 | data: xarray.Dataset or xarray.DataArray 18 | The Dataset or DataArray to export. 19 | path: str 20 | The path to store the exported NetCDF file at. 21 | Must include the filename and ".nc" extension. 22 | """ 23 | # Record original attributes to restore after export. 24 | orig_data_attrs = data.attrs.copy() 25 | orig_data_var_attrs = {} 26 | if isinstance(data, xr.Dataset): 27 | for data_var in data.data_vars: 28 | orig_data_var_attrs[data_var] = data[data_var].attrs.copy() 29 | 30 | # If present, convert the CRS object from the Data Cube to a string. 31 | # String and numeric attributes are retained. 32 | # All other attributes are removed. 33 | def handle_attr(data, attr): 34 | if attr == 'crs' and not isinstance(data.attrs[attr], str): 35 | data.attrs[attr] = data.crs.crs_str 36 | elif not isinstance(data.attrs[attr], (str, int, float)): 37 | del data.attrs[attr] 38 | 39 | # To be able to call `xarray.Dataset.to_netcdf()`, convert the CRS 40 | # object from the Data Cube to a string, retain string and numeric 41 | # attributes, and remove all other attributes. 42 | for attr in list(data.attrs): 43 | handle_attr(data, attr) 44 | if isinstance(data, xr.Dataset): 45 | for data_var in data.data_vars: 46 | for attr in list(data[data_var].attrs): 47 | handle_attr(data[data_var], attr) 48 | # Move units from the time coord attributes to its encoding. 49 | if 'time' in data.coords: 50 | orig_time_attrs = data.time.attrs.copy() 51 | if 'units' in data.time.attrs: 52 | time_units = data.time.attrs['units'] 53 | del data.time.attrs['units'] 54 | data.time.encoding['units'] = time_units 55 | # Export to NetCDF. 56 | data.to_netcdf(path) 57 | # Restore original attributes. 58 | data.attrs = orig_data_attrs 59 | if 'time' in data.coords: 60 | data.time.attrs = orig_time_attrs 61 | if isinstance(data, xr.Dataset): 62 | for data_var in data.data_vars: 63 | data[data_var].attrs = orig_data_var_attrs[data_var] 64 | 65 | def export_slice_to_geotiff(ds, path, x_coord='longitude', y_coord='latitude'): 66 | """ 67 | NOTE: Instead of this function, please use `import_export.export_xarray_to_geotiff()`. 68 | 69 | Exports a 2D slice of an xarray.Dataset as a GeoTIFF. 70 | 71 | ds: xarray.Dataset 72 | The Dataset to export. Must have exactly 2 dimensions - 'latitude' and 'longitude'. 73 | path: str 74 | The path to store the exported GeoTIFF. 75 | x_coord, y_coord: string 76 | Names of the x and y coordinates in `ds`. 77 | """ 78 | kwargs = dict(tif_path=path, data=ds.astype(np.float32), bands=list(ds.data_vars.keys()), 79 | x_coord=x_coord, y_coord=y_coord) 80 | if 'crs' in ds.attrs: 81 | kwargs['crs'] = str(ds.attrs['crs']) 82 | dc_utilities.write_geotiff_from_xr(**kwargs) 83 | 84 | 85 | def export_xarray_to_multiple_geotiffs(ds, path, x_coord='longitude', y_coord='latitude'): 86 | """ 87 | Exports an xarray.Dataset as individual time slices - one GeoTIFF per time slice. 88 | 89 | Parameters 90 | ---------- 91 | ds: xarray.Dataset 92 | The Dataset to export. Must have exactly 3 dimensions - 'latitude', 'longitude', and 'time'. 93 | The 'time' dimension must have type `numpy.datetime64`. 94 | path: str 95 | The path prefix to store the exported GeoTIFFs. For example, 'geotiffs/mydata' would result in files named like 96 | 'mydata_2016_12_05_12_31_36.tif' within the 'geotiffs' folder. 97 | x_coord, y_coord: string 98 | Names of the x and y coordinates in `ds`.
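Examples -------- A minimal usage sketch; the dataset `landsat_ds` (with 'latitude', 'longitude', and 'time' dimensions) and the output prefix are hypothetical: >>> export_xarray_to_multiple_geotiffs(landsat_ds, 'geotiffs/mydata')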
99 | """ 100 | def time_to_string(t): 101 | return time.strftime("%Y_%m_%d_%H_%M_%S", time.gmtime(t.astype(int) / 1000000000)) 102 | 103 | for t_ind, t in enumerate(ds.time): 104 | time_slice_xarray = ds.isel(time=t_ind) 105 | export_slice_to_geotiff(time_slice_xarray, 106 | path + "_" + time_to_string(t) + ".tif", 107 | x_coord=x_coord, y_coord=y_coord) 108 | 109 | 110 | def export_xarray_to_geotiff(data, tif_path, bands=None, no_data=-9999, crs="EPSG:4326", 111 | x_coord='longitude', y_coord='latitude'): 112 | """ 113 | Export a GeoTIFF from a 2D `xarray.Dataset`. 114 | 115 | Parameters 116 | ---------- 117 | data: xarray.Dataset or xarray.DataArray 118 | An xarray with 2 dimensions to be exported as a GeoTIFF. 119 | If the dtype is `bool`, convert to dtype `numpy.uint8`. 120 | tif_path: string 121 | The path to write the GeoTIFF file to. You should include the file extension. 122 | bands: list of string 123 | The bands to write - in the order they should be written. 124 | Ignored if `data` is an `xarray.DataArray`. 125 | no_data: int 126 | The nodata value. 127 | crs: string 128 | The CRS of the output. 129 | x_coord, y_coord: string 130 | The string names of the x and y dimensions. 131 | """ 132 | from .dc_utilities import _get_transform_from_xr 133 | 134 | if isinstance(data, xr.DataArray): 135 | height, width = data.sizes[y_coord], data.sizes[x_coord] 136 | count, dtype = 1, data.dtype 137 | else: 138 | if bands is None: 139 | bands = list(data.data_vars.keys()) 140 | else: 141 | assrt_msg_begin = "The `data` parameter is an `xarray.Dataset`. " 142 | assert isinstance(bands, list), assrt_msg_begin + "Bands must be a list of strings." 143 | assert len(bands) > 0 and isinstance(bands[0], str), assrt_msg_begin + "You must supply at least one band." 
144 | height, width = data.dims[y_coord], data.dims[x_coord] 145 | count, dtype = len(bands), data[bands[0]].dtype 146 | with rasterio.open( 147 | tif_path, 148 | 'w', 149 | driver='GTiff', 150 | height=height, 151 | width=width, 152 | count=count, 153 | dtype=dtype, 154 | crs=crs, 155 | transform=_get_transform_from_xr(data, x_coord=x_coord, y_coord=y_coord), 156 | nodata=no_data) as dst: 157 | if isinstance(data, xr.DataArray): 158 | dst.write(data.values, 1) 159 | else: 160 | for index, band in enumerate(bands): 161 | dst.write(data[band].values.astype(dtype), index + 1) 162 | dst.close() 163 | 164 | ## End export ## -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_blue.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_green.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_red.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir2.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c1_l2_to_LANDSAT_5_c2_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_blue.joblib 
-------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_green.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_red.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir2.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_5_c2_l2_to_LANDSAT_5_c1_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_blue.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_green.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_red.joblib: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir2.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c1_l2_to_LANDSAT_7_c2_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_blue.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_green.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_red.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir2.joblib: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_7_c2_l2_to_LANDSAT_7_c1_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_blue.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_green.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_red.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir2.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c1_l2_to_LANDSAT_8_c2_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_blue.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_blue.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_green.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_green.joblib 
-------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_nir.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_nir.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_red.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_red.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir1.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir1.joblib -------------------------------------------------------------------------------- /data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir2.joblib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/models/LANDSAT_8_c2_l2_to_LANDSAT_8_c1_l2_swir2.joblib -------------------------------------------------------------------------------- /data_cube_utilities/plotter_utils_consts.py: -------------------------------------------------------------------------------- 1 | # The number of points to use in smooth curve fits. 2 | n_pts_smooth = 2000 3 | default_fourier_n_harm = 10 -------------------------------------------------------------------------------- /data_cube_utilities/scale.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | 4 | def xr_scale(data, data_vars=None, min_max=None, scaling='norm', copy=False): 5 | """ 6 | Scales an xarray Dataset or DataArray with standard scaling or norm scaling. 7 | 8 | Parameters 9 | ---------- 10 | data: xarray.Dataset or xarray.DataArray 11 | The Dataset or DataArray to scale. 12 | data_vars: list 13 | The names of the data variables to scale. If None, all data variables are scaled. 14 | min_max: tuple 15 | A 2-tuple which specifies the desired range of the final output - the minimum and the maximum, in that order. 16 | If all values are the same, all values will become min_max[0]. 17 | scaling: str 18 | The options are ['std', 'norm']. 19 | The option 'std' standardizes. The option 'norm' normalizes (min-max scales). 20 | copy: bool 21 | Whether or not to copy `data` before scaling.
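Examples -------- A minimal usage sketch; the dataset `ds` and its 'red' data variable are hypothetical: >>> scaled = xr_scale(ds, data_vars=['red'], min_max=(0, 1), scaling='norm', copy=True)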
22 | """ 23 | data = data.copy() if copy else data 24 | if isinstance(data, xr.Dataset): 25 | data_arr_names = list(data.data_vars) if data_vars is None else data_vars 26 | for data_arr_name in data_arr_names: 27 | data_arr = data[data_arr_name] 28 | data_arr.values = np_scale(data_arr.values, min_max=min_max, scaling=scaling) 29 | elif isinstance(data, xr.DataArray): 30 | data.values = np_scale(data.values, min_max=min_max, scaling=scaling) 31 | return data 32 | 33 | 34 | def np_scale(arr, pop_arr=None, pop_min_max=None, pop_mean_std=None, min_max=None, scaling='norm'): 35 | """ 36 | Scales a NumPy array with standard scaling or norm scaling, default to norm scaling. 37 | 38 | Parameters 39 | ---------- 40 | arr: numpy.ndarray 41 | The NumPy array to scale. 42 | pop_arr: numpy.ndarray, optional 43 | The NumPy array to treat as the population. 44 | If specified, all members of `arr` must be within the range of `pop_arr` 45 | or `min_max` must be specified. 46 | pop_min_max: list-like, optional 47 | The population minimum and maximum, in that order. 48 | Supercedes `pop_arr` when normalizing. 49 | pop_mean_std: list-like, optional 50 | The population mean and standard deviation, in that order. 51 | Supercedes `pop_arr` when standard scaling. 52 | min_max: list-like, optional 53 | The desired minimum and maximum of the final output, in that order. 54 | If all values are the same, all values will become `min_max[0]`. 55 | scaling: str, optional 56 | The options are ['std', 'norm']. 57 | The option 'std' standardizes. The option 'norm' normalizes (min-max scales). 58 | """ 59 | if len(arr) == 0: 60 | return arr 61 | pop_arr = arr if pop_arr is None else pop_arr 62 | if scaling == 'norm': 63 | pop_min, pop_max = (pop_min_max[0], pop_min_max[1]) if pop_min_max is not None \ 64 | else (np.nanmin(pop_arr), np.nanmax(pop_arr)) 65 | numerator, denominator = arr - pop_min, pop_max - pop_min 66 | elif scaling == 'std': 67 | mean, std = pop_mean_std if pop_mean_std is not None else (np.nanmean(pop_arr), np.nanstd(pop_arr)) 68 | numerator, denominator = arr - mean, std 69 | # Primary scaling 70 | new_arr = arr 71 | if denominator > 0: 72 | new_arr = numerator / denominator 73 | # Optional final scaling. 74 | if min_max is not None: 75 | if denominator > 0: 76 | new_arr = np.interp(new_arr, (np.nanmin(new_arr), np.nanmax(new_arr)), min_max) 77 | else: # The values are identical - set all values to the low end of the desired range. 78 | new_arr = np.full_like(new_arr, min_max[0]) 79 | return new_arr -------------------------------------------------------------------------------- /data_cube_utilities/shapefile_mask.py: -------------------------------------------------------------------------------- 1 | import fiona 2 | import xarray as xr 3 | import numpy as np 4 | 5 | from rasterio.features import geometry_mask 6 | import shapely 7 | from shapely.ops import transform 8 | from shapely.geometry import shape 9 | from functools import partial 10 | 11 | def get_y_x_bounds_shapefile(shapefile): 12 | """ 13 | Returns the y/x bounds of a shapefile. 14 | 15 | Parameters 16 | ---------- 17 | shapefile: string 18 | The shapefile to be used. 19 | 20 | Returns 21 | ------- 22 | y, x: list 23 | The y and x bounds of the shapefile. 
24 | """ 25 | with fiona.open(shapefile, 'r') as src: 26 | # create a shapely geometry 27 | # this is done for the convenience for the .bounds property only 28 | shp_geom = shape(src[0]['geometry']) 29 | 30 | # get the bounding box of the shapefile geometry 31 | y, x = [[None] * 2, [None] * 2] 32 | x[0], y[0] = shp_geom.bounds[0:2] 33 | x[1], y[1] = shp_geom.bounds[2:4] 34 | return y, x 35 | 36 | def shapefile_mask(dataset: xr.Dataset, shapefile) -> np.ndarray: 37 | """ 38 | Extracts a mask from an `xarray.Dataset` using a shapefile and the latitude and longitude extents of the xarray. 39 | 40 | Parameters 41 | ---------- 42 | dataset: xarray.Dataset 43 | The dataset with latitude and longitude extents. 44 | shapefile: string 45 | The path to the shapefile to be used. 46 | 47 | Returns 48 | ------- 49 | out: np.ndarray 50 | A boolean mask array. 51 | """ 52 | import pyproj 53 | 54 | with fiona.open(shapefile, 'r') as src: 55 | collection = list(src) 56 | geometries = [] 57 | for feature in collection: 58 | geom = shape(feature['geometry']) 59 | project = partial( 60 | pyproj.transform, 61 | pyproj.Proj(init=src.crs['init']), # source crs 62 | pyproj.Proj(init='epsg:4326')) # destination crs 63 | geom = transform(project, geom) # apply projection 64 | geometries.append(geom) 65 | geobox = dataset.geobox 66 | mask = geometry_mask( 67 | geometries, 68 | out_shape=geobox.shape, 69 | transform=geobox.affine, 70 | all_touched=True, 71 | invert=True) 72 | return mask 73 | -------------------------------------------------------------------------------- /data_cube_utilities/sort.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def xarray_sortby_coord(dataset, coord): 4 | """ 5 | Sort an xarray.Dataset by a coordinate. xarray.Dataset.sortby() sometimes fails, so this is an alternative. 6 | Credit to https://stackoverflow.com/a/42600594/5449970. 
7 | """ 8 | return dataset.loc[{coord:np.sort(dataset.coords[coord].values)}] -------------------------------------------------------------------------------- /data_cube_utilities/transect/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/transect/__init__.py -------------------------------------------------------------------------------- /data_cube_utilities/transect/__pycache__/__init__.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/transect/__pycache__/__init__.cpython-35.pyc -------------------------------------------------------------------------------- /data_cube_utilities/transect/__pycache__/line_scan.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/transect/__pycache__/line_scan.cpython-35.pyc -------------------------------------------------------------------------------- /data_cube_utilities/transect/interpolate.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from itertools import islice 3 | 4 | nan = np.nan 5 | 6 | def window(seq, n=2): 7 | "Returns a sliding window (of width n) over data from the iterable" 8 | " s -> (s0,s1,...s[n-1]), (s1,s2,...,sn), ... " 9 | it = iter(seq) 10 | result = tuple(islice(it, n)) 11 | if len(result) == n: 12 | yield result 13 | for elem in it: 14 | result = result[1:] + (elem,) 15 | yield result 16 | 17 | def hex_to_rgb(rgbstr): 18 | rgbstr= rgbstr.replace('#','') 19 | hex_prefix = '0x' 20 | 21 | r = hex_prefix + rgbstr[:2] 22 | g = hex_prefix + rgbstr[2:4] 23 | b = hex_prefix + rgbstr[4:] 24 | 25 | return np.array([int(r, 16), 26 | int(g, 16), 27 | int(b, 16)]) 28 | 29 | def _bin_and_index(value, size): 30 | '''Takes two arguments. value and size. value is a float between 0 and 1, size is the number of bins into which 31 | we divide the range 0 and 1. An index is returned denoting which of these bins value falls into 32 | ''' 33 | for i in range(size): 34 | if value > i/size and value <= (i + 1)/size: 35 | return i 36 | return 0 37 | 38 | def get_gradient(colors, value): 39 | ''' make sure the value is between 0 and 1. If the value is between 0 and 1, you will get interpolated values in between. 40 | This displays gradients with quadruple digit precision 41 | ''' 42 | 43 | if np.isnan(value): 44 | return np.array([nan,nan,nan]) 45 | 46 | colors = [np.array(hex_to_rgb(color)) for color in colors] 47 | color_pairs = list(window(colors)) 48 | 49 | size = len(color_pairs) 50 | index = _bin_and_index(value,size) 51 | color1,color2 = color_pairs[index] 52 | 53 | direction = (color2 - color1).astype(float) 54 | 55 | v = value * size - index 56 | return (v * direction) + color1 57 | 58 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/line_scan.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def _reformat(xs, ys): 4 | '''Zips a list of xs and a list of ys. Converts to np.array. 
5 | return np.array(list(zip(xs, ys))).astype(int) 6 | 7 | def line_scan(c1, c2): 8 | ''' 9 | Accepts two integer coordinate pairs, c1 and c2. 10 | Returns a list of integer coordinate pairs representing all points on the line drawn between c1 and c2. 11 | ''' 12 | 13 | c1 = np.array(c1) 14 | c2 = np.array(c2) 15 | x_direction = int( 2 * (int(c1[0] < c2[0]) - .5)) 16 | y_direction = int( 2 * (int(c1[1] < c2[1]) - .5)) 17 | 18 | if c1[0] == c2[0]: 19 | range_of_ys = list(range(c1[1], c2[1] + 1, y_direction)) 20 | range_of_xs = [c1[0] for x in range_of_ys] 21 | return _reformat(range_of_xs, range_of_ys) 22 | 23 | if c1[1] == c2[1]: 24 | range_of_xs = list(range(c1[0], c2[0] + 1, x_direction)) 25 | range_of_ys = [c1[1] for x in range_of_xs] 26 | return _reformat(range_of_xs, range_of_ys) 27 | 28 | dy = c2[1] - c1[1] 29 | dx = c2[0] - c1[0] 30 | 31 | m = dy/dx 32 | _y = c1[1] 33 | _x = c1[0] 34 | 35 | sign = 1 if m > 0 else -1 36 | 37 | if abs(m) >= 1: 38 | 39 | range_of_ys = list(range(c1[1], c2[1] + sign, sign*x_direction)) 40 | range_of_xs = [ (((y-_y)/m) + _x)//1 for y in range_of_ys] 41 | return _reformat(range_of_xs, range_of_ys) 42 | 43 | elif abs(m) < 1: 44 | 45 | range_of_xs = list(range(c1[0], c2[0] + 1, x_direction)) 46 | range_of_ys = [ (m * (x-_x))//1 + _y for x in range_of_xs] 47 | 48 | return _reformat(range_of_xs, range_of_ys) -------------------------------------------------------------------------------- /data_cube_utilities/transect/ordered_set.py: -------------------------------------------------------------------------------- 1 | from collections.abc import MutableSet 2 | 3 | class OrderedSet(MutableSet): 4 | 5 | def __init__(self, iterable=None): 6 | self.end = end = [] 7 | end += [None, end, end] # sentinel node for doubly linked list 8 | self.map = {} # key --> [key, prev, next] 9 | if iterable is not None: 10 | self |= iterable 11 | 12 | def __len__(self): 13 | return len(self.map) 14 | 15 | def __contains__(self, key): 16 | return key in self.map 17 | 18 | def add(self, key): 19 | if key not in self.map: 20 | end = self.end 21 | curr = end[1] 22 | curr[2] = end[1] = self.map[key] = [key, curr, end] 23 | 24 | def discard(self, key): 25 | if key in self.map: 26 | key, prev, next = self.map.pop(key) 27 | prev[2] = next 28 | next[1] = prev 29 | 30 | def __iter__(self): 31 | end = self.end 32 | curr = end[2] 33 | while curr is not end: 34 | yield curr[0] 35 | curr = curr[2] 36 | 37 | def __reversed__(self): 38 | end = self.end 39 | curr = end[1] 40 | while curr is not end: 41 | yield curr[0] 42 | curr = curr[1] 43 | 44 | def pop(self, last=True): 45 | if not self: 46 | raise KeyError('set is empty') 47 | key = self.end[1][0] if last else self.end[2][0] 48 | self.discard(key) 49 | return key 50 | 51 | def __repr__(self): 52 | if not self: 53 | return '%s()' % (self.__class__.__name__,) 54 | return '%s(%r)' % (self.__class__.__name__, list(self)) 55 | 56 | def __eq__(self, other): 57 | if isinstance(other, OrderedSet): 58 | return len(self) == len(other) and list(self) == list(other) 59 | return set(self) == set(other) 60 | 61 | 62 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/tests/.ipynb_checkpoints/Pytest+exectution-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 3, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [
12 | "Requirement already satisfied: pytest in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages\n", 13 | "Requirement already satisfied: six>=1.10.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 14 | "Requirement already satisfied: attrs>=17.2.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 15 | "Requirement already satisfied: py>=1.5.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 16 | "Requirement already satisfied: setuptools in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 17 | "Requirement already satisfied: pluggy<0.7,>=0.5 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n" 18 | ] 19 | } 20 | ], 21 | "source": [ 22 | "!pip install pytest" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 4, 28 | "metadata": {}, 29 | "outputs": [ 30 | { 31 | "name": "stdout", 32 | "output_type": "stream", 33 | "text": [ 34 | "\u001b[1m============================= test session starts ==============================\u001b[0m\n", 35 | "platform linux -- Python 3.4.3, pytest-3.3.2, py-1.5.2, pluggy-0.6.0\n", 36 | "rootdir: /home/local/AMA-INC/owagner/work/data_cube_notebooks_transect/utils/data_cube_utilities/transect/tests, inifile:\n", 37 | "collected 24 items \u001b[0m\u001b[1m\u001b[1m\n", 38 | "\n", 39 | "test_interpolate.py ........\u001b[36m [ 33%]\u001b[0m\n", 40 | "test_linescan.py ................\u001b[36m [100%]\u001b[0m\n", 41 | "\n", 42 | "\u001b[1m\u001b[32m========================== 24 passed in 0.11 seconds ===========================\u001b[0m\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "!pytest" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [] 56 | } 57 | ], 58 | "metadata": { 59 | "kernelspec": { 60 | "display_name": "Python 3", 61 | "language": "python", 62 | "name": "python3" 63 | }, 64 | "language_info": { 65 | "codemirror_mode": { 66 | "name": "ipython", 67 | "version": 3 68 | }, 69 | "file_extension": ".py", 70 | "mimetype": "text/x-python", 71 | "name": "python", 72 | "nbconvert_exporter": "python", 73 | "pygments_lexer": "ipython3", 74 | "version": "3.4.3" 75 | } 76 | }, 77 | "nbformat": 4, 78 | "nbformat_minor": 2 79 | } 80 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/tests/Pytest+exectution.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 3, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "Requirement already satisfied: pytest in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages\n", 13 | "Requirement already satisfied: six>=1.10.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 14 | "Requirement already satisfied: attrs>=17.2.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 15 | "Requirement already satisfied: py>=1.5.0 in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 16 | "Requirement already satisfied: setuptools in /home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n", 17 | "Requirement already satisfied: pluggy<0.7,>=0.5 in 
/home/localuser/Datacube/datacube_env/lib/python3.4/site-packages (from pytest)\n" 18 | ] 19 | } 20 | ], 21 | "source": [ 22 | "!pip install pytest" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 4, 28 | "metadata": {}, 29 | "outputs": [ 30 | { 31 | "name": "stdout", 32 | "output_type": "stream", 33 | "text": [ 34 | "\u001b[1m============================= test session starts ==============================\u001b[0m\n", 35 | "platform linux -- Python 3.4.3, pytest-3.3.2, py-1.5.2, pluggy-0.6.0\n", 36 | "rootdir: /home/local/AMA-INC/owagner/work/data_cube_notebooks_transect/utils/data_cube_utilities/transect/tests, inifile:\n", 37 | "collected 24 items \u001b[0m\u001b[1m\u001b[1m\n", 38 | "\n", 39 | "test_interpolate.py ........\u001b[36m [ 33%]\u001b[0m\n", 40 | "test_linescan.py ................\u001b[36m [100%]\u001b[0m\n", 41 | "\n", 42 | "\u001b[1m\u001b[32m========================== 24 passed in 0.11 seconds ===========================\u001b[0m\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "!pytest" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [] 56 | } 57 | ], 58 | "metadata": { 59 | "kernelspec": { 60 | "display_name": "Python 3", 61 | "language": "python", 62 | "name": "python3" 63 | }, 64 | "language_info": { 65 | "codemirror_mode": { 66 | "name": "ipython", 67 | "version": 3 68 | }, 69 | "file_extension": ".py", 70 | "mimetype": "text/x-python", 71 | "name": "python", 72 | "nbconvert_exporter": "python", 73 | "pygments_lexer": "ipython3", 74 | "version": "3.4.3" 75 | } 76 | }, 77 | "nbformat": 4, 78 | "nbformat_minor": 2 79 | } 80 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/tests/test_interpolate.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | import sys 4 | sys.path.append('../') 5 | 6 | from interpolate import get_gradient, _bin_and_index 7 | 8 | three_colors = ['#ffffff', '#000000', '#ff0000'] 9 | two_colors = ['#ffffff', '#000000'] 10 | 11 | 12 | equal = np.testing.assert_array_equal 13 | close_enough = np.testing.assert_allclose 14 | 15 | def test_bin_lower(): 16 | value = 0.3 17 | size = 2 18 | params = (value, size) 19 | expected_answer = 0 20 | equal(expected_answer, _bin_and_index(*params)) 21 | 22 | def test_bin_higher(): 23 | value = 0.9 24 | size = 2 25 | params = (value, size) 26 | expected_answer = 1 27 | equal(expected_answer, _bin_and_index(*params)) 28 | 29 | ## test__ 30 | def test_3_half(): 31 | value = 0.5 32 | params = (three_colors, value) 33 | expected_answer = np.array([0, 0, 0]) 34 | 35 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 36 | 37 | def test_3_quarter(): 38 | value = 0.25 39 | params = (three_colors, value) 40 | expected_answer = np.array([127.5, 127.5, 127.5]) 41 | 42 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 43 | 44 | def test_3_3quarter(): 45 | value = 0.75 46 | params = (three_colors, value) 47 | expected_answer = np.array([127.5, 0, 0]) 48 | 49 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 50 | 51 | 52 | def test_2_half(): 53 | value = 0.5 54 | params = (two_colors, value) 55 | expected_answer = np.array([127.5, 127.5, 127.5]) 56 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 57 | 58 | def test_2_quarter(): 59 | value = 0.25 60 | params = (two_colors, value) 61 | expected_answer = np.array([191.25,191.25,191.25]) 
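# A value of 0.25 lies a quarter of the way from white (255) to black (0), so each channel is 255 * 0.75 = 191.25.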
62 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 63 | 64 | def test_2_3quarter(): 65 | value = 0.75 66 | params = (two_colors, value) 67 | expected_answer = np.array([63.75,63.75,63.75]) 68 | close_enough( expected_answer, get_gradient(*params),atol = 1 ) 69 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/tests/test_linescan.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | ## requires pytest 4 | 5 | import sys 6 | 7 | sys.path.append('../') 8 | from line_scan import line_scan 9 | equal = np.testing.assert_array_equal 10 | 11 | ''' 12 | A line can go from... 13 | 14 | lr - left to right 15 | rl - right to left 16 | 17 | ud - up to down 18 | du - down to up 19 | 20 | mg - slope greater than 1 21 | ml - slope less than 1 22 | me - slope equal to 1 23 | 24 | Or it can be completely... 25 | 26 | h, horizontal 27 | v, vertical 28 | 29 | The following tests account for all possible combinations of orientation and slope. 30 | ''' 31 | 32 | ############## lr_ud 33 | def test_lr_ud_mg(): 34 | a, b = [np.array([1, 10]), np.array([4, 2])] 35 | expected_answer = [[1, 10], [1, 9], [1, 8], [2, 7], [2, 6], [2, 5], [3, 4], [3, 3], [4, 2]] 36 | 37 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 38 | 39 | 40 | def test_lr_ud_ml(): 41 | a, b = np.array([1, 10]), np.array([10, 5]) 42 | expected_answer = [[1, 10], [2, 9], [3, 8], [4, 8], [5, 7], [6, 7], [7, 6], [8, 6], [9, 5], [10, 5]] 43 | 44 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 45 | 46 | 47 | def test_lr_ud_me(): 48 | a, b = np.array([1, 10]), np.array([10, 1]) 49 | expected_answer = [[1, 10], [2, 9], [3, 8], [4, 7], [5, 6], [6, 5], [7, 4], [8, 3], [9, 2], [10, 1]] 50 | 51 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 52 | 53 | 54 | ############## lr_du 55 | def test_lr_du_mg(): 56 | a, b = [np.array([1, 2]), np.array([4, 10])] 57 | expected_answer = [[1, 2], [1, 3], [1, 4], [2, 5], [2, 6], [2, 7], [3, 8], [3, 9], [4, 10]] 58 | 59 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 60 | 61 | 62 | def test_lr_du_ml(): 63 | a, b = [np.array([1, 5]), np.array([10, 10])] 64 | expected_answer = [[1, 5], [2, 5], [3, 6], [4, 6], [5, 7], [6, 7], [7, 8], [8, 8], [9, 9], [10, 10]] 65 | 66 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 67 | 68 | 69 | def test_lr_du_me(): 70 | a, b = [np.array([1, 1]), np.array([10, 10])] 71 | expected_answer = [[1, 1], [2, 2], [3, 3], [4, 4], [5, 5], [6, 6], [7, 7], [8, 8], [9, 9], [10, 10]] 72 | 73 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 74 | 75 | 76 | ############## rl_ud 77 | def test_rl_ud_mg(): 78 | a, b = [np.array([4, 10]), np.array([1, 2])] 79 | expected_answer = [[4, 10], [3, 9], [3, 8], [2, 7], [2, 6], [2, 5], [1, 4]] 80 | 81 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 82 | 83 | 84 | def test_rl_ud_ml(): 85 | a, b = [np.array([8, 10]), np.array([1, 5])] 86 | expected_answer = [[8, 10], [7, 9], [6, 8], [5, 7], [4, 7], [3, 6]] 87 | 88 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 89 | 90 | 91 | def test_rl_ud_me(): 92 | a, b = [np.array([8, 10]), np.array([4, 5])] 93 | expected_answer = [[8, 10], [7, 9], [6, 8], [5, 7]] 94 | 95 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 96 | 97 | 98 | ############## rl_du 99 | def test_rl_du_mg(): 100 | a, b = [np.array([4, 2]), np.array([1, 10])] 101 | expected_answer = [[4, 2], [3, 3],
[3, 4], [2, 5], [2, 6], [2, 7], [1, 8]] 102 | 103 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 104 | 105 | 106 | def test_rl_du_ml(): # slope = -4/9, magnitude less than 1 107 | a, b = [np.array([10, 2]), np.array([1, 6])] 108 | expected_answer = [[10, 2], [9, 2], [8, 2], [7, 3], [6, 3], [5, 4], [4, 4], [3, 5]] 109 | 110 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 111 | 112 | 113 | def test_rl_du_me(): 114 | a, b = [np.array([11, 5]), np.array([1, 15])] 115 | expected_answer = [[11, 5], [10, 6], [9, 7], [8, 8], [7, 9], [6, 10], [5, 11], [4, 12], [3, 13]] 116 | 117 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 118 | 119 | 120 | ############### h 121 | 122 | 123 | def test_rl_h(): 124 | a, b = [np.array([10, 4]), np.array([2, 4])] 125 | expected_answer = [[10, 4], [9, 4], [8, 4], [7, 4], [6, 4], [5, 4], [4, 4]] 126 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 127 | 128 | 129 | def test_lr_h(): 130 | a, b = [np.array([2, 4]), np.array([10, 4])] 131 | expected_answer = [[2, 4], [3, 4], [4, 4], [5, 4], [6, 4], [7, 4], [8, 4], [9, 4], [10, 4]] 132 | 133 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 134 | 135 | 136 | ############### v 137 | def test_v_du(): 138 | a, b = [np.array([10, 2]), np.array([10, 8])] 139 | expected_answer = [[10, 2], [10, 3], [10, 4], [10, 5], [10, 6], [10, 7], [10, 8]] 140 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 141 | 142 | 143 | def test_v_ud(): 144 | a, b = [np.array([10, 10]), np.array([10, 2])] 145 | expected_answer = [[10, 10], [10, 9], [10, 8], [10, 7], [10, 6], [10, 5], [10, 4]] 146 | equal(np.array(line_scan(a, b)), np.array(expected_answer)) 147 | 148 | 149 | ################################################################################ 150 | 151 | points = {} 152 | points['lrudmg'] = [np.array([1,10]), np.array([4,2])] 153 | points['lrudml'] = [np.array([1,10]), np.array([10,5])] 154 | points['lrudme'] = [np.array([1,10]), np.array([10,1])] 155 | 156 | points['lrdumg'] = [np.array([1,2]), np.array([4,10])] 157 | points['lrduml'] = [np.array([1,5]), np.array([10,10])] 158 | points['lrdume'] = [np.array([1,1]), np.array([10,10])] 159 | 160 | 161 | points['rludmg'] = [np.array([4,10]), np.array([1,2])] 162 | points['rludml'] = [np.array([8,10]), np.array([1,5])] 163 | points['rludme'] = [np.array([8,10]), np.array([4,5])] 164 | 165 | points['rldumg'] = [np.array([4,2]), np.array([1,10])] 166 | points['rlduml'] = [np.array([10,2]), np.array([1,6])] 167 | points['rldume'] = [np.array([11,5]), np.array([1,15])] 168 | 169 | 170 | points['h_lr'] = [np.array([2,4]), np.array([10,4])] 171 | points['h_rl'] = [np.array([10,4]), np.array([2,4])] 172 | 173 | points['v_du'] = [np.array([10,2]), np.array([10,8])] 174 | points['v_ud'] = [np.array([10,10]), np.array([10,2])] 175 | 176 | -------------------------------------------------------------------------------- /data_cube_utilities/transect/xarraypath.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .line_scan import line_scan 3 | def get_index_at(coords, ds): 4 | lat = coords[0] 5 | lon = coords[1] 6 | 7 | nearest_lat = ds.sel(latitude = lat, method = 'nearest').latitude.values 8 | nearest_lon = ds.sel(longitude = lon, method = 'nearest').longitude.values 9 | 10 | lat_index = np.where(ds.latitude.values == nearest_lat)[0] 11 | lon_index = np.where(ds.longitude.values == nearest_lon)[0] 12 | 13 | return (int(lat_index), int(lon_index)) 14 | 15 | def create_pixel_trail(start, end, ds): 16 | a = 
get_index_at(start, ds) 17 | b = get_index_at(end, ds) 18 | 19 | indices = line_scan(a, b) 20 | 21 | pixels = [ds.isel(latitude = x, longitude = y) for x, y in indices] 22 | return pixels 23 | 24 | 25 | -------------------------------------------------------------------------------- /data_cube_utilities/trend.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | 5 | 6 | def __where_not_nan(arr: np.ndarray): 7 | """Finds positions of non-NaN values in an nd-array. 8 | 9 | Args: 10 | arr (numpy.ndarray): nd-array with NaN values 11 | 12 | Returns: 13 | indices (tuple of numpy.ndarray): the indices of finite (non-NaN) values 14 | """ 15 | return np.where(np.isfinite(arr)) 16 | 17 | 18 | def __flatten_shallow(arr): 19 | """Flattens the first two axes of an nd-array. 20 | Args: 21 | arr (numpy.ndarray): nd-array with dimensions (n, m) 22 | 23 | Returns: 24 | arr (numpy.ndarray): nd-array with dimensions (n*m) 25 | """ 26 | # TODO: replace with a numpy-native flattening such as arr.ravel(). 27 | return arr.reshape(arr.shape[0] * arr.shape[1]) 28 | 29 | 30 | def __linear_fit(da: xr.DataArray): 31 | """Applies linear regression on a 1-D xr.DataArray. 32 | 33 | Args: 34 | da (xr.DataArray): 1-D Data-Array being manipulated. 35 | 36 | Returns: 37 | data (xr.DataArray): 0-d DataArray containing the slope of the regression. 38 | """ 39 | 40 | xs = np.array(list(range(len(da.time)))) 41 | ys = __flatten_shallow(da.values) 42 | 43 | not_nan = __where_not_nan(ys)[0].astype(int) 44 | 45 | xs = xs[not_nan] 46 | ys = ys[not_nan] 47 | 48 | pf = np.polyfit(xs, ys, 1) 49 | return xr.DataArray(pf[0]) 50 | 51 | 52 | def linear(da: xr.DataArray): 53 | """Reduces an xarray along the time component. The reduction yields a slope for each spatial coordinate in the xarray. 54 | 55 | Args: 56 | da (xr.DataArray): 3-D Data-Array being manipulated. `latitude` and `longitude` are required dimensions. 57 | 58 | Returns: 59 | linear_trend_product (xr.DataArray): 2-D Data-Array 60 | """ 61 | 62 | # TODO: Decouple from the coordinate system, and allow regression along multiple components. 63 | stacked = da.stack(allpoints = ['latitude', 64 | 'longitude']) 65 | 66 | trend = stacked.groupby('allpoints').apply(__linear_fit) 67 | 68 | unstacked = trend.unstack('allpoints') 69 | 70 | return unstacked.rename(dict(allpoints_level_0 = "latitude", 71 | allpoints_level_1 = "longitude")) -------------------------------------------------------------------------------- /data_cube_utilities/unique.py: -------------------------------------------------------------------------------- 1 | def dask_array_uniques(arr): 2 | """ 3 | Returns the unique values for a Dask Array object. 4 | 5 | Parameters 6 | ---------- 7 | arr: dask.array.core.Array 8 | 9 | Returns 10 | ------- 11 | uniques: numpy.ndarray 12 | """ 13 | import dask.dataframe 14 | 15 | return dask.dataframe.from_dask_array(arr.flatten())\ 16 | .drop_duplicates().to_dask_array(lengths=True).compute() -------------------------------------------------------------------------------- /data_cube_utilities/urbanization.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from .vegetation import NDVI 4 | 5 | def NDBI(ds): 6 | """ 7 | Computes the Normalized Difference Built-up Index for an `xarray.Dataset`. 8 | The formula is (SWIR1 - NIR) / (SWIR1 + NIR).
9 | Values should be in the range [-1,1] for valid LANDSAT data (swir1 and nir are positive). 10 | 11 | This is a spectral index for which high values often indicate urban areas. 12 | Note that DBSI often performs better in arid and semi-arid environments, since 13 | NDBI does not differentiate bare soil from urban areas well. 14 | 15 | Parameters 16 | ---------- 17 | ds: xarray.Dataset 18 | An `xarray.Dataset` that must contain 'swir1' and 'nir' `DataArrays`. 19 | 20 | Returns 21 | ------- 22 | ndbi: xarray.DataArray 23 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 24 | the same order. 25 | """ 26 | return (ds.swir1 - ds.nir) / (ds.swir1 + ds.nir) 27 | 28 | 29 | def DBSI(ds, normalize=True): 30 | """ 31 | Computes the Dry Bare-Soil Index as defined in the paper "Applying 32 | Built-Up and Bare-Soil Indices from Landsat 8 to Cities in Dry Climates". 33 | The formula is (SWIR1 - GREEN) / (SWIR1 + GREEN) - NDVI. 34 | If `normalize == False`, returned values should be in the range [-2,2]. 35 | 36 | This is a spectral index for which high values often indicate bare soil and 37 | low values often indicate urban areas. 38 | Note that DBSI often performs better in arid and semi-arid environments than NDBI, since 39 | it differentiates bare soil from urban areas better. 40 | 41 | Parameters 42 | ---------- 43 | ds: xarray.Dataset 44 | An `xarray.Dataset` that must contain 45 | 'swir1', 'green', 'nir', and 'red' `DataArrays`. 46 | normalize: boolean 47 | Whether to normalize to the range [-1,1] - the range of most common spectral indices. 48 | 49 | Returns 50 | ------- 51 | dbsi: xarray.DataArray 52 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 53 | the same order. 54 | """ 55 | dbsi = (ds.swir1 - ds.green) / (ds.swir1 + ds.green) - NDVI(ds) 56 | if normalize: 57 | dbsi.values = np.interp(dbsi.values, (-2, 2), (-1, 1)) 58 | return dbsi -------------------------------------------------------------------------------- /data_cube_utilities/vegetation.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def EVI(ds, G=2.5, C1=6, C2=7.5, L=1, normalize=True): 4 | """ 5 | Computes the 3-band Enhanced Vegetation Index for an `xarray.Dataset`. 6 | The formula is G * (NIR - RED) / (NIR + C1*RED - C2*BLUE + L). 7 | Usually, G = 2.5, C1 = 6, C2 = 7.5, and L = 1. 8 | For Landsat data, returned values should be in the range [-1,1] if `normalize == True`. 9 | If `normalize == False`, returned values should be in the range [-1,2.5]. 10 | 11 | EVI is superior to NDVI in accuracy because it is less dependent on the solar 12 | incidence angle, atmospheric conditions (e.g. particles and clouds), shadows, and 13 | soil appearance. 14 | 15 | Parameters 16 | ---------- 17 | ds: xarray.Dataset 18 | An `xarray.Dataset` that must contain 'nir', 'red', and 'blue' `DataArrays`. 19 | G, C1, C2, L: float 20 | G is the gain factor - a constant scaling factor. 21 | C1 and C2 pertain to aerosols in clouds. 22 | L adjusts for canopy background and soil appearance. It particularly pertains to 23 | the nir and red bands, which are transmitted non-linearly through a canopy. 24 | normalize: boolean 25 | Whether to normalize to the range [-1,1] - the range of most common spectral indices. 26 | 27 | Returns 28 | ------- 29 | evi: xarray.DataArray 30 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 31 | the same order.
32 | """ 33 | evi = G * (ds.nir - ds.red) / (ds.nir + C1 * ds.red - C2 * ds.blue + L) 34 | # Clamp values to the range [-1,2.5]. 35 | evi.values[evi.values < -1] = -1 36 | evi.values[2.5 < evi.values] = 2.5 37 | if normalize: 38 | # Scale values in the range [0,2.5] to the range [0,1]. 39 | pos_vals_mask = 0 < evi.values 40 | evi.values[pos_vals_mask] = np.interp(evi.values[pos_vals_mask], (0, 2.5), (0, 1)) 41 | return evi 42 | 43 | 44 | def EVI2(ds, G=2.5, C=2.4, L=1, normalize=True): 45 | """ 46 | Computes the 2-band Enhanced Vegetation Index for an `xarray.Dataset`. 47 | The formula is G*((NIR-RED)/(NIR+C*Red+L)). 48 | Usually, G = 2.5, C = 2.4, and L = 1. 49 | For Landsat data, returned values should be in the range [-1,1] if `normalize == True`. 50 | If `normalize == False`, returned values should be in the range [-1,2.5]. 51 | 52 | EVI2 does not require a blue band like EVI, which means less data is required to use it. 53 | Additionally, the blue band used in EVI can have a low signal-to-noise ratio 54 | in earth observation imagery. When atmospheric effects are insignificant (e.g. on clear days), 55 | EVI2 should closely match EVI. 56 | 57 | Parameters 58 | ---------- 59 | ds: xarray.Dataset 60 | An `xarray.Dataset` that must contain 'nir', and 'red' `DataArrays`. 61 | G, C, L: float 62 | G is the gain factor - a constant scaling factor. 63 | C pertains to aerosols in clouds. 64 | L adjusts for canopy background and soil appearance. It particularly pertains to 65 | the nir and red bands, which are transmitted non-linearly through a canopy. 66 | normalize: boolean 67 | Whether to normalize to the range [-1,1] - the range of most common spectral indices. 68 | 69 | Returns 70 | ------- 71 | evi: xarray.DataArray 72 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 73 | the same order. 74 | """ 75 | evi = G * (ds.nir - ds.red) / (ds.nir + C * ds.red + L) 76 | # Clamp values to the range [-1,2.5]. 77 | evi.values[evi.values < -1] = -1 78 | evi.values[2.5 < evi.values] = 2.5 79 | if normalize: 80 | # Scale values in the range [0,2.5] to the range [0,1]. 81 | pos_vals_mask = 0 < evi.values 82 | evi.values[pos_vals_mask] = np.interp(evi.values[pos_vals_mask], (0, 2.5), (0, 1)) 83 | return evi 84 | 85 | def NBR(ds, band_pair=0): 86 | """ 87 | Computes the Normalized Burn Ratio for an `xarray.Dataset`. 88 | The formula is (NIR - SWIR2) / (NIR + SWIR2). 89 | Values should be in the range [-1,1] for valid LANDSAT data (nir and swir2 are positive). 90 | 91 | Parameters 92 | ---------- 93 | ds: xarray.Dataset 94 | An `xarray.Dataset` that must contain 'nir' and 'swir2' `DataArrays`. 95 | 96 | Returns 97 | ------- 98 | nbr: xarray.DataArray 99 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 100 | the same order. 101 | """ 102 | bands = [None] * 2 103 | if band_pair == 0: 104 | bands = ['nir', 'swir2'] 105 | elif band_pair == 1: 106 | bands = ['swir1', 'swir2'] 107 | else: 108 | raise AssertionError('The band_pair parameter must be in [0,1]') 109 | 110 | return (ds[bands[0]] - ds[bands[1]]) / (ds[bands[0]] + ds[bands[1]]) 111 | 112 | def NDVI(ds): 113 | """ 114 | Computes the Normalized Difference Vegetation Index for an `xarray.Dataset`. 115 | The formula is (NIR - RED) / (NIR + RED). 116 | Values should be in the range [-1,1] for valid LANDSAT data (nir and red are positive). 117 | 118 | Parameters 119 | ---------- 120 | ds: xarray.Dataset 121 | An `xarray.Dataset` that must contain 'nir' and 'red' `DataArrays`. 
122 | 123 | Returns 124 | ------- 125 | ndvi: xarray.DataArray 126 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 127 | the same order. 128 | """ 129 | return (ds.nir - ds.red) / (ds.nir + ds.red) 130 | 131 | 132 | def SAVI(ds, L=0.5, normalize=True): 133 | """ 134 | Computes the Soil-Adjusted Vegetation Index for an `xarray.Dataset`. 135 | The formula is (NIR - RED) / (NIR + RED + L) * (1 + L). 136 | For Landsat data, returned values should be in the range [-1,1] if `normalize == True`. 137 | If `normalize == False`, returned values should be in the range [-1-L,1+L]. 138 | 139 | In areas where vegetative cover is low (i.e., < 40%) and the soil surface 140 | is exposed, the reflectance of light in the red and near-infrared spectra 141 | can influence vegetation index values. This is especially problematic when 142 | comparisons are being made across different soil types that may reflect different 143 | amounts of light in the red and near-infrared wavelengths (i.e., soils with 144 | different brightness values). The soil-adjusted vegetation index was developed 145 | as a modification of the Normalized Difference Vegetation Index to correct for 146 | the influence of soil brightness when vegetative cover is low. 147 | 148 | Parameters 149 | ---------- 150 | ds: xarray.Dataset 151 | An `xarray.Dataset` that must contain 'nir' and 'red' `DataArrays`. 152 | L: float 153 | L is the "soil brightness correction factor", which should be varied based 154 | on the greenness of vegetation in the scene. In very high vegetation regions, 155 | `L=0`. In areas with no green vegetation, `L=1`. Generally, `L=0.5` works well 156 | and is the default value. When `L=0`, `SAVI==NDVI`. 157 | normalize: boolean 158 | Whether to normalize to the range [-1,1] - the range of most common spectral indices. 159 | 160 | Returns 161 | ------- 162 | savi: xarray.DataArray 163 | An `xarray.DataArray` with the same shape as `ds` - the same coordinates in 164 | the same order.
165 | """ 166 | savi = (ds.nir - ds.red) / (ds.nir + ds.red + L) * (1 + L) 167 | if normalize: 168 | savi.values = np.interp(savi.values, (-1-L, 1+L), (-1, 1)) 169 | return savi -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/data/mini_lake.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/data/mini_lake.nc -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/arrow-down-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/arrow-down-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/arrow-left-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/arrow-left-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/arrow-right-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/arrow-right-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/arrow-up-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/arrow-up-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/rotate-left-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/rotate-left-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/rotate-right-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/rotate-right-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/zoom-in-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/zoom-in-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/imgs/zoom-out-icon.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/data_cube_utilities/voxel_visualizer/imgs/zoom-out-icon.png -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/BloomPass.js: -------------------------------------------------------------------------------- 1 | import { 2 | AdditiveBlending, 3 | LinearFilter, 4 | RGBAFormat, 5 | ShaderMaterial, 6 | UniformsUtils, 7 | Vector2, 8 | WebGLRenderTarget 9 | } from "../lib/threejs/three.module.js"; 10 | import { Pass } from "./Pass.js"; 11 | import { CopyShader } from "./CopyShader.js"; 12 | import { ConvolutionShader } from "./ConvolutionShader.js"; 13 | 14 | var BloomPass = function ( strength, kernelSize, sigma, resolution ) { 15 | 16 | Pass.call( this ); 17 | 18 | strength = ( strength !== undefined ) ? strength : 1; 19 | kernelSize = ( kernelSize !== undefined ) ? kernelSize : 25; 20 | sigma = ( sigma !== undefined ) ? sigma : 4.0; 21 | resolution = ( resolution !== undefined ) ?
resolution : 256; 22 | 23 | // render targets 24 | 25 | var pars = { minFilter: LinearFilter, magFilter: LinearFilter, format: RGBAFormat }; 26 | 27 | this.renderTargetX = new WebGLRenderTarget( resolution, resolution, pars ); 28 | this.renderTargetX.texture.name = "BloomPass.x"; 29 | this.renderTargetY = new WebGLRenderTarget( resolution, resolution, pars ); 30 | this.renderTargetY.texture.name = "BloomPass.y"; 31 | 32 | // copy material 33 | 34 | if ( CopyShader === undefined ) 35 | console.error( "BloomPass relies on CopyShader" ); 36 | 37 | var copyShader = CopyShader; 38 | 39 | this.copyUniforms = UniformsUtils.clone( copyShader.uniforms ); 40 | 41 | this.copyUniforms[ "opacity" ].value = strength; 42 | 43 | this.materialCopy = new ShaderMaterial( { 44 | 45 | uniforms: this.copyUniforms, 46 | vertexShader: copyShader.vertexShader, 47 | fragmentShader: copyShader.fragmentShader, 48 | blending: AdditiveBlending, 49 | transparent: true 50 | 51 | } ); 52 | 53 | // convolution material 54 | 55 | if ( ConvolutionShader === undefined ) 56 | console.error( "BloomPass relies on ConvolutionShader" ); 57 | 58 | var convolutionShader = ConvolutionShader; 59 | 60 | this.convolutionUniforms = UniformsUtils.clone( convolutionShader.uniforms ); 61 | 62 | this.convolutionUniforms[ "uImageIncrement" ].value = BloomPass.blurX; 63 | this.convolutionUniforms[ "cKernel" ].value = ConvolutionShader.buildKernel( sigma ); 64 | 65 | this.materialConvolution = new ShaderMaterial( { 66 | 67 | uniforms: this.convolutionUniforms, 68 | vertexShader: convolutionShader.vertexShader, 69 | fragmentShader: convolutionShader.fragmentShader, 70 | defines: { 71 | "KERNEL_SIZE_FLOAT": kernelSize.toFixed( 1 ), 72 | "KERNEL_SIZE_INT": kernelSize.toFixed( 0 ) 73 | } 74 | 75 | } ); 76 | 77 | this.needsSwap = false; 78 | 79 | this.fsQuad = new Pass.FullScreenQuad( null ); 80 | 81 | }; 82 | 83 | BloomPass.prototype = Object.assign( Object.create( Pass.prototype ), { 84 | 85 | constructor: BloomPass, 86 | 87 | render: function ( renderer, writeBuffer, readBuffer, deltaTime, maskActive ) { 88 | 89 | if ( maskActive ) renderer.state.buffers.stencil.setTest( false ); 90 | 91 | // Render quad with blurred scene into texture (convolution pass 1, horizontal via blurX) 92 | 93 | this.fsQuad.material = this.materialConvolution; 94 | 95 | this.convolutionUniforms[ "tDiffuse" ].value = readBuffer.texture; 96 | this.convolutionUniforms[ "uImageIncrement" ].value = BloomPass.blurX; 97 | 98 | renderer.setRenderTarget( this.renderTargetX ); 99 | renderer.clear(); 100 | this.fsQuad.render( renderer ); 101 | 102 | 103 | // Render quad with blurred scene into texture (convolution pass 2, vertical via blurY) 104 | 105 | this.convolutionUniforms[ "tDiffuse" ].value = this.renderTargetX.texture; 106 | this.convolutionUniforms[ "uImageIncrement" ].value = BloomPass.blurY; 107 | 108 | renderer.setRenderTarget( this.renderTargetY ); 109 | renderer.clear(); 110 | this.fsQuad.render( renderer ); 111 | 112 | // Render original scene with superimposed blur to texture 113 | 114 | this.fsQuad.material = this.materialCopy; 115 | 116 | this.copyUniforms[ "tDiffuse" ].value = this.renderTargetY.texture; 117 | 118 | if ( maskActive ) renderer.state.buffers.stencil.setTest( true ); 119 | 120 | renderer.setRenderTarget( readBuffer ); 121 | if ( this.clear ) renderer.clear(); 122 | this.fsQuad.render( renderer ); 123 | 124 | } 125 | 126 | } ); 127 | 128 | BloomPass.blurX = new Vector2( 0.001953125, 0.0 ); 129 | BloomPass.blurY = new Vector2( 0.0, 0.001953125 ); 130 | 131 | export { BloomPass };
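BloomPass above implements its Gaussian blur separably: one convolution pass steps with the horizontal increment BloomPass.blurX into renderTargetX, a second pass steps with the vertical increment BloomPass.blurY into renderTargetY, and the result is additively composited back over the scene, cutting the per-pixel sample count from k*k to 2k for a kernel of width k. The 1-D kernel itself comes from ConvolutionShader.buildKernel( sigma ) in the file that follows. Below is a minimal NumPy sketch of that kernel construction and the two-pass blur; it is an illustration only, not part of this repository, and its edge handling (np.convolve with mode='same') differs from the shader's texture sampling at the borders.

import numpy as np

def build_kernel(sigma, max_kernel_size=25):
    # Mirrors ConvolutionShader.buildKernel: the 1/(sqrt(2*pi)*sigma)
    # factor of the Gaussian is dropped because the kernel is
    # normalized to sum to 1 afterward anyway.
    kernel_size = min(2 * int(np.ceil(sigma * 3.0)) + 1, max_kernel_size)
    half_width = (kernel_size - 1) * 0.5
    xs = np.arange(kernel_size) - half_width
    values = np.exp(-(xs * xs) / (2.0 * sigma * sigma))
    return values / values.sum()

def separable_blur(img, sigma=4.0):
    # Two 1-D passes (rows, then columns) replace one 2-D convolution,
    # just as BloomPass renders once with blurX and once with blurY.
    kernel = build_kernel(sigma)
    rows = np.apply_along_axis(lambda r: np.convolve(r, kernel, mode='same'), 1, img)
    return np.apply_along_axis(lambda c: np.convolve(c, kernel, mode='same'), 0, rows)

With BloomPass's default sigma of 4.0, 2 * ceil(3 * sigma) + 1 evaluates to 25, which is exactly the cap above and the shader's default KERNEL_SIZE_INT.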
-------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/ConvolutionShader.js: -------------------------------------------------------------------------------- 1 | import { 2 | Vector2 3 | } from "../lib/threejs/three.module.js"; 4 | 5 | /** 6 | * Convolution shader 7 | * ported from o3d sample to WebGL / GLSL 8 | * http://o3d.googlecode.com/svn/trunk/samples/convolution.html 9 | */ 10 | 11 | var ConvolutionShader = { 12 | 13 | defines: { 14 | 15 | "KERNEL_SIZE_FLOAT": "25.0", 16 | "KERNEL_SIZE_INT": "25" 17 | 18 | }, 19 | 20 | uniforms: { 21 | 22 | "tDiffuse": { value: null }, 23 | "uImageIncrement": { value: new Vector2( 0.001953125, 0.0 ) }, 24 | "cKernel": { value: [] } 25 | 26 | }, 27 | 28 | vertexShader: [ 29 | 30 | "uniform vec2 uImageIncrement;", 31 | 32 | "varying vec2 vUv;", 33 | 34 | "void main() {", 35 | 36 | " vUv = uv - ( ( KERNEL_SIZE_FLOAT - 1.0 ) / 2.0 ) * uImageIncrement;", 37 | " gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 38 | 39 | "}" 40 | 41 | ].join( "\n" ), 42 | 43 | fragmentShader: [ 44 | 45 | "uniform float cKernel[ KERNEL_SIZE_INT ];", 46 | 47 | "uniform sampler2D tDiffuse;", 48 | "uniform vec2 uImageIncrement;", 49 | 50 | "varying vec2 vUv;", 51 | 52 | "void main() {", 53 | 54 | " vec2 imageCoord = vUv;", 55 | " vec4 sum = vec4( 0.0, 0.0, 0.0, 0.0 );", 56 | 57 | " for( int i = 0; i < KERNEL_SIZE_INT; i ++ ) {", 58 | 59 | " sum += texture2D( tDiffuse, imageCoord ) * cKernel[ i ];", 60 | " imageCoord += uImageIncrement;", 61 | 62 | " }", 63 | 64 | " gl_FragColor = sum;", 65 | 66 | "}" 67 | 68 | 69 | ].join( "\n" ), 70 | 71 | buildKernel: function ( sigma ) { 72 | 73 | // We lop off the sqrt(2 * pi) * sigma term, since we're going to normalize anyway. 
74 | 75 | function gauss( x, sigma ) { 76 | 77 | return Math.exp( - ( x * x ) / ( 2.0 * sigma * sigma ) ); 78 | 79 | } 80 | 81 | var i, values, sum, halfWidth, kMaxKernelSize = 25, kernelSize = 2 * Math.ceil( sigma * 3.0 ) + 1; 82 | 83 | if ( kernelSize > kMaxKernelSize ) kernelSize = kMaxKernelSize; 84 | halfWidth = ( kernelSize - 1 ) * 0.5; 85 | 86 | values = new Array( kernelSize ); 87 | sum = 0.0; 88 | for ( i = 0; i < kernelSize; ++ i ) { 89 | 90 | values[ i ] = gauss( i - halfWidth, sigma ); 91 | sum += values[ i ]; 92 | 93 | } 94 | 95 | // normalize the kernel 96 | 97 | for ( i = 0; i < kernelSize; ++ i ) values[ i ] /= sum; 98 | 99 | return values; 100 | 101 | } 102 | 103 | }; 104 | 105 | export { ConvolutionShader }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/CopyShader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Full-screen textured quad shader 3 | */ 4 | 5 | var CopyShader = { 6 | 7 | uniforms: { 8 | 9 | "tDiffuse": { value: null }, 10 | "opacity": { value: 1.0 } 11 | 12 | }, 13 | 14 | vertexShader: [ 15 | 16 | "varying vec2 vUv;", 17 | 18 | "void main() {", 19 | 20 | " vUv = uv;", 21 | " gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 22 | 23 | "}" 24 | 25 | ].join( "\n" ), 26 | 27 | fragmentShader: [ 28 | 29 | "uniform float opacity;", 30 | 31 | "uniform sampler2D tDiffuse;", 32 | 33 | "varying vec2 vUv;", 34 | 35 | "void main() {", 36 | 37 | " vec4 texel = texture2D( tDiffuse, vUv );", 38 | " gl_FragColor = opacity * texel;", 39 | 40 | "}" 41 | 42 | ].join( "\n" ) 43 | 44 | }; 45 | 46 | export { CopyShader }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/EffectComposer.js: -------------------------------------------------------------------------------- 1 | import { 2 | Clock, 3 | LinearFilter, 4 | Mesh, 5 | OrthographicCamera, 6 | PlaneBufferGeometry, 7 | RGBAFormat, 8 | Vector2, 9 | WebGLRenderTarget 10 | } from "../lib/threejs/three.module.js"; 11 | import { CopyShader } from "./CopyShader.js"; 12 | import { ShaderPass } from "./ShaderPass.js"; 13 | import { MaskPass } from "./MaskPass.js"; 14 | import { ClearMaskPass } from "./MaskPass.js"; 15 | 16 | var EffectComposer = function ( renderer, renderTarget ) { 17 | 18 | this.renderer = renderer; 19 | 20 | if ( renderTarget === undefined ) { 21 | 22 | var parameters = { 23 | minFilter: LinearFilter, 24 | magFilter: LinearFilter, 25 | format: RGBAFormat 26 | }; 27 | 28 | var size = renderer.getSize( new Vector2() ); 29 | this._pixelRatio = renderer.getPixelRatio(); 30 | this._width = size.width; 31 | this._height = size.height; 32 | 33 | renderTarget = new WebGLRenderTarget( this._width * this._pixelRatio, this._height * this._pixelRatio, parameters ); 34 | renderTarget.texture.name = 'EffectComposer.rt1'; 35 | 36 | } else { 37 | 38 | this._pixelRatio = 1; 39 | this._width = renderTarget.width; 40 | this._height = renderTarget.height; 41 | 42 | } 43 | 44 | this.renderTarget1 = renderTarget; 45 | this.renderTarget2 = renderTarget.clone(); 46 | this.renderTarget2.texture.name = 'EffectComposer.rt2'; 47 | 48 | this.writeBuffer = this.renderTarget1; 49 | this.readBuffer = this.renderTarget2; 50 | 51 | this.renderToScreen = true; 52 | 53 | this.passes = []; 54 | 55 | // dependencies 56 | 57 | if ( CopyShader === undefined ) { 58 | 59 | console.error( 'THREE.EffectComposer relies on 
CopyShader' ); 60 | 61 | } 62 | 63 | if ( ShaderPass === undefined ) { 64 | 65 | console.error( 'THREE.EffectComposer relies on ShaderPass' ); 66 | 67 | } 68 | 69 | this.copyPass = new ShaderPass( CopyShader ); 70 | 71 | this.clock = new Clock(); 72 | 73 | }; 74 | 75 | Object.assign( EffectComposer.prototype, { 76 | 77 | swapBuffers: function () { 78 | 79 | var tmp = this.readBuffer; 80 | this.readBuffer = this.writeBuffer; 81 | this.writeBuffer = tmp; 82 | 83 | }, 84 | 85 | addPass: function ( pass ) { 86 | 87 | this.passes.push( pass ); 88 | pass.setSize( this._width * this._pixelRatio, this._height * this._pixelRatio ); 89 | 90 | }, 91 | 92 | insertPass: function ( pass, index ) { 93 | 94 | this.passes.splice( index, 0, pass ); 95 | pass.setSize( this._width * this._pixelRatio, this._height * this._pixelRatio ); 96 | 97 | }, 98 | 99 | isLastEnabledPass: function ( passIndex ) { 100 | 101 | for ( var i = passIndex + 1; i < this.passes.length; i ++ ) { 102 | 103 | if ( this.passes[ i ].enabled ) { 104 | 105 | return false; 106 | 107 | } 108 | 109 | } 110 | 111 | return true; 112 | 113 | }, 114 | 115 | render: function ( deltaTime ) { 116 | 117 | // deltaTime value is in seconds 118 | 119 | if ( deltaTime === undefined ) { 120 | 121 | deltaTime = this.clock.getDelta(); 122 | 123 | } 124 | 125 | var currentRenderTarget = this.renderer.getRenderTarget(); 126 | 127 | var maskActive = false; 128 | 129 | var pass, i, il = this.passes.length; 130 | 131 | for ( i = 0; i < il; i ++ ) { 132 | 133 | pass = this.passes[ i ]; 134 | 135 | if ( pass.enabled === false ) continue; 136 | 137 | pass.renderToScreen = ( this.renderToScreen && this.isLastEnabledPass( i ) ); 138 | pass.render( this.renderer, this.writeBuffer, this.readBuffer, deltaTime, maskActive ); 139 | 140 | if ( pass.needsSwap ) { 141 | 142 | if ( maskActive ) { 143 | 144 | var context = this.renderer.getContext(); 145 | var stencil = this.renderer.state.buffers.stencil; 146 | 147 | //context.stencilFunc( context.NOTEQUAL, 1, 0xffffffff ); 148 | stencil.setFunc( context.NOTEQUAL, 1, 0xffffffff ); 149 | 150 | this.copyPass.render( this.renderer, this.writeBuffer, this.readBuffer, deltaTime ); 151 | 152 | //context.stencilFunc( context.EQUAL, 1, 0xffffffff ); 153 | stencil.setFunc( context.EQUAL, 1, 0xffffffff ); 154 | 155 | } 156 | 157 | this.swapBuffers(); 158 | 159 | } 160 | 161 | if ( MaskPass !== undefined ) { 162 | 163 | if ( pass instanceof MaskPass ) { 164 | 165 | maskActive = true; 166 | 167 | } else if ( pass instanceof ClearMaskPass ) { 168 | 169 | maskActive = false; 170 | 171 | } 172 | 173 | } 174 | 175 | } 176 | 177 | this.renderer.setRenderTarget( currentRenderTarget ); 178 | 179 | }, 180 | 181 | reset: function ( renderTarget ) { 182 | 183 | if ( renderTarget === undefined ) { 184 | 185 | var size = this.renderer.getSize( new Vector2() ); 186 | this._pixelRatio = this.renderer.getPixelRatio(); 187 | this._width = size.width; 188 | this._height = size.height; 189 | 190 | renderTarget = this.renderTarget1.clone(); 191 | renderTarget.setSize( this._width * this._pixelRatio, this._height * this._pixelRatio ); 192 | 193 | } 194 | 195 | this.renderTarget1.dispose(); 196 | this.renderTarget2.dispose(); 197 | this.renderTarget1 = renderTarget; 198 | this.renderTarget2 = renderTarget.clone(); 199 | 200 | this.writeBuffer = this.renderTarget1; 201 | this.readBuffer = this.renderTarget2; 202 | 203 | }, 204 | 205 | setSize: function ( width, height ) { 206 | 207 | this._width = width; 208 | this._height = height; 209 | 210 | var 
effectiveWidth = this._width * this._pixelRatio; 211 | var effectiveHeight = this._height * this._pixelRatio; 212 | 213 | this.renderTarget1.setSize( effectiveWidth, effectiveHeight ); 214 | this.renderTarget2.setSize( effectiveWidth, effectiveHeight ); 215 | 216 | for ( var i = 0; i < this.passes.length; i ++ ) { 217 | 218 | this.passes[ i ].setSize( effectiveWidth, effectiveHeight ); 219 | 220 | } 221 | 222 | }, 223 | 224 | setPixelRatio: function ( pixelRatio ) { 225 | 226 | this._pixelRatio = pixelRatio; 227 | 228 | this.setSize( this._width, this._height ); 229 | 230 | } 231 | 232 | } ); 233 | 234 | 235 | var Pass = function () { 236 | 237 | // if set to true, the pass is processed by the composer 238 | this.enabled = true; 239 | 240 | // if set to true, the pass indicates to swap read and write buffer after rendering 241 | this.needsSwap = true; 242 | 243 | // if set to true, the pass clears its buffer before rendering 244 | this.clear = false; 245 | 246 | // if set to true, the result of the pass is rendered to screen. This is set automatically by EffectComposer. 247 | this.renderToScreen = false; 248 | 249 | }; 250 | 251 | Object.assign( Pass.prototype, { 252 | 253 | setSize: function ( /* width, height */ ) {}, 254 | 255 | render: function ( /* renderer, writeBuffer, readBuffer, deltaTime, maskActive */ ) { 256 | 257 | console.error( 'THREE.Pass: .render() must be implemented in derived pass.' ); 258 | 259 | } 260 | 261 | } ); 262 | 263 | // Helper for passes that need to fill the viewport with a single quad. 264 | Pass.FullScreenQuad = ( function () { 265 | 266 | var camera = new OrthographicCamera( - 1, 1, 1, - 1, 0, 1 ); 267 | var geometry = new PlaneBufferGeometry( 2, 2 ); 268 | 269 | var FullScreenQuad = function ( material ) { 270 | 271 | this._mesh = new Mesh( geometry, material ); 272 | 273 | }; 274 | 275 | Object.defineProperty( FullScreenQuad.prototype, 'material', { 276 | 277 | get: function () { 278 | 279 | return this._mesh.material; 280 | 281 | }, 282 | 283 | set: function ( value ) { 284 | 285 | this._mesh.material = value; 286 | 287 | } 288 | 289 | } ); 290 | 291 | Object.assign( FullScreenQuad.prototype, { 292 | 293 | dispose: function () { 294 | 295 | this._mesh.geometry.dispose(); 296 | 297 | }, 298 | 299 | render: function ( renderer ) { 300 | 301 | renderer.render( this._mesh, camera ); 302 | 303 | } 304 | 305 | } ); 306 | 307 | return FullScreenQuad; 308 | 309 | } )(); 310 | 311 | export { EffectComposer, Pass }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/LuminosityHighPassShader.js: -------------------------------------------------------------------------------- 1 | import { 2 | Color 3 | } from "../lib/threejs/three.module.js"; 4 | 5 | /** 6 | * Luminosity 7 | * http://en.wikipedia.org/wiki/Luminosity 8 | */ 9 | 10 | var LuminosityHighPassShader = { 11 | 12 | shaderID: "luminosityHighPass", 13 | 14 | uniforms: { 15 | 16 | "tDiffuse": { value: null }, 17 | "luminosityThreshold": { value: 1.0 }, 18 | "smoothWidth": { value: 1.0 }, 19 | "defaultColor": { value: new Color( 0x000000 ) }, 20 | "defaultOpacity": { value: 0.0 } 21 | 22 | }, 23 | 24 | vertexShader: [ 25 | 26 | "varying vec2 vUv;", 27 | 28 | "void main() {", 29 | 30 | " vUv = uv;", 31 | 32 | " gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 33 | 34 | "}" 35 | 36 | ].join( "\n" ), 37 | 38 | fragmentShader: [ 39 | 40 | "uniform sampler2D tDiffuse;", 41 | "uniform vec3 defaultColor;", 42 | 
"uniform float defaultOpacity;", 43 | "uniform float luminosityThreshold;", 44 | "uniform float smoothWidth;", 45 | 46 | "varying vec2 vUv;", 47 | 48 | "void main() {", 49 | 50 | " vec4 texel = texture2D( tDiffuse, vUv );", 51 | 52 | " vec3 luma = vec3( 0.299, 0.587, 0.114 );", 53 | 54 | " float v = dot( texel.xyz, luma );", 55 | 56 | " vec4 outputColor = vec4( defaultColor.rgb, defaultOpacity );", 57 | 58 | " float alpha = smoothstep( luminosityThreshold, luminosityThreshold + smoothWidth, v );", 59 | 60 | " gl_FragColor = mix( outputColor, texel, alpha );", 61 | 62 | "}" 63 | 64 | ].join( "\n" ) 65 | 66 | }; 67 | 68 | export { LuminosityHighPassShader }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/MaskPass.js: -------------------------------------------------------------------------------- 1 | import { Pass } from "./Pass.js"; 2 | 3 | var MaskPass = function ( scene, camera ) { 4 | 5 | Pass.call( this ); 6 | 7 | this.scene = scene; 8 | this.camera = camera; 9 | 10 | this.clear = true; 11 | this.needsSwap = false; 12 | 13 | this.inverse = false; 14 | 15 | }; 16 | 17 | MaskPass.prototype = Object.assign( Object.create( Pass.prototype ), { 18 | 19 | constructor: MaskPass, 20 | 21 | render: function ( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) { 22 | 23 | var context = renderer.getContext(); 24 | var state = renderer.state; 25 | 26 | // don't update color or depth 27 | 28 | state.buffers.color.setMask( false ); 29 | state.buffers.depth.setMask( false ); 30 | 31 | // lock buffers 32 | 33 | state.buffers.color.setLocked( true ); 34 | state.buffers.depth.setLocked( true ); 35 | 36 | // set up stencil 37 | 38 | var writeValue, clearValue; 39 | 40 | if ( this.inverse ) { 41 | 42 | writeValue = 0; 43 | clearValue = 1; 44 | 45 | } else { 46 | 47 | writeValue = 1; 48 | clearValue = 0; 49 | 50 | } 51 | 52 | state.buffers.stencil.setTest( true ); 53 | state.buffers.stencil.setOp( context.REPLACE, context.REPLACE, context.REPLACE ); 54 | state.buffers.stencil.setFunc( context.ALWAYS, writeValue, 0xffffffff ); 55 | state.buffers.stencil.setClear( clearValue ); 56 | state.buffers.stencil.setLocked( true ); 57 | 58 | // draw into the stencil buffer 59 | 60 | renderer.setRenderTarget( readBuffer ); 61 | if ( this.clear ) renderer.clear(); 62 | renderer.render( this.scene, this.camera ); 63 | 64 | renderer.setRenderTarget( writeBuffer ); 65 | if ( this.clear ) renderer.clear(); 66 | renderer.render( this.scene, this.camera ); 67 | 68 | // unlock color and depth buffer for subsequent rendering 69 | 70 | state.buffers.color.setLocked( false ); 71 | state.buffers.depth.setLocked( false ); 72 | 73 | // only render where stencil is set to 1 74 | 75 | state.buffers.stencil.setLocked( false ); 76 | state.buffers.stencil.setFunc( context.EQUAL, 1, 0xffffffff ); // draw if == 1 77 | state.buffers.stencil.setOp( context.KEEP, context.KEEP, context.KEEP ); 78 | state.buffers.stencil.setLocked( true ); 79 | 80 | } 81 | 82 | } ); 83 | 84 | 85 | var ClearMaskPass = function () { 86 | 87 | Pass.call( this ); 88 | 89 | this.needsSwap = false; 90 | 91 | }; 92 | 93 | ClearMaskPass.prototype = Object.create( Pass.prototype ); 94 | 95 | Object.assign( ClearMaskPass.prototype, { 96 | 97 | render: function ( renderer /*, writeBuffer, readBuffer, deltaTime, maskActive */ ) { 98 | 99 | renderer.state.buffers.stencil.setLocked( false ); 100 | renderer.state.buffers.stencil.setTest( false ); 101 | 102 | } 103 | 104 | } ); 105 | 
106 | export { MaskPass, ClearMaskPass }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/Pass.js: -------------------------------------------------------------------------------- 1 | import { 2 | OrthographicCamera, 3 | PlaneBufferGeometry, 4 | Mesh 5 | } from "../lib/threejs/three.module.js"; 6 | 7 | function Pass() { 8 | 9 | // if set to true, the pass is processed by the composer 10 | this.enabled = true; 11 | 12 | // if set to true, the pass indicates to swap read and write buffer after rendering 13 | this.needsSwap = true; 14 | 15 | // if set to true, the pass clears its buffer before rendering 16 | this.clear = false; 17 | 18 | // if set to true, the result of the pass is rendered to screen. This is set automatically by EffectComposer. 19 | this.renderToScreen = false; 20 | 21 | } 22 | 23 | Object.assign( Pass.prototype, { 24 | 25 | setSize: function ( /* width, height */ ) {}, 26 | 27 | render: function ( /* renderer, writeBuffer, readBuffer, deltaTime, maskActive */ ) { 28 | 29 | console.error( 'THREE.Pass: .render() must be implemented in derived pass.' ); 30 | 31 | } 32 | 33 | } ); 34 | 35 | // Helper for passes that need to fill the viewport with a single quad. 36 | 37 | // Important: It's actually a hack to put FullScreenQuad into the Pass namespace. This is only 38 | // done to make examples/js code work. Normally, FullScreenQuad should be exported 39 | // from this module like Pass. 40 | 41 | Pass.FullScreenQuad = ( function () { 42 | 43 | var camera = new OrthographicCamera( - 1, 1, 1, - 1, 0, 1 ); 44 | var geometry = new PlaneBufferGeometry( 2, 2 ); 45 | 46 | var FullScreenQuad = function ( material ) { 47 | 48 | this._mesh = new Mesh( geometry, material ); 49 | 50 | }; 51 | 52 | Object.defineProperty( FullScreenQuad.prototype, 'material', { 53 | 54 | get: function () { 55 | 56 | return this._mesh.material; 57 | 58 | }, 59 | 60 | set: function ( value ) { 61 | 62 | this._mesh.material = value; 63 | 64 | } 65 | 66 | } ); 67 | 68 | Object.assign( FullScreenQuad.prototype, { 69 | 70 | dispose: function () { 71 | 72 | this._mesh.geometry.dispose(); 73 | 74 | }, 75 | 76 | render: function ( renderer ) { 77 | 78 | renderer.render( this._mesh, camera ); 79 | 80 | } 81 | 82 | } ); 83 | 84 | return FullScreenQuad; 85 | 86 | } )(); 87 | 88 | export { Pass }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/RenderPass.js: -------------------------------------------------------------------------------- 1 | import { Pass } from "./Pass.js"; 2 | 3 | var RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) { 4 | 5 | Pass.call( this ); 6 | 7 | this.scene = scene; 8 | this.camera = camera; 9 | 10 | this.overrideMaterial = overrideMaterial; 11 | 12 | this.clearColor = clearColor; 13 | this.clearAlpha = ( clearAlpha !== undefined ) ? 
clearAlpha : 0; 14 | 15 | this.clear = true; 16 | this.clearDepth = false; 17 | this.needsSwap = false; 18 | 19 | }; 20 | 21 | RenderPass.prototype = Object.assign( Object.create( Pass.prototype ), { 22 | 23 | constructor: RenderPass, 24 | 25 | render: function ( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) { 26 | 27 | var oldAutoClear = renderer.autoClear; 28 | renderer.autoClear = false; 29 | 30 | var oldClearColor, oldClearAlpha, oldOverrideMaterial; 31 | 32 | if ( this.overrideMaterial !== undefined ) { 33 | 34 | oldOverrideMaterial = this.scene.overrideMaterial; 35 | 36 | this.scene.overrideMaterial = this.overrideMaterial; 37 | 38 | } 39 | 40 | if ( this.clearColor ) { 41 | 42 | oldClearColor = renderer.getClearColor().getHex(); 43 | oldClearAlpha = renderer.getClearAlpha(); 44 | 45 | renderer.setClearColor( this.clearColor, this.clearAlpha ); 46 | 47 | } 48 | 49 | if ( this.clearDepth ) { 50 | 51 | renderer.clearDepth(); 52 | 53 | } 54 | 55 | renderer.setRenderTarget( this.renderToScreen ? null : readBuffer ); 56 | 57 | // TODO: Avoid using autoClear properties, see https://github.com/mrdoob/three.js/pull/15571#issuecomment-465669600 58 | if ( this.clear ) renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil ); 59 | renderer.render( this.scene, this.camera ); 60 | 61 | if ( this.clearColor ) { 62 | 63 | renderer.setClearColor( oldClearColor, oldClearAlpha ); 64 | 65 | } 66 | 67 | if ( this.overrideMaterial !== undefined ) { 68 | 69 | this.scene.overrideMaterial = oldOverrideMaterial; 70 | 71 | } 72 | 73 | renderer.autoClear = oldAutoClear; 74 | 75 | } 76 | 77 | } ); 78 | 79 | export { RenderPass }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/ShaderPass.js: -------------------------------------------------------------------------------- 1 | import { 2 | ShaderMaterial, 3 | UniformsUtils 4 | } from "../lib/threejs/three.module.js"; 5 | import { Pass } from "./Pass.js"; 6 | 7 | var ShaderPass = function ( shader, textureID ) { 8 | 9 | Pass.call( this ); 10 | 11 | this.textureID = ( textureID !== undefined ) ? 
textureID : "tDiffuse"; 12 | 13 | if ( shader instanceof ShaderMaterial ) { 14 | 15 | this.uniforms = shader.uniforms; 16 | 17 | this.material = shader; 18 | 19 | } else if ( shader ) { 20 | 21 | this.uniforms = UniformsUtils.clone( shader.uniforms ); 22 | 23 | this.material = new ShaderMaterial( { 24 | 25 | defines: Object.assign( {}, shader.defines ), 26 | uniforms: this.uniforms, 27 | vertexShader: shader.vertexShader, 28 | fragmentShader: shader.fragmentShader 29 | 30 | } ); 31 | 32 | } 33 | 34 | this.fsQuad = new Pass.FullScreenQuad( this.material ); 35 | 36 | }; 37 | 38 | ShaderPass.prototype = Object.assign( Object.create( Pass.prototype ), { 39 | 40 | constructor: ShaderPass, 41 | 42 | render: function ( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) { 43 | 44 | if ( this.uniforms[ this.textureID ] ) { 45 | 46 | this.uniforms[ this.textureID ].value = readBuffer.texture; 47 | 48 | } 49 | 50 | this.fsQuad.material = this.material; 51 | 52 | if ( this.renderToScreen ) { 53 | 54 | renderer.setRenderTarget( null ); 55 | this.fsQuad.render( renderer ); 56 | 57 | } else { 58 | 59 | renderer.setRenderTarget( writeBuffer ); 60 | // TODO: Avoid using autoClear properties, see https://github.com/mrdoob/three.js/pull/15571#issuecomment-465669600 61 | if ( this.clear ) renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil ); 62 | this.fsQuad.render( renderer ); 63 | 64 | } 65 | 66 | } 67 | 68 | } ); 69 | 70 | export { ShaderPass }; -------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/js/jsm/stats.module.js: -------------------------------------------------------------------------------- 1 | var Stats = function () { 2 | 3 | var mode = 0; 4 | 5 | var container = document.createElement( 'div' ); 6 | container.style.cssText = 'position:fixed;top:0;left:0;cursor:pointer;opacity:0.9;z-index:10000'; 7 | container.addEventListener( 'click', function ( event ) { 8 | 9 | event.preventDefault(); 10 | showPanel( ++ mode % container.children.length ); 11 | 12 | }, false ); 13 | 14 | // 15 | 16 | function addPanel( panel ) { 17 | 18 | container.appendChild( panel.dom ); 19 | return panel; 20 | 21 | } 22 | 23 | function showPanel( id ) { 24 | 25 | for ( var i = 0; i < container.children.length; i ++ ) { 26 | 27 | container.children[ i ].style.display = i === id ? 
'block' : 'none'; 28 | 29 | } 30 | 31 | mode = id; 32 | 33 | } 34 | 35 | // 36 | 37 | var beginTime = ( performance || Date ).now(), prevTime = beginTime, frames = 0; 38 | 39 | var fpsPanel = addPanel( new Stats.Panel( 'FPS', '#0ff', '#002' ) ); 40 | var msPanel = addPanel( new Stats.Panel( 'MS', '#0f0', '#020' ) ); 41 | 42 | if ( self.performance && self.performance.memory ) { 43 | 44 | var memPanel = addPanel( new Stats.Panel( 'MB', '#f08', '#201' ) ); 45 | 46 | } 47 | 48 | showPanel( 0 ); 49 | 50 | return { 51 | 52 | REVISION: 16, 53 | 54 | dom: container, 55 | 56 | addPanel: addPanel, 57 | showPanel: showPanel, 58 | 59 | begin: function () { 60 | 61 | beginTime = ( performance || Date ).now(); 62 | 63 | }, 64 | 65 | end: function () { 66 | 67 | frames ++; 68 | 69 | var time = ( performance || Date ).now(); 70 | 71 | msPanel.update( time - beginTime, 200 ); 72 | 73 | if ( time >= prevTime + 1000 ) { 74 | 75 | fpsPanel.update( ( frames * 1000 ) / ( time - prevTime ), 100 ); 76 | 77 | prevTime = time; 78 | frames = 0; 79 | 80 | if ( memPanel ) { 81 | 82 | var memory = performance.memory; 83 | memPanel.update( memory.usedJSHeapSize / 1048576, memory.jsHeapSizeLimit / 1048576 ); 84 | 85 | } 86 | 87 | } 88 | 89 | return time; 90 | 91 | }, 92 | 93 | update: function () { 94 | 95 | beginTime = this.end(); 96 | 97 | }, 98 | 99 | // Backwards Compatibility 100 | 101 | domElement: container, 102 | setMode: showPanel 103 | 104 | }; 105 | 106 | }; 107 | 108 | Stats.Panel = function ( name, fg, bg ) { 109 | 110 | var min = Infinity, max = 0, round = Math.round; 111 | var PR = round( window.devicePixelRatio || 1 ); 112 | 113 | var WIDTH = 80 * PR, HEIGHT = 48 * PR, 114 | TEXT_X = 3 * PR, TEXT_Y = 2 * PR, 115 | GRAPH_X = 3 * PR, GRAPH_Y = 15 * PR, 116 | GRAPH_WIDTH = 74 * PR, GRAPH_HEIGHT = 30 * PR; 117 | 118 | var canvas = document.createElement( 'canvas' ); 119 | canvas.width = WIDTH; 120 | canvas.height = HEIGHT; 121 | canvas.style.cssText = 'width:80px;height:48px'; 122 | 123 | var context = canvas.getContext( '2d' ); 124 | context.font = 'bold ' + ( 9 * PR ) + 'px Helvetica,Arial,sans-serif'; 125 | context.textBaseline = 'top'; 126 | 127 | context.fillStyle = bg; 128 | context.fillRect( 0, 0, WIDTH, HEIGHT ); 129 | 130 | context.fillStyle = fg; 131 | context.fillText( name, TEXT_X, TEXT_Y ); 132 | context.fillRect( GRAPH_X, GRAPH_Y, GRAPH_WIDTH, GRAPH_HEIGHT ); 133 | 134 | context.fillStyle = bg; 135 | context.globalAlpha = 0.9; 136 | context.fillRect( GRAPH_X, GRAPH_Y, GRAPH_WIDTH, GRAPH_HEIGHT ); 137 | 138 | return { 139 | 140 | dom: canvas, 141 | 142 | update: function ( value, maxValue ) { 143 | 144 | min = Math.min( min, value ); 145 | max = Math.max( max, value ); 146 | 147 | context.fillStyle = bg; 148 | context.globalAlpha = 1; 149 | context.fillRect( 0, 0, WIDTH, GRAPH_Y ); 150 | context.fillStyle = fg; 151 | context.fillText( round( value ) + ' ' + name + ' (' + round( min ) + '-' + round( max ) + ')', TEXT_X, TEXT_Y ); 152 | 153 | context.drawImage( canvas, GRAPH_X + PR, GRAPH_Y, GRAPH_WIDTH - PR, GRAPH_HEIGHT, GRAPH_X, GRAPH_Y, GRAPH_WIDTH - PR, GRAPH_HEIGHT ); 154 | 155 | context.fillRect( GRAPH_X + GRAPH_WIDTH - PR, GRAPH_Y, PR, GRAPH_HEIGHT ); 156 | 157 | context.fillStyle = bg; 158 | context.globalAlpha = 0.9; 159 | context.fillRect( GRAPH_X + GRAPH_WIDTH - PR, GRAPH_Y, PR, round( ( 1 - ( value / maxValue ) ) * GRAPH_HEIGHT ) ); 160 | 161 | } 162 | 163 | }; 164 | 165 | }; 166 | 167 | export default Stats; 168 | 
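The Stats module above derives both panel readings from one pair of timestamps: end() reports the milliseconds elapsed since the matching begin() call (the MS panel), and once per second it converts the accumulated frame count into FPS as frames * 1000 / (time - prevTime). A minimal Python sketch of the same bookkeeping, for illustration only and not part of this repository:

import time

class FrameStats:
    # Mirrors the begin()/end() bookkeeping of stats.module.js.
    def __init__(self):
        self.begin_time = self._now_ms()
        self.prev_time = self.begin_time
        self.frames = 0
        self.last_ms = 0.0  # duration of the last begin()/end() interval (MS panel)
        self.fps = 0.0      # recomputed at most once per second (FPS panel)

    @staticmethod
    def _now_ms():
        return time.perf_counter() * 1000.0

    def begin(self):
        self.begin_time = self._now_ms()

    def end(self):
        self.frames += 1
        now = self._now_ms()
        self.last_ms = now - self.begin_time
        if now >= self.prev_time + 1000.0:
            self.fps = (self.frames * 1000.0) / (now - self.prev_time)
            self.prev_time = now
            self.frames = 0
        return now

Calling begin() at the top of a render loop and end() at the bottom reproduces the numbers the FPS and MS panels display.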
-------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/server.py: --------------------------------------------------------------------------------
1 | import os
2 |
3 | from fastapi import FastAPI
4 | from fastapi.staticfiles import StaticFiles
5 | import uvicorn
6 | from fastapi.middleware.cors import CORSMiddleware
7 | import pathlib
8 |
9 | app = FastAPI()
10 |
11 | app.add_middleware(CORSMiddleware,
12 |                    allow_origins = ["*"],
13 |                    allow_methods = ["*"],
14 |                    allow_headers = ["*"],
15 |                    allow_credentials = True)
16 |
17 |
18 | # Mount static files (provide routes).
19 | mountable = [["/static", "./"]]
20 | for api_route, directory_path in mountable:
21 |
22 |
23 |     directory_path = pathlib.Path(directory_path).resolve()
24 |
25 |     print(f"Mounting {directory_path} on {api_route}")
26 |     app.mount(api_route, StaticFiles(directory=directory_path), name="static")
27 |
28 |     for x in directory_path.glob("./*"):
29 |         print(":::", x)
30 |
31 |
32 | if __name__ == '__main__':
33 |     uvicorn.run(app,
34 |                 port = int(os.environ['VOXEL_VISUALIZER_PORT']),  # env vars are strings; uvicorn expects an int port
35 |                 host = "0.0.0.0"
36 |                 )
37 |
38 |
-------------------------------------------------------------------------------- /data_cube_utilities/voxel_visualizer/voxel_visualizer.py: --------------------------------------------------------------------------------
1 | import os
2 | import uuid
3 | import re
4 |
5 | import psutil
6 |
7 | import numpy as np
8 | import xarray as xr
9 |
10 | from jinja2 import Template
11 | from IPython.display import HTML
12 |
13 | from utils.data_cube_utilities.dc_time import _n64_to_datetime, dt_to_str
14 |
15 | VOXEL_VIS_WEB_SVR_CMD = 'python3 server.py &'
16 |
17 | def voxel_visualize(da: xr.DataArray, **kwargs):
18 |     """
19 |     Show a 3D visualization of the boolean xarray.DataArray `da`.
20 |
21 |     It creates an `iframe` DOM element in the cell's output in Jupyter.
22 |
23 |     The camera can be controlled with either:
24 |     1. The mouse and arrow keys OR
25 |     2. Buttons on the right side (hideable)
26 |
27 |     There is a slider on the left side with 2 modes - Range and Select.
28 |     * Range: This mode shows layers (time slices) after the selected time
29 |              (shown as text above the slider) at opacity `voxel_opacity`.
30 |              Layers before the selected time are shown at a lower opacity
31 |              (more translucent).
32 |     * Select: This mode shows only the selected layer at opacity `voxel_opacity`.
33 |               Layers other than the selected time are shown at a lower opacity
34 |               (more translucent).
35 |
36 |     The visualization is created with Three.js.
37 |
38 |     Parameters
39 |     ----------
40 |     da: xr.DataArray
41 |         The boolean DataArray to show in 3D.
42 |     x_scale, y_scale, z_scale: numeric
43 |         Distance scale factors for voxels in the x, y, and z dimensions (default 1).
44 |     distance_scale: numeric
45 |         Distance scale factor for voxels in all dimensions (default 1).
46 |     voxel_size: numeric
47 |         The initial size of the voxels (default 4).
48 |     voxel_opacity: float
49 |         The opacity of the voxels (range: [0,1], default 1).
50 |     show_stats: bool
51 |         Whether to show stats such as FPS (default False).
52 |     show_controls: bool
53 |         Whether to show the controls (default True).
54 |     """
55 |     cwd = os.getcwd()
56 |     os.chdir(os.path.dirname(__file__))
57 |
58 |     def _launch_server_if_not_running():
59 |         # Determine if the server is running.
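        # (Matching strategy) treat a process as the running server when
        # its command line contains every token of VOXEL_VIS_WEB_SVR_CMD
        # except the trailing shell '&'.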
60 |         process_cmds = (p.cmdline() for p in psutil.process_iter())
61 |         svr_tokens = [token for token in VOXEL_VIS_WEB_SVR_CMD.split() if token != '&']
62 |         cmd_found = False
63 |         for cmd in process_cmds:
64 |             if all(token in cmd for token in svr_tokens):
65 |                 cmd_found = True
66 |                 break
67 |
68 |
69 |
70 |         # If the server is not running, start it.
71 |         if not cmd_found:
72 |             os.system(VOXEL_VIS_WEB_SVR_CMD)
73 |
74 |     # Ensure the webserver is running.
75 |     _launch_server_if_not_running()
76 |     # Load the voxel visualizer template.
77 |     with open('template.html', 'r') as fs:
78 |         template = Template(fs.read())
79 |
80 |
81 |     if da.dtype != bool:
82 |         raise TypeError("voxel_visualize() requires a boolean xarray.DataArray.")
83 |
84 |     # Reverse the longitude (x) dimension and convert to int8 for serialization.
85 |     da = da.sel(longitude=da.longitude[::-1]).astype(np.int8)
86 |
87 |     da_str = str(da.values.tolist())
88 |     times_str = str([dt_to_str(_n64_to_datetime(time), fmt='%Y-%m-%dT%H:%M:%S.%f')
89 |                      for time in da.time.values]).replace(',', ',\n')
90 |     # Render the template.
91 |     x_scale = kwargs.get('x_scale', 1)
92 |     assert isinstance(x_scale, (int, float)), "x_scale must be an int or float."
93 |     kwargs['x_scale'] = x_scale
94 |     y_scale = kwargs.get('y_scale', 1)
95 |     assert isinstance(y_scale, (int, float)), "y_scale must be an int or float."
96 |     kwargs['y_scale'] = y_scale
97 |     z_scale = kwargs.get('z_scale', 1)
98 |     assert isinstance(z_scale, (int, float)), "z_scale must be an int or float."
99 |     kwargs['z_scale'] = z_scale
100 |     distance_scale = kwargs.get('distance_scale', 1)
101 |     assert isinstance(distance_scale, (int, float)), "distance_scale must be an int or float."
102 |     kwargs['distance_scale'] = distance_scale
103 |     voxel_size = kwargs.get('voxel_size', 4)
104 |     assert isinstance(voxel_size, (int, float)), "voxel_size must be an int or float."
105 |     kwargs['voxel_size'] = voxel_size
106 |     voxel_opacity = kwargs.get('voxel_opacity', 1)
107 |     assert isinstance(voxel_opacity, (int, float)), "voxel_opacity must be an int or float."
108 |     kwargs['voxel_opacity'] = voxel_opacity
109 |     show_stats = kwargs.setdefault('show_stats', False)
110 |     assert isinstance(show_stats, bool), "show_stats must be a boolean."
111 |     kwargs['show_stats'] = show_stats
112 |     show_controls = kwargs.setdefault('show_controls', True)
113 |     assert isinstance(show_controls, bool), "show_controls must be a boolean."
114 |     kwargs['show_controls'] = show_controls
115 |     filled_template = template.render(data_array=da_str, times=times_str, **kwargs)
116 |
117 |     # Remove single-line JS comments (// ...), then collapse the
118 |     # template onto one line so it can be embedded in the iframe
119 |     # srcdoc string below.
120 |     filled_template_no_sngl_lne_cmts = []
121 |     for i, line in enumerate(filled_template.splitlines()):
122 |         if re.search(r'^\s*//', line) is None:
123 |             filled_template_no_sngl_lne_cmts.append(line)
124 |     filled_template_sngl_lne = ''.join(filled_template_no_sngl_lne_cmts)
125 |
126 |     # Escape quotes for JS string concatenation.
127 |     filled_template_sngl_lne_esc = filled_template_sngl_lne\
128 |         .replace('\"', '\\"').replace("\'", "\\'")
129 |
130 |     # "Escape" script tags to avoid closing the script tag
131 |     # containing the substituted filled-template HTML string.
132 |     end_scr = '/script>'
133 |     filled_template_sngl_lne_esc_split = \
134 |         re.split(end_scr, filled_template_sngl_lne_esc)
135 |     # Format the strings to form the full string in JS by concat.
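    # Illustration with a hypothetical template "<script>x</script><p>hi</p>":
    # re.split('/script>', ...) yields ['<script>x<', '<p>hi</p>'], which is
    # rebuilt below as "'<script>x<' + '/script><p>hi</p>'" so that the
    # notebook's embedding <script> tag is never closed prematurely.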
136 | filled_template_sngl_lne_esc_split_fmt = [] 137 | for i, string in enumerate(filled_template_sngl_lne_esc_split): 138 | # All but first must have end script tag restored. 139 | # All are enclosed in single quotes. 140 | if i > 0: 141 | string = f"\'{end_scr}{string}\'" 142 | else: 143 | string = f"\'{string}\'" 144 | filled_template_sngl_lne_esc_split_fmt.append(string) 145 | filled_template_sngl_lne_esc_fmt = \ 146 | " + ".join(filled_template_sngl_lne_esc_split_fmt) 147 | 148 | vox_vis_server_port = os.environ['VOXEL_VISUALIZER_PORT'] 149 | iframe = HTML(f""" 150 |
151 | <!-- [lines 151-169 of the original file contained the iframe container markup and loader script; that HTML was lost in extraction and is not reproduced here] -->
170 |     """)
171 |
172 |     os.chdir(cwd)
173 |     return iframe
-------------------------------------------------------------------------------- /data_cube_utilities/xarray_bokeh_plotting.py: --------------------------------------------------------------------------------
1 | from bokeh.models.widgets import Panel, Tabs
2 | import numpy as np
3 | import pandas as pd
4 | from bokeh.models import FuncTickFormatter
5 | from bokeh.io import push_notebook, show, output_notebook
6 | from bokeh.layouts import row
7 | from bokeh.plotting import figure
8 | from bokeh.models.sources import ColumnDataSource
9 | from bokeh.models import HoverTool
10 |
11 |
12 | def __stem_figure(list1,
13 |                   name1,
14 |                   list2,
15 |                   name2,
16 |                   **kwargs):
17 |
18 |     dataframe_A = pd.DataFrame(list1, index = list1).rename(columns = {0:name1})
19 |     dataframe_B = pd.DataFrame(list2, index = list2).rename(columns = {0:name2})
20 |
21 |
22 |
23 |     result = pd.concat([dataframe_A, dataframe_B], join = 'outer', axis = 1)
24 |     result = (~pd.isnull(result)).astype(int)
25 |
26 |     aframe = result[name1][ result[name1] > 0]
27 |     bframe = result[name2][ result[name2] > 0]
28 |     bframe = bframe.map(np.negative)
29 |
30 |     positive_match = aframe[result[name1] == result[name2]]
31 |     positive_miss = aframe[result[name1] != result[name2]]
32 |
33 |     negative_match = bframe[result[name1] == result[name2]]
34 |     negative_miss = bframe[result[name1] != result[name2]]
35 |
36 |
37 |     ## Handle DateTime formats on X-Axis
38 |     x_axis_format = 'auto' if type(list1[0]) != np.datetime64 else "datetime"
39 |
40 |     ## Create Canvas/Figure
41 |     p = figure(plot_width = 900,
42 |                plot_height = 200,
43 |                tools = "xpan, reset, save, xzoom_in, xzoom_out",
44 |                x_axis_type = x_axis_format,
45 |                title = "Coordinate Comparison: {}".format(kwargs['dimension']))
46 |
47 |     ## Create a long horizontal line
48 |     start = min(aframe.index.min(), bframe.index.min())
49 |     end = max(aframe.index.max(), bframe.index.max())
50 |
51 |
52 |
53 |
54 |
55 |     ## Remove horizontal gridlines
56 |     p.ygrid.grid_line_color = None
57 |
58 |     ## Remove y-axis labels (since the plot is categorical)
59 |     p.yaxis.major_tick_line_color = None  # turn off y-axis major ticks
60 |     p.yaxis.minor_tick_line_color = None  # turn off y-axis minor ticks
61 |     # p.yaxis.major_label_text_font_size = '0pt'  # turn off y-axis tick labels
62 |
63 |     p.segment(x0 = start,
64 |               x1 = end,
65 |               y0 = 0,
66 |               y1 = 0,
67 |               line_width = 1)
68 |
69 |     def __plot_stem_and_bulb(figure, series, color, name):
70 |
71 |         figure.circle(x = "index",
72 |                       y = name,
73 |                       source = ColumnDataSource(series.to_frame()),
74 |                       alpha = 0.5,
75 |                       size = 10,
76 |                       color = color)
77 |
78 |         figure.segment(x0 = 'index',
79 |                        x1 = 'index',
80 |                        y0 = 0,
81 |                        y1 = name,
82 |                        source = ColumnDataSource(series.to_frame()),
83 |                        line_width = 1,
84 |                        color = color)
85 |         return figure
86 |
87 |     p = __plot_stem_and_bulb(p, positive_match, "green", name1)
88 |     p = __plot_stem_and_bulb(p, positive_miss, "red", name1)
89 |     p = __plot_stem_and_bulb(p, negative_match, "green", name2)
90 |     p = __plot_stem_and_bulb(p, negative_miss, "red", name2)
91 |
92 |     p.yaxis.axis_label = "{} {}".format(name2, name1)
93 |
94 |     return p
95 |
96 |
97 | def init_notebook():
98 |     from bokeh.plotting import figure, show
99 |     from bokeh.io import output_notebook
100 |     output_notebook()
101 |
102 | def dim_alignement(dataset_1 = None,
103 |                    name_1 = "dataset_1",
104 |                    dataset_2 = None,
105 |                    name_2 = "dataset_2",
106 |                    ):
107 |     xr1 = dataset_1
108 | xr2 = dataset_2 109 | 110 | common_dims = set(xr1.dims).intersection(set(xr2.dims)) 111 | 112 | empty_set = set() 113 | if common_dims == empty_set: 114 | raise Exception("datasets do not have any dims in common") 115 | 116 | display_tabs = [] 117 | for dim in common_dims: 118 | fig = __stem_figure(xr1[dim].values, 119 | name_1, 120 | xr2[dim].values, 121 | name_2, 122 | dimension = dim 123 | ) 124 | 125 | display_tabs.append(Panel(child = fig, title = dim)) 126 | 127 | tabs = Tabs(tabs = display_tabs) ## Make a figure with many tabs. 128 | show(tabs) 129 | -------------------------------------------------------------------------------- /dea_tools/.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | *.egg-info/* 3 | dist/* 4 | *.pyc 5 | -------------------------------------------------------------------------------- /dea_tools/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md LICENSE 2 | -------------------------------------------------------------------------------- /dea_tools/README.rst: -------------------------------------------------------------------------------- 1 | dea-tools 2 | ========= 3 | 4 | Python functions and algorithms developed to assist in analysing DEA data (e.g. loading data, plotting, spatial analysis, machine learning). 5 | 6 | Installation 7 | ------------ 8 | 9 | To work with this module on the DEA Sandbox from within the `dea-notebooks` repo, you can add the Tools folder to the system path: 10 | 11 | .. code-block:: python 12 | 13 | import sys 14 | sys.path.insert(1, '../Tools/') 15 | import dea_tools.datahandling # or some other submodule 16 | 17 | You can also `pip install` the module. To do this on the DEA Sandbox, run `pip` from the terminal: 18 | 19 | .. code-block:: bash 20 | 21 | pip install -e Tools/ 22 | 23 | Install from the source on any other system with `pip`: 24 | 25 | .. code-block:: bash 26 | 27 | pip install --extra-index-url="https://packages.dea.ga.gov.au" git+https://github.com/GeoscienceAustralia/dea-notebooks.git#subdirectory=Tools 28 | 29 | Citing DEA Tools 30 | ---------------- 31 | 32 | If you use any of the notebooks, code or tools in this repository in your work, please reference them using the following citation: 33 | 34 | Krause, C., Dunn, B., Bishop-Taylor, R., Adams, C., Burton, C., Alger, M., Chua, S., Phillips, C., Newey, V., Kouzoubov, K., Leith, A., Ayers, D., Hicks, A., DEA Notebooks contributors 2021. Digital Earth Australia notebooks and tools repository. Geoscience Australia, Canberra. 
https://doi.org/10.26186/145234 35 | 36 | -------------------------------------------------------------------------------- /dea_tools/dea_tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/dea_tools/dea_tools/__init__.py -------------------------------------------------------------------------------- /dea_tools/dea_tools/__main__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/dea_tools/dea_tools/__main__.py -------------------------------------------------------------------------------- /dea_tools/dea_tools/dask.py: -------------------------------------------------------------------------------- 1 | ## dea_dask.py 2 | ''' 3 | Description: A set of python functions for simplifying the creation of a 4 | local dask cluster. 5 | 6 | License: The code in this notebook is licensed under the Apache License, 7 | Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0). Digital Earth 8 | Australia data is licensed under the Creative Commons by Attribution 4.0 9 | license (https://creativecommons.org/licenses/by/4.0/). 10 | 11 | Contact: If you need assistance, please post a question on the Open Data 12 | Cube Slack channel (http://slack.opendatacube.org/) or on the GIS Stack 13 | Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube) 14 | using the `open-data-cube` tag (you can view previously asked questions 15 | here: https://gis.stackexchange.com/questions/tagged/open-data-cube). 16 | 17 | If you would like to report an issue with this script, you can file one on 18 | Github (https://github.com/GeoscienceAustralia/dea-notebooks/issues/new). 19 | 20 | Functions included: 21 | create_local_dask_cluster 22 | create_dask_gateway_cluster 23 | 24 | Last modified: March 2020 25 | 26 | ''' 27 | 28 | 29 | from importlib.util import find_spec 30 | import os 31 | import dask 32 | from aiohttp import ClientConnectionError 33 | from datacube.utils.dask import start_local_dask 34 | from datacube.utils.rio import configure_s3_access 35 | 36 | _HAVE_PROXY = bool(find_spec('jupyter_server_proxy')) 37 | _IS_AWS = ('AWS_ACCESS_KEY_ID' in os.environ or 38 | 'AWS_DEFAULT_REGION' in os.environ) 39 | 40 | 41 | def create_local_dask_cluster(spare_mem='3Gb', display_client=True): 42 | """ 43 | Using the datacube utils function `start_local_dask`, generate 44 | a local dask cluster. Automatically detects if on AWS or NCI. 45 | 46 | Example use : 47 | 48 | import sys 49 | sys.path.append("../Scripts") 50 | from dea_dask import create_local_dask_cluster 51 | 52 | create_local_dask_cluster(spare_mem='4Gb') 53 | 54 | Parameters 55 | ---------- 56 | spare_mem : String, optional 57 | The amount of memory, in Gb, to leave for the notebook to run. 58 | This memory will not be used by the cluster. e.g '3Gb' 59 | display_client : Bool, optional 60 | An optional boolean indicating whether to display a summary of 61 | the dask client, including a link to monitor progress of the 62 | analysis. Set to False to hide this display. 
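
    Notes
    -----
    The client is displayed in the notebook output (when `display_client`
    is True) but is not returned by this function.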
63 | 64 | """ 65 | 66 | if _HAVE_PROXY: 67 | # Configure dashboard link to go over proxy 68 | prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/') 69 | dask.config.set({"distributed.dashboard.link": 70 | prefix + "proxy/{port}/status"}) 71 | 72 | # Start up a local cluster 73 | client = start_local_dask(mem_safety_margin=spare_mem) 74 | 75 | if _IS_AWS: 76 | # Configure GDAL for s3 access 77 | configure_s3_access(aws_unsigned=True, 78 | client=client) 79 | 80 | # Show the dask cluster settings 81 | if display_client: 82 | from IPython.display import display 83 | display(client) 84 | 85 | 86 | try: 87 | from dask_gateway import Gateway 88 | 89 | def create_dask_gateway_cluster(profile='r5_L', workers=2): 90 | """ 91 | Create a cluster in our internal dask cluster. 92 | 93 | Parameters 94 | ---------- 95 | profile : str 96 | Possible values are: 97 | - r5_L (2 cores, 15GB memory) 98 | - r5_XL (4 cores, 31GB memory) 99 | - r5_2XL (8 cores, 63GB memory) 100 | - r5_4XL (16 cores, 127GB memory) 101 | 102 | workers : int 103 | Number of workers in the cluster. 104 | """ 105 | try: 106 | gateway = Gateway() 107 | options = gateway.cluster_options() 108 | options['profile'] = profile 109 | options['jupyterhub_user'] = os.getenv('JUPYTERHUB_USER') 110 | cluster = gateway.new_cluster(options) 111 | cluster.scale(workers) 112 | return cluster 113 | except ClientConnectionError: 114 | raise ConnectionError("access to dask gateway cluster unauthorized") 115 | 116 | except ImportError: 117 | def create_dask_gateway_cluster(*args, **kwargs): 118 | raise NotImplementedError 119 | -------------------------------------------------------------------------------- /dea_tools/dea_tools/waterbodies.py: -------------------------------------------------------------------------------- 1 | ## dea_waterbodies.py 2 | """ 3 | Description: This file contains a set of python functions for loading 4 | and processing DEA Waterbodies. 5 | 6 | License: The code in this notebook is licensed under the Apache License, 7 | Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0). Digital Earth 8 | Australia data is licensed under the Creative Commons by Attribution 4.0 9 | license (https://creativecommons.org/licenses/by/4.0/). 10 | 11 | Contact: If you need assistance, please post a question on the Open Data 12 | Cube Slack channel (http://slack.opendatacube.org/) or on the GIS Stack 13 | Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube) 14 | using the `open-data-cube` tag (you can view previously asked questions 15 | here: https://gis.stackexchange.com/questions/tagged/open-data-cube). 16 | 17 | If you would like to report an issue with this script, file one on 18 | Github: https://github.com/GeoscienceAustralia/dea-notebooks/issues/new 19 | 20 | Functions included: 21 | get_waterbody 22 | get_waterbodies 23 | get_geohashes 24 | get_time_series 25 | 26 | Last modified: November 2020 27 | """ 28 | 29 | import geopandas as gpd 30 | from owslib.wfs import WebFeatureService 31 | from owslib.fes import PropertyIsEqualTo 32 | from owslib.etree import etree 33 | import pandas as pd 34 | 35 | WFS_ADDRESS = "https://geoserver.dea.ga.gov.au/geoserver/wfs" 36 | 37 | 38 | def get_waterbody(geohash: str) -> gpd.GeoDataFrame: 39 | """Gets a waterbody polygon and metadata by geohash. 40 | 41 | Parameters 42 | ---------- 43 | geohash : str 44 | The geohash/UID for a waterbody in DEA Waterbodies. 45 | 46 | Returns 47 | ------- 48 | gpd.GeoDataFrame 49 | A GeoDataFrame with the polygon. 
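
    Examples
    --------
    A minimal usage sketch; the geohash below is a hypothetical
    placeholder, not a real waterbody UID::

        wb = get_waterbody('r3dp84s8n')  # hypothetical UID
        wb.plot()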
50 | """ 51 | wfs = WebFeatureService(url=WFS_ADDRESS, version="1.1.0") 52 | filter_ = PropertyIsEqualTo(propertyname="uid", literal=geohash) 53 | filterxml = etree.tostring(filter_.toXML()).decode("utf-8") 54 | response = wfs.getfeature( 55 | typename="DigitalEarthAustraliaWaterbodies", 56 | filter=filterxml, 57 | outputFormat="json", 58 | ) 59 | wb_gpd = gpd.read_file(response) 60 | return wb_gpd 61 | 62 | 63 | def get_waterbodies(bbox: tuple, crs="EPSG:4326") -> gpd.GeoDataFrame: 64 | """Gets the polygons and metadata for multiple waterbodies by bbox. 65 | 66 | Parameters 67 | ---------- 68 | bbox : (xmin, ymin, xmax, ymax) 69 | Bounding box. 70 | crs : str 71 | Optional CRS for the bounding box. 72 | 73 | Returns 74 | ------- 75 | gpd.GeoDataFrame 76 | A GeoDataFrame with the polygons and metadata. 77 | """ 78 | wfs = WebFeatureService(url=WFS_ADDRESS, version="1.1.0") 79 | response = wfs.getfeature( 80 | typename="DigitalEarthAustraliaWaterbodies", 81 | bbox=tuple(bbox) + (crs,), 82 | outputFormat="json", 83 | ) 84 | wb_gpd = gpd.read_file(response) 85 | return wb_gpd 86 | 87 | 88 | def get_geohashes(bbox: tuple = None, crs: str = "EPSG:4326") -> [str]: 89 | """Gets all waterbody geohashes. 90 | 91 | Parameters 92 | ---------- 93 | bbox : (xmin, ymin, xmax, ymax) 94 | Optional bounding box. 95 | crs : str 96 | Optional CRS for the bounding box. 97 | 98 | Returns 99 | ------- 100 | [str] 101 | A list of geohashes. 102 | """ 103 | wfs = WebFeatureService(url=WFS_ADDRESS, version="1.1.0") 104 | if bbox is not None: 105 | bbox = tuple(bbox) + (crs,) 106 | response = wfs.getfeature( 107 | typename="DigitalEarthAustraliaWaterbodies", 108 | propertyname="uid", 109 | outputFormat="json", 110 | bbox=bbox, 111 | ) 112 | wb_gpd = gpd.read_file(response) 113 | return list(wb_gpd["uid"]) 114 | 115 | 116 | def get_time_series(geohash: str = None, waterbody: pd.Series = None) -> pd.DataFrame: 117 | """Gets the time series for a waterbody. Specify either a GeoDataFrame row or a geohash. 118 | 119 | Parameters 120 | ---------- 121 | geohash : str 122 | The geohash/UID for a waterbody in DEA Waterbodies. 123 | waterbody : pd.Series 124 | One row of a GeoDataFrame representing a waterbody. 125 | 126 | Returns 127 | ------- 128 | pd.DataFrame 129 | A time series for the waterbody. 130 | """ 131 | if waterbody is not None and geohash is not None: 132 | raise ValueError("One of waterbody and geohash must be None") 133 | if waterbody is None and geohash is None: 134 | raise ValueError("One of waterbody and geohash must be specified") 135 | 136 | if geohash is not None: 137 | wb = get_waterbody(geohash) 138 | url = wb.timeseries[0] 139 | else: 140 | url = waterbody.timeseries 141 | wb_timeseries = pd.read_csv(url) 142 | # Tidy up the dataframe. 143 | wb_timeseries.dropna(inplace=True) 144 | wb_timeseries.columns = ["date", "pc_wet", "px_wet"] 145 | wb_timeseries = wb_timeseries.set_index("date") 146 | wb_timeseries.index = pd.to_datetime(wb_timeseries.index) 147 | return wb_timeseries 148 | -------------------------------------------------------------------------------- /dea_tools/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Note: To use the 'upload' functionality of this file, you must: 4 | # $ pipenv install twine --dev 5 | 6 | import io 7 | import os 8 | import sys 9 | from shutil import rmtree 10 | 11 | from setuptools import find_packages, setup, Command 12 | 13 | # Package meta-data. 
14 | NAME = 'dea-tools' 15 | DESCRIPTION = 'Functions and algorithms for analysing Digital Earth Australia data.' 16 | URL = 'https://github.com/GeoscienceAustralia/dea-notebooks' 17 | EMAIL = 'dea@ga.gov.au' 18 | AUTHOR = 'Geoscience Australia' 19 | REQUIRES_PYTHON = '>=3.6.0' 20 | VERSION = '0.1.0' 21 | 22 | # Where are we? 23 | IS_SANDBOX = os.getenv('JUPYTER_IMAGE', default='').startswith('geoscienceaustralia/sandbox') 24 | IS_NCI = 'dea-env' in os.getenv('LOADEDMODULES_modshare', default='') 25 | IS_DEA = IS_NCI or IS_SANDBOX 26 | 27 | # What packages are required for this module to be executed? 28 | # These are all on the Sandbox/NCI so shouldn't need installing on those platforms. 29 | REQUIRED = [ 30 | # bom 31 | 'ciso8601', 32 | 'pytz', 33 | 'requests', 34 | 'lxml', 35 | # classification 36 | 'numpy', 37 | 'xarray', 38 | 'geopandas', 39 | 'datacube', 40 | 'tqdm', 41 | 'dask', 42 | 'rasterio', 43 | 'scikit-learn', 44 | # coastal 45 | 'matplotlib', 46 | 'pandas', 47 | 'scipy', 48 | # 'otps', # Hard to install, but available on Sandbox and NCI 49 | # datahandling 50 | 'GDAL', 51 | 'odc-ui', 52 | 'numexpr', 53 | # plotting 54 | 'folium', 55 | 'pyproj', 56 | 'branca', 57 | 'shapely', 58 | 'scikit-image', 59 | # climate 60 | 'python-dateutil', 61 | # waterbodies 62 | 'OWSLib', 63 | ] 64 | 65 | # What packages are optional? 66 | EXTRAS = { 67 | 'jupyter': ['IPython', 'ipywidgets', 'ipyleaflet'], 68 | 'boto': ['boto3'], 69 | } 70 | 71 | # The rest you shouldn't have to touch too much :) 72 | # ------------------------------------------------ 73 | # Except, perhaps the License and Trove Classifiers! 74 | # If you do change the License, remember to change the Trove Classifier for that! 75 | 76 | here = os.path.abspath(os.path.dirname(__file__)) 77 | 78 | # Import the README and use it as the long-description. 79 | # Note: this will only work if 'README.md' is present in your MANIFEST.in file! 80 | try: 81 | with io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f: 82 | long_description = '\n' + f.read() 83 | except FileNotFoundError: 84 | long_description = DESCRIPTION 85 | 86 | # Load the package's __version__.py module as a dictionary. 87 | about = {} 88 | if not VERSION: 89 | project_slug = NAME.lower().replace("-", "_").replace(" ", "_") 90 | with open(os.path.join(here, project_slug, '__version__.py')) as f: 91 | exec(f.read(), about) 92 | else: 93 | about['__version__'] = VERSION 94 | 95 | 96 | class UploadCommand(Command): 97 | """Support setup.py upload.""" 98 | 99 | description = 'Build and publish the package.' 
100 |     user_options = []
101 |
102 |     @staticmethod
103 |     def status(s):
104 |         """Prints things in bold."""
105 |         print('\033[1m{0}\033[0m'.format(s))
106 |
107 |     def initialize_options(self):
108 |         pass
109 |
110 |     def finalize_options(self):
111 |         pass
112 |
113 |     def run(self):
114 |         try:
115 |             self.status('Removing previous builds…')
116 |             rmtree(os.path.join(here, 'dist'))
117 |         except OSError:
118 |             pass
119 |
120 |         self.status('Building Source and Wheel (universal) distribution…')
121 |         os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
122 |
123 |         self.status('Uploading the package to PyPI via Twine…')
124 |         os.system('twine upload dist/*')
125 |
126 |         self.status('Pushing git tags…')
127 |         os.system('git tag v{0}'.format(about['__version__']))
128 |         os.system('git push --tags')
129 |
130 |         sys.exit()
131 |
132 |
133 | # Where the magic happens:
134 | setup(
135 |     name=NAME,
136 |     version=about['__version__'],
137 |     description=DESCRIPTION,
138 |     long_description=long_description,
139 |     long_description_content_type='text/x-rst',  # the long description comes from README.rst
140 |     author=AUTHOR,
141 |     author_email=EMAIL,
142 |     python_requires=REQUIRES_PYTHON,
143 |     url=URL,
144 |     packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
145 |
146 |     # entry_points={
147 |     #     'console_scripts': ['mycli=mymodule:cli'],
148 |     # },
149 |     install_requires=REQUIRED if not IS_DEA else [],
150 |     extras_require=EXTRAS if not IS_DEA else {k: [] for k in EXTRAS},
151 |     include_package_data=True,
152 |     license='Apache License 2.0',
153 |     classifiers=[
154 |         # Trove classifiers
155 |         # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
156 |         'License :: OSI Approved :: Apache Software License',
157 |         'Development Status :: 3 - Alpha',
158 |         'Intended Audience :: Science/Research',
159 |         'Topic :: Scientific/Engineering :: GIS',
160 |         'Programming Language :: Python',
161 |         'Programming Language :: Python :: 3',
162 |         'Programming Language :: Python :: 3.6',
163 |     ],
164 |     # $ setup.py publish support.
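    # (Usage sketch) with the cmdclass mapping below, the publish command
    # defined above is invoked as:
    #     python3 setup.py upload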
165 | cmdclass={ 166 | 'upload': UploadCommand, 167 | }, 168 | ) 169 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ceos-seo/data_cube_utilities/1979c2a160f2ad306582c60f9ad6be67dfc353ab/test/__init__.py -------------------------------------------------------------------------------- /test/test_data_access_api.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from data_cube_utilities.data_access_api import DataAccessApi 3 | 4 | from datetime import datetime 5 | import xarray as xr 6 | import numpy as np 7 | 8 | 9 | class TestDataAccessApi(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.dc_api = DataAccessApi(config="/home/localuser/Datacube/data_cube_ui/config/.datacube.conf") 13 | 14 | def tearDown(self): 15 | self.dc_api.close() 16 | 17 | def test_get_dataset_by_extent(self): 18 | product = 'ls7_ledaps_meta_river' 19 | fake_product = 'fake1' 20 | kwargs = { 21 | 'time': (datetime(2015, 1, 1), datetime(2015, 3, 1)), 22 | 'longitude': (-71.6, -71.5), 23 | 'latitude': (4.5, 4.6), 24 | 'measurements': ['red', 'green', 'blue'] 25 | } 26 | data = self.dc_api.get_dataset_by_extent(product, **kwargs) 27 | fake_data = self.dc_api.get_dataset_by_extent(fake_product, **kwargs) 28 | 29 | self.assertTrue(type(data) == xr.Dataset) 30 | self.assertFalse(fake_data) 31 | self.assertIn('red', data) 32 | 33 | def test_get_stacked_datasets_by_extent(self): 34 | products = ['ls7_ledaps_meta_river', 'ls8_ledaps_meta_river'] 35 | fake_products = ['fake1', 'fake2'] 36 | kwargs = { 37 | 'time': (datetime(2015, 1, 1), datetime(2015, 3, 1)), 38 | 'longitude': (-71.6, -71.5), 39 | 'latitude': (4.5, 4.6), 40 | 'measurements': ['red', 'green', 'blue'] 41 | } 42 | data = self.dc_api.get_stacked_datasets_by_extent(products, **kwargs) 43 | fake_data = self.dc_api.get_stacked_datasets_by_extent(fake_products, **kwargs) 44 | 45 | self.assertIsNone(fake_data) 46 | self.assertIn('red', data) 47 | self.assertIn('satellite', data) 48 | self.assertTrue(type(data) == xr.Dataset) 49 | 50 | def test_get_query_metadata(self): 51 | faked_data = self.dc_api.get_datacube_metadata('ls7_collections_sr_scene_fake') 52 | self.assertTrue(faked_data['scene_count'] == 0) 53 | 54 | datacube_data = self.dc_api.get_datacube_metadata('ls7_ledaps_ghana') 55 | expected_contents = ['time_extents', 'lat_extents', 'lon_extents', 'tile_count', 'pixel_count'] 56 | for var in expected_contents: 57 | self.assertIn(var, datacube_data) 58 | 59 | self.assertTrue(type(datacube_data['time_extents'][0]) == datetime) 60 | self.assertTrue(type(datacube_data['lat_extents'][0]) == float) 61 | 62 | def test_list_acquisition_dates(self): 63 | faked_dates = self.dc_api.list_acquisition_dates('fake1') 64 | self.assertFalse(faked_dates) 65 | 66 | ghana_dates = self.dc_api.list_acquisition_dates('ls7_ledaps_ghana') 67 | self.assertTrue(len(ghana_dates) > 0) 68 | self.assertTrue(type(ghana_dates[0]) == datetime) 69 | 70 | def test_list_combined_acquisition_dates(self): 71 | faked_dates = self.dc_api.list_combined_acquisition_dates(['fake1', 'fake2']) 72 | self.assertFalse(faked_dates) 73 | 74 | ghana_dates = self.dc_api.list_combined_acquisition_dates(['ls7_ledaps_ghana']) 75 | tonga_dates = self.dc_api.list_combined_acquisition_dates(['ls7_ledaps_tonga']) 76 | combined_date_list = 
self.dc_api.list_combined_acquisition_dates(['ls7_ledaps_tonga', 'ls7_ledaps_ghana']) 77 | combined_date_list_with_fake = self.dc_api.list_combined_acquisition_dates( 78 | ['ls7_ledaps_tonga', 'ls7_ledaps_ghana', 'fake1']) 79 | 80 | self.assertTrue(len(ghana_dates) > 0) 81 | self.assertTrue(type(ghana_dates[0]) == datetime) 82 | self.assertTrue(len(ghana_dates) + len(tonga_dates) == len(combined_date_list)) 83 | self.assertTrue(len(combined_date_list) == len(combined_date_list_with_fake)) 84 | 85 | def test_get_full_dataset_extent(self): 86 | faked_data = self.dc_api.get_full_dataset_extent('ls7_collections_sr_scene_fake') 87 | self.assertTrue(len(faked_data) == 0) 88 | 89 | datacube_data = self.dc_api.get_full_dataset_extent('ls7_ledaps_ghana') 90 | expected_contents = ['time', 'latitude', 'longitude'] 91 | for var in expected_contents: 92 | self.assertIn(var, datacube_data) 93 | self.assertTrue(type(datacube_data[var]) == xr.DataArray) 94 | 95 | def test_get_datacube_metadata(self): 96 | faked_data = self.dc_api.get_datacube_metadata('ls7_collections_sr_scene_fake') 97 | self.assertTrue(faked_data['scene_count'] == 0) 98 | 99 | datacube_data = self.dc_api.get_datacube_metadata('ls7_ledaps_ghana') 100 | expected_contents = ['time_extents', 'lat_extents', 'lon_extents', 'tile_count', 'pixel_count'] 101 | for var in expected_contents: 102 | self.assertIn(var, datacube_data) 103 | 104 | self.assertTrue(type(datacube_data['time_extents'][0]) == datetime) 105 | self.assertTrue(type(datacube_data['lat_extents'][0]) == float) 106 | 107 | def test_validate_measurements(self): 108 | self.assertTrue( 109 | self.dc_api.validate_measurements('ls7_collections_sr_scene', ['sr_band1', 'sr_band2', 'sr_band3'])) 110 | self.assertFalse( 111 | self.dc_api.validate_measurements('ls7_collections_sr_scene', ['not', 'valid', 'measurements'])) 112 | self.assertFalse( 113 | self.dc_api.validate_measurements('ls7_collections_sr_scene_fake', ['sr_band1', 'sr_band2', 'sr_band3'])) 114 | -------------------------------------------------------------------------------- /test/test_dc_ccd.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_ccd 4 | 5 | 6 | class TestCCD(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_process_pixel(self): 15 | pass 16 | 17 | def test_process_xarray(self): 18 | pass 19 | -------------------------------------------------------------------------------- /test/test_dc_chunker.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from datetime import datetime 4 | from data_cube_utilities import dc_chunker 5 | import xarray as xr 6 | import numpy as np 7 | 8 | 9 | class TestChunker(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.negative_to_positive = (-1, 1) 13 | self.positive_to_negative = (1, -1) 14 | self.dates = [ 15 | datetime(2005, 1, 1), datetime(2006, 1, 1), datetime(2007, 5, 3), datetime(2014, 2, 1), datetime(2000, 1, 1) 16 | ] 17 | 18 | def tearDown(self): 19 | pass 20 | 21 | def test_create_geographic_chunks(self): 22 | with self.assertRaises(AssertionError): 23 | dc_chunker.create_geographic_chunks(longitude=self.positive_to_negative, latitude=self.positive_to_negative) 24 | with self.assertRaises(AssertionError): 25 | dc_chunker.create_geographic_chunks(longitude=(0, 1, 2), latitude=self.negative_to_positive) 26 | 27 | geographic_chunk_data = 
dc_chunker.create_geographic_chunks( 28 | longitude=self.negative_to_positive, latitude=self.negative_to_positive, geographic_chunk_size=0.1) 29 | 30 | self.assertTrue(len(geographic_chunk_data) == 40) 31 | for geographic_chunk in geographic_chunk_data: 32 | self.assertTrue(geographic_chunk['longitude'] == self.negative_to_positive) 33 | self.assertTrue(geographic_chunk['latitude'][0] >= self.negative_to_positive[0]) 34 | self.assertTrue(geographic_chunk['latitude'][1] <= self.negative_to_positive[1]) 35 | 36 | self.assertTrue(geographic_chunk_data[0]['latitude'][0] == self.negative_to_positive[0]) 37 | self.assertTrue(geographic_chunk_data[-1]['latitude'][1] == self.negative_to_positive[1]) 38 | 39 | def test_combine_geographic_chunks(self): 40 | longitude_values = list(range(0, 10, 1)) 41 | latitude_ranges = [list(range(x * 10, x * 10 + 11, 1)) for x in range(10)] 42 | 43 | dataset_chunks = [ 44 | xr.Dataset( 45 | { 46 | 'test_data': (('latitude', 'longitude'), np.ones((len(latitude_values), len(longitude_values)))) 47 | }, 48 | coords={'latitude': latitude_values, 49 | 'longitude': longitude_values}) for latitude_values in latitude_ranges 50 | ] 51 | 52 | combined_data = dc_chunker.combine_geographic_chunks(dataset_chunks) 53 | 54 | self.assertTrue(len(combined_data.latitude) == 101) 55 | self.assertTrue(len(combined_data.longitude) == 10) 56 | self.assertTrue(combined_data.test_data.values.shape == (101, 10)) 57 | 58 | def test_create_time_chunks(self): 59 | date_groups = dc_chunker.create_time_chunks(self.dates, time_chunk_size=2) 60 | self.assertTrue(len(date_groups) == 3) 61 | self.assertTrue(date_groups[0][0] == min(self.dates)) 62 | self.assertTrue(date_groups[-1][-1] == max(self.dates)) 63 | 64 | date_groups = dc_chunker.create_time_chunks(self.dates, time_chunk_size=10) 65 | self.assertTrue(len(date_groups) == 1) 66 | 67 | date_groups = dc_chunker.create_time_chunks(self.dates, _reversed=True, time_chunk_size=2) 68 | self.assertTrue(date_groups[0][0] == max(self.dates)) 69 | self.assertTrue(date_groups[-1][-1] == min(self.dates)) 70 | 71 | def test_group_datetimes_by_year(self): 72 | date_groups = dc_chunker.group_datetimes_by_year(self.dates) 73 | self.assertTrue(len(date_groups.keys()) == 5) 74 | 75 | for key in date_groups: 76 | self.assertTrue(len(date_groups[key]) == 1) 77 | 78 | def test_group_datetimes_by_month(self): 79 | date_groups = dc_chunker.group_datetimes_by_month(self.dates) 80 | self.assertTrue(len(date_groups.keys()) == 3) 81 | self.assertTrue(len(date_groups[1]) == 3) 82 | 83 | date_groups = dc_chunker.group_datetimes_by_month(self.dates, months=[2, 5]) 84 | self.assertTrue(len(date_groups.keys()) == 2) 85 | 86 | date_groups = dc_chunker.group_datetimes_by_month(self.dates, months=[]) 87 | self.assertFalse(date_groups) 88 | 89 | def test_generate_baseline(self): 90 | baseline_iterable = sorted(self.dates) 91 | 92 | baseline = dc_chunker.generate_baseline(baseline_iterable, window_length=10) 93 | self.assertTrue(len(baseline) == 1) 94 | self.assertTrue(len(baseline[0]) == 5) 95 | 96 | baseline = dc_chunker.generate_baseline(baseline_iterable, window_length=2) 97 | self.assertTrue(len(baseline) == 3) 98 | self.assertTrue(len(baseline[0]) == 3) 99 | -------------------------------------------------------------------------------- /test/test_dc_clustering.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_clustering 4 | 5 | 6 | class TestClustering(unittest.TestCase): 7 
| 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_kmeans_cluster_dataset(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_coastal_change.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_coastal_change 4 | 5 | 6 | class TestCoastalChange(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_compute_coastal_change(self): 15 | pass 16 | 17 | def test_mask_mosaic_with_coastlines(self): 18 | pass 19 | 20 | def test_mask_mosaic_with_coastal_change(self): 21 | pass 22 | -------------------------------------------------------------------------------- /test/test_dc_fractional_coverage_classifier.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_fractional_coverage_classifier 4 | 5 | 6 | class TestFractionalCover(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_frac_coverage_classify(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_ndvi_anomaly.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_ndvi_anomaly 4 | 5 | 6 | class TestNDVIAnomaly(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_sar_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities.dc_sar_utils import dn_to_db 4 | 5 | 6 | class TestSARUtils(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_dn_to_db(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_slip.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_slip 4 | 5 | 6 | class TestSlip(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_utilities.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | import xarray as xr 5 | from datetime import datetime 6 | 7 | from data_cube_utilities import dc_utilities 8 | 9 | 10 | class TestDCUtilities(unittest.TestCase): 11 | 12 | def setUp(self): 13 | # yapf: disable 14 | 15 | self.times = [ 16 | datetime(1999, 5, 6), 17 | datetime(2006, 1, 2), 18 | datetime(2006, 1, 16), 19 | datetime(2015, 12, 31), 20 | datetime(2016, 1, 1), 21 | ] 22 | 23 | self.latitudes = [1, 2] 24 | self.longitudes = [1, 2] 25 | 26 | self.sample_data = np.array([[[1, 1], [1, 1]], 27 | [[2, 2], [2, 2]], 28 | [[3, 3], [3, 3]], 29 | [[0, 0], [0, 0]], 30 | [[5, 5], [5, 5]]]) 31 | # yapf: enable 32 | 33 | def tearDown(self): 34 | pass 35 | 36 | def test_create_cfmask_clean_mask(self): 37 | dataset = xr.Dataset( 38 | { 39 | 'cf_mask': (('time', 
'latitude', 'longitude'), self.sample_data) 40 | }, 41 | coords={'time': self.times, 42 | 'latitude': self.latitudes, 43 | 'longitude': self.longitudes}) 44 | 45 | cf_mask = dc_utilities.create_cfmask_clean_mask(dataset.cf_mask) 46 | 47 | self.assertTrue((cf_mask == np.array([[[True, True], [True, True]], [[False, False], [False, False]], 48 | [[False, False], [False, False]], [[True, True], [True, True]], 49 | [[False, False], [False, False]]])).all()) 50 | 51 | def test_perform_timeseries_analysis(self): 52 | pass 53 | 54 | # def test_nan_to_num(self): 55 | # dataset = xr.Dataset( 56 | # { 57 | # 'data': (('time', 'latitude', 'longitude'), self.sample_data) 58 | # }, 59 | # coords={'time': self.times, 60 | # 'latitude': self.latitudes, 61 | # 'longitude': self.longitudes}) 62 | 63 | # dataset_nan = dataset.where(dataset.data > 2) 64 | 65 | # dc_utilities.nan_to_num(dataset_nan, -9999) 66 | 67 | # self.assertTrue((dataset_nan.data.values == np.array( 68 | # [[[-9999, -9999], [-9999, -9999]], [[-9999, -9999], [-9999, -9999]], [[3, 3], [3, 3]], 69 | # [[-9999, -9999], [-9999, -9999]], [[5, 5], [5, 5]]])).all()) 70 | 71 | def test_clear_attrs(self): 72 | dataset = xr.Dataset( 73 | { 74 | 'data': (('time', 'latitude', 'longitude'), self.sample_data) 75 | }, 76 | coords={'time': self.times, 77 | 'latitude': self.latitudes, 78 | 'longitude': self.longitudes}, 79 | attrs={'temp_attrs': 5}) 80 | 81 | dc_utilities.clear_attrs(dataset) 82 | 83 | self.assertTrue(not dataset.attrs) 84 | 85 | def test_create_bit_mask(self): 86 | pass 87 | 88 | def test_add_timestamp_data_to_xr(self): 89 | pass 90 | 91 | def test_write_geotiff_from_xr(self): 92 | pass 93 | 94 | def test_write_png_from_xr(self): 95 | pass 96 | 97 | def test_write_single_band_png_from_xr(self): 98 | pass 99 | 100 | def test_get_transform_from_xr(self): 101 | pass 102 | 103 | def test_chunks(self): 104 | pass 105 | -------------------------------------------------------------------------------- /test/test_dc_water_classifier.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_water_classifier 4 | 5 | 6 | class TestWaterClass(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_(self): 15 | pass 16 | -------------------------------------------------------------------------------- /test/test_dc_water_quality.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from data_cube_utilities import dc_water_quality 4 | 5 | 6 | class TestTSM(unittest.TestCase): 7 | 8 | def setUp(self): 9 | pass 10 | 11 | def tearDown(self): 12 | pass 13 | 14 | def test_(self): 15 | pass 16 | --------------------------------------------------------------------------------
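A minimal sketch for running the unit-test suite above from the repository root, using only the standard library's `unittest`; note that several suites (e.g. test_data_access_api.py) also need a configured Data Cube connection to pass. The file name `run_tests.py` is hypothetical:

# run_tests.py -- discover and run the test suite (sketch; assumes repo root).
import unittest

if __name__ == '__main__':
    # Discover every test module in the test/ package and run it.
    suite = unittest.TestLoader().discover('test', pattern='test_*.py')
    unittest.TextTestRunner(verbosity=2).run(suite)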