├── images
├── dummy
├── LI_S2.jpg
├── NC_S2.jpg
├── logging.jpg
├── michael.jpg
├── LI_class.jpg
└── NC_class.jpg
├── LICENSE
├── README.md
└── Classification
├── Maanas_unsupervised.js
├── parking_lots.js
├── NC_Solar.js
└── classified.js
/images/dummy:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/images/LI_S2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/LI_S2.jpg
--------------------------------------------------------------------------------
/images/NC_S2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/NC_S2.jpg
--------------------------------------------------------------------------------
/images/logging.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/logging.jpg
--------------------------------------------------------------------------------
/images/michael.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/michael.jpg
--------------------------------------------------------------------------------
/images/LI_class.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/LI_class.jpg
--------------------------------------------------------------------------------
/images/NC_class.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mjevans26/GEE/master/images/NC_class.jpg
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 mjevans26
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Google Earth Engine for Conservation
4 |
5 | This repo contains example analyses conducted by Defenders of Wildlife using the Google Earth Engine platform.
6 |
7 | ## Automated Change Detection
8 |
9 | We have developed several publicly available apps using the Google Earth Engine App Developer focusing on automatically mapping and quantifying changes to the landscape over time. The first of these assesses deforestation due to [Logging over time on Prince of Wales Island in the Tongass National Forest](https://defendersofwildlifegis.users.earthengine.app/view/powdeforestation)
10 |
11 |
12 |
13 |
14 |
15 | Another example measured coastal habitat loss for several Threatened and Endangered species following [Hurricane Michael](https://defendersofwildlifegis.users.earthengine.app/view/hurricanemichael)
16 |
17 |
18 |
19 |
20 |
21 | ## Landscape Classification
22 | ### Ground mounted solar panels in North Carolina
23 |
24 | The Nature Conservancy is interested in modelling the impact of the proliferation of ground-mounted solar energy development on wildlife habitat and connectivity in North Carolina.
25 |
26 | We used Sentinel-2 multispectral imagery and Sentinel-1 SAR data to classify the state into Forest, Ag/Pasture, Impervious, Water, and Solar. Using a random forest classifier, we achieved a Kappa accuracy of 0.84. In the images below, black areas are the places our model identified as solar panels.
27 |
28 |
29 |
30 |
31 |
32 |
33 | ### Parking lots on Long Island
34 |
35 | Long Island is exploring options for developing utility scale renewable energy as New York State strives to achieve its goal of 50% renewable energy by 2030. In order to promote renewable energy development and habitat conservation, we were interested in finding low-impact sites for solar development on Long Island - and parking lots present an ideal case. However, a comprehensive map of parking lots across the entire Island is currently unavailable.
36 |
37 | In this analysis we again used a random forest classifier applied to Sentinel-1 and Sentinel-2 data.
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/Classification/Maanas_unsupervised.js:
--------------------------------------------------------------------------------
//Define Imports
// Area of interest around Maanas park (hand-digitized polygon; coordinates
// are lon/lat in WGS84).
var geometry =
    /* color: #d63000 */
    /* shown: false */
    ee.Geometry.Polygon(
        [[[90.54414589375006, 26.815995087871755],
          [90.5468924757813, 26.785963735625458],
          [90.59152443378912, 26.75101930485026],
          [90.57847816914068, 26.724650605025396],
          [90.56749184101568, 26.653485968662928],
          [90.8819754835938, 26.63998421664039],
          [90.8984549757813, 26.634460312526723],
          [90.91356117695318, 26.627094691558753],
          [90.969179463086, 26.62218401382214],
          [90.99389870136724, 26.64428040181993],
          [91.00969154804693, 26.636915413999333],
          [91.02823097675787, 26.663918047034365],
          [91.04265053242193, 26.673122030060668],
          [91.06393654316412, 26.67005411823117],
          [91.10032875507818, 26.706863612328252],
          [91.14084084003912, 26.701342946146017],
          [91.155947041211, 26.74427440545754],
          [91.16418678730474, 26.74611396316001],
          [91.17654640644537, 26.75960314343375],
          [91.23559792011724, 26.751632457678262],
          [91.24658586506746, 26.780135599830718],
          [91.26512367695318, 26.806802661323157],
          [91.23937586086004, 26.815405375760232],
          [91.15191341107595, 26.814193902667654],
          [91.10152534960662, 26.82275384543421],
          [90.88539952331928, 26.81394970199623]]])

//Bring in Sentinel-2 data
var S2 = ee.ImageCollection("COPERNICUS/S2");

//Load modules for masking clouds and modeling phenology
var Phenology = require('users/defendersofwildlifeGIS/Modules:Phenology')
var Clouds = require('users/defendersofwildlifeGIS/Modules:Clouds')

// One full year of imagery ending Nov 2019 (covers all four seasonal
// windows composited below).
var end = '2019-11-30'
var start = '2018-11-30'
var collection = S2.filterBounds(geometry).filterDate(start, end)
44 |
// Add the derived bands used for clustering: B4B2 (red+blue), lightness,
// NDVI, NDWI (green/NIR), a brightness index, SWIR-based NDWI, plus
// harmonic regressors from the Phenology module.
// Removed the unused 'nir' variable and the commented-out EVI2 line that
// referenced it.
function addVariables(img){
  var red = img.select('B4')
  var blue = img.select('B2')
  img = img.addBands(red.add(blue).rename('B4B2'))
  var rgb = img.select(['B4', 'B3', 'B2'])
  // Lightness: (max(R,G,B) + min(R,G,B)) / 2.
  var light = rgb.reduce(ee.Reducer.max()).add(rgb.reduce(ee.Reducer.min())).divide(2).rename('light')
  var ndvi = img.normalizedDifference(['B8', 'B4']).rename('ndvi')
  var ndwi = img.normalizedDifference(['B3', 'B8']).rename('ndwi')
  // Brightness index: contrast of (R+B) against green.
  var bi = img.normalizedDifference(['B4B2', 'B3']).rename('bi')
  var ndwi2 = img.normalizedDifference(['B8', 'B11']).rename('ndwi2')
  // Second argument is presumably the number of harmonic cycles per year —
  // confirm against the Phenology module.
  return Phenology.addHarmonics(img.addBands(ndvi).addBands(ndwi).addBands(bi).addBands(ndwi2).addBands(light), 1)
}
59 |
// Predictor bands used for seasonal composites and clustering.
var bands = ['ndvi', 'ndwi', 'bi', 'light']

// Derived variables + harmonic terms, then basic QA cloud masking.
var varsCollection = collection.map(addVariables).map(Clouds.basicQA);

// Seasonal median composites (winter / summer / monsoon / post-monsoon).
var winter = varsCollection.filterDate('2018-12-01', '2019-02-28').select(bands).median()
var summer = varsCollection.filterDate('2019-03-01', '2019-05-31').select(bands).median()
var monsoon = varsCollection.filterDate('2019-06-01', '2019-09-30').select(bands).median()
var postmonsoon = varsCollection.filterDate('2019-10-01', '2019-11-30').select(bands).median()
var seasons = winter.addBands(summer).addBands(monsoon).addBands(postmonsoon)

print(varsCollection.size())

// Regress the 4 index bands on [offset, time, cos, sin].
//Output of linear regression is a 2-band image ('coefficients','residuals')
var model = varsCollection.select(['offset', 'time', 'cos', 'sin', 'ndvi', 'ndwi', 'bi', 'light'])
  .reduce(ee.Reducer.linearRegression(4, 4));

// The coefficients array is 2D (numX=4 by numY=4); rows follow the
// predictor order selected above: 0=offset, 1=time, 2=cos, 3=sin.
var coeff = model.select('coefficients').toArray()

var ximage = coeff.arraySlice(0, 1, 2).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['x_ndvi', 'x_ndwi', 'x_bi', 'x_light']])
// FIX: band labels were swapped in the original — row 2 holds the 'cos'
// coefficients and row 3 the 'sin' coefficients, but they were flattened
// with the opposite names. Values are unchanged; only labels corrected.
var cosimage = coeff.arraySlice(0, 2, 3).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['cos_ndvi', 'cos_ndwi', 'cos_bi', 'cos_light']])
var sinimage = coeff.arraySlice(0, 3, 4).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['sin_ndvi', 'sin_ndwi', 'sin_bi', 'sin_light']])
var image = ximage.addBands(cosimage).addBands(sinimage).addBands(seasons).clip(geometry)

// Random pixel sample used to train the unsupervised clusterer.
var training = image.sample({
  region: geometry,
  scale: 10,
  numPixels: 10000,
  tileScale: 12
})
// X-means: selects between 2 and 15 clusters automatically.
var clusterer = ee.Clusterer.wekaXMeans(2, 15).train(training)
var result = image.cluster(clusterer, 'class')
// Focal mode smooths single-pixel speckle in the cluster map.
var output = result.focal_mode(1, 'square', 'pixels')
var vizParams = {palette: ['00A600','1DB000','3EBB00','63C600','8BD000','B6DB00','E6E600','E7CE1D','f2f2f2','003dea','dedede','edb694','efc2b3','f1d6d3','f2d12f'], min:0 , max:14}
Map.addLayer(varsCollection.median(), {bands:['B4', 'B3', 'B2'], min:250, max:2500}, 'image')
Map.addLayer(output, vizParams, 'class')

// Vectorize the smoothed clusters for export.
var vector = output.reduceToVectors({
  reducer: ee.Reducer.first(),
  geometry: geometry,
  scale: 10,
  maxPixels: 1e13,
  tileScale: 12
})

Export.table.toDrive({
  collection: vector,
  description: 'Maanas_unsupervised',
  fileFormat: 'KML'
})
110 |
--------------------------------------------------------------------------------
/Classification/parking_lots.js:
--------------------------------------------------------------------------------
//SET UP FEATURES FOR SAMPLING TRAINING DATA
// NOTE(review): 'plots', 'bldgs', 'rds', 'LI', 'geometry', 'S2', 'sentinel1'
// and 'NLCD' appear to be Code Editor imports defined outside this file.
// Label training polygons: 1 = parking lot, 2 = building, 3 = road.
plots = ee.FeatureCollection(plots).map(function(ft){return ft.set('landcover', 1)});
bldgs = ee.FeatureCollection(bldgs).map(function(ft){return ft.set('landcover', 2)});
// Buffer road centerlines 10 m to capture their width, then rasterize.
rds = ee.FeatureCollection(rds).map(function(ft){return ft.set('landcover', 3).buffer(10)});
rds = rds.reduceToImage(['landcover'], ee.Reducer.first());
// Dissolved outer boundary of Long Island (10 m max error).
var outbnd = ee.FeatureCollection(LI).union(10);

// Impervious training features: parking lots + buildings within the AOI.
var imperv = plots.merge(bldgs).filterBounds(geometry);
var doi = '2017-03-01'; // NOTE(review): unused in the visible code.
var today = ee.Date(Date.now());
// Centroids of parking lots with AREA > 10 (units per the attribute table).
var centroids = plots.filter(ee.Filter.gt('AREA', 10)).map(function(ft){return ft.centroid()});
var random = ee.FeatureCollection.randomPoints(geometry, 1000);

Map.addLayer(imperv, {}, 'bldgs&rds');
//Map.addLayer(centroids, {}, 'centroids');
//Map.addLayer(random, {}, 'randpts');
17 |
18 | //get most recent not cloudy s2 image
19 |
20 | //.filterMetadata('CLOUDY_PIXEL_PERCENTAGE', 'less_than', 10)
21 | //.sort('system:time_start', false).first());
22 | /*
23 | var before = ee.Image(S2.filterDate('2016-07-01', '2016-08-31').sort('CLOUDY_PIXEL_PERCENTAGE', true).first());
24 | var after = ee.Image(S2.filterDate('2017-07-01', '2017-08-31').sort('CLOUDY_PIXEL_PERCENTAGE', true).first());
25 | */
26 |
// Sentinel-1 scenes with dual VV/VH polarization in IW mode.
var v = sentinel1
  // Filter to get images with VV and VH dual polarization.
  .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VH'))
  .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV'))
  // Filter to get images collected in interferometric wide swath mode.
  .filter(ee.Filter.eq('instrumentMode', 'IW'));

// NOTE(review): 'h' (HH/HV scenes) is never used below in this script.
var h = sentinel1
  // Filter to get images with HH and HV dual polarization.
  .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HH'))
  .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HV'))
  // Filter to get images collected in interferometric wide swath mode.
  .filter(ee.Filter.eq('instrumentMode', 'IW'));

// Filter to get images from different look angles.
var vhAscending = v.filter(ee.Filter.eq('orbitProperties_pass', 'ASCENDING'));
var vhDescending = v.filter(ee.Filter.eq('orbitProperties_pass', 'DESCENDING'));

// Early-summer 2018 windows for both orbit directions.
var ascend=vhAscending
  .filterDate('2018-05-01','2018-07-30');

var descend=vhDescending
  .filterDate('2018-05-01', '2018-07-30');
/*
var d_past=vhDescending
  .filterDate('2016-07-01','2016-08-31');

var d_current=vhDescending
  .filterDate('2017-07-01', '2017-08-31');
*/
// Mean composite of ascending + descending Sentinel-1 passes (VV, VH).
// FIX: ee.ImageCollection() takes a single argument, so the descending
// collection passed as a second argument was silently ignored and the
// composite used ascending passes only. merge() combines both orbit
// directions before averaging.
function adcomp(ascend, descend){
  var vv = ascend.select(['VV']).merge(descend.select(['VV'])).mean();
  var vh = ascend.select(['VH']).merge(descend.select(['VH'])).mean();
  return ee.Image.cat(vv, vh);
}
62 |
//var pastcomp = adcomp(a_past, d_past).clip(geometry);
// Mean VV/VH composite for the current period, clipped to Long Island.
var currentcomp = adcomp(ascend, descend).clip(LI);
65 |
66 | //Define functions to mask clouds for S2 and LS8 data
// Add the acquisition timestamp ('system:time_start') as a band so that
// qualityMosaic can select the most recent pixel.
// FIX: removed a stray 'i' that followed the opening brace in the original,
// which raised a ReferenceError whenever this function was called.
function add_time(img) {
  return img.addBands(img.metadata('system:time_start'));
}
70 |
// Composite an HH/HV Sentinel-1 collection over a region and append derived
// sum and ratio bands.
// type: 'recent' -> per-pixel most recent value; 'median' -> temporal median.
// NOTE: 'med' stays undefined (and the call fails) for any other 'type'.
function hprocess(img_col, region, type){
  var area = img_col.filterBounds(region);
  var time = area.map(add_time);
  var med;
  if (type == 'recent'){
    med = time.qualityMosaic('system:time_start')
          .clipToCollection(region);
  } else if (type == 'median'){
    med = time.median()
          .clipToCollection(region);
  }
  // Removed unused 'norm' and 'HHminHV' computations — their results were
  // never added to the returned image.
  // Band name 'HVpluHH' (sic) kept as-is; downstream code may select it.
  var sumh = med.select('HH').add(med.select('HV')).rename(['HVpluHH']);
  var ratioh = med.select('HH').divide(med.select('HV')).rename(['HHdivHV']);
  return med.addBands(ratioh).addBands(sumh);
}
89 |
// Composite a VV/VH Sentinel-1 collection over a region and append derived
// difference, sum, ratio and normalized-difference bands.
// type: 'recent' -> per-pixel most recent value; 'median' -> temporal median.
function vprocess(img_col, region, type){
  var stamped = img_col.filterBounds(region).map(add_time);
  var med;
  if (type == 'recent'){
    med = stamped.qualityMosaic('system:time_start').clip(region);
  } else if (type == 'median'){
    med = stamped.median().clip(region);
  }
  var diffv = med.select('VV').subtract(med.select('VH')).rename(['VVminVH']);
  var sumv = med.select('VV').add(med.select('VH')).rename(['VVpluVH']);
  var ratiov = med.select('VV').divide(med.select('VH')).rename(['VVdivVH']);
  // Normalized difference expressed from the bands computed above.
  var norm = diffv.divide(sumv).rename('norm');
  return med.addBands(ratiov).addBands(sumv).addBands(diffv).addBands(norm);
}
109 | /*
110 | var dpast = vprocess(d_past, geometry, 'recent');
111 | var dcurrent = vprocess(d_current, geometry, 'recent');
112 | var apast = vprocess(a_past, geometry, 'recent');
113 | var acurrent = vprocess(a_current, geometry, 'recent');
114 |
115 | //Map.addLayer(acurrent, {}, 'acurrent');
116 | //Map.addLayer(dcurrent, {}, 'dcurrent');
117 | //Map.addLayer(apast, {}, 'apast');
118 | //Map.addLayer(dpast, {}, 'dpast');
119 | //Map.addLayer(apast.subtract(acurrent), {}, 'adiff');
120 | //Map.addLayer(pastcomp.subtract(currentcomp), {}, 'compdiff');
121 | function cor(img){
122 | var imageA = img.select('VV');
123 | var imageB = img.select('VH');
124 | var correl = ee.Algorithms.CrossCorrelation(imageA, imageB,0,3);
125 | return correl.select(3);
126 | }
127 |
128 | */
129 |
130 | // This example uses the Sentinel-2 QA band to cloud mask
131 | // the collection. The Sentinel-2 cloud flags are less
132 | // selective, so the collection is also pre-filtered by the
133 | // CLOUDY_PIXEL_PERCENTAGE flag, to use only relatively
134 | // cloud-free granule.
135 |
136 | // Function to mask clouds using the Sentinel-2 QA band.
// Mask clouds in a Sentinel-2 image using the QA60 bitmask band, then
// rescale digital numbers to reflectance and keep the timestamp property.
function maskS2clouds(image) {
  var qa = image.select('QA60');

  // QA60 bit 10 flags opaque clouds; bit 11 flags cirrus.
  var cloudBitMask = 1 << 10;
  var cirrusBitMask = 1 << 11;

  // Clear pixels have both flag bits unset.
  var mask = qa.bitwiseAnd(cloudBitMask).eq(0)
      .and(qa.bitwiseAnd(cirrusBitMask).eq(0));

  return image.updateMask(mask).divide(10000)
      .copyProperties(image, ['system:time_start']);
}
152 |
// Select the six reflectance bands used for classification and append NDVI
// and a non-standard "ndsi" ((B2-B11)/(B2+B11) scaled by 1/B8); cast to
// float for TFRecord export compatibility.
// Removed the unused 'time' variable and the commented-out harmonic
// regressor lines that referenced it.
var addVariables = function(image) {
  return image.select(['B2', 'B3', 'B4', 'B8', 'B11', 'B12'])
    .addBands(image.normalizedDifference(['B8', 'B4']).rename('ndvi'))
    .addBands(image.normalizedDifference(['B2', 'B11']).divide(image.select(['B8'])).rename('ndsi'))
    .float();
};
167 |
// Map the function over one year of data and take the median.
// One year of Sentinel-2 imagery ending today, cloud-filtered and masked.
var collection = S2.filterDate(today.advance(-1, 'year'), today)
  // Pre-filter to get less cloudy granules.
  .filterBounds(LI)
  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
  .map(maskS2clouds)
  .map(addVariables);

// Least-cloudy single scene, displayed for visual reference only.
var first = ee.Image(collection.sort('CLOUDY_PIXEL_PERCENTAGE').first()).clip(LI);
Map.addLayer(first, {bands:['B4', 'B3', 'B2'], min:0.02, max:0.15}, 'S2');

// Quarterly (3-month) median composites: months 1-3, 4-6, 7-9, 10-12.
var months = ee.List.sequence(1,12,3).map(function(month){
  return collection.filter(ee.Filter.calendarRange(month, ee.Number(month).add(2), 'month')).median();
});
182 |
// iterate() accumulator: stack each quarterly composite onto the running image.
function combineBands(image, result){
  var stack = ee.Image(result);
  return stack.addBands(image);
}
186 |
// Start from a zero-band image and stack the four quarterly composites.
var empty = ee.Image().select();

var composite = ee.Image(months.iterate(combineBands, empty));

// Display the results.
// Full feature stack: 4 quarterly S2 composites + S1 VV/VH means.
var data = composite.addBands(currentcomp);//.clip(geometry);
//Map.addLayer(data, {}, 'data');
Map.addLayer(currentcomp, {bands:['VV', 'VH', 'VV'], min: -15, max: -3}, 'S1');
var nlcd = NLCD.select('landcover');//.clip(geometry);
//Map.addLayer(nlcd, {}, 'nlcd');

// Collapse NLCD classes: forest (41-43) -> 3; pasture/crops/developed-open
// (81, 82, 21) -> 4; open water (11) -> 5. Classes not listed are masked.
// NOTE(review): confirm grouping 21 (developed open space) with agriculture.
nlcd = nlcd.remap({
  from: [41, 42, 43, 81, 82, 21, 11],
  to: [3, 3, 3, 4, 4, 4, 5],
  bandName: 'landcover'
}).rename(['landcover']).addBands(data);
print(nlcd.bandNames());

Map.addLayer(nlcd, {}, 'ncld');
//nat = nlcd.where(nlcd.eq(41).or(nlcd.eq(42)).or(nlcd.eq(43)), ee.Image(3));
//bare = nlcd.where(nlcd.eq(81).or(nlcd.eq(82)), ee.Image(4));

// Sample the feature stack inside digitized impervious polygons
// (classes 1 = parking lots, 2 = buildings).
var imperv_ft = data.clip(geometry).sampleRegions({
  collection: imperv,
  properties: ['landcover'],
  tileScale: 16,
  scale: 30
});

// Stratified sample of the remapped NLCD classes (3-5) for natural covers.
var nat_ft = nlcd.stratifiedSample({
  numPoints: 5000,
  classBand: 'landcover',
  region: geometry,
  tileScale: 16,
  scale: 100
});
228 | var ft = nat_ft.merge(imperv_ft).randomColumn();
229 |
230 | var validation = ft.filter(ee.Filter.lt('random', 0.3));
231 | var training = ft.filter(ee.Filter.gte('random', 0.7));
232 |
233 | //tileScale explanation: specifies how big a tile to use to transfer data,
234 | //will slow things down, but avoid memory error
235 |
236 | //print(training.reduceColumns(ee.Reducer.frequencyHistogram(), ['landcover']));
237 |
238 | var classifier = ee.Classifier.randomForest(20).train(training, 'landcover', data.bandNames());
239 |
240 | //print(classifier.confusionMatrix().accuracy());
241 |
242 | var holdout = validation.classify(classifier).errorMatrix('landcover', 'classification');
243 | print(holdout.accuracy());
244 |
245 | var result = data.clip(LI).classify(classifier);
246 | Map.addLayer(result, {bands:'classification', palette:['black', 'white', 'green', 'yellow', 'blue'], min:1, max:5}, 'result');
247 |
248 | var segments = result.select('classification').eq(1)
249 | .where(rds.eq(3), 0)
250 | .focal_min(1, 'square', 'pixels', 2);//.focal_max(1, 'square', 'pixels', 2);//.focal_min(1, 'square', 'pixels');
251 | //var segments = ee.Algorithms.Image.Segmentation.SNIC(result, 10, 10, 4);
252 | Map.addLayer(output, {}, 'parking lots');
253 |
254 | var bands = ['B2', 'B3', 'B4', 'B8', 'B11', 'B12', 'ndvi', 'ndsi',
255 | 'B2_1', 'B3_1', 'B4_1', 'B8_1', 'B11_1', 'B12_1', 'ndvi_1', 'ndsi_1',
256 | 'B2_2', 'B3_2', 'B4_2', 'B8_2', 'B11_2', 'B12_2', 'ndvi_2', 'ndsi_2',
257 | 'B2_3', 'B3_3', 'B4_3', 'B8_3', 'B11_3', 'B12_3', 'ndvi_3', 'ndsi_3', 'VV', 'VH', 'landcover'];
258 |
259 | var outputFeatures = Array.from(bands);
260 |
261 | print(outputFeatures);
// Export train/test tables as TFRecord for off-platform (TensorFlow) use.
// 'link' is an arbitrary run identifier embedded in the export names.
var link = '91ebc2da1d448a5cc0008d9d1ce02f41';
var train_desc = 'tf_plot_train_' + link;
var test_desc = 'tf_plot_test_' + link;

Export.table.toDrive({
  collection: training,
  description: train_desc,
  fileFormat: 'TFRecord',
  selectors: outputFeatures
});

Export.table.toDrive({
  collection: validation,
  description: test_desc,
  fileFormat: 'TFRecord',
  selectors: outputFeatures
});


// Export the image to TFRecord format. Note:
// print(ee.Image(1).reduceRegion('count', exportRegion, 30)); // 5620989

var image_desc = 'tf_plot_image_' + link;
285 |
286 | // Already exported
287 | // Export.image.toCloudStorage({
288 | // image: image.select(bands),
289 | // description: image_desc,
290 | // scale: 30,
291 | // fileFormat: 'TFRecord',
292 | // bucket: 'nclinton-training-temp',
293 | // region: exportRegion,
294 | // formatOptions: {
295 | // 'patchDimensions': [256, 256],
296 | // maxFileSize: 104857600,
297 | // compressed: true,
298 | // },
299 | // });
300 |
// Vectorize the cleaned parking-lot mask and keep only class-1 polygons.
var polys = segments.reduceToVectors({
  geometry: outbnd,
  scale: 10,
  eightConnected: true,
  labelProperty: 'class',
  maxPixels: 1e11,
  geometryType: 'polygon'
}).filter(ee.Filter.eq('class', 1));

Map.addLayer(polys);

// Save the polygons as an EE asset and the raw classification to Drive.
Export.table.toAsset({
  collection: polys,
  description: "LI_plots_85a2518c78a1b01856574f802998e359",
})

Export.image.toDrive({
  image: result,
  description: "LI_class_c5cd47e574915f2d17af56300be04e6d",
  region: result.geometry(),
  scale:10,
  maxPixels: 1e11
});
324 |
--------------------------------------------------------------------------------
/Classification/NC_Solar.js:
--------------------------------------------------------------------------------
//1. SET UP FUNCTIONS FOR LATER
// Import cloud masking functions from defenders modules
// NOTE(review): 'S2', 'sentinel1', 'NLCD', 'table' and 'geometry' appear to
// be Code Editor imports defined outside this file.
var Clouds = require('users/defendersofwildlifeGIS/Modules:Clouds')
4 |
//Function to process S2 imagery masking for clouds, water and shadow
// Applies the module's basic QA mask and cloud score, then drops pixels
// with cloudScore > 25. The water and shadow masks are currently disabled
// (see the commented tail of the return statement), so their unused
// computations are commented out rather than evaluated for nothing.
function maskclouds(img){
  var clouds = Clouds.basicQA(img);
  clouds = Clouds.sentinelCloudScore(clouds);
  // Re-enable together with the commented portion of the return below:
  //var waterMask = Clouds.waterScore(img).select('waterScore').lte(0.5);
  //var shadowMask = img.select('B11').gt(900);
  return clouds.updateMask(clouds.select('cloudScore').lte(25));//.and(shadowMask).and(waterMask));
}
17 |
//Function to add derived variables to multispectral data
//TO DO pan sharpen swir1 & swir2 bands?
// Appends vegetation, soil, water and brightness indices to the six
// selected reflectance bands. Removed an unused 'n' (B8) variable and the
// stale commented-out expression() alternative for 'gli'.
var addVariables = function(image) {
  var r = image.select('B4');
  var b = image.select('B2');
  var g = image.select('B3');
  // Visible-band "NDVI": green vs red.
  var vndvi = image.normalizedDifference(['B3', 'B4']).rename('vndvi');
  // Green leaf index: (2G - R - B) / (2G + R + B).
  var gli = g.multiply(2).subtract(b).subtract(r).divide(g.multiply(2).add(r).add(b)).rename('gli');
  // max(R,G,B) + min(R,G,B) (cf. HSL lightness, without the /2).
  var lightness = r.max(b).max(g).add(r.min(g).min(b)).rename('light');
  // Weighted luma (0.21R + 0.72G + 0.07B).
  var luminosity = r.multiply(0.21).add(g.multiply(0.72).add(b.multiply(0.07))).rename('luminosity');
  // Mean of R, G, B; band keeps the reducer's default name.
  var rgbavg = image.select(['B4', 'B3', 'B2']).reduce(ee.Reducer.mean());
  var ndvi = image.normalizedDifference(['B8', 'B4']).rename('ndvi');
  // Non-standard "ndsi": (B2 - B11)/(B2 + B11), scaled by 1/B8.
  var ndsi = image.normalizedDifference(['B2', 'B11']).divide(image.select(['B8'])).rename('ndsi');
  var ndwi = image.normalizedDifference(['B8', 'B11']).rename('ndwi')
  return(image.select(['B2', 'B3', 'B4', 'B8', 'B11', 'B12'])
    .addBands([vndvi, ndvi, ndsi, ndwi, gli, lightness, luminosity, rgbavg]));
};
40 |
//Function to process horizontal polarization sentinel 1 data
// type: 'recent' -> per-pixel most recent value; 'median' -> temporal median.
// FIX: the original called add_time(), which is not defined anywhere in
// this script (it only exists in parking_lots.js), so any call raised a
// ReferenceError. The timestamp band is now added inline.
function hprocess(img_col, region, type){
  var area = img_col.filterBounds(region);
  var time = area.map(function(img){
    return img.addBands(img.metadata('system:time_start'));
  });
  var med;
  if (type == 'recent'){
    med = time.qualityMosaic('system:time_start')
          .clipToCollection(region);
  } else if (type == 'median'){
    med = time.median()
          .clipToCollection(region);
  }
  // Removed unused 'norm' and 'HHminHV' computations — never returned.
  // Band name 'HVpluHH' (sic) kept as-is for downstream compatibility.
  var sumh = med.select('HH').add(med.select('HV')).rename(['HVpluHH']);
  var ratioh = med.select('HH').divide(med.select('HV')).rename(['HHdivHV']);
  return med.addBands(ratioh).addBands(sumh);
}
//Function to select and process vertical polarization sentinel 1 data
// type: 'recent' -> per-pixel most recent value; 'median' -> temporal median.
// FIX: the original called add_time(), which is not defined anywhere in
// this script (it only exists in parking_lots.js), so any call raised a
// ReferenceError. The timestamp band is now added inline.
function vprocess(img_col, region, type){
  var area = img_col.filterBounds(region);
  var time = area.map(function(img){
    return img.addBands(img.metadata('system:time_start'));
  });
  var med;
  if (type == 'recent'){
    med = time.qualityMosaic('system:time_start')
          .clip(region);
  } else if (type == 'median'){
    med = time.median()
          .clip(region);
  }
  var diffv = med.select('VV').subtract(med.select('VH')).rename(['VVminVH']);
  var sumv = med.select('VV').add(med.select('VH')).rename(['VVpluVH']);
  var ratiov = med.select('VV').divide(med.select('VH')).rename(['VVdivVH']);
  var norm = diffv.divide(sumv).rename('norm');
  return med.addBands(ratiov).addBands(sumv).addBands(diffv).addBands(norm);
}
// Append a VV - VH difference band ('VVsubVH') to a Sentinel-1 image.
function vdiff(img){
  var difference = img.select(['VV']).subtract(img.select(['VH']));
  return img.addBands(difference.rename('VVsubVH'));
}
85 |
// Mean composite of ascending + descending Sentinel-1 passes: VV, VH and
// the VVsubVH band added by vdiff().
// FIX: ee.ImageCollection() takes a single argument, so the descending
// collection passed as a second argument was silently ignored and the
// composite used ascending passes only. merge() combines both orbit
// directions before averaging.
function adcomp(ascend, descend){
  var vv = ascend.select(['VV']).merge(descend.select(['VV'])).mean();
  var vh = ascend.select(['VH']).merge(descend.select(['VH'])).mean();
  var vvvh = ascend.select(['VVsubVH']).merge(descend.select(['VVsubVH'])).mean();
  return ee.Image.cat(vv, vh, vvvh);
}
92 |
// Build the full feature image for an arbitrary AOI: S2 median composite +
// per-band temporal variance + Sentinel-1 VV/VH/VVsubVH composite. Mirrors
// the top-level pipeline below but parameterized by 'aoi'.
// FIXES: 'colleciton' typo raised a ReferenceError; 'first', 'variance' and
// 's1comp' were implicit globals that clobbered the top-level variables of
// the same names — all are now locals. Removed the unused HH/HV filter.
function create_composite_img(aoi){
  var collection = S2.filterDate('2019-01-01','2019-04-28')
    .filterBounds(aoi)
    .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
    .map(maskclouds)
    .map(addVariables);

  var first = collection.median().clip(aoi);
  var variance = collection.reduce(ee.Reducer.variance()).clip(aoi);

  // Dual-pol VV/VH scenes in interferometric wide swath mode.
  var v = sentinel1
    .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VH'))
    .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV'))
    .filter(ee.Filter.eq('instrumentMode', 'IW'));

  var vhAscending = v.filter(ee.Filter.eq('orbitProperties_pass', 'ASCENDING'));
  var vhDescending = v.filter(ee.Filter.eq('orbitProperties_pass', 'DESCENDING'));

  var ascend = vhAscending
    .filterDate('2019-03-01','2019-04-28').filterBounds(aoi).map(vdiff);

  var descend = vhDescending
    .filterDate('2019-02-01','2019-04-28').filterBounds(aoi).map(vdiff);

  var s1comp = adcomp(ascend, descend).clip(aoi);

  return first.addBands(variance).addBands(s1comp);
}
129 |
130 | // 2. COLLECT AND CURATE IMAGERY
131 | //Collect and process sentinel 1 data
132 | var v = sentinel1
133 | // Filter to get images with VV and VH dual polarization.
134 | .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VH'))
135 | .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV'))
136 | // Filter to get images collected in interferometric wide swath mode.
137 | .filter(ee.Filter.eq('instrumentMode', 'IW'));
138 |
139 | var h = sentinel1
140 | // Filter to get images with HH and HV dual polarization.
141 | .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HH'))
142 | .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'HV'))
143 | // Filter to get images collected in interferometric wide swath mode.
144 | .filter(ee.Filter.eq('instrumentMode', 'IW'));
145 |
146 | // Filter to get images from different look angles.
147 | var vhAscending = v.filter(ee.Filter.eq('orbitProperties_pass', 'ASCENDING'));
148 | var vhDescending = v.filter(ee.Filter.eq('orbitProperties_pass', 'DESCENDING'));
149 |
150 | var ascend=vhAscending
151 | .filterDate('2019-03-01','2019-04-28').filterBounds(geometry).map(vdiff);
152 |
153 | var descend=vhDescending
154 | .filterDate('2019-02-01','2019-04-28').filterBounds(geometry).map(vdiff);
155 |
156 | var currentcomp = adcomp(ascend, descend).clip(geometry);
157 |
// Build a multi-month Sentinel-2 collection over the study area:
// filter clouds, mask remaining clouds, and append derived variables.
var collection = S2.filterDate('2019-01-01', '2019-04-28')
  .filterBounds(geometry)
  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
  .map(maskclouds)
  .map(addVariables);

// Per-pixel temporal variance across the collection.
var variance = collection.reduce(ee.Reducer.variance());

// Median composite, used for visualization and as base predictor bands.
var first = collection.median().clip(geometry);

// Full predictor stack: median bands + temporal variance + S1 composite.
var data = first.addBands(variance).addBands(currentcomp);
// Alternative features: ring-shaped neighborhood statistics.

// 2-pixel-radius ring kernel (5x5).
var ring2Outer = [1, 0, 1, 0, 1];
var ring2Zero = [0, 0, 0, 0, 0];
var ring2Mid = [1, 0, 0, 0, 1];
var kernel2 = ee.Kernel.fixed({
  width: 5,
  height: 5,
  weights: [ring2Outer, ring2Zero, ring2Mid, ring2Zero, ring2Outer]
})

// 4-pixel-radius ring kernel (9x9).
var ring4Outer = [1, 0, 0, 0, 1, 0, 0, 0, 1];
var ring4Zero = [0, 0, 0, 0, 0, 0, 0, 0, 0];
var kernel4 = ee.Kernel.fixed({
  width: 9,
  height: 9,
  weights: [ring4Outer, ring4Zero, ring4Zero, ring4Zero, ring4Outer,
            ring4Zero, ring4Zero, ring4Zero, ring4Outer]
})

// Mean and variance of the visible/NIR bands within a 3x3 window.
var neighborhood = first.select(['B2', 'B3', 'B4', 'B8']).reduceNeighborhood({
  reducer: ee.Reducer.mean().combine({
    reducer2: ee.Reducer.variance(),
    sharedInputs: true
  }),
  kernel: ee.Kernel.square(1, 'pixels')
});

// Sample those statistics at the two ring distances.
var neighborhood2 = neighborhood.neighborhoodToBands(kernel2)
var neighborhood4 = neighborhood.neighborhoodToBands(kernel4)

var rings = neighborhood2.addBands(neighborhood4)
209 |
// 3. SET UP FEATURES FOR SAMPLING TRAINING DATA

// Tag every solar-array polygon with landcover = 1.
var solar = ee.FeatureCollection(table).map(function(ft) {
  return ft.set('landcover', 1);
});
// Keep only arrays whose status is 'Established'.
var good_solar = solar.filterMetadata('Status', 'equals', 'Established');
// Shrink polygons by 50 m so samples avoid mixed edge pixels.
var solar_buff = good_solar.map(function(ft) { return ft.buffer(-50); });
// Polygon centroids (10 m max error).
var solar_cent = good_solar.map(function(ft) { return ft.centroid(10); });

// Remap NLCD landcover into broad categories (codes 2-5).
var nlcd = NLCD.select('landcover').clip(geometry);
Map.addLayer(nlcd, {}, 'nlcd');
nlcd = nlcd.remap({
  from: [23, 24, 41, 42, 43, 90, 95, 81, 82, 21, 11],
  to: [2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 5],
  bandName: 'landcover'
}).rename(['landcover']);
225 |
// Sample predictor values at solar-panel locations.
// Manually curated points gave better performance than automatically
// sampling inside the buffered polygons
// (collection: solar_buff.filterBounds(geometry)).
var solar_ft = data.sampleRegions({
  collection: solar_pts,
  properties: ['landcover'],
  tileScale: 16,
  scale: 10,
  geometries: true
});

// Stratified sample: 1000 observations per broad NLCD landcover class.
var nat_ft = data.addBands(nlcd).stratifiedSample({
  numPoints: 1000,
  classBand: 'landcover',
  region: geometry,
  tileScale: 16,
  scale: 10,
  geometries: true
});

// Manually digitized 'not solar' points in areas the model initially
// mis-identified as solar fields; these improve performance.
var non_ft = data.sampleRegions({
  collection: non_pts,
  properties: ['landcover'],
  tileScale: 16,
  scale: 10,
  geometries: true
});

// Drop stratified points that fall inside known solar polygons.
nat_ft = nat_ft.filter(ee.Filter.bounds(solar).not());

// Check our sample sizes for each class of observations.
print("natural features:", nat_ft.size());
print("solar observations:", solar_ft.size());

// Pool all observations and attach a uniform random column for splitting.
var ft = nat_ft.merge(solar_ft).merge(non_ft).randomColumn();

Export.table.toAsset({
  collection: ft,
  description: 'solar_samples_5e3d8aef33f3fb0d30c068ce8d69b2be'
});
273 |
// Split observations into training and testing data.
// BUG FIX: the original filtered training with gte('random', 0.7),
// silently discarding the 40% of samples with 0.3 <= random < 0.7.
// Use the complementary threshold so each sample lands in exactly one split.
var validation = ft.filter(ee.Filter.lt('random', 0.3));
var training = ft.filter(ee.Filter.gte('random', 0.3));

// 4. TRAIN CLASSIFIER AND MAKE PREDICTIONS
// Train a 20-tree random forest on the training split.
// (smileRandomForest replaces the deprecated ee.Classifier.randomForest.)
var classifier = ee.Classifier.smileRandomForest(20)
  .train(training, 'landcover', data.bandNames());

// Test classifier performance on validation data and create error matrix.
var holdout = validation.classify(classifier)
  .errorMatrix('landcover', 'classification');

// FIX: label said 'variance' but this prints the holdout accuracy.
print('holdout accuracy', holdout.accuracy());
287 |
// Classify the full predictor stack.
var result = data.clip(geometry).classify(classifier);

// Smooth the classification with a 1-pixel modal filter.
var eroded = result.focal_mode(1, 'square', 'pixels');

// Display the results.
// FIX: the original re-declared `var data` here, clobbering the predictor
// stack defined above; use a distinct name for the display composite.
var visImage = first.addBands(variance).clip(geometry);
Map.addLayer(visImage, {bands: ['B4', 'B3', 'B2'], min: 500, max: 1500}, 'S2');
Map.addLayer(eroded, {bands: 'classification', palette: ['#000000', '#ffffff', '#099500', '#fff700', '#005ce7'], min: 1, max: 5}, 'result');
Map.addLayer(solar_buff, {color: '#ff00eb'}, 'solar fields');
298 |
// TEST UNMIXING APPROACH
// Per-class median spectra (endmembers) for the broad landcover classes,
// grouped by the 'landcover' column (8 input bands per group).
var nat_dict = nat_ft.reduceColumns({
  reducer: ee.Reducer.median().repeat(8).group(8, 'landcover'),
  selectors: ['B2', 'B3', 'B4', 'B8', 'B11', 'B12', 'ndvi', 'ndsi', 'landcover']
});

// Median spectrum of the solar-panel samples (single endmember).
var solar_dict = solar_ft.reduceColumns({
  reducer: ee.Reducer.median().repeat(8),
  selectors: ['B2', 'B3', 'B4', 'B8', 'B11', 'B12', 'ndvi', 'ndsi']
});

// Extract the per-class median lists from the grouped result
// (groups indexed in ascending 'landcover' order).
var groups = ee.List(nat_dict.get('groups'));
var urbanList = ee.Dictionary(groups.get(0)).get('median');
var greenList = ee.Dictionary(groups.get(1)).get('median');
var openList = ee.Dictionary(groups.get(2)).get('median');
var waterList = ee.Dictionary(groups.get(3)).get('median');
var solarList = solar_dict.get('median');

// Linear spectral unmixing of the median composite against the five
// endmembers (second argument constrains fractions to sum to one).
var miximg = first.select(['B2', 'B3', 'B4', 'B8', 'B11', 'B12', 'ndvi', 'ndsi'])
  .unmix([urbanList, greenList, openList, waterList, solarList], true);

// Map.addLayer(miximg, {}, 'unmix')
324 |
--------------------------------------------------------------------------------
/Classification/classified.js:
--------------------------------------------------------------------------------
// Import the required Earth Engine modules.
var Phenology = require('users/defendersofwildlifeGIS/Modules:Phenology')
var Clouds = require('users/defendersofwildlifeGIS/Modules:Clouds')
var Terrain = require('users/defendersofwildlifeGIS/Modules:Terrain')
var Calibrate = require('users/defendersofwildlifeGIS/Modules:Calibration')
// FIX: MAD was required twice in the original; one require suffices.
var MAD = require('users/defendersofwildlifeGIS/Modules:MAD')
8 |
// Define adjusted MAD functions for calibration.

/**
 * Compute canonical variates of two images from their between-image
 * correlation matrix via singular value decomposition.
 * @param {ee.Array} cor - correlation matrix between the images' bands.
 * @param {ee.Image} before - earlier image (n bands).
 * @param {ee.Image} after - later image (m bands).
 * @param {ee.Number} length - min(n, m); number of canonical variates.
 * @return {ee.Dictionary} {'a': variates of `before`, 'b': variates of
 *     `after`}, each an image with bands V1..V<length>.
 */
function canon(cor, before, after, length){
  // Band labels V1..Vlength for the flattened variate images.
  var labels = ee.List.sequence(1, length).map(function(item){
    return ee.String("V").cat(ee.Number(item).toInt().format());
  });
  var decomp = cor.matrixSingularValueDecomposition();
  // n x min(n, m) matrix of canonical coefficients for `before`.
  var U = ee.Array(decomp.get("U"));
  // m x min(n, m) matrix of canonical coefficients for `after`.
  var V = ee.Array(decomp.get("V"));
  // NOTE(review): the original also extracted the SVD singular values
  // (decomp.get("S").matrixDiagonal(), i.e. the CCA correlations) into an
  // unused local; removed here as dead code.
  // Turn images into array images with 1 x nbands matrices at each pixel.
  var before2D = before.toArray().toArray(1).arrayTranspose();
  var after2D = after.toArray().toArray(1).arrayTranspose();
  // Project each pixel's band vector onto the canonical coefficients.
  var a = before2D.matrixMultiply(ee.Image(U)).arrayProject([1]).arrayFlatten([labels]);
  var b = after2D.matrixMultiply(ee.Image(V)).arrayProject([1]).arrayFlatten([labels]);
  return ee.Dictionary({'a':a, 'b':b});
}
/**
 * Run the adjusted MAD transform: build the correlation matrix between
 * `before` and `after` over `aoi` and return their canonical variates.
 * @return {ee.Dictionary} output of canon(): {'a': ..., 'b': ...}.
 */
function mad(before, after, aoi){
  var nVariates = before.bandNames().length().min(after.bandNames().length());
  var corMat = MAD.corrmat(before, after, aoi);
  return canon(corMat, before, after, nVariates);
}
38 |
// Define visualization palettes for the different classes.

// 15-color palette matching the WWF habitat legend (class indices 0-14).
var wwfpalette = ['f2f2f2', 'f2f2f2', '003dea', 'e7ce1d', 'e7ce1d', 'ab6c1d', '00adff', 'e7ce1d', 'e7ce1d', 'e7ce1d', 'e7ce1d', 'b6db00', 'b6db00', '3ebb00', '008700']
// Alternate 15-color palette (defined but not used below).
var palette = ['f2f2f2','00A600','003dea','B6DB00','E6E600','E7CE1D','1DB000','3EBB00','63C600','8BD000','dedede','edb694','efc2b3','f1d6d3','f2d12f']
// 12-color palette for the ground-truth / validation classes.
var valpalette = ['dac041', '9f591d','c6c6c6', 'eaffce', 'ff8d00', 'caff29', 'd7ffc6', '0008ff', '77d448', '559733', '8500ff', '02bdc6']

// Second round of ground truth data had columns 'Name', 'Lat', 'Long'.

// Class-name -> hex-color lookup for the ground-truth classes.
// NOTE(review): this `legend` dictionary is shadowed by the ui.Panel
// `legend` declared near the end of the script and appears otherwise unused.
var legend= ee.Dictionary({
  'Agriculture': 'dac041',
  'Degraded / fallow land': '9f591d',
  'Dry river bed': 'c6c6c6',
  'Eastern_Wet_alluvial_grasslands': 'eaffce',
  'Grassland with invasives': 'ff8d00',
  'Grassland with woodland succession':'caff29',
  'Lower_alluvial_savannah_woodlands':'d7ffc6',
  'River / water':'0008ff',
  'Sub tropical mixed moist deciduous forests':'77d448',
  'Sub tropical semi evergreen forests':'559733',
  'Swamp_areas':'8500ff',
  'Wetland':'02bdc6'
})

// Display names and parameters for the 15-class WWF visualization.
var names = ['class 0', 'class 1', 'water', 'class 3', 'class 4', 'class 5', 'class 6', 'class 7', 'class 8', 'class 9', 'class 10', 'class 11', 'class 12', 'class 13', 'class 14']
var vizParams = {'palette': wwfpalette, 'min':0, 'max':14}
63 |
// Pool all ground-truth collections, dropping classes we do not model,
// then add back the curated riverbed / water / agriculture points.
var data = table.merge(table2).merge(table3).filter(ee.Filter.inList('class', ['Settlement', 'Savanna with invasives', 'River / water', 'Dry river bed']).not()).merge(riverbed18).merge(water18).merge(ag)

// Distinct class labels present in the pooled data.
var classes = data.aggregate_array('class').distinct()
print(classes)

// Build the training collection: tag every feature with a numeric
// 'class_num' equal to its label's index in `classes`.
var training = classes.iterate(function(label, acc){
  var classIndex = classes.indexOf(label)
  var tagged = data.filterMetadata('class', 'equals', label)
    .map(function(ft){ return ft.set('class_num', classIndex) })
  return ee.FeatureCollection(acc).merge(tagged)
}, ee.FeatureCollection([]))
74 |
// Rescale each pixel of an image to [0, 1] using the per-pixel minimum
// and maximum across the image's bands (not a regional statistic).
function rescale(img){
  var originalNames = img.bandNames()
  var pixelMin = img.reduce(ee.Reducer.min())
  var pixelMax = img.reduce(ee.Reducer.max())
  return img.subtract(pixelMin)
    .divide(pixelMax.subtract(pixelMin))
    .rename(originalNames)
}
83 |
/**
 * Append derived spectral indices to a Sentinel-2 image.
 * Adds bands: 'B4B2' (red + blue), 'ndvi', 'ndwi', 'bi', 'light', 'evi',
 * and preserves the image's 'system:time_start' property.
 */
function addVariables(img){
  var nir = img.select('B8')
  var red = img.select('B4')
  var blue = img.select('B2')
  // Red + blue sum, used as the numerator band of the brightness index.
  img = img.addBands(red.add(blue).rename('B4B2'))
  var rgb = img.select(['B4', 'B3', 'B2'])
  // Enhanced vegetation index.
  var evi = nir.subtract(red).divide(nir.add(red.multiply(6)).subtract(blue.multiply(7.5)).add(1)).multiply(2.5).rename('evi')
  // HSL-style lightness: midpoint of the max and min visible band.
  var light = rgb.reduce(ee.Reducer.max()).add(rgb.reduce(ee.Reducer.min())).divide(2).rename('light')
  var ndvi = img.normalizedDifference(['B8', 'B4']).rename('ndvi')
  var ndwi = img.normalizedDifference(['B3', 'B8']).rename('ndwi')
  // Brightness index from the red+blue sum versus green.
  var bi = img.normalizedDifference(['B4B2', 'B3']).rename('bi')
  // NOTE(review): the original computed unused locals (`bandNames`, and an
  // 'ndwi2' band that was never attached to the output); removed as dead code.
  return img.addBands(ndvi).addBands(ndwi).addBands(bi).addBands(light).addBands(evi).set('system:time_start', img.get('system:time_start'))
}
101 |
// Overall analysis window for the Sentinel-2 collection.
var end = '2020-11-30'

var start = '2015-11-30'

// Sentinel-2 reflectance bands used as classification inputs.
var s2Bands = ee.List(['B2', 'B3', 'B4', 'B8', 'B11', 'B12'])

var collection = S2.filterBounds(boundary).filterDate(start, end)

// Derived index bands produced by addVariables().
var indices = ee.List(['ndvi', 'ndwi', 'bi', 'evi'])

// Cloud-masked collection with derived indices appended.
var varsCollection = collection.map(Clouds.basicQA).select(s2Bands).map(addVariables);

// Seasonal greenest-pixel (max-NDVI) composites.
// NOTE(review): the '*18' composites draw from Dec 2016 - Nov 2017 dates;
// confirm whether the variable names or the date ranges are the intent.
var winter18 = varsCollection.filterDate('2016-12-01', '2017-02-28').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var summer18 = varsCollection.filterDate('2016-03-01', '2016-05-31').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var monsoon18 = varsCollection.filterDate('2016-06-01', '2016-09-30').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var postmonsoon18 = varsCollection.filterDate('2016-10-01', '2016-11-30').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()

// Stack the four seasonal composites (6 bands each -> 24 bands).
var seasons18 = ee.Image.cat([
  winter18.select(s2Bands),
  summer18.select(s2Bands),
  monsoon18.select(s2Bands),
  postmonsoon18.select(s2Bands)
])

// Same seasonal composites for Dec 2019 - Nov 2020.
var winter19 = varsCollection.filterDate('2019-12-01', '2020-02-28').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var summer19 = varsCollection.filterDate('2020-03-01', '2020-05-31').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var monsoon19 = varsCollection.filterDate('2020-06-01', '2020-09-30').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()
var postmonsoon19 = varsCollection.filterDate('2020-10-01', '2020-11-30').select(s2Bands.add('ndvi')).qualityMosaic('ndvi')//.median()

var seasons19 = ee.Image.cat([
  winter19.select(s2Bands),
  summer19.select(s2Bands),
  monsoon19.select(s2Bands),
  postmonsoon19.select(s2Bands)
])
137 |
//Try mad calibration
/*
var madcal = ee.Dictionary(mad(seasons18, seasons19, geometry))
var img19 = ee.Image(madcal.get('b'))
var img18 = ee.Image(madcal.get('a'))
*/

// Band names of the 24-band seasonal composite, used as predictors.
var input_features = seasons19.bandNames()


//Try regression calibrations

//var calibrated = Calibrate.calibrate_regress(seasons19, seasons18, input_features, geometry, 100)

// Rescale each 6-band seasonal slice to 0-1 per pixel; the slices of 6
// correspond to the four seasonal composites stacked above.
seasons18 = ee.Image.cat([
  rescale(seasons18.select(input_features.slice(0,6))),
  rescale(seasons18.select(input_features.slice(6,12))),
  rescale(seasons18.select(input_features.slice(12,18))),
  rescale(seasons18.select(input_features.slice(18,24)))
])

seasons19 = ee.Image.cat([
  rescale(seasons19.select(input_features.slice(0,6))),
  rescale(seasons19.select(input_features.slice(6,12))),
  rescale(seasons19.select(input_features.slice(12,18))),
  rescale(seasons19.select(input_features.slice(18,24)))
])

// True-color previews of the two post-monsoon composites.
Map.addLayer(postmonsoon19, {bands:['B4', 'B3', 'B2'], min: 0, max: 1250}, 'S219')
Map.addLayer(postmonsoon18, {bands:['B4', 'B3', 'B2'], min: 0, max: 1250}, 'S218')
171 |
// FEATURE ENGINEERING

// Use SNIC image segmentation to generate clusters that we can use to
// increase training data.
var snic20 = ee.Algorithms.Image.Segmentation.SNIC({
  image: seasons19,
  size: 50
})

// Add SNIC output cluster ids to imagery data.
var img_data = seasons19.addBands(snic20.select('clusters')).clip(boundary)

// Generate 3 candidate points within each output cluster
// (stratified on the cluster-id band).
var random = img_data.stratifiedSample({
  numPoints: 3,
  classBand: 'clusters',
  region: boundary,
  seed: 9,
  scale: 10,
  tileScale: 6
})
195 |
196 |
197 | //Map.addLayer(snic20)
198 |
199 | // Add the cluster membership to training features
200 | //var features = snic20.sampleRegions({
201 | // collection: training,
202 | // properties: ['class_num'],
203 | // scale: 10
204 | //})
205 |
206 | //print(training)
207 | //Map.addLayer(snic19, {bands:['B4_mean', 'B3_mean', 'B2_mean']}, 'snic19')
208 |
209 | /*
210 | var clusters = snic20.select('clusters').reduceToVectors({
211 | scale:10
212 | }).filterBounds(ee.FeatureCollection(training).geometry())
213 |
214 | //define a spatial join to match clusters to ground-truth
215 |
216 | var join = ee.Join.saveFirst({
217 | matchKey: 'training'
218 | })
219 |
220 | // Define a spatial filter as geometries that intersect.
221 | var spatialFilter = ee.Filter.intersects({
222 | leftField: '.geo',
223 | rightField: '.geo',
224 | maxError: 10
225 | });
226 |
227 | var joined = join.apply(clusters, training, spatialFilter)
228 |
229 | //print(joined)
230 | */
231 |
232 | /*
233 | In addition to the provided bands, we can experiment with different derived spectral features to be
234 | used as predictors in classification analyses. These include:
235 |
236 | 1. Principal components
237 | 2. Harmonic coefficients
238 | */
239 |
240 | // 1. PCA Transformation
241 | /*
242 | function get_eigens(img, bnds, aoi){
243 | var arrayImage = img.select(bnds).toArray()
244 |
245 | var covar = arrayImage.reduceRegion({
246 | reducer: ee.Reducer.covariance(),
247 | geometry: aoi,
248 | scale: 10,
249 | maxPixels: 1e13
250 | });
251 |
252 | var covarArray = ee.Array(covar.get('array'));
253 |
254 | var eigens = covarArray.eigen();
255 |
256 | //Since the eigenvalues are appended to the eigenvectors, slice the two apart and discard the eigenvectors:
257 | var eigenVectors = eigens.slice(1, 1);
258 |
259 | //Perform the matrix multiplication
260 | var pcs = ee.Image(eigenVectors)
261 | .matrixMultiply(arrayImage.clip(aoi).toArray(1));
262 |
263 | //Finally, convert back to a multi-band image and display the first PC
264 | var pcNames = ee.List.sequence(1, bnds.size()).map(function(int){
265 | return ee.String('pc').cat(ee.Number(int).format('%d'))
266 | })
267 |
268 | var pcImage = pcs
269 | // Throw out an an unneeded dimension, [[]] -> [].
270 | .arrayProject([0])
271 | // Make the one band array image a multi-band image, [] -> image.
272 | .arrayFlatten([pcNames]);
273 |
274 | return pcImage
275 | }
276 |
277 | var pcImage19 = get_eigens(seasons19, s2Bands, boundary)
278 | var pcImage18 = get_eigens(seasons18, s2Bands, boundary)
279 | //Map.addLayer(pcImage19, {'bands':['pc1', 'pc2', 'pc3']}, 'PC19');
280 | //Map.addLayer(pcImage18, {'bands':['pc1', 'pc2', 'pc3']}, 'PC18');
281 | */
282 |
283 | // 2. Harmonic Regression
284 | /*
285 |
286 | //Output of linear regression is a 2-band image
287 | var model = varsCollection.select(['offset', 'time', 'cos', 'sin', 'ndvi', 'ndwi', 'bi', 'light'])
288 | .reduce(ee.Reducer.linearRegression(4, 4));
289 |
290 | //These coefficients are 2D: Nx by Ny
291 | var coeff = model.select('coefficients').toArray()
292 |
293 | //resid is 1D array of length Ny
294 | var resid = model.select('residuals').arrayGet([0]);
295 |
296 | var ximage = coeff.arraySlice(0, 1, 2).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['x_ndvi', 'x_ndwi', 'x_bi', 'x_light']])
297 | var cosimage = coeff.arraySlice(0, 2, 3).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['sin_ndvi', 'sin_ndwi', 'sin_bi', 'sin_light']])
298 | var sinimage = coeff.arraySlice(0, 3, 4).arrayTranspose(0,1).arrayProject([0]).arrayFlatten([['cos_ndvi', 'cos_ndwi', 'cos_bi', 'cos_light']])
299 | var img = ximage.addBands(cosimage).addBands(sinimage).addBands(seasons19).clip(boundary)
300 | */
301 |
// SUPERVISED CLASSIFICATION
// We can run supervised classification on different sets of features.

// 1. Using the rescaled seasonal band values.


// Extract the seasonal band values at ground truth points.
var features = seasons19.sampleRegions({
  collection: training,
  properties: ['class_num'],
  scale: 10
})

print(features.size())

// Matching samples from the 2016-17 composite for the second classifier.
var features18 = seasons18.sampleRegions({
  collection: training,
  properties: ['class_num'],
  scale:10
})

// Export the training samples to Cloud Storage.
// NOTE(review): fileNamePrefix is empty, so output lands at the bucket
// root; confirm this is intended.
Export.table.toCloudStorage({
  collection: features,
  description: 'manas_training_tfrecords',
  bucket:'cvod-203614-mlengine',
  fileNamePrefix: ''

})
330 |
331 | // 2. Using scaled reflectance values from all seasons
332 |
333 | // Extract pixel values at ground truth points
334 |
335 | /*
336 | var features = img_data.sampleRegions({
337 | collection: training,
338 | properties: ['class_num'],
339 | scale: 10
340 | })
341 | */
342 |
343 | // Filter new points & assign class id to new features based on cluster id
344 | // Only need to do once then export
345 | /*
346 | var clusters = features.aggregate_array('clusters').distinct()
347 |
348 | var new_features = random.filter(ee.Filter.inList('clusters', clusters))
349 |
350 | var keys = features.distinct(['clusters', 'class_num'])
351 | new_features = random.remap(keys.aggregate_array('clusters'), keys.aggregate_array('class_num'), 'clusters')
352 |
353 | Export.table.toAsset({
354 | collection: new_features,
355 | description: 'snic-generated-training-data',
356 | assetId: 'snic_training_data'
357 | })
358 |
359 | var training_features = features.merge(new_features)
360 | */
361 |
362 | // 3. Using scaled reflectance values from postmonsoon
363 | /*
364 | // Extract pixel values at ground truth points
365 | var features = seasons19.sampleRegions({
366 | collection: training,
367 | properties: ['class_num'],
368 | scale: 10
369 | })
370 | */
// 20-tree random forest shared by both training runs below.
var classifier = ee.Classifier.smileRandomForest(20)
// Train on the 2019-20 seasonal samples.
var trained = classifier.train({
  features: features,
  classProperty: 'class_num',
  // use all feature properties as predictors except cluster id
  inputProperties: seasons19.bandNames()//input_features
})

// Train a parallel model on the 2016-17 seasonal samples.
var trained18 = classifier.train({
  features: features18,
  classProperty: 'class_num',
  inputProperties: seasons18.bandNames()
})

// Resubstitution confusion matrix: accuracy on the training data itself,
// not an independent holdout.
var confusion = trained.confusionMatrix()
print(confusion)
/*
var medians = features.reduceColumns({
  reducer: ee.Reducer.median().repeat(seasons19.bandNames().size()).group(seasons19.bandNames().size(), 'class_num'),
  selectors: seasons19.bandNames().add('class_num')
})

var keys = ee.List(medians.get('groups')).map(function(dict){return ee.Dictionary(dict).get('class_num')})
*/
395 |
//var classified = ee.Image('users/defendersofwildlifeGIS/Manas/Manas_supervised_21Jan21')
// Classify both seasonal stacks over the study boundary.
var classified = seasons19.clip(boundary).classify(trained)
var classified18 = seasons18.clip(boundary).classify(trained18)

// Classify PCA images
//var classified = pcImage19.clip(boundary).classify(trained)
//var classified18 = pcImage18.clip(boundary).classify(trained)
// 12-color palette indexed by class_num (0-11).
var suppal = ['02bdc6', 'dac041', '8500ff', '9f591d', 'ff8d00', 'eaffce', 'd7ffc6', 'caff29', '559733', '77d448', 'c6c6c6', '0008ff',]

Map.addLayer(classified, {'palette': suppal, 'min':0, 'max':11}, 'supervised')
Map.addLayer(classified18, {'palette': suppal, 'min':0, 'max':11}, 'supervised18')

// Export both classified images as Earth Engine assets.

Export.image.toAsset({
  image: classified,
  description: 'Manas_supervised20_14Feb21',
  scale:10,
  region: boundary,
  maxPixels: 1e13
})

Export.image.toAsset({
  image: classified18,
  description: 'Manas_supervised16_14Feb21',
  scale: 10,
  region: boundary,
  maxPixels: 1e13
})
425 |
426 |
427 | // SPECTRAL MIXING APPROACH
428 | /*
429 | // Calculate median values of each class
430 | // Extract pixel values at ground truth points
431 |
432 | var features = winter19.sampleRegions({
433 | collection: training,
434 | properties: ['class'],
435 | scale: 10
436 | })
437 |
438 | //print(features)
439 |
440 | // Returns a list of dictionaries
441 | var medians = features.reduceColumns({
442 | reducer: ee.Reducer.median().repeat(winter19.bandNames().size()).group(winter19.bandNames().size(), 'class'),
443 | selectors: winter19.bandNames().add('class')
444 | })
445 |
446 |
447 | var lists = ee.List(medians.get('groups')).map(function(dict){return ee.Dictionary(dict).get('median')})
448 | var keys = ee.List(medians.get('groups')).map(function(dict){return ee.Dictionary(dict).get('class')})
449 | //print(keys)
450 |
451 | var img19 = winter19.clip(geometry)
452 | var img18 = winter18.clip(geometry)
453 | var unmixed19 = img19.unmix(lists, true).rename(keys)
454 | var unmixed18 = img18.unmix(lists, true).rename(keys)
455 | //print(unmixed.bandNames())
456 | // TO ID A CERTAIN HABITAT TYPE
457 | var arrayImg19 = unmixed19.toArray()
458 | var classes19 = arrayImg19.arrayArgmax().arrayFlatten([['class']])
459 | var arrayImg18 = unmixed18.toArray()
460 | var classes18 = arrayImg18.arrayArgmax().arrayFlatten([['class']])
461 | Map.addLayer(classes19, {'palette': valpalette, 'min':0, 'max':11}, 'unmixing 19')
462 | Map.addLayer(classes18, {'palette': valpalette, 'min':0, 'max':11}, 'unmixing 18')
463 | */
464 |
// Build one legend row: a colored swatch followed by the class name.
function makeRow(color, name) {
  var swatch = ui.Label({
    style: {
      backgroundColor: '#' + color,
      padding: '8px',
      margin: '0 0 4px 0'
    }
  });
  var label = ui.Label({
    value: name,
    style: {margin: '0 0 4px 6px'}
  });
  return ui.Panel({
    widgets: [swatch, label],
    layout: ui.Panel.Layout.Flow('horizontal')
  });
}
483 |
// Assemble the map legend panel.
var legendTitle = ui.Label({
  value: 'Legend',
  style: {'fontWeight':'bold'}
})
var legend = ui.Panel({
  widgets: [legendTitle],
  style: {'position': 'bottom-left'}
})

// Add one legend row per class once the class list is available client-side.
classes.evaluate(function(list){
  // FIX: the original hard-coded 12 iterations, which yields undefined
  // entries whenever the class list or palette has fewer than 12 items.
  var n = Math.min(list.length, suppal.length);
  for (var i = 0; i < n; i++) {
    legend.add(makeRow(suppal[i], list[i]))
  }
})

// Sanity check: show the 'Dry river bed' ground-truth points.
var subset = data.filterMetadata('class', 'equals', 'Dry river bed')
print(subset.size())
Map.addLayer(boundary)
// FIX: the layer was mislabeled 'fallow pts'; it shows dry-river-bed points.
Map.addLayer(subset, {color:'blue'}, 'dry river bed pts')
Map.add(legend)
507 |
//Export individual classes as polygon KML
/*
var binary = classified.eq(9)
.focal_mode(1, 'square', 'pixels')
//.focal_max(1, 'square', 'pixels')

var polys = binary.updateMask(binary)
.reduceToVectors({
scale:10,
geometry: boundary,
eightConnected: true,
maxPixels: 1e13,
tileScale: 8
})

var date = '20Jan21'

Export.table.toDrive({
collection: polys,
description: 'deciduous_forest' + date,
fileFormat: 'KML'
})
*/
// Export training data as TFRecords.
// NOTE(review): selectors exports only the four seasonal B2 bands
// ('B2', 'B2_1', 'B2_2', 'B2_3'); confirm whether the full predictor
// set should be exported instead.
Export.table.toCloudStorage({
  collection: features,
  bucket: 'cvod-203614-mlengine',
  fileNamePrefix: 'landcover/data/seasons_raw',
  fileFormat: 'TFRecord',
  selectors: ['B2', 'B2_1', 'B2_2', 'B2_3']
})

/*
// Export a test image as TFRecords
Export.image.toCloudStorage({
image: seasons19,
description: 'manas_test2',
bucket: 'cvod-203614-mlengine',
fileNamePrefix: 'landcover/data/predict/test2/test2',
scale: 10,
fileFormat: 'TFRecord',
region: geometry,
formatOptions: {
'patchDimensions': [256, 256],
maxFileSize: 104857600,
compressed: true,
},
});
*/
558 |
--------------------------------------------------------------------------------