├── Data ├── Baseline └── delete.txt ├── data ├── HandLabeled │ └── file2.txt └── WeaklyLabeled │ └── file3.txt ├── imgs ├── s1.png ├── s2.png ├── UNET.png ├── Folder1.png ├── Folder2.png ├── Folder3.png ├── S2_half.png ├── label.png ├── Label_half.png ├── S1Hand_NAN.png ├── HandLabeledTable.png └── WeaklyLabeledTable.png ├── pre-processing ├── snappyfunctions_pre.py ├── Pre-Process-Data.ipynb └── Pre-Processing-Sentinel 1.ipynb ├── README.md ├── LICENSE ├── Random_Forest └── Random_Forest_Floods.ipynb └── U-NET ├── UNET_S1S2_MultiModal.ipynb └── U_NET_S1Hand.ipynb /Data: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Baseline/delete.txt: -------------------------------------------------------------------------------- 1 | tipota 2 | -------------------------------------------------------------------------------- /data/HandLabeled/file2.txt: -------------------------------------------------------------------------------- 1 | delete 2 | -------------------------------------------------------------------------------- /data/WeaklyLabeled/file3.txt: -------------------------------------------------------------------------------- 1 | delete 2 | -------------------------------------------------------------------------------- /imgs/s1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/s1.png -------------------------------------------------------------------------------- /imgs/s2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/s2.png -------------------------------------------------------------------------------- /imgs/UNET.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/UNET.png -------------------------------------------------------------------------------- /imgs/Folder1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/Folder1.png -------------------------------------------------------------------------------- /imgs/Folder2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/Folder2.png -------------------------------------------------------------------------------- /imgs/Folder3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/Folder3.png -------------------------------------------------------------------------------- /imgs/S2_half.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/S2_half.png -------------------------------------------------------------------------------- /imgs/label.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/label.png 
-------------------------------------------------------------------------------- /imgs/Label_half.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/Label_half.png -------------------------------------------------------------------------------- /imgs/S1Hand_NAN.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/S1Hand_NAN.png -------------------------------------------------------------------------------- /imgs/HandLabeledTable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/HandLabeledTable.png -------------------------------------------------------------------------------- /imgs/WeaklyLabeledTable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KonstantinosF/Flood-Mapping-Using-Satellite-Images/HEAD/imgs/WeaklyLabeledTable.png -------------------------------------------------------------------------------- /pre-processing/snappyfunctions_pre.py: -------------------------------------------------------------------------------- 1 | from osgeo import ogr 2 | import imageio 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | # import rasterio 6 | 7 | import snappy 8 | from os.path import join 9 | from glob import glob 10 | import numpy as np 11 | import os 12 | import glob 13 | import jpy 14 | System = jpy.get_type('java.lang.System') 15 | System.gc() 16 | import gc 17 | 18 | import re 19 | from geomet import wkt 20 | from snappy import GPF 21 | from snappy import ProductIO 22 | from snappy import HashMap 23 | from snappy import jpy 24 | HashMap = snappy.jpy.get_type('java.util.HashMap') 25 | import time 26 | 27 | from osgeo import gdal, ogr 28 | import sys 29 | from osgeo import osr 30 | 31 | import configparser 32 | from os.path import expanduser 33 | os.chdir(r"D:\MICROS\Ship_detection") 34 | 35 | 36 | 37 | def subset(image): 38 | 39 | region = 'POLYGON ((33.0130819325524 34.54982297667559, 33.285059184082606 34.58857638473086, 33.25449505627861 34.734151031006526, 32.98203732050098 34.6954232998211,33.0130819325524 34.54982297667559))' 40 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 41 | HashMap = jpy.get_type('java.util.HashMap') 42 | 43 | parameters = snappy.HashMap() 44 | parameters.put('copyMetadata', True) 45 | parameters.put('geoRegion', region) 46 | 47 | subset = snappy.GPF.createProduct('Subset', parameters, image) 48 | parameters = None 49 | print('Subset implemented succesfully...') 50 | 51 | 52 | return subset 53 | 54 | 55 | 56 | def importvector(image): 57 | 58 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 59 | HashMap = jpy.get_type('java.util.HashMap') 60 | 61 | #shape = fiona.open('C:\\Users\\Kostas-Geosystems\\Desktop\\Ship_detection\\AOI\\Cyprus_Coastline_Final_V2\\Cyprus_Coastline_Final_V2.shp') 62 | shape_path = r'D:\\MICROS\\Ship_detection\\Cyprus_Coastline_Final_V2.shp' 63 | shapef = "D:/MICROS/Ship_detection/AOI/Cyprus_Coastline_Final_V2/Cyprus_Coastline_Final_V2.shp" 64 | 65 | parameters = HashMap() 66 | parameters.put('vectorFile', shapef) 67 | parameters.put('separateShapes', True) 68 | 69 | addvector = snappy.GPF.createProduct('Import-Vector', 
parameters, image) 70 | parameters = None 71 | print('The land mask added succesfully...') 72 | 73 | return addvector 74 | 75 | 76 | def landmask(image): 77 | 78 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 79 | HashMap = jpy.get_type('java.util.HashMap') 80 | 81 | parameters = snappy.HashMap() 82 | parameters.put('sourceBands', 'Intensity_VH') 83 | parameters.put('landMask', False) 84 | parameters.put('useSRTM', False) 85 | parameters.put('geometry', 'Cyprus_Coastline_Final_V2_1') 86 | parameters.put('invertGeometry', True) 87 | parameters.put('shorelineExtension', 35) 88 | #parameters.put('byPass', False) 89 | 90 | maskland = snappy.GPF.createProduct('Land-Sea-Mask', parameters, image) 91 | parameters = None 92 | print('The land mask added succesfully...') 93 | 94 | return maskland 95 | 96 | def calibration(image): 97 | 98 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 99 | HashMap = jpy.get_type('java.util.HashMap') 100 | parameters = snappy.HashMap() 101 | parameters.put('sourceBands', 'Intensity_VH') 102 | parameters.put('outputImageScaleInDb', True) 103 | 104 | calibrated = snappy.GPF.createProduct('Calibration', parameters, image) 105 | parameters = None 106 | print('Calibration implemented succesfully...') 107 | 108 | return calibrated 109 | 110 | def adaptivethresholding(image): 111 | 112 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 113 | HashMap = jpy.get_type('java.util.HashMap') 114 | 115 | 116 | parameters = snappy.HashMap() 117 | parameters.put('targetWindowSizeInMeter', 250) 118 | parameters.put('guardWindowSizeInMeter', 600.0) 119 | parameters.put('backgroundWindowSizeInMeter', 800.0) 120 | parameters.put('pfa', 16.0) 121 | parameters.put('estimateBackground', False) 122 | 123 | threshold = snappy.GPF.createProduct('AdaptiveThresholding', parameters, image) 124 | parameters = None 125 | print('Adaptive Thresholding implemented succesfully...') 126 | 127 | return threshold 128 | 129 | def objectdiscrimination(image): 130 | 131 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 132 | HashMap = jpy.get_type('java.util.HashMap') 133 | 134 | parameters = snappy.HashMap() 135 | parameters.put('minTargetSizeInMeter', 30.0) 136 | parameters.put('maxTargetSizeInMeter', 800.0) 137 | 138 | objectdetection = snappy.GPF.createProduct('Object-Discrimination', parameters, image) 139 | parameters = None 140 | print('Object Discimination implemented succesfully...') 141 | 142 | return objectdetection 143 | 144 | def applyorbitfile(image): 145 | 146 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 147 | HashMap = jpy.get_type('java.util.HashMap') 148 | 149 | parameters = snappy.HashMap() 150 | parameters.put('orbitType', 'Sentinel Precise (Auto Download)') 151 | parameters.put('continueOnFail', True) 152 | 153 | orbit_correction = snappy.GPF.createProduct('Apply-Orbit-File', parameters, image) 154 | parameters = None 155 | print('Apply Orbit File implemented succesfully...') 156 | 157 | return orbit_correction 158 | 159 | 160 | def terraincorrection(image): 161 | 162 | GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis() 163 | HashMap = jpy.get_type('java.util.HashMap') 164 | 165 | proj = '''GEOGCS["WGS 84", 166 | DATUM["WGS_1984", 167 | SPHEROID["WGS 84",6378137,298.257223563, 168 | AUTHORITY["EPSG","7030"]], 169 | AUTHORITY["EPSG","6326"]], 170 | PRIMEM["Greenwich",0, 171 | AUTHORITY["EPSG","8901"]], 172 | UNIT["degree",0.0174532925199433, 173 | 
AUTHORITY["EPSG","9122"]], 174 | AUTHORITY["EPSG","4326"]]''' 175 | 176 | parameters = snappy.HashMap() 177 | parameters.put('demName', 'SRTM 1Sec HGT') 178 | parameters.put('sourceBands', 'Sigma0_VH') 179 | parameters.put('imageResamplingMethod', 'BILINEAR_INTERPOLATION') 180 | parameters.put('pixelSpacingInMeter', 10.0) 181 | parameters.put('mapProjection', proj) 182 | parameters.put('noDataValueAtSea', False) 183 | parameters.put('saveSelectedSourceBand', True) 184 | parameters.put('nodataValueAtSea', False) 185 | 186 | terrain_correction = snappy.GPF.createProduct('Terrain-Correction', parameters, image) 187 | parameters = None 188 | print('Terrain Correction implemented succesfully...') 189 | 190 | return terrain_correction 191 | 192 | 193 | 194 | 195 | 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Flood-Mapping-Using-Satellite-Images 2 | MSc Thesis - Data Science - UoP & NCSR "Demokritos" 3 | 4 |

Download The Dataset

5 | 6 | - First Option:
7 | Visit the following link:
8 | https://mlhub.earth/data/c2smsfloods_v1 (you need to create an account first) 9 | 10 | - Second Option:
11 | The dataset is available through a Google Cloud Storage bucket at: gs://sen1floods11/ 12 | 13 | You can access the dataset bucket using the gsutil command-line tool. If you would like to download the entire dataset (~14 GB), you can use gsutil rsync to clone the bucket to a local directory. The -m flag is recommended to speed up downloads, and the -r flag downloads sub-directories and folders recursively. See the example below. 14 | 15 | $ gsutil -m rsync -r gs://sen1floods11 /YOUR/LOCAL/DIRECTORY/HERE 16 | 17 |
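If you prefer to script the download, for instance inside a notebook, the minimal sketch below wraps the same command; it assumes the gsutil CLI is installed and authenticated, and the local directory is a placeholder.

```python
# Minimal sketch: clone the Sen1Floods11 bucket from Python.
# Assumes the gsutil CLI is installed and on PATH; the target path is a placeholder.
import subprocess
from pathlib import Path

local_dir = Path("data/sen1floods11")  # placeholder target directory
local_dir.mkdir(parents=True, exist_ok=True)

# -m parallelizes the transfer; -r recurses into sub-directories.
subprocess.run(
    ["gsutil", "-m", "rsync", "-r", "gs://sen1floods11", str(local_dir)],
    check=True,
)
```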

Dataset Information

18 | 19 | The dataset used is Sen1Floods11, which comprises Sentinel-1 and Sentinel-2 images together with the corresponding ground-truth masks. The dataset contains two main folders (flood_events & perm_water), as shown below: 20 | 21 |

22 | 23 |

24 | 25 | In this study we use only the images contained in the flood_events folder and discard the permanent-water images. The flood_events folder is further split into two subfolders, as shown in the image below: 26 | 27 |

28 | 29 |

30 | 31 | 32 | The HandLabeled subfolder is split into five subfolders, of which we do not use "JRCWaterHand". 33 | 34 |

35 | 36 |

37 | 38 | The WeaklyLabeled folder is split into three subfolders, of which we do not use "S2IndexLabelWeak". 39 | 40 | ![image](https://user-images.githubusercontent.com/23013328/158182582-fd4a76e3-9842-4221-a285-e02dcad35e28.png) 41 | 42 | 43 |

Flood Events

44 | 45 |

Hand Labeled

46 |

This subfolder contains a folder called S1Hand, which holds Sentinel-1 image patches with the two polarization bands (VV & VH), and a folder called S2Hand, which holds Sentinel-2 image patches with all 13 spectral bands. Note that not all Sentinel-2 bands share the same spatial resolution, so extra processing (e.g. pan-sharpening) may be needed. The patches are 512x512 pixels in the EPSG:4326 - WGS 84 geographic coordinate system. The remaining folders hold the corresponding ground-truth masks, each created with a different method. The areas of study are parts of 12 countries, as shown below (a short loading sketch follows the figures):

47 | 48 |

49 | 50 |

51 | 52 | 53 |
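As a quick sanity check, the patches can be loaded with scikit-image, the library used by the notebooks in this repository. The snippet below is a minimal sketch; the file names are hypothetical examples of the Sen1Floods11 naming scheme, and the channels-first band layout is an assumption based on how the notebooks index the patches.

```python
# Minimal sketch: inspect one Sentinel-1 and one Sentinel-2 hand-labeled patch.
# File names are hypothetical; the GeoTIFFs are assumed channels-first.
from skimage.io import imread

s1 = imread("S1Hand/Bolivia_103757_S1Hand.tif")  # expected shape: (2, 512, 512) -> VV, VH
s2 = imread("S2Hand/Bolivia_103757_S2Hand.tif")  # expected shape: (13, 512, 512) -> B1..B12
print("S1 shape:", s1.shape)
print("S2 shape:", s2.shape)
```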

Weakly Labeled

54 | 55 |

56 | 57 |

58 | As can be seen, the weakly labeled dataset is much larger than the hand-labeled one. Note that the weakly labeled patches do not overlap with the hand-labeled patches, although they are geographically close.

Clean the Dataset - Pre-Processing

61 |

Hand Labeled

62 | 63 |

After visually checking the dataset by manually loading image patches in QGIS, a free and open-source Geographic Information System, we noticed that many images contain corrupted pixels carrying no information, or that the number of flooded pixels is significantly lower than the number of background pixels. We also noticed that a large number of Sentinel-2 images are heavily or completely covered by clouds. Below is an illustration of a Sentinel-2 image tile blocked by clouds, the corresponding Sentinel-1 tile, and the respective ground truth (a condensed sketch of the splitting loop follows the illustration). The initial 512x512 image tiles were split into 128x128 patches, so 16 patches were created from each initial image. The splitting process took 8 to 10 hours to complete in a Google Colab environment.

73 | 74 |
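A condensed sketch of the splitting step is shown below; the full loop lives in pre-processing/Pre-Process-Data.ipynb, and the input path here is a placeholder.

```python
# Minimal sketch: split a 512x512 tile into sixteen 128x128 patches.
# Condensed from pre-processing/Pre-Process-Data.ipynb; the path is a placeholder.
from itertools import product
from skimage.io import imread, imsave

W, H, D = 512, 512, 128

img = imread("LabelHand/example_tile.tif")  # placeholder single-band tile
for i, j in product(range(0, H, D), range(0, W, D)):
    patch = img[i:i + D, j:j + D]
    imsave(f"example_tile_{i}_{j}.tif", patch, check_contrast=False)
```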

Another critical issue was the imbalance between the number of flooded and background pixels. To overcome these challenges and create a coherent multimodal dataset, we eliminated patches that were completely covered by clouds, contained no flooded pixels, or contained corrupted pixels, as well as patches with a severely unbalanced ratio of flooded to background pixels. The remaining number of patches per geographic area is shown in Table 4.1, for a total of 577 images. A condensed sketch of this filtering rule follows the tables below.

75 | 76 |

77 | 78 | 79 | 80 |

81 | 82 | 83 |

84 | 85 | 86 |

87 | 88 | 89 |
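The filtering rule can be summarised as follows; this is a condensed form of the check in pre-processing/Pre-Process-Data.ipynb.

```python
# Minimal sketch: decide whether a label patch should be kept. A patch is
# dropped if it contains any invalid (-1) pixels or if its flooded and
# background pixel counts differ by more than 50%, as in
# pre-processing/Pre-Process-Data.ipynb.
import numpy as np

def keep_patch(label: np.ndarray) -> bool:
    n_background = np.count_nonzero(label == 0)
    n_flooded = np.count_nonzero(label == 1)
    n_invalid = np.count_nonzero(label == -1)
    if n_invalid > 0 or max(n_background, n_flooded) == 0:
        return False
    diff_pct = abs(n_background - n_flooded) / max(n_background, n_flooded) * 100
    return diff_pct <= 50.0
```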

Weakly Labeled

90 |

The initial total number of images was 4384. Each image was split into 16 patches of 128x128 pixels, resulting in 70144 patches in total. From these we removed patches containing at least one corrupted (no-data) pixel labeled as -1, as well as patches where the flooded and background pixel counts differed by more than 50%, resulting in a dataset of 6835 patches. Since this number of patches was still too large to handle easily, only the first 50 patches from each geographic area were kept, resulting in 600 patches in total.

98 | 99 | The link for the new, pre-processed dataset: https://uopel-my.sharepoint.com/:f:/g/personal/dit2025dsc_office365_uop_gr/EjZZUSHVyv1LozsRfnTt7uEBKoDEbOsyDsCOzMPi0X02lQ?e=gBMJub 100 | 101 | 102 |
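A minimal sketch of the "first 50 patches per area" selection described above is given below; it assumes file names start with the geographic area followed by an underscore, as in the Sen1Floods11 naming scheme, and the folder path is a placeholder.

```python
# Minimal sketch: keep only the first 50 patches per geographic area.
# Assumes names like "Bolivia_103757_..."; the folder path is a placeholder.
import os
from collections import defaultdict

source_path = "S1Weak/"  # placeholder folder
kept_per_area = defaultdict(int)

for item in sorted(os.listdir(source_path)):
    area = item.split("_")[0]
    kept_per_area[area] += 1
    if kept_per_area[area] > 50:
        os.remove(os.path.join(source_path, item))
```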

Experiments

103 |

Experiments were split into three parts, each based on a different semantic segmentation scheme. The first is based on a fully convolutional neural network called U-Net, the second on a Random Forest with a set of hand-crafted features, and the last on transfer learning with the VGG16 model as a backbone.

108 | 109 | 110 |

1. U-NET

111 | 112 |

U-Net is a convolutional neural network that was developed for biomedical image segmentation. The network is based on the fully convolutional network and its architecture was modified and extended to work with fewer training images and to yield more precise segmentations. The network consists of a contracting path (convolution) and an expansive path (deconvolution), which gives it the u-shaped architecture. The contracting path is a typical convolutional network that consists of repeated application of convolutions, each followed by a rectified linear unit (ReLU) and a max pooling operation. During the contraction, the spatial information is reduced while feature information is increased. The expansive pathway combines the feature and spatial information through a sequence of up-convolutions and concatenations with high-resolution features from the contracting path. [https://en.wikipedia.org/wiki/U-Net#cite_note-Shelhamer_2017-2]
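For reference, below is a minimal Keras sketch of a U-Net in the spirit of the notebooks in the U-NET folder; the depth, filter counts, and the 128x128x2 input (a Sentinel-1 VV/VH patch) are illustrative assumptions rather than the exact thesis configuration.

```python
# Minimal U-Net sketch in Keras. The (128, 128, 2) input assumes a
# Sentinel-1 VV/VH patch; depth and filter counts are illustrative only.
from tensorflow.keras import layers, models

def conv_block(x, filters):
    x = layers.Conv2D(filters, 3, padding="same", activation="relu")(x)
    return layers.Conv2D(filters, 3, padding="same", activation="relu")(x)

def build_unet(input_shape=(128, 128, 2)):
    inputs = layers.Input(input_shape)

    # Contracting path: convolutions + max pooling reduce spatial information.
    c1 = conv_block(inputs, 16)
    c2 = conv_block(layers.MaxPooling2D()(c1), 32)
    c3 = conv_block(layers.MaxPooling2D()(c2), 64)

    # Expansive path: up-convolutions + skip concatenations restore resolution.
    u2 = layers.Conv2DTranspose(32, 2, strides=2, padding="same")(c3)
    c4 = conv_block(layers.concatenate([u2, c2]), 32)
    u1 = layers.Conv2DTranspose(16, 2, strides=2, padding="same")(c4)
    c5 = conv_block(layers.concatenate([u1, c1]), 16)

    outputs = layers.Conv2D(1, 1, activation="sigmoid")(c5)  # flood / background
    return models.Model(inputs, outputs)

model = build_unet()
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
```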

113 | 114 | 117 | 118 | #### Single-Modal - UNET 119 | | Hand Labeled | | | 120 | | ------------------------ | ---|---- | 121 | | Source & Labels |IOU | Acc | 122 | | S1Hand & S1OtsuLabelHand |0.89|0.94 | 123 | | S2Hand & LabelHand |0.47|0.72 | 124 | 125 | 126 | |Weakly Labeled | | | 127 | |-------------------------| --- | --- | 128 | | Source & Labels | IOU | Acc | 129 | | S1Hand & S1OtsuLabelWeak|0.81 |0.87 | 130 | 131 | 132 | |Weakly Supervised | | | | 133 | |-------------------------|---------------- |--- | --- | 134 | | Trained On | Tested on | IOU | Acc | 135 | | S1Hand & S1OtsuLabelWeak| S1OtsuLabelHand |0.77 |0.86 | 136 | 137 | 138 | 139 | #### Multi-Modal - UNET 140 | 141 | | Hand Labeled | | | 142 | | ----------------------------------- | --- | --- | 143 | | Source & Labels | IOU | Acc | 144 | | S1Hand - S2Hand & S1OtsuLabelHand |0.72 |0.82 | 145 | | S1Hand - S2Hand & LabelHand |0.42 |0.71 | 146 | 147 | 148 | 149 | 150 | 151 |
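In the results tables, IOU is the Jaccard index and Acc the overall pixel accuracy. A minimal sketch of how these scores are computed with scikit-learn (the library used in the notebooks) follows; the label arrays are placeholders.

```python
# Minimal sketch: the IOU (Jaccard index) and pixel accuracy reported in the
# results tables, computed with scikit-learn on flattened 0/1 pixel labels.
import numpy as np
from sklearn.metrics import accuracy_score, jaccard_score

y_true = np.array([0, 1, 1, 0, 1])  # placeholder ground-truth pixels
y_pred = np.array([0, 1, 0, 0, 1])  # placeholder predicted pixels

print("IOU =", jaccard_score(y_true, y_pred))
print("Acc =", accuracy_score(y_true, y_pred))
```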

2. Random Forest - Feature Engineering

152 | Feature-based segmentation using a Random Forest 153 | 154 |

For this set of experiments, various hand-crafted features were derived from the raw Sentinel-1 and Sentinel-2 spectral bands. Specifically, the NDVI and NDWI indices were constructed from the optical bands, and the VV/VH band ratio from Sentinel-1. In addition, three kernel-based features were constructed from the VH and NIR bands respectively: a median filter and a variance filter, both with a kernel size of 3, and the Roberts edge-detection filter.
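The feature extraction is condensed in the sketch below (the full per-image loop lives in Random_Forest/Random_Forest_Floods.ipynb); the band ordering assumes the pre-processed 2-band Sentinel-1 and 5-band Sentinel-2 patches produced by this repository.

```python
# Minimal sketch of the hand-crafted features: VV/VH ratio, NDVI, NDWI,
# plus median, variance, and Roberts filters on the VH and NIR bands.
# Band order assumes the pre-processed patches produced by this repository.
import numpy as np
from scipy import ndimage as nd
from skimage.filters import roberts

def extract_features(s1, s2):
    vv, vh = s1[0].astype(float), s1[1].astype(float)
    red, nir, swir = s2[2].astype(float), s2[3].astype(float), s2[4].astype(float)

    feats = [
        vv / vh,                                 # VV/VH band ratio
        (nir - red) / (nir + red),               # NDVI
        (red - swir) / (red + swir),             # NDWI, as computed in the notebook
        nd.median_filter(vh, size=3),            # median filter, kernel 3
        nd.generic_filter(nir, np.var, size=3),  # variance filter, kernel 3
        roberts(vh),                             # Roberts edge detection
    ]
    return np.stack([f.reshape(-1) for f in feats], axis=1)  # (pixels, features)
```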

160 | 161 | 162 | #### Single-Modal - RF 163 | | Hand Labeled | | | 164 | | ------------------------ |--- | --- | 165 | | Source & Labels |IOU | Acc | 166 | | S1Hand & S1OtsuLabelHand |0.79|0.89 | 167 | | S2Hand & LabelHand |0.87|0.93 | 168 | 169 | 170 | |Weakly Labeled | | | 171 | |-------------------------| --- | --- | 172 | | Source & Labels | IOU | Acc | 173 | | S1Weak & S1OtsuLabelWeak|0.81 |0.90 | 174 | 175 | 176 | |Weakly Supervised | | | | 177 | |-------------------------|--------------------------|--- | --- | 178 | | Trained On | Tested on | IOU | Acc | 179 | | S1Weak & S1OtsuLabelWeak| S1Hand + S1OtsuLabelHand | 0.77|0.88 | 180 | 181 | 182 | 183 | #### Multi-Modal - RF 184 | 185 | | Hand Labeled | | | 186 | | ----------------------------------- | --- | --- | 187 | | Source & Labels | IOU | Acc | 188 | | S1Hand - S2Hand & S1OtsuLabelHand |0.84 |0.92 | 189 | | S1Hand - S2Hand & LabelHand |0.87 |0.93 | 190 | 191 | 192 |
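For completeness, the training and scoring pipeline is condensed below from Random_Forest/Random_Forest_Floods.ipynb; the feature matrix and labels are placeholders standing in for the per-pixel features of the previous sketch.

```python
# Minimal sketch: scale features, split, and fit the Random Forest,
# condensed from Random_Forest/Random_Forest_Floods.ipynb.
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)
X = rng.random((1000, 6))     # placeholder per-pixel feature matrix
y = rng.integers(0, 2, 1000)  # placeholder 0/1 pixel labels

X_scaled = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X_scaled, y, test_size=0.3)

model = RandomForestClassifier(n_estimators=20).fit(X_train, y_train)
pred = model.predict(X_test)  # scored with the IOU/Acc sketch above
```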

3. Transfer Learning - VGG16

193 | 194 | 195 | 196 |

In the current study, the VGG16 architecture pretrained on the publicly available ImageNet dataset is used, while the Sen1Floods11 dataset is used for fine-tuning. The classification step is handled by a Random Forest.
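A minimal sketch of one way to realise this scheme is shown below: shallow convolutional features from an ImageNet-pretrained VGG16 (which preserve the 128x128 resolution) serve as per-pixel descriptors for a Random Forest. The 3-band input (e.g. VH+RED+NIR) and the choice of layer are illustrative assumptions, not the exact thesis configuration.

```python
# Minimal sketch: per-pixel VGG16 features classified by a Random Forest.
# The 3-band 128x128 input and the block1_conv2 layer are illustrative
# assumptions; block1 keeps the spatial resolution of the input.
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from tensorflow.keras.applications import VGG16
from tensorflow.keras.models import Model

base = VGG16(weights="imagenet", include_top=False, input_shape=(128, 128, 3))
extractor = Model(base.input, base.get_layer("block1_conv2").output)

X_img = np.random.rand(4, 128, 128, 3)           # placeholder image batch
y_mask = np.random.randint(0, 2, (4, 128, 128))  # placeholder 0/1 masks

feats = extractor.predict(X_img)                 # (4, 128, 128, 64)
X = feats.reshape(-1, feats.shape[-1])           # one row per pixel
y = y_mask.reshape(-1)

rf = RandomForestClassifier(n_estimators=20).fit(X, y)
```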

199 | 200 | #### Single-Modal - Transfer Learning (R NIR SWIR, VV+VH +VH/VV) 201 | | Hand Labeled | | | 202 | | ------------------------ |--- | --- | 203 | | Source & Labels |IOU | Acc | 204 | | S1Hand & S1OtsuLabelHand |0.84|0.92 | 205 | | S2Hand & LabelHand |0.47|0.65 | 206 | 207 | 208 | |Weakly Labeled | | | 209 | |-------------------------| --- | --- | 210 | | Source & Labels | IOU | Acc | 211 | | S1Hand & S1OtsuLabelWeak|0.86 |0.92 | 212 | 213 | 214 | 215 | |Weakly Supervised | | | | 216 | |-------------------------|--------------------------|--- | --- | 217 | | Trained On | Tested on | IOU | Acc | 218 | | S1Hand & S1OtsuLabelWeak| S1Hand + S1OtsuLabelHand |0.83 |0.91 | 219 | 220 | 221 | #### Multi-Modal - Transfer Learning (VH+RED+NIR) 222 | 223 | | Hand Labeled | | | 224 | | ----------------------------------- | --- | --- | 225 | | Source & Labels | IOU | Acc | 226 | | S1Hand - S2Hand & S1OtsuLabelHand |0.73 |0.85 | 227 | | S1Hand - S2Hand & LabelHand |0.55 |0.71 | 228 | 229 | 230 | 231 |

Credits

232 | Credits to Dr. Sreenivas Bhattiprolu for his excellent python and machine learning tutorials which I used through out my thesis. 233 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /pre-processing/Pre-Process-Data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "70a09cc3", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "# Load Dependencies\n", 11 | "import os\n", 12 | "import numpy as np\n", 13 | "from skimage.io import imread, imshow\n", 14 | "from collections import Counter\n", 15 | "import subprocess \n", 16 | "from subprocess import PIPE\n", 17 | "from skimage.io import imread, imshow, imsave\n", 18 | "# from PIL import Image\n", 19 | "from itertools import product\n", 20 | "import time" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "id": "f3e4f1bc", 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "def get_percentage_diff(previous, current):\n", 31 | " try:\n", 32 | " percentage = abs(previous - current)/max(previous, current) * 100\n", 33 | " except ZeroDivisionError:\n", 34 | " percentage = float('inf')\n", 35 | " return percentage" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "id": "711d6a15", 41 | "metadata": {}, 42 | "source": [ 43 | "

Hand Labeled

" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "id": "c581a931", 49 | "metadata": {}, 50 | "source": [ 51 | "

Create Smaller Patches 128x128

" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "id": "5ecd1481", 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "foldersLabels = ['LabelHand','S1OtsuLabelHand']" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "id": "6bd38289", 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "w,h=512,512\n", 72 | "d = 128\n", 73 | "grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 74 | "initial_path = os.getcwd()\n", 75 | "\n", 76 | "for folder in foldersLabels:\n", 77 | " source_path = initial_path +'/'+ folder+'/'\n", 78 | " os.chdir(source_path)\n", 79 | " print(os.getcwd())\n", 80 | " for item in os.listdir(source_path):\n", 81 | " if item.endswith('.tif'):\n", 82 | " img = imread(source_path +item)\n", 83 | " print(f\"Item: {item}\")\n", 84 | " grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 85 | " for i, j in grid:\n", 86 | " box = (j, i, j+d, i+d)\n", 87 | " name = item[:-4]+'_'+str(i)+str(j)+str(j+d)+str(i+d)+'.tif'\n", 88 | " new_img = img[box[0]:box[2],box[1]:box[3]]\n", 89 | " print(f\"New product: {name}\")\n", 90 | " imsave(name, new_img,check_contrast=False)\n", 91 | " \n", 92 | " path = os.path.join(source_path, item)\n", 93 | " os.remove(path)\n", 94 | " time.sleep(1)" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "id": "d688e541", 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "folder = [\"S2Hand\"]" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "id": "2e34b934", 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "w,h=512,512\n", 115 | "d = 128\n", 116 | "grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 117 | "initial_path = os.getcwd()\n", 118 | "\n", 119 | "for folder in folders:\n", 120 | " source_path = initial_path +'/'+ folder+'/'\n", 121 | " os.chdir(source_path)\n", 122 | " print(os.getcwd())\n", 123 | " for item in os.listdir(source_path):\n", 124 | " if item.endswith('.tif'):\n", 125 | " img = imread(source_path +item)\n", 126 | " print(f\"Item: {item}\")\n", 127 | " grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 128 | " for i, j in grid:\n", 129 | " box = (j, i, j+d, i+d)\n", 130 | " name = item[:-4]+'_'+str(i)+str(j)+str(j+d)+str(i+d)+'.tif'\n", 131 | " new_img = img[[2,3,4,8,11],box[0]:box[2],box[1]:box[3]] # Keep only RGB NIR and SWIR Bands\n", 132 | " print(f\"New product: {name}\")\n", 133 | " imsave(name, new_img,check_contrast=False)\n", 134 | " \n", 135 | " path = os.path.join(source_path, item)\n", 136 | " os.remove(path)\n", 137 | " time.sleep(1)" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": null, 143 | "id": "2c377eef", 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "folder = [\"S1Hand\"]" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "id": "2dd15e11", 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "w,h=512,512\n", 158 | "d = 128\n", 159 | "grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 160 | "initial_path = os.getcwd()\n", 161 | "\n", 162 | "for folder in folders:\n", 163 | " source_path = initial_path +'/'+ folder+'/'\n", 164 | " os.chdir(source_path)\n", 165 | " print(os.getcwd())\n", 166 | " for item in os.listdir(source_path):\n", 167 | " if item.endswith('.tif'):\n", 168 | " img = imread(source_path +item)\n", 169 | " print(f\"Item: {item}\")\n", 170 | " grid = product(range(0, 
h-h%d, d), range(0, w-w%d, d))\n", 171 | " for i, j in grid:\n", 172 | " box = (j, i, j+d, i+d)\n", 173 | " name = item[:-4]+'_'+str(i)+str(j)+str(j+d)+str(i+d)+'.tif'\n", 174 | " new_img = img[[2,3,4,8,11],box[0]:box[2],box[1]:box[3]]\n", 175 | " print(f\"New product: {name}\")\n", 176 | " imsave(name, new_img,check_contrast=False)\n", 177 | " \n", 178 | " path = os.path.join(source_path, item)\n", 179 | " os.remove(path)\n", 180 | " time.sleep(1)" 181 | ] 182 | }, 183 | { 184 | "cell_type": "markdown", 185 | "id": "f54a2cec", 186 | "metadata": {}, 187 | "source": [ 188 | "

Search for Patches Either Unbalanced Or With Corrupt Pixels

" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "id": "3fabdbdb", 195 | "metadata": {}, 196 | "outputs": [], 197 | "source": [ 198 | "source_path = os.getcwd() + '/S1OtsuLabelHand/'\n", 199 | "listimages=[]\n", 200 | "for item in os.listdir(source_path):\n", 201 | " print(item)\n", 202 | " img = imread(source_path +item)\n", 203 | " countzero = np.count_nonzero(img == 0)\n", 204 | " countone = np.count_nonzero(img == 1)\n", 205 | " countminus = np.count_nonzero(img ==-1) \n", 206 | " if (get_percentage_diff(countone,countzero) >50.0) or (get_percentage_diff(countzero,countone) >50.0) or (countminus>=1): \n", 207 | " x = item.split(\"_\")\n", 208 | " name = x[0] +\"_\"+ x[1]+\"_\"+x[3]\n", 209 | " listimages.append(name)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "id": "dc59bb12", 215 | "metadata": {}, 216 | "source": [ 217 | "

Erase the Patches

" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": null, 223 | "id": "d72b5a77", 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "folders = [\"S1Hand\",\"S2Hand\",\"LabelHand\",\"S1OtsuLabelHand\"]\n", 228 | "\n", 229 | "for folder in folders:\n", 230 | " source_path = os.getcwd() + '/' + folder +'/'\n", 231 | " for item in os.listdir(source_path):\n", 232 | " x = item.split(\"_\")\n", 233 | " name = x[0] +\"_\"+ x[1]+\"_\"+x[3]\n", 234 | " if name in listimages:\n", 235 | " path = os.path.join(source_path, item)\n", 236 | " os.remove(path)\n", 237 | " time.sleep(1)" 238 | ] 239 | }, 240 | { 241 | "cell_type": "markdown", 242 | "id": "7aa5274e", 243 | "metadata": {}, 244 | "source": [ 245 | "

Weakly Labeled

" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": null, 251 | "id": "16cd1a12", 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "folder = ['S1Weak']" 256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": null, 261 | "id": "107a4fdd", 262 | "metadata": {}, 263 | "outputs": [], 264 | "source": [ 265 | "w,h=512,512\n", 266 | "d = 128\n", 267 | "grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 268 | "initial_path = os.getcwd()\n", 269 | "\n", 270 | "for folder in folder:\n", 271 | " source_path = initial_path +'/'+ folder+'/'\n", 272 | " os.chdir(source_path)\n", 273 | " print(os.getcwd())\n", 274 | " for item in os.listdir(source_path):\n", 275 | " if item.endswith('.tif'):\n", 276 | " img = imread(source_path +item)\n", 277 | " print(f\"Item: {item}\")\n", 278 | " grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 279 | " for i, j in grid:\n", 280 | " box = (j, i, j+d, i+d)\n", 281 | " name = item[:-4]+'_'+str(i)+str(j)+str(j+d)+str(i+d)+'.tif'\n", 282 | " new_img = img[0:2,box[0]:box[2],box[1]:box[3]]\n", 283 | " print(f\"New product: {name}\")\n", 284 | " imsave(name, new_img,check_contrast=False)\n", 285 | " \n", 286 | " path = os.path.join(source_path, item)\n", 287 | " os.remove(path)\n", 288 | " time.sleep(1)" 289 | ] 290 | }, 291 | { 292 | "cell_type": "code", 293 | "execution_count": null, 294 | "id": "905acbe3", 295 | "metadata": {}, 296 | "outputs": [], 297 | "source": [ 298 | "folder = ['S1OtsuLabelWeak']" 299 | ] 300 | }, 301 | { 302 | "cell_type": "code", 303 | "execution_count": null, 304 | "id": "715156da", 305 | "metadata": {}, 306 | "outputs": [], 307 | "source": [ 308 | "w,h=512,512\n", 309 | "d = 128\n", 310 | "grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 311 | "initial_path = os.getcwd()\n", 312 | "\n", 313 | "for folder in folder:\n", 314 | " source_path = initial_path +'/'+ folder+'/'\n", 315 | " os.chdir(source_path)\n", 316 | " print(os.getcwd())\n", 317 | " for item in os.listdir(source_path):\n", 318 | " if item.endswith('.tif'):\n", 319 | " img = imread(source_path +item)\n", 320 | " print(f\"Item: {item}\")\n", 321 | " grid = product(range(0, h-h%d, d), range(0, w-w%d, d))\n", 322 | " for i, j in grid:\n", 323 | " box = (j, i, j+d, i+d)\n", 324 | " name = item[:-4]+'_'+str(i)+str(j)+str(j+d)+str(i+d)+'.tif'\n", 325 | " new_img = img[box[0]:box[2],box[1]:box[3]]\n", 326 | " print(f\"New product: {name}\")\n", 327 | " imsave(name, new_img,check_contrast=False)\n", 328 | " time.sleep(1)\n", 329 | " \n", 330 | " path = os.path.join(source_path, item)\n", 331 | " os.remove(path)\n", 332 | " time.sleep(1)" 333 | ] 334 | }, 335 | { 336 | "cell_type": "markdown", 337 | "id": "dd98d0af", 338 | "metadata": {}, 339 | "source": [ 340 | "

Search for Patches Either Unbalanced Or With Corrupt Pixels

" 341 | ] 342 | }, 343 | { 344 | "cell_type": "code", 345 | "execution_count": null, 346 | "id": "489fb2f5", 347 | "metadata": {}, 348 | "outputs": [], 349 | "source": [ 350 | "source_path = os.getcwd() + '/S1OtsuLabelWeak/'\n", 351 | "listimagesw=[]\n", 352 | "for item in os.listdir(source_path):\n", 353 | " print(item)\n", 354 | " img = imread(source_path +item)\n", 355 | " countzero = np.count_nonzero(img == 0)\n", 356 | " countone = np.count_nonzero(img == 1)\n", 357 | " countminus = np.count_nonzero(img ==-1) \n", 358 | " if (get_percentage_diff(countone,countzero) >50.0) or (get_percentage_diff(countzero,countone) >50.0) or (countminus>=1): \n", 359 | " x = item.split(\"_\")\n", 360 | " name = x[0] +\"_\"+ x[1]+\"_\"+x[3]\n", 361 | " listimagesw.append(name)" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "id": "a925c30f", 367 | "metadata": {}, 368 | "source": [ 369 | "

Erase the Patches

" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "id": "7bd01be4", 376 | "metadata": {}, 377 | "outputs": [], 378 | "source": [ 379 | "folders = [\"S1Weak\",\"S1OtsuLabelWeak\"]\n", 380 | "\n", 381 | "for folder in folders:\n", 382 | " source_path = os.getcwd() + '/' + folder +'/'\n", 383 | " for item in os.listdir(source_path):\n", 384 | " x = item.split(\"_\")\n", 385 | " name = x[0] +\"_\"+ x[1]+\"_\"+x[3]\n", 386 | " if name in listimagesw:\n", 387 | " path = os.path.join(source_path, item)\n", 388 | " os.remove(path)\n", 389 | " time.sleep(1)" 390 | ] 391 | }, 392 | { 393 | "cell_type": "code", 394 | "execution_count": null, 395 | "id": "9f2f0fae", 396 | "metadata": {}, 397 | "outputs": [], 398 | "source": [] 399 | }, 400 | { 401 | "cell_type": "code", 402 | "execution_count": null, 403 | "id": "b6a1dcf6", 404 | "metadata": {}, 405 | "outputs": [], 406 | "source": [] 407 | } 408 | ], 409 | "metadata": { 410 | "kernelspec": { 411 | "display_name": "Python 3 (ipykernel)", 412 | "language": "python", 413 | "name": "python3" 414 | }, 415 | "language_info": { 416 | "codemirror_mode": { 417 | "name": "ipython", 418 | "version": 3 419 | }, 420 | "file_extension": ".py", 421 | "mimetype": "text/x-python", 422 | "name": "python", 423 | "nbconvert_exporter": "python", 424 | "pygments_lexer": "ipython3", 425 | "version": "3.7.12" 426 | } 427 | }, 428 | "nbformat": 4, 429 | "nbformat_minor": 5 430 | } 431 | -------------------------------------------------------------------------------- /Random_Forest/Random_Forest_Floods.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "ce79c87a", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "## Created By Konstantinos Fokeas\n", 11 | "## credits to: @DigitalSreeni" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "id": "0eaa02f7", 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import numpy as np\n", 22 | "import cv2\n", 23 | "import pandas as pd\n", 24 | "from scipy import ndimage as nd\n", 25 | " \n", 26 | "import pickle\n", 27 | "from matplotlib import pyplot as plt\n", 28 | "import os\n", 29 | "from skimage.io import imread, imshow\n", 30 | "from skimage.filters import roberts, sobel, scharr, prewitt\n", 31 | "import glob" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "id": "1bdd2e47", 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "####################################################################\n", 42 | "## STEP 1: READ TRAINING IMAGES AND EXTRACT FEATURES - Sentinel 1\n", 43 | "################################################################\n", 44 | "image_dataset_s1 = pd.DataFrame() #Dataframe to capture image features\n", 45 | "\n", 46 | "img_path = \"/content/S1Hand/\" ## CHANGE ME!!!\n", 47 | "for image in os.listdir(img_path):#iterate through each file \n", 48 | " df = pd.DataFrame() #Temporary data frame to capture information for each loop.\n", 49 | " #Reset dataframe to blank after each loop.\n", 50 | "\n", 51 | " vv = imread(img_path + image)[0:1,:,:].reshape((128,128)) \n", 52 | " vh = imread(img_path + image)[1:2,:,:].reshape((128,128))\n", 53 | " vv_vh = (vv/vh)\n", 54 | " \n", 55 | " #Add pixel values to the data frame\n", 56 | " vv_values = vv.reshape(-1)\n", 57 | " df['VV'] = vv_values #Pixel value itself as a feature\n", 58 | " \n", 59 | " vh_values 
= vh.reshape(-1)\n", 60 | " df['VH'] = vh_values \n", 61 | " \n", 62 | " vv_vh_values = vv_vh.reshape(-1)\n", 63 | " df['VV/VH'] = vv_vh_values \n", 64 | "\n", 65 | " #MEDIAN with sigma=3\n", 66 | " median_img = nd.median_filter(vh, size=3)\n", 67 | " median_img1 = median_img.reshape(-1)\n", 68 | " df['Median_s3'] = median_img1\n", 69 | "\n", 70 | " #VARIANCE with size=3\n", 71 | " variance_img = nd.generic_filter(vh, np.var, size=3)\n", 72 | " variance_img1 = variance_img.reshape(-1)\n", 73 | " df['Variance_s3'] = variance_img1 \n", 74 | "\n", 75 | " #ROBERTS EDGE\n", 76 | " edge_roberts = roberts(vh)\n", 77 | " edge_roberts1 = edge_roberts.reshape(-1)\n", 78 | " df['Roberts'] = edge_roberts1\n", 79 | "\n", 80 | " ###################################### \n", 81 | " #Update dataframe for images to include details for each image in the loop\n", 82 | " image_dataset_s1 = image_dataset_s1.append(df)" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "id": "ee05dac4", 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "####################################################################\n", 93 | "## STEP 1: READ TRAINING IMAGES AND EXTRACT FEATURES - Sentinel 2\n", 94 | "################################################################\n", 95 | "image_dataset_s2 = pd.DataFrame() #Dataframe to capture image features\n", 96 | "\n", 97 | "img_path = \"/content/S2Hand/\" # CHANGE ME!!!\n", 98 | "for image in os.listdir(img_path):\n", 99 | " df = pd.DataFrame() #Temporary data frame to capture information for each loop.\n", 100 | " #Reset dataframe to blank after each loop.\n", 101 | "\n", 102 | " blue = imread(img_path + image)[0:1,:,:].reshape((128,128)) \n", 103 | " green = imread(img_path + image)[1:2,:,:].reshape((128,128))\n", 104 | " red = imread(img_path + image)[2:3,:,:].reshape((128,128))\n", 105 | " nir = imread(img_path + image)[3:4,:,:].reshape((128,128))\n", 106 | " swir = imread(img_path + image)[4:5,:,:].reshape((128,128))\n", 107 | " \n", 108 | " # Sentinel-2 MNDWI = (B03 - B08) / (B03 + B08)\n", 109 | " ndwi = (red - swir)/ (red + swir)\n", 110 | " ndwi_values = ndwi.reshape(-1)\n", 111 | " df['NDWI'] = ndwi_values \n", 112 | "\n", 113 | " # Sentinel 2 NDVI = (NIR - RED) / (NIR + RED), where RED is B4 and NIR is B8\n", 114 | " ndvi = (nir - red)/ (red + nir)\n", 115 | " ndvi_values = ndvi.reshape(-1)\n", 116 | " df['NDVI'] = ndvi_values \n", 117 | "\n", 118 | " #MEDIAN with sigma=3\n", 119 | " median_img = nd.median_filter(nir, size=3)\n", 120 | " median_img1 = median_img.reshape(-1)\n", 121 | " df['Median_s3'] = median_img1\n", 122 | "\n", 123 | " #VARIANCE with size=3\n", 124 | " variance_img = nd.generic_filter(nir, np.var, size=3)\n", 125 | " variance_img1 = variance_img.reshape(-1)\n", 126 | " df['Variance_s3'] = variance_img1 \n", 127 | "\n", 128 | " #ROBERTS EDGE\n", 129 | " edge_roberts = roberts(nir)\n", 130 | " edge_roberts1 = edge_roberts.reshape(-1)\n", 131 | " df['Roberts'] = edge_roberts1\n", 132 | "\n", 133 | " #START ADDING DATA TO THE DATAFRAME \n", 134 | "\n", 135 | " blue_values = blue.reshape(-1)\n", 136 | " df['Blue'] = blue_values \n", 137 | " \n", 138 | " green_values = green.reshape(-1)\n", 139 | " df['Green'] = green_values \n", 140 | " \n", 141 | " \n", 142 | " image_dataset_s2 = image_dataset_s2.append(df) " 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "id": "6f49fbde", 149 | "metadata": {}, 150 | "outputs": [], 151 | "source": [ 152 | 
"###########################################################\n", 153 | "# CONCATENATE S2 AND S1 DATAFRAMES\n", 154 | "##########################################################\n", 155 | "frames = [image_dataset_s1, image_dataset_s2]\n", 156 | "\n", 157 | "image_dataset = pd.concat(frames,axis=1,join='outer')" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "id": "abf9d028", 164 | "metadata": {}, 165 | "outputs": [], 166 | "source": [ 167 | "###########################################################\n", 168 | "# STEP 2: READ LABELED IMAGES (MASKS) AND CREATE ANOTHER DATAFRAME\n", 169 | " # WITH LABEL VALUES AND LABEL FILE NAMES\n", 170 | "##########################################################\n", 171 | "mask_dataset = pd.DataFrame() #Create dataframe to capture mask info.\n", 172 | "# label_path = \"/content/S1Label\" #iterate through each file to perform some action\n", 173 | "\n", 174 | "mask_path = \"/content/S1OtsuLabelHand/\" ### CHANGE ME!!!\n", 175 | "for mask in os.listdir(mask_path):\n", 176 | " df2 = pd.DataFrame() #Temporary dataframe to capture info for each mask in the loop\n", 177 | " input_mask = imread(mask_path + mask)\n", 178 | " label = input_mask\n", 179 | " #Add pixel values to the data frame\n", 180 | " label_values = label.reshape(-1)\n", 181 | " df2['Label_Value'] = label_values\n", 182 | " df2['Mask_Name'] = mask\n", 183 | " mask_dataset = mask_dataset.append(df2) #Update mask dataframe with all the info from each mask" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "id": "4d302828", 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "################################################################\n", 194 | " # STEP 3: GET DATA READY FOR RANDOM FOREST (or other classifier)\n", 195 | " # COMBINE BOTH DATAFRAMES INTO A SINGLE DATASET\n", 196 | "###############################################################\n", 197 | "dataset = pd.concat([image_dataset, mask_dataset], axis=1) #Concatenate both image and mask datasets\n", 198 | "\n", 199 | "#If you expect image and mask names to be the same this is where we can perform sanity check\n", 200 | "#dataset['Image_Name'].equals(dataset['Mask_Name']) \n", 201 | "##\n", 202 | "##If we do not want to include pixels with value 0 \n", 203 | "##e.g. 
Sometimes unlabeled pixels may be given a value 0.\n", 204 | "# dataset.replace([np.inf, -np.inf], np.nan, inplace=True)\n", 205 | "# dataset.fillna(999, inplace=True)\n", 206 | "dataset.fillna(-1, inplace=True)\n", 207 | "dataset = dataset[dataset.Label_Value != -1]\n", 208 | "dataset.dropna()\n", 209 | "# dataset = dataset.reset_index()\n", 210 | "# dataset.replace([np.inf, -np.inf], np.nan, inplace=True)\n", 211 | "# dataset.fillna(999, inplace=True)\n", 212 | "\n", 213 | "\n", 214 | "#Assign training features to X and labels to Y\n", 215 | "#Drop columns that are not relevant for training (non-features)\n", 216 | "X = dataset.drop(labels = [\"Mask_Name\", \"Label_Value\"], axis=1) \n", 217 | "\n", 218 | "#Assign label values to Y (our prediction)\n", 219 | "Y = dataset[\"Label_Value\"].values " 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": null, 225 | "id": "23e6b35d", 226 | "metadata": {}, 227 | "outputs": [], 228 | "source": [ 229 | "## Sanity Check\n", 230 | "print(X.shape)\n", 231 | "print(Y.shape)" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": null, 237 | "id": "7aba31cb", 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "################################################################\n", 242 | " # STEP : Scale The Features\n", 243 | "###############################################################\n", 244 | "from sklearn import preprocessing\n", 245 | "scaler = preprocessing.StandardScaler().fit(X)\n", 246 | "X_scaled = scaler.transform(X)" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "id": "b0523785", 253 | "metadata": {}, 254 | "outputs": [], 255 | "source": [ 256 | "##Split data into train and test to verify accuracy after fitting the model. \n", 257 | "from sklearn.model_selection import train_test_split\n", 258 | "X_train, X_test, y_train, y_test = train_test_split(X_scaled, Y, test_size=0.3) #random_state=20" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "id": "d5937692", 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "####################################################################\n", 269 | "# STEP 4: Define the classifier and fit a model with our training data\n", 270 | "###################################################################\n", 271 | "\n", 272 | "#Import training classifier\n", 273 | "from sklearn.ensemble import RandomForestClassifier\n", 274 | "## Instantiate model with n number of decision trees\n", 275 | "model = RandomForestClassifier(n_estimators = 20, verbose=10)\n", 276 | "\n", 277 | "## Train the model on training data\n", 278 | "model.fit(X_train, y_train)" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": null, 284 | "id": "510bc6e3", 285 | "metadata": {}, 286 | "outputs": [], 287 | "source": [ 288 | "#######################################################\n", 289 | "# STEP 5: Accuracy check - BINARY CLASSIFICATION\n", 290 | "#########################################################\n", 291 | "\n", 292 | "from sklearn import metrics\n", 293 | "prediction_test = model.predict(X_test)\n", 294 | "##Check accuracy on test dataset. 
\n", 295 | "print (\"Accuracy = \", metrics.accuracy_score(y_test, prediction_test))\n", 296 | "# print(\"IOU = \", metrics.jaccard_score(y_test, prediction_test))\n", 297 | "print(\"Precision = \", metrics.precision_score(y_test, prediction_test))\n", 298 | "print(\"Recall = \", \tmetrics.recall_score(y_test, prediction_test))\n", 299 | "print(\"F1 score = \", metrics.f1_score(y_test, prediction_test))" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": null, 305 | "id": "1f0bf211", 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "#######################################################\n", 310 | "# STEP 5: Accuracy check - MULTILABEL CLASSIFICATION\n", 311 | "#########################################################\n", 312 | "\n", 313 | "from sklearn import metrics\n", 314 | "prediction_test = model.predict(X_test)\n", 315 | "##Check accuracy on test dataset. \n", 316 | "print (\"Accuracy = \", metrics.accuracy_score(y_test, prediction_test))\n", 317 | "print(\"IOU = \", metrics.jaccard_score(y_test, prediction_test, average =\"micro\"))\n", 318 | "print(\"Precision = \", metrics.precision_score(y_test, prediction_test, average =\"micro\"))\n", 319 | "print(\"Recall = \", \tmetrics.recall_score(y_test, prediction_test, average =\"micro\"))\n", 320 | "print(\"F1 score = \", metrics.f1_score(y_test, prediction_test, average =\"micro\"))" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "id": "857297d2", 327 | "metadata": {}, 328 | "outputs": [], 329 | "source": [ 330 | "#######################################################\n", 331 | "# STEP 6: Plot Feature Importance\n", 332 | "#########################################################\n", 333 | "\n", 334 | "# plt.barh(image_dataset.columns, model.feature_importances_)\n", 335 | "fig=plt.figure(figsize=(10,10))\n", 336 | "sorted_idx = model.feature_importances_.argsort()\n", 337 | "plt.barh(image_dataset.columns[sorted_idx], model.feature_importances_[sorted_idx])\n", 338 | "# plt.margins(x=0, y=-0.10) \n", 339 | "plt.xlabel(\"Random Forest Feature Importance\")" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": null, 345 | "id": "79c57f2a", 346 | "metadata": {}, 347 | "outputs": [], 348 | "source": [] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": null, 353 | "id": "2982760e", 354 | "metadata": {}, 355 | "outputs": [], 356 | "source": [] 357 | }, 358 | { 359 | "cell_type": "code", 360 | "execution_count": null, 361 | "id": "2a8bafcc", 362 | "metadata": {}, 363 | "outputs": [], 364 | "source": [] 365 | } 366 | ], 367 | "metadata": { 368 | "kernelspec": { 369 | "display_name": "Python 3 (ipykernel)", 370 | "language": "python", 371 | "name": "python3" 372 | }, 373 | "language_info": { 374 | "codemirror_mode": { 375 | "name": "ipython", 376 | "version": 3 377 | }, 378 | "file_extension": ".py", 379 | "mimetype": "text/x-python", 380 | "name": "python", 381 | "nbconvert_exporter": "python", 382 | "pygments_lexer": "ipython3", 383 | "version": "3.7.12" 384 | } 385 | }, 386 | "nbformat": 4, 387 | "nbformat_minor": 5 388 | } 389 | -------------------------------------------------------------------------------- /U-NET/UNET_S1S2_MultiModal.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": { 7 | "id": "7Npp6F20-U9o" 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import numpy as np\n", 
12 | "import cv2\n", 13 | "import pandas as pd\n", 14 | "from scipy import ndimage as nd\n", 15 | " \n", 16 | "import pickle\n", 17 | "import os\n", 18 | "from skimage.io import imread, imshow\n", 19 | "from skimage.filters import roberts, sobel, scharr, prewitt\n", 20 | "import glob\n", 21 | "\n", 22 | "import tensorflow as tf\n", 23 | "import random\n", 24 | "from skimage.io import imread, imshow\n", 25 | "import skimage.transform\n", 26 | "import matplotlib.pyplot as plt" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "### Prepare the Dataset" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 44, 39 | "metadata": { 40 | "colab": { 41 | "base_uri": "https://localhost:8080/" 42 | }, 43 | "id": "mPC75L16-VAB", 44 | "outputId": "efa2ae89-a06b-4e38-9ec3-d79eb593539a" 45 | }, 46 | "outputs": [ 47 | { 48 | "name": "stdout", 49 | "output_type": "stream", 50 | "text": [ 51 | "577\n", 52 | "577\n", 53 | "577\n" 54 | ] 55 | } 56 | ], 57 | "source": [ 58 | "source_paths1 = \"S1Hand/\"\n", 59 | "source_paths2 = \"S2Hand\"\n", 60 | "label_path = \"LabelHand/\"\n", 61 | "\n", 62 | "sources1 = []\n", 63 | "sources2 =[]\n", 64 | "label = []\n", 65 | "\n", 66 | "for file in os.listdir(source_paths1):\n", 67 | " if file.endswith(\"tif\"):\n", 68 | " sources1.append(file)\n", 69 | "\n", 70 | "for file in os.listdir(source_paths2):\n", 71 | " if file.endswith(\"tif\"):\n", 72 | " sources2.append(file)\n", 73 | " \n", 74 | "for file in os.listdir(label_path):\n", 75 | " if file.endswith(\"tif\"):\n", 76 | " label.append(file)\n", 77 | "\n", 78 | "\n", 79 | "## SAnity Check\n", 80 | "sources1 = sorted(sources1)\n", 81 | "sources2 = sorted(sources2)\n", 82 | "label = sorted(label)\n", 83 | "print(len(sources1))\n", 84 | "print(len(sources2))\n", 85 | "print(len(label))" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 45, 91 | "metadata": { 92 | "id": "pHpwRO2h-VDF" 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "IMG_WIDTH = 128\n", 97 | "IMG_HEIGHT = 128\n", 98 | "IMG_CHANNELS = 7\n", 99 | "\n", 100 | "\n", 101 | "#Build the model\n", 102 | "inputs = tf.keras.layers.Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))\n", 103 | "s = tf.keras.layers.Lambda(lambda x: x / 255)(inputs)" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 46, 109 | "metadata": { 110 | "id": "6kHXeG5m-VFF" 111 | }, 112 | "outputs": [], 113 | "source": [ 114 | "X = np.zeros((len(sources1), IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)\n", 115 | "Y = np.zeros((len(label),IMG_HEIGHT, IMG_WIDTH, 1), dtype=np.uint8)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 47, 121 | "metadata": { 122 | "colab": { 123 | "base_uri": "https://localhost:8080/" 124 | }, 125 | "id": "rKiNGKAx-VIT", 126 | "outputId": "eca5d16c-9cc0-494d-aaf7-8536a75ad4a4" 127 | }, 128 | "outputs": [ 129 | { 130 | "name": "stdout", 131 | "output_type": "stream", 132 | "text": [ 133 | "(577, 128, 128, 7)\n", 134 | "(577, 128, 128, 1)\n" 135 | ] 136 | } 137 | ], 138 | "source": [ 139 | "# Sanity Check\n", 140 | "print(X.shape)\n", 141 | "print(Y.shape)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": 48, 147 | "metadata": { 148 | "id": "3WWhCybm-VKa" 149 | }, 150 | "outputs": [], 151 | "source": [ 152 | "for count, file in enumerate(sources1):\n", 153 | " source_img = imread(source_paths1 +'/' +file ) #[0:2,:,:]\n", 154 | " name = file.split(\"_\")\n", 155 | " s2_name = name[0] + \"_\" + name[1] 
+ \"_\" + \"S2Hand_\" + name[3]\n", 156 | "\n", 157 | " s1_image = np.transpose(source_img, (1, 2, 0))\n", 158 | " s2_image =imread(source_paths2+'/' +s2_name)\n", 159 | " s2_image = np.transpose(s2_image, (1, 2, 0))\n", 160 | "\n", 161 | " merged = np.concatenate((s2_image, s1_image), axis=2)\n", 162 | " X[count] = merged\n", 163 | "\n", 164 | "\n", 165 | "for count, file in enumerate(label):\n", 166 | " slabel_img = imread(label_path + '/'+file ) #[:,:,:,IMG_CHANNELS]\n", 167 | " # print(slabel_img.min())\n", 168 | " y = np.expand_dims(slabel_img, axis=2)\n", 169 | " # print(f\"The shape of y labels: {y.min()}\")\n", 170 | " Y[count] = y " 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 49, 176 | "metadata": { 177 | "colab": { 178 | "base_uri": "https://localhost:8080/" 179 | }, 180 | "id": "6OuIvSIQ-VNZ", 181 | "outputId": "aff28868-a5d1-4bd8-b892-9d6182381dae" 182 | }, 183 | "outputs": [ 184 | { 185 | "name": "stdout", 186 | "output_type": "stream", 187 | "text": [ 188 | "[0 1 2]\n" 189 | ] 190 | } 191 | ], 192 | "source": [ 193 | "from sklearn.preprocessing import LabelEncoder \n", 194 | "labelencoder = LabelEncoder()\n", 195 | "Y_resahped = Y.reshape(-1,1)\n", 196 | "Y_resahped_encoded = labelencoder.fit_transform(Y_resahped)\n", 197 | "Y_original = Y_resahped_encoded.reshape(577,128,128,1)\n", 198 | "print(np.unique(Y_original))" 199 | ] 200 | }, 201 | { 202 | "cell_type": "markdown", 203 | "metadata": {}, 204 | "source": [ 205 | "### Define the UNET" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 50, 211 | "metadata": { 212 | "colab": { 213 | "base_uri": "https://localhost:8080/" 214 | }, 215 | "id": "UnDBdPkK-VPY", 216 | "outputId": "7c90c155-8f61-4ebe-ee90-c36f748f8644" 217 | }, 218 | "outputs": [ 219 | { 220 | "name": "stdout", 221 | "output_type": "stream", 222 | "text": [ 223 | "Model: \"model_3\"\n", 224 | "__________________________________________________________________________________________________\n", 225 | " Layer (type) Output Shape Param # Connected to \n", 226 | "==================================================================================================\n", 227 | " input_2 (InputLayer) [(None, 128, 128, 7 0 [] \n", 228 | " )] \n", 229 | " \n", 230 | " lambda_1 (Lambda) (None, 128, 128, 7) 0 ['input_2[0][0]'] \n", 231 | " \n", 232 | " conv2d_57 (Conv2D) (None, 128, 128, 16 1024 ['lambda_1[0][0]'] \n", 233 | " ) \n", 234 | " \n", 235 | " dropout_27 (Dropout) (None, 128, 128, 16 0 ['conv2d_57[0][0]'] \n", 236 | " ) \n", 237 | " \n", 238 | " conv2d_58 (Conv2D) (None, 128, 128, 16 2320 ['dropout_27[0][0]'] \n", 239 | " ) \n", 240 | " \n", 241 | " max_pooling2d_12 (MaxPooling2D (None, 64, 64, 16) 0 ['conv2d_58[0][0]'] \n", 242 | " ) \n", 243 | " \n", 244 | " conv2d_59 (Conv2D) (None, 64, 64, 32) 4640 ['max_pooling2d_12[0][0]'] \n", 245 | " \n", 246 | " dropout_28 (Dropout) (None, 64, 64, 32) 0 ['conv2d_59[0][0]'] \n", 247 | " \n", 248 | " conv2d_60 (Conv2D) (None, 64, 64, 32) 9248 ['dropout_28[0][0]'] \n", 249 | " \n", 250 | " max_pooling2d_13 (MaxPooling2D (None, 32, 32, 32) 0 ['conv2d_60[0][0]'] \n", 251 | " ) \n", 252 | " \n", 253 | " conv2d_61 (Conv2D) (None, 32, 32, 64) 18496 ['max_pooling2d_13[0][0]'] \n", 254 | " \n", 255 | " dropout_29 (Dropout) (None, 32, 32, 64) 0 ['conv2d_61[0][0]'] \n", 256 | " \n", 257 | " conv2d_62 (Conv2D) (None, 32, 32, 64) 36928 ['dropout_29[0][0]'] \n", 258 | " \n", 259 | " max_pooling2d_14 (MaxPooling2D (None, 16, 16, 64) 0 ['conv2d_62[0][0]'] \n", 260 | " ) \n", 261 | " 
\n", 262 | " conv2d_63 (Conv2D) (None, 16, 16, 128) 73856 ['max_pooling2d_14[0][0]'] \n", 263 | " \n", 264 | " dropout_30 (Dropout) (None, 16, 16, 128) 0 ['conv2d_63[0][0]'] \n", 265 | " \n", 266 | " conv2d_64 (Conv2D) (None, 16, 16, 128) 147584 ['dropout_30[0][0]'] \n", 267 | " \n", 268 | " max_pooling2d_15 (MaxPooling2D (None, 8, 8, 128) 0 ['conv2d_64[0][0]'] \n", 269 | " ) \n", 270 | " \n", 271 | " conv2d_65 (Conv2D) (None, 8, 8, 256) 295168 ['max_pooling2d_15[0][0]'] \n", 272 | " \n", 273 | " dropout_31 (Dropout) (None, 8, 8, 256) 0 ['conv2d_65[0][0]'] \n", 274 | " \n", 275 | " conv2d_66 (Conv2D) (None, 8, 8, 256) 590080 ['dropout_31[0][0]'] \n", 276 | " \n", 277 | " conv2d_transpose_12 (Conv2DTra (None, 16, 16, 128) 131200 ['conv2d_66[0][0]'] \n", 278 | " nspose) \n", 279 | " \n", 280 | " concatenate_12 (Concatenate) (None, 16, 16, 256) 0 ['conv2d_transpose_12[0][0]', \n", 281 | " 'conv2d_64[0][0]'] \n", 282 | " \n", 283 | " conv2d_67 (Conv2D) (None, 16, 16, 128) 295040 ['concatenate_12[0][0]'] \n", 284 | " \n", 285 | " dropout_32 (Dropout) (None, 16, 16, 128) 0 ['conv2d_67[0][0]'] \n", 286 | " \n", 287 | " conv2d_68 (Conv2D) (None, 16, 16, 128) 147584 ['dropout_32[0][0]'] \n", 288 | " \n", 289 | " conv2d_transpose_13 (Conv2DTra (None, 32, 32, 64) 32832 ['conv2d_68[0][0]'] \n", 290 | " nspose) \n", 291 | " \n", 292 | " concatenate_13 (Concatenate) (None, 32, 32, 128) 0 ['conv2d_transpose_13[0][0]', \n", 293 | " 'conv2d_62[0][0]'] \n", 294 | " \n", 295 | " conv2d_69 (Conv2D) (None, 32, 32, 64) 73792 ['concatenate_13[0][0]'] \n", 296 | " \n", 297 | " dropout_33 (Dropout) (None, 32, 32, 64) 0 ['conv2d_69[0][0]'] \n", 298 | " \n", 299 | " conv2d_70 (Conv2D) (None, 32, 32, 64) 36928 ['dropout_33[0][0]'] \n", 300 | " \n", 301 | " conv2d_transpose_14 (Conv2DTra (None, 64, 64, 32) 8224 ['conv2d_70[0][0]'] \n", 302 | " nspose) \n", 303 | " \n", 304 | " concatenate_14 (Concatenate) (None, 64, 64, 64) 0 ['conv2d_transpose_14[0][0]', \n", 305 | " 'conv2d_60[0][0]'] \n", 306 | " \n", 307 | " conv2d_71 (Conv2D) (None, 64, 64, 32) 18464 ['concatenate_14[0][0]'] \n", 308 | " \n", 309 | " dropout_34 (Dropout) (None, 64, 64, 32) 0 ['conv2d_71[0][0]'] \n", 310 | " \n", 311 | " conv2d_72 (Conv2D) (None, 64, 64, 32) 9248 ['dropout_34[0][0]'] \n", 312 | " \n", 313 | " conv2d_transpose_15 (Conv2DTra (None, 128, 128, 16 2064 ['conv2d_72[0][0]'] \n", 314 | " nspose) ) \n", 315 | " \n", 316 | " concatenate_15 (Concatenate) (None, 128, 128, 32 0 ['conv2d_transpose_15[0][0]', \n", 317 | " ) 'conv2d_58[0][0]'] \n", 318 | " \n", 319 | " conv2d_73 (Conv2D) (None, 128, 128, 16 4624 ['concatenate_15[0][0]'] \n", 320 | " ) \n", 321 | " \n", 322 | " dropout_35 (Dropout) (None, 128, 128, 16 0 ['conv2d_73[0][0]'] \n", 323 | " ) \n", 324 | " \n", 325 | " conv2d_74 (Conv2D) (None, 128, 128, 16 2320 ['dropout_35[0][0]'] \n", 326 | " ) \n", 327 | " \n", 328 | " conv2d_75 (Conv2D) (None, 128, 128, 3) 51 ['conv2d_74[0][0]'] \n", 329 | " \n", 330 | "==================================================================================================\n", 331 | "Total params: 1,941,715\n", 332 | "Trainable params: 1,941,715\n", 333 | "Non-trainable params: 0\n", 334 | "__________________________________________________________________________________________________\n" 335 | ] 336 | } 337 | ], 338 | "source": [ 339 | "#Contraction path\n", 340 | "c1 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(s)\n", 341 | "c1 = tf.keras.layers.Dropout(0.1)(c1)\n", 342 | "c1 = 
tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c1)\n", 343 | "p1 = tf.keras.layers.MaxPooling2D((2, 2))(c1)\n", 344 | "\n", 345 | "c2 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p1)\n", 346 | "c2 = tf.keras.layers.Dropout(0.1)(c2)\n", 347 | "c2 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c2)\n", 348 | "p2 = tf.keras.layers.MaxPooling2D((2, 2))(c2)\n", 349 | " \n", 350 | "c3 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p2)\n", 351 | "c3 = tf.keras.layers.Dropout(0.2)(c3)\n", 352 | "c3 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c3)\n", 353 | "p3 = tf.keras.layers.MaxPooling2D((2, 2))(c3)\n", 354 | " \n", 355 | "c4 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p3)\n", 356 | "c4 = tf.keras.layers.Dropout(0.2)(c4)\n", 357 | "c4 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c4)\n", 358 | "p4 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(c4)\n", 359 | " \n", 360 | "c5 = tf.keras.layers.Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p4)\n", 361 | "c5 = tf.keras.layers.Dropout(0.3)(c5)\n", 362 | "c5 = tf.keras.layers.Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c5)\n", 363 | "\n", 364 | "#Expansive path \n", 365 | "u6 = tf.keras.layers.Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(c5)\n", 366 | "u6 = tf.keras.layers.concatenate([u6, c4])\n", 367 | "c6 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u6)\n", 368 | "c6 = tf.keras.layers.Dropout(0.2)(c6)\n", 369 | "c6 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c6)\n", 370 | " \n", 371 | "u7 = tf.keras.layers.Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(c6)\n", 372 | "u7 = tf.keras.layers.concatenate([u7, c3])\n", 373 | "c7 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u7)\n", 374 | "c7 = tf.keras.layers.Dropout(0.2)(c7)\n", 375 | "c7 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c7)\n", 376 | " \n", 377 | "u8 = tf.keras.layers.Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(c7)\n", 378 | "u8 = tf.keras.layers.concatenate([u8, c2])\n", 379 | "c8 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u8)\n", 380 | "c8 = tf.keras.layers.Dropout(0.1)(c8)\n", 381 | "c8 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c8)\n", 382 | " \n", 383 | "u9 = tf.keras.layers.Conv2DTranspose(16, (2, 2), strides=(2, 2), padding='same')(c8)\n", 384 | "u9 = tf.keras.layers.concatenate([u9, c1], axis=3)\n", 385 | "c9 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u9)\n", 386 | "c9 = tf.keras.layers.Dropout(0.1)(c9)\n", 387 | "c9 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c9)\n", 388 | " \n", 389 | "outputs = tf.keras.layers.Conv2D(3, (1, 1), activation='softmax')(c9) #sigmoid\n", 390 | " \n", 
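# Note on the lines just below: an SGD optimizer ("opt") is instantiated but the
# model is then compiled with the string 'adam', so "opt" is unused dead code.
# If SGD were actually intended, a minimal sketch would pass it explicitly:
# model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])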
391 | "model = tf.keras.Model(inputs=[inputs], outputs=[outputs])\n", 392 | "opt = tf.keras.optimizers.SGD(learning_rate=0.01, momentum=0.0, nesterov=False, name=\"SGD\")\n", 393 | "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n", 394 | "model.summary()" 395 | ] 396 | }, 397 | { 398 | "cell_type": "markdown", 399 | "metadata": {}, 400 | "source": [ 401 | "### Train the Model" 402 | ] 403 | }, 404 | { 405 | "cell_type": "code", 406 | "execution_count": 51, 407 | "metadata": { 408 | "id": "2rVVNazz-VRl" 409 | }, 410 | "outputs": [], 411 | "source": [ 412 | "from sklearn.model_selection import train_test_split\n", 413 | "x_train, x_test, y_train, y_test = train_test_split(X, Y_original, test_size=0.10)" 414 | ] 415 | }, 416 | { 417 | "cell_type": "code", 418 | "execution_count": 52, 419 | "metadata": { 420 | "colab": { 421 | "base_uri": "https://localhost:8080/" 422 | }, 423 | "id": "pzUR3HPp-VT6", 424 | "outputId": "5eb603b9-0708-4b1b-8aad-fd15612df017" 425 | }, 426 | "outputs": [ 427 | { 428 | "name": "stdout", 429 | "output_type": "stream", 430 | "text": [ 431 | "(519, 128, 128, 3)\n" 432 | ] 433 | } 434 | ], 435 | "source": [ 436 | "from keras.utils import to_categorical\n", 437 | "train_mask_cat = to_categorical(y_train, num_classes=3)\n", 438 | "y_train_cat = train_mask_cat.reshape(y_train.shape[0], y_train.shape[1], y_train.shape[2],3)\n", 439 | "print(y_train_cat.shape)" 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": 53, 445 | "metadata": { 446 | "colab": { 447 | "base_uri": "https://localhost:8080/" 448 | }, 449 | "id": "ubVwwQaq-VWi", 450 | "outputId": "b3494caf-90e6-4f4b-ac82-4b864974b7a7" 451 | }, 452 | "outputs": [ 453 | { 454 | "name": "stdout", 455 | "output_type": "stream", 456 | "text": [ 457 | "(58, 128, 128, 3)\n" 458 | ] 459 | } 460 | ], 461 | "source": [ 462 | "test_mask_cat = to_categorical(y_test, num_classes=3)\n", 463 | "y_test_cat = test_mask_cat.reshape(y_test.shape[0], y_test.shape[1], y_test.shape[2], 3)\n", 464 | "print(y_test_cat.shape)" 465 | ] 466 | }, 467 | { 468 | "cell_type": "code", 469 | "execution_count": 54, 470 | "metadata": { 471 | "colab": { 472 | "base_uri": "https://localhost:8080/" 473 | }, 474 | "id": "jr1ftzzR-xYD", 475 | "outputId": "879219dd-2af1-436f-d415-1966a4b5e895" 476 | }, 477 | "outputs": [ 478 | { 479 | "name": "stdout", 480 | "output_type": "stream", 481 | "text": [ 482 | "Epoch 1/150\n", 483 | "38/39 [============================>.] 
- ETA: 0s - loss: 0.9866 - accuracy: 0.5342\n", 484 | "Epoch 1: val_loss improved from inf to 0.95362, saving model to model_S1S2.h5\n", 485 | "39/39 [==============================] - 4s 67ms/step - loss: 0.9847 - accuracy: 0.5367 - val_loss: 0.9536 - val_accuracy: 0.5800\n", 486 | "Epoch 2/150\n", 487 | "39/39 [==============================] - ETA: 0s - loss: 0.9226 - accuracy: 0.5887\n", 488 | "Epoch 2: val_loss improved from 0.95362 to 0.93723, saving model to model_S1S2.h5\n", 489 | "39/39 [==============================] - 2s 41ms/step - loss: 0.9226 - accuracy: 0.5887 - val_loss: 0.9372 - val_accuracy: 0.5805\n", 490 | "Epoch 3/150\n", 491 | "39/39 [==============================] - ETA: 0s - loss: 0.8916 - accuracy: 0.5959\n", 492 | "Epoch 3: val_loss improved from 0.93723 to 0.87214, saving model to model_S1S2.h5\n", 493 | "39/39 [==============================] - 2s 41ms/step - loss: 0.8916 - accuracy: 0.5959 - val_loss: 0.8721 - val_accuracy: 0.6110\n", 494 | "Epoch 4/150\n", 495 | "39/39 [==============================] - ETA: 0s - loss: 0.8803 - accuracy: 0.6026\n", 496 | "Epoch 4: val_loss did not improve from 0.87214\n", 497 | "39/39 [==============================] - 1s 35ms/step - loss: 0.8803 - accuracy: 0.6026 - val_loss: 0.8735 - val_accuracy: 0.6166\n", 498 | "Epoch 5/150\n", 499 | "39/39 [==============================] - ETA: 0s - loss: 0.8511 - accuracy: 0.6188\n", 500 | "Epoch 5: val_loss improved from 0.87214 to 0.86907, saving model to model_S1S2.h5\n", 501 | "39/39 [==============================] - 2s 41ms/step - loss: 0.8511 - accuracy: 0.6188 - val_loss: 0.8691 - val_accuracy: 0.6188\n", 502 | "Epoch 6/150\n", 503 | "39/39 [==============================] - ETA: 0s - loss: 0.8455 - accuracy: 0.6227\n", 504 | "Epoch 6: val_loss improved from 0.86907 to 0.85480, saving model to model_S1S2.h5\n", 505 | "39/39 [==============================] - 2s 41ms/step - loss: 0.8455 - accuracy: 0.6227 - val_loss: 0.8548 - val_accuracy: 0.6171\n", 506 | "Epoch 7/150\n", 507 | "39/39 [==============================] - ETA: 0s - loss: 0.8417 - accuracy: 0.6227\n", 508 | "Epoch 7: val_loss did not improve from 0.85480\n", 509 | "39/39 [==============================] - 1s 35ms/step - loss: 0.8417 - accuracy: 0.6227 - val_loss: 0.8638 - val_accuracy: 0.5702\n", 510 | "Epoch 8/150\n", 511 | "39/39 [==============================] - ETA: 0s - loss: 0.8364 - accuracy: 0.6266\n", 512 | "Epoch 8: val_loss improved from 0.85480 to 0.84154, saving model to model_S1S2.h5\n", 513 | "39/39 [==============================] - 2s 41ms/step - loss: 0.8364 - accuracy: 0.6266 - val_loss: 0.8415 - val_accuracy: 0.6045\n", 514 | "Epoch 9/150\n", 515 | "39/39 [==============================] - ETA: 0s - loss: 0.8207 - accuracy: 0.6413\n", 516 | "Epoch 9: val_loss improved from 0.84154 to 0.83639, saving model to model_S1S2.h5\n", 517 | "39/39 [==============================] - 2s 42ms/step - loss: 0.8207 - accuracy: 0.6413 - val_loss: 0.8364 - val_accuracy: 0.5989\n", 518 | "Epoch 10/150\n", 519 | "39/39 [==============================] - ETA: 0s - loss: 0.8114 - accuracy: 0.6454\n", 520 | "Epoch 10: val_loss improved from 0.83639 to 0.81918, saving model to model_S1S2.h5\n", 521 | "39/39 [==============================] - 2s 41ms/step - loss: 0.8114 - accuracy: 0.6454 - val_loss: 0.8192 - val_accuracy: 0.6395\n", 522 | "Epoch 11/150\n", 523 | "39/39 [==============================] - ETA: 0s - loss: 0.8144 - accuracy: 0.6359\n", 524 | "Epoch 11: val_loss did not improve from 0.81918\n", 525 | 
"39/39 [==============================] - 1s 36ms/step - loss: 0.8144 - accuracy: 0.6359 - val_loss: 0.8312 - val_accuracy: 0.6272\n", 526 | "Epoch 12/150\n", 527 | "39/39 [==============================] - ETA: 0s - loss: 0.7921 - accuracy: 0.6450\n", 528 | "Epoch 12: val_loss improved from 0.81918 to 0.81871, saving model to model_S1S2.h5\n", 529 | "39/39 [==============================] - 2s 41ms/step - loss: 0.7921 - accuracy: 0.6450 - val_loss: 0.8187 - val_accuracy: 0.6304\n", 530 | "Epoch 13/150\n", 531 | "39/39 [==============================] - ETA: 0s - loss: 0.7799 - accuracy: 0.6594\n", 532 | "Epoch 13: val_loss improved from 0.81871 to 0.80548, saving model to model_S1S2.h5\n", 533 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7799 - accuracy: 0.6594 - val_loss: 0.8055 - val_accuracy: 0.6108\n", 534 | "Epoch 14/150\n", 535 | "39/39 [==============================] - ETA: 0s - loss: 0.7909 - accuracy: 0.6548\n", 536 | "Epoch 14: val_loss did not improve from 0.80548\n", 537 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7909 - accuracy: 0.6548 - val_loss: 0.8731 - val_accuracy: 0.5763\n", 538 | "Epoch 15/150\n", 539 | "39/39 [==============================] - ETA: 0s - loss: 0.7887 - accuracy: 0.6546\n", 540 | "Epoch 15: val_loss improved from 0.80548 to 0.79251, saving model to model_S1S2.h5\n", 541 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7887 - accuracy: 0.6546 - val_loss: 0.7925 - val_accuracy: 0.6251\n", 542 | "Epoch 16/150\n", 543 | "39/39 [==============================] - ETA: 0s - loss: 0.7724 - accuracy: 0.6588\n", 544 | "Epoch 16: val_loss did not improve from 0.79251\n", 545 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7724 - accuracy: 0.6588 - val_loss: 0.8163 - val_accuracy: 0.6312\n", 546 | "Epoch 17/150\n", 547 | "39/39 [==============================] - ETA: 0s - loss: 0.7746 - accuracy: 0.6647\n", 548 | "Epoch 17: val_loss improved from 0.79251 to 0.79222, saving model to model_S1S2.h5\n", 549 | "39/39 [==============================] - 2s 41ms/step - loss: 0.7746 - accuracy: 0.6647 - val_loss: 0.7922 - val_accuracy: 0.6190\n", 550 | "Epoch 18/150\n", 551 | "39/39 [==============================] - ETA: 0s - loss: 0.7613 - accuracy: 0.6696\n", 552 | "Epoch 18: val_loss improved from 0.79222 to 0.77659, saving model to model_S1S2.h5\n", 553 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7613 - accuracy: 0.6696 - val_loss: 0.7766 - val_accuracy: 0.6419\n", 554 | "Epoch 19/150\n", 555 | "39/39 [==============================] - ETA: 0s - loss: 0.7805 - accuracy: 0.6628\n", 556 | "Epoch 19: val_loss improved from 0.77659 to 0.77540, saving model to model_S1S2.h5\n", 557 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7805 - accuracy: 0.6628 - val_loss: 0.7754 - val_accuracy: 0.6386\n", 558 | "Epoch 20/150\n", 559 | "39/39 [==============================] - ETA: 0s - loss: 0.7623 - accuracy: 0.6711\n", 560 | "Epoch 20: val_loss did not improve from 0.77540\n", 561 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7623 - accuracy: 0.6711 - val_loss: 0.8544 - val_accuracy: 0.6082\n", 562 | "Epoch 21/150\n", 563 | "39/39 [==============================] - ETA: 0s - loss: 0.7596 - accuracy: 0.6659\n", 564 | "Epoch 21: val_loss improved from 0.77540 to 0.77064, saving model to model_S1S2.h5\n", 565 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7596 - accuracy: 0.6659 - val_loss: 0.7706 - val_accuracy: 
0.6488\n", 566 | "Epoch 22/150\n", 567 | "39/39 [==============================] - ETA: 0s - loss: 0.7661 - accuracy: 0.6670\n", 568 | "Epoch 22: val_loss did not improve from 0.77064\n", 569 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7661 - accuracy: 0.6670 - val_loss: 0.7790 - val_accuracy: 0.6483\n", 570 | "Epoch 23/150\n", 571 | "39/39 [==============================] - ETA: 0s - loss: 0.7472 - accuracy: 0.6777\n", 572 | "Epoch 23: val_loss did not improve from 0.77064\n", 573 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7472 - accuracy: 0.6777 - val_loss: 0.7733 - val_accuracy: 0.6323\n", 574 | "Epoch 24/150\n", 575 | "39/39 [==============================] - ETA: 0s - loss: 0.7468 - accuracy: 0.6760\n", 576 | "Epoch 24: val_loss did not improve from 0.77064\n", 577 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7468 - accuracy: 0.6760 - val_loss: 0.7999 - val_accuracy: 0.6445\n", 578 | "Epoch 25/150\n", 579 | "39/39 [==============================] - ETA: 0s - loss: 0.7523 - accuracy: 0.6727\n", 580 | "Epoch 25: val_loss improved from 0.77064 to 0.72420, saving model to model_S1S2.h5\n", 581 | "39/39 [==============================] - 2s 42ms/step - loss: 0.7523 - accuracy: 0.6727 - val_loss: 0.7242 - val_accuracy: 0.6588\n", 582 | "Epoch 26/150\n", 583 | "39/39 [==============================] - ETA: 0s - loss: 0.7509 - accuracy: 0.6747\n", 584 | "Epoch 26: val_loss did not improve from 0.72420\n", 585 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7509 - accuracy: 0.6747 - val_loss: 0.8179 - val_accuracy: 0.6546\n", 586 | "Epoch 27/150\n", 587 | "39/39 [==============================] - ETA: 0s - loss: 0.7618 - accuracy: 0.6759\n", 588 | "Epoch 27: val_loss did not improve from 0.72420\n", 589 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7618 - accuracy: 0.6759 - val_loss: 0.7453 - val_accuracy: 0.6616\n", 590 | "Epoch 28/150\n", 591 | "39/39 [==============================] - ETA: 0s - loss: 0.7285 - accuracy: 0.6871\n", 592 | "Epoch 28: val_loss did not improve from 0.72420\n", 593 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7285 - accuracy: 0.6871 - val_loss: 0.7521 - val_accuracy: 0.6583\n", 594 | "Epoch 29/150\n", 595 | "39/39 [==============================] - ETA: 0s - loss: 0.7424 - accuracy: 0.6798\n", 596 | "Epoch 29: val_loss did not improve from 0.72420\n", 597 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7424 - accuracy: 0.6798 - val_loss: 0.7840 - val_accuracy: 0.6242\n", 598 | "Epoch 30/150\n", 599 | "39/39 [==============================] - ETA: 0s - loss: 0.7309 - accuracy: 0.6846\n", 600 | "Epoch 30: val_loss did not improve from 0.72420\n", 601 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7309 - accuracy: 0.6846 - val_loss: 0.7638 - val_accuracy: 0.6475\n", 602 | "Epoch 31/150\n", 603 | "39/39 [==============================] - ETA: 0s - loss: 0.7438 - accuracy: 0.6740\n", 604 | "Epoch 31: val_loss did not improve from 0.72420\n", 605 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7438 - accuracy: 0.6740 - val_loss: 0.7682 - val_accuracy: 0.6499\n", 606 | "Epoch 32/150\n", 607 | "39/39 [==============================] - ETA: 0s - loss: 0.7401 - accuracy: 0.6839\n", 608 | "Epoch 32: val_loss did not improve from 0.72420\n", 609 | "39/39 [==============================] - 1s 37ms/step - loss: 0.7401 - accuracy: 0.6839 - val_loss: 0.7279 - val_accuracy: 0.6736\n", 610 | 
"Epoch 33/150\n", 611 | "39/39 [==============================] - ETA: 0s - loss: 0.7254 - accuracy: 0.6866\n", 612 | "Epoch 33: val_loss did not improve from 0.72420\n", 613 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7254 - accuracy: 0.6866 - val_loss: 0.8586 - val_accuracy: 0.6088\n", 614 | "Epoch 34/150\n", 615 | "39/39 [==============================] - ETA: 0s - loss: 0.7232 - accuracy: 0.6885\n", 616 | "Epoch 34: val_loss did not improve from 0.72420\n", 617 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7232 - accuracy: 0.6885 - val_loss: 0.7516 - val_accuracy: 0.6664\n", 618 | "Epoch 35/150\n", 619 | "39/39 [==============================] - ETA: 0s - loss: 0.7236 - accuracy: 0.6894\n", 620 | "Epoch 35: val_loss did not improve from 0.72420\n", 621 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7236 - accuracy: 0.6894 - val_loss: 0.7511 - val_accuracy: 0.6628\n", 622 | "Epoch 36/150\n", 623 | "39/39 [==============================] - ETA: 0s - loss: 0.7073 - accuracy: 0.6918\n", 624 | "Epoch 36: val_loss did not improve from 0.72420\n", 625 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7073 - accuracy: 0.6918 - val_loss: 0.7315 - val_accuracy: 0.6695\n", 626 | "Epoch 37/150\n", 627 | "39/39 [==============================] - ETA: 0s - loss: 0.7269 - accuracy: 0.6909\n", 628 | "Epoch 37: val_loss did not improve from 0.72420\n", 629 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7269 - accuracy: 0.6909 - val_loss: 0.7288 - val_accuracy: 0.6722\n", 630 | "Epoch 38/150\n", 631 | "39/39 [==============================] - ETA: 0s - loss: 0.7222 - accuracy: 0.6887\n", 632 | "Epoch 38: val_loss did not improve from 0.72420\n", 633 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7222 - accuracy: 0.6887 - val_loss: 0.7516 - val_accuracy: 0.6536\n", 634 | "Epoch 39/150\n", 635 | "39/39 [==============================] - ETA: 0s - loss: 0.7083 - accuracy: 0.6889\n", 636 | "Epoch 39: val_loss did not improve from 0.72420\n", 637 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7083 - accuracy: 0.6889 - val_loss: 0.7473 - val_accuracy: 0.6522\n", 638 | "Epoch 40/150\n", 639 | "39/39 [==============================] - ETA: 0s - loss: 0.7135 - accuracy: 0.6932\n", 640 | "Epoch 40: val_loss did not improve from 0.72420\n", 641 | "39/39 [==============================] - 1s 36ms/step - loss: 0.7135 - accuracy: 0.6932 - val_loss: 0.8308 - val_accuracy: 0.6472\n" 642 | ] 643 | } 644 | ], 645 | "source": [ 646 | "checkpointer = tf.keras.callbacks.ModelCheckpoint('model_S1S2.h5', verbose=1, save_best_only=True)\n", 647 | "\n", 648 | "callbacks = [\n", 649 | " tf.keras.callbacks.EarlyStopping(patience=15, monitor='val_loss'),checkpointer]\n", 650 | "\n", 651 | "results = model.fit(x_train, y_train_cat, validation_split=0.1, batch_size=12, epochs=150, callbacks=callbacks)" 652 | ] 653 | }, 654 | { 655 | "cell_type": "markdown", 656 | "metadata": {}, 657 | "source": [ 658 | "### Evaluate the model on the test data" 659 | ] 660 | }, 661 | { 662 | "cell_type": "code", 663 | "execution_count": 55, 664 | "metadata": { 665 | "colab": { 666 | "base_uri": "https://localhost:8080/" 667 | }, 668 | "id": "Aoll3F5h-xfX", 669 | "outputId": "f62bdc11-ac91-4af9-bf89-d8355b96a2bf" 670 | }, 671 | "outputs": [ 672 | { 673 | "name": "stdout", 674 | "output_type": "stream", 675 | "text": [ 676 | "Evaluate on test data\n", 677 | "5/5 [==============================] - 0s 19ms/step - 
loss: 0.7304 - accuracy: 0.6656\n", 678 | "test loss, test acc: [0.730411946773529, 0.6656094193458557]\n" 679 | ] 680 | } 681 | ], 682 | "source": [ 683 | "# Evaluate the model on the test data using `evaluate`\n", 684 | "print(\"Evaluate on test data\")\n", 685 | "results = model.evaluate(x_test, y_test_cat, batch_size=12)\n", 686 | "print(\"test loss, test acc:\", results)" 687 | ] 688 | }, 689 | { 690 | "cell_type": "code", 691 | "execution_count": 56, 692 | "metadata": { 693 | "colab": { 694 | "base_uri": "https://localhost:8080/" 695 | }, 696 | "id": "qN0_TU6H-xke", 697 | "outputId": "49fb1579-84a1-4e28-ad6e-f5efddab3052" 698 | }, 699 | "outputs": [ 700 | { 701 | "name": "stdout", 702 | "output_type": "stream", 703 | "text": [ 704 | "2/2 [==============================] - 0s 33ms/step\n", 705 | "Mean IoU: 0.3427987\n" 706 | ] 707 | } 708 | ], 709 | "source": [ 710 | "# IoU\n", 711 | "y_pred = model.predict(x_test)\n", 712 | "y_pred_argmax = np.argmax(y_pred, axis=3)\n", 713 | "\n", 714 | "from keras.metrics import MeanIoU\n", 715 | "n_classes = 3\n", 716 | "IoU_Keras = MeanIoU(num_classes=n_classes)\n", 717 | "IoU_Keras.update_state(y_test[:,:,:,0], y_pred_argmax)\n", 718 | "print(\"Mean IoU: \", IoU_Keras.result().numpy())" 719 | ] 720 | } 721 | ], 722 | "metadata": { 723 | "accelerator": "GPU", 724 | "colab": { 725 | "provenance": [] 726 | }, 727 | "gpuClass": "standard", 728 | "kernelspec": { 729 | "display_name": "Python 3 (ipykernel)", 730 | "language": "python", 731 | "name": "python3" 732 | }, 733 | "language_info": { 734 | "codemirror_mode": { 735 | "name": "ipython", 736 | "version": 3 737 | }, 738 | "file_extension": ".py", 739 | "mimetype": "text/x-python", 740 | "name": "python", 741 | "nbconvert_exporter": "python", 742 | "pygments_lexer": "ipython3", 743 | "version": "3.7.12" 744 | } 745 | }, 746 | "nbformat": 4, 747 | "nbformat_minor": 1 748 | } 749 | -------------------------------------------------------------------------------- /U-NET/U_NET_S1Hand.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": { 7 | "id": "ecGnkzi2Nxac" 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "import os\n", 14 | "import random\n", 15 | "from skimage.io import imread, imshow\n", 16 | "import skimage.transform\n", 17 | "import matplotlib.pyplot as plt" 18 | ] 19 | }, 20 | { 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": [ 24 | "### Prepare the Dataset" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 116, 30 | "metadata": { 31 | "id": "3VjC9qZGH3Fk" 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "source_path = \"S1Hand/\"\n", 36 | "label_path = \"S1OtsuLabelHand/\"\n", 37 | "\n", 38 | "source = []\n", 39 | "label = []\n", 40 | "for file in os.listdir(source_path):\n", 41 | " if file.endswith(\"tif\"):\n", 42 | " source.append(file)\n", 43 | " \n", 44 | "for file in os.listdir(label_path):\n", 45 | " if file.endswith(\"tif\"):\n", 46 | " label.append(file)" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 117, 52 | "metadata": { 53 | "colab": { 54 | "base_uri": "https://localhost:8080/" 55 | }, 56 | "id": "g4cj-J-VKJM5", 57 | "outputId": "83ec104d-5a73-405d-d5e7-f9663f4e672d" 58 | }, 59 | "outputs": [ 60 | { 61 | "name": "stdout", 62 | "output_type": "stream", 63 | "text": [ 64 | "577\n", 65 | "577\n" 66 | ] 67 | } 68 | ], 69 | 
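# The sanity-check cell below only compares list lengths; sorting is what makes
# the S1Hand and S1OtsuLabelHand lists line up in practice. A stricter,
# hypothetical pairing check (a sketch, assuming the "<site>_<id>_..." naming
# convention that the multimodal notebook's split("_") logic relies on):
for s, l in zip(sorted(source), sorted(label)):
    assert s.split("_")[:2] == l.split("_")[:2], (s, l)  # same site and scene id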
"source": [ 70 | "## SAnity Check\n", 71 | "source = sorted(source)\n", 72 | "label = sorted(label)\n", 73 | "print(len(source))\n", 74 | "print(len(label))" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 118, 80 | "metadata": { 81 | "id": "fj2geM2hJBXz" 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "IMG_WIDTH = 128\n", 86 | "IMG_HEIGHT = 128\n", 87 | "IMG_CHANNELS = 2\n", 88 | "\n", 89 | "\n", 90 | "#Build the model\n", 91 | "inputs = tf.keras.layers.Input((IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS))\n", 92 | "s = tf.keras.layers.Lambda(lambda x: x / 255)(inputs)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": 127, 98 | "metadata": { 99 | "id": "J1L41iXaJDhY" 100 | }, 101 | "outputs": [], 102 | "source": [ 103 | "X = np.zeros((len(source), IMG_HEIGHT, IMG_WIDTH, IMG_CHANNELS), dtype=np.uint8)\n", 104 | "Y = np.zeros((len(label),IMG_HEIGHT, IMG_WIDTH, 1), dtype=bool)" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": { 111 | "id": "oVuqKYm9MaRW" 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "# Sanity Check\n", 116 | "print(X.shape)\n", 117 | "print(Y.shape)" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": null, 123 | "metadata": { 124 | "id": "HbtQALvmJFVh" 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "for count, file in enumerate(source):\n", 129 | " source_img = imread(source_path +'/' +file ) #[0:2,:,:]\n", 130 | " # print(f\"the shape of the source image: {source_img.shape}\")\n", 131 | " new_image = np.transpose(source_img, (1, 2, 0))\n", 132 | " X[count] = new_image\n", 133 | "\n", 134 | "\n", 135 | "for count, file in enumerate(label):\n", 136 | " slabel_img = imread(label_path + '/'+file ) #[:,:,:,IMG_CHANNELS]\n", 137 | " y = np.expand_dims(slabel_img, axis=2)\n", 138 | " # print(f\"The shape of y labels: {y.min()}\")\n", 139 | " Y[count] = y " 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": 125, 145 | "metadata": { 146 | "colab": { 147 | "base_uri": "https://localhost:8080/" 148 | }, 149 | "id": "HUrc7wUnW1hZ", 150 | "outputId": "682052bb-0a27-4f94-ddc1-463833f7c0f9" 151 | }, 152 | "outputs": [ 153 | { 154 | "name": "stdout", 155 | "output_type": "stream", 156 | "text": [ 157 | "(577, 128, 128, 5)\n", 158 | "(577, 128, 128, 2)\n" 159 | ] 160 | } 161 | ], 162 | "source": [ 163 | "# Sanity Check\n", 164 | "print(X.shape)\n", 165 | "print(Y.shape)" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "### Define the UNET" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "metadata": { 179 | "id": "IeceqqWjJFab" 180 | }, 181 | "outputs": [], 182 | "source": [ 183 | "#Contraction path\n", 184 | "c1 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(s)\n", 185 | "c1 = tf.keras.layers.Dropout(0.1)(c1)\n", 186 | "c1 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c1)\n", 187 | "p1 = tf.keras.layers.MaxPooling2D((2, 2))(c1)\n", 188 | "\n", 189 | "c2 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p1)\n", 190 | "c2 = tf.keras.layers.Dropout(0.1)(c2)\n", 191 | "c2 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c2)\n", 192 | "p2 = tf.keras.layers.MaxPooling2D((2, 2))(c2)\n", 193 | " \n", 194 | "c3 = 
tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p2)\n", 195 | "c3 = tf.keras.layers.Dropout(0.2)(c3)\n", 196 | "c3 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c3)\n", 197 | "p3 = tf.keras.layers.MaxPooling2D((2, 2))(c3)\n", 198 | " \n", 199 | "c4 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p3)\n", 200 | "c4 = tf.keras.layers.Dropout(0.2)(c4)\n", 201 | "c4 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c4)\n", 202 | "p4 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(c4)\n", 203 | " \n", 204 | "c5 = tf.keras.layers.Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(p4)\n", 205 | "c5 = tf.keras.layers.Dropout(0.3)(c5)\n", 206 | "c5 = tf.keras.layers.Conv2D(256, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c5)\n", 207 | "\n", 208 | "#Expansive path \n", 209 | "u6 = tf.keras.layers.Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(c5)\n", 210 | "u6 = tf.keras.layers.concatenate([u6, c4])\n", 211 | "c6 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u6)\n", 212 | "c6 = tf.keras.layers.Dropout(0.2)(c6)\n", 213 | "c6 = tf.keras.layers.Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c6)\n", 214 | " \n", 215 | "u7 = tf.keras.layers.Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(c6)\n", 216 | "u7 = tf.keras.layers.concatenate([u7, c3])\n", 217 | "c7 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u7)\n", 218 | "c7 = tf.keras.layers.Dropout(0.2)(c7)\n", 219 | "c7 = tf.keras.layers.Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c7)\n", 220 | " \n", 221 | "u8 = tf.keras.layers.Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(c7)\n", 222 | "u8 = tf.keras.layers.concatenate([u8, c2])\n", 223 | "c8 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u8)\n", 224 | "c8 = tf.keras.layers.Dropout(0.1)(c8)\n", 225 | "c8 = tf.keras.layers.Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c8)\n", 226 | " \n", 227 | "u9 = tf.keras.layers.Conv2DTranspose(16, (2, 2), strides=(2, 2), padding='same')(c8)\n", 228 | "u9 = tf.keras.layers.concatenate([u9, c1], axis=3)\n", 229 | "c9 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(u9)\n", 230 | "c9 = tf.keras.layers.Dropout(0.1)(c9)\n", 231 | "c9 = tf.keras.layers.Conv2D(16, (3, 3), activation='relu', kernel_initializer='he_normal', padding='same')(c9)\n", 232 | " \n", 233 | "outputs = tf.keras.layers.Conv2D(1, (1, 1), activation='sigmoid')(c9)\n", 234 | " \n", 235 | "model = tf.keras.Model(inputs=[inputs], outputs=[outputs])\n", 236 | "opt = tf.keras.optimizers.SGD(learning_rate=0.01, momentum=0.0, nesterov=False, name=\"SGD\")\n", 237 | "model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n", 238 | "model.summary()" 239 | ] 240 | }, 241 | { 242 | "cell_type": "markdown", 243 | "metadata": {}, 244 | "source": [ 245 | "### Train the model" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": 123, 251 | "metadata": { 252 | "id": "10Xha46uJJrI" 253 | }, 254 | 
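# Hedged note for the IoU cell further below: it calls model.predict(X) on the
# full dataset, so training images contribute to the reported IoU score. A
# minimal sketch of the same computation restricted to the held-out split
# (same variable names as the surrounding cells):
y_pred = model.predict(x_test)
y_pred_thresholded = y_pred > 0.5
intersection = np.logical_and(y_test, y_pred_thresholded)
union = np.logical_or(y_test, y_pred_thresholded)
print("Held-out IoU:", np.sum(intersection) / np.sum(union))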
"outputs": [], 255 | "source": [ 256 | "from sklearn.model_selection import train_test_split\n", 257 | "x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.30)" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": { 264 | "id": "SoqrwpyYJJta" 265 | }, 266 | "outputs": [], 267 | "source": [ 268 | "checkpointer = tf.keras.callbacks.ModelCheckpoint('model_S1Hand.h5', verbose=1, save_best_only=True)\n", 269 | "\n", 270 | "callbacks = [\n", 271 | " tf.keras.callbacks.EarlyStopping(patience=70, monitor='val_loss'),checkpointer]\n", 272 | "\n", 273 | "results = model.fit(x_train, y_train, validation_split=0.1, batch_size=12, epochs=200, callbacks=callbacks)" 274 | ] 275 | }, 276 | { 277 | "cell_type": "markdown", 278 | "metadata": {}, 279 | "source": [ 280 | "### Evaluate the model on the test data" 281 | ] 282 | }, 283 | { 284 | "cell_type": "code", 285 | "execution_count": 110, 286 | "metadata": { 287 | "colab": { 288 | "base_uri": "https://localhost:8080/" 289 | }, 290 | "id": "EPkLmNvOJJvR", 291 | "outputId": "2288cdfa-c72d-4461-e8d7-d981af792e5f" 292 | }, 293 | "outputs": [ 294 | { 295 | "name": "stdout", 296 | "output_type": "stream", 297 | "text": [ 298 | "Evaluate on test data\n", 299 | "5/5 [==============================] - 0s 16ms/step - loss: 0.8661 - accuracy: 0.6650\n", 300 | "test loss, test acc: [0.8661026358604431, 0.6650211811065674]\n" 301 | ] 302 | } 303 | ], 304 | "source": [ 305 | "# Evaluate the model on the test data using `evaluate`\n", 306 | "print(\"Evaluate on test data\")\n", 307 | "results = model.evaluate(x_test, y_test, batch_size=12)\n", 308 | "print(\"test loss, test acc:\", results)" 309 | ] 310 | }, 311 | { 312 | "cell_type": "code", 313 | "execution_count": 111, 314 | "metadata": { 315 | "colab": { 316 | "base_uri": "https://localhost:8080/" 317 | }, 318 | "id": "rEdG-4KQlXRb", 319 | "outputId": "e730a229-ffa6-47f5-8c3a-34087b2ec5d5" 320 | }, 321 | "outputs": [ 322 | { 323 | "name": "stdout", 324 | "output_type": "stream", 325 | "text": [ 326 | "19/19 [==============================] - 1s 25ms/step\n", 327 | "IoU score is: 0.688331137928985\n" 328 | ] 329 | } 330 | ], 331 | "source": [ 332 | "y_pred = model.predict(X)\n", 333 | "y_pred_thresholded = y_pred >0.5\n", 334 | "\n", 335 | "intersection = np.logical_and(Y, y_pred_thresholded)\n", 336 | "union = np.logical_or(Y, y_pred_thresholded)\n", 337 | "iou_score = np.sum(intersection)/np.sum(union)\n", 338 | "print(\"IoU score is:\", iou_score)" 339 | ] 340 | }, 341 | { 342 | "cell_type": "markdown", 343 | "metadata": { 344 | "id": "nkjRGvdCJHSr" 345 | }, 346 | "source": [ 347 | "### Classification Map" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": 113, 353 | "metadata": { 354 | "colab": { 355 | "base_uri": "https://localhost:8080/" 356 | }, 357 | "id": "f8VDM2wBI1J7", 358 | "outputId": "d289a68c-ea10-483c-df32-78d04e2bd9aa" 359 | }, 360 | "outputs": [ 361 | { 362 | "name": "stdout", 363 | "output_type": "stream", 364 | "text": [ 365 | "1/1 [==============================] - 0s 17ms/step\n" 366 | ] 367 | } 368 | ], 369 | "source": [ 370 | "# Use Trainned UNET model for segmenting a test image\n", 371 | "test_img = np.expand_dims(x_test[4], axis=0)\n", 372 | "prediction = model.predict(test_img)\n", 373 | "prediction = prediction >0.5" 374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": 114, 379 | "metadata": { 380 | "colab": { 381 | "base_uri": "https://localhost:8080/", 382 | "height": 268 
383 | }, 384 | "id": "x8d2geZCI1UP", 385 | "outputId": "bed0cdab-7cb4-452c-9d6a-f399237b729e" 386 | }, 387 | "outputs": [ 388 | { 389 | "data": { 390 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD7CAYAAABqkiE2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAfxElEQVR4nO3deXwV5dnw8d+V5SQmhCUhCUhECJEoikESEApVFqE1bpRPRUUUFKW+VfRxoSrap7VaWwrS0vYVRcUilUWwFsG9yIN9VUCQ6sOigqKyBSKbEgJZuN4/zhATSEhytjnJXN/PZz6cc89yX5lzzsXMPffMLaqKMca7YtwOwBjjLksCxnicJQFjPM6SgDEeZ0nAGI+zJGCMx4UtCYjIj0XkUxHZLCL3haseY0xwJBz9BEQkFvgMGAJsAz4ArlHVDSGvzBgTlLgwbbc3sFlVvwAQkXnAFUCtSaBt27baqVOnMIVyoiNHjrBx40YqKysjVqcxUeAbVU0/vjBcSaADsLXa+23A+dUXEJFxwDiAjh07snr16jCFcqIvvviC3r17s2fPnojVaUwU+Kq2QtcaBlV1hqoWqGpBevoJyckYEyHhSgLbgdOqvc9yyqJCeno6U6ZMYcyYMW6HYozrwpUEPgDOEJHOIuIDrgZeDlNdjZaSksKYMWMYPHiw26EY47qwtAmoaoWI3Aa8AcQCM1V1fTjqMsYEJ1wNg6jqq8Cr4dp+KGRkZNCvXz8+++wziouL3Q7HGFd4usfgkCFDWLZsGUOHDnU7FGNc4+kkICLEx8czfPhwJkyYQJs2bdwOyZiI83QSOGb48OHcf//9pKenExsb63Y4xkSUJQFHSkoKs2fPZtKkScTE2G4x3hG2hsGmJi4ujt69e1NRUcGZZ55JUVERe/fudTssY8LO/ss7Tu/evXn//fcZPXq026EYExGWBI4TFxdHy5YtSUhIcDsUYyLCkoAxHmdJoA4jR45k9uzZdOnSxe1QjAkrSwJ16N69OyNGjCAnJ4fU1FS3wzEmbCwJnITP52PWrFnMmTOHU045xe1wjAkLSwL1yMzMJDc3l8LCQs4++2y3wzEm5CwJNECnTp1YsGABN998s9uhGBNylgQaSES48MIL+eMf/0i3bt3cDseYkLEk0Ag9evTgjjvuIDs72+1QjAkZSwLGeJwlgUYSEc444wzy8vKIi7NbL0zTZ0kgAJMmTeLFF1+0/gOmWbAkEID4+Hh8Ph8i4nYoxgTNkkCARISYmBhLBKbJsyQQoPT0dObNm8cDDzzgdijGBMVatgKUkJDABRdcQElJCdnZ2RQXF/Pdd9+5HZYxjWZHAkEaPHgwq1atYtiwYW6HYkxALAkEyefzkZaWRmJiotuhGBMQSwLGeFzASUBEThORZSKyQUTWi8gdTnmqiLwlIpucfz3xMP8bb7yRJ598kvbt27sdijGNEsyRQAVwt6p2A/oAt4pIN+A+YKmqngEsdd43e3369OGaa66hY8eOpKSkuB2OMQ0WcBJQ1Z2q+qHz+jtgI9ABuAKY5Sw2C/BMi1mLFi1YuHAhM2bMsC7FpskISZuAiHQCzgNWApmqutOZVQRk1rHOOBFZLSKrm8tgoCJCVlYWp556qtuhGNNgQScBEWkBvAj8l6p+W32eqiqgta2nqjNUtUBVC9LT04MNwxgToKCSgIjE408Az6vqP5ziXSLS3pnfHtgdXIhNT3Z2Nr/73e8YPHiw26EYU69grg4I8AywUVWnVpv1MnBs+J7RwKLAw2uasrKyuOeee+jXr5/boRhTr2Bar/oB1wH/KyL/ccomAr8HXhCRscBXwIjgQjTGhFPASUBV/x9Q1y10dhwMtGvXjry8PDZv3kxJSYnb4RhTK+sxGEY33XQTy5cvJy8vz+1QjKmTJYEwio+PJyUlhauuuopRo0YRHx/vdkjGnMB6tIRZTEwMt99+Oxs2bGDRokWUl5e7HZIxNdiRgDEeZ0kgQuLj4+nQoQNt2njifirThFgSiJDs7GzeeecdJk6c6HYoxtRgSSBCYmNjSU9Pp0ePHowYMYKsrCy3QzIGsCQQcRdddBHz58/n/PPPdzsUYwBLAsZ4niUBlyQlJZGSkmLjFhjXWRJwye9//3teffVVMjIy3A7FeJwlAZeceuqpdOvWjf79+3P22We7HY7xMEsCLkpNTWX+/Pk8/PDDbodiPMySgMtiY2OJibGPwbjHvn3GeJwlAWM8zpJAFEhJSaFbt252X4FxhSWBKHDBBRewYsUKRoywJ7GZyLMkEAXi4uJISUnB5/O5HYrxIEsCxnicJYEoMnbsWGbPnk3Hjh3dDsV4iCWBKJKXl8ewYcNo2bKl26EYD7EkYIzHWRKIMnFxcQwaNIj+/fvbHYYmIiwJRJnExESmTZvG5MmT7RHlJiJCMSpxrIisFZElzvvOIrJSRDaLyHwRseteAejcuTPTpk3jsssuczsU08yF4kjgDmBjtfeTgD+qag6wDxgbgjo8JzMzk1tuuYW+ffu6HYpp5oIdmjwLuAR42nkvwCBgobPILGBYMHUYY8Ir2COBPwG/AI4679OA/apa4bzfBnSobUURGSciq0VkdXFxcZBhNF8dOnSgT58+dtnQhE3ASUBELgV2q+qaQNZX1RmqWqCqBenp6YGG0exde+21vP322/Ts2dPtUEwzFcyRQD/gchH5EpiH/zRgGtBaRI6NcZgFbA8qQo+LjY0lMTGRUaNGccstt5CYmOh2SKa5UdWgJ2AAsMR5vQC42nn9BPDz+tbPz89XU79NmzZpmzZtFLDJpkCm1VrL7y8c/QTuBe4Skc342wieCUMdxpgQCcnQ5Kr6P8D/OK+/AHqHYrumpvj4eLKzs9m6dSu7d+92OxzTTFiPwSbktNNOY+nSpfzyl790OxTTjITkSMBERkxMDK1ataJHjx6MGTMGgCNHjrB48WIOHjzobnCmybIk0AT179+f/v37A7Bnzx7ef/99SwImYHY6YIzHWRJo4kSE1NRU61FoAmZJoIlr3bo1ixYt4i9/+Ys9f8AExNoEmriYmBiysrLIzMxERI513ooqSUlJ9O3bl6KiItavX09eXh7t2rWrc/kDBw6watUqjh49WucykZSVlRXwoLElJSWsWLGCioqK+hd2S209iCI9WY/B4L3++usaExPjdo+0WqeuXbvqvn37dMaMGQronDlz9OjRo3VOK1eu1ISEBNfjPjaNGzfupPGebPr000+jqZdnxHoMGhfk5uYyefJkLrzwQrdDqZWI0KdPH6ZMmULPnj0RkXont2VmZvLb3/6WK6+8skHx1jZlZGTwm9/8hquuusrtP6dOdjrQTHTq1Im77rqL4uJiVq5cyZEjRyJ2ahAfH09sbCxHjhxBR
E4YRMXn8yEidO/ene7du9e7vZiYmBNulFJVjhw5EnSsCQkJHD16lPLy8nqXTUtLY/z48aSkpARcX+vWrbntttto1aoVixYtory8nMrKyoC3Fw6WBJqZW2+9lcsvv5wbb7yRTz75JCJ13nPPPRQWFjJ69GhSUlKYMWMGcXHff7USExNp0aJFg7d31llnsWzZshptAtu3b+e6667jwIEDAceZmZnJ7NmzWbt2Lffee2/A2wlEYWEh7777Lg888ACvv/56ROuujyWBZiYrK4u2bduSnJwcsTo7duxIz549yc/Pp0WLFvTs2bNGEmispKQkevToUaMsIyODnj17smXLFr788stGb7NLly507dqV/Pz8WhNJixYtyMnJISbm+zPk7OxsYmNjG11XbdLS0khLSyM/P7/qvo9vv/2WzZs3h2T7QamtoSDSkzUMhlZpaanm5+dHrMFp+vTpqqp6+PBhPXz4cFj+pqNHj2ppaak+88wzjY5PRPTFF1/U0tJSVVVdsGDBCcv069dPv/32Wz106FDVFI6/paysrGr7ixYtinRjbq0Ng3Yk0AzFxcUxatQozjrrLObNmxexy1MJCQlh27aIkJiYSI8ePbj77rsBOHz4MH//+98bdIrg8/lOaGcYMmQI5557LuD/Xz85ObnGkUA4xMfHVz1KPjc3l7vuugtVpaKignnz5rFr166w1l+r2jJDpCc7EgiPNWvWaFJSkopIRI4EIm3Pnj2ak5OjIlJjOj4+EdHFixdXrbdgwQKNiYnRp556ypW4a1NaWqoFBQUn/TtCMNmRgNfk5OTw0ksvsXDhQp566im3wwm5lJQUnn76aQ4dOlRVVl5ezoQJE/jss8/qXK9///4sWbKEc845JxJhNojP52PatGlVRzUvvfRSxD4zSwLNWMuWLRk6dCjr168P2TYzMjJISkqqUdaYlv9Qio+PP6FfRFlZGTNnzqSsrKxGefWY27Vrx8UXXxyRGBsqJiaGH/zgB1Xvd+zYwZtvvsnu3bspLS0Na92WBEyjTJ48mcLCwhplbiWB2vh8Pv72t7+d0A7S1G6wGjlyJJdddhkjR45k6dKlYa3LkoAH5ObmcvXVV7N8+XJ27twZ0DZycnLo1asX3bp1o23btiGOMLRat27tdghBO+WUU0hMTGTo0KFkZGQA8NVXX/Hee+/VWC4jI4NBgwY1qIfl3Llza59RW0NBpCdrGAy/o0eP6sUXXxxwo9Ktt97q9p/geXPmzDnhcxkwYIBWVFQ0aH2sYdDbRIQ777yTQYMG8dBDD9X6JKIzzzyTe+65p9YOMmeeeWYkwjQn0adPH5599tkaZe3atQv6sqZohPqXn0xBQYGuXr3a7TA8YevWrQwcOJCdO3fWaFUHGDBgAP/6179C1kvORBcRWaOqBceX212EHtO+fXvefPNNHnnkEbdDMVHCTgc8Ji4ujuzsbPLy8hg4cGCNeT169IiKW3hNZFkS8KiBAweecI1dRMLebdZEn6A+cRFpLSILReQTEdkoIn1FJFVE3hKRTc6/bUIVrAkdESE2NrbGZAnAm4L91KcBr6vqmUAesBG4D1iqqmcAS533xpgoFXASEJFWwAU4A46qapmq7geuAGY5i80ChgUbpDEmfII5EugMFAPPishaEXlaRJKBTFU91i2tCMisbWURGSciq0VkdXFxcRBhGGOCEUwSiAN6AtNV9TyghOMO/Z1eSrV2RFDVGapaoKoF6enpQYRhjAlGMElgG7BNVVc67xfiTwq7RKQ9gPOvjaFtTBQLOAmoahGwVURynaLBwAbgZWC0UzYaWBRUhMaYsAq2n8B44HkR8QFfADfgTywviMhY4CtgRJB1GGPCKKgkoKr/AU7oi4z/qMAY0wRY7xBjPM6SgDEeZ0nAGI+zJGCMx1kSMMbjLAkY43GWBIzxOEsCxnicJQFjPM6SgDEeZ0nAGI+zJGCMx1kSMMbjLAkY43GWBIzxOEsCxnicJQFjPM6SgDEeZ0nAGI+zJGCMx1kSMMbjLAkY43GWBIzxOEsCxnicJQFjPC6oJCAid4rIehFZJyJzRSRRRDqLyEoR2Swi850hyowxUSrgJCAiHYDbgQJVPQeIBa4GJgF/VNUcYB8wNhSBGmPCI9jTgTjgFBGJA5KAncAg/MOUA8wChgVZhzEmjIIZmnw7MAX4Gv+P/wCwBtivqhXOYtuADrWtLyLjRGS1iKwuLi4ONAxjTJCCOR1oA1wBdAZOBZKBHzd0fVWdoaoFqlqQnp4eaBjGmCAFMzT5RcAWVS0GEJF/AP2A1iIS5xwNZAHbgw/TGG96++232b697p9QZmYmQ4YMQUQCriOYJPA10EdEkoBSYDCwGlgG/BSYB4wGFgVRhzGepapMmTKF1157rc5lBgwYwODBg4mNjQ24noCTgKquFJGFwIdABbAWmAG8AswTkUecsmcCjs4YjygvL+eRRx5hy5YtNco/+uijk663ceNGxowZg4gQFxfHxIkTycnJaVTdoqqNDjjUCgoKdPXq1W6HYYxrjhw5wsUXX8yaNWv49ttvA9pGfHw8ixcvplevXrXOT0tLW6OqBceXB3M6YIwJEZ/Px3PPPcfatWu58sorOXLkSKO3UV5ezo033khiYmKj1rMkYEwUUFW2b9/Otm3bCObofMeOHY1ex5KAMVGgrKyM2267jQ8//JCjR49GtG5LAsZEgfj4eMaPH09RUVGDli8tLeWvf/0r33zzTdB1WxIwJgrExsZy/fXXN3j5/fv3s2DBAg4cOEB5eXlQddutxMY0QSkpKcyZM4dp06YRExPcz9iOBIxpgmJjYzn33HPZu3dvUL0FwY4EjPE8SwLGNGFZWVmMHz+egoIT+gBVueSSS7j99tvrnG9JwJgmQFVPmABycnKYOnUqQ4YMqXPdsWPH8qc//anO+dYmYEyUKy4u5o477uC7776rKktOTmbq1Kns2bOHBx98kA0bNtS5/qOPPsrMmTPrnG9JwJgod+jQId56660afQJatmzJz3/+c4qKinjllVeorKysc/367suxJGBME/Tdd98xfPhwKisrT5oAGsKSgDFNkKqyZ8+ekGzLGgaN8ThLAsZ4nJ0OGBPlYmJiaNmyZdU9AocOHQr6foEa2w/ZlowxYdG+fXveeOMNVq5cycqVK7n88stDun07EjAmysXFxdV4bmCvXr3YuXMna9eupbS0NOjt25GAMU3MhAkTWLRoER061DquT6PZkYAxTczixYt577332Lt3b0i2Z0nANHmqSmVlZUDP5jv2qO6m5JVXXuHZZ5+loqKi/oUboGn99cbUYs2aNdx5550BJYHk5GSeeuopOnbsGIbIwuMXv/gFw4cP5+abb2bbtm1Bb8+SgGmSduzYwf79+wH/AB3vvfdeQA/obNGiBR9//DGqyumnnx6S2Pbs2cOuXbsA/6PEs7Ozg376T3U5OTlkZmbSvXt3WrZs2eD16rrJyAYfMU3Sz372M+bOnQtARUVFUK3kycnJ/PCHP2Tx4sUhOTV47LHHeOihhwDo0qULy5Yto3Xr1kFv93gHDx5s1NFPy5YtAxt8
RERmApcCu1X1HKcsFZgPdAK+BEao6j7xP+doGlAIHALGqOqHDY7SmHps2rSJpUuX8tFHH9W4tTYYJSUlbN68mSeeeIK+ffuSn59f7zpffvklr7/+eq3z/v3vf1fFtn37dmbOnElSUhIAgwcP5owzzghJ3C1atAjJdmp9WMFxDy64AOgJrKtW9gfgPuf1fcAk53Uh8BogQB9gZX3bV1Xy8/PVmIaYPXu2AmGbHnrooQbF8c9//jOg7T///PNh3kN1A1ZrLb+/ek9UVPUd4PhrEVcAs5zXs4Bh1cqfc+pcgX+Y8vb11WFMfXbt2sUNN9zAtGnTwlrP/Pnzueqqq9i4cWNY64kmgZ4AZarqTud1EZDpvO4AbK223DanbCfHEZFxwDigSbXMmsg5ePAgBw8eBOCrr75i0aJF7Nu3L6x1btiwgU2bNjF+/Pha5x89epQ9e/ZUNUo2B0G3gqiqikijWxdVdQb+ocwpKChwv3XSRJ3nnnuORx99FPA3/h04cMDliPwt/4WFhXzxxRduhxIygSaBXSLSXlV3Oof7u53y7cBp1ZbLcsqMqaGsrIzly5dz6NChOpdZtWoV27dH9uuTm5tLt27dSE1NPWHeBx98wIYNG9iyZUvIeutFg0CTwMvAaOD3zr+LqpXfJiLzgPOBA9VOG4ypsn//fm666Sa+/vprt0OpYeTIkfz3f/93rfP+8Ic/sHDhwghHFH4NuUQ4FxgAtBWRbcCv8P/4XxCRscBXwAhn8VfxXyHYjP8S4Q1hiNmYkElMTGTChAlkZGQAcP7551fNKyoqYsqUKRw+fBiAtWvXuhJjuNWbBFT1mjpmDa5lWQVuDTYo0zxVVFRU/aBKSkoiPgT38RISEmjTpg3XXXddrdfu9+7dy4wZM0LWHyFaWbdhEzEvv/wyv/rVrwB/QjjWtdYt9913Hz/96U89f3XKkoCJmH379rFu3Tq3wyA1NZWuXbvSqlUrSktLA7rxqDmxh4oYz7nwwgtZvnw5n3zyCYWFhVHXOBlpdiRgPGfTpk089thjfPjhh5SVlaGqbN26lTlz5tCvXz/69+8f8jrPPfdcLrnkEs4+++yQbztYdiRgPGfdunVMnDixaniuyspKPv/8cx588EHefvttwN8zMFQNl7GxsfTq1YtHH32UvLy8kGwzlOxIwHhaSUkJY8eOpaysrGo4r8rKSu6++25WrFhx0s5MDXHqqafy+OOP07Vr11CEGxaWBIynVVZWsmLFiqr3e/fuZdOmTbz77rv1DuRZlzZt2pCWlgbA6aefzsCBAxv18I9IsyRgTDVPPvkks2bNCqpvwJgxY6p6HcbExJCSkhKq8MLCkoAx1Rw+fLiqQ1OgEhMTw/IkoXCxhkFjPM6OBIwJUkJCAr/+9a+reh5G42XAk7EkYEwQkpOTadu2LVdccQVnnXWW2+EExE4HjAnChAkTeOedd2qMFdjU2JGAMY3g8/no27cviYmJAJx33nlN/gYkSwLGNEJqaiqzZ88mKysL8A9j1tRZEjCmga6//nr69u1LmzZtmsWP/xhLAsbUwufz1Rg6TET4yU9+wrBhw06yVnSprKykvLy83uUsCRhznLi4OB5//HHOO++8GuXZ2dkuRRSYF198kUmTJtW7nCUBY/AP6ZWTk0NMTAxxcXHk5+fTo0cPt8MKyu7du/nww/pHAbQkYAyQl5fHa6+9RlxcHCKCz+dzO6SIsSRgPM3n83HttddSUFBAcnJySIcQd8s333zD3LlzWbZsWYOWtyRgPEtESEpK4t577yU3N9ftcEKmqKiIiRMnVg3hVh9LAsazxo8fz6WXXkqHDh3cDsVVlgSMZ51zzjkMGTLE7TBCYv/+/VWDpO7YsaNRj0azJGBMMzB9+nSmTp0K+PsHNOaxaA0ZhmwmcCmwW1XPccomA5cBZcDnwA2qut+Zdz8wFqgEblfVNxr11xgTJklJSfzoRz+q6vffpUsXlyMKnZKSEr755puA1m3IkcDfgL8Cz1Urewu4X1UrRGQScD9wr4h0A64GzgZOBf4lIl1VtTKg6IwJobS0NKZPn05mZqbboUSVhoxF+I6IdDqu7M1qb1cAP3VeXwHMU9UjwBYR2Qz0Bt4PSbTGNNJFF13EtddeC/jv/W/VqpXLEYXGoUOHePjhhykqKgJoUKeguoSiTeBGYL7zugP+pHDMNqfsBCIyDhgHNPlbMU30iY2NpUWLFhQUFDBmzBi3wwmpQ4cOsXv3bhYsWMDnn38e9PaCSgIi8gBQATzf2HVVdQYwA6CgoMDbg8GZkMvJyeGFF16gffv2bocSco888ggvvPBCyIZPCzgJiMgY/A2Gg/X7ER23A6dVWyzLKTMmIkSkqt9/bm4uCQkJbocUMrt27WL9+vWsXbs2JEcAxwSUBETkx8AvgAtVtfq1iJeBOSIyFX/D4BnAqqCjNKaBfD4ff/7znzn//PObRRfg6pYtW8aoUaNCNjzaMQ25RDgXGAC0FZFtwK/wXw1IAN5yHq6wQlVvUdX1IvICsAH/acKtdmXARMrQoUMZNGgQp59+erNKAPv372f69OmsWrWqaqi0UGrI1YFrail+5iTL/xb4bTBBGROIgQMHcu+997odRsgdOHCAqVOnBtwPoD7NJ10aYwJiScAYj7MkYIzH2Q1ExkSxJUuWsHbt2qAHST0ZSwLGRClV5fHHH+e1114Laz12OmCMx1kSMMbj5Psevy4GIVIMlADhuRDaOG2xOKqzOGpqynGcrqrpxxdGRRIAEJHVqlpgcVgcFkdk47DTAWM8zpKAMR4XTUlghtsBOCyOmiyOmppdHFHTJmCMcUc0HQkYY1xgScAYj4uKJCAiPxaRT0Vks4jcF6E6TxORZSKyQUTWi8gdTnmqiLwlIpucf9tEKJ5YEVkrIkuc951FZKWzT+aLSNiHyRWR1iKyUEQ+EZGNItLXjf0hInc6n8k6EZkrIomR2h8iMlNEdovIumplte4D8fuzE9PHItIzzHFMdj6bj0XkJRFpXW3e/U4cn4rIjxpVmaq6OgGx+AcwyQZ8wEdAtwjU2x7o6bxOAT4DugF/AO5zyu8DJkVoP9wFzAGWOO9fAK52Xj8B/J8IxDALuMl57QNaR3p/4H869RbglGr7YUyk9gdwAdATWFetrNZ9ABQCrwEC9AFWhjmOoUCc83pStTi6Ob+bBKCz83uKbXBd4f5iNeCP7Qu8Ue39/fgHNol0HIuAIcCnQHunrD3waQTqzgKWAoOAJc6X6ptqH3iNfRSmGFo5Pz45rjyi+8NJAluBVPw3uC0BfhTJ/QF0Ou7HV+s+AJ4ErqltuXDEcdy8nwDPO69r/GaAN4C+Da0nGk4Hjn3ox9Q5VkG4OIOrnAesBDJVdaczqwiIxHA1f8L/4NZjT5BMA/araoXzPhL7pDNQDDzrnJY8LSLJRHh/qOp2YArwNbATOACsIfL7o7q69oGb390b8R+FBB1HNCQBV4lIC+BF4L9U9dvq89SfVsN6DVVEjo3zuCac9TRAHP7Dz+mqeh7+ezlqtM9EaH+0wT+SVWf8T6xOBn4czjobIxL7oD7BjPd
Rm2hIAq6NVSAi8fgTwPOq+g+neJeItHfmtwd2hzmMfsDlIvIlMA//KcE0oLWIHHveQyT2yTZgm6qudN4vxJ8UIr0/LgK2qGqxqpYD/8C/jyK9P6qrax9E/LtbbbyPa52EFHQc0ZAEPgDOcFp/ffgHNH053JWK/1npzwAbVXVqtVkvA6Od16PxtxWEjarer6pZqtoJ/9/+tqpeCyzj+zEeIxFHEbBVRHKdosH4Hx0f0f2B/zSgj4gkOZ/RsTgiuj+OU9c+eBm43rlK0Ac4UO20IeSqjfdxuZ443sfVIpIgIp1p7Hgf4WzkaUQDSCH+1vnPgQciVGd//Id1HwP/caZC/OfjS4FNwL+A1AjuhwF8f3Ug2/kgNwMLgIQI1N8DWO3sk38CbdzYH8BDwCfAOmA2/lbviOwPYC7+tohy/EdHY+vaB/gbcP+v8739X6AgzHFsxn/uf+z7+kS15R9w4vgUuLgxdVm3YWM8LhpOB4wxLrIkYIzHWRIwxuMsCRjjcZYEjPE4SwLGeJwlAWM87v8DQhUgZyDy1SAAAAAASUVORK5CYII=\n", 391 | "text/plain": [ 392 | "
" 393 | ] 394 | }, 395 | "metadata": { 396 | "needs_background": "light" 397 | }, 398 | "output_type": "display_data" 399 | } 400 | ], 401 | "source": [ 402 | "# View and Save segmented image\n", 403 | "prediction_image = prediction.reshape(128,128)\n", 404 | "plt.imshow(prediction_image, cmap='gray')\n", 405 | "plt.imsave('UNET_S1Hand.jpg', prediction_image, cmap='gray')" 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": 115, 411 | "metadata": { 412 | "colab": { 413 | "base_uri": "https://localhost:8080/", 414 | "height": 285 415 | }, 416 | "id": "L3TXLg_zLitv", 417 | "outputId": "9a5de5d2-7223-4266-9b47-b55cc0579b99" 418 | }, 419 | "outputs": [ 420 | { 421 | "data": { 422 | "text/plain": [ 423 | "" 424 | ] 425 | }, 426 | "execution_count": 115, 427 | "metadata": {}, 428 | "output_type": "execute_result" 429 | }, 430 | { 431 | "data": { 432 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD7CAYAAABqkiE2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO2deXSURda4n+rO2mSDkISEJIQAEhUVCEMSQIHBwOiww1Fw+QEuLAfR0VEQR0fHTx0FlVFnBkVmHORTGUT4QEVWcRgEMgmyiuyyJKxhzZ5e6vdHd3oS0kk6ne5+O+l6zrkn3fXWW3VTb/ftqltVt4SUEoVC4b/otFZAoVBoizICCoWfo4yAQuHnKCOgUPg5yggoFH6OMgIKhZ/jMSMghPiVEOKgEOKIEOJZT9WjUCiahvDEOgEhhB44BGQD+UAuMF5Kud/tlSkUiiYR4KFyewNHpJTHAIQQS4ARgEMj0LZtW5mSkuIhVWpTUVHBTz/9hNls9lqdCvej1+u58cYbCQ4OrnWtrKyMAwcOYLFYNNDMZymUUsZcn+gpI9AeOFXtfT6QUT2DEGIyMBkgOTmZvLw8D6lSm3PnzjFz5kz279/v1XoVrmMwGBg4cCDnzp0jLy+P9PR0br75ZubMmUNcXBwA+/bt4/DhwwAcP36c2bNnU1FRoaXavsYJh6lSSrcLMBZYWO39g8Cf68qfnp4utWDx4sUSUNIMJDU1VRYWFtqf2aJFi2o9z2effVZzPX1c8qSD75+nHIMFQFK194m2NJ8iIyOD+fPnk5GR0XBmhU9Q9cwyMzO1VqXF4KnhQC7QRQjREeuXfxxwn4fqcpkuXbrQpUsXtm7dSk5Ojtbq+A0hISEEBDj+6JWWljocx1ssFoqLi4mPj+eBBx4gJCTE02r6DR4xAlJKkxDiMWAtoAf+LqX80RN1KZofr7zyCnfddVet9PLycu6//34OHDhQ61pBQQGDBw+2G48//vGPDB8+3OO6+gOe6gkgpVwNrPZU+e7khhtuICMjg927d1NeXq61Oi2exMREbrrpphppP//8M2fPnsVkMjm8x2g0cujQIfv73Nxc2rRpUyNPfn6++5X1Bxw5CrwtWjkGqzCZTPLs2bMyNTVVa8eNX8iSJUtqPYPf/va3MjAwUAohnCpDr9fLwMDAGqLT6TT/33xcHDoGPdYTaE7o9XoiIiJ4/PHHuXTpEgAbN27k+++/11izlkX37t0ZPnw4N998c61rZrMZo9HodFlms1mt83ATygjYCA0N5YknnrC/NxqN5OTk1Nk9VTSe9PR0/vCHP9RIk1JiNpvVoh4NURuI6uDRRx9l9erVpKWlaa1KiyYnJ4fs7Gy++OILrVXxW1RPoA46duxIQkICERERWqvi88TFxREVFVUj7eLFixQWFtZ5j9ls5vjx4+zZs4d//etfVYvKFI3EYDCQlJREYWEhFy9edKkM1RNQNJlZs2aRk5NTQ6ZMmVLvPZcuXWLEiBE8+eSTygA0gZ49e7J161YmTZrkchmqJ1APer2eESNG0L59e7788kvlH7BhMBgYMWIEBoMBsH4QIyMja+SpbzHPpk2b2LVrF2fOnKG0tNSjurYkUlNTGThwYI20tLQ0oqKiHG6ichZlBOohICCA5557jj179rB27VplBGxERUXx1ltvER8f79L9H3zwAf/85z/drFXLJyMjg4ULF7q9XDUccIIOHTqwePFiJk6cqLUqzYbRo0fz6aef1lgUtHHjRu699162bt2qoWbNl++//557772XTZs22dP27dvH+PHjm+ZYdbR4wNui9WIhZ1mwYIGMjY2VQUFBWi/60FQSEhLk6dOnG2wvk8kkR44cKaOjo51eBKSkYfnb3/4mLRaLLCwslMuXL5d6vd7Ze726i7BFMm7cOP7zn/9w++23a61Ks0Cv1/PBBx+wbNkywsLCtFanRVFUVMSYMWOYOnVqkxdNKZ9AIwgPDycsLIzQ0FCtVdGU8vJyNmzYwE033UR6enqNa5cvX2bbtm01Fv8UFBSo1X1uICEhgZ49e3Lp0iW++eYbDh8+zPnz55tesKPugbeluQwHpJTSYrHIoUOHat4l1FqEEHLUqFG12uff//63DAoKkkKIGqK1vi1Bxo0bJy0Wi5w0aZKrbar2DrgDIQSPPvoo6enpvP322xQVFWmtkibIanP7y5Yts++zOH36NCaTyWfm/u+44w5GjRoFwJUrV9z+zEaNGsUdd9xRI23Dhg18/fXXbqujdevWPPXUU6SnpyOEYPz48XTp0oV58+Zx4cKFplfgyDJ4W5pTT6CKU6dOyZSUFL92Eg4fPlyWlJTIRx55RHNd6pJp06bJkpISWVJSIg8ePChjY2PdUq5er5cGg0HOnz+/1mfjtddec+v/0KFDB3nhwgV7+RUVFbKgoEB26tSpsWWpnoA7iYuL48svv2TNmjU888wzWqujCf/617/o06ePT+/j//zzz9m2bRsAlZWV9l2iTeX2229n3rx5JCYmuqW8xvDaa6/xxRdfuK3dlRFwkcDAQLp16+YwCo6/cPXqVXbv3q21GvVSWFhY7x4GV4mMjKR79+5uL9cZTp48yb59+9xWnpoiVCj8HNUTaCJdu3Zl5syZrF271ud/FRXu4/Dhw7zxxh
sIIWpd27x5s8vlxsXFcd9997Fr164aKwM9iiNHgbelOToGr+fRRx/V3AmmpPlLz549ZWlpqZw3b57U6XRSp9PJlJSUGo7BSZMmuVq+cgwqFM2F0aNH28OwhYaG1tql6U6UEXATbdu2JSUlhYKCgkbFylMoHJGcnExycnKNtOLiYs6fP09xcbFb61KOQTfx3HPPsWnTJjp06KC1KooWyrp16+jduzerVq1ya7nKCLiJsLAw4uLiGDNmDEOGDHHoMFIomkJFRQUXL150+yGrygi4kdDQUF5//XWee+459Hq91uooFE7hsk9ACJEEfAzEYfU8LpBSviOEaAP8E0gBjgP3SCkvN11VhaL5M3XqVPr06VPn9datWxMUFOTwWlZWFh9//DELFy5s0jTk9TTFMWgCfiul/EEIEQ7sEEKsByYCG6WUrwshngWeBWY1XdXmQ2BgIG3atOHatWvqWDNFDXr06MGvf/1rwPo5CQ8Pd/relJQUUlJSyM3NZe/evVy9etU95zU4mjd0RYCVQDZwEIi3pcUDBxu6tyWsE6hOWVmZPHbsmJwxY4bm885KfEtiY2NlamqqTE1NlRMnTpQWi6XRn6/z58/LnTt3ypSUlMbW77l1AkKIFKAHkAPESSnP2C6dxTpccHTPZGAyUGsqpLkTEhJCx44dad26tdaqKHyM8+fPc/nyZTIyMlwO1BoTE0NoaCgDBgygoKCg1vWioiJyc3OdD+TiyDI0RoAwYAcw2vb+ynXXLzdURkvrCVTx+9//XvNfHiW+J9HR0fLo0aMu9QKqYzabHcru3btlWFiYo7rd3xMQQgQCXwCfSCmX25LPCSHipZRnhBDxgBviHykULQudTtfkaWSdzvHkXkJCAv/zP/9DZWVljfRZsxy75poyOyCAvwE/SSnfrnZpFTABeN32d6WrdTR3AgIC7IdCSClrPRRvExgYiBBCcz0U1tgGRqORwMBAt5fdtm1bfvOb39RKr8sINGUY0A9rF2MPsMsmdwPRwEbgMLABaNNQWS11OHDq1CmZk5Mjc3Jy5JIlS6TBYNCsCyqEkPPmzZMrV66UUVFRmneJ/VkCAgLkbbfdJh9//PEmDwkaA+4eDkgptwB19WcGuVpuSyIxMdEeeaZt27b06NGDEydOaBaJJyAgwCO/PIrGYTKZ2L17N+3atUNK6bHVpYWFhc591hxZBm9LS+0JVMdiscjS0lL57rvvavYLFBwcLENDQzX/JVRilSFDhkiz2eyxz9z7778vDQaDXVCHj2iLEILQ0FDS09N5/PHH6dy5s9d1qKiooKyszOv1KrTBaDRSWlpql7pQW4m9TJ8+fejTpw8FBQUcOXJEa3UULRwhRJUPr05UT0ChaKEMHTqUVatWkZmZWW8+1RNQKDSivLyc/Px8hBAIIYiLi6vluC0sLHQ4hBNC0K5dOwIC6v4KV+01WLlyJQUFBZw6dcpxRkeOAm+LPzgGr2fMmDGaO6aUaCtBQUEyNjZWxsbGyuTkZLlr164anxGLxSLHjx9vz1NdOnfuLI8ePerUZ+3KlSvy3LlzEhVj0MrmzZs5e/YsAB06dCAjI0NjjRT+SmVlpf1A0ZCQEEwmE5cvX+bbb7/FbDYjpeSnn35yeOhoSUkJX375Jbfddhv9+/evd5qxwfiEjiyDt8VbPQGz2SwHDx5st8QPPPCAV+p1hOoJKKkuISEhMi8vT+7YsaNR07gDBw6UJpPJqc8cqidgXWv9m9/8hrFjxwLQpUsXzXSZOnUqmZmZvPrqq1y5ckUzPRS+gdFo5JVXXnF6WbfBYGD27Nl07969zj0EziJkA9MH3qBXr14yLy9PazW8ztmzZ/nFL36hyQrCwMBADAYDAFJKiouLGx2gIiAggFatWlFWVqb2I9SBTqcjLCysRnfdYrFQXFzc4NRdXYSGhhITE8O3335Lp06dAOsakOsD2AQHBxMSEmJ/L4TYIaXsVUtHl7RQNHsGDx5MTk4OOTk5bNiwgaSkpEaXMXDgQLZv387o0aM9oGHLIDExkfXr19vbOicnh3Xr1pGQkOBymc8//zwbNmyoEYdj/vz5ZGRk1JC5c+c6VZ5fDQd8jaCgIDIyMoiMjOTHH3/0at3l5eWcO3cOsMazdzoARTXCwsJIS0sjKirK3eq1GIKCgujSpUuNADNXr16lb9++dgd1da5du8aePXvq7ZVdu3aNM2fOcObMGaKiorj11lvt0a6r43ToMkeOAm+LP04RVmEymeTy5cu97ogSQsiAgAAZEBAg9Xq9S2WMGjVKSinl1KlTNXes+ap07txZXrp0yeFzNxqNteQ///lPg45BnU5nf3Z33nmnNJlM0mw21yrr+n0JqL0Dvoler2+yY8cVpJSYTCZMJpNLvQCAn376iRdffJHc3Fw3a9fy0ev1BAQE1JKkpCReeOEFhgwZUue9FovF/uyOHDnCSy+9xJYtW2qV5eznSg0HfACdTkdQUBAmk8k90WNdICAgACllowzCgQMHePnllz2oVfNHSonRaMRsNjt1FkW7du2YPXs2ISEhrF27tsH8x48f55VXXiEoKIg+ffqg1+sbvTVZ9QR8gL59+7Jx40ZGjRqlSf1hYWEsXryYefPmadIrackUFBQwfPhw3njjDY/Ws3DhQrKzs13yLamegA/Qpk0b+vXrx9KlSzWpPyAggPT0dAoKCtTxaW6mvLycnJwc4uPj2b9/P4mJiURERLi9npMnT3LmzBn27t1rN+Rt2rShXbt2Dd6rzL5C4QW+/vprMjMzneriu4rRaOSRRx4hMzOTzMxM3nzzTafuUz0BH6Jfv36UlpayYsUKLl265PX6ExISmDx5Mjk5Ofzwww9er78lYzQa7VLFqVOnWL16tcNFQ1u3bnWpnurBQ6oWD61du5aff/657pscTRl4W/x5ivB6ioqKZLdu3bw6jRUVFSUPHTpk1+F3v/ud5lNrLVU++eQTezuvWbNG6nQ6j9U1ffp0KaWUI0eOrEpTU4QKhaI2ajjgY+h0OmJiYmjTpo1HhgQ6nY62bdvWmAWIjIysNzhFSyUiIoKQkBAuXrzo8lqJxnLlyhXOnz9PdHS0V+pzBtUT8DFCQ0NZsmQJixYt8kh48Pj4eDZs2EBubq5dNm3a1OLOg3SG559/ns2bN5OSkuLVOgcNGsSZM2cazuwl/M/8+zhCCGJjY4mJiXH7dF1mZibdunWjQ4cOHpmmam78/PPP7Ny506vHx1++fBmTycQ333xDQUGByzsJHSGEoG/fvvZeRnh4OCtXrmzY4DhyFHhblGOwNtu3b5dBQUFucxIJIeTq1audqls5BpunBAYGyq1bt9qf43vvvXd9Hs8EFRFC6IE8oEBKOVQI0RFYgvU4sh3Ag1JKtdm8kaSkpPDOO++wevVqvvzyyyaXJ6Xkr3/9K9u2bWPmzJmEhYXVmXfYsGF1LjL58MMP2bNnT5P1Ubgfs9nMW2+9ZX92e/fude5GR5ahMQI8BXwKfGV7vxQYZ3v9PjCtoTJUT6Bu5s6dK1u1auXyTr/rpUOHDvLChQsu6zNu3DhpMBikEELzXz4ltSU0N
FS2atXKoeCJKUIhRCLwa2Ch7b0Afgkss2VZBIxsSh3+zoQJE9iyZQvp6elaqwLA66+/zqpVq3zKu62wEhAQwIIFC9i2bZtDqfO+Jtb7J2AmUBW9IBq4IqU02d7nA+0d3SiEmAxMBvzSM+0sVdOFrVq1ckt5FRUV5OXl0alTJ5diLJaXl1NaWupWh5bCfZSVlVFSUgJYdyQ6NfPhqHvgjABDgb/aXg8AvgLaAkeq5UkC9jVUlhoO1I/JZJIDBw50W5cxMDBQPvjggy7pMnbsWBkYGKh5t1dJ3c82KChIBgUFySeeeKLGs8MDjsG+wHAhxN1ACBABvANECSECpLU3kAgUNKEOhQcwGo3s3r2bP/7xj4A15v3EiRNrhMCqC5PJVGP9u8K3qP5scnJy7M+4Plw2AlLK2cBsACHEAOBpKeX9QojPgbFYZwgmACtdrUPxX6oiELkr6MiePXvsXv6oqCjuuusu+9oBnU6nthS3ALZv38727dsbzOeJFYOzgKeEEEew+gj+5oE6/Aq9Xs+cOXP4xz/+Ue/UnqsUFxczadIksrOzyc7O5pNPPnF7HQrfxS0rBqWU3wHf2V4fA3q7o1zFf+nRowfh4eEeWUpsMplq/GJ0796drKws2rdvXyNuPViXHaekpHDq1CmvrbdXeBa1d0BRi/nz59OnTx+Hi03mzJnD6tWradu2rQaaKTyB2jvQjIiIiOC+++6zTwFt3ryZY8eOub2e8vJyjEaj/UjroUOH2ncZhoWFERkZqWIRNgPS0tLIzMy0v//HP/7hOKOjKQNvi5oidI3x48d7fMqpW7dusqioqEa9BQUFMj4+XvPpMCX1S1VQkSpQB5K2PB577DH69+/P888/T2FhoUfqOHXqFA8//DB33303EyZMAKB169b85S9/oaysDIC///3vbNy40SP1K5xHr9fzwgsv2BeB3XDDDU7dp4xAM6ZPnz6kpaWxYMECKisruXbtmtvruHr1KkuXLiUmJsZuBEJDQ2uER9+5cye7d++u8361rsB6OGhDx4JduXIFk8lUb5760Ol0ZGRk0KvXf88cdebHQRmBZk5UVBQrV67k22+/ZeLEiZos5509ezbTp0+vlW42m7n//vvJycnxuk6+xrBhw+o9ILSiooKxY8eyb98+l+swGo1MmjSp1oxOQygj0MzR6XQkJiYSFxeHEMJjRuDkyZOsXbuWHj16EBsbW+NamzZtaNOmTa17zGYzd9xxh/3A0vz8fK8fvKo1BoOBjIwMsrKy6l3HX15eTnBwcJPrc3TIaYM4chR4W5RjsOl4OnItWA/C/L//+79G6WWxWOyyYMECzZ1l3pbOnTvLwsJCabFY6m2nsrIymZ6e7ml9VLThlkzXrl2ZO3cu/fv391gdFouFjz76iJdffpmioiKn7hFC2MVfpxV9fRm2fz6VFkhKSgpPPfVUDaeQJ1i5ciXvv/8+Fy9ebLTDT6/XExIS4lfGQEppX3fhq/jP01C4jcLCQoYNG8Zzzz3XqPuGDRvG999/z5133ukhzXyP/Px87rrrLl577TWtVakT5RhUNBqj0ci+ffuIiYnhhx9+IDk52allxNHR0URHR5Oenl5r6io/P5/z5897SmXNqKioYPfu3fTo0aPefDqdjhtvvJGSkhIOHjzo3VkeR44Cb4tyDLqP3/72t15zeul0OhkaGio//PDDRulYUVEhS0tLa8hjjz2muRPPkzJx4sQG26W8vFxu3bpVhoSEeNUxqHoCCpexWCyUlZWxZs0aysrKuO+++5yKPRgUFFQrLTs7G71ez6effsqFCxc8oa4mREVFcf/993P77bfXm89sNvP555+zY8cO7+/OdGQZvC2qJ+A+nn76aSmEsAte+qULCwuTe/futU8HukJpaans3r17i4pk3LlzZ3np0qUG/3ctpwhVT6CF8fDDD/PLX/4SgCNHjvD0009TWen5Yx/KysqYNm2afWns+PHjefDBBxtVRnBwMO+99x47duzgmWee8WmPektCGYEWRlpaGmlpaQDs27eP1NRUh8dsFRcXu3XTkdlsZsuWLfb3roRI1+l09OvXj4iICFJTU6moqGiwzjNnzjRpvb2vIIQgISGB06dPc/bsWbc6BmNjYzEYDBw/ftzhdWUEWjBpaWls2bLF4Qdq+fLlTJkyRQOtGuamm26qYVDq4vLlywwePLjOD3dzIjg4mI8//pgdO3YwdOhQt56POHfuXO6++25iYmIcXldGoAUTEBBQp6OuW7dujB8/vs57LRYLGzZs4OLFi07XlZ2dbd8ncOuttzZe4WplXT/lWFxczLp164iPjycrKwuwRkkeOXIk586dA6y7GQ8cOOByvVoTFRVF69at3b66MCIiov4pXEeOAm+Lcgz6HhUVFTIzM7PRjkFPcfToURkdHS0feOCBOvM8++yzmjsCrxdnHYNV7NixQ4aGhrpVhxUrVkgppXIMKhqHr611j42N5b333iMxMbHOPGPHjqVr16410tatW8dnn33mUp0JCQk8//zz5OTksGjRIpfKaCwdOnTggw8+wGw2Y7FYmDdvXpO2FzuDMgJ+hsVioaioiMDAQAwGQ715w8PD7d37hggLC7PHIfQEYWFh9Q5fwOqMvN4hWVFRwTfffONSnUlJSTz44IOEhoaycqVrx2dEREQ0yqBGR0fbZ1WklKxZs4aTJ09SVFRUw7cjhCA8PLzBfRhCiAafi6hesFb06tVL5uXlaa2GX3D69GlGjx5Nnz59ePvtt+vNe+rUKacdVDqdjqSkJIcLgbTkypUrLi8+CgoKIjk5mWvXrrm8pDkwMJDk5GSXN02dPn2aQ4cOMXr0aC5fvmxPj4iIYNmyZU6dNRgfH09YWBhCiB1Sylo7zFRPwM8wGo0cPXqU1NTUBvMmJSV5QSPPEhUV5XRvpi4iIyOJjIx0k0aNIyEhAb1ez4ABA7hy5Yo9PTw8nBtvvLHe4ZGzKCOgUPg4cXFxfP7557XS9Xq9W8pvkhEQQkQBC4FuWD2RDwEHgX8CKcBx4B4p5eU6ilB4gS1btrBmzRrAGvizpKSEPXv28Pzzz9fI17FjRx566CGfcwoq3PeFd0RTewLvAGuklGOFEEGAAXgO2CilfF0I8SzwLNbzCRUakZuby6uvvloj7ccff6wV769///7cd9999g9cYGCgMgh+gMtGQAgRCdwBTASQUlYClUKIEcAAW7ZFWM8oVEagGbBz5077voOAgADmz59Pt27dNNZK4Wma0hPoCFwAPhJC3AbsAJ4A4qSUZ2x5zgJxjm4WQkwGJgMkJyc3QQ1FQ0RHRzv9ZS4uLgasRkAdOOofuDxFKIToBWwH+kopc4QQ7wDXgBlSyqhq+S5LKVvXV5aaIvQsRqOxwc04jjAYDH4VD7Cl44kpwnwgX0pZdbLEMqzj/3NCiHgp5RkhRDzQ8mJGNTMCAwM9cqS5omXgspmXUp4FTgkhqtZpDgL2A6uACba0CYBrS60UCoVXaOrswAzgE9vMwDFgElbDslQI8TBwAriniXUoFAoP0iQjIKXcBTgKdD+oKeUqFArvobw+CoWfo4yA
QuHnKCOgUPg5yggoFH6OMgIKhZ+jjIBC4ecoI6BQ+DnKCCgUfo4yAgqFn6OMgELh5ygjoFD4OcoIKBR+jjICCoWfo4yAQuHnKCOgUPg5yggoFH6OMgIKhZ+jjIBC4ecoI6BQ+DnKCCgUfo4yAgqFn6OMgELh5ygjoFD4OcoIKBR+jjICCoWf0yQjIIR4UgjxoxBinxDiMyFEiBCioxAiRwhxRAjxT9sRZQqFwkdx2QgIIdoDjwO9pJTdAD0wDngDmCel7AxcBh52h6IKhZZYLBa7OEqrLlLKOu91p7iLph5IGgCECiGMgAE4A/wSuM92fRHwEjC/ifUoFJrx1Vdf8e677wLQunVr/vKXv3D48GFefPFFh/nHjRvHQw89BMDp06d57LHHKC4udqtOYWFhvPvuuyQmJja5LJeNgJSyQAjxJnASKAPWATuAK1JKky1bPtDe0f1CiMnAZIDk5GRX1VAoPIbRaKSgoIBdu3axfv16AGJiYjh06BD79u2zp11P586dGTBgAAAnTpzg22+/5erVq27VLSIigkOHDqHX64mPj29SWeL6rovTNwrRGvgCuBe4AnwOLANesg0FEEIkAd/Yhgt10qtXL5mXl+eSHgqFpzh06BCDBw+msLCQkpISAIQQREZGYjabKSoqcnhfaGgoBoMBALPZzNWrV2sNEZpKlR79+/fniy++QK/XO3PPDillrVPEmzIcuBP4WUp5wVbBcqAvECWECLD1BhKBgibUoVBohslk4tKlS3YDACCl5MqVK/XeV1ZWRllZmUd1q9Lj4MGDLF682KER6Nq1K717926wrKYYgZNAphDCgHU4MAjIAzYBY4ElwARgZRPqUCgU9XDgwAEmTZrk8Nr06dOdMgIuzw5IKXOwdv9/APbayloAzAKeEkIcAaKBv7lah0KhJe3bt2fBggVMnDhRa1VcYsOGDUyYMIEffvih3nxNmh2QUr4IXO8iPQY0bH4UCh8nMjKScePGUVFRwapVqygqKsJoNDrMGxERQUBA3V+nioqKGsMKb3Dw4EEOHjxIdnY2KSkpdeZz2THoTpRjUOHLFBUVceHCBaZPn86aNWtqXTcYDCxbtoyuXbvWWcayZcuYNWuWJ9Wsk7i4OFq1asWxY8fc7hhUKPyC8PBwwsPDycrKwmw217oeEhJCWloaHTt2rLOM7t27k52dDUBlZSU5OTmUl5d7TOfqnDt3rt7rqiegUDiJlLLOqT6drn73WvV7CwsL6d27NydOnHC7jg2gegIKRVMQQiCEaPK94eHhzJ49276A6Msvv2TLli0ulZuamsqjjz7aoBEC6hyOKCOgUHiZ0NBQpkyZYn9/6dIlcnNzawPXX84AABBfSURBVOQxm82YTKbrb61FcnIyzzzzjFOLhZQRUCh8lOnTpzN69OgaaZ9++invvPOOV+pXRkCh0JikpCSSkpJqpB06dIju3bs3eG/nzp1dHqJUoRyDCoUPYjKZqKysbDCfXq8nODjYqTI9sXdAoVB4iICAgHoXH7kTFV5MofBzlBFQKPwcZQQUCj9HGQGFws9RRkCh8HOUEVAo/BxlBBQKP0etE1AoWjilpaV1BkMB1RNQKFo8M2fOJCMjo87rqiegaDEcP36ckydPunRvZGQkt956a5PX4V9PeXk5O3fuJDo6mhtuuMGtZTtLQUEBBw8erPO6MgKKFsOCBQuYO3euS/fefvvtrFu3zu1LdQsKChgxYgTDhw9n4cKFbi3bXSgjoGiWLF++nN27d9dI27x5s1N78B1x9OhR/vCHPzgVnKMxXLp0iaKiIvLy8njxxRcZO3Yst9xyi1vraCrKCCiaDSaTyR6ia8WKFfzv//6v28o+efIkr7zyitvKu57du3eze/duOnXqRFpaWoP59Xq92w1SXSgjoGgWnD59mkceeYRr164B1v32zZFXX32VBQsWNJjv3nvvZcaMGV7QSBkBhQ9z8eJFe6Tc/Px8tm7d6vaDPb3NoUOHnDJgzpwc5C6UEVD4LIsXL+b3v/89ABaLhdLSUo01apk0OOgQQvxdCHFeCLGvWlobIcR6IcRh29/WtnQhhHhXCHFECLFHCNHTk8orWiYXLlzgww8/ZPPmzRQVFVFUVERJSYnbT/b1Zfbs2cMHH3zAqVOnPF6XM56HfwC/ui7tWWCjlLILsNH2HuAuoItNJgPz3aOmwp84fvw4M2bMYMWKFVqrohkbN25k2rRp7Nu3r+HMTaRBIyCl3Axcui55BLDI9noRMLJa+sfSynasx5THu0tZRcumsrKSp59+mlmzZtW7zFXROGbOnMmyZcvqvO6qTyBOSnnG9vosEGd73R6o3n/Jt6Wd4TqEEJOx9hZITk52UQ1FS6GoqIjCwkLWrFnDjz/+qLU6LYqsrKx6rzd5IlJaB2qNHqxJKRdIKXtJKXvFxMQ0VQ1FM+ett96iX79+zXbqrznjak/gnBAiXkp5xtbdP29LLwCqB1BPtKUpFA45d+4cOTk5/PDDD5w+fVprdfwSV43AKmAC8Lrt78pq6Y8JIZYAGcDVasMGhaIWO3bsYNSoUVgsFq1V8VsaNAJCiM+AAUBbIUQ+8CLWL/9SIcTDwAngHlv21cDdwBGgFJjkAZ0VLYCioiLmzp3Lzp07lQHQmAaNgJRyfB2XBjnIK4HpTVVK0bKpqKigsLCQjz76iPz8fK3V8XtUUBGF13nhhRe4++677UuCFdqilg0rvMb58+c5evQoO3fu5MCBA1qr49PEx8fTsWNHWrdu7fG6lBFQeI2vv/6aqVOnurzn35+45557mDNnDoGBgR6vyyeMwNmzZ1m+fHmtM9oVLQuLxeLUSbv+SEBAAA8++CDt2rUDoF+/fgQFBXmnbq/U0gAFBQUsXryY4cOHo9fr3R7nTaE9ZrMZs9mstRo+iRCC4OBgpk+fTnp6utfr9xnH4Pfff8/gwYP56quvtFZF4Wby8/MZNWoU8+bN01oVn2TKlCl89dVXmgUi9RkjcOHCBTZt2kRBgVpg2NIoLS1l8+bNyhl4HQaDgc6dO9OrVy8GDBhAeHi4Jnr4xHBAofBHsrKyWLp0Ka1atdJUD58zAtu2bSMsLIyhQ4cSFRWltToKhdsJDg5m2LBhZGVl0bp1a819YMIXorUIIWooERYWxrZt2+jWrZtWKincyKFDh+jdu3ezjw/oLqKjo8nLyyMlJcWr9Qohdkgpe12f7nM9AYWiJTNt2jQGDRpE27ZttVbFjjICCoUXCA4OJjw8nEGDBjFmzBit1amBz8wOKBQtmWHDhpGbm8uQIUO0VqUWPtkTMJvNbN26lZKSEnr37q2540ShcBWDwUBGRgZZWVle9wFUsXPnTs6fP193Biml5oI1PFkNEULIfv36ycrKSqlo3hw8eFBGRkbWesb+IJ07d5aFhYXSYrFo1v6jRo2SNud7nnTw/fPJngBQ3UAoFM0OIQRTpkyhV69ehIWFadqbnTBhAn379uXpp592eN1njQBYhwXl5eUIIdx+ZLRC4W6q9gCA9UDRe++9lwEDBmirFDBixAiAOo2ATzsG9+7dyx133MGHH36otSo
KRYPcfPPNfPfdd3z//ff8+9//1mQzkCv49M9rSUkJu3btUlFoFT5LYmIisbGxANxyyy307NnTKzEA3IlPGwGFwtd58sknmTZtGgA6na7ZGQBQRkDh5wQFBXH//fdTWlrK559/7nTk406dOjFy5Eh69+5NaGioh7V0jV27drFx48YG8zULI1A1U6DWCyjciRACg8HArFmzOH/+PCtWrHAq8pEQgltuuYU333zTC1q6hpSSLVu21OkMrI5POwarWLJkCcOHD2fv3r1aq6JoQcyYMYOlS5fSvn17unXrxqpVqxg/vq4I+1Zat27NokWLeOGFF7ykZeM5ceIEY8aMYf585w4FbxY9gaNHj3LixAmnrJrCtzh79iwFBQU+dcCIwWAgNjaWrKwssrOz7elDhgxh//79bN26lbNnz1JRUVHjvpiYGDp06EB2drY9FqAvUlRUxPr16ykuLnYqf7PoCSiaJ2azmalTpzJmzBinP5De4M477yQ3N5eRI0fWujZlyhS2bdtG9+7da1174403WLt2rX02oKXgzDFkfweGAuellN1saXOBYUAlcBSYJKW8Yrs2G3gYMAOPSynXekh3hY8jhCA9PR2TycS6deswGo1aqwRYnYF1beU1GAwEBwdz1113kZqaWuPazTffTJs2bbyhoktYLBY2btzIzp07a7R1v379SEpK4rPPPnN8o6O1xLLmuv47gJ7Avmppg4EA2+s3gDdsr28CdgPBQEesBkLvRB0NrsEOCAiQ3333ncfXWSvcj6/tHRg7dqzWTeIRKisrZWZmZq3/d8WKFVJKWefegQaHA1LKzcCl69LWSSmrTpDYjvUIcoARwBIpZYWU8mesB5P2bqgOZzCbzbz++uvMnj2bsrIydxSpULRoBg4cyEcffUTPnj3rzecOn8BDwDe21+2BU9Wu5dvSaiGEmCyEyBNC5DlTiZSSNWvWsHz5cnWAhcIlhBBERkZqHtjT0+h0OiIjI+nVqxcTJ04kOTm53vxNmh0QQvwOMAGfNPZeKeUCYIGtHLVdUOFx4uLiWL58uWb7+r1Famoqy5YtIyEhwan8LhsBIcRErA7DQbbxBkABkFQtW6ItrckIIbjtttu49dZb0ev17ihS4WGklOzatYu9e/dqdv5gYmIiXbp0ASA2Npa0tDSvHPLpSQ4cOOBwP43JZOLatWuEhITQqVMnwsLCnCvQkaPgegFSqOkY/BWwH4i5Lt/N1HQMHsONjsGNGzdKs9nsOc+Kwq0YjUY5cOBAqdPpNHMCzpgxQ5pMJru0BB555BGp1+sdCiC7desmi4qKat2Hq0FFhBCfAQOAtkKIfOBFYLbti77etpR3u5RyqpTyRyHEUpuBMAHTpZRNPoBuwIABDB48mM6dO6PTqaUNzQmLxaLJQqF27doxZcoU+vbt2yx7jlu2bGHNmjUOr+Xm5tZ7ruO5c+d4+eWXnT/Q1JFl8LbgwIIHBgbKoKAgGRQUJF966SX3mVGF1zAajbJ///5e//UPDAyUvXr1kiUlJVo3QaOorKyUFRUVsqKiQs6ZM8cTbdN8wosZDAY+/PBD+2KN9u0dTjAoFLUIDAxk/vz5/OIXvyAkJERrdZymsrKSqVOn8tNPPwFw5swZr9Xtc0YgISGBpKQkMjIy6NSpk9bqKJqAEIKOHTtSUFDAsWPHPDYsaNWqFSkpKQghCAoKIiMjo1mdXlW1vyInJ4f9+/d7XwFH3QNvC9W6LH/6059kUVGRptFZFe6jrKxM7ty5U0ZERHis+z9w4EB59epVWVRUJIuKipqd83j27NmyVatW3nCg+u5wICYmhnvuuQeA9PR056c2FD5PSEgIBoPBo7EgAgICCAsLazZO4/LycpYuXcq1a9cA2L59OyUlJZrp4xNGIDk5mT//+c9aq6FQeIXi4mJefPFFjh8/rrUqgI8YAYXCFcLCwnjzzTe59dZbm00v4O2332b9+vWcO3dOa1XsKCOgaLYEBQVx5513NisHcl5eXp3z/1ohpH3Fr4ZKCHEBKAEKtdYFaIvSozpKj5o0Zz06SCljrk/0CSMAIITIk1L2UnooPZQe3tWjeQykFAqFx1BGQKHwc3zJCCzQWgEbSo+aKD1q0uL08BmfgEKh0AZf6gkoFAoNUEZAofBzfMIICCF+JYQ4KIQ4IoR41kt1JgkhNgkh9gshfhRCPGFLbyOEWC+EOGz765VYVEIIvRBipxDiK9v7jkKIHFub/FMI4WSEiCbpECWEWCaEOCCE+EkIkaVFewghnrQ9k31CiM+EECHeag8hxN+FEOeFEPuqpTlsA2HlXZtOe4QQ9Yf1bboec23PZo8QYoUQIqratdk2PQ4KIYY0qjJHu4q8KYAe6/kEqUAQ1vBkN3mh3nigp+11OHAI67kJc4BnbenPYjtTwQv6PAV8Cnxle78UGGd7/T4wzQs6LAIesb0OAqK83R5Yo1P/DIRWa4eJ3moPHJ+z4bANgLuxRtoWQCaQ42E93Hreh71cT3+wnPhns4C11d7PBmZroMdKIBs4CMTb0uKBg16oOxHYCPwS+Mr2oSqs9sBrtJGHdIi0ffnEdelebQ/+G7a+DdZl7V8BQ7zZHtSOqemwDYAPgPGO8nlCj+uujQI+sb2u8Z0B1gJZztbjC8MBp88q8BRCiBSgB5ADxEkpq8K6nAXivKDCn4CZQFXUjWjgivzvAS/eaJOOwAXgI9uwZKEQohVebg8pZQHwJnASOANcBXbg/faoTl1toOVn16XzPhzhC0ZAU4QQYcAXwG+klNeqX5NWs+rROVQhRNU5jzs8WY8TBGDtfs6XUvbAupejhn/GS+3RGutJVh2BBKAV1ujWPoE32qAhmnLehyN8wQh47KyChhBCBGI1AJ9IKZfbks8JIeJt1+OB8x5Woy8wXAhxHFiCdUjwDhAlhKja5emNNskH8qWUObb3y7AaBW+3x53Az1LKC1JKI7Acaxt5uz2qU1cbeP2zW+28j/ttBqnJeviCEcgFuti8v0HAOGCVpysV1lA3fwN+klK+Xe3SKmCC7fUErL4CjyGlnC2lTJRSpmD937+VUt4PbALGelGPs8ApIURXW9IgrKHjvdoeWIcBmUIIg+0ZVenh1fa4jrraYBXw/2yzBJnA1WrDBrcjhPgV1mHjcCll6XX6jRNCBAshOgJdgP84XbAnnTyNcIDcjdU7fxT4nZfq7Ie1W7cH2GWTu7GOxzcCh4ENQBsvtsMA/js7kGp7kEeAz4FgL9TfHciztcn/Aa21aA/gD8ABYB+wGKvX2yvtAXyG1RdhxNo7eriuNsDqwP2L7XO7F+jlYT2OYB37V31e36+W/3c2PQ4CdzWmLrVsWKHwc3xhOKBQKDREGQGFws9RRkCh8HOUEVAo/BxlBBQKP0cZAYXCz1FGQKHwc/4/rBItwB7Q/LgAAAAASUVORK5CYII=\n", 433 | "text/plain": [ 434 | "
" 435 | ] 436 | }, 437 | "metadata": { 438 | "needs_background": "light" 439 | }, 440 | "output_type": "display_data" 441 | } 442 | ], 443 | "source": [ 444 | "# The Corresponding Ground Truth Mask\n", 445 | "plt.imshow(y_test[4].reshape(128,128), cmap='gray')" 446 | ] 447 | }, 448 | { 449 | "cell_type": "code", 450 | "execution_count": null, 451 | "metadata": { 452 | "id": "kgfegLP0L1f6" 453 | }, 454 | "outputs": [], 455 | "source": [] 456 | } 457 | ], 458 | "metadata": { 459 | "accelerator": "GPU", 460 | "colab": { 461 | "collapsed_sections": [ 462 | "UE8KCSfpN0J2", 463 | "LFCKHBo-m8ad" 464 | ], 465 | "provenance": [] 466 | }, 467 | "gpuClass": "standard", 468 | "kernelspec": { 469 | "display_name": "Python 3 (ipykernel)", 470 | "language": "python", 471 | "name": "python3" 472 | }, 473 | "language_info": { 474 | "codemirror_mode": { 475 | "name": "ipython", 476 | "version": 3 477 | }, 478 | "file_extension": ".py", 479 | "mimetype": "text/x-python", 480 | "name": "python", 481 | "nbconvert_exporter": "python", 482 | "pygments_lexer": "ipython3", 483 | "version": "3.7.12" 484 | } 485 | }, 486 | "nbformat": 4, 487 | "nbformat_minor": 1 488 | } 489 | -------------------------------------------------------------------------------- /pre-processing/Pre-Processing-Sentinel 1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "f2db867f", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "\n", 11 | "import imageio\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "import numpy as np\n", 14 | "import rasterio\n", 15 | "\n", 16 | "import snappy\n", 17 | "from os.path import join\n", 18 | "from glob import glob\n", 19 | "import pandas as pd\n", 20 | "import numpy as np\n", 21 | "import os\n", 22 | "import glob\n", 23 | "import jpy\n", 24 | "System = jpy.get_type('java.lang.System')\n", 25 | "System.gc()\n", 26 | "import gc\n", 27 | "\n", 28 | "import re\n", 29 | "from geomet import wkt\n", 30 | "from snappy import GPF\n", 31 | "from snappy import ProductIO\n", 32 | "from snappy import HashMap\n", 33 | "from snappy import jpy\n", 34 | "HashMap = snappy.jpy.get_type('java.util.HashMap')\n", 35 | "import time\n", 36 | "\n", 37 | "from osgeo import gdal, ogr\n", 38 | "import sys\n", 39 | "from osgeo import osr\n", 40 | "\n", 41 | "import configparser\n", 42 | "from os.path import expanduser\n", 43 | "os.chdir(r\"D:\\MICROS\\Sentinel_1\")\n", 44 | "\n", 45 | "\n", 46 | "import snappyfunctions_pre as snap" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 2, 52 | "id": "bf450a44", 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "data": { 57 | "text/html": [ 58 | "
\n", 59 | "\n", 72 | "\n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | " \n", 203 | " \n", 204 | " \n", 205 | " \n", 206 | " \n", 207 | " \n", 208 | " \n", 209 | " \n", 210 | " \n", 211 | " \n", 212 | " \n", 213 | " \n", 214 | " \n", 215 | " \n", 216 | " \n", 217 | " \n", 218 | " \n", 219 | " \n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | " \n", 243 | " \n", 244 | " \n", 245 | " \n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | 
" \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | " \n", 370 | " \n", 371 | " \n", 372 | " \n", 373 | " \n", 374 | " \n", 375 | " \n", 376 | " \n", 377 | " \n", 378 | " \n", 379 | " \n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | " \n", 385 | " \n", 386 | " \n", 387 | " \n", 388 | " \n", 389 | " \n", 390 | " \n", 391 | " \n", 392 | " \n", 393 | " \n", 394 | " \n", 395 | " \n", 396 | " \n", 397 | " \n", 398 | " \n", 399 | " \n", 400 | " \n", 401 | " \n", 402 | " \n", 403 | " \n", 404 | " \n", 405 | " \n", 406 | " \n", 407 | " \n", 408 | " \n", 409 | " \n", 410 | " \n", 411 | " \n", 412 | " \n", 413 | " \n", 414 | " \n", 415 | " \n", 416 | " \n", 417 | " \n", 418 | " \n", 419 | " \n", 420 | " \n", 421 | " \n", 422 | " \n", 423 | " \n", 424 | " \n", 425 | " \n", 426 | " \n", 427 | " \n", 428 | " \n", 429 | " \n", 430 | " \n", 431 | " \n", 432 | " \n", 433 | " \n", 434 | " \n", 435 | " \n", 436 | " \n", 437 | " \n", 438 | " \n", 439 | " \n", 440 | " \n", 441 | " \n", 442 | " \n", 443 | " \n", 444 | " \n", 445 | " \n", 446 | " \n", 447 | " \n", 448 | " \n", 449 | " \n", 450 | " \n", 451 | " \n", 452 | " \n", 453 | " \n", 454 | " \n", 455 | " \n", 456 | " \n", 457 | " \n", 458 | " \n", 459 | " \n", 460 | " \n", 461 | " \n", 462 | " \n", 463 | " \n", 464 | " \n", 465 | " \n", 466 | " \n", 467 | " \n", 468 | " \n", 469 | " \n", 470 | " \n", 471 | " \n", 472 | " \n", 473 | " \n", 474 | " \n", 475 | " \n", 476 | " \n", 477 | " \n", 478 | " \n", 479 | " \n", 480 | " \n", 481 | " \n", 482 | " \n", 483 | " \n", 484 | " \n", 485 | " \n", 486 | " \n", 487 | " \n", 488 | " \n", 489 | " \n", 490 | " \n", 491 | " \n", 492 | " \n", 493 | " \n", 494 | " \n", 495 | " \n", 496 | " \n", 497 | " \n", 498 | " \n", 499 | " \n", 500 | " \n", 501 | " \n", 502 | " \n", 503 | " \n", 504 | " \n", 505 | " \n", 506 | " \n", 507 | " \n", 508 | " \n", 509 | " \n", 510 | " \n", 511 | " \n", 512 | " \n", 513 | " \n", 514 | " \n", 515 | " \n", 516 | " \n", 517 | " \n", 518 | " \n", 519 | " \n", 520 | " \n", 521 | " \n", 522 | " \n", 523 | " \n", 524 | " \n", 525 | " \n", 526 | " \n", 527 | " \n", 528 | " \n", 529 | " \n", 530 | " \n", 531 | " \n", 532 | " \n", 533 | " \n", 534 | " \n", 535 | " \n", 536 | " \n", 537 | " \n", 538 | " \n", 539 | " \n", 540 | " \n", 541 | " \n", 542 | " \n", 543 | " \n", 544 | " \n", 545 | " \n", 546 | " \n", 547 | " \n", 548 | " \n", 549 | " \n", 550 | " \n", 551 | " \n", 552 | " \n", 553 | " \n", 554 | " \n", 555 | " \n", 556 | " \n", 557 | " \n", 558 | " \n", 559 | " \n", 560 | " \n", 561 | " \n", 562 | " \n", 563 | " \n", 564 | " \n", 565 | " \n", 566 | " \n", 567 | " \n", 568 | " \n", 569 | " \n", 570 | " \n", 571 | " \n", 572 | " \n", 573 | " \n", 574 | " \n", 575 | " \n", 576 | " \n", 577 | " \n", 578 | " \n", 579 | " \n", 580 | " \n", 581 | " \n", 582 | " \n", 583 | " \n", 584 | " \n", 585 | " \n", 586 | " \n", 587 | " \n", 588 | " \n", 589 | " \n", 590 | " \n", 591 | " \n", 592 | " \n", 593 | " \n", 594 | " \n", 595 | " \n", 596 | " \n", 597 | " \n", 598 | " \n", 599 | " \n", 600 | " \n", 601 | " \n", 602 | " \n", 603 | " \n", 604 | " \n", 605 | " \n", 606 | " \n", 607 | " \n", 608 | " \n", 609 | " \n", 610 | " \n", 611 | " \n", 612 | " \n", 613 | " \n", 614 | " \n", 615 | " \n", 616 | " \n", 617 | "
NameProcess_LevelSensing_DateRelevant_OrbitHeightWidthBand_Names
0S1A_IW_GRDH_1SDV_20180129T035110_20180129T0351...GRDH20180129022CA01671725826[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
1S1A_IW_GRDH_1SDV_20180305T154919_20180305T1549...GRDH20180305023D1B1672925752[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
2S1A_IW_GRDH_1SDV_20180504T154921_20180504T1549...GRDH201805040258A91672925748[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
3S1A_IW_GRDH_1SDV_20180529T035113_20180529T0351...GRDH201805290264111671725822[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
4S1A_IW_GRDH_1SDV_20180703T154924_20180703T1549...GRDH201807030273B91672925749[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
5S1A_IW_GRDH_1SDV_20180728T035127_20180728T0351...GRDH20180728027EBD1671725581[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
6S1A_IW_GRDH_1SDV_20180901T154928_20180901T1549...GRDH20180901028F3B1672925748[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
7S1A_IW_GRDH_1SDV_20180926T035119_20180926T0351...GRDH20180926029ABA1671125709[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
8S1A_IW_GRDH_1SDV_20181031T154929_20181031T1549...GRDH2018103102ABA31672925752[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
9S1A_IW_GRDH_1SDV_20181125T035119_20181125T0351...GRDH2018112502B8B21671325789[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
10S1A_IW_GRDH_1SDV_20190325T035117_20190325T0351...GRDH2019032502F78B1671425785[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
11S1A_IW_GRDH_1SDV_20190429T154927_20190429T1549...GRDH20190429030A731672225936[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
12S1A_IW_GRDH_1SDV_20190628T154930_20190628T1549...GRDH201906280325DC1670925936[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
13S1A_IW_GRDH_1SDV_20190723T035122_20190723T0351...GRDH201907230330A71670225785[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
14S1A_IW_GRDH_1SDV_20190827T154933_20190827T1549...GRDH201908270341B21671025936[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
15S1A_IW_GRDH_1SDV_20190921T035125_20190921T0351...GRDH20190921034E081670225786[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
16S1A_IW_GRDH_1SDV_20191026T154935_20191026T1550...GRDH20191026035FDD1670925940[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
17S1A_IW_GRDH_1SDV_20191120T035126_20191120T0351...GRDH20191120036C5B1670225789[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
18S1A_IW_GRDH_1SDV_20200223T154932_20200223T1549...GRDH20200223039CB51670925940[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
19S1A_IW_GRDH_1SDV_20200518T035125_20200518T0351...GRDH2020051803C70A1670225786[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
20S1A_IW_GRDH_1SDV_20200622T154936_20200622T1550...GRDH2020062203D6941670925936[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
21S1A_IW_GRDH_1SDV_20200717T035128_20200717T0351...GRDH2020071703E1741670425714[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
22S1A_IW_GRDH_1SDV_20200821T154940_20200821T1550...GRDH2020082103F2641671025936[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
23S1A_IW_GRDH_1SDV_20200915T035132_20200915T0351...GRDH2020091503FEF31670425715[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
24S1A_IW_GRDH_1SDV_20201020T154941_20201020T1550...GRDH202010200411241671025939[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
25S1A_IW_GRDH_1SDV_20201219T154940_20201219T1550...GRDH20201219042F4B1670925942[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
26S1B_IW_GRDH_1SDV_20180228T035033_20180228T0350...GRDH20180228011BFF1672225654[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
27S1B_IW_GRDH_1SDV_20180404T154854_20180404T1549...GRDH20180404012CFF1672525718[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
28S1B_IW_GRDH_1SDV_20180429T035034_20180429T0350...GRDH2018042901386C1672225652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
29S1B_IW_GRDH_1SDV_20180603T154857_20180603T1549...GRDH2018060301492F1672625716[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
30S1B_IW_GRDH_1SDV_20180628T035038_20180628T0351...GRDH201806280154371672325653[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
31S1B_IW_GRDH_1SDV_20180802T154900_20180802T1549...GRDH201808020164111672525715[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
32S1B_IW_GRDH_1SDV_20180827T035041_20180827T0351...GRDH20180827016F1E1672225651[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
33S1B_IW_GRDH_1SDV_20181001T154903_20181001T1549...GRDH20181001017F041672625717[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
34S1B_IW_GRDH_1SDV_20181026T035043_20181026T0351...GRDH201810260189FA1672225656[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
35S1B_IW_GRDH_1SDV_20190529T154903_20190529T1549...GRDH2019052901EFC41668626521[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
36S1B_IW_GRDH_1SDV_20190623T035044_20190623T0351...GRDH2019062301FA621672225652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
37S1B_IW_GRDH_1SDV_20190728T154906_20190728T1549...GRDH201907280209A21667626520[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
38S1B_IW_GRDH_1SDV_20190822T035047_20190822T0351...GRDH201908220214971671025652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
39S1B_IW_GRDH_1SDV_20190926T154910_20190926T1549...GRDH201909260224A21667626522[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
40S1B_IW_GRDH_1SDV_20191021T035049_20191021T0351...GRDH20191021022FC01671025656[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
41S1B_IW_GRDH_1SDV_20191220T035048_20191220T0351...GRDH20191220024BA51670925657[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
42S1B_IW_GRDH_1SDV_20200124T154907_20200124T1549...GRDH20200124025C241669826048[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
43S1B_IW_GRDH_1SDV_20200218T035046_20200218T0351...GRDH202002180267B11670925655[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
44S1B_IW_GRDH_1SDV_20200324T154906_20200324T1549...GRDH202003240278251669826043[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
45S1B_IW_GRDH_1SDV_20200418T035047_20200418T0351...GRDH202004180283741670925652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
46S1B_IW_GRDH_1SDV_20200523T154909_20200523T1549...GRDH202005230293591667726520[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
47S1B_IW_GRDH_1SDV_20200617T035050_20200617T0351...GRDH20200617029E1C1670925652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
48S1B_IW_GRDH_1SDV_20200722T154912_20200722T1549...GRDH2020072202ADDE1667726520[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
49S1B_IW_GRDH_1SDV_20200816T035053_20200816T0351...GRDH2020081602B8CC1671025652[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
50S1B_IW_GRDH_1SDV_20200920T154915_20200920T1549...GRDH2020092002C90A1667626521[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
51S1B_IW_GRDH_1SDV_20201015T035055_20201015T0351...GRDH2020101502D4311670925655[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
52S1B_IW_GRDH_1SDV_20210118T154913_20210118T1549...GRDH202101180300721669826048[Amplitude_VH, Intensity_VH, Amplitude_VV, Int...
\n", 618 | "
" 619 | ], 620 | "text/plain": [ 621 | " Name Process_Level \\\n", 622 | "0 S1A_IW_GRDH_1SDV_20180129T035110_20180129T0351... GRDH \n", 623 | "1 S1A_IW_GRDH_1SDV_20180305T154919_20180305T1549... GRDH \n", 624 | "2 S1A_IW_GRDH_1SDV_20180504T154921_20180504T1549... GRDH \n", 625 | "3 S1A_IW_GRDH_1SDV_20180529T035113_20180529T0351... GRDH \n", 626 | "4 S1A_IW_GRDH_1SDV_20180703T154924_20180703T1549... GRDH \n", 627 | "5 S1A_IW_GRDH_1SDV_20180728T035127_20180728T0351... GRDH \n", 628 | "6 S1A_IW_GRDH_1SDV_20180901T154928_20180901T1549... GRDH \n", 629 | "7 S1A_IW_GRDH_1SDV_20180926T035119_20180926T0351... GRDH \n", 630 | "8 S1A_IW_GRDH_1SDV_20181031T154929_20181031T1549... GRDH \n", 631 | "9 S1A_IW_GRDH_1SDV_20181125T035119_20181125T0351... GRDH \n", 632 | "10 S1A_IW_GRDH_1SDV_20190325T035117_20190325T0351... GRDH \n", 633 | "11 S1A_IW_GRDH_1SDV_20190429T154927_20190429T1549... GRDH \n", 634 | "12 S1A_IW_GRDH_1SDV_20190628T154930_20190628T1549... GRDH \n", 635 | "13 S1A_IW_GRDH_1SDV_20190723T035122_20190723T0351... GRDH \n", 636 | "14 S1A_IW_GRDH_1SDV_20190827T154933_20190827T1549... GRDH \n", 637 | "15 S1A_IW_GRDH_1SDV_20190921T035125_20190921T0351... GRDH \n", 638 | "16 S1A_IW_GRDH_1SDV_20191026T154935_20191026T1550... GRDH \n", 639 | "17 S1A_IW_GRDH_1SDV_20191120T035126_20191120T0351... GRDH \n", 640 | "18 S1A_IW_GRDH_1SDV_20200223T154932_20200223T1549... GRDH \n", 641 | "19 S1A_IW_GRDH_1SDV_20200518T035125_20200518T0351... GRDH \n", 642 | "20 S1A_IW_GRDH_1SDV_20200622T154936_20200622T1550... GRDH \n", 643 | "21 S1A_IW_GRDH_1SDV_20200717T035128_20200717T0351... GRDH \n", 644 | "22 S1A_IW_GRDH_1SDV_20200821T154940_20200821T1550... GRDH \n", 645 | "23 S1A_IW_GRDH_1SDV_20200915T035132_20200915T0351... GRDH \n", 646 | "24 S1A_IW_GRDH_1SDV_20201020T154941_20201020T1550... GRDH \n", 647 | "25 S1A_IW_GRDH_1SDV_20201219T154940_20201219T1550... GRDH \n", 648 | "26 S1B_IW_GRDH_1SDV_20180228T035033_20180228T0350... GRDH \n", 649 | "27 S1B_IW_GRDH_1SDV_20180404T154854_20180404T1549... GRDH \n", 650 | "28 S1B_IW_GRDH_1SDV_20180429T035034_20180429T0350... GRDH \n", 651 | "29 S1B_IW_GRDH_1SDV_20180603T154857_20180603T1549... GRDH \n", 652 | "30 S1B_IW_GRDH_1SDV_20180628T035038_20180628T0351... GRDH \n", 653 | "31 S1B_IW_GRDH_1SDV_20180802T154900_20180802T1549... GRDH \n", 654 | "32 S1B_IW_GRDH_1SDV_20180827T035041_20180827T0351... GRDH \n", 655 | "33 S1B_IW_GRDH_1SDV_20181001T154903_20181001T1549... GRDH \n", 656 | "34 S1B_IW_GRDH_1SDV_20181026T035043_20181026T0351... GRDH \n", 657 | "35 S1B_IW_GRDH_1SDV_20190529T154903_20190529T1549... GRDH \n", 658 | "36 S1B_IW_GRDH_1SDV_20190623T035044_20190623T0351... GRDH \n", 659 | "37 S1B_IW_GRDH_1SDV_20190728T154906_20190728T1549... GRDH \n", 660 | "38 S1B_IW_GRDH_1SDV_20190822T035047_20190822T0351... GRDH \n", 661 | "39 S1B_IW_GRDH_1SDV_20190926T154910_20190926T1549... GRDH \n", 662 | "40 S1B_IW_GRDH_1SDV_20191021T035049_20191021T0351... GRDH \n", 663 | "41 S1B_IW_GRDH_1SDV_20191220T035048_20191220T0351... GRDH \n", 664 | "42 S1B_IW_GRDH_1SDV_20200124T154907_20200124T1549... GRDH \n", 665 | "43 S1B_IW_GRDH_1SDV_20200218T035046_20200218T0351... GRDH \n", 666 | "44 S1B_IW_GRDH_1SDV_20200324T154906_20200324T1549... GRDH \n", 667 | "45 S1B_IW_GRDH_1SDV_20200418T035047_20200418T0351... GRDH \n", 668 | "46 S1B_IW_GRDH_1SDV_20200523T154909_20200523T1549... GRDH \n", 669 | "47 S1B_IW_GRDH_1SDV_20200617T035050_20200617T0351... GRDH \n", 670 | "48 S1B_IW_GRDH_1SDV_20200722T154912_20200722T1549... GRDH \n", 671 | "49 S1B_IW_GRDH_1SDV_20200816T035053_20200816T0351... 
GRDH \n", 672 | "50 S1B_IW_GRDH_1SDV_20200920T154915_20200920T1549... GRDH \n", 673 | "51 S1B_IW_GRDH_1SDV_20201015T035055_20201015T0351... GRDH \n", 674 | "52 S1B_IW_GRDH_1SDV_20210118T154913_20210118T1549... GRDH \n", 675 | "\n", 676 | " Sensing_Date Relevant_Orbit Height Width \\\n", 677 | "0 20180129 022CA0 16717 25826 \n", 678 | "1 20180305 023D1B 16729 25752 \n", 679 | "2 20180504 0258A9 16729 25748 \n", 680 | "3 20180529 026411 16717 25822 \n", 681 | "4 20180703 0273B9 16729 25749 \n", 682 | "5 20180728 027EBD 16717 25581 \n", 683 | "6 20180901 028F3B 16729 25748 \n", 684 | "7 20180926 029ABA 16711 25709 \n", 685 | "8 20181031 02ABA3 16729 25752 \n", 686 | "9 20181125 02B8B2 16713 25789 \n", 687 | "10 20190325 02F78B 16714 25785 \n", 688 | "11 20190429 030A73 16722 25936 \n", 689 | "12 20190628 0325DC 16709 25936 \n", 690 | "13 20190723 0330A7 16702 25785 \n", 691 | "14 20190827 0341B2 16710 25936 \n", 692 | "15 20190921 034E08 16702 25786 \n", 693 | "16 20191026 035FDD 16709 25940 \n", 694 | "17 20191120 036C5B 16702 25789 \n", 695 | "18 20200223 039CB5 16709 25940 \n", 696 | "19 20200518 03C70A 16702 25786 \n", 697 | "20 20200622 03D694 16709 25936 \n", 698 | "21 20200717 03E174 16704 25714 \n", 699 | "22 20200821 03F264 16710 25936 \n", 700 | "23 20200915 03FEF3 16704 25715 \n", 701 | "24 20201020 041124 16710 25939 \n", 702 | "25 20201219 042F4B 16709 25942 \n", 703 | "26 20180228 011BFF 16722 25654 \n", 704 | "27 20180404 012CFF 16725 25718 \n", 705 | "28 20180429 01386C 16722 25652 \n", 706 | "29 20180603 01492F 16726 25716 \n", 707 | "30 20180628 015437 16723 25653 \n", 708 | "31 20180802 016411 16725 25715 \n", 709 | "32 20180827 016F1E 16722 25651 \n", 710 | "33 20181001 017F04 16726 25717 \n", 711 | "34 20181026 0189FA 16722 25656 \n", 712 | "35 20190529 01EFC4 16686 26521 \n", 713 | "36 20190623 01FA62 16722 25652 \n", 714 | "37 20190728 0209A2 16676 26520 \n", 715 | "38 20190822 021497 16710 25652 \n", 716 | "39 20190926 0224A2 16676 26522 \n", 717 | "40 20191021 022FC0 16710 25656 \n", 718 | "41 20191220 024BA5 16709 25657 \n", 719 | "42 20200124 025C24 16698 26048 \n", 720 | "43 20200218 0267B1 16709 25655 \n", 721 | "44 20200324 027825 16698 26043 \n", 722 | "45 20200418 028374 16709 25652 \n", 723 | "46 20200523 029359 16677 26520 \n", 724 | "47 20200617 029E1C 16709 25652 \n", 725 | "48 20200722 02ADDE 16677 26520 \n", 726 | "49 20200816 02B8CC 16710 25652 \n", 727 | "50 20200920 02C90A 16676 26521 \n", 728 | "51 20201015 02D431 16709 25655 \n", 729 | "52 20210118 030072 16698 26048 \n", 730 | "\n", 731 | " Band_Names \n", 732 | "0 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 733 | "1 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 734 | "2 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 735 | "3 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 736 | "4 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 737 | "5 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 738 | "6 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 739 | "7 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 740 | "8 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 741 | "9 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 742 | "10 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 743 | "11 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 744 | "12 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 745 | "13 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 746 | "14 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... 
\n", 747 | "15 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 748 | "16 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 749 | "17 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 750 | "18 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 751 | "19 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 752 | "20 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 753 | "21 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 754 | "22 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 755 | "23 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 756 | "24 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 757 | "25 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 758 | "26 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 759 | "27 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 760 | "28 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 761 | "29 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 762 | "30 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 763 | "31 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 764 | "32 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 765 | "33 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 766 | "34 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 767 | "35 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 768 | "36 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 769 | "37 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 770 | "38 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 771 | "39 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 772 | "40 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 773 | "41 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 774 | "42 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 775 | "43 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 776 | "44 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 777 | "45 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 778 | "46 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 779 | "47 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 780 | "48 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 781 | "49 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 782 | "50 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 783 | "51 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... \n", 784 | "52 [Amplitude_VH, Intensity_VH, Amplitude_VV, Int... 
" 785 | ] 786 | }, 787 | "metadata": {}, 788 | "output_type": "display_data" 789 | } 790 | ], 791 | "source": [ 792 | "# Set target folder and extract metadata - Change accordingly\n", 793 | "product_path = str(r'D:\\\\MICROS\\\\Sentinel_1\\\\')\n", 794 | "\n", 795 | "input_S1_files =[]\n", 796 | "for item in os.listdir(product_path):\n", 797 | " if item.endswith('zip'):\n", 798 | " input_S1_files.append(item)\n", 799 | " \n", 800 | " \n", 801 | "name, process_level, sensing_date, relevant_orbit, height, width, band_names = ([] for i in range(7))\n", 802 | "\n", 803 | "for i in input_S1_files:\n", 804 | " process_level.append(i.split(\"_\")[2])\n", 805 | " sensing_date.append(i.split(\"_\")[4][0:-7])\n", 806 | " relevant_orbit.append(i.split(\"_\")[-2])\n", 807 | " # Read with snappy\n", 808 | " s1_read = snappy.ProductIO.readProduct(product_path + i)\n", 809 | " name.append(s1_read.getName())\n", 810 | " height.append(s1_read.getSceneRasterHeight())\n", 811 | " width.append(s1_read.getSceneRasterWidth())\n", 812 | " band_names.append(s1_read.getBandNames())\n", 813 | " \n", 814 | " \n", 815 | " \n", 816 | "# width = s1_read.getSceneRasterWidth()\n", 817 | "# height = s1_read.getSceneRasterHeight()\n", 818 | "# name = s1_read.getName()\n", 819 | "# description = s1_read.getDescription()\n", 820 | "# band_names = s1_read.getBandNames()\n", 821 | "# starttime = s1_read.getStartTime()\n", 822 | "# endtime = s1_read.getEndTime()\n", 823 | " \n", 824 | "df_s1_read = pd.DataFrame({'Name': name, 'Process_Level': process_level, 'Sensing_Date': sensing_date, 'Relevant_Orbit': relevant_orbit, 'Height': height, 'Width': width, 'Band_Names': band_names})\n", 825 | "display(df_s1_read)" 826 | ] 827 | }, 828 | { 829 | "cell_type": "code", 830 | "execution_count": 5, 831 | "id": "d923ea51", 832 | "metadata": {}, 833 | "outputs": [ 834 | { 835 | "name": "stdout", 836 | "output_type": "stream", 837 | "text": [ 838 | "Reading product S1B_IW_GRDH_1SDV_20200816T035053_20200816T035118_022943_02B8CC_49A2\n", 839 | "\n", 840 | "Subset implemented succesfully...\n", 841 | "Calibration implemented succesfully...\n", 842 | "Apply Orbit File implemented succesfully...\n", 843 | "Terrain Correction implemented succesfully...\n", 844 | "Saving the new product on disk...\n", 845 | "Reading product S1B_IW_GRDH_1SDV_20200920T154915_20200920T154940_023461_02C90A_E965\n", 846 | "\n", 847 | "Subset implemented succesfully...\n", 848 | "Calibration implemented succesfully...\n", 849 | "Apply Orbit File implemented succesfully...\n", 850 | "Terrain Correction implemented succesfully...\n", 851 | "Saving the new product on disk...\n", 852 | "Reading product S1B_IW_GRDH_1SDV_20201015T035055_20201015T035120_023818_02D431_147A\n", 853 | "\n", 854 | "Subset implemented succesfully...\n", 855 | "Calibration implemented succesfully...\n", 856 | "Apply Orbit File implemented succesfully...\n", 857 | "Terrain Correction implemented succesfully...\n", 858 | "Saving the new product on disk...\n", 859 | "Reading product S1B_IW_GRDH_1SDV_20210118T154913_20210118T154938_025211_030072_34BB\n", 860 | "\n", 861 | "Subset implemented succesfully...\n", 862 | "Calibration implemented succesfully...\n", 863 | "Apply Orbit File implemented succesfully...\n", 864 | "Terrain Correction implemented succesfully...\n", 865 | "Saving the new product on disk...\n" 866 | ] 867 | } 868 | ], 869 | "source": [ 870 | "outpath_name = str('D:\\\\MICROS\\\\Sentinel_1\\\\Subsets\\\\Limassol\\\\')\n", 871 | "\n", 872 | "# Create the output folder if not already 
exist\n", 873 | "if not(os.path.exists(outpath_name) and os.path.isdir(outpath_name)):\n", 874 | " os.makedirs(outpath_name)\n", 875 | "\n", 876 | "\n", 877 | "# Iterate through the product list \n", 878 | "for i in input_S1_files[49:]:\n", 879 | " \n", 880 | " s1_read = snappy.ProductIO.readProduct(product_path + i) # read the product\n", 881 | " name = s1_read.getName()\n", 882 | " output = str(outpath_name) + str(name)\n", 883 | " print(f'Reading product {name}\\n')\n", 884 | " \n", 885 | " subset = snap.subset(s1_read) # Subset Operator - snappy \n", 886 | "\n", 887 | "\n", 888 | " cal = snap.calibration(subset) # Calibration \n", 889 | " orbit = snap.applyorbitfile(cal) # Apply Orbit File\n", 890 | " terrain = snap.terraincorrection(orbit) # Terrain Correction\n", 891 | "\n", 892 | " snappy.ProductIO.writeProduct(terrain, output+\".tif\", 'GeoTIFF') \n", 893 | " print(f'Saving the new product on disk...')\n", 894 | " time.sleep(4)\n", 895 | " s1_read.closeIO()\n", 896 | " del s1_read, subset,cal,orbit, terrain\n", 897 | " gc.collect()\n", 898 | " \n" 899 | ] 900 | }, 901 | { 902 | "cell_type": "code", 903 | "execution_count": null, 904 | "id": "e712cdd9", 905 | "metadata": {}, 906 | "outputs": [], 907 | "source": [ 908 | "product_path = str(r'D:\\\\MICROS\\\\Ship_detection\\\\Raw\\\\')\n", 909 | "\n", 910 | "input_S1_files = sorted(os.listdir(product_path))\n", 911 | "\n", 912 | "for i in input_S1_files:\n", 913 | " s1_read = snappy.ProductIO.readProduct(product_path + i) # read the product\n", 914 | " \n", 915 | " width = s1_read.getSceneRasterWidth()\n", 916 | " height = s1_read.getSceneRasterHeight()\n", 917 | " name = s1_read.getName()\n", 918 | " description = s1_read.getDescription()\n", 919 | " band_names = s1_read.getBandNames()\n", 920 | " starttime = s1_read.getStartTime()\n", 921 | " endtime = s1_read.getEndTime()\n", 922 | " \n", 923 | " print(\"Product: %s, %s\" % (name, description))\n", 924 | " print(\"Raster size: %d x %d pixels\" % (width, height))\n", 925 | " print(\"Start time: \" + str(s1_read.getStartTime()))\n", 926 | " print(\"End time: \" + str(s1_read.getEndTime()))\n", 927 | " print(\"Bands: %s\" % (list(band_names)))\n", 928 | " print('\\n')\n", 929 | " time.sleep(5)\n", 930 | " s1_read.closeIO()\n", 931 | " s1_read = None\n", 932 | "# gc.collect()" 933 | ] 934 | }, 935 | { 936 | "cell_type": "code", 937 | "execution_count": 10, 938 | "id": "d0e7625b", 939 | "metadata": {}, 940 | "outputs": [ 941 | { 942 | "name": "stdout", 943 | "output_type": "stream", 944 | "text": [ 945 | "S1B_IW_GRDH_1SDV_20200920T154915_20200920T154940_023461_02C90A_E965.zip\n", 946 | "S1B_IW_GRDH_1SDV_20201015T035055_20201015T035120_023818_02D431_147A.zip\n", 947 | "S1B_IW_GRDH_1SDV_20210118T154913_20210118T154938_025211_030072_34BB.zip\n" 948 | ] 949 | } 950 | ], 951 | "source": [ 952 | "for i in input_S1_files[-3:]:\n", 953 | " print(i)" 954 | ] 955 | } 956 | ], 957 | "metadata": { 958 | "kernelspec": { 959 | "display_name": "Python 3", 960 | "language": "python", 961 | "name": "python3" 962 | }, 963 | "language_info": { 964 | "codemirror_mode": { 965 | "name": "ipython", 966 | "version": 3 967 | }, 968 | "file_extension": ".py", 969 | "mimetype": "text/x-python", 970 | "name": "python", 971 | "nbconvert_exporter": "python", 972 | "pygments_lexer": "ipython3", 973 | "version": "3.7.10" 974 | } 975 | }, 976 | "nbformat": 4, 977 | "nbformat_minor": 5 978 | } 979 | --------------------------------------------------------------------------------