├── .gitignore
├── test_TIFF.tif
├── README.md
├── ReprojectionParameters.xml
├── Import-VectorOp.txt
├── BandMathsOp.txt
├── sampleGlobalParameters.py
├── Land-Sea-MaskOp.txt
├── CollocateOp.txt
├── SubsetOp.txt
├── CalibrationOp.txt
├── ResampleOp.txt
├── ReprojectOp.txt
├── Terrain-CorrectionOp.txt
└── myScripts.py
/.gitignore:
--------------------------------------------------------------------------------
1 | /test_TIFF.tif.aux.xml
2 |
--------------------------------------------------------------------------------
/test_TIFF.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kedziorm/mySNAPscripts/HEAD/test_TIFF.tif
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # mySNAPscripts
Python scripts which I use in my work with the ESA SNAP tool
3 |
--------------------------------------------------------------------------------
/ReprojectionParameters.xml:
--------------------------------------------------------------------------------
1 |
2 | GEOGCS["WGS84(DD)",
3 | DATUM["WGS84",
4 | SPHEROID["WGS84", 6378137.0, 298.257223563]],
5 | PRIMEM["Greenwich", 0.0],
6 | UNIT["degree", 0.017453292519943295],
7 | AXIS["Geodetic longitude", EAST],
8 | AXIS["Geodetic latitude", NORTH]]
9 | Nearest
10 | 2.5
11 | 1.0
12 | 21.17044238633425
13 | 51.50199823054914
14 | 0.0
15 | 0.25
16 | 0.25
17 | 5
18 | 2
19 | false
20 | NaN
21 | true
22 | false
23 |
--------------------------------------------------------------------------------
/Import-VectorOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Import-Vector
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 | Usage:
9 | gpt Import-Vector [options]
10 |
11 | Description:
12 | Imports a shape file into a product
13 |
14 |
15 | Source Options:
16 | -Ssource= Sets source 'source' to .
17 | This is a mandatory source.
18 |
19 | Parameter Options:
20 | -PseparateShapes= Sets parameter 'separateShapes' to .
21 | Default value is 'true'.
22 | -PvectorFile= Sets parameter 'vectorFile' to .
23 |
24 | Graph XML Format:
25 |
26 | 1.0
27 |
28 | Import-Vector
29 |
30 | ${source}
31 |
32 |
33 | file
34 | boolean
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/BandMathsOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h BandMaths
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt BandMaths [options]
11 |
12 | Description:
13 | Create a product with one or more bands using mathematical expressions.
14 |
15 |
16 | Graph XML Format:
17 |
18 | 1.0
19 |
20 | BandMaths
21 |
22 | ${sourceProducts}
23 |
24 |
25 |
26 |
27 | string
28 | string
29 | string
30 | string
31 | string
32 | string
33 | double
34 | integer
35 | float
36 | float
37 | double
38 | double
39 |
40 | <.../>
41 |
42 |
43 |
44 | string
45 | string
46 | string
47 |
48 | <.../>
49 |
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/sampleGlobalParameters.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Below are the sample global parameters for myScripts.py

import os
from os.path import expanduser

# Home directory -- defined here so this file also works when imported on
# its own (myScripts.py defines the same 'home' before using these params).
home = expanduser("~")

########################################################
##### Global parameters:
# Area - sample polygon below should describe part of the Eastern Poland
# Derlo station coordinates: 52.16950 23.36930
myExtent = [23.00, 52.00, 24.00, 52.25]
constBorder = 0.25
# WKT polygon over the extent. The closing point repeats the first point
# exactly ({0:.2f} {1:.2f}); the previous version formatted the closing
# latitude as int(), which would leave the ring unclosed (invalid WKT)
# whenever myExtent[1] is not a whole number.
wkt = "POLYGON(({0:.2f} {1:.2f},{2:.2f} {1:.2f},{2:.2f} {3:.2f},{0:.2f} {3:.2f},{0:.2f} {1:.2f}))".format(myExtent[0], myExtent[1], myExtent[2], myExtent[3])
createMAPparams = [myExtent[0] - constBorder, myExtent[1] - constBorder, myExtent[2] + constBorder, myExtent[3]]

## SMOS pixel size: Pixel Size = (0.260303676128387,-0.314965192009421)
SMOSPS = 28963
SentinelPS = 10.0
# [english X label, english Y label, polish X label, polish Y label]
histLabels = ["Values", "Frequency", "Wartości", "Liczebność"]
getTerrainCorrected_DEM = "SRTM 3Sec" # Possible also: "ASTER 1sec GDEM", "SRTM 1Sec Grid", "ACE30"
getTerrainCorrected_demResamplingMethod = "BILINEAR_INTERPOLATION" # Possible also: "NEAREST_NEIGHBOUR", "CUBIC_CONVOLUTION", "BICUBIC_INTERPOLATION"
getTerrainCorrected_imgResamplingMethod = "BILINEAR_INTERPOLATION" # Possible also: "NEAREST_NEIGHBOUR", "CUBIC_CONVOLUTION", "BICUBIC_INTERPOLATION"
getCollocatedResamplingType = "NEAREST_NEIGHBOUR" # Possible also: "BILINEAR_INTERPOLATION", "CUBIC_CONVOLUTION", "BISINC_CONVOLUTION"
getReprojectedResampling = "Nearest"

SentinelPath = os.path.join(home, "Testy")
histogramDirectory = os.path.join(home,"Dropbox/DyzagregacjaSMOS/histograms")
maps = os.path.join(home,"Dropbox/DyzagregacjaSMOS/maps")
########################################################
--------------------------------------------------------------------------------
/Land-Sea-MaskOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Land-Sea-Mask
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt Land-Sea-Mask [options]
11 |
12 | Description:
13 | Creates a bitmask defining land vs ocean.
14 |
15 |
16 | Source Options:
17 | -Ssource= Sets source 'source' to .
18 | This is a mandatory source.
19 |
20 | Parameter Options:
21 | -PbyPass= Sets parameter 'byPass' to .
22 | Default value is 'false'.
23 | -Pgeometry= Sets parameter 'geometry' to .
24 | -PinvertGeometry= Sets parameter 'invertGeometry' to .
25 | Default value is 'false'.
26 | -PlandMask= Sets parameter 'landMask' to .
27 | Default value is 'true'.
28 | -PsourceBands= The list of source bands.
29 | -PuseSRTM= Sets parameter 'useSRTM' to .
30 | Default value is 'true'.
31 |
32 | Graph XML Format:
33 |
34 | 1.0
35 |
36 | Land-Sea-Mask
37 |
38 | ${source}
39 |
40 |
41 | string,string,string,...
42 | boolean
43 | boolean
44 | string
45 | boolean
46 | boolean
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/CollocateOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Collocate
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt Collocate [options]
11 |
12 | Description:
13 | Collocates two products based on their geo-codings.
14 |
15 |
16 | Source Options:
17 | -Smaster= The source product which serves as master.
18 | This is a mandatory source.
19 | -Sslave= The source product which serves as slave.
20 | This is a mandatory source.
21 |
22 | Parameter Options:
23 | -PmasterComponentPattern= The text pattern to be used when renaming master components.
24 | Default value is '${ORIGINAL_NAME}_M'.
25 | -PrenameMasterComponents= Whether or not components of the master product shall be renamed in the target product.
26 | Default value is 'true'.
27 | -PrenameSlaveComponents= Whether or not components of the slave product shall be renamed in the target product.
28 | Default value is 'true'.
29 | -PresamplingType= The method to be used when resampling the slave grid onto the master grid.
30 | Default value is 'NEAREST_NEIGHBOUR'.
31 | -PslaveComponentPattern= The text pattern to be used when renaming slave components.
32 | Default value is '${ORIGINAL_NAME}_S'.
33 | -PtargetProductType= The product type string for the target product (informal)
34 | Default value is 'COLLOCATED'.
35 |
36 | Graph XML Format:
37 |
38 | 1.0
39 |
40 | Collocate
41 |
42 | ${master}
43 | ${slave}
44 |
45 |
46 | string
47 | boolean
48 | boolean
49 | string
50 | string
51 | resamplingType
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/SubsetOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Subset
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 | Usage:
9 | gpt Subset [options]
10 |
11 | Description:
12 | Create a spatial and/or spectral subset of a data product.
13 |
14 |
15 | Source Options:
16 | -Ssource= The source product to create the subset from.
17 | This is a mandatory source.
18 |
19 | Parameter Options:
20 | -PcopyMetadata= Whether to copy the metadata of the source product.
21 | Default value is 'false'.
22 | -PfullSwath= Forces the operator to extend the subset region to the full swath.
23 | Default value is 'false'.
24 | -PgeoRegion= The subset region in geographical coordinates using WKT-format,
25 | e.g. POLYGON(( , , ..., ))
26 | (make sure to quote the option due to spaces in ).
27 | If not given, the entire scene is used.
28 | -Pregion= The subset region in pixel coordinates.
29 | Use the following format: ,,,
30 | If not given, the entire scene is used. The 'geoRegion' parameter has precedence over this parameter.
31 | -PsourceBands= The list of source bands.
32 | -PsubSamplingX= The pixel sub-sampling step in X (horizontal image direction)
33 | Default value is '1'.
34 | -PsubSamplingY= The pixel sub-sampling step in Y (vertical image direction)
35 | Default value is '1'.
36 | -PtiePointGridNames= The comma-separated list of names of tie-point grids to be copied.
37 | If not given, all bands are copied.
38 |
39 | Graph XML Format:
40 |
41 | 1.0
42 |
43 | Subset
44 |
45 | ${source}
46 |
47 |
48 | string,string,string,...
49 | rectangle
50 | geometry
51 | int
52 | int
53 | boolean
54 | string,string,string,...
55 | boolean
56 |
57 |
58 |
59 |
--------------------------------------------------------------------------------
/CalibrationOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Calibration
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt Calibration [options]
11 |
12 | Description:
13 | Calibration of products
14 |
15 |
16 | Source Options:
17 | -Ssource= Sets source 'source' to .
18 | This is a mandatory source.
19 |
20 | Parameter Options:
21 | -PauxFile= The auxiliary file
22 | Value must be one of 'Latest Auxiliary File', 'Product Auxiliary File', 'External Auxiliary File'.
23 | Default value is 'Latest Auxiliary File'.
24 | -PcreateBetaBand= Create beta0 virtual band
25 | Default value is 'false'.
26 | -PcreateGammaBand= Create gamma0 virtual band
27 | Default value is 'false'.
28 | -PexternalAuxFile= The antenna elevation pattern gain auxiliary data file.
29 | -PoutputBetaBand= Output beta0 band
30 | Default value is 'false'.
31 | -PoutputGammaBand= Output gamma0 band
32 | Default value is 'false'.
33 | -PoutputImageInComplex= Output image in complex
34 | Default value is 'false'.
35 | -PoutputImageScaleInDb= Output image scale
36 | Default value is 'false'.
37 | -PoutputSigmaBand= Output sigma0 band
38 | Default value is 'true'.
39 | -PselectedPolarisations= The list of polarisations
40 | -PsourceBands= The list of source bands.
41 |
42 | Graph XML Format:
43 |
44 | 1.0
45 |
46 | Calibration
47 |
48 | ${source}
49 |
50 |
51 | string,string,string,...
52 | string
53 | file
54 | boolean
55 | boolean
56 | boolean
57 | boolean
58 | string,string,string,...
59 | boolean
60 | boolean
61 | boolean
62 |
63 |
64 |
65 |
--------------------------------------------------------------------------------
/ResampleOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Resample
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt Resample [options]
11 |
12 | Description:
13 | Resampling of a multi-size source product to a single-size target product.
14 |
15 |
16 | Source Options:
17 | -SsourceProduct= The source product which is to be resampled.
18 | This is a mandatory source.
19 |
20 | Parameter Options:
21 | -Pdownsampling= The method used for aggregation (downsampling to a coarser resolution).
22 | Value must be one of 'First', 'Min', 'Max', 'Mean', 'Median'.
23 | Default value is 'First'.
24 | -PflagDownsampling= The method used for aggregation (downsampling to a coarser resolution) of flags.
25 | Value must be one of 'First', 'FlagAnd', 'FlagOr', 'FlagMedianAnd', 'FlagMedianOr'.
26 | Default value is 'First'.
27 | -PreferenceBand= The name of the reference band. All other bands will be re-sampled to match its size and resolution. Either this or targetResolutionor targetWidth and targetHeight must be set.
28 | -PresampleOnPyramidLevels= This setting will increase performance when viewing the image, but accurate resamplings are only retrieved when zooming in on a pixel.
29 | Default value is 'true'.
30 | -PtargetHeight= The height that all bands of the target product shall have. If this is set, targetWidth must be set, too. Either this and targetWidth or referenceBand or targetResolution must be set.
31 | -PtargetResolution= The resolution that all bands of the target product shall have. The same value will be applied to scale image widths and heights. Either this or referenceBand or targetwidth and targetHeight must be set.
32 | -PtargetWidth= The width that all bands of the target product shall have. If this is set, targetHeight must be set, too. Either this and targetHeight or referenceBand or targetResolution must be set.
33 | -Pupsampling= The method used for interpolation (upsampling to a finer resolution).
34 | Value must be one of 'Nearest', 'Bilinear', 'Bicubic'.
35 | Default value is 'Nearest'.
36 |
37 | Graph XML Format:
38 |
39 | 1.0
40 |
41 | Resample
42 |
43 | ${sourceProduct}
44 |
45 |
46 | string
47 | integer
48 | integer
49 | integer
50 | string
51 | string
52 | string
53 | boolean
54 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/ReprojectOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Reproject
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 | Usage:
9 | gpt Reproject [options]
10 |
11 | Description:
12 | Reprojection of a source product to a target Coordinate Reference System.
13 |
14 |
15 | Source Options:
16 | -ScollocateWith= The source product will be collocated with this product.
17 | This is an optional source.
18 | -Ssource= The product which will be reprojected.
19 | This is a mandatory source.
20 |
21 | Parameter Options:
22 | -PaddDeltaBands= Whether to add delta longitude and latitude bands.
23 | Default value is 'false'.
24 | -Pcrs= A text specifying the target Coordinate Reference System, either in WKT or as an authority code. For appropriate EPSG authority codes see (www.epsg-registry.org). AUTO authority can be used with code 42001 (UTM), and 42002 (Transverse Mercator) where the scene center is used as reference. Examples: EPSG:4326, AUTO:42001
25 | -Peasting= The easting of the reference pixel.
26 | -PelevationModelName= The name of the elevation model for the orthorectification. If not given tie-point data is used.
27 | -Pheight= The height of the target product.
28 | -PincludeTiePointGrids= Whether tie-point grids should be included in the output product.
29 | Default value is 'true'.
30 | -PnoDataValue= The value used to indicate no-data.
31 | -Pnorthing= The northing of the reference pixel.
32 | -Porientation= The orientation of the output product (in degree).
33 | Valid interval is [-360,360].
34 | Default value is '0'.
35 | -Porthorectify= Whether the source product should be orthorectified. (Not applicable to all products)
36 | Default value is 'false'.
37 | -PpixelSizeX= The pixel size in X direction given in CRS units.
38 | -PpixelSizeY= The pixel size in Y direction given in CRS units.
39 | -PreferencePixelX= The X-position of the reference pixel.
40 | -PreferencePixelY= The Y-position of the reference pixel.
41 | -Presampling= The method used for resampling of floating-point raster data.
42 | Value must be one of 'Nearest', 'Bilinear', 'Bicubic'.
43 | Default value is 'Nearest'.
44 | -PtileSizeX= The tile size in X direction.
45 | -PtileSizeY= The tile size in Y direction.
46 | -Pwidth= The width of the target product.
47 | -PwktFile= A file which contains the target Coordinate Reference System in WKT format.
48 |
49 | Graph XML Format:
50 |
51 | 1.0
52 |
53 | Reproject
54 |
55 | ${source}
56 | ${collocateWith}
57 |
58 |
59 | file
60 | string
61 | string
62 | double
63 | double
64 | double
65 | double
66 | double
67 | double
68 | double
69 | integer
70 | integer
71 | integer
72 | integer
73 | boolean
74 | string
75 | double
76 | boolean
77 | boolean
78 |
79 |
80 |
81 |
--------------------------------------------------------------------------------
/Terrain-CorrectionOp.txt:
--------------------------------------------------------------------------------
This is the result of typing the following command:
2 |
3 | ~/snap/bin/gpt -h Terrain-Correction
4 |
5 | You can get information about any SNAP operator using this command
6 |
7 | ==================================================================
8 |
9 | Usage:
10 | gpt Terrain-Correction [options]
11 |
12 | Description:
13 | RD method for orthorectification
14 |
15 |
16 | Source Options:
17 | -Ssource= Sets source 'source' to .
18 | This is a mandatory source.
19 |
20 | Parameter Options:
21 | -PapplyRadiometricNormalization= Sets parameter 'applyRadiometricNormalization' to .
22 | Default value is 'false'.
23 | -PauxFile= The auxiliary file
24 | Value must be one of 'Latest Auxiliary File', 'Product Auxiliary File', 'External Auxiliary File'.
25 | Default value is 'Latest Auxiliary File'.
26 | -PdemName= The digital elevation model.
27 | Default value is 'SRTM 3Sec'.
28 | -PdemResamplingMethod= Sets parameter 'demResamplingMethod' to .
29 | Default value is 'BILINEAR_INTERPOLATION'.
30 | -PexternalAuxFile= The antenne elevation pattern gain auxiliary data file.
31 | -PexternalDEMApplyEGM= Sets parameter 'externalDEMApplyEGM' to .
32 | Default value is 'true'.
33 | -PexternalDEMFile= Sets parameter 'externalDEMFile' to .
34 | -PexternalDEMNoDataValue= Sets parameter 'externalDEMNoDataValue' to .
35 | Default value is '0'.
36 | -PimgResamplingMethod= Sets parameter 'imgResamplingMethod' to .
37 | Default value is 'BILINEAR_INTERPOLATION'.
38 | -PincidenceAngleForGamma0= Sets parameter 'incidenceAngleForGamma0' to .
39 | Value must be one of 'Use incidence angle from Ellipsoid', 'Use local incidence angle from DEM', 'Use projected local incidence angle from DEM'.
40 | Default value is 'Use projected local incidence angle from DEM'.
41 | -PincidenceAngleForSigma0= Sets parameter 'incidenceAngleForSigma0' to .
42 | Value must be one of 'Use incidence angle from Ellipsoid', 'Use local incidence angle from DEM', 'Use projected local incidence angle from DEM'.
43 | Default value is 'Use projected local incidence angle from DEM'.
44 | -PmapProjection= The coordinate reference system in well known text format
45 | Default value is 'WGS84(DD)'.
46 | -PnodataValueAtSea= Mask the sea with no data value (faster)
47 | Default value is 'true'.
48 | -PoutputComplex= Sets parameter 'outputComplex' to .
49 | Default value is 'false'.
50 | -PpixelSpacingInDegree= The pixel spacing in degrees
51 | Default value is '0'.
52 | -PpixelSpacingInMeter= The pixel spacing in meters
53 | Default value is '0'.
54 | -PsaveBetaNought= Sets parameter 'saveBetaNought' to .
55 | Default value is 'false'.
56 | -PsaveDEM= Sets parameter 'saveDEM' to .
57 | Default value is 'false'.
58 | -PsaveGammaNought= Sets parameter 'saveGammaNought' to .
59 | Default value is 'false'.
60 | -PsaveIncidenceAngleFromEllipsoid= Sets parameter 'saveIncidenceAngleFromEllipsoid' to .
61 | Default value is 'false'.
62 | -PsaveLatLon= Sets parameter 'saveLatLon' to .
63 | Default value is 'false'.
64 | -PsaveLocalIncidenceAngle= Sets parameter 'saveLocalIncidenceAngle' to .
65 | Default value is 'false'.
66 | -PsaveProjectedLocalIncidenceAngle= Sets parameter 'saveProjectedLocalIncidenceAngle' to .
67 | Default value is 'false'.
68 | -PsaveSelectedSourceBand= Sets parameter 'saveSelectedSourceBand' to .
69 | Default value is 'true'.
70 | -PsaveSigmaNought= Sets parameter 'saveSigmaNought' to .
71 | Default value is 'false'.
72 | -PsourceBands= The list of source bands.
73 |
74 | Graph XML Format:
75 |
76 | 1.0
77 |
78 | Terrain-Correction
79 |
80 | ${source}
81 |
82 |
83 | string,string,string,...
84 | string
85 | file
86 | double
87 | boolean
88 | string
89 | string
90 | double
91 | double
92 | string
93 | boolean
94 | boolean
95 | boolean
96 | boolean
97 | boolean
98 | boolean
99 | boolean
100 | boolean
101 | boolean
102 | boolean
103 | boolean
104 | boolean
105 | string
106 | string
107 | string
108 | file
109 |
110 |
111 |
112 |
--------------------------------------------------------------------------------
/myScripts.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | #############################################################
5 | # AUTHOR: Mateusz Kędzior
6 | # PURPOSE: Python scripts to perform Sentinel-1 processing data using ESA SNAP
7 | # PREREQUISITES:
8 | # - install ESA SNAP, go to terminal and type:
9 | # cd ~/.snap/snap-python/snappy
10 | # sudo /usr/bin/python setup.py install
11 | # - install HDF5 libraries for Java:
12 | # sudo apt install libjhdf5-jni libjhdf5-java
13 | # - java_max_mem setting in ~/.snap/snap-python/snappy/snappy.ini
14 | # is not interpreted by snappy
15 | # so I set _JAVA_OPTIONS in the first lines of scripts to use 4 GB of RAM
16 | # - to avoid errors jhdf5 errors as described here: http://forum.step.esa.int/t/snappy-hdf5-error/867/3
17 | # execute following lines:
18 | # SNAP_HOME=~/snap
19 | # cd $SNAP_HOME/snap/modules/lib/x86_64
20 | # ln -s ../amd64/libjhdf.so
21 | # ln -s ../amd64/libjhdf5.so
22 |
23 | # DO NOT forget that snappy for ESA SNAP is not Google library!!
24 | # API SNAP documentation:
25 | # http://step.esa.int/docs/v3.0/apidoc/desktop/
26 | #############################################################
27 |
# Prefixes added to file names:
# calibrated (calculated sigma), 'Subset', 'Masked', 'Terrain Corrected',
# 'soil moisture index', 'collocated' and 'histogram' files
prefixes = ["cal", "sub", "msk", "TC", "SMI", "_coll_", "_hist_"]

import os, sys
# NOTE(review): reload(sys) + setdefaultencoding are Python 2 only -- both
# are gone in Python 3, so this module currently requires Python 2.
reload(sys)
sys.setdefaultencoding('utf8')

import snappy
from snappy import ProductIO
#from snappy import GPF
from snappy import jpy

from os.path import expanduser
home = expanduser("~")

# Set below-normal priority, so that computer remain responsive during computations
# You can check how to do that on non-Unix like machines at:
# http://stackoverflow.com/questions/1023038/change-process-priority-in-python-cross-platform
os.nice(20)

# To avoid RuntimeError: java.lang.OutOfMemoryError: Java heap space
print(("Current _JAVA_OPTIONS: '" + os.environ.get('_JAVA_OPTIONS', 'Not Set')))
print("will be changed to '-Xmx4096m' to avoid OutOfMemoryError")
os.environ["_JAVA_OPTIONS"] = "-Xmx4096m"
# NOTE(review): os.system runs 'export' in a throwaway child shell, so it
# cannot affect this process -- the os.environ assignment above is what
# actually matters (and only if the JVM has not been started yet; snappy is
# imported earlier, verify the option still takes effect).
os.system('export _JAVA_OPTIONS=-Xmx4096m')
# To enable Java core dumping:
# NOTE(review): same caveat -- ulimit in a child shell does not change the
# limit of this process.
os.system('ulimit -c unlimited')

# Sample file used in testing:
snappyPath = os.path.join(home, ".snap/snap-python/snappy")
testdataPath = os.path.join(snappyPath,"testdata")
sampleData = os.path.join(testdataPath, "MER_FRS_L1B_SUBSET.dim")

########################################################
##### Global parameters:
# output files format: [file extension, SNAP writer format name]
OutputType = [".dim", "BEAM-DIMAP"]
SecondaryOutputType = [".tif", "GeoTIFF"]
log_path = os.path.join(os.path.expanduser("~"),'Dropbox/DyzagregacjaSMOS/logi.log')
pathToSaveStats = os.path.join(home,"Dropbox/DyzagregacjaSMOS/BandStatistics.csv")
# pixel spacing (metres -- presumably; TODO confirm against Terrain-Correction usage)
destinationPS = float(100)
# TODO: Use smaller shapefile? Subset shapefile?
sampleSHP = os.path.join(home,"Dropbox/rzeki/waters_MPHP.shp")

# Rest of global params in sampleGlobalParameters.py
########################################################
77 |
def isSNAPprod(prod):
    """Tell whether *prod* is an ESA SNAP Product instance.

    The check is done on the textual type name, so it works without
    importing snappy in this function.
    """
    typeName = str(type(prod))
    return 'snap.core.datamodel.Product' in typeName
80 |
def readProd(file1):
    """Open *file1* as a SNAP product.

    Accepts either an already-opened SNAP product (returned unchanged) or a
    path to a product file. Directories and missing paths are logged and
    yield None.
    """
    import os
    import snappy
    if isSNAPprod(file1):
        # Caller already handed us a SNAP product -- nothing to read.
        return file1
    if os.path.isfile(file1):
        return snappy.ProductIO.readProduct(file1)
    if os.path.exists(file1):
        writeToLog("\t".join(["readProduct", str(file1), "is not a file!!!"]))
    else:
        writeToLog("\t".join(["readProduct", str(file1), "does *NOT* exists"]))
    return None
95 |
def isBandInProd(bandName, product, onlyVerify = False):
    """Return True when *bandName* exists in *product*'s band list.

    On a miss the available bands are logged; *onlyVerify* downgrades the
    log severity from ERROR to INFO.
    """
    if bandName in product.getBandNames():
        return True
    severity = "INFO" if onlyVerify else "ERROR"
    writeToLog("\t".join(["isBandInProd", bandName + " not in the " + product.getName()]), severity)
    writeToLog("\t".join(["isBandInProd", "Available bands:", "{0}".format(product.getBandNames())]), severity)
    return False
104 |
def getStatsAndHist(inputFile, directorySuffix = None):
    """Compute per-band statistics for *inputFile* and save two histogram
    sets: one labelled in English, one in Polish.

    *directorySuffix* is passed through to choose the histogram output
    subdirectory.
    """
    getAllBandsStats(inputFile)
    # English axis labels
    saveHistForFiles(inputFile, histLabels[0], histLabels[1], None, "eng", directorySuffix)
    # Polish axis labels -- read from histLabels[2]/[3] instead of
    # re-hardcoding the strings, so all label text lives in one place
    # (histLabels is defined with four entries in sampleGlobalParameters.py)
    saveHistForFiles(inputFile, histLabels[2], histLabels[3], None, "pl", directorySuffix)
109 |
def ExecuteAndLog(command):
    """Log *command* at INFO level and evaluate it, returning the result.

    NOTE(review): eval() executes arbitrary Python -- *command* must never
    come from untrusted input.
    """
    cmdName = command[:command.index("(")]
    writeToLog("\t".join([cmdName, command]), "info")
    return eval(command)
115 |
def ExecLogStats(command, onlyStats=True, histPath="dyzagregowane"):
    """Evaluate *command* (via ExecuteAndLog), log the resulting product's
    size and extent, then compute its band statistics.

    When *onlyStats* is False, histograms are saved as well (into the
    *histPath* subdirectory). Returns the evaluated product.
    """
    cmdName = command[0:command.index("(")]
    product = ExecuteAndLog(command)
    # str(product): 'product' is a SNAP Product object, not a string --
    # passing it to str.join() directly would raise TypeError.
    logTxt2 = "\t".join([cmdName, str(product), get_whole_Product_size(product), getExtentStr(product)])
    writeToLog(logTxt2, "info")
    if onlyStats:
        getAllBandsStats(product)
    else:
        getStatsAndHist(product, histPath)
    return product
126 |
def getAllowedFormats():
    """Print the product format names SNAP can write and read.

    Typical writer formats: GeoTIFF-BigTIFF, HDF5, Snaphu, BEAM-DIMAP,
    GeoTIFF+XML, PolSARPro, NetCDF-CF, NetCDF-BEAM, ENVI, JP2,
    Generic Binary BSQ, Gamma, CSV, NetCDF4-CF, GeoTIFF, NetCDF4-BEAM.
    """
    ProductIOPlugInManager = jpy.get_type(
        'org.esa.snap.core.dataio.ProductIOPlugInManager')
    manager = ProductIOPlugInManager.getInstance()

    def collectNames(plugins):
        # Java-style iterator: hasNext() / next()
        names = ""
        while plugins.hasNext():
            names = names + next(plugins).getFormatNames()[0] + ","
        return names

    print(("Allowed formats to write: " + collectNames(manager.getAllWriterPlugIns())))
    print(("Allowed formats to read: " + collectNames(manager.getAllReaderPlugIns())))
150 |
def simplifySMOSandSentinelfileName(basename):
    """Shorten a SMOS or Sentinel-1 file name to '<Sat>_<YYYYMMDD><ext>'.

    Up to two trailing extensions are preserved (e.g. '.DBL.nc').
    Names matching neither pattern are returned unchanged, apart from the
    removal of a '":Soil_Moisture' band suffix.

    Tested on the following names:
      'ext-SM_OPER_MIR_CLF33A_20160229T000000_20160302T235959_300_002_7_1.DBL.nc'
      'S1A_IW_GRDH_1SDV_20160301T161042_20160301T161107_010178_00F048_FA33.zip'
      'S1A_IW_GRDH_1SDV_20160301T161042_20160301T1'
      'ext-SM_OPER_MIR_CLF33A_20160229T000000_20160302T235959_300_002_7_1'
      'ThisShouldbeNotChanged'
    """
    import re
    # Keep a possible double extension, e.g. '.DBL' + '.nc'
    extension = "{0}{1}".format(os.path.splitext(os.path.splitext(basename)[0])[1], os.path.splitext(basename)[1])
    basename = os.path.splitext(os.path.splitext(basename)[0])[0]
    # renamed from 'dict' -- do not shadow the builtin
    patterns = {'Sent': r'S1A.*_(20\d{6}).*', 'SMOS': r'ext-SM_.*_(20\d{6}).*'}
    for name, pattern in patterns.items():
        matches = re.findall(pattern, basename)
        if matches:
            basename = "{0}_{1}".format(name, matches[0])
    basename = basename.replace('":Soil_Moisture', "")
    return "{0}{1}".format(basename, extension)
169 |
def newFilepath(Filepath, prefix, limited=True):
    """Build an output path '<dir>/<prefix>/<prefix>_<simplifiedName>.dim'.

    The simplified base name is truncated to 180 characters when *limited*
    is True; otherwise its extension is stripped instead. The target
    directory is created when missing.
    """
    directory = os.path.join(os.path.dirname(Filepath), prefix)
    if not os.path.exists(directory):
        os.makedirs(directory)
    simplified = simplifySMOSandSentinelfileName(os.path.basename(Filepath))
    if limited:
        simplified = simplified[0:180]
    else:
        simplified = os.path.splitext(simplified)[0]
    return os.path.join(directory, "_".join([prefix, simplified]) + OutputType[0])
178 |
def getDateFromFileName(FileName):
    """Extract a 'YYYYMMDD' date string from *FileName*.

    Prefers a compact 8-digit run; when none is found, or the 8-digit run
    is not a valid date, falls back to a 'YYYY?MM?DD' form (any single
    separator) and strips literal dots from it.

    Raises IndexError when the name contains no recognizable date.
    """
    import re
    compact = re.findall(r'(\d{8})', FileName)
    if compact and validateDate(compact[0]):
        return compact[0]
    # NOTE(review): '.' matches any separator here, but only literal dots
    # are stripped below -- confirm callers only use dotted dates.
    separated = re.findall(r'(\d{4}.\d{2}.\d{2})', FileName)
    return separated[0].replace(".", "")
191 |
def validateDate(date_text, dateFormat='%Y%m%d'):
    """Return True when *date_text* parses with *dateFormat*, else False."""
    from datetime import datetime
    try:
        datetime.strptime(date_text, dateFormat)
    except ValueError:
        return False
    return True
199 |
def getListOfFiles(folderPath):
    """Return the entry names (files and directories) inside *folderPath*."""
    from os import listdir
    return listdir(folderPath)
203 |
def getListOfDatesFromFileName(folderPath):
    """Return [[date, fileName], ...] for *folderPath*, sorted by date."""
    pairs = [[getDateFromFileName(name), name] for name in getListOfFiles(folderPath)]
    return sorted(pairs)
211 |
def getFilterredListOfDatesAndFiles(folderPath, extension=".nc"):
    """Print 'date<TAB>fileName<TAB>extension' for every file in
    *folderPath* whose extension equals *extension* (default '.nc').
    """
    import os
    for date, fileName in getListOfDatesFromFileName(folderPath):
        if os.path.splitext(fileName)[1] == extension:
            # Python 2 'print' statement replaced with the print() function:
            # the rest of this file already uses print(), and the statement
            # form is a syntax error under Python 3.
            print("\t".join([date, fileName, os.path.splitext(fileName)[1]]))
218 |
def writeFilterredListToFile(folderPath, extension=".nc"):
    """Write 'date<TAB>fileName<TAB>extension' lines into
    <folderPath>/list.txt for every file whose extension equals *extension*.
    """
    import os
    lines = []
    for date, fileName in getListOfDatesFromFileName(folderPath):
        ext = os.path.splitext(fileName)[1]
        if ext == extension:
            lines.append("\t".join([date, fileName, ext]) + "\n")
    # 'with' guarantees the file is closed even if the write fails (the old
    # code leaked the handle on exception); a single join also avoids
    # quadratic string concatenation.
    with open(os.path.join(folderPath, "list.txt"), 'w') as f:
        f.write("".join(lines))
229 |
def unpackAllAndRemoveAllArchives(folderPath, extension="tgz"):
    """Extract every '*.<extension>' archive in *folderPath* into that
    folder and delete each archive after successful extraction.

    NOTE(review): extractall() trusts member paths inside the archive --
    only use this on archives from a trusted source (tar path-traversal).
    """
    import glob
    import os
    import tarfile
    pattern = os.path.join(folderPath, "*." + extension)
    for archive in glob.glob(pattern):
        # Context manager closes the tar file even when extraction raises
        # (the old code leaked the handle in that case), and the archive is
        # only removed after a successful extraction.
        with tarfile.open(archive) as tar:
            tar.extractall(path=folderPath)
        os.remove(archive)
239 |
240 |
def getDateFromSMOSfileName(SMOSfile1):
    """Return the 'YYYYMMDD' date embedded in a SMOS file name.

    Returns False (after logging an error) unless exactly one date is
    found in the base name.
    """
    import os
    import re
    baseName = os.path.basename(SMOSfile1)
    found = re.findall(r"(20\d{6}).*", baseName)
    if len(found) == 1:
        return found[0]
    writeToLog("\t".join(["getDateFromSMOSfileName", "Unable to get date from SMOS file name: {0}".format(baseName)]))
    return False
250 |
251 |
def getNewFileName(SMOSfile1, SMOSfile2, destination, operation, band, filetype, getFullName=False, OutTyp=OutputType[0]):
    # Build <destination>/<operation>/<filetype>_<name1>__<operation>__<name2>_<band><OutTyp>,
    # where each name is the simplified source file name truncated to 20 chars.
    # Creates the <operation> subfolder when missing.
    import os
    shortName1 = os.path.splitext(simplifySMOSandSentinelfileName(os.path.basename(SMOSfile1)))[0][0:20]
    shortName2 = os.path.splitext(simplifySMOSandSentinelfileName(os.path.basename(SMOSfile2)))[0][0:20]
    outDir = os.path.join(destination, operation)
    if not os.path.exists(outDir):
        os.makedirs(outDir)
    parts = [filetype, shortName1, "_" + operation + "_", shortName2, band]
    return os.path.join(outDir, "_".join(parts) + OutTyp)
267 |
268 |
def writeToLog(message, messageType='ERROR', log_path = log_path):
    """Append *message* to the log file at *log_path* with the given severity.

    messageType is 'ERROR' (default), 'WARNING', or anything else for INFO.
    A fresh FileHandler is attached per call and removed afterwards so
    repeated calls do not duplicate lines; the handler is now also closed,
    since removeHandler() alone leaked one open file descriptor per call.
    """
    import logging, os
    logger = logging.getLogger('Dyzagregacja')
    hdlr = logging.FileHandler(log_path)
    formatter = logging.Formatter('%(asctime)s\t%(levelname)s\t%(message)s', datefmt='%H:%M:%S')
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.INFO)
    print("Writing to log file: '{0}'".format(log_path))
    if (messageType.upper() == 'ERROR'):
        logger.error(message)
    elif (messageType.upper() == 'WARNING'):
        logger.warning(message)
    else:
        logger.info(message)
    logger.removeHandler(hdlr)
    # Close explicitly - removeHandler() does not release the file descriptor.
    hdlr.close()
285 |
286 |
def convert_bytes(num):
    """Format a byte count as a human-readable string, e.g. "1.5 KB".

    Previously, values of 1024 TB and above fell off the loop and the
    function silently returned None; they are now reported in PB.
    """
    for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%3.1f %s" % (num, x)
        num /= 1024.0
    # num is already expressed in PB after the last division.
    return "%3.1f %s" % (num, 'PB')
295 |
def get_size(start_path = '.'):
    # Total size (bytes) of every regular file under start_path, recursively.
    total = 0
    for folder, _subdirs, names in os.walk(start_path):
        total += sum(os.path.getsize(os.path.join(folder, name)) for name in names)
    return total
304 |
def getExtInLower(file_path):
    """Return the extension of file_path, lower-cased (including the dot)."""
    import os
    _root, ext = os.path.splitext(file_path)
    return ext.lower()
309 |
def get_whole_Product_size(file_path):
    """Return the human-readable size of a product *including* companion files.

    '.dim' products add the whole '.data' folder; shapefiles add their
    sidecar files; SMOS '.nc' products add the matching '.HDR' header.

    Fix: sidecar sizes were computed with get_size(), which walks a
    *directory* - for a plain file path os.walk() yields nothing, so the
    sidecars always contributed 0 bytes. Single files are now measured with
    file_size_in_bytes().
    """
    total_size = file_size_in_bytes(file_path)
    # According to Python documentation there's no 'switch' or 'select case' in Python:
    # "An if ... elif ... elif ... sequence is a substitute for the switch or case statements found in other languages."
    myExt = getExtInLower(file_path)
    if (myExt == '.dim'):
        # BEAM-DIMAP: the pixel data lives in the companion '.data' directory.
        total_size += get_size(get_data_path(file_path))
    elif (myExt == '.shp'):
        shpExt = ['.dbf', '.prj', '.qix', '.qpj', '.shx']
        for ext in shpExt:
            Shpfile = os.path.splitext(file_path)[0] + ext
            total_size += file_size_in_bytes(Shpfile)
    elif (myExt == '.nc'):
        sFile = (os.path.splitext(file_path)[0]).replace("_1.DBL", "").replace("ext-", "") + '.HDR'
        total_size += file_size_in_bytes(sFile)
    else:
        pass
    return convert_bytes(total_size)
328 |
def get_data_path(file_path):
    # For a '.dim' product (BEAM-DIMAP format) the pixel data lives next to
    # it in a '<name>.data' folder; return that path, logging when missing.
    data_path = os.path.splitext(file_path)[0] + '.data'
    if not os.path.isdir(data_path):
        message = "There is NO following folder '{0}'. Please ensure where data for '{1}' file are".format(data_path, file_path)
        writeToLog("\t".join(["get_data_path", message]))
    return data_path
336 |
def file_size_in_bytes(file_path):
    """Size of *file_path* in bytes as a float; 0.0 when it is not a regular file."""
    if not os.path.isfile(file_path):
        return float(0)
    return float(os.stat(file_path).st_size)
346 |
def file_size(file_path):
    # Human-readable size of a single file.
    size_bytes = file_size_in_bytes(file_path)
    return convert_bytes(size_bytes)
349 |
def removeProduct(file_path):
    # Remove a product file; for BEAM-DIMAP ('.dim') also remove its '.data' folder.
    # TODO: http://forum.step.esa.int/t/how-to-properly-remove-delete-all-related-files-and-folders-esa-snap-product-from-python
    import shutil
    if not os.path.exists(file_path):
        writeToLog("\t".join(["removeProduct", "Trying to remove non-existing file: {0}".format(file_path)]),"warning")
        return
    # Log what is about to be deleted (name and total size) before removal.
    message = "{0}\t{1}\twhole product size\t{2}".format(os.path.basename(file_path), file_size(file_path), get_whole_Product_size(file_path))
    writeToLog("\t".join(["removeProduct", message]),"info")
    if os.path.splitext(file_path)[1] == '.dim':
        dirToRem = get_data_path(file_path)
        shutil.rmtree(dirToRem) # will delete a directory and all its contents.
    os.remove(file_path)
362 |
def isFontAvailable(font = 'Arial'):
    # Checks if font is installed within the system: renders a throwaway
    # figure and watches for matplotlib's font-substitution warning.
    # Sample usage: isFontAvailable('DejaVu Sans') OR isFontAvailable('sans-serif')
    import matplotlib.pyplot as plt
    import warnings
    import tempfile, os

    with warnings.catch_warnings(record=True) as caught:
        target = tempfile.NamedTemporaryFile(delete=True, suffix='.png')
        warnings.simplefilter("always")
        plt.rcParams['font.family'] = font
        plt.text(0, 0, font)
        plt.savefig(target.name)

    # Any recorded warning means the font had to be substituted.
    available = len(caught) == 0
    target.close()
    plt.clf()
    return available
383 |
def getFontForName():
    # Prefer fonts known to cover Polish characters; fall back to generic sans-serif.
    for candidate in ('DejaVu Sans', 'Arial'):
        if isFontAvailable(candidate):
            return candidate
    writeToLog("getFontForName - There's no 'Arial' and 'DejaVu Sans' fonts, returning 'sans-serif'","warning")
    return 'sans-serif'
392 |
def createMap(raster, vmax, vmin, output, shapefile=None, title=None, PDFinsteadOfPng=True):
    """Render *raster* (any GDAL-readable source) as a map image at *output*.

    vmax/vmin: colour-scale limits passed to pcolormesh.
    shapefile: optional shapefile path (without extension) drawn on top.
    title: optional plot title.
    PDFinsteadOfPng: when True, the output extension is rewritten to '.pdf'.
    Map corner coordinates come from the module-level 'createMAPparams'.
    Aborts (returns None) when the raster is wider than 5000 pixels.
    """
    ###################################################################
    # Author: Mateusz Kędzior
    # Creates image from raster and shapefile
    # Based on: https://gist.github.com/jdherman/7434f7431d1cc0350dbe
    ######
    # TODO: Consider rewriting to pyQGIS
    # (http://docs.qgis.org/testing/en/docs/pyqgis_developer_cookbook/composer.html)
    #####
    # Prerequisities:
    # sudo apt-get install python-mpltoolkits.basemap
    ##################################################################
    ## Sample files for testing (comment in gedit: CTRL + M, uncomment: CTRL + SHIFT + M)
    #import os
    #from os.path import expanduser
    #home = expanduser("~")

    #SMOSfile = os.path.join(home,"Dropbox/Dane SMOS CATDS dla Wisły/DA_TC_MIR_CL_33/EXT-SM_RE02_MIR_CLF33A_20101231T000000_20120102T235959_272_001_7/ext-SM_RE02_MIR_CLF33A_20101231T000000_20120102T235959_272_001_7_1.DBL.nc")
    #SMOSraster = 'NETCDF:"' + SMOSfile + '":Soil_Moisture'
    #SentinelRaster = os.path.join(home,"Testy/calibrated_S1A_IW_GRDH_1SDV_20160512T161044_20160512T161.data/Sigma0_VH.img")
    #SMAPfile = os.path.join(home,"SMOSSMAPAquarius/SMAP/2015.04.15/SMAP_L3_SM_AP_20150415_R13080_001.h5")
    #SMAPraster = 'HDF5:"' + SMAPfile + '"://Soil_Moisture_Retrieval_Data/soil_moisture'

    #Aquariusfile = os.path.join(home,"SMOSSMAPAquarius/Aquarius/2015.04.13/Q2015103.L3m_DAY_SOILM_V4.0_rad_sm_1deg")
    #Aquariusraster = 'HDF5:"' + Aquariusfile + '"://l3m_data'
    #vmin = 0
    #vmax = 1
    #output = os.path.join(home,"testy.png")
    #shapefile = os.path.join(home,"Dropbox/mapy/dorzecze_Wisły")
    #createMap(SMOSraster, vmax, vmin, output, shapefile)
    #createMap(SentinelRaster, vmax, vmin, output)
    ###################################################################

    # Set font which contains polish characters:
    import matplotlib
    matplotlib.rc('font', family=getFontForName())

    from osgeo import gdal, osr
    import matplotlib.pyplot as plt
    import numpy as np
    from mpl_toolkits.basemap import Basemap

    # Clear plot
    plt.clf()
    m = None
    cmap = None
    im = None


    # By default, osgeo.gdal returns None on error, and does not normally raise informative exceptions
    gdal.UseExceptions()

    gdata = gdal.Open(raster)
    geo = gdata.GetGeoTransform()

    # SMOS NetCDF rasters carry a scale factor in their metadata; default to 1.
    factor = float(gdata.GetMetadataItem('Soil_Moisture#scale_factor')) if gdata.GetMetadataItem('Soil_Moisture#scale_factor') != None else float(1)

    # Pixel sizes from the geotransform (yres is negative for north-up rasters).
    xres = geo[1]
    yres = geo[5]

    # Get "natural" block size, and total raster XY size.
    band = gdata.GetRasterBand(1)
    block_sizes = band.GetBlockSize()
    x_block_size = block_sizes[0]
    y_block_size = block_sizes[1]
    xsize = band.XSize
    ysize = band.YSize
    writeToLog("\t".join(["createMap", 'x_block_size: {0}, y_block_size: {1}.'.format(x_block_size, y_block_size)]),"info")
    writeToLog("\t".join(["createMap", 'xsize: {0}, ysize: {1}.'.format(xsize, ysize)]),"info")

    # Guard against reading very wide rasters wholly into memory.
    if (xsize < 5000):
        data = gdata.ReadAsArray()
        data = data * factor
        # 'data.min()' might return NaN, so I use 'np.nanmin(data)' instead
        writeToLog("\t".join(["createMap", 'Whole data min: {0}, max: {1}, mean: {2}.'.format(np.nanmin(data), np.nanmax(data), np.nanmean(data))]),"info")
    else:
        writeToLog("\t".join(["createMap", "I was unable to create map, becuase of the xsize"]))
        return
    # Map extent comes from the module-level createMAPparams [lllon, lllat, urlon, urlat].
    m = Basemap(llcrnrlon=createMAPparams[0],llcrnrlat=createMAPparams[1],urcrnrlon=createMAPparams[2],urcrnrlat=createMAPparams[3])
    # draw coastlines, state and country boundaries, edge of map.
    #m.drawcoastlines()
    #m.drawstates()
    #m.drawcountries()
    # draw parallels.
    parallels = np.arange(0.,90,0.25)
    m.drawparallels(parallels,labels=[1,0,0,0],fontsize=11)
    # draw meridians
    meridians = np.arange(0.,90.,0.25)
    # labels - list of 4 values (default [0,0,0,0]) that control whether meridians are labelled where they intersect the left, right, top or bottom of the plot. For example labels=[1,0,0,1] will cause meridians to be labelled where they intersect the left and and bottom of the plot, but not the right and top
    m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=11)
    ny = data.shape[0]; nx = data.shape[1]
    lons, lats = m.makegrid(nx, ny) # get lat/lons of ny by nx evenly space grid.
    #m = Basemap(llcrnrlon=17.00,llcrnrlat=48.75,urcrnrlon=25.25,urcrnrlat=54.50)

    if shapefile is not None:
        m.readshapefile(shapefile,'shp',drawbounds=True, color='0.3')
        for info, shp in zip(m.shp_info, m.shp):
            m.plot(shp[0], shp[1], marker='+', markersize=8, markeredgewidth=2)
    # Cell-centre coordinates of the raster corners (half-pixel offsets).
    xmin = geo[0] + xres * 0.5
    xmax = geo[0] + (xres * gdata.RasterXSize) - xres * 0.5
    ymin = geo[3] + (yres * gdata.RasterYSize) + yres * 0.5
    ymax = geo[3] - yres * 0.5
    x,y = np.mgrid[xmin:xmax+xres:xres, ymax+yres:ymin:yres]
    x,y = m(x,y)
    cmap = plt.cm.gist_rainbow
    cmap.set_under ('1.0')
    cmap.set_bad('0.8')
    im = m.pcolormesh(x,y, data.T, cmap=cmap, vmin=vmin, vmax=vmax)
    cb = plt.colorbar( orientation='horizontal', fraction=0.10, shrink=1.0, pad=0.10, aspect = 35)
    if title is not None:
        plt.title(title)
    if PDFinsteadOfPng:
        output = output.replace(".png",".pdf").replace(".jpg",".pdf").replace(".bmp",".pdf")
        writeToLog("\t".join(["createMap", 'Please note that your map will saved in PDF. If this is a source of issues, please set PDFinsteadOfPng to False']),"WARNING")
    else:
        writeToLog("\t".join(["createMap", 'Please note that your map will be **NOT** saved in pdf which may results in lower quality when printing']),"info")
    plt.savefig(output, bbox_inches='tight') # to take less space add: bbox_inches='tight', pad_inches=0
    # Clear and then close the figure:
    plt.clf()
    plt.close()
513 |
def createMAPsForFolder(path, fileMask, outputPath, fileName, whatADD=[], shapefile=None):
    # Render a map (via createMap) for every file under 'path' matching 'fileMask'.
    # 'whatADD' optionally wraps the file path into a GDAL subdataset string,
    # e.g. ['NETCDF:"', '":Soil_Moisture'].
    import fnmatch
    import os

    vmax = 1
    vmin = 0

    matches = []
    for root, _dirnames, filenames in os.walk(path):
        matches.extend(os.path.join(root, name)
                       for name in fnmatch.filter(filenames, fileMask))

    for myFile in matches:
        raster = myFile if whatADD == [] else whatADD[0] + myFile + whatADD[1]
        writeToLog("\t".join(["createMAPsForFolder", 'Trying to get date for following file name: {0}'.format(myFile)]),"info")
        myDate = getDateFromFileName(myFile)
        output = os.path.join(outputPath, myDate + fileName)
        writeToLog("\t".join(["createMAPsForFolder", 'raster: {0}, output: {1}.'.format(raster, output)]),"info")
        createMap(raster, vmax, vmin, output, shapefile)
536 |
def getSigma(SentinelFile):
    # Calculate sigma (radar backscatter).
    # Equivalent of ESA SNAP desktop: Radar --> Radiometric --> Calibrate.
    global prefixes
    if not os.path.exists(SentinelFile):
        return
    newFile = newFilepath(SentinelFile, prefixes[0])
    if os.path.exists(newFile):
        writeToLog("\t".join(["getSigma", "File already exists. Exit without changes."]),"WARNING")
        return newFile
    # Read sourceProduct
    sourceProduct = readProd(SentinelFile)
    # Operator class name taken from the SNAP help window:
    # "org.esa.s1tbx.calibration.gpf.CalibrationOp"
    CalibrationOp = jpy.get_type("org.esa.s1tbx.calibration.gpf.CalibrationOp")
    CalOp = CalibrationOp()
    CalOp.setParameterDefaultValues()
    CalOp.setSourceProduct(sourceProduct)
    CalOp.setParameter('doExecute', True)
    # No need to create the target product - the operator creates it.
    targetProduct = CalOp.getTargetProduct()
    print(("Starting writing to the file: " + newFile))
    snappy.ProductIO.writeProduct(targetProduct, newFile, OutputType[1])
    sourceProduct.dispose()
    targetProduct.dispose()
    del CalOp
    del CalibrationOp
    return newFile
565 |
566 |
def getSubset(SentinelFile):
    """Spatially subset a (calibrated) product to the module-level 'wkt' region.

    Run *after* getSigma (i.e. after the Calibration operator).
    Returns the subset file path, or None when the WKT cannot be parsed.
    """
    global prefixes
    #Initialize:
    print(("Please execute getSubset method *AFTER* executing getSigma (after using Calibration operator)"))
    SubsetOp = snappy.jpy.get_type('org.esa.snap.core.gpf.common.SubsetOp')
    WKTReader = snappy.jpy.get_type('com.vividsolutions.jts.io.WKTReader')
    try:
        geom = WKTReader().read(wkt)
    except RuntimeError as e:
        writeToLog("\t".join(["getSubset", "Error when reading wkt: '{0}', exception: '{1}'".format(wkt,e)]))
        # Previously execution continued with 'geom' undefined and crashed
        # with a NameError at setGeoRegion - bail out explicitly instead.
        return
    op = SubsetOp()
    # read source product and set properties:
    product = readProd(SentinelFile)
    op.setSourceProduct(product)
    op.setGeoRegion(geom)
    sub_product = op.getTargetProduct()
    # Ensure that file does not exist yet:
    newFile = newFilepath(SentinelFile, prefixes[1], False)
    if os.path.exists(newFile):
        writeToLog("\t".join(["getSubset", "It seems that subset of your data already exists. Bye!"]),"WARNING")
    else:
        print(("Starting writing to the file: " + newFile))
        ProductIO.writeProduct(sub_product, newFile, OutputType[1])
    product.dispose()
    sub_product.dispose()
    del op
    del SubsetOp
    return newFile
595 |
def getSMI(file1, destP, WP = 0.108, FC = 0.319, band=None ):
    """Compute the Soil Moisture Index of *band* in *file1* via SNAP BandMaths.

    file1: input product path; destP: destination folder.
    WP/FC: wilting-point and field-capacity volumetric water contents.
    band: band name to use; when None, 'Soil_Moisture' is preferred, else
    the product's first band. Returns the result file path (also when the
    file already existed and nothing was recomputed).
    """
    ####
    # Calculates Soil Moisture Index (according to Hunt et al. 2009):
    # FAW = (mv − WP )/(FC − WP ) SMI = −5 + 10*FAW
    # mv - current volumetric water content
    # WP - volumetric water content for wilting point
    # FC - volumetric water content for field capacity
    ####
    # For following period of time: 2013-04-16 2015-12-22
    # On station Derlo, in vegetational period (May - October)
    # we had following values of percentiles:
    #       2013    2014    2015    2013 - 2015
    #5%     0.114   0.1220  0.099   0.108
    #95%    0.344   0.3142  0.296   0.319
    # and they're used as default values
    ####
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    global prefixes
    prefix = prefixes[4]

    resultFile = os.path.join(destP,prefix + os.path.splitext(os.path.basename(file1))[0] + SecondaryOutputType[0])
    if (not os.path.exists(resultFile)):
        products = [readProd(file1)]
        if band is None:
            if (isBandInProd('Soil_Moisture', products[0], True)):
                band = 'Soil_Moisture'
            else:
                # If band name is not provided, I take the first band name
                band = products[0].getBands()[0].getName()
        else:
            # verify if band within the product; bail out (returns None) when absent
            if (not isBandInProd(band, products[0])):
                return

        # SMI = -5 + 10 * FAW, with FAW = (band - WP) / (FC - WP)
        expr = "-5 + 10 * ( ({0} - {1}) / ({2} - {1}) )".format(band,WP,FC)

        GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()

        HashMap = jpy.get_type('java.util.HashMap')
        BandDescriptor = jpy.get_type('org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor')

        targetBand1 = BandDescriptor()
        targetBand1.name = prefix
        targetBand1.description = "{0} calculated using expression: '{1}'".format(prefix,expr)
        targetBand1.type = 'float32'
        targetBand1.expression = expr
        targetBands = jpy.array(
            'org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor', 1)
        targetBands[0] = targetBand1
        parameters = HashMap()
        parameters.put('targetBands', targetBands)

        result = GPF.createProduct('BandMaths', parameters, products)
        writeToLog("\t".join(["getSMI", "Calculating SMI using following expression: ", expr, "for file:", file1, "result:", resultFile]), "info")
        ProductIO.writeProduct(result, resultFile, SecondaryOutputType[1])
        # Release JVM-side resources explicitly.
        for prod in products:
            prod.dispose()
        result.dispose()
        parameters = None
        products = None
    else:
        writeToLog("\t".join(["getSMI", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(resultFile))]),"WARNING")
    return resultFile
662 |
def getFiltered(file1, minValue = 0.0, maxValue = 1.0, band = None):
    """Replace values of *band* outside [minValue, maxValue] with NaN.

    Writes '<file1 stem>_filt<ext>' next to the input via SNAP BandMaths and
    returns that path (also when it already existed). When band is None,
    'Soil_Moisture' then 'sum' are preferred, else the product's first band.
    """
    ####
    # Marks values outside specified range as NaN
    # http://forum.step.esa.int/t/mark-values-outside-specific-range-as-nan/4338
    ####
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    resultFile = os.path.splitext(file1)[0] + "_filt" + os.path.splitext(file1)[1]
    if (not os.path.exists(resultFile)):
        products = [readProd(file1)]
        if band is None:
            if (isBandInProd('Soil_Moisture', products[0], True)):
                band = 'Soil_Moisture'
            elif (isBandInProd('sum', products[0], True)):
                band = 'sum'
            else:
                # If band name is not provided, I take the first band name
                band = products[0].getBands()[0].getName()
        else:
            # verify if band within the product; bail out (returns None) when absent
            if (not isBandInProd(band, products[0])):
                return

        # {0}=band, {1}=minValue, {2}=maxValue: out-of-range pixels become NaN.
        expr = "if ({0} > {2} || {0} < {1}) then NaN else {0}".format(band,minValue,maxValue)

        GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()

        HashMap = jpy.get_type('java.util.HashMap')
        BandDescriptor = jpy.get_type('org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor')

        targetBand1 = BandDescriptor()
        targetBand1.name = band
        targetBand1.description = "{0} filtered: '{1}'".format(band,expr)
        targetBand1.type = 'float32'
        targetBand1.expression = expr
        targetBands = jpy.array(
            'org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor', 1)
        targetBands[0] = targetBand1
        parameters = HashMap()
        parameters.put('targetBands', targetBands)

        result = GPF.createProduct('BandMaths', parameters, products)
        writeToLog("\t".join(["getFiltered", "Filtering expression: ", expr, "for file:", file1, "result:", resultFile]), "info")
        ProductIO.writeProduct(result, resultFile, SecondaryOutputType[1])
        # Release JVM-side resources explicitly.
        for prod in products:
            prod.dispose()
        result.dispose()
        parameters = None
        products = None
    else:
        writeToLog("\t".join(["getFiltered", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(resultFile))]),"WARNING")
    return resultFile
717 |
def getOperation(file1, file2, destination, operation, band=['Soil_Moisture','Soil_Moisture'], outType = OutputType):
    """Apply a per-pixel binary operation between one band of each product.

    operation: two-element list [operator_symbol, name], e.g. ["-", "diff"].
    band: [band_in_file1, band_in_file2]. When the two products share the
    same geometry (same getProductInfo), a two-source BandMaths expression
    is used directly; otherwise the products are collocated first and the
    '_M'/'_S' renamed bands are combined. Returns the result file path, or
    None when a requested band is missing.
    """
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    products = [readProd(file1),
            readProd(file2)]
    #verify if products contain selected band

    if (not isBandInProd(band[0], products[0])):
        return
    if (not isBandInProd(band[1], products[1])):
        return

    GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()

    HashMap = jpy.get_type('java.util.HashMap')
    BandDescriptor = jpy.get_type(
        'org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor')

    targetBand1 = BandDescriptor()
    targetBand1.name = operation[1]
    targetBand1.type = 'float32'

    if (getProductInfo(file1) == getProductInfo(file2)):
        ## index is zero-based, so index 1 refers to the second product
        expr = "".join([band[0], ' ', operation[0], ' $sourceProduct.1.', band[1]])
    else:
        # in this case we need collocated product
        # first remove old products:
        for prod in products:
            prod.dispose()
        collocated = getCollocated(file1, file2, destination)
        products = [readProd(collocated)]
        # Sample expression: 'Sigma0_VH_M - Sigma0_VH_S'
        # Collocation renames main/subordinate bands with '_M'/'_S' suffixes.
        newBand1 = "{0}_M".format(band[0])
        newBand2 = "{0}_S".format(band[1])
        if (not isBandInProd(newBand1, products[0])):
            return
        if (not isBandInProd(newBand2, products[0])):
            return
        expr = "{0} {1} {2}".format(newBand1, operation[0],newBand2)
    prodlist = ""
    for prod in products:
        prodlist = prodlist + "'{0}'".format(prod.getName())
    writeToLog("\t".join(["getOperation", "{0}\t{1}".format(expr,prodlist)]), "info")
    targetBand1.expression = expr
    targetBand1.description = "Operation: {0} ({1}) using expression: '{2}'".format(operation[1], operation[0], expr)

    targetBands = jpy.array(
        'org.esa.snap.core.gpf.common.BandMathsOp$BandDescriptor', 1)
    targetBands[0] = targetBand1

    parameters = HashMap()
    parameters.put('targetBands', targetBands)

    ## More at http://forum.step.esa.int/t/calculate-the-difference-or-division
    ## -between-bands-in-two-different-products
    result = GPF.createProduct('BandMaths', parameters, products)

    # TODO: this should be handled in smarter way!!!
    # NOTE(review): assumes the 4th underscore-separated token of the file
    # name is the product type - confirm for all inputs fed to this function.
    filetype = os.path.basename(file1).split("_")[3]
    writeToLog("\t".join(["getOperation", "filetype:", "{0}".format(filetype)]), "info")
    resultFile = getNewFileName(file1, file2, destination, operation[1], band[0], filetype, False, outType[0])
    ProductIO.writeProduct(result, resultFile, outType[1])
    for prod in products:
        prod.dispose()
    result.dispose()
    parameters = None
    products = None
    return resultFile
789 |
def getProductInfo(file1):
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    # Summarise a product: concatenated band names plus the geometry of its
    # first band and the product resolution.
    prod = readProd(file1)
    bandNames = "".join("'{0}'".format(name) for name in prod.getBandNames())
    firstBand = prod.getBands()[0]
    width = firstBand.getRasterWidth()
    height = firstBand.getRasterHeight()
    prod.dispose()
    resolution = getProductRes(file1)
    return "Bands: {0}, width = {1}, height = {2}, resolution = {3}".format(bandNames,width, height, resolution)
805 |
def getBandFromProduct(file1, bandNumber):
    """Return band *bandNumber* (zero-based) of the product at *file1*,
    or None (after logging) when the index is out of range."""
    import snappy
    from snappy import GPF
    from snappy import ProductIO
    # TODO: When I try to read 'Soil_Moisture_M.img' directly (not hdr file), I receive a NoneType object
    prod = readProd(file1)

    # 'bandNumber' is zero-based, so it must be strictly smaller than the
    # band count. The previous '>=' test let bandNumber == len(bands)
    # through and raised IndexError instead of logging.
    if (len(prod.getBands()) > bandNumber):
        Band = prod.getBands()[bandNumber]
    else:
        writeToLog("\t".join(["getBandFromProduct", "Illegal band number {0}".format(bandNumber)]),"WARNING")
        Band = None
    # If 'prod.dispose()' line (below) is not commented, I receive an error message when usign this function in getBandStats
    # RuntimeError: java.lang.IllegalArgumentException: The name to be externalized must at least contain one character
    #prod.dispose()
    return Band
822 |
def getBandRawData(file1,bandNumber):
    # Load a whole band into memory and return its samples as a flat array.
    # http://forum.step.esa.int/t/is-it-possible-to-read-whole-band-data-as-an-array-as-a-raw-data-from-python
    selectedBand = getBandFromProduct(file1, bandNumber)
    selectedBand.readRasterDataFully()
    rasterData = selectedBand.getRasterData()
    return rasterData.getElems()
829 |
def getAllBandsStats(file1, pathToSaveStats = pathToSaveStats):
    """Append per-band statistics of the product at *file1* to a CSV-like file.

    pathToSaveStats: output file; when falsy, defaults to
    <SentinelPath>/BandStatistics.csv. Returns a human-readable summary
    string (or an error message when the product cannot be read).
    """
    import snappy
    # TODO: When I try to read 'Soil_Moisture_M.img' directly (not hdr file), I receive a NoneType object
    prod = readProd(file1)
    if (not prod):
        errormsg = "getAllBandsStats - Error when reading '{0}' file".format(file1)
        writeToLog("\t".join(["getAllBandsStats", errormsg]))
        return errormsg
    numberOfBands = len(prod.getBands())
    prodName = prod.getName()
    fileName = prod.getFileLocation().getName()
    if (not pathToSaveStats):
        pathToSaveStats = os.path.join(SentinelPath,"BandStatistics.csv")
    ####
    for bandNumber in range(numberOfBands):
        Band = prod.getBands()[bandNumber]
        stats = Band.getStx()
        # NOTE(review): the 'Avg' column is filled with getMedian(), not an
        # arithmetic mean - confirm this is intended before relying on it.
        message = "FileName,Product,BandName,Min,Max,Avg,StdDev,CV,NumberOfPixels,TotalNumberOfPixels:\t" + ("\t".join(["{0}","{1}","{2}","{3}","{4}","{5}","{6}","{7}","{8}", "{9}"])).format(fileName, prodName, Band.getName(), stats.getMinimum(), stats.getMaximum(), stats.getMedian(), stats.getStandardDeviation(), stats.getCoefficientOfVariation(),int(Band.getNumDataElems()), int(stats.getHistogram().getTotals()[0]))
        # Append one line per band; 'with' closes the file after each write.
        with open(pathToSaveStats, "a") as myfile:
            myfile.write(message)
            myfile.write("\n")
    ###
    print("Stats saved in '{0}'".format(pathToSaveStats))
    prod.dispose()
    return "Statistics for all '{0}' bands of product '{1}' has been saved in '{2}'".format(numberOfBands,prodName,pathToSaveStats)
855 |
def getBandHistogram(file1, bandNumber = 0):
    # Histogram of one band, taken from SNAP's statistics (Stx) object.
    selectedBand = getBandFromProduct(file1, bandNumber)
    return selectedBand.getStx().getHistogram()
860 |
def get_envi_header_dict(hdr):
    # Parse ENVI header text into a {key: value} dict.
    # Function from: http://gis.stackexchange.com/questions/48618/how-to-read-write-envi-metadata-using-gdal
    import re
    # First collect the (possibly multi-line) "key = {val}" entries...
    braced = re.compile(r'^(.+?)\s*=\s*({\s*.*?\n*.*?})$', re.M | re.I)
    entries = braced.findall(hdr)

    # ...remove them, then collect the simple "key = val" entries.
    remainder = braced.sub('', hdr)
    simple = re.compile(r'^(.+?)\s*=\s*(.*?)$', re.M | re.I)
    entries.extend(simple.findall(remainder))

    return dict(entries)
876 |
def read_envi_hdr(hdr_file):
    # Parse an ENVI .hdr file into a dict; None when the file does not exist.
    if not os.path.exists(hdr_file):
        return None
    with open(hdr_file, 'r') as content_file:
        return get_envi_header_dict(content_file.read())
882 |
def getMetadataValueFromHdr(hdr_file, HDRkey = 'data gain values'):
    # Look up one key in an ENVI header; braces are stripped from the value.
    # Returns None when the header is missing or the key is absent/empty.
    metadata = read_envi_hdr(hdr_file)
    if not metadata:
        return None
    value = metadata.get(HDRkey)
    if not value:
        return None
    return value.replace("{","").replace("}","").strip()
890 |
def saveHistForFiles(file1, xtitle="Values", ytitle="Frequency", title="Band: ", suffix="eng",directorySuffix = None):
    # Dispatch to saveHistogramForFile: once per '.img' raster inside a
    # '.dim' product's data folder, or once for any other (plain) file.
    import glob
    if os.path.splitext(file1)[1] != '.dim':
        saveHistogramForFile(file1, xtitle, ytitle, title, suffix,directorySuffix)
        return
    searchIn = os.path.join(get_data_path(file1),"*.img")
    for myFile in glob.glob(searchIn):
        saveHistogramForFile(myFile, xtitle, ytitle, title, suffix,directorySuffix)
900 |
def getHistNewFileName(file1, suffix = "pl"):
    global prefixes
    # Since LaTeX has problems with svg support, I'm saving in PDF
    parentFolder = os.path.split(os.path.split(file1)[0])[1]
    simpleName = simplifySMOSandSentinelfileName(os.path.basename(file1))
    return parentFolder + simpleName + prefixes[6] + suffix + ".pdf"
905 |
def getHistNewFullPath(NewFileName, histogramDirectory, directorySuffix = None):
    # Build the full output path, creating <histogramDirectory>[/<suffix>] if needed.
    if directorySuffix is None:
        directory = histogramDirectory
    else:
        directory = os.path.join(histogramDirectory, directorySuffix)
    if not os.path.exists(directory):
        os.makedirs(directory)
    return os.path.join(directory, NewFileName)
914 |
def getHistFilePath(file1, suffix = "pl", directorySuffix = None):
    # Combine the name and directory helpers; 'histogramDirectory' is a
    # module-level setting.
    newName = getHistNewFileName(file1, suffix)
    return getHistNewFullPath(newName, histogramDirectory, directorySuffix)
919 |
def saveHistogramForFile(file1, xtitle="Values", ytitle="Frequency", title=None, suffix="eng",directorySuffix = None):
    """Save a value histogram of the first raster band of *file1* as PDF.

    LIMITATIONS: This is *not* working with .dim files (use saveHistForFiles).
    Sample usage:
      saveHistogramForFile(smallFile)
      saveHistogramForFile(smallFile, "Wartości", "Liczebność", "Pasmo: ", "pl")

    Fix: the NaN share was computed with integer division (Python 2), which
    truncated it to 0 unless *all* pixels were NaN; it now uses float division.
    The unused 'matplotlib.mlab' import was dropped.
    """
    from osgeo import gdal
    import numpy as np

    # Set font which contains polish characters:
    import matplotlib
    font = getFontForName()
    matplotlib.rc('font', family=font)

    import matplotlib.pyplot as plt

    dataset = gdal.Open(file1)
    band = dataset.GetRasterBand(1)
    data = np.squeeze(band.ReadAsArray())
    allElem = np.prod(data.shape)
    NaNElem = np.count_nonzero((np.isnan(data)))
    # float() forces true division - both operands are integral.
    NaNprcnt = float(NaNElem)/allElem
    data = data[np.logical_not(np.isnan(data))]

    if (os.path.splitext(file1)[1] == '.img'):
        # ENVI rasters keep their scaling factor in the companion .hdr file.
        hdrFile = os.path.splitext(file1)[0] + ".hdr"
        value = getMetadataValueFromHdr(hdrFile, 'data gain values')
        data = data * float(value) if value else data
        if (not title == None):
            bandName = getMetadataValueFromHdr(hdrFile, 'band names')
            title = title + bandName if bandName else title

    # TODO: data (scaling) factors should be handled in separate method
    if (file1.startswith("NETCDF")):
        factor = float(dataset.GetMetadataItem('Soil_Moisture#scale_factor')) if dataset.GetMetadataItem('Soil_Moisture#scale_factor') != None else float(1)
        data = data * factor

    # the histogram of the data
    n, bins, patches = plt.hist(data, facecolor='green') #, 10, normed=1, facecolor='green', alpha=0.75)

    if (float(NaNprcnt) > float(0)):
        xtitle = xtitle + ' (NA: {:.2%})'.format(NaNprcnt)
    plt.xlabel(xtitle)
    plt.ylabel(ytitle)
    if (not title == None):
        plt.title(title)
    plt.grid(True)
    NewFullPath = getHistFilePath(file1, suffix, directorySuffix)
    plt.savefig(NewFullPath)
    # Clear and then close the figure:
    plt.clf()
    plt.close()
972 |
973 |
def getCollocated(file1, file2, destination):
    """Collocate two products onto a common grid with SNAP's Collocate operator.

    file1 is the main (reference) product, file2 the subordinate one; bands
    are renamed with '_M'/'_S' suffixes. The resampling type comes from the
    module-level 'getCollocatedResamplingType'. Output is BEAM-DIMAP; the
    existing file is reused when present. Returns the output path.
    """
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    global prefixes
    # TODO: this should be handled in smarter way!!!
    #filetype = os.path.basename(file1).split("_")[3]
    filetype = "_"
    writeToLog("\t".join(["getCollocated", "filetype:", "{0}".format(filetype)]), "info")
    destinationPath = getNewFileName(file1, file2, destination,prefixes[5], "", filetype,True)

    if (not os.path.exists(destinationPath)):
        products = [readProd(file1), readProd(file2)]

        HashMap = jpy.get_type('java.util.HashMap')
        GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()

        parameters = HashMap()
        sourceProducts = HashMap()
        sourceProducts.put("main", products[0])
        sourceProducts.put("subordinate", products[1])
        # '${ORIGINAL_NAME}' is expanded by the operator for each band.
        parameters.put('renameMainComponents', True)
        parameters.put('renameSubordinateComponents', True)
        parameters.put('mainComponentPattern', "${ORIGINAL_NAME}_M")
        parameters.put('subordinateComponentPattern', "${ORIGINAL_NAME}_S")
        parameters.put('resamplingType', getCollocatedResamplingType)

        result = GPF.createProduct('Collocate', parameters, sourceProducts)
        ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

        # Release JVM-side resources explicitly.
        for prod in products:
            prod.dispose()
        result.dispose()
        sourceProducts = None
        parameters = None
        writeToLog("\t".join(["getCollocated", "Input files\t{0}\t{1}\tresamplingType\t{2}".format(getProductInfo(file1),getProductInfo(file2), getCollocatedResamplingType)]),"info")
        writeToLog("\t".join(["getCollocated", "Collocated product saved as '{0}' \t {1}".format(os.path.basename(destinationPath), get_whole_Product_size(destinationPath))]),"info")
    else:
        writeToLog("\t".join(["getCollocated", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(destinationPath))]),"WARNING")
    return destinationPath
1015 |
1016 |
def getDiff(file1, file2, destination, band=None):
    # Per-pixel difference (band math "-") of two products; result goes to
    # 'destination' with suffix "diff".
    # 'band' defaults to ['Soil_Moisture', 'Soil_Moisture']; a None sentinel
    # replaces the former mutable list default, which is shared between calls
    # (classic Python pitfall). Callers passing 'band' explicitly are unaffected.
    if band is None:
        band = ['Soil_Moisture', 'Soil_Moisture']
    # TODO: test output from SNAP desktop and from this file
    return getOperation(file1, file2, destination,
                        ["-", "diff"], band, OutputType)
1021 |
1022 |
def getDivision(file1, file2, destination, band=None):
    # Per-pixel division (band math "/") of two products; result goes to
    # 'destination' with suffix "div".
    # 'band' defaults to ['Soil_Moisture', 'Soil_Moisture']; a None sentinel
    # replaces the former mutable list default, which is shared between calls
    # (classic Python pitfall). Callers passing 'band' explicitly are unaffected.
    if band is None:
        band = ['Soil_Moisture', 'Soil_Moisture']
    return getOperation(file1, file2, destination,
                        ["/", "div"], band, OutputType)
1026 |
def getSum(file1, file2, destination, band=None):
    # Per-pixel sum (band math "+") of two products; result goes to
    # 'destination' with suffix "sum". Note this variant writes with
    # SecondaryOutputType, unlike getDiff/getDivision which use OutputType.
    # 'band' defaults to ['Soil_Moisture', 'Soil_Moisture']; a None sentinel
    # replaces the former mutable list default, which is shared between calls
    # (classic Python pitfall). Callers passing 'band' explicitly are unaffected.
    if band is None:
        band = ['Soil_Moisture', 'Soil_Moisture']
    return getOperation(file1, file2, destination,
                        ["+", "sum"], band, SecondaryOutputType)
1030 |
1031 | # I will use Sentinel and SMOS data
1032 | # I will use two functions: getCoarseResProd (to SMOSPS resolution) or getBetterResProd (to destinationPS resolution)
1033 |
def getCoarseResProd(file1, destination):
    # Aggregate (downsample) file1 to the coarse SMOS pixel spacing (SMOSPS).
    # NOTE(review): the 'destination' argument is currently unused - the
    # output path is derived from file1 via newFilepath(); parameter kept for
    # API compatibility. TODO confirm whether 'destination' should be honoured.
    return getResampled(file1, newFilepath(file1, "aggregated"), resolution=float(SMOSPS))
1037 |
def getBetterResProd(file1, destination):
    # Interpolate (upsample) file1 to the fine target pixel spacing
    # (destinationPS).
    # NOTE(review): the 'destination' argument is currently unused - the
    # output path is derived from file1 via newFilepath(); parameter kept for
    # API compatibility. TODO confirm whether 'destination' should be honoured.
    return getResampled(file1, newFilepath(file1, "interpolated"), resolution=float(destinationPS))
1041 |
def getResampled(file1, destinationPath, resolution=destinationPS):
    # Resample file1 to the given target resolution and write the result to
    # destinationPath as BEAM-DIMAP. Returns destinationPath in every case.
    # TODO: this should be tested!!!
    # More info: http://forum.step.esa.int/t/aggregation-and-interpolation-of-sentinel-products-should-i-use-snappy-or-gdal-tools/2522/3
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    # Guard clause: never overwrite an existing result.
    if os.path.exists(destinationPath):
        writeToLog("\t".join(["getResampled", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(destinationPath))]),"WARNING")
        return destinationPath

    sourceProd = readProd(file1)
    HashMap = jpy.get_type('java.util.HashMap')
    GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()
    parameters = HashMap()
    parameters.put('sourceProduct', sourceProd)
    parameters.put('upsampling', "Bilinear")
    parameters.put('downsampling', "Mean")
    # As I checked in SNAP desktop, 'targetResolution' option is sometimes not available
    # and I need to use targetHeight and targetWidth instead
    # RuntimeError: org.esa.snap.core.gpf.OperatorException: Operator 'ResamplingOp': Value for 'Target resolution' must be of type 'Integer'.
    # So the resolution is converted to Integer here.
    parameters.put('targetResolution', int(resolution))
    result = GPF.createProduct('Resample', parameters, sourceProd)
    ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

    # Release native resources held by the Java products.
    sourceProd.dispose()
    result.dispose()
    parameters = None
    sourceProd = None
    return destinationPath
1071 |
def getTerrainCorrected(file1, crs='WGS84(DD)'):
    # Range-Doppler terrain correction of a SAR product, written as BEAM-DIMAP.
    # According to lveci: "GRD products are not terrain corrected. Due to the nature of SAR acquisitions, in order to get an accurate geocoding you need to account for the geometry of the acquisition"
    # "Over scenes where you have a DEM, you should use range Doppler terrain correction"
    # Radar --> Geometric --> Terrain Correction --> Range-Doppler Terrain Correction
    # The save* parameters (saveLatLon, saveDEM) only control whether extra
    # bands (elevation, lat/lon) are embedded in the output file - not needed
    # for further processing here.
    # Once a DEM is downloaded for an area it stays in the aux folder
    # (.snap\AuxData\DEMs) until deleted manually, so it is not re-downloaded.
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    global prefixes
    destinationPath = newFilepath(file1,prefixes[3])

    # Guard clause: never overwrite an existing result.
    if os.path.exists(destinationPath):
        writeToLog("\t".join(["getTerrainCorrected", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(destinationPath))]),"WARNING")
        return destinationPath

    product = readProd(file1)

    HashMap = jpy.get_type('java.util.HashMap')
    GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()
    writeToLog("\t".join(["getTerrainCorrected", "DEM:",getTerrainCorrected_DEM,"destination (m):", str(destinationPS), "demResamplingMethod", getTerrainCorrected_demResamplingMethod, "imgResamplingMethod", getTerrainCorrected_imgResamplingMethod]),"info")
    parameters = HashMap()
    # Operator parameters, filled in one pass from a (key, value) table.
    for key, value in [
            ('demName', getTerrainCorrected_DEM),
            ('externalDEMApplyEGM', True),
            ('demResamplingMethod', getTerrainCorrected_demResamplingMethod),
            ('imgResamplingMethod', getTerrainCorrected_imgResamplingMethod),
            ('pixelSpacingInMeter', float(destinationPS)),
            ('mapProjection', crs),
            ('nodataValueAtSea', True),
            ('saveDEM', False),
            ('saveLatLon', False),
            ('saveIncidenceAngleFromEllipsoid', False),
            ('saveLocalIncidenceAngle', False),
            ('saveProjectedLocalIncidenceAngle', False)]:
        parameters.put(key, value)

    result = GPF.createProduct('Terrain-Correction', parameters, product)
    ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

    # Release native resources held by the Java products.
    product.dispose()
    result.dispose()
    parameters = None
    product = None
    return destinationPath
1115 |
def getReprojected(file1, destinationPath, crs='EPSG:4326'):
    # Reproject file1 to the given CRS and write the result to destinationPath
    # as BEAM-DIMAP. Returns destinationPath in every case.
    import snappy
    from snappy import GPF
    from snappy import ProductIO

    # Guard clause: never overwrite an existing result.
    if os.path.exists(destinationPath):
        writeToLog("\t".join(["getReprojected", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(destinationPath))]),"WARNING")
        return destinationPath

    product = readProd(file1)

    # TODO: Separate method for handling creating results of computations using 'GPF.createProduct'
    # (it seems that part of code has been repeated multiple times in different methods of this script)
    HashMap = jpy.get_type('java.util.HashMap')
    GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()

    parameters = HashMap()
    parameters.put('crs', crs)
    parameters.put('resampling', getReprojectedResampling)
    result = GPF.createProduct('Reproject', parameters, product)
    ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

    # Release native resources held by the Java products.
    product.dispose()
    result.dispose()
    parameters = None
    product = None
    return destinationPath
1141 |
def getMinMax(current, minV, maxV):
    # Fold 'current' into a running [minimum, maximum] pair and return the
    # updated pair as a two-element list.
    return [min(minV, current), max(maxV, current)]
1148 |
def getExtent(file1):
    ########
    ## Get corner coordinates of the ESA SNAP product (get extent)
    ########
    # Returns [minLon, maxLon, minLat, maxLat] in decimal degrees, or
    # [0.0, 0.0, 0.0, 0.0] when the geo-boundary cannot be computed
    # (e.g. the product has no bands).
    # int step - the step given in pixels
    step = 1
    # Sentinel start value: 999.99 lies outside any valid longitude/latitude,
    # so the first boundary point always replaces it.
    minLon = 999.99

    myProd = readProd(file1)
    try:
        GeoPos = snappy.ProductUtils.createGeoBoundary(myProd, step)
    except RuntimeError as e:
        # NOTE(review): these two log calls omit the severity argument used
        # elsewhere in this file ("info"/"WARNING"); writeToLog's default
        # level applies. Also, myProd is not disposed on this early return -
        # possible native-resource leak; TODO confirm.
        writeToLog("\t".join(["getExtent", "Error!!!, Probably file: '{0}' has *no* bands. Result of len(myProd.getBands()): '{1}'".format(file1, len(myProd.getBands()))]))
        writeToLog("\t".join(["getExtent", "Error message: '{0}'".format(e)]))
        return [0.0, 0.0, 0.0, 0.0]
    # Derive the remaining sentinels from minLon: maxLon starts at -999.99,
    # minLat at 999.99, maxLat at -999.99.
    maxLon = -minLon
    minLat = minLon
    maxLat = maxLon
    # TODO: probably there's better way to check min/max (?)
    for element in GeoPos:
        try:
            lon = element.getLon()
            [minLon, maxLon] = getMinMax(lon,minLon,maxLon)
        except (NameError):
            # NOTE(review): presumably meant to guard against boundary
            # elements lacking lon/lat accessors - getLon() itself would not
            # raise NameError; TODO confirm the intended exception type.
            pass
        try:
            # TODO: separate method to get min and max
            lat = element.getLat()
            [minLat, maxLat] = getMinMax(lat,minLat,maxLat)
        except (NameError):
            pass
    myProd.dispose()
    return [minLon, maxLon, minLat, maxLat]
1182 |
def getExtentStr(file1):
    # Human-readable, tab-separated extent of a product:
    # "Lon:  <minLon>  <maxLon>  Lat:  <minLat>  <maxLat>", each value rounded
    # to two decimal places.
    # Idiom fix: build the rounded strings with a comprehension instead of
    # mutating the list in place via 'for i in range(len(array))'.
    coords = [str(round(value, 2)) for value in getExtent(file1)]
    return "\t".join(["Lon:", coords[0], coords[1], "Lat:", coords[2], coords[3]])
1188 |
def getProductRes(file1):
    ##
    # Gets product resolution (pixel size) in geographical degrees,
    # returned as a "lonRes, latRes" string rounded to 7 decimal places.
    ##
    precision = 7
    myProd = readProd(file1)
    height = float(myProd.getSceneRasterHeight())
    width = float(myProd.getSceneRasterWidth())
    myProd.dispose()
    #
    [minLon, maxLon, minLat, maxLat] = getExtent(file1)
    Lon = maxLon - minLon
    Lat = maxLat - minLat
    # FIX for the old TODO ("THIS MUST BE FIXED!!!"): the geo-boundary used by
    # getExtent() passes through pixel *centres*, so the extent spans
    # (width - 1) x (height - 1) pixel steps, not width x height. Dividing by
    # the full dimension under-reported the pixel size.
    # Verified against 'test_TIFF.tif' (5 x 2 px) in *this* repository:
    # gdalinfo reports 'Pixel Size = (0.259366035461426,-0.316413879394531)';
    # the old code returned '0.2074928, 0.1582069' (= 4/5 and 1/2 of the true
    # values), while dividing by (width - 1) and (height - 1) reproduces the
    # gdalinfo figures.
    lonSteps = width - 1 if width > 1 else width    # guard 1-pixel-wide rasters
    latSteps = height - 1 if height > 1 else height  # guard 1-pixel-tall rasters
    return "{0}, {1}".format(round(Lon/lonSteps,precision), round(Lat/latSteps,precision))
1207 |
def getGeometryName(file1 = sampleSHP):
    # It seems that when adding SHP using 'addVectorToProduct',
    # SHP is available under 'Vector Data' in name which is consistent with SHP file name
    # So: derive the geometry name as the shapefile's base name without extension.
    baseName = os.path.basename(file1)
    geometryName, _extension = os.path.splitext(baseName)
    return geometryName
1212 |
def addVectorToProduct(file1 = sampleSHP, file2 = sampleData, separateShapes = False):
    # Imports shapefile (file1) to SNAP product (file2) and save such product as a new .dim file (destinationPath)
    # Returns the path of the written (or pre-existing) .dim file, or None
    # when file1/file2 do not exist.
    # BUGFIX: 'destinationPath' was previously only assigned on the success
    # path, so the final 'return destinationPath' raised UnboundLocalError
    # whenever the input files were missing; it is now initialised to None.
    import snappy
    from snappy import jpy
    from snappy import GPF
    from snappy import ProductIO
    destinationPath = None
    if (os.path.isfile(file1) and (isSNAPprod(file2) or os.path.isfile(file2))):
        # TODO: Improve this way of file naming (call function to generate name?)
        destinationPath = file2 + getGeometryName(file1) + '.dim'
        if os.path.isfile(destinationPath):
            writeToLog("\t".join(["addVectorToProduct", "It seems that destination file '{0}' with imported vector already exists. Bye!".format(destinationPath)]),"WARNING")
            return destinationPath
        # Initially this method was called 'getVector', but it seems that I must provide product, so I renamed it.
        product = readProd(file2)
        HashMap = jpy.get_type('java.util.HashMap')
        GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()
        parameters = HashMap()
        parameters.put('vectorFile', file1)
        parameters.put('separateShapes', separateShapes)

        result = GPF.createProduct('Import-Vector', parameters, product)
        ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

        # Release native resources held by the Java products.
        product.dispose()
        result.dispose()
        parameters = None
        product = None
    else:
        writeToLog("\t".join(["addVectorToProduct", "It seems that vector file '{0}' *OR* SNAP product: '{1}' does NOT! exitst".format(file1, file2)]),"WARNING")
    return destinationPath
1243 |
def getMasked(file1, maskFile=sampleSHP):
    ###
    # Masks product (file1) using shapefile (maskFile) which defines water.
    # Returns the path of the masked product, or the unmasked (vector-only)
    # product when the Land-Sea-Mask operator fails.
    ###
    # According to lveci: "For small areas like rivers and lake you will need it to be very precise. The lat/lons of the shape file will not be in the correct position in the SAR image if the image is in SAR geometry. You will need to apply the shape file after terrain correction or use to Update Georeference operator which does a sort of backwards geocoding into pixel bands.".
    #http://forum.step.esa.int/t/import-vector-data-shapefile-from-snappy-python/4115

    import snappy
    from snappy import GPF
    from snappy import ProductIO
    global prefixes
    destinationPath = newFilepath(file1, prefixes[2], False)
    if (not os.path.exists(destinationPath)):
        writeToLog("\t".join(["getMasked", "maskingSHP:",maskFile,str(get_whole_Product_size(maskFile))]),"info")
        prodWithVector = addVectorToProduct(maskFile, file1, False)
        writeToLog("\t".join(["getMasked", "prodWithVector:",prodWithVector,str(get_whole_Product_size(prodWithVector)),getExtentStr(prodWithVector)]),"info")
        product = readProd(prodWithVector)
        HashMap = jpy.get_type('java.util.HashMap')
        GPF.getDefaultInstance().getOperatorSpiRegistry().loadOperatorSpis()
        parameters = HashMap()
        #parameters.put('landMask', False)
        parameters.put('useSRTM', True)
        # TODO: Ensure that such geometry exists within file?
        parameters.put('geometry', getGeometryName(maskFile))
        parameters.put('invertGeometry', True)
        parameters.put('byPass', False)
        try:
            result = GPF.createProduct('Land-Sea-Mask', parameters, product)
        # This is mainly for handling 'org.esa.snap.core.gpf.OperatorException: expression: Undefined symbol'
        # FIX: use the 'except ... as e' form (Python 2.6+/3.x) instead of the
        # Python-2-only 'except Exception, e:' syntax, which is a SyntaxError
        # under Python 3.
        except Exception as e:
            writeToLog("\t".join(["getMasked", "!!!!! Error - please ensure that vector data '{0}' which you use for masking is located *within* the scene boundaries".format(getGeometryName(maskFile)) ]))
            print("\n")
            writeToLog("\t".join(["getMasked",str(e) ]))
            print("\n")
            writeToLog("\t".join(["getMasked", "I will return *NOT* masked data" ]))
            product.dispose()
            return prodWithVector

        ProductIO.writeProduct(result, destinationPath, 'BEAM-DIMAP')

        # Release native resources held by the Java products.
        product.dispose()
        result.dispose()
        parameters = None
        product = None
    else:
        writeToLog("\t".join(["getMasked", "It seems that destination file '{0}' already exists. Bye!".format(os.path.basename(destinationPath))]),"WARNING")
    return destinationPath
1291 |
# For testing purposes
# Module-level smoke test executed on import: when the ESA SNAP snappy
# installation directory is present, print basic information (extent and
# product info) about the bundled sample product; otherwise warn that SNAP
# does not appear to be installed.
if os.path.isdir(snappyPath):
    print("testdataPath: '{0}', sampleData: '{1}'".format(testdataPath, sampleData))
    print("\ngetExtentStr(sampleData):")
    print(getExtentStr(sampleData))
    print("\ngetProductInfo(sampleData):")
    print(getProductInfo(sampleData))
    print("\nYou can read sample data by typing: 'readProd(sampleData)'")
else:
    print("snappyPath: '{0}' is not a directory. Are you sure that you have installed ESA SNAP?".format(snappyPath))
1302 |
--------------------------------------------------------------------------------