├── .gitignore ├── docs ├── ibcao.png ├── etopothk.png ├── geotiff.png ├── surfvelmag.png ├── contour2000m.png └── Full_saturation_spectrum_CCW.png ├── colormaps ├── percentiles.cpt ├── speed_blue_red.cpt ├── greenland-topography.cpt ├── greenland-bath-topo-2.cpt ├── greenland-bath-topo-3.cpt ├── speed_blue_red_nonlin_0_1500.cpt ├── speed_blue_red_nonlin_0_3000.cpt ├── Full_saturation_spectrum_CCW.cpt ├── Full_saturation_spectrum_CCW_desatlight.cpt ├── Full_saturation_spectrum_CCW_orange.cpt ├── Full_saturation_spectrum_CCW_orange_desaturated.cpt ├── temp-c.cpt ├── wiki-2.0.cpt ├── greenland-bath-topo.cpt └── BlueYellowRed.cpt ├── __init__.py ├── scripts ├── point2line.py ├── dissolve_by_attribute.py ├── create_greenland_epsg3413_grid.py ├── scalar_within_poly.py ├── create_jif_utm22n_grid.py ├── extract_contours.py ├── create_greenland_bamber_grid.py ├── create_greenland_ext_epsg3413_grid.py ├── contour2shp.py ├── remap3d.py ├── vraster2lineshapefile.py ├── extract_interface.py ├── qgis_colorramp.py ├── extract_sigma_levels.py └── basemap_plot.py ├── setup.py ├── README.rst ├── nc_hillshade.py └── pypismtools.py /.gitignore: -------------------------------------------------------------------------------- 1 | build/* 2 | *~ 3 | *.pyc 4 | -------------------------------------------------------------------------------- /docs/ibcao.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/ibcao.png -------------------------------------------------------------------------------- /docs/etopothk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/etopothk.png -------------------------------------------------------------------------------- /docs/geotiff.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/geotiff.png -------------------------------------------------------------------------------- /docs/surfvelmag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/surfvelmag.png -------------------------------------------------------------------------------- /docs/contour2000m.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/contour2000m.png -------------------------------------------------------------------------------- /docs/Full_saturation_spectrum_CCW.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pism/pypismtools/HEAD/docs/Full_saturation_spectrum_CCW.png -------------------------------------------------------------------------------- /colormaps/percentiles.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Percentil colormap 3 | # COLOR_MODEL = RGB 4 | 16 8 48 107 50 115 179 216 5 | 50 115 179 216 84 247 251 255 6 | B 0 0 0 7 | F 255 255 255 8 | N 255 0 0 9 | -------------------------------------------------------------------------------- /colormaps/speed_blue_red.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # COLOR_MODEL = RGB 3 | 0 179 244 255 10 179 244 255 4 | 10 179 244 255 100 0 199 255 5 | 100 0 199 255 250 67 0 255 6 | 250 67 0 255 750 205 0 205 7 | 750 205 0 205 3000 255 0 0 8 | B 0 0 0 9 | F 255 255 255 10 | N 255 0 0 11 | -------------------------------------------------------------------------------- /colormaps/greenland-topography.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Speed log-color map 3 | # COLOR_MODEL = RGB 4 | 0 46 165 
255 0.1 255 255 204 5 | 0.1 255 255 204 1000 244 189 69 6 | 1000 244 189 69 1250 102 51 12 7 | 1250 102 51 12 2000 190 190 190 8 | B 0 0 0 9 | F 255 255 255 10 | N 255 0 0 11 | -------------------------------------------------------------------------------- /colormaps/greenland-bath-topo-2.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Speed log-color map 3 | # COLOR_MODEL = RGB 4 | -1500 29 106 165 0 46 165 255 5 | 0 46 165 255 0.1 255 255 204 6 | 0.1 255 255 204 1000 244 189 69 7 | 1000 244 189 69 1250 102 51 12 8 | 1250 102 51 12 2000 190 190 190 9 | B 0 0 0 10 | F 255 255 255 11 | N 255 0 0 12 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "GeoTIFF", 3 | "get_dims", 4 | "get_projection_from_file", 5 | "add_inner_title", 6 | "get_golden_mean", 7 | "set_mode", 8 | "trend_estimator", 9 | "colorList", 10 | "gmtColormap", 11 | "smooth", 12 | "fftsmooth", 13 | "get_rmse", 14 | "get_avg", 15 | "unit_converter", 16 | "permute", 17 | ] 18 | -------------------------------------------------------------------------------- /colormaps/greenland-bath-topo-3.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Speed log-color map 3 | # COLOR_MODEL = RGB 4 | -2000 15 96 147 -1000 43 131 186 5 | -1000 43 131 186 0 203 240 255 6 | 0 203 240 255 0.1 51 102 0 7 | 0.1 51 102 0 300 255 255 204 8 | 300 255 255 204 1000 244 189 69 9 | 1000 244 189 69 1250 102 51 12 10 | 1250 102 51 12 2000 190 190 190 11 | B 0 0 0 12 | F 255 255 255 13 | N 255 0 0 14 | -------------------------------------------------------------------------------- /colormaps/speed_blue_red_nonlin_0_1500.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Speed log-color map 3 | # We 
map the colors ['#c6e3f9', '#57c8ff', '#002ae5', '#c51b9e', '#a91318'] 4 | # onto the array and log-transform it 5 | # np.log([10, 100, 250, 750, 1500]) 6 | # array([ 2.30258509, 4.60517019, 5.52146092, 6.62007321, 7.31322039]) 7 | # COLOR_MODEL = RGB 8 | 2.30258509 198 227 249 4.60517019 87 200 255 9 | 4.60517019 87 200 255 5.52146092 0 42 255 10 | 5.52146092 0 42 255 6.62007321 197 27 158 11 | 6.62007321 197 27 205 8.00636757 169 19 24 12 | B 0 0 0 13 | F 255 255 255 14 | N 255 0 0 15 | -------------------------------------------------------------------------------- /colormaps/speed_blue_red_nonlin_0_3000.cpt: -------------------------------------------------------------------------------- 1 | # Andy Aschwanden 2 | # Speed log-color map 3 | # We map the colors ['#c6e3f9', '#57c8ff', '#002ae5', '#c51b9e', '#a91318'] 4 | # onto the array and log-transform it 5 | # np.log([10, 100, 250, 750, 3000]) 6 | # array([ 2.30258509, 4.60517019, 5.52146092, 6.62007321, 8.00636757]) 7 | # COLOR_MODEL = RGB 8 | 2.30258509 198 227 249 4.60517019 87 200 255 9 | 4.60517019 87 200 255 5.52146092 0 42 255 10 | 5.52146092 0 42 255 6.62007321 197 27 158 11 | 6.62007321 197 27 205 8.00636757 169 19 24 12 | B 0 0 0 13 | F 255 255 255 14 | N 255 0 0 15 | -------------------------------------------------------------------------------- /colormaps/Full_saturation_spectrum_CCW.cpt: -------------------------------------------------------------------------------- 1 | # cpt-city/ggr/Full_saturation_spectrum_CCW.cpt 2 | # autogenerated GMT palette "Full saturation spectrum CCW" 3 | # cptutils version 1.15, Wed Apr 20 21:38:53 2005 4 | # COLOR_MODEL = RGB 5 | 1.683168e-01 252 255 0 3.267327e-01 9 255 0 6 | 3.267327e-01 9 160 0 3.366337e-01 0 160 4 7 | 3.366337e-01 0 160 4 4.950495e-01 0 255 247 8 | 4.950495e-01 0 255 247 5.049505e-01 0 247 255 9 | 5.049505e-01 0 247 255 6.633663e-01 0 4 255 10 | 6.633663e-01 0 4 255 6.732673e-01 9 0 255 11 | 6.732673e-01 9 0 255 8.316832e-01 252 0 255 12 | 
8.316832e-01 252 0 255 8.415842e-01 255 0 242 13 | 8.415842e-01 255 0 242 1.000000e+00 255 0 0 14 | B 0 0 0 15 | F 255 255 255 16 | N 255 0 0 17 | -------------------------------------------------------------------------------- /colormaps/Full_saturation_spectrum_CCW_desatlight.cpt: -------------------------------------------------------------------------------- 1 | # cpt-city/ggr/Full_saturation_spectrum_CCW.cpt 2 | # autogenerated GMT palette "Full saturation spectrum CCW" 3 | # cptutils version 1.15, Wed Apr 20 21:38:53 2005 4 | # COLOR_MODEL = RGB 5 | 0.1683168 254 255 128 0.3267327 132 255 128 6 | 0.3267327 84 160 80 0.3366337 80 160 82 7 | 0.3366337 80 160 82 4 4.950495e-01 0 255 247 8 | 4.950495e-01 0 255 247 5.049505e-01 0 247 255 9 | 5.049505e-01 0 247 255 6.633663e-01 0 4 255 10 | 6.633663e-01 0 4 255 6.732673e-01 9 0 255 11 | 6.732673e-01 9 0 255 8.316832e-01 252 0 255 12 | 8.316832e-01 252 0 255 8.415842e-01 255 0 242 13 | 8.415842e-01 255 0 242 1.000000e+00 255 0 0 14 | B 0 0 0 15 | F 255 255 255 16 | N 255 0 0 17 | -------------------------------------------------------------------------------- /colormaps/Full_saturation_spectrum_CCW_orange.cpt: -------------------------------------------------------------------------------- 1 | # cpt-city/ggr/Full_saturation_spectrum_CCW.cpt 2 | # autogenerated GMT palette "Full saturation spectrum CCW" 3 | # cptutils version 1.15, Wed Apr 20 21:38:53 2005 4 | # COLOR_MODEL = RGB 5 | 0 213 122 100 0.09 254 255 55 6 | 0.09 254 255 55 0.19 132 255 120 7 | 0.19 84 160 80 0.31 46 119 48 8 | 0.31 46 119 48 4 4.950495e-01 0 255 247 9 | 4.950495e-01 0 255 247 5.049505e-01 0 247 255 10 | 5.049505e-01 0 247 255 6.233663e-01 0 4 255 11 | 6.233663e-01 0 4 255 6.732673e-01 9 0 255 12 | 6.732673e-01 9 0 255 8.316832e-01 252 0 255 13 | 8.316832e-01 252 0 255 8.415842e-01 255 0 242 14 | 8.415842e-01 255 0 242 1.000000e+00 255 0 0 15 | B 0 0 0 16 | F 255 255 255 17 | N 255 0 0 18 | 
-------------------------------------------------------------------------------- /colormaps/Full_saturation_spectrum_CCW_orange_desaturated.cpt: -------------------------------------------------------------------------------- 1 | # cpt-city/ggr/Full_saturation_spectrum_CCW.cpt 2 | # autogenerated GMT palette "Full saturation spectrum CCW" 3 | # cptutils version 1.15, Wed Apr 20 21:38:53 2005 4 | # COLOR_MODEL = RGB 5 | 0 213 177 149 0.09 254 255 125 6 | 0.09 254 255 125 0.19 132 255 140 7 | 0.19 84 160 80 0.31 78 120 79 8 | 0.31 78 120 79 4 4.950495e-01 0 255 247 9 | 4.950495e-01 0 255 247 5.049505e-01 0 247 255 10 | 5.049505e-01 0 247 255 6.233663e-01 0 4 255 11 | 6.233663e-01 0 4 255 6.732673e-01 9 0 255 12 | 6.732673e-01 9 0 255 8.316832e-01 252 0 255 13 | 8.316832e-01 252 0 255 8.415842e-01 255 0 242 14 | 8.415842e-01 255 0 242 1.000000e+00 255 0 0 15 | B 0 0 0 16 | F 255 255 255 17 | N 255 0 0 18 | -------------------------------------------------------------------------------- /colormaps/temp-c.cpt: -------------------------------------------------------------------------------- 1 | # COLOR MODEL = RGB 2 | -14 74 0 29 -12 74 0 29 3 | -12 49 0 126 -10 49 0 126 4 | -10 0 0 129 -8 0 0 129 5 | -8 0 50 179 -6 0 50 179 6 | -6 0 0 255 -4 0 0 255 7 | -4 0 125 255 -2 0 125 255 8 | -2 0 189 255 0 0 189 255 9 | 0 23 215 139 2 23 215 139 10 | 2 57 173 115 4 57 173 115 11 | 4 42 169 42 6 42 169 42 12 | 6 42 200 42 8 42 200 42 13 | 8 0 255 49 10 0 255 49 14 | 10 83 255 0 12 83 255 0 15 | 12 159 255 0 14 159 255 0 16 | 14 255 255 0 16 255 255 0 17 | 16 255 204 0 18 255 204 0 18 | 18 255 152 0 20 255 152 0 19 | 20 255 101 0 22 255 101 0 20 | 22 255 84 0 24 255 84 0 21 | 24 255 0 0 26 255 0 0 22 | 26 255 0 127 28 255 0 127 23 | 28 255 34 144 30 255 34 144 24 | 30 255 94 177 32 255 94 177 25 | 32 255 134 194 34 255 134 194 26 | 34 255 174 215 36 255 174 215 27 | 36 255 215 235 38 255 215 235 28 | 38 255 255 255 40 255 255 255 29 | 
-------------------------------------------------------------------------------- /colormaps/wiki-2.0.cpt: -------------------------------------------------------------------------------- 1 | # wiki-2.0.cpt 2 | # 3 | # extracted from Maps_template-fr.svg by Eric Gaba (Wikimedia 4 | # Commons user: Sting) and others, released under the following 5 | # licence: 6 | # 7 | # Permission is granted to copy, distribute and/or modify this document 8 | # under the terms of the GNU Free Documentation License, Version 1.2 or 9 | # any later version published by the Free Software Foundation; with no 10 | # Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts. A 11 | # copy of the license is included in the section entitled "GNU Free 12 | # Documentation License". 13 | # 14 | # Translated by J.J. Green 2009, translation under same licence 15 | # 16 | # $Id: wiki-2.0.cpt.body,v 1.1 2009/05/04 15:32:51 jjg Exp $ 17 | # COLOR_MODEL = RGB 18 | -10 113 171 216 -9 113 171 216 19 | -9 121 178 222 -8 121 178 222 20 | -8 132 185 227 -7 132 185 227 21 | -7 141 193 234 -6 141 193 234 22 | -6 150 201 240 -5 150 201 240 23 | -5 161 210 247 -4 161 210 247 24 | -4 172 219 251 -3 172 219 251 25 | -3 185 227 255 -2 185 227 255 26 | -2 198 236 255 -1 198 236 255 27 | -1 216 242 254 0 216 242 254 28 | 0 172 208 165 1 172 208 165 29 | 1 148 191 139 2 148 191 139 30 | 2 168 198 143 3 168 198 143 31 | 3 189 204 150 4 189 204 150 32 | 4 209 215 171 5 209 215 171 33 | 5 225 228 181 6 225 228 181 34 | 6 239 235 192 7 239 235 192 35 | 7 232 225 182 8 232 225 182 36 | 8 222 214 163 9 222 214 163 37 | 9 211 202 157 10 211 202 157 38 | 10 202 185 130 11 202 185 130 39 | 11 195 167 107 12 195 167 107 40 | 12 185 152 90 13 185 152 90 41 | 13 170 135 83 14 170 135 83 42 | 14 172 154 124 15 172 154 124 43 | 15 186 174 154 16 186 174 154 44 | 16 202 195 184 17 202 195 184 45 | 17 224 222 216 18 224 222 216 46 | 18 245 244 242 19 245 244 242 47 | B 0 0 0 48 | F 255 255 255 49 | N 255 0 0 50 | 
-------------------------------------------------------------------------------- /scripts/point2line.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (C) 2020 Andy Aschwanden 3 | 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | 6 | import ogr 7 | import osr 8 | 9 | 10 | if __name__ == "__main__": 11 | 12 | __spec__ = None 13 | 14 | parser = ArgumentParser( 15 | formatter_class=ArgumentDefaultsHelpFormatter, 16 | description="""A script to point shape file created by VectorFieldCalc to a MultiLine shape file.""", 17 | ) 18 | parser.add_argument("INFILE", nargs=1) 19 | parser.add_argument("OUTFILE", nargs=1) 20 | parser.add_argument( 21 | "--s_srs", 22 | dest="s_srs", 23 | help="Source CRS", 24 | default=None, 25 | ) 26 | parser.add_argument( 27 | "--t_srs", 28 | dest="t_srs", 29 | help="Target CRS", 30 | default=None, 31 | ) 32 | 33 | options = parser.parse_args() 34 | infile = options.INFILE[0] 35 | outfile = options.OUTFILE[0] 36 | 37 | s_ds = ogr.Open(infile) 38 | driver = ogr.GetDriverByName('ESRI Shapefile') 39 | t_ds = driver.CreateDataSource(outfile) 40 | 41 | s_layer = s_ds.GetLayer(0) 42 | s_spatialRef = s_layer.GetSpatialRef() 43 | 44 | if s_spatialRef == "": 45 | print(f"{infile} does not have a valid projection. Use '--s_srs projection' to set source CRS. 
(NOT implemented yet)") 46 | else: 47 | srs = s_spatialRef 48 | 49 | t_layer = t_ds.CreateLayer("profile", srs, ogr.wkbLineString) 50 | fieldDefn = ogr.FieldDefn('profile_id', ogr.OFTReal) 51 | t_layer.CreateField(fieldDefn) 52 | 53 | def profile(coords): 54 | p = ogr.Geometry(type=ogr.wkbLineString) 55 | for xy in coords: 56 | p.AddPoint_2D(xy[0],xy[1]) 57 | return p 58 | 59 | profiles = {} 60 | for feature in s_layer: 61 | profile_id = feature.path_id 62 | point_id = feature.point_id 63 | geom = feature.GetGeometryRef() 64 | point = geom.GetPoint() 65 | 66 | if profile_id not in profiles.keys(): 67 | profiles[profile_id] = [] 68 | 69 | profiles[profile_id].append((point[0], point[1])) 70 | 71 | for coords in profiles.values(): 72 | featureDefn = t_layer.GetLayerDefn() 73 | feature = ogr.Feature(featureDefn) 74 | p = profile(coords) 75 | feature.SetGeometry(p) 76 | t_layer.CreateFeature(feature) 77 | 78 | p.Destroy() 79 | feature.Destroy() 80 | 81 | t_ds.Destroy() 82 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | import os 3 | import sys 4 | 5 | PKG_NAME = "pypismtools" 6 | print(("\nInstalling %s" % PKG_NAME)) 7 | print("----------------------------") 8 | 9 | packages = ("numpy", "osgeo", "netCDF4", "pyproj") 10 | print("\nChecking dependencies:") 11 | not_installed = [] 12 | for package in packages: 13 | try: 14 | __import__(package) 15 | print((" - % s package is installed" % package)) 16 | except ImportError: 17 | print((" - % s package NOT installed" % package)) 18 | not_installed.append(package) 19 | if not_installed: 20 | print("Installation of the following packages is optional but recommended:") 21 | for package in not_installed: 22 | if package == "osgeo": 23 | print(" - GDAL python bindings") 24 | else: 25 | print((" - %s" % package)) 26 | print("Exiting") 27 | import sys 28 | 29 | 
sys.exit() 30 | 31 | setup( 32 | name=PKG_NAME, 33 | version="0.5", 34 | description="Python tools to evaluate PISM results", 35 | author="Andy Aschwanden", 36 | author_email="aaschwanden@alaska.edu", 37 | url="https://github.com/pism/pypismtools", 38 | classifiers=[ 39 | "Development Status :: 3 - Alpha", 40 | "Intended Audience :: Science/Research", 41 | "License :: OSI Approved :: GNU General Public License (GPL)", 42 | "Operating System :: POSIX", 43 | "Programming Language :: Python", 44 | "Topic :: Scientific/Engineering", 45 | "Topic :: Utilities", 46 | ], 47 | scripts=[ 48 | "scripts/basemap_plot.py", 49 | "scripts/contour2shp.py", 50 | "scripts/create_greenland_bamber_grid.py", 51 | "scripts/create_greenland_epsg3413_grid.py", 52 | "scripts/create_greenland_ext_epsg3413_grid.py", 53 | "scripts/create_jif_utm22n_grid.py", 54 | "scripts/dissolve_by_attribute.py", 55 | "scripts/extract_sigma_levels.py", 56 | "scripts/extract_interface.py", 57 | "scripts/extract_contours.py", 58 | "scripts/extract_profiles.py", 59 | "scripts/qgis_colorramp.py", 60 | "scripts/scalar_within_poly.py", 61 | "scripts/remap3d.py", 62 | "scripts/vraster2lineshapefile.py", 63 | "scripts/point2line.py", 64 | ], 65 | packages=[PKG_NAME], 66 | package_dir={PKG_NAME: "."}, 67 | package_data={PKG_NAME: ["colormaps/*.cpt"]}, 68 | ) 69 | 70 | print("\n*********************************************************************") 71 | print(("Make make sure you have\n %s\nin your search path!" 
% os.path.join(sys.prefix, "bin"))) 72 | print("*********************************************************************") 73 | -------------------------------------------------------------------------------- /scripts/dissolve_by_attribute.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 4 | import gdal 5 | import ogr 6 | import osr 7 | import os 8 | import logging 9 | import logging.handlers 10 | from shapely.geometry import shape, mapping 11 | from shapely.ops import unary_union 12 | import fiona 13 | import itertools 14 | 15 | 16 | # create logger 17 | logger = logging.getLogger(__name__) 18 | logger.setLevel(logging.DEBUG) 19 | 20 | # create file handler which logs even debug messages 21 | fh = logging.handlers.RotatingFileHandler("extract.log") 22 | fh.setLevel(logging.DEBUG) 23 | # create console handler with a higher log level 24 | ch = logging.StreamHandler() 25 | ch.setLevel(logging.INFO) 26 | # create formatter 27 | formatter = logging.Formatter("%(module)s:%(lineno)d - %(message)s") 28 | 29 | # add formatter to ch and fh 30 | ch.setFormatter(formatter) 31 | fh.setFormatter(formatter) 32 | 33 | # add ch to logger 34 | logger.addHandler(ch) 35 | logger.addHandler(fh) 36 | 37 | 38 | parser = ArgumentParser( 39 | formatter_class=ArgumentDefaultsHelpFormatter, 40 | description="""A script to extract interfaces (calving front, ice-ocean, or groundling line) from a PISM netCDF file, and save it as a shapefile (polygon).""", 41 | ) 42 | parser.add_argument("FILE", nargs=1) 43 | parser.add_argument("-a", "--attribute_field", dest="field", help="Attribute field to group by", default="timestep") 44 | parser.add_argument( 45 | "-o", "--output_filename", dest="out_file", help="Name of the output shape file", default="dissolved.shp" 46 | ) 47 | 48 | 49 | options = parser.parse_args() 50 | ifile = options.FILE[0] 51 | ofile = 
options.out_file 52 | field = options.field 53 | 54 | with fiona.open(ifile) as input: 55 | # preserve the schema of the original shapefile, including the crs 56 | meta = input.meta 57 | with fiona.open(ofile, "w", **meta) as output: 58 | # groupby clusters consecutive elements of an iterable which have the same key so you must first sort the features by the 'STATEFP' field 59 | e = sorted(input, key=lambda k: k["properties"][field]) 60 | # group by the attribute field 61 | for key, group in itertools.groupby(e, key=lambda x: x["properties"][field]): 62 | properties, geom = list(zip(*[(feature["properties"], shape(feature["geometry"])) for feature in group])) 63 | # write the feature, computing the unary_union of the elements in the group with the properties of the first element in the group 64 | output.write({"geometry": mapping(unary_union(geom)), "properties": properties[0]}) 65 | 66 | 67 | # Update the area 68 | shp_driver = ogr.GetDriverByName("ESRI Shapefile") 69 | ds = shp_driver.Open(ofile, 1) 70 | 71 | nl = ds.GetLayerCount() 72 | for k in range(nl): 73 | layer = ds.GetLayer(k) 74 | for feature in layer: 75 | geom = feature.GetGeometryRef() 76 | area = geom.GetArea() 77 | feature.SetField("area", int(area)) 78 | layer.SetFeature(feature) 79 | -------------------------------------------------------------------------------- /scripts/create_greenland_epsg3413_grid.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | from pyproj import Proj 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | 6 | from netCDF4 import Dataset as CDF 7 | 8 | # set up the argument parser 9 | parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) 10 | parser.description = "Create CDO-compliant grid description" 11 | parser.add_argument("FILE", nargs="*") 12 | parser.add_argument("-g", "--grid_spacing", dest="grid_spacing", type=float, help="use X m grid 
spacing", default=1800) 13 | parser.add_argument( 14 | "-f", 15 | "--format", 16 | dest="fileformat", 17 | type=str.upper, 18 | choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_CLASSIC", "NETCDF3_64BIT"], 19 | help="file format out output file", 20 | default="netcdf3_64bit", 21 | ) 22 | 23 | options = parser.parse_args() 24 | args = options.FILE 25 | grid_spacing = options.grid_spacing # convert 26 | 27 | fileformat = options.fileformat.upper() 28 | 29 | if len(args) == 0: 30 | nc_outfile = "grn" + str(grid_spacing) + "m.nc" 31 | elif len(args) == 1: 32 | nc_outfile = args[0] 33 | else: 34 | print("wrong number arguments, 0 or 1 arguments accepted") 35 | parser.print_help() 36 | import sys 37 | 38 | sys.exit(0) 39 | 40 | 41 | if __name__ == "__main__": 42 | 43 | # define output grid, these are the extents of Mathieu's domain (cell 44 | # corners) 45 | e0 = -638000 46 | n0 = -3349600 47 | e1 = 864700 48 | n1 = -657600 49 | 50 | # Add a buffer on each side such that we get nice grids up to a grid spacing 51 | # of 36 km. 
52 | 53 | buffer_e = 40650 54 | buffer_n = 22000 55 | e0 -= buffer_e 56 | n0 -= buffer_n 57 | e1 += buffer_e 58 | n1 += buffer_n 59 | 60 | # Shift to cell centers 61 | e0 += grid_spacing / 2 62 | n0 += grid_spacing / 2 63 | e1 -= grid_spacing / 2 64 | n1 -= grid_spacing / 2 65 | 66 | de = dn = grid_spacing # m 67 | M = int((e1 - e0) / de) + 1 68 | N = int((n1 - n0) / dn) + 1 69 | 70 | easting = np.linspace(e0, e1, M) 71 | northing = np.linspace(n0, n1, N) 72 | ee, nn = np.meshgrid(easting, northing) 73 | 74 | # Set up EPSG 3413 (NSIDC north polar stereo) projection 75 | projection = "epsg:3413" 76 | proj = Proj(projection) 77 | 78 | lon, lat = proj(ee, nn, inverse=True) 79 | 80 | nc = CDF(nc_outfile, "w", format=fileformat) 81 | 82 | nc.createDimension("x", size=easting.shape[0]) 83 | nc.createDimension("y", size=northing.shape[0]) 84 | 85 | var = "x" 86 | var_out = nc.createVariable(var, "d", dimensions=("x")) 87 | var_out.axis = "X" 88 | var_out.long_name = "X-coordinate in Cartesian system" 89 | var_out.standard_name = "projection_x_coordinate" 90 | var_out.units = "meters" 91 | var_out[:] = easting 92 | 93 | var = "y" 94 | var_out = nc.createVariable(var, "d", dimensions=("y")) 95 | var_out.axis = "Y" 96 | var_out.long_name = "Y-coordinate in Cartesian system" 97 | var_out.standard_name = "projection_y_coordinate" 98 | var_out.units = "meters" 99 | var_out[:] = northing 100 | 101 | var = "lon" 102 | var_out = nc.createVariable(var, "d", dimensions=("y", "x")) 103 | var_out.units = "degrees_east" 104 | var_out.valid_range = -180.0, 180.0 105 | var_out.standard_name = "longitude" 106 | var_out[:] = lon 107 | 108 | var = "lat" 109 | var_out = nc.createVariable(var, "d", dimensions=("y", "x")) 110 | var_out.units = "degrees_north" 111 | var_out.valid_range = -90.0, 90.0 112 | var_out.standard_name = "latitude" 113 | var_out[:] = lat 114 | 115 | var = "dummy" 116 | var_out = nc.createVariable(var, "f", dimensions=("y", "x"), fill_value=-9999) 117 | var_out.units = 
"meters" 118 | var_out.long_name = "Just A Dummy" 119 | var_out.comment = "This is just a dummy variable for CDO." 120 | var_out.grid_mapping = "mapping" 121 | var_out.coordinates = "lon lat" 122 | var_out[:] = 0.0 123 | 124 | mapping = nc.createVariable("mapping", "c") 125 | mapping.ellipsoid = "WGS84" 126 | mapping.false_easting = 0.0 127 | mapping.false_northing = 0.0 128 | mapping.grid_mapping_name = "polar_stereographic" 129 | mapping.latitude_of_projection_origin = 90.0 130 | mapping.standard_parallel = 70.0 131 | mapping.straight_vertical_longitude_from_pole = -45.0 132 | 133 | from time import asctime 134 | 135 | historystr = "Created " + asctime() + "\n" 136 | nc.history = historystr 137 | nc.proj4 = projection 138 | nc.Conventions = "CF-1.5" 139 | nc.close() 140 | -------------------------------------------------------------------------------- /colormaps/greenland-bath-topo.cpt: -------------------------------------------------------------------------------- 1 | # bathymetry/topography for the Greenland Ice Sheet 2 | # COLOR_MODEL = RGB 3 | -1260 0 38 115 -1250 0 38 115 4 | -1250 0 38 115 -1240 0 41 117 5 | -1240 0 41 117 -1230 0 41 117 6 | -1230 0 41 117 -1220 0 43 122 7 | -1220 0 43 122 -1210 0 43 122 8 | -1210 0 43 122 -1200 0 45 128 9 | -1200 0 45 128 -1190 0 45 128 10 | -1190 0 45 128 -1180 0 46 130 11 | -1180 0 46 130 -1170 0 46 130 12 | -1170 0 46 130 -1160 0 47 135 13 | -1160 0 47 135 -1150 0 47 135 14 | -1150 0 47 135 -1140 0 51 140 15 | -1140 0 51 140 -1130 0 51 140 16 | -1130 0 51 140 -1120 0 53 145 17 | -1120 0 53 145 -1110 0 53 145 18 | -1110 0 53 145 -1100 0 54 148 19 | -1100 0 54 148 -1090 0 54 148 20 | -1090 0 54 148 -1080 0 56 153 21 | -1080 0 56 153 -1070 0 56 153 22 | -1070 0 56 153 -1060 0 58 158 23 | -1060 0 58 158 -1050 0 58 158 24 | -1050 0 58 158 -1040 0 63 163 25 | -1040 0 63 163 -1030 0 63 163 26 | -1030 0 63 163 -1020 0 65 168 27 | -1020 0 65 168 -1010 0 65 168 28 | -1010 0 65 168 -1000 0 65 171 29 | -1000 0 65 171 -990 0 65 171 
30 | -990 0 65 171 -980 0 67 176 31 | -980 0 67 176 -970 0 67 176 32 | -970 0 67 176 -960 0 69 181 33 | -960 0 69 181 -950 0 69 181 34 | -950 0 69 181 -940 0 74 186 35 | -940 0 74 186 -930 0 74 186 36 | -930 0 74 186 -920 0 77 191 37 | -920 0 77 191 -910 0 77 191 38 | -910 0 77 191 -900 0 78 194 39 | -900 0 78 194 -890 0 78 194 40 | -890 0 78 194 -880 0 80 199 41 | -880 0 80 199 -870 0 80 199 42 | -870 0 80 199 -860 0 82 204 43 | -860 0 82 204 -850 0 82 204 44 | -850 0 82 204 -840 0 87 209 45 | -840 0 87 209 -830 0 87 209 46 | -830 0 87 209 -820 0 88 212 47 | -820 0 88 212 -810 0 88 212 48 | -810 0 88 212 -800 0 90 217 49 | -800 0 90 217 -790 0 90 217 50 | -790 0 90 217 -780 0 92 222 51 | -780 0 92 222 -770 0 92 222 52 | -770 0 92 222 -760 0 95 227 53 | -760 0 95 227 -750 0 95 227 54 | -750 0 95 227 -740 0 101 232 55 | -740 0 101 232 -730 0 101 232 56 | -730 0 101 232 -720 0 102 235 57 | -720 0 102 235 -710 0 102 235 58 | -710 0 102 235 -700 0 104 240 59 | -700 0 104 240 -690 0 104 240 60 | -690 0 104 240 -680 0 106 245 61 | -680 0 106 245 -670 0 106 245 62 | -670 0 106 245 -660 0 108 250 63 | -660 0 108 250 -650 0 108 250 64 | -650 0 108 250 -640 0 111 255 65 | -640 0 111 255 -630 0 111 255 66 | -630 0 111 255 -620 5 118 255 67 | -620 5 118 255 -610 5 118 255 68 | -610 5 118 255 -600 10 120 255 69 | -600 10 120 255 -590 10 120 255 70 | -590 10 120 255 -580 18 129 255 71 | -580 18 129 255 -570 18 129 255 72 | -570 18 129 255 -560 25 133 255 73 | -560 25 133 255 -550 25 133 255 74 | -550 25 133 255 -540 31 135 255 75 | -540 31 135 255 -530 31 135 255 76 | -530 31 135 255 -520 36 142 255 77 | -520 36 142 255 -510 36 142 255 78 | -510 36 142 255 -500 41 144 255 79 | -500 41 144 255 -490 41 144 255 80 | -490 41 144 255 -480 48 152 255 81 | -480 48 152 255 -470 48 152 255 82 | -470 48 152 255 -460 56 156 255 83 | -460 56 156 255 -450 56 156 255 84 | -450 56 156 255 -440 61 161 255 85 | -440 61 161 255 -430 61 161 255 86 | -430 61 161 255 -420 66 164 255 87 | -420 66 164 
255 -410 66 164 255 88 | -410 66 164 255 -400 74 167 255 89 | -400 74 167 255 -390 74 167 255 90 | -390 74 167 255 -380 82 174 255 91 | -380 82 174 255 -370 82 174 255 92 | -370 82 174 255 -360 87 176 255 93 | -360 87 176 255 -350 87 176 255 94 | -350 87 176 255 -340 92 182 255 95 | -340 92 182 255 -330 92 182 255 96 | -330 92 182 255 -320 97 184 255 97 | -320 97 184 255 -310 97 184 255 98 | -310 97 184 255 -300 105 190 255 99 | -300 105 190 255 -290 105 190 255 100 | -290 105 190 255 -280 112 193 255 101 | -280 112 193 255 -270 112 193 255 102 | -270 112 193 255 -260 117 195 255 103 | -260 117 195 255 -250 117 195 255 104 | -250 117 195 255 -240 122 200 255 105 | -240 122 200 255 -230 122 200 255 106 | -230 122 200 255 -220 128 202 255 107 | -220 128 202 255 -210 128 202 255 108 | -210 128 202 255 -200 138 208 255 109 | -200 138 208 255 -190 138 208 255 110 | -190 138 208 255 -180 143 210 255 111 | -180 143 210 255 -170 143 210 255 112 | -170 143 210 255 -160 148 212 255 113 | -160 148 212 255 -150 148 212 255 114 | -150 148 212 255 -140 153 216 255 115 | -140 153 216 255 -130 153 216 255 116 | -130 153 216 255 -120 161 219 255 117 | -120 161 219 255 -110 161 219 255 118 | -110 161 219 255 -100 168 223 255 119 | -100 168 223 255 -90 168 223 255 120 | -90 168 223 255 -80 173 225 255 121 | -80 173 225 255 -70 173 225 255 122 | -70 173 225 255 -60 179 228 255 123 | -60 179 228 255 -50 179 228 255 124 | -50 179 228 255 -40 184 230 255 125 | -40 184 230 255 -30 184 230 255 126 | -30 184 230 255 -20 191 233 255 127 | -20 191 233 255 -10 191 233 255 128 | -10 191 233 255 0 191 233 255 129 | 0 51 102 0 250 129 195 31 130 | 250 129 195 31 500 255 255 204 131 | 500 255 255 204 1000 244 189 69 132 | 1000 244 189 69 1750 102 51 12 133 | 1750 102 51 12 2000 102 51 0 134 | 2000 102 51 0 4700 255 255 255 135 | -------------------------------------------------------------------------------- /README.rst: 
-------------------------------------------------------------------------------- 1 | The ``pypismtools`` module 2 | ====================== 3 | 4 | pypismtools is a collection of python classes and functions to 5 | evaluate studies made with the Parallel Ice Sheet Model (PISM, 6 | http://www.pism-docs.org). pypismtools provides tools for binning and 7 | histogram plotting. It also includes helper functions and wrappers for 8 | things like unit conversion, defining figure sizes and parameters, and 9 | more. Additionally, some scripts are included, to plot netCDF 10 | variables over GeoTIFF files using basemap, to generate colorramps 11 | for QGIS, and to extract contour lines. 12 | 13 | Requirements 14 | ------------------------- 15 | 16 | The following python modules have to be installed previously: 17 | 18 | - netCDF4 19 | - gdal (with python bindings) 20 | - pyproj 21 | - py_udunits2 (from https://github.com/ckhroulev/py_udunits2) 22 | - matplotlib (http://matplotlib.org/) 23 | - basemap (http://matplotlib.org/basemap/) 24 | - python imaging library (PIL), optional, needed for some backgrounds) 25 | 26 | Installation 27 | ------------------------- 28 | 29 | To install for all users, run 30 | 31 | ``$ sudo python setup.py install`` 32 | 33 | To install for the current user, run 34 | 35 | ``$ python setup.py install --user`` 36 | 37 | 38 | Examples for basemap-plot.py 39 | ------------------------- 40 | 41 | basemap-plot.py is a script to plot a variety of ice sheet model relevant variables from a netCDF file from Greenland and Antarctica data sets. Projection information is retrieved from the first input file, and all subsequent plots are on-the-fly re-projected, which makes the script slow but flexible. 42 | 43 | - Download a test data set, e.g. 
the SeaRISE master data set from 44 | 45 | ``$ wget -nc http://websrv.cs.umt.edu/isis/images/a/a5/Greenland_5km_v1.1.nc`` 46 | 47 | - First, plot the magnitude of horizontal surface velocities 'surfvelmag' and save as 'surfvelmag.png'. 48 | 49 | ``$ basemap-plot.py --singlerow -v surfvelmag --colorbar_label -o surfvelmag.png Greenland_5km_v1.1.nc`` 50 | 51 | .. figure:: https://github.com/pism/pypismtools/raw/master/docs/surfvelmag.png 52 | :width: 300px 53 | :alt: Magnitude of surface velocities. 54 | 55 | Example 1: Magnitude of surface velocities. 56 | 57 | 58 | - Now, add coastlines (intermediate resolution 'i') and plot ice thickness 'thk' over an etopo background 59 | 60 | ``$ basemap-plot.py --background etopo --coastlines --map_resolution i --singlerow -v thk -o etopothk.png Greenland_5km_v1.1.nc`` 61 | 62 | .. figure:: https://github.com/pism/pypismtools/raw/master/docs/etopothk.png 63 | :width: 260px 64 | :alt: Ice thickness with ETOPO background. 65 | 66 | Example 2: Ice thickness with ETOPO background. 67 | 68 | - Use a GeoTIFF file as background, plot the colorbar horizontally. In this case, projection information is taken from the GeoTIFF: 69 | 70 | ``$ basemap-plot.py --geotiff mygeotiff.tif --singlecolumn -v 71 | surfvelmag --colorbar_label -o geotiff.png Greenland_5km_v1.1.nc`` 72 | 73 | .. figure:: https://github.com/pism/pypismtools/raw/master/docs/geotiff.png 74 | :width: 260px 75 | :alt: Ice thickness with ETOPO background. 76 | 77 | Example 3: Magnitude of surface velocities over a MODIS mosaic of Greenland. 78 | 79 | Examples for extract_profiles.py 80 | ------------------------- 81 | 82 | The script extract_profiles.py extracts variables stored in a NetCDF_ ``input.nc`` file along profiles given in a shape file ``myprofiles.shp`` and saves the extracted profiles in ``profile.nc``. 
83 | 84 | ``extract_profiles.py myprofiles.shp input.nc profile.nc`` 85 | 86 | 87 | Examples for qgis-colorramp.py 88 | ------------------------- 89 | 90 | qgis-colorramp-plot.py creates linear and log-scaled colorramps for QGIS_ from GMT_ colormaps. Many great colormaps can be downloaded from http://soliton.vm.bytemark.co.uk/pub/cpt-city/. 91 | 92 | To show the bathymetry around Greenland, you can use the IBCAO colormap. By running the following command 93 | 94 | ``qgis-colorramp.py --vmin -5000 --vmax 1400 --extend -10000 4000 ibcao.cpt`` 95 | 96 | and you get a linear colorramp from -5000m to 1400m, where the first and last color 97 | will be extended to -10000 and 4000m, respectively (in ``ibcao.txt``). The result should look like 98 | 99 | .. figure:: https://github.com/pism/pypismtools/raw/master/docs/ibcao.png 100 | :width: 200px 101 | :alt: Linear DEM colormap IBCAO. 102 | 103 | For a nice log-scaled colormap to show speeds, try: 104 | 105 | ``qgis-colorramp.py --a 3 --log --extend 0 30000 Full_saturation_spectrum_CCW.cpt`` 106 | 107 | .. figure:: https://github.com/pism/pypismtools/raw/master/docs/Full_saturation_spectrum_CCW.png 108 | :width: 200px 109 | :alt: Log-scaled colorramp. 110 | 111 | To use the colorramp in QGIS, click on 'Layer Properties / Colormap' 112 | and then click on 'Load color map from file'. Choose the txt 113 | file. Also the colorbar is saved as a png file, and can be added in 114 | the 'Print Composer'. 115 | 116 | .. _QGIS: http://www.qgis.org/ 117 | .. _GMT: http://gmt.soest.hawaii.edu/ 118 | 119 | Examples for contour2shp.py 120 | ------------------------- 121 | 122 | contour2shp.py lets you extract a contour line from a variable in a 123 | netCDF file, and saves it as a polygon in a shapefile. Useful to create a polygon of a drainage basin from the 124 | mask. Or you can extract the 2000m elevation contour: 125 | 126 | ``contour2shp.py -v usrf -c 2000 -s -o poly.shp Greenland_5km_v1.1.nc`` 127 | 128 | ..
figure:: https://github.com/pism/pypismtools/raw/master/docs/contour2000m.png 129 | :width: 200px 130 | :alt: 2000m contour line. 131 | 132 | Examples for create_greenland_grid.py 133 | ------------------------- 134 | 135 | create_greenland_grid.py creates a netCDF file with the SeaRISE Greenland grid with a given grid spacing. Run ``nc2cdo.py`` from pism/utils and you got a grid definition file that can be used for conservative remapping with CDO (https://code.zmaw.de/projects/cdo). 136 | 137 | ``create_greenland_grid.py -g 2 searise_2km_grid.nc`` 138 | 139 | Examples for create_greenland_epsg3413_grid.py 140 | ------------------------- 141 | 142 | Similar to ``create_greenland_grid.py`` but for the EPSG:3413 projection. Expects grid spacing in meters. 143 | 144 | ``create_greenland_epsg3413_grid.py -g 1800 grid_1800m_grid.nc`` 145 | -------------------------------------------------------------------------------- /scripts/scalar_within_poly.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (C) 2013 Andy Aschwanden 4 | 5 | from sys import stderr 6 | from argparse import ArgumentParser 7 | 8 | try: 9 | from netCDF4 import Dataset as NC 10 | except: 11 | from netCDF3 import Dataset as NC 12 | from osgeo import ogr 13 | 14 | # Set up the option parser 15 | parser = ArgumentParser() 16 | parser.description = "All values within a polygon defined by a shapefile are replaced by a scalar value." 
17 | parser.add_argument("FILE", nargs=2) 18 | parser.add_argument( 19 | "-i", 20 | "--invert", 21 | dest="invert", 22 | action="store_true", 23 | help="Replace all values outside of the polygon with this value", 24 | default=False, 25 | ) 26 | parser.add_argument( 27 | "-s", "--scalar_value", dest="scalar_value", type=float, help="Replace with this value", default=0.0 28 | ) 29 | parser.add_argument( 30 | "-v", "--variables", dest="variables", help="Comma separated list of variables.", default=["bmelt"] 31 | ) 32 | 33 | options = parser.parse_args() 34 | args = options.FILE 35 | scalar_value = options.scalar_value 36 | variables = options.variables.split(",") 37 | invert = options.invert 38 | 39 | driver = ogr.GetDriverByName("ESRI Shapefile") 40 | data_source = driver.Open(args[0], 0) 41 | if data_source is None: 42 | print("Couldn't open file {}.\n".format(args[0])) 43 | import sys 44 | 45 | sys.exit(1) 46 | layer = data_source.GetLayer(0) 47 | srs = layer.GetSpatialRef() 48 | if not srs.IsGeographic(): 49 | print(("""Spatial Reference System in % s is not latlon. Converting.""" % filename)) 50 | # Create spatialReference, EPSG 4326 (lonlat) 51 | srs_geo = osr.SpatialReference() 52 | srs_geo.ImportFromEPSG(4326) 53 | 54 | nc = NC(args[1], "a") 55 | 56 | var = "lat" 57 | try: 58 | lat = nc.variables[var] 59 | except: 60 | print(("ERROR: variable '%s' not found but needed... ending ..." % var)) 61 | import sys 62 | 63 | sys.exit() 64 | 65 | var = "lon" 66 | try: 67 | lon = nc.variables[var] 68 | except: 69 | print(("ERROR: variable '%s' not found but needed... ending ..." % var)) 70 | import sys 71 | 72 | sys.exit() 73 | 74 | # get dimensions of first variables 75 | var = variables[0] 76 | try: 77 | first_var = nc.variables[var] 78 | except: 79 | print(("ERROR: variable '%s' not found but needed... ending ..." 
% var)) 80 | 81 | for feature in layer: 82 | feature = layer.GetFeature(0) 83 | geometry = feature.GetGeometryRef() 84 | # Transform to latlon if needed 85 | if not srs.IsGeographic(): 86 | geometry.TransformTo(srs_geo) 87 | 88 | counter = 0 89 | ndim = first_var.ndim 90 | 91 | stderr.write("\n - Processing variable %s, precent done: " % var) 92 | stderr.write("000") 93 | 94 | if ndim == 2: 95 | M = first_var.shape[0] 96 | N = first_var.shape[1] 97 | max_counter = M * N 98 | for m in range(0, M): 99 | for n in range(0, N): 100 | x = lon[m, n] 101 | y = lat[m, n] 102 | wkt = "POINT(%f %f)" % (x, y) 103 | point = ogr.CreateGeometryFromWkt(wkt) 104 | if invert: 105 | if feature.GetGeometryRef().Contains(point): 106 | pass 107 | else: 108 | for var in variables: 109 | try: 110 | data = nc.variables[var] 111 | except: 112 | print(("ERROR: variable '%s' not found but needed... ending ..." % var)) 113 | import sys 114 | 115 | sys.exit() 116 | data[m, n] = scalar_value 117 | else: 118 | if feature.GetGeometryRef().Contains(point): 119 | for var in variables: 120 | try: 121 | data = nc.variables[var] 122 | except: 123 | print(("ERROR: variable '%s' not found but needed... ending ..." % var)) 124 | import sys 125 | 126 | sys.exit() 127 | data[m, n] = scalar_value 128 | 129 | stderr.write("\b\b\b%03d" % (100.0 * counter / max_counter)) 130 | counter += 1 131 | 132 | elif ndim == 3: 133 | K = data.shape[0] 134 | M = data.shape[1] 135 | N = data.shape[2] 136 | max_counter = K * M * N 137 | for k in range(0, K): 138 | for m in range(0, M): 139 | for n in range(0, N): 140 | x = lon[m, n] 141 | y = lat[m, n] 142 | wkt = "POINT(%f %f)" % (x, y) 143 | point = ogr.CreateGeometryFromWkt(wkt) 144 | if invert: 145 | if feature.GetGeometryRef().Contains(point): 146 | pass 147 | else: 148 | for var in variables: 149 | try: 150 | data = nc.variables[var] 151 | except: 152 | print(("ERROR: variable '%s' not found but needed... ending ..." 
% var)) 153 | import sys 154 | 155 | sys.exit() 156 | data[k, m, n] = scalar_value 157 | else: 158 | if feature.GetGeometryRef().Contains(point): 159 | for var in variables: 160 | try: 161 | data = nc.variables[var] 162 | except: 163 | print(("ERROR: variable '%s' not found but needed... ending ..." % var)) 164 | import sys 165 | 166 | sys.exit() 167 | data[k, m, n] = scalar_value 168 | 169 | stderr.write("\b\b\b%03d" % (100.0 * counter / max_counter)) 170 | counter += 1 171 | else: 172 | print(("ERROR: %i dimensions currently not supported... ending..." % ndim)) 173 | nc.close() 174 | -------------------------------------------------------------------------------- /scripts/create_jif_utm22n_grid.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | from pyproj import Proj 4 | from argparse import ArgumentParser 5 | 6 | from netCDF4 import Dataset as CDF 7 | 8 | # set up the argument parser 9 | parser = ArgumentParser() 10 | parser.description = "Create CDO-compliant grid description" 11 | parser.add_argument("FILE", nargs="*") 12 | parser.add_argument("-g", "--grid_spacing", dest="grid_spacing", type=float, help="use X m grid spacing", default=1800) 13 | parser.add_argument( 14 | "-f", 15 | "--format", 16 | dest="fileformat", 17 | type=str.upper, 18 | choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_CLASSIC", "NETCDF3_64BIT"], 19 | help="file format out output file", 20 | default="netcdf3_64bit", 21 | ) 22 | 23 | options = parser.parse_args() 24 | args = options.FILE 25 | grid_spacing = options.grid_spacing # convert 26 | 27 | fileformat = options.fileformat.upper() 28 | 29 | if len(args) == 0: 30 | nc_outfile = "jif" + str(grid_spacing) + "m.nc" 31 | elif len(args) == 1: 32 | nc_outfile = args[0] 33 | else: 34 | print("wrong number arguments, 0 or 1 arguments accepted") 35 | parser.print_help() 36 | import sys 37 | 38 | sys.exit(0) 39 | 40 | 41 | if __name__ == "__main__": 42 | 43 
| xdim = "x" 44 | ydim = "y" 45 | 46 | # define output grid, these are the extents of Mathieu's domain (cell 47 | # corners) 48 | e0 = 403000 49 | n0 = 6370000 50 | e1 = 683000 51 | n1 = 6691000 52 | 53 | # Shift to cell centers 54 | e0 += grid_spacing / 2 55 | n0 += grid_spacing / 2 56 | e1 -= grid_spacing / 2 57 | n1 -= grid_spacing / 2 58 | 59 | de = dn = grid_spacing # m 60 | M = int((e1 - e0) / de) + 1 61 | N = int((n1 - n0) / dn) + 1 62 | 63 | easting = np.linspace(e0, e1, M) 64 | northing = np.linspace(n0, n1, N) 65 | ee, nn = np.meshgrid(easting, northing) 66 | 67 | # UTM 8N projection: EPSG 32608 68 | projection = "+init=epsg:32608" 69 | proj = Proj(projection) 70 | 71 | lon, lat = proj(ee, nn, inverse=True) 72 | 73 | # number of grid corners 74 | grid_corners = 4 75 | # grid corner dimension name 76 | grid_corner_dim_name = "nv4" 77 | 78 | # array holding x-component of grid corners 79 | gc_easting = np.zeros((M, grid_corners)) 80 | # array holding y-component of grid corners 81 | gc_northing = np.zeros((N, grid_corners)) 82 | # array holding the offsets from the cell centers 83 | # in x-direction (counter-clockwise) 84 | de_vec = np.array([-de / 2, de / 2, de / 2, -de / 2]) 85 | # array holding the offsets from the cell centers 86 | # in y-direction (counter-clockwise) 87 | dn_vec = np.array([-dn / 2, -dn / 2, dn / 2, dn / 2]) 88 | # array holding lat-component of grid corners 89 | gc_lat = np.zeros((N, M, grid_corners)) 90 | # array holding lon-component of grid corners 91 | gc_lon = np.zeros((N, M, grid_corners)) 92 | 93 | for corner in range(0, grid_corners): 94 | ## grid_corners in x-direction 95 | gc_easting[:, corner] = easting + de_vec[corner] 96 | # grid corners in y-direction 97 | gc_northing[:, corner] = northing + dn_vec[corner] 98 | # meshgrid of grid corners in x-y space 99 | gc_ee, gc_nn = np.meshgrid(gc_easting[:, corner], gc_northing[:, corner]) 100 | # project grid corners from x-y to lat-lon space 101 | gc_lon[:, :, corner], gc_lat[:, 
:, corner] = proj(gc_ee, gc_nn, inverse=True) 102 | 103 | nc = CDF(nc_outfile, "w", format=fileformat) 104 | 105 | nc.createDimension(xdim, size=easting.shape[0]) 106 | nc.createDimension(ydim, size=northing.shape[0]) 107 | 108 | var = xdim 109 | var_out = nc.createVariable(var, "f", dimensions=(xdim)) 110 | var_out.axis = xdim 111 | var_out.long_name = "X-coordinate in Cartesian system" 112 | var_out.standard_name = "projection_x_coordinate" 113 | var_out.units = "meters" 114 | var_out[:] = easting 115 | 116 | var = ydim 117 | var_out = nc.createVariable(var, "f", dimensions=(ydim)) 118 | var_out.axis = ydim 119 | var_out.long_name = "Y-coordinate in Cartesian system" 120 | var_out.standard_name = "projection_y_coordinate" 121 | var_out.units = "meters" 122 | var_out[:] = northing 123 | 124 | var = "lon" 125 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim)) 126 | var_out.units = "degrees_east" 127 | var_out.valid_range = -180.0, 180.0 128 | var_out.standard_name = "longitude" 129 | var_out.bounds = "lon_bnds" 130 | var_out[:] = lon 131 | 132 | var = "lat" 133 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim)) 134 | var_out.units = "degrees_north" 135 | var_out.valid_range = -90.0, 90.0 136 | var_out.standard_name = "latitude" 137 | var_out.bounds = "lat_bnds" 138 | var_out[:] = lat 139 | 140 | nc.createDimension(grid_corner_dim_name, size=grid_corners) 141 | 142 | var = "lon_bnds" 143 | # Create variable 'lon_bnds' 144 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, grid_corner_dim_name)) 145 | # Assign units to variable 'lon_bnds' 146 | var_out.units = "degreesE" 147 | # Assign values to variable 'lon_nds' 148 | var_out[:] = gc_lon 149 | 150 | var = "lat_bnds" 151 | # Create variable 'lat_bnds' 152 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, grid_corner_dim_name)) 153 | # Assign units to variable 'lat_bnds' 154 | var_out.units = "degreesN" 155 | # Assign values to variable 'lat_bnds' 156 | 
var_out[:] = gc_lat 157 | 158 | var = "dummy" 159 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim), fill_value=np.nan) 160 | var_out.units = "meters" 161 | var_out.long_name = "Just A Dummy" 162 | var_out.comment = "This is just a dummy variable for CDO." 163 | var_out.grid_mapping = "mapping" 164 | var_out.coordinates = "lon lat" 165 | var_out[:] = 1.0 166 | 167 | mapping = nc.createVariable("mapping", "c") 168 | mapping.inverse_flattening = 298.257 169 | mapping.utm_zone_number = 8 170 | mapping.semi_major_axis = 6378137 171 | mapping.grid_mapping_name = "universal_transverse_mercator" 172 | mapping._CoordinateTransformType = "Projection" 173 | mapping._CoordinateAxisTypes = "GeoX GeoY" 174 | 175 | from time import asctime 176 | 177 | historystr = "Created " + asctime() + "\n" 178 | nc.history = historystr 179 | nc.proj4 = projection 180 | nc.Conventions = "CF-1.5" 181 | nc.close() 182 | -------------------------------------------------------------------------------- /scripts/extract_contours.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy as np 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | from netCDF4 import Dataset as NC 6 | from netcdftime import utime 7 | import gdal 8 | import ogr 9 | import osr 10 | import os 11 | import logging 12 | import logging.handlers 13 | 14 | try: 15 | import pypismtools.pypismtools as ppt 16 | except: 17 | import pypismtools as ppt 18 | 19 | # create logger 20 | logger = logging.getLogger(__name__) 21 | logger.setLevel(logging.DEBUG) 22 | 23 | # create file handler which logs even debug messages 24 | fh = logging.handlers.RotatingFileHandler("extract.log") 25 | fh.setLevel(logging.DEBUG) 26 | # create console handler with a higher log level 27 | ch = logging.StreamHandler() 28 | ch.setLevel(logging.INFO) 29 | # create formatter 30 | formatter = logging.Formatter("%(module)s:%(lineno)d - %(message)s") 31 | 
# add formatter to ch and fh
ch.setFormatter(formatter)
fh.setFormatter(formatter)

# add handlers to logger
logger.addHandler(ch)
logger.addHandler(fh)


def create_memory_layer(dst_fieldname):
    """
    Create an in-memory "contours" layer with an OFTInteger 'id' field and an
    OFTReal 'level' field, using the projection of the module-level `src_ds`.

    NOTE(review): `dst_fieldname` is currently unused and the layer is built
    from the module-level `src_ds` / `mem_ds` handles; confirm whether the
    parameter was meant to name one of the fields.
    """
    srs = None
    if src_ds.GetProjectionRef() != "":
        srs = osr.SpatialReference()
        srs.ImportFromWkt(src_ds.GetProjection())

    layer = mem_ds.CreateLayer("contours", srs)

    layer.CreateField(ogr.FieldDefn("id", ogr.OFTInteger))
    layer.CreateField(ogr.FieldDefn("level", ogr.OFTReal))

    return layer


def validateShapePath(shapePath):
    """Return `shapePath` with its extension forced to '.shp'."""
    return os.path.splitext(str(shapePath))[0] + ".shp"


def validateShapeData(shapeData):
    """Raise ShapeDataError unless `shapeData` is a usable one-layer source."""
    if not shapeData:
        raise ShapeDataError("The shapefile is invalid")
    if shapeData.GetLayerCount() != 1:
        raise ShapeDataError("The shapefile must have exactly one layer")


class ShapeDataError(Exception):
    """Raised when a shapefile cannot be read or has the wrong layout."""

    pass


parser = ArgumentParser(
    formatter_class=ArgumentDefaultsHelpFormatter,
    description="""A script to extract contours from netCDF file, and save it as a shapefile.""",
)
parser.add_argument("FILE", nargs=1)
parser.add_argument(
    "-a",
    "--area_threshold",
    dest="area_threshold",
    type=float,
    help="Only save features with an area > area_threshold",
    default=200,
)
parser.add_argument("-e", "--epsg", dest="epsg", type=int, help="Sets EPSG code", default=None)
parser.add_argument(
    "-l", "--levels", dest="levels", help="Which contour levels to extract. Comma-separated list", default="0"
)
Comma-separated list", default="0" 99 | ) 100 | parser.add_argument( 101 | "-o", "--output_filename", dest="out_file", help="Name of the output shape file", default="interface.shp" 102 | ) 103 | parser.add_argument("-v", "--variable", dest="dst_fieldname", help="Name of variable to use", default="usurf") 104 | 105 | options = parser.parse_args() 106 | filename = options.FILE[0] 107 | area_threshold = options.area_threshold 108 | epsg = options.epsg 109 | levels = np.array(options.levels.split(","), dtype=float) 110 | shp_filename = options.out_file 111 | ts_fieldname = "timestamp" 112 | dst_fieldname = options.dst_fieldname 113 | 114 | nc = NC(filename, "r") 115 | xdim, ydim, zdim, tdim = ppt.get_dims(nc) 116 | 117 | if tdim: 118 | time = nc.variables[tdim] 119 | time_units = time.units 120 | time_calendar = time.calendar 121 | cdftime = utime(time_units, time_calendar) 122 | timestamps = cdftime.num2date(time[:]) 123 | has_time = True 124 | else: 125 | tdim = None 126 | nc.close() 127 | 128 | src_ds = gdal.Open("NETCDF:{}:{}".format(filename, dst_fieldname)) 129 | 130 | # Get Memory Driver 131 | mem_driver = ogr.GetDriverByName("Memory") 132 | mem_ds = mem_driver.CreateDataSource("memory_layer") 133 | 134 | # Get SHP Driver 135 | shp_driver = ogr.GetDriverByName("ESRI Shapefile") 136 | shp_filename = validateShapePath(shp_filename) 137 | if os.path.exists(shp_filename): 138 | os.remove(shp_filename) 139 | dst_ds = shp_driver.CreateDataSource(shp_filename) 140 | 141 | srs = None 142 | if src_ds.GetProjectionRef() != "": 143 | srs = osr.SpatialReference() 144 | srs.ImportFromWkt(src_ds.GetProjection()) 145 | 146 | if epsg is not None: 147 | srs = osr.SpatialReference() 148 | srs.ImportFromEPSG(epsg) 149 | 150 | 151 | interface_layer = dst_ds.CreateLayer("interface", srs) 152 | fd = ogr.FieldDefn("area", ogr.OFTInteger) 153 | interface_layer.CreateField(fd) 154 | fd = ogr.FieldDefn("level", ogr.OFTReal) 155 | interface_layer.CreateField(fd) 156 | fd = 
ogr.FieldDefn(ts_fieldname, ogr.OFTString) 157 | interface_layer.CreateField(fd) 158 | fd = ogr.FieldDefn("timestep", ogr.OFTInteger) 159 | interface_layer.CreateField(fd) 160 | 161 | time_step = 0 162 | for k in np.arange(0, src_ds.RasterCount): 163 | 164 | if tdim is None: 165 | timestamp = "0-0-0" 166 | else: 167 | timestamp = timestamps[k] 168 | logger.info("Processing {}".format(timestamp)) 169 | srcband = src_ds.GetRasterBand(int(k + 1)) 170 | logger.debug("Running gdal.ContourGenerate()") 171 | tmp_layer = create_memory_layer(dst_fieldname) 172 | result = gdal.ContourGenerate(srcband, 0, 0, levels, 0, 0, tmp_layer, 0, 1, callback=gdal.TermProgress) 173 | 174 | logger.info("Saving results") 175 | featureDefn = interface_layer.GetLayerDefn() 176 | for feature in tmp_layer: 177 | # create a new feature 178 | outFeature = ogr.Feature(featureDefn) 179 | outFeature.SetGeometry(feature.GetGeometryRef()) 180 | i = outFeature.GetFieldIndex("timestep") 181 | outFeature.SetField(i, int(time_step)) 182 | i = outFeature.GetFieldIndex(ts_fieldname) 183 | outFeature.SetField(i, str(timestamp)) 184 | geom = feature.GetGeometryRef() 185 | area = geom.GetArea() 186 | i = outFeature.GetFieldIndex("area") 187 | outFeature.SetField(i, int(area)) 188 | # add the feature to the output layer 189 | if area >= area_threshold: 190 | interface_layer.CreateFeature(outFeature) 191 | 192 | time_step += 1 193 | 194 | # Clean-up 195 | interface_layer = None 196 | mem_ds = None 197 | src_ds = None 198 | -------------------------------------------------------------------------------- /scripts/create_greenland_bamber_grid.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | from pyproj import Proj 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | 6 | from netCDF4 import Dataset as CDF 7 | 8 | # set up the argument parser 9 | parser = 
ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) 10 | parser.description = "Create CDO-compliant grid description" 11 | parser.add_argument("FILE", nargs="*") 12 | parser.add_argument("-g", "--grid_spacing", dest="grid_spacing", type=float, help="use X m grid spacing", default=5000) 13 | parser.add_argument( 14 | "-f", 15 | "--format", 16 | dest="fileformat", 17 | type=str.upper, 18 | choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_CLASSIC", "NETCDF3_64BIT"], 19 | help="file format out output file", 20 | default="netcdf3_64bit", 21 | ) 22 | 23 | options = parser.parse_args() 24 | args = options.FILE 25 | grid_spacing = options.grid_spacing # convert 26 | 27 | fileformat = options.fileformat.upper() 28 | 29 | if len(args) == 0: 30 | nc_outfile = "grn" + str(grid_spacing) + "m.nc" 31 | elif len(args) == 1: 32 | nc_outfile = args[0] 33 | else: 34 | print("wrong number arguments, 0 or 1 arguments accepted") 35 | parser.print_help() 36 | import sys 37 | 38 | sys.exit(0) 39 | 40 | 41 | if __name__ == "__main__": 42 | 43 | xdim = "x" 44 | ydim = "y" 45 | 46 | # define output grid 47 | 48 | e0 = -800000.0 49 | n0 = -3400000.0 50 | e1 = 700000.0 51 | n1 = -600000.0 52 | 53 | de = dn = grid_spacing # m 54 | M = int((e1 - e0) / de) + 1 55 | N = int((n1 - n0) / dn) + 1 56 | 57 | easting = np.linspace(e0, e1, M) 58 | northing = np.linspace(n0, n1, N) 59 | ee, nn = np.meshgrid(easting, northing) 60 | 61 | # Set up SeaRISE Projection 62 | projection = "+proj=stere +ellps=WGS84 +datum=WGS84 +lon_0=-39 +lat_0=90 +lat_ts=71 +units=m" 63 | proj = Proj(projection) 64 | 65 | lon, lat = proj(ee, nn, inverse=True) 66 | 67 | nc = CDF(nc_outfile, "w", format=fileformat) 68 | 69 | # number of grid corners 70 | grid_corners = 4 71 | # grid corner dimension name 72 | grid_corner_dim_name = "nv4" 73 | 74 | # array holding x-component of grid corners 75 | gc_easting = np.zeros((M, grid_corners)) 76 | # array holding y-component of grid corners 77 | gc_northing = np.zeros((N, 
grid_corners)) 78 | # array holding the offsets from the cell centers 79 | # in x-direction (counter-clockwise) 80 | de_vec = np.array([-de / 2, de / 2, de / 2, -de / 2]) 81 | # array holding the offsets from the cell centers 82 | # in y-direction (counter-clockwise) 83 | dn_vec = np.array([-dn / 2, -dn / 2, dn / 2, dn / 2]) 84 | # array holding lat-component of grid corners 85 | gc_lat = np.zeros((N, M, grid_corners)) 86 | # array holding lon-component of grid corners 87 | gc_lon = np.zeros((N, M, grid_corners)) 88 | 89 | for corner in range(0, grid_corners): 90 | ## grid_corners in x-direction 91 | gc_easting[:, corner] = easting + de_vec[corner] 92 | # grid corners in y-direction 93 | gc_northing[:, corner] = northing + dn_vec[corner] 94 | # meshgrid of grid corners in x-y space 95 | gc_ee, gc_nn = np.meshgrid(gc_easting[:, corner], gc_northing[:, corner]) 96 | # project grid corners from x-y to lat-lon space 97 | gc_lon[:, :, corner], gc_lat[:, :, corner] = proj(gc_ee, gc_nn, inverse=True) 98 | 99 | nc = CDF(nc_outfile, "w", format=fileformat) 100 | 101 | nc.createDimension(xdim, size=easting.shape[0]) 102 | nc.createDimension(ydim, size=northing.shape[0]) 103 | 104 | var = xdim 105 | var_out = nc.createVariable(var, "f", dimensions=(xdim)) 106 | var_out.axis = xdim 107 | var_out.long_name = "X-coordinate in Cartesian system" 108 | var_out.standard_name = "projection_x_coordinate" 109 | var_out.units = "meters" 110 | var_out[:] = easting 111 | 112 | var = ydim 113 | var_out = nc.createVariable(var, "f", dimensions=(ydim)) 114 | var_out.axis = ydim 115 | var_out.long_name = "Y-coordinate in Cartesian system" 116 | var_out.standard_name = "projection_y_coordinate" 117 | var_out.units = "meters" 118 | var_out[:] = northing 119 | 120 | var = "lon" 121 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim)) 122 | var_out.units = "degrees_east" 123 | var_out.valid_range = -180.0, 180.0 124 | var_out.standard_name = "longitude" 125 | var_out.bounds = 
"lon_bnds" 126 | var_out[:] = lon 127 | 128 | var = "lat" 129 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim)) 130 | var_out.units = "degrees_north" 131 | var_out.valid_range = -90.0, 90.0 132 | var_out.standard_name = "latitude" 133 | var_out.bounds = "lat_bnds" 134 | var_out[:] = lat 135 | 136 | nc.createDimension(grid_corner_dim_name, size=grid_corners) 137 | 138 | var = "lon_bnds" 139 | # Create variable 'lon_bnds' 140 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, grid_corner_dim_name)) 141 | # Assign units to variable 'lon_bnds' 142 | var_out.units = "degreesE" 143 | # Assign values to variable 'lon_nds' 144 | var_out[:] = gc_lon 145 | 146 | var = "lat_bnds" 147 | # Create variable 'lat_bnds' 148 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, grid_corner_dim_name)) 149 | # Assign units to variable 'lat_bnds' 150 | var_out.units = "degreesN" 151 | # Assign values to variable 'lat_bnds' 152 | var_out[:] = gc_lat 153 | 154 | var = "dummy" 155 | var_out = nc.createVariable(var, "f", dimensions=("y", "x"), fill_value=np.nan) 156 | var_out.units = "meters" 157 | var_out.long_name = "Just A Dummy" 158 | var_out.comment = "This is just a dummy variable for CDO." 
159 | var_out.grid_mapping = "mapping" 160 | var_out.coordinates = "lon lat" 161 | var_out[:] = np.nan 162 | 163 | mapping = nc.createVariable("mapping", "c") 164 | mapping.ellipsoid = "WGS84" 165 | mapping.false_easting = 0.0 166 | mapping.false_northing = 0.0 167 | mapping.grid_mapping_name = "polar_stereographic" 168 | mapping.latitude_of_projection_origin = 90.0 169 | mapping.standard_parallel = 71.0 170 | mapping.straight_vertical_longitude_from_pole = -39.0 171 | 172 | from time import asctime 173 | 174 | historystr = "Created " + asctime() + "\n" 175 | nc.history = historystr 176 | nc.proj4 = projection 177 | nc.Conventions = "CF-1.5" 178 | nc.close() 179 | -------------------------------------------------------------------------------- /scripts/create_greenland_ext_epsg3413_grid.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | from pyproj import Proj 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | 6 | from netCDF4 import Dataset as CDF 7 | 8 | # set up the argument parser 9 | parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) 10 | parser.description = "Create CDO-compliant grid description" 11 | parser.add_argument("FILE", nargs="*") 12 | parser.add_argument("-g", "--grid_spacing", dest="grid_spacing", type=float, help="use X m grid spacing", default=1800) 13 | parser.add_argument( 14 | "-f", 15 | "--format", 16 | dest="fileformat", 17 | type=str.upper, 18 | choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_CLASSIC", "NETCDF3_64BIT"], 19 | help="file format out output file", 20 | default="netcdf4", 21 | ) 22 | 23 | options = parser.parse_args() 24 | args = options.FILE 25 | grid_spacing = options.grid_spacing # convert 26 | 27 | fileformat = options.fileformat.upper() 28 | 29 | if len(args) == 0: 30 | nc_outfile = "grn" + str(int(grid_spacing)) + "m.nc" 31 | elif len(args) == 1: 32 | nc_outfile = args[0] 33 | else: 34 | 
print("wrong number arguments, 0 or 1 arguments accepted") 35 | parser.print_help() 36 | import sys 37 | 38 | sys.exit(0) 39 | 40 | 41 | if __name__ == "__main__": 42 | 43 | xdim = "x" 44 | ydim = "y" 45 | 46 | # define output grid, these are the extents of Mathieu's domain (cell 47 | # corners) 48 | e0 = -638000 49 | n0 = -3349600 50 | e1 = 864700 51 | n1 = -657600 52 | 53 | # Add a buffer on each side such that we get nice grids up to a grid spacing 54 | # of 36 km. 55 | 56 | buffer_e = 148650 57 | buffer_n = 130000 58 | e0 -= buffer_e + 468000 59 | n0 -= buffer_n 60 | e1 += buffer_e 61 | n1 += buffer_n 62 | 63 | # Shift to cell centers 64 | e0 += grid_spacing / 2 65 | n0 += grid_spacing / 2 66 | e1 -= grid_spacing / 2 67 | n1 -= grid_spacing / 2 68 | 69 | de = dn = grid_spacing # m 70 | M = int((e1 - e0) / de) + 1 71 | N = int((n1 - n0) / dn) + 1 72 | 73 | easting = np.linspace(e0, e1, M) 74 | northing = np.linspace(n0, n1, N) 75 | ee, nn = np.meshgrid(easting, northing) 76 | 77 | # Set up EPSG 3413 (NSIDC north polar stereo) projection 78 | projection = "epsg:3413" 79 | proj = Proj(projection) 80 | 81 | lon, lat = proj(ee, nn, inverse=True) 82 | 83 | # number of grid corners 84 | grid_corners = 4 85 | # grid corner dimension name 86 | grid_corner_dim_name = "nv4" 87 | 88 | # array holding x-component of grid corners 89 | gc_easting = np.zeros((M, grid_corners)) 90 | # array holding y-component of grid corners 91 | gc_northing = np.zeros((N, grid_corners)) 92 | # array holding the offsets from the cell centers 93 | # in x-direction (counter-clockwise) 94 | de_vec = np.array([-de / 2, de / 2, de / 2, -de / 2]) 95 | # array holding the offsets from the cell centers 96 | # in y-direction (counter-clockwise) 97 | dn_vec = np.array([-dn / 2, -dn / 2, dn / 2, dn / 2]) 98 | # array holding lat-component of grid corners 99 | gc_lat = np.zeros((N, M, grid_corners)) 100 | # array holding lon-component of grid corners 101 | gc_lon = np.zeros((N, M, grid_corners)) 102 | 103 
| for corner in range(0, grid_corners): 104 | ## grid_corners in x-direction 105 | gc_easting[:, corner] = easting + de_vec[corner] 106 | # grid corners in y-direction 107 | gc_northing[:, corner] = northing + dn_vec[corner] 108 | # meshgrid of grid corners in x-y space 109 | gc_ee, gc_nn = np.meshgrid(gc_easting[:, corner], gc_northing[:, corner]) 110 | # project grid corners from x-y to lat-lon space 111 | gc_lon[:, :, corner], gc_lat[:, :, corner] = proj(gc_ee, gc_nn, inverse=True) 112 | 113 | nc = CDF(nc_outfile, "w", format=fileformat) 114 | 115 | nc.createDimension(xdim, size=easting.shape[0]) 116 | nc.createDimension(ydim, size=northing.shape[0]) 117 | 118 | var = xdim 119 | var_out = nc.createVariable(var, "d", dimensions=(xdim)) 120 | var_out.axis = xdim 121 | var_out.long_name = "X-coordinate in Cartesian system" 122 | var_out.standard_name = "projection_x_coordinate" 123 | var_out.units = "meters" 124 | var_out[:] = easting 125 | 126 | var = ydim 127 | var_out = nc.createVariable(var, "d", dimensions=(ydim)) 128 | var_out.axis = ydim 129 | var_out.long_name = "Y-coordinate in Cartesian system" 130 | var_out.standard_name = "projection_y_coordinate" 131 | var_out.units = "meters" 132 | var_out[:] = northing 133 | 134 | var = "lon" 135 | var_out = nc.createVariable(var, "d", dimensions=(ydim, xdim)) 136 | var_out.units = "degrees_east" 137 | var_out.valid_range = -180.0, 180.0 138 | var_out.standard_name = "longitude" 139 | var_out.bounds = "lon_bnds" 140 | var_out[:] = lon 141 | 142 | var = "lat" 143 | var_out = nc.createVariable(var, "d", dimensions=(ydim, xdim)) 144 | var_out.units = "degrees_north" 145 | var_out.valid_range = -90.0, 90.0 146 | var_out.standard_name = "latitude" 147 | var_out.bounds = "lat_bnds" 148 | var_out[:] = lat 149 | 150 | nc.createDimension(grid_corner_dim_name, size=grid_corners) 151 | 152 | var = "lon_bnds" 153 | # Create variable 'lon_bnds' 154 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, 
grid_corner_dim_name)) 155 | # Assign units to variable 'lon_bnds' 156 | var_out.units = "degreesE" 157 | # Assign values to variable 'lon_nds' 158 | var_out[:] = gc_lon 159 | 160 | var = "lat_bnds" 161 | # Create variable 'lat_bnds' 162 | var_out = nc.createVariable(var, "f", dimensions=(ydim, xdim, grid_corner_dim_name)) 163 | # Assign units to variable 'lat_bnds' 164 | var_out.units = "degreesN" 165 | # Assign values to variable 'lat_bnds' 166 | var_out[:] = gc_lat 167 | 168 | var = "dummy" 169 | var_out = nc.createVariable(var, "f", dimensions=("y", "x"), fill_value=-2e9) 170 | var_out.units = "meters" 171 | var_out.long_name = "Just A Dummy" 172 | var_out.comment = "This is just a dummy variable for CDO." 173 | var_out.grid_mapping = "mapping" 174 | var_out.coordinates = "lon lat" 175 | var_out[:] = 0.0 176 | 177 | mapping = nc.createVariable("mapping", "c") 178 | mapping.ellipsoid = "WGS84" 179 | mapping.false_easting = 0.0 180 | mapping.false_northing = 0.0 181 | mapping.grid_mapping_name = "polar_stereographic" 182 | mapping.latitude_of_projection_origin = 90.0 183 | mapping.standard_parallel = 70.0 184 | mapping.straight_vertical_longitude_from_pole = -45.0 185 | 186 | from time import asctime 187 | 188 | historystr = "Created " + asctime() + "\n" 189 | nc.history = historystr 190 | nc.proj4 = projection 191 | nc.Conventions = "CF-1.5" 192 | nc.close() 193 | -------------------------------------------------------------------------------- /scripts/contour2shp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy as np 4 | from skimage import measure 5 | from argparse import ArgumentParser 6 | 7 | from netCDF4 import Dataset as NC 8 | from netcdftime import utime 9 | import ogr 10 | import osr 11 | import os 12 | from pyproj import Proj 13 | 14 | try: 15 | import pypismtools.pypismtools as ppt 16 | except: 17 | import pypismtools as ppt 18 | 19 | 20 | def 
def validateShapePath(shapePath):
    """Return shapePath with its extension normalized to '.shp'."""
    return os.path.splitext(str(shapePath))[0] + ".shp"


def validateShapeData(shapeData):
    """Make sure we can access the shapefile."""
    # Make sure the shapefile exists
    if not shapeData:
        raise ShapeDataError("The shapefile is invalid")
    # Make sure there is exactly one layer
    if shapeData.GetLayerCount() != 1:
        raise ShapeDataError("The shapefile must have exactly one layer")


class ShapeDataError(Exception):
    """Raised when a shapefile is missing or malformed."""

    pass


def get_contours(array, x, y, projection, level):
    """
    Find contours of `array` at a constant level and return them as lists
    of (lon, lat) tuples, longest contour first.

    Parameters
    ----------
    array : 2d array indexed (row, col) ~ (y, x)
    x, y : 1d coordinate arrays of the grid (projected coordinates)
    projection : projection instance (pyproj.Proj-like) used with
        inverse=True to convert to lon/lat
    level : contour level

    NOTE(review): this function reads the module-level `single` flag set by
    the argument parser below; it is not a parameter.
    """

    # Find contours at a constant value, sorted shortest first
    # (use `c`, not `x`, to avoid shadowing the coordinate argument)
    contours = sorted(measure.find_contours(array, level), key=lambda c: len(c))

    nx = len(x)
    ny = len(y)

    contour_points = []
    for contour in contours:
        # find_contours returns fractional (row, col) indices; map them
        # onto the projected coordinate axes
        contour_x = x[0] + contour[:, 1] * (x[-1] - x[0]) / (nx - 1)
        contour_y = y[0] + contour[:, 0] * (y[-1] - y[0]) / (ny - 1)
        # Convert to EPSG:4326
        contour_lon, contour_lat = projection(contour_x, contour_y, inverse=True)
        contour_points.append(list(zip(contour_lon, contour_lat)))
    # reverse direction, last entry (longest contour) first.
    contour_points.reverse()

    # BUG FIX: guard against an empty result; find_contours may return no
    # contours at all, and contour_points[0] would raise IndexError.
    if single and contour_points:
        contour_points = [contour_points[0]]

    return contour_points


parser = ArgumentParser(
    description="""A script to extract a (closed) contour line from a variable in a netCDF file, and save it as a shapefile (polygon)."""
)
parser.add_argument("FILE", nargs=1)
parser.add_argument(
    # NOTE(review): "countour.shp" spelling kept for backward compatibility
    "-o", "--output_filename", dest="out_file", help="Name of the output shape file", default="countour.shp"
)
parser.add_argument("-v", "--variable", dest="varname", help="""Variable to plot, default = 'mask'.""", default="mask")
parser.add_argument(
    "-c",
    "--countour_levels",
    nargs="*",
    # BUG FIX: parse levels as floats and default to a list; the old
    # default="0" was a *string* (and CLI values stayed strings), which
    # measure.find_contours cannot use as a level.
    type=float,
    dest="contour_levels",
    help="""Contour-levels to extract, default = 0.""",
    default=[0.0],
)
parser.add_argument(
    "-s",
    "--single",
    dest="single",
    action="store_true",
    help="save only the longest contour line, Default=False",
    default=False,
)


options = parser.parse_args()
filename = options.FILE[0]
shp_filename = options.out_file
contour_levels = options.contour_levels
varname = options.varname
single = options.single

nc = NC(filename, "r")
nc_projection = ppt.get_projection_from_file(nc)

xdim, ydim, zdim, tdim = ppt.get_dims(nc)
var_order = (tdim, zdim, ydim, xdim)

x = np.squeeze(nc.variables[xdim])
y = np.squeeze(nc.variables[ydim])


# Get driver
driver = ogr.GetDriverByName("ESRI Shapefile")
# Create shapeData
shp_filename = validateShapePath(shp_filename)
if os.path.exists(shp_filename):
    os.remove(shp_filename)
shapeData = driver.CreateDataSource(shp_filename)
# Create spatialReference, EPSG 4326 (lonlat)
spatialReference = osr.SpatialReference()
spatialReference.ImportFromEPSG(4326)
layerName = os.path.splitext(os.path.split(shp_filename)[1])[0]
127 | layer = shapeData.CreateLayer(layerName, spatialReference, ogr.wkbPolygon) 128 | layerDefinition = layer.GetLayerDefn() 129 | field_defn = ogr.FieldDefn("level", ogr.OFTReal) 130 | layer.CreateField(field_defn) 131 | field_defn = ogr.FieldDefn("year", ogr.OFTReal) 132 | layer.CreateField(field_defn) 133 | field_defn = ogr.FieldDefn("timestamp", ogr.OFTDateTime) 134 | layer.CreateField(field_defn) 135 | 136 | if tdim: 137 | time = nc.variables["time"] 138 | time_units = time.units 139 | time_calendar = time.calendar 140 | if time[0] < 0: 141 | is_paleo = True 142 | else: 143 | is_paleo = False 144 | cdftime = utime(time_units, time_calendar) 145 | for k, t in enumerate(time): 146 | if is_paleo: 147 | timestamp = "1-1-1" 148 | my_year = k 149 | else: 150 | timestamp = cdftime.num2date(t) 151 | my_year = 0 152 | print(("Processing {}".format(timestamp))) 153 | for level in contour_levels: 154 | contour_var = np.array(ppt.permute(nc.variables[varname], var_order), order="C")[k, Ellipsis] 155 | contour_points = get_contours(contour_var, x, y, nc_projection, level) 156 | # For each contour 157 | polygon = ogr.Geometry(ogr.wkbPolygon) 158 | for point in range(0, len(contour_points)): 159 | geoLocations = contour_points[point] 160 | ring = ogr.Geometry(ogr.wkbLinearRing) 161 | # For each point, 162 | for pointIndex, geoLocation in enumerate(geoLocations): 163 | ring.AddPoint(geoLocation[0], geoLocation[1]) 164 | ring.CloseRings() 165 | polygon.AddGeometry(ring) 166 | # Create feature 167 | featureDefn = layer.GetLayerDefn() 168 | feature = ogr.Feature(featureDefn) 169 | feature.SetGeometry(polygon) 170 | feature.SetFID(k) 171 | i = feature.GetFieldIndex("level") 172 | feature.SetField(i, level) 173 | i = feature.GetFieldIndex("year") 174 | feature.SetField(i, my_year) 175 | i = feature.GetFieldIndex("timestamp") 176 | if not is_paleo: 177 | feature.SetField(i, str(timestamp)) 178 | polygon = None 179 | # Save feature 180 | layer.CreateFeature(feature) 181 | # Cleanup 
182 | feature = None 183 | else: 184 | for level in contour_levels: 185 | contour_var = np.array(np.squeeze(ppt.permute(nc.variables[varname], var_order)), order="C") 186 | contour_points = get_contours(contour_var, x, y, nc_projection, level) 187 | # For each contour 188 | polygon = ogr.Geometry(ogr.wkbPolygon) 189 | for k in range(0, len(contour_points)): 190 | geoLocations = contour_points[k] 191 | ring = ogr.Geometry(ogr.wkbLinearRing) 192 | # For each point, 193 | for pointIndex, geoLocation in enumerate(geoLocations): 194 | ring.AddPoint(geoLocation[0], geoLocation[1]) 195 | ring.CloseRings() 196 | polygon.AddGeometry(ring) 197 | # Create feature 198 | featureDefn = layer.GetLayerDefn() 199 | feature = ogr.Feature(featureDefn) 200 | feature.SetGeometry(polygon) 201 | feature.SetFID(k) 202 | i = feature.GetFieldIndex("level") 203 | feature.SetField(i, level) 204 | polygon = None 205 | # Save feature 206 | layer.CreateFeature(feature) 207 | # Cleanup 208 | feature = None 209 | # Cleanup 210 | shapeData = None 211 | 212 | 213 | # save(shp_filename, contour_points, level) 214 | 215 | 216 | nc.close() 217 | -------------------------------------------------------------------------------- /nc_hillshade.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (C) 2017 Andy Aschwanden 3 | 4 | import os 5 | import numpy as np 6 | import logging 7 | import logging.handlers 8 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 9 | 10 | from netCDF4 import Dataset as NC 11 | 12 | # create logger 13 | logger = logging.getLogger("hillshade") 14 | logger.setLevel(logging.INFO) 15 | 16 | ch = logging.StreamHandler() 17 | ch.setLevel(logging.INFO) 18 | # create formatter 19 | formatter = logging.Formatter("%(name)s - %(module)s - %(message)s") 20 | 21 | # add formatter to ch and fh 22 | ch.setFormatter(formatter) 23 | 24 | # add ch to logger 25 | logger.addHandler(ch) 26 | 27 | 28 | class 
class Hillshade(object):

    """
    A class to add a hillshade to a netCDF time-series.

    Parameters
    ----------

    ifile: netCDF file with dimensions ('time', 'y', 'x'). Other permutations are currently not supported
    variable: variable used to create a hillshade
    threshold_masking: bool. If True, the hillshade is set to fill_value
        where threshold_masking_variable <= threshold_masking_value
    variables_to_mask: comma-separated string of additional variables to
        mask in-place, or None

    kwargs
    ----------

    altitude: lighting altitude (degrees)
    azimuth: lighting azimuth (degrees)
    fill_value: value written into masked-out cells
    threshold_masking_variable: if threshold_masking is True, use this variable to mask
    threshold_masking_value: if threshold_masking is True, use this value to mask
    zf: vertical exaggeration factor
    """

    def __init__(self, ifile, variable="usurf", threshold_masking=True, variables_to_mask=None, *args, **kwargs):
        # BUG FIX: do not forward *args/**kwargs to object.__init__ -- it
        # accepts no arguments and would raise TypeError when the CLI
        # options are passed through.
        super(Hillshade, self).__init__()

        self.threshold_masking = threshold_masking
        self.do_masking = False
        self.ifile = ifile
        self.variable = variable
        if variables_to_mask is not None:
            self.variables_to_mask = variables_to_mask.split(",")
            self.do_masking = True
        else:
            self.variables_to_mask = variables_to_mask
        # defaults; overridden by recognized keyword arguments below
        self.params = {
            "altitude": 45,
            "azimuth": 45,
            "fill_value": 0,
            "threshold_masking_variable": "thk",
            "threshold_masking_value": 10,
            "zf": 5,
        }
        # BUG FIX: iterate kwargs.items() (iterating the dict itself yields
        # only keys), and accept every documented parameter -- the old key
        # filter silently dropped threshold_masking_variable/value, which
        # main() actually passes.
        for key, value in kwargs.items():
            if key in self.params:
                self.params[key] = value

        self._check_vars()
        self.dx = self._get_dx()
        self._create_vars()

    def _check_vars(self):
        """Log which of the required variables are present in the file."""

        nc = NC(self.ifile, "r")
        for mvar in ["time"] + [self.variable]:
            if mvar in nc.variables:
                logger.info("variable {} found".format(mvar))
            else:
                logger.info("variable {} NOT found".format(mvar))

        if self.do_masking:
            for mvar in self.variables_to_mask + [self.params["threshold_masking_variable"]]:
                if mvar in nc.variables:
                    logger.info("variable {} found".format(mvar))
                else:
                    logger.info("variable {} NOT found".format(mvar))
        nc.close()

    def _cart2pol(self, x, y):
        """
        Convert cartesian to polar coordinates; returns (theta, rho).
        """
        theta = np.arctan2(y, x)
        rho = np.sqrt(x ** 2 + y ** 2)
        return (theta, rho)

    def _create_vars(self):
        """
        Create the hillshade output variable if it does not exist yet.
        """

        nc = NC(self.ifile, "a")
        hs_var = self.variable + "_hs"
        if hs_var not in nc.variables:
            # BUG FIX: `fill_value` was an undefined name here (NameError);
            # take it from the parameter dict.
            nc.createVariable(
                hs_var, "i", dimensions=("time", "y", "x"), fill_value=self.params["fill_value"]
            )
        nc.close()

    def _get_dx(self):
        """Return the (square) grid spacing read from the coordinate variables."""

        nc = NC(self.ifile, "r")

        x0, x1 = nc.variables["x"][0:2]
        y0, y1 = nc.variables["y"][0:2]

        nc.close()

        dx = x1 - x0
        dy = y1 - y0

        # hillshade below assumes a square grid
        assert dx == dy

        return dx

    def _hillshade(self, dem):
        """
        Shaded relief of `dem` using the ESRI hillshade algorithm.

        Returns an array of the same shape as `dem`, values >= 0.
        """

        # lighting azimuth
        azimuth = self.params["azimuth"]
        azimuth = 360.0 - azimuth + 90  # convert to mathematic unit
        if azimuth >= 360:
            azimuth = azimuth - 360
        azimuth = azimuth * (np.pi / 180)  # convert to radians

        # lighting altitude
        altitude = self.params["altitude"]
        altitude = (90 - altitude) * (np.pi / 180)  # convert to zenith angle in radians

        # calc slope and aspect (radians)
        dx = self.dx
        fx, fy = np.gradient(dem, dx)  # uses simple, unweighted gradient of immediate
        [asp, grad] = self._cart2pol(fy, fx)  # convert to polar coordinates

        zf = self.params["zf"]
        grad = np.arctan(zf * grad)  # steepest slope
        # convert aspect into the expected range
        asp[asp < np.pi] = asp[asp < np.pi] + (np.pi / 2)
        asp[asp < 0] = asp[asp < 0] + (2 * np.pi)

        # hillshade calculation
        h = 255.0 * ((np.cos(altitude) * np.cos(grad)) + (np.sin(altitude) * np.sin(grad) * np.cos(azimuth - asp)))
        h[h < 0] = 0  # set hillshade values to min of 0.

        return h

    def run(self):
        """Compute the hillshade for every time slice and write it back to the file."""
        # BUG FIX: use self.ifile (the method previously read the module
        # global `ifile`, breaking any non-__main__ use of the class).
        logger.info("Processing file {}".format(self.ifile))
        fill_value = self.params["fill_value"]
        hs_var = self.variable + "_hs"
        nc = NC(self.ifile, "a")
        nt = len(nc.variables["time"][:])
        for t in range(nt):
            logger.info("Processing time {} of {}".format(t, nt))
            # BUG FIX: read self.variable instead of the hard-coded "usurf"
            dem = nc.variables[self.variable][t, Ellipsis]
            hs = self._hillshade(dem)
            hs[dem == 0] = fill_value
            if self.threshold_masking:
                m = nc.variables[self.params["threshold_masking_variable"]][t, Ellipsis]
                hs[m <= self.params["threshold_masking_value"]] = fill_value
            # BUG FIX: write hs only after the threshold masking has been
            # applied; previously the masked values never reached the file.
            nc.variables[hs_var][t, Ellipsis] = hs
            if self.do_masking:
                for mvar in self.variables_to_mask:
                    mt = nc.variables[self.params["threshold_masking_variable"]][t, Ellipsis]
                    m = nc.variables[mvar][t, Ellipsis]
                    try:
                        m_fill_value = nc.variables[mvar]._FillValue
                    except AttributeError:
                        m_fill_value = fill_value
                    m[mt < self.params["threshold_masking_value"]] = m_fill_value
                    nc.variables[mvar][t, Ellipsis] = m

        nc.close()


if __name__ == "__main__":

    # set up the option parser
    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
    parser.description = "Adding a hillshade to a netCDF file."
    parser.add_argument(
        "FILE",
        nargs=1,
        help="netCDF file with dimensions ('time', 'y', 'x'). Other permutations are currently not supported",
    )
Other permutations are currently not supported", 204 | ) 205 | parser.add_argument("-v", "--variable", dest="variable", help="Variable used for hillshade", default="usurf") 206 | parser.add_argument("--altitude", dest="altitude", type=float, help="Altitude for hillshade", default=45) 207 | parser.add_argument("--azimuth", dest="azimuth", type=float, help="Azimuth for hillshade", default=45) 208 | parser.add_argument("--fill_value", dest="fill_value", type=float, help="Fill value for masking", default=0) 209 | parser.add_argument( 210 | "--threshold_masking", 211 | dest="threshold_masking", 212 | action="store_false", 213 | help="Masking above threshold", 214 | default=True, 215 | ) 216 | parser.add_argument( 217 | "--threshold_masking_variable", 218 | dest="threshold_masking_variable", 219 | help="Variable to use for threshold masking", 220 | default="thk", 221 | ) 222 | parser.add_argument( 223 | "--threshold_masking_value", 224 | dest="threshold_masking_value", 225 | type=float, 226 | help="Value to use for threshold masking", 227 | default=10, 228 | ) 229 | parser.add_argument("--zf", dest="zf", type=float, help="Zf", default=5) 230 | 231 | options = parser.parse_args() 232 | ifile = options.FILE[0] 233 | delattr(options, "FILE") 234 | variable = options.variable 235 | delattr(options, "variable") 236 | hs = Hillshade(ifile, variable, **vars(options)) 237 | hs.run() 238 | -------------------------------------------------------------------------------- /scripts/remap3d.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import time 4 | import subprocess 5 | import numpy as np 6 | import pylab as plt 7 | from argparse import ArgumentParser 8 | from scipy.interpolate import interp1d 9 | 10 | try: 11 | from netCDF4 import Dataset as NC 12 | except: 13 | from netCDF3 import Dataset as NC 14 | 15 | try: 16 | import pypismtools.pypismtools as ppt 17 | except: 18 | import pypismtools as ppt 19 | 20 | 21 
def interp(z, var_old, thk_old, thk_new):
    """
    Remap/rescale a 3D field from an old ice geometry to a new one.

    Each vertical column is stretched by the ratio of new to old ice
    thickness; values above the new ice surface are filled with the
    uppermost in-ice value, and ice-free columns are filled with the
    topmost ("atmospheric") value of the input column.

    Parameters
    ----------
    z : 1d array, vertical coordinate
    var_old : (Mt,Mx,My,Mz) or (Mx,My,Mz) array, input values
    thk_old : (Mx,My) array, input ice thickness
    thk_new : (Mx,My) array, output ice thickness

    Returns
    -------

    var_new : array of the same shape as var_old
    """

    indices = np.linspace(0, len(z) - 1, len(z))

    def remap_column(column, vH_old, vH_new):
        # Rescale a single vertical column from thickness vH_old to vH_new.
        # BUG FIX: both thicknesses must be positive; the old 3D branch
        # only checked vH_new and divided by a zero vH_old.
        if (vH_new > 0) and (vH_old > 0):
            z_scale = vH_new / vH_old
            f = interp1d(indices * z_scale, column)
            indices_belowH = indices[z < vH_new]
            # BUG FIX: cast to int -- float array indices and slice bounds
            # are rejected by modern NumPy.
            index_belowHp1 = int(indices_belowH[-1]) + 1
            index_new_belowHp1 = int(indices[z < vH_old][-1]) + 1
            # prefill with the uppermost value inside the old ice column
            new_column = np.ones_like(column) * column[index_new_belowHp1]
            new_column[0:index_belowHp1] = f(indices_belowH)
            return new_column
        # ice-free column: fill with the atmospheric (topmost) value
        return np.ones_like(column) * column[-1]

    # prefill new variable, then overwrite column by column
    var_new = np.zeros_like(var_old)
    if len(var_old.shape) == 4:
        Mt, Mx, My, Mz = var_old.shape
        for j in range(0, Mt):
            for k in range(0, Mx):
                for l in range(0, My):
                    var_new[j, k, l, :] = remap_column(var_old[j, k, l, :], thk_old[k, l], thk_new[k, l])
    else:
        Mx, My, Mz = var_old.shape
        for k in range(0, Mx):
            for l in range(0, My):
                var_new[k, l, :] = remap_column(var_old[k, l, :], thk_old[k, l], thk_new[k, l])
    return var_new


def create_test_data(Mx, My, Mz):
    """
    Create test data: flat old/new surfaces and a sin(z) profile in
    every column.  Returns (z, var_old, usurf_old, usurf_new).
    """

    # elevation of old surface
    usurf_old = 1.85 * np.pi * np.ones((Mx, My))
    # elevation of new surface
    usurf_new = 1.75 * np.pi * np.ones((Mx, My))

    z = np.linspace(0, 2 * np.pi, Mz)
    # every (k, l) column carries the same sin(z) profile
    var_old = np.tile(np.sin(z), (Mx, My, 1))

    return z, var_old, usurf_old, usurf_new


if __name__ == "__main__":

    # Set up the argument parser
    parser = ArgumentParser()
    parser.description = """A script to remap/rescale 3D fields (e.g. enthalpy) from one ice sheet body
    to another, given their ice thicknesses. Both files need to have same (Mx,My) dimensions"""
Both files need to have same (Mx,My) dimensions""" 119 | parser.add_argument("FILE", nargs="*") 120 | parser.add_argument("--test", dest="test", action="store_true", help="test with some fake date", default=False) 121 | parser.add_argument( 122 | "-c", 123 | "--copy_thickness", 124 | dest="copy_thickness", 125 | action="store_true", 126 | help="copy ice thickness to new file", 127 | default=False, 128 | ) 129 | parser.add_argument( 130 | "-v", 131 | "--variable", 132 | dest="varname", 133 | help="""Variable used for remapping, default = "enthalpy".""", 134 | default="enthalpy", 135 | ) 136 | 137 | options = parser.parse_args() 138 | args = options.FILE 139 | copy_thickness = options.copy_thickness 140 | test = options.test 141 | interp_var_name = options.varname 142 | thk_var_name = "thk" 143 | 144 | if not test and len(args) == 0: 145 | print("no arguments given, running with test data") 146 | test = True 147 | 148 | if not test: 149 | thk_file_name = args[0] 150 | from_file_name = args[1] 151 | to_file_name = args[2] 152 | 153 | subprocess.call(["ncks", "-O", from_file_name, to_file_name]) 154 | if copy_thickness: 155 | print(("copy ice thickness from %s to %s." % (thk_file_name, to_file_name))) 156 | subprocess.call(["ncks", "-A -v thk", thk_file_name, to_file_name]) 157 | 158 | # open file in append mode 159 | nc_to = NC(to_file_name, "a") 160 | # get dimensions from file 161 | xdim, ydim, zdim, tdim = ppt.get_dims(nc_to) 162 | # set variable order for permutation 163 | var_order = (tdim, xdim, ydim, zdim) 164 | # read in z-coordinate 165 | z = nc_to.variables[zdim][:] 166 | 167 | # read ice thickness 168 | print((" - reading variable %s from file %s" % (thk_var_name, to_file_name))) 169 | try: 170 | thk_to = np.squeeze(ppt.permute(nc_to.variables[thk_var_name], var_order)) 171 | except: 172 | print( 173 | ("ERROR: unknown or not-found variable '%s' in file %s ... ending ..." 
% (thk_var_name, to_file_name)) 174 | ) 175 | import sys 176 | 177 | sys.exit() 178 | 179 | # read interpolation variable 180 | print((" - reading variable %s from file %s" % (interp_var_name, to_file_name))) 181 | try: 182 | # Don't know why squeezing changes dimension ordering 183 | ## var_old = np.squeeze(ppt.permute(nc_to.variables[interp_var_name], var_order)) 184 | var_old = ppt.permute(nc_to.variables[interp_var_name], var_order) 185 | except: 186 | print( 187 | ( 188 | "ERROR: unknown or not-found variable '%s' in file %s ... ending ..." 189 | % (interp_var_name, to_file_name) 190 | ) 191 | ) 192 | import sys 193 | 194 | sys.exit() 195 | 196 | nc_thk = NC(thk_file_name, "r") 197 | 198 | # read ice thickness 199 | print((" - reading variable %s from file %s" % (thk_var_name, thk_file_name))) 200 | try: 201 | thk_from = np.squeeze(ppt.permute(nc_thk.variables[thk_var_name], var_order)) 202 | except: 203 | print( 204 | ( 205 | "ERROR: unknown or not-found variable '%s' in file %s ... ending ..." 
206 | % (thk_var_name, thk_file_name) 207 | ) 208 | ) 209 | import sys 210 | 211 | sys.exit() 212 | else: 213 | # grid dimensions 214 | Mx = 5 215 | My = 4 216 | Mz = 401 217 | z, var_old, thk_from, thk_to = create_test_data(Mx, My, Mz) 218 | 219 | t = time.time() 220 | var_new = interp(z, var_old, thk_to, thk_from) 221 | elapsed = time.time() - t 222 | print(("time spent interpolating: %f" % elapsed)) 223 | 224 | if not test: 225 | 226 | interp_var = nc_to.variables[interp_var_name] 227 | input_dimensions = interp_var.dimensions 228 | 229 | # filter out irrelevant dimensions 230 | dimensions = [x for x in var_order if x in input_dimensions] 231 | # create the mapping 232 | mapping = [dimensions.index(x) for x in input_dimensions] 233 | 234 | if mapping: 235 | var_new = np.transpose(var_new, mapping) 236 | 237 | interp_var[:] = var_new 238 | 239 | Mt, Mx, My, Mz = var_new.shape 240 | i = np.floor(Mx / 2) 241 | j = np.floor(My / 2) 242 | 243 | plt.figure() 244 | plt.imshow(thk_to - thk_from) 245 | plt.colorbar() 246 | 247 | plt.figure() 248 | plt.plot(var_old[0, i, j, :], z, label="old") 249 | plt.plot(var_new[0, i, j, :], z, label="new") 250 | plt.legend() 251 | 252 | if not test: 253 | nc_thk.close() 254 | nc_to.close() 255 | -------------------------------------------------------------------------------- /scripts/vraster2lineshapefile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (C) 2015-17 Bob McNabb, Andy Aschwanden 3 | 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | from osgeo import gdal 6 | import fiona 7 | from fiona.crs import from_epsg 8 | import numpy as np 9 | from netCDF4 import Dataset as NC 10 | import cftime 11 | import re 12 | from shapely.geometry import LineString, mapping 13 | import sys 14 | 15 | import logging 16 | import logging.handlers 17 | 18 | # create logger 19 | logger = logging.getLogger(__name__) 20 | 
logger.setLevel(logging.DEBUG)

# create file handler which logs even debug messages
fh = logging.handlers.RotatingFileHandler("extract.log")
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(module)s:%(lineno)d - %(message)s"
)

# add formatter to ch and fh
ch.setFormatter(formatter)
fh.setFormatter(formatter)

# add ch to logger
logger.addHandler(ch)
logger.addHandler(fh)


class GeoImgInfo:
    """
    Light-weight wrapper extracting geometry metadata (extent, spacing,
    projection, nodata value) from a GDAL raster.
    """

    def __init__(self, in_filename, in_dir="."):
        self.filename = in_filename
        self.in_dir_path = in_dir
        # BUG FIX: gdal.Open returns None on failure (it does not raise),
        # so the old bare try/except never triggered and failures surfaced
        # later as a confusing AttributeError. Fail loudly here instead.
        self.gd = gdal.Open(self.filename)
        if self.gd is None:
            raise IOError("could not open file %s" % self.filename)

        self.gt = self.gd.GetGeoTransform()
        self.proj = self.gd.GetProjection()
        self.npix_x = self.gd.RasterXSize
        self.npix_y = self.gd.RasterYSize
        # extent from the affine geotransform (gt[2]/gt[4] are the
        # rotation/shear terms, zero for north-up rasters)
        self.xmin = self.gt[0]
        self.xmax = self.gt[0] + self.npix_x * self.gt[1] + self.npix_y * self.gt[2]
        self.ymin = self.gt[3] + self.npix_x * self.gt[4] + self.npix_y * self.gt[5]
        self.ymax = self.gt[3]
        self.dx = self.gt[1]
        self.dy = self.gt[5]
        self.nodatavalue = self.gd.GetRasterBand(1).GetNoDataValue()
        self.RasterCount = self.gd.RasterCount


def getRasterBandArray(in_filename, BandNo=1):
    """Return band `BandNo` of the raster `in_filename` as a NumPy array."""

    gd = gdal.Open(in_filename)
    rb = gd.GetRasterBand(BandNo)
    return rb.ReadAsArray()


def get_dims(nc):
    """
    Gets dimensions from netcdf instance

    Parameters:
    -----------
    nc: netCDF instance

    Returns:
    --------
    xdim, ydim, zdim, tdim: dimensions (None where absent)
    """

    dims = list(nc.dimensions.keys())

    def pick(candidates):
        # keep the LAST matching candidate (matches the original loops,
        # where a later match overwrote an earlier one)
        found = None
        for dim in candidates:
            if dim in dims:
                found = dim
        return found

    # possible names for each dimension
    xdim = pick(["x", "x1"])
    ydim = pick(["y", "y1"])
    zdim = pick(["z", "z1"])
    tdim = pick(["t", "time"])
    return xdim, ydim, zdim, tdim


parser = ArgumentParser(
    formatter_class=ArgumentDefaultsHelpFormatter,
    description="Convert rasters containing (U,V) components of velocity field to vector line data.",
)
parser.add_argument("FILE", nargs=1)
parser.add_argument(
    "-U", "--Udata", dest="Udata", help="Raster containing x components of velocity"
)
parser.add_argument(
    "-V", "--Vdata", dest="Vdata", help="Raster containing y components of velocity"
)
parser.add_argument(
    "--Uerror",
    dest="Uerror",
    help="Raster containing x components of error",
    default=None,
)
parser.add_argument(
    "--Verror",
    dest="Verror",
    help="Raster containing y components of error",
    default=None,
)
parser.add_argument(
    "--epsg",
    dest="epsg",
    help="EPSG code of project. Overrides input projection",
    default=None,
)
parser.add_argument(
    "-s",
    "--scale_factor",
    type=float,
    dest="scale_factor",
    help="Scales length of line. Default=1.",
    default=1.0,
)
parser.add_argument(
    "-p",
    "--prune_factor",
    type=int,
    dest="prune_factor",
    help="Pruning. Only use every x-th value. Default=1",
    default=1,
)
Default=1", 161 | default=1, 162 | ) 163 | parser.add_argument( 164 | "-t", 165 | "--threshold", 166 | type=float, 167 | dest="threshold", 168 | help="Magnitude values smaller or equal than threshold will be masked. Default=None", 169 | default=0.0, 170 | ) 171 | 172 | 173 | args = parser.parse_args() 174 | prune_factor = args.prune_factor 175 | scale_factor = args.scale_factor 176 | threshold = args.threshold 177 | 178 | URasterInfo = GeoImgInfo(args.Udata) 179 | VRasterInfo = GeoImgInfo(args.Vdata) 180 | 181 | URasterCount = URasterInfo.RasterCount 182 | VRasterCount = VRasterInfo.RasterCount 183 | assert URasterCount == VRasterCount 184 | RasterCount = URasterCount 185 | RasterInfo = URasterInfo 186 | Ufill_value = URasterInfo.nodatavalue 187 | Vfill_value = VRasterInfo.nodatavalue 188 | 189 | gdi = gdal.Info(args.Udata) 190 | driver = re.search("Driver:[ \t]*([^\n\r]*)", gdi).group(1) 191 | 192 | # It would be nice to only use gdal and not netcdf4python 193 | tdim = None 194 | if driver == "netCDF/Network Common Data Format": 195 | 196 | nc_file_u = args.Udata.split(":")[1] 197 | nc = NC(nc_file_u, "r") 198 | xdim, ydim, zdim, tdim = get_dims(nc) 199 | 200 | if tdim: 201 | time = nc.variables[tdim] 202 | time_units = time.units 203 | time_calendar = time.calendar 204 | timestamps = cftime.num2date(time[:], time_units, calendar=time_calendar) 205 | has_time = True 206 | else: 207 | tdim = None 208 | nc.close() 209 | 210 | 211 | if args.epsg is None: 212 | crs = RasterInfo.proj 213 | else: 214 | crs = from_epsg(args.epsg) 215 | 216 | x = np.linspace(RasterInfo.xmin, RasterInfo.xmax, RasterInfo.npix_x) 217 | y = np.linspace(RasterInfo.ymin, RasterInfo.ymax, RasterInfo.npix_y) 218 | 219 | X, Y = np.meshgrid(x, np.flipud(y)) 220 | X = X[::prune_factor, ::prune_factor] 221 | Y = Y[::prune_factor, ::prune_factor] 222 | 223 | nx, ny = X.shape 224 | 225 | 226 | # create the schema (fields) and get the EPSG information for the dataset 227 | schema = { 228 | "properties": 
[ 229 | ("ux", "float"), 230 | ("uy", "float"), 231 | ("speed", "float"), 232 | ("ex", "float"), 233 | ("ey", "float"), 234 | ("timestamp", "str"), 235 | ], 236 | "geometry": "LineString", 237 | } 238 | 239 | 240 | # open the shapefile 241 | logger.info("Processing") 242 | with fiona.open( 243 | args.FILE[0], "w", crs=crs, driver="ESRI Shapefile", schema=schema 244 | ) as output: 245 | for k in range(RasterCount): 246 | if tdim is None: 247 | timestamp = "0-0-0" 248 | else: 249 | timestamp = timestamps[k] 250 | logger.info("Processing {}".format(timestamp)) 251 | print(("Processing {}".format(timestamp))) 252 | 253 | Ux = getRasterBandArray(args.Udata, BandNo=k + 1)[ 254 | ::prune_factor, ::prune_factor 255 | ] 256 | Uy = getRasterBandArray(args.Vdata, BandNo=k + 1)[ 257 | ::prune_factor, ::prune_factor 258 | ] 259 | 260 | Speed = np.sqrt(Ux**2 + Uy**2) 261 | 262 | prop_dict = {} 263 | prop_dict["ux"] = Ux 264 | prop_dict["uy"] = Uy 265 | prop_dict["speed"] = Speed 266 | 267 | # Read and add error of U component 268 | if args.Uerror is not None: 269 | Ex = getRasterBandArray(args.Uerror, BandNo=k + 1)[ 270 | ::prune_factor, ::prune_factor 271 | ] 272 | schema["properties"].append(("ex", "float")) 273 | prop_dict["ex"] = Ex 274 | else: 275 | prop_dict["ex"] = 0 * Ux 276 | # Read and add error of V component 277 | if args.Verror is not None: 278 | Ey = getRasterBandArray(args.Verror, BandNo=k + 1)[ 279 | ::prune_factor, ::prune_factor 280 | ] 281 | schema["properties"].append(("ey", "float")) 282 | prop_dict["ey"] = Ey 283 | else: 284 | prop_dict["ey"] = 0 * Uy 285 | 286 | # create features for each x,y pair, and give them the right properties 287 | m = 0 288 | for i in range(nx): 289 | for j in range(ny): 290 | if ( 291 | (Ux[i, j] != Ufill_value) 292 | & (Uy[i, j] != Vfill_value) 293 | & (Speed[i, j] > threshold) 294 | ): 295 | m += 1 296 | sys.stdout.write("\r") 297 | # Center cooridinates 298 | x_c, y_c = X[i, j], Y[i, j] 299 | # Start point 300 | x_a, y_a = ( 
301 | X[i, j] - scale_factor * Ux[i, j] / 2, 302 | Y[i, j] - scale_factor * Uy[i, j] / 2, 303 | ) 304 | # End point 305 | x_e, y_e = ( 306 | X[i, j] + scale_factor * Ux[i, j] / 2, 307 | Y[i, j] + scale_factor * Uy[i, j] / 2, 308 | ) 309 | # Create LineString 310 | line = LineString([[x_a, y_a], [x_c, y_c], [x_e, y_e]]) 311 | line_dict = dict( 312 | [(k, float(v[i, j])) for (k, v) in prop_dict.items()] 313 | ) 314 | line_dict["timestamp"] = str(timestamp) 315 | output.write({"properties": line_dict, "geometry": mapping(line)}) 316 | 317 | print(" {} points found and written".format(str(m))) 318 | 319 | print("Done writing {}".format(args.FILE[0])) 320 | # close the shapefile now that we're all done 321 | output.close() 322 | -------------------------------------------------------------------------------- /scripts/extract_interface.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy as np 4 | from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter 5 | from netCDF4 import Dataset as NC 6 | import cftime 7 | from osgeo import gdal 8 | from osgeo import ogr 9 | from osgeo import osr 10 | import os 11 | import logging 12 | import logging.handlers 13 | import pandas as pd 14 | 15 | # create logger 16 | logger = logging.getLogger(__name__) 17 | logger.setLevel(logging.DEBUG) 18 | 19 | # create file handler which logs even debug messages 20 | fh = logging.handlers.RotatingFileHandler("extract.log") 21 | fh.setLevel(logging.DEBUG) 22 | # create console handler with a higher log level 23 | ch = logging.StreamHandler() 24 | ch.setLevel(logging.INFO) 25 | # create formatter 26 | formatter = logging.Formatter("%(message)s") 27 | 28 | # add formatter to ch and fh 29 | ch.setFormatter(formatter) 30 | fh.setFormatter(formatter) 31 | 32 | # add ch to logger 33 | logger.addHandler(ch) 34 | logger.addHandler(fh) 35 | 36 | dtype_dict = { 37 | np.dtype("O"): ogr.OFTString, 38 | 
np.dtype("float64"): ogr.OFTReal, 39 | np.dtype("int64"): ogr.OFTInteger, 40 | np.dtype("bool"): ogr.OFTBinary, 41 | str: ogr.OFTString, 42 | } 43 | 44 | 45 | def create_memory_layer(dst_fieldname): 46 | """ 47 | Create a in-memory layer with 1 OFTInteger field 48 | """ 49 | 50 | srs = None 51 | if src_ds.GetProjectionRef() != "": 52 | srs = osr.SpatialReference() 53 | srs.ImportFromWkt(src_ds.GetProjection()) 54 | 55 | layer = mem_ds.CreateLayer("poly", srs, ogr.wkbMultiPolygon) 56 | 57 | fd = ogr.FieldDefn(dst_fieldname, ogr.OFTInteger) 58 | layer.CreateField(fd) 59 | dst_field = 0 60 | 61 | return layer, dst_field 62 | 63 | 64 | if __name__ == "__main__": 65 | 66 | parser = ArgumentParser( 67 | formatter_class=ArgumentDefaultsHelpFormatter, 68 | description="""A script to extract interfaces (calving front, ice-ocean, or groundling line) from a PISM netCDF file, and save it as a shapefile (polygon).""", 69 | ) 70 | parser.add_argument("FILE", nargs=1) 71 | parser.add_argument( 72 | "-a", 73 | "--area_threshold", 74 | dest="area_threshold", 75 | type=float, 76 | help="Only save features with an area > area_threshold", 77 | default=200, 78 | ) 79 | parser.add_argument( 80 | "-e", "--epsg", dest="epsg", type=int, help="Sets EPSG code", default=None 81 | ) 82 | parser.add_argument( 83 | "-l", 84 | "--level", 85 | dest="level", 86 | type=float, 87 | help="Which contour level to extract. Used in combination with 'contour'", 88 | default=1000, 89 | ) 90 | parser.add_argument( 91 | "-o", 92 | "--output_filename", 93 | dest="out_file", 94 | help="Name of the output shape file", 95 | default="interface.shp", 96 | ) 97 | parser.add_argument( 98 | "--ensemble_file", 99 | dest="ensemble_file", 100 | help="CSV file. 
        If given, add parameter values as attributes",
        default=None,
    )
    parser.add_argument(
        "-m",
        "--mask_variable",
        dest="dst_fieldname",
        help="Name of variable to use",
        default="mask",
    )
    parser.add_argument(
        "-s",
        "--step",
        dest="step",
        type=int,
        help="Only extract every step value",
        default=1,
    )
    parser.add_argument(
        "-t",
        "--type",
        dest="extract_type",
        choices=[
            "calving_front",
            "grounded_floating",
            "ice_noice",
            "ice_ocean",
            "grounding_line",
            "ela",
            "contour",
            "sftgif",
        ],
        help="Interface to extract.",
        default="ice_ocean",
    )

    options = parser.parse_args()
    filename = options.FILE[0]
    area_threshold = options.area_threshold
    epsg = options.epsg
    ensemble_file = options.ensemble_file
    extract_type = options.extract_type
    level = options.level
    shp_filename = options.out_file
    ts_fieldname = "timestamp"
    dst_fieldname = options.dst_fieldname
    step = options.step

    # Optional CSV of ensemble parameters, keyed by run id (see use below).
    if ensemble_file:
        e_df = pd.read_csv(ensemble_file)

    nc = NC(filename, "r")
    xdim = "x"
    ydim = "y"
    zdim = "z"
    tdim = "time"

    # NOTE(review): tdim is the literal string "time", which is always
    # truthy, so the else branch below is unreachable as written.
    if tdim:
        time = nc.variables[tdim]  # KeyError if the file has no "time" variable -- TODO confirm inputs
        time_units = time.units
        time_calendar = time.calendar  # AttributeError if the attribute is absent -- TODO confirm
        timestamps = cftime.num2date(time[:], time_units, time_calendar)
        has_time = True  # NOTE(review): not read anywhere in this chunk
    else:
        tdim = None

    if ensemble_file:
        run_id = nc.id  # global "id" attribute identifies the ensemble member
    nc.close()

    # Open the mask variable through GDAL's netCDF driver.
    src_ds = gdal.Open("NETCDF:{}:{}".format(filename, dst_fieldname))

    # Get Memory Driver
    mem_driver = ogr.GetDriverByName("Memory")
    mem_ds = mem_driver.CreateDataSource("memory_layer")

    # Get SHP Driver
    # NOTE(review): despite the names, the output driver is GeoPackage
    # ("GPKG"), not ESRI Shapefile.
    shp_driver = ogr.GetDriverByName("GPKG")
    if os.path.exists(shp_filename):
        os.remove(shp_filename)
    # (assignment completed in the next chunk)
    dst_ds =
shp_driver.CreateDataSource(shp_filename) 181 | 182 | srs = None 183 | if src_ds.GetProjectionRef() != "": 184 | srs = osr.SpatialReference() 185 | srs.ImportFromWkt(src_ds.GetProjection()) 186 | 187 | if epsg is not None: 188 | srs = osr.SpatialReference() 189 | srs.ImportFromEPSG(epsg) 190 | 191 | interface_layer = dst_ds.CreateLayer("interface", srs, ogr.wkbPolygon) 192 | fd = ogr.FieldDefn(ts_fieldname, ogr.OFTString) 193 | interface_layer.CreateField(fd) 194 | fd = ogr.FieldDefn("area", ogr.OFTInteger) 195 | interface_layer.CreateField(fd) 196 | fd = ogr.FieldDefn("timestep", ogr.OFTDateTime) 197 | interface_layer.CreateField(fd) 198 | if ensemble_file: 199 | print("Creating additional fields") 200 | for field in e_df.keys(): 201 | fd = ogr.FieldDefn(field, dtype_dict[e_df[field].dtype]) 202 | interface_layer.CreateField(fd) 203 | 204 | interface_dst_field = 0 205 | 206 | bufferDist = 1 207 | if extract_type in ("calving_front"): 208 | a_value = 4 209 | b_value = 3 210 | elif extract_type in ("grounded_floating"): 211 | a_value = 3 212 | b_value = 2 213 | elif extract_type in ("ice_ocean"): 214 | a_value = 4 215 | b_value = [2, 3] 216 | elif extract_type in ("ice_noice", "sftgif"): 217 | a_value = 1 218 | b_value = 0 219 | elif extract_type in ("grounding_line"): 220 | a_value = 2 221 | b_value = [0, 3, 4] 222 | elif extract_type in ("ela"): 223 | a_value = 0 224 | b_value = 0 225 | elif extract_type in ("contour"): 226 | a_value = level 227 | b_value = level 228 | else: 229 | print(("Type {} not recognized".format(extact_type))) 230 | import sys 231 | 232 | sys.exit(0) 233 | 234 | time_step = 0 235 | for k in np.arange(0, src_ds.RasterCount, step): 236 | 237 | if tdim is None: 238 | timestamp = "0-0-0" 239 | else: 240 | timestamp = timestamps[k] 241 | logger.info("Processing {}".format(timestamp)) 242 | srcband = src_ds.GetRasterBand(int(k + 1)) 243 | poly_layer, dst_field = create_memory_layer(dst_fieldname) 244 | logger.debug("Running gdal.Polygonize()") 
        # Vectorize the current band; "result" (the gdal error code) is not
        # checked afterwards.
        result = gdal.Polygonize(
            srcband, None, poly_layer, dst_field, [], callback=gdal.TermProgress
        )
        # Select the "A" side of the interface: for "ela"/"contour" every
        # polygon above the level, otherwise polygons equal to a_value.
        if extract_type in ["ela", "contour"]:
            poly_layer.SetAttributeFilter("{} > {}".format(dst_fieldname, b_value))
        else:
            poly_layer.SetAttributeFilter("{} = {}".format(dst_fieldname, a_value))
        logger.debug("Extracting interface A")
        a_layer, dst_field = create_memory_layer(dst_fieldname)
        featureDefn = a_layer.GetLayerDefn()
        # Buffer each selected polygon by bufferDist so that the A and B
        # selections overlap along their shared boundary; the overlap is
        # carved out by Clip() below.
        for m, feature in enumerate(poly_layer):  # NOTE(review): m is unused
            ingeom = feature.GetGeometryRef()
            geomBuffer = ingeom.Buffer(bufferDist)

            outFeature = ogr.Feature(featureDefn)
            outFeature.SetGeometry(geomBuffer)
            a_layer.CreateFeature(outFeature)

        # Select the "B" side of the interface (possibly several mask values).
        if extract_type in ["grounding_line"]:
            poly_layer.SetAttributeFilter(
                "{dn} = {val1} OR {dn} = {val2} OR {dn} = {val3}".format(
                    dn=dst_fieldname, val1=b_value[0], val2=b_value[1], val3=b_value[2]
                )
            )
        elif extract_type in ["ice_ocean"]:
            poly_layer.SetAttributeFilter(
                "{dn} = {val1} OR {dn} = {val2}".format(
                    dn=dst_fieldname, val1=b_value[0], val2=b_value[1]
                )
            )
        elif extract_type in ["ela", "contour"]:
            poly_layer.SetAttributeFilter("{} < {}".format(dst_fieldname, b_value))
        else:
            poly_layer.SetAttributeFilter("{} = {}".format(dst_fieldname, b_value))
        logger.debug("Extracting interface B")
        b_layer, dst_field = create_memory_layer(dst_fieldname)
        featureDefn = b_layer.GetLayerDefn()
        for m, feature in enumerate(poly_layer):  # NOTE(review): m is unused
            ingeom = feature.GetGeometryRef()
            geomBuffer = ingeom.Buffer(bufferDist)

            outFeature = ogr.Feature(featureDefn)
            outFeature.SetGeometry(geomBuffer)
            b_layer.CreateFeature(outFeature)

        # Now clip layers: the intersection of the buffered A and B polygons
        # is the interface band written out below.
        logger.debug("Clipping A and B")
        tmp_layer, dst_field = create_memory_layer(dst_fieldname)
        a_layer.Clip(b_layer, tmp_layer)
        poly_layer  # (start of "poly_layer = None"; completed in the next chunk)
= None 295 | a_layer = None 296 | b_layer = None 297 | 298 | logger.info("Saving results") 299 | featureDefn = interface_layer.GetLayerDefn() 300 | for feature in tmp_layer: 301 | # create a new feature 302 | outFeature = ogr.Feature(featureDefn) 303 | outFeature.SetGeometry(feature.GetGeometryRef()) 304 | i = outFeature.GetFieldIndex("timestep") 305 | outFeature.SetField(i, int(time_step)) 306 | i = outFeature.GetFieldIndex(ts_fieldname) 307 | outFeature.SetField(i, timestamp.strftime()) 308 | geom = feature.GetGeometryRef() 309 | area = geom.GetArea() 310 | i = outFeature.GetFieldIndex("area") 311 | outFeature.SetField(i, int(area)) 312 | if ensemble_file: 313 | df = e_df[e_df["id"] == int(run_id)] 314 | for d in df.items(): 315 | key = d[0] 316 | val = d[1].values[0] 317 | i = outFeature.GetFieldIndex(key) 318 | print(i, key, val) 319 | outFeature.SetField(i, str(val)) 320 | # add the feature to the output layer 321 | if area >= area_threshold: 322 | interface_layer.CreateFeature(outFeature) 323 | 324 | time_step += 1 325 | 326 | # Clean-up 327 | poly_layer = None 328 | interface_layer = None 329 | mem_ds = None 330 | src_ds = None 331 | -------------------------------------------------------------------------------- /scripts/qgis_colorramp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | import matplotlib 4 | import pylab as plt 5 | import matplotlib as mpl 6 | from argparse import ArgumentParser 7 | 8 | try: 9 | from pypismtools import gmtColormap 10 | except: 11 | from pypismtools.pypismtools import gmtColormap 12 | 13 | 14 | def cmap_map(function, cmap): 15 | """ 16 | Applies function (which should operate on vectors of shape 3: 17 | [r, g, b], on colormap cmap. This routine will break any discontinuous points 18 | in a colormap. 
19 | 20 | Adapted from http://www.scipy.org/Cookbook/Matplotlib/ColormapTransformations 21 | """ 22 | cdict = cmap._segmentdata 23 | step_dict = {} 24 | # First get the list of points where the segments start or end 25 | for key in ("red", "green", "blue"): 26 | step_dict[key] = [x[0] for x in cdict[key]] 27 | step_list = sum(list(step_dict.values()), []) 28 | step_list = np.array(list(set(step_list))) 29 | # Then compute the LUT, and apply the function to the LUT 30 | 31 | def reduced_cmap(step): 32 | return np.array(cmap(step)[0:3]) 33 | 34 | old_LUT = np.array(list(map(reduced_cmap, step_list))) 35 | new_LUT = np.array(list(map(function, old_LUT))) 36 | # Now try to make a minimal segment definition of the new LUT 37 | cdict = {} 38 | for i, key in enumerate(("red", "green", "blue")): 39 | this_cdict = {} 40 | for j, step in enumerate(step_list): 41 | if step in step_dict[key]: 42 | this_cdict[step] = new_LUT[j, i] 43 | elif new_LUT[j, i] != old_LUT[j, i]: 44 | this_cdict[step] = new_LUT[j, i] 45 | colorvector = sorted([x + (x[1],) for x in list(this_cdict.items())]) 46 | cdict[key] = colorvector 47 | 48 | return mpl.colors.LinearSegmentedColormap("colormap") 49 | 50 | 51 | # Set up the option parser 52 | parser = ArgumentParser() 53 | parser.description = """ 54 | A script to convert a GMT (*.cpt) colormap or matplotlib colormap into QGIS-readable color ramp. 
55 | """ 56 | parser.add_argument("FILE", nargs=1) 57 | parser.add_argument( 58 | "--tick_format", 59 | dest="tick_format", 60 | help="Overwrite ormat of the tick marks.", 61 | default=None, 62 | ) 63 | parser.add_argument("--font_size", dest="font_size", help="Font size", default=12) 64 | parser.add_argument( 65 | "--type", 66 | dest="colorbar_type", 67 | choices=[ 68 | "linear", 69 | "log_speed_10_1500", 70 | "log_speed_10_3000", 71 | "log_speed_j", 72 | "log_speed_2", 73 | "log_speed_3", 74 | "log_speed_4", 75 | "gris_bath_topo", 76 | "gris_bath_topo_2", 77 | "gris_bath_topo_3", 78 | "gris_topo", 79 | "log_speed_m_day", 80 | ], 81 | help="Type of colorbar", 82 | default="linear", 83 | ) 84 | parser.add_argument("--ticks", dest="fticks", nargs="*", type=float, help="tick marks", default=None) 85 | parser.add_argument( 86 | "--colorbar_extend", 87 | dest="cb_extend", 88 | choices=["neither", "both", "min", "max"], 89 | help="""Extend of colorbar. Default='both'.""", 90 | default="both", 91 | ) 92 | parser.add_argument( 93 | "--colorbar_label", 94 | dest="colorbar_label", 95 | help="""Label for colorbar.""", 96 | default=None, 97 | ) 98 | parser.add_argument( 99 | "--vmin", 100 | dest="vmin", 101 | type=float, 102 | help=""" 103 | Vmin""", 104 | default=1, 105 | ) 106 | parser.add_argument( 107 | "--vmax", 108 | dest="vmax", 109 | type=float, 110 | help=""" 111 | Vmax)""", 112 | default=3000, 113 | ) 114 | parser.add_argument( 115 | "--extend", 116 | dest="extend", 117 | nargs=2, 118 | type=float, 119 | help=""" 120 | appends color ramp by repeating first and last color for value""", 121 | default=None, 122 | ) 123 | parser.add_argument( 124 | "--N", 125 | dest="N", 126 | type=int, 127 | help=""" 128 | a * logspace(vmin, vmax, N""", 129 | default=1022, 130 | ) 131 | parser.add_argument( 132 | "-r", 133 | "--reverse", 134 | dest="reverse", 135 | action="store_true", 136 | help="reverse color scale", 137 | default=False, 138 | ) 139 | parser.add_argument( 140 | 
class nlcmap(object):
    """Wrap a colormap so it is sampled on a non-linear set of levels.

    Values are mapped piecewise-linearly from the given *levels* onto an
    evenly spaced grid spanning [0, max(levels)], and the wrapped colormap
    is then evaluated on the normalized result.
    """

    def __init__(self, cmap, levels):
        lv = np.asarray(levels, dtype="float64")
        self.cmap = cmap
        self.levels = lv
        self._x = lv
        self.levmax = lv.max()
        self.transformed_levels = np.linspace(0.0, self.levmax, len(lv))

    def __call__(self, xi, alpha=1.0, **kw):
        # Interpolate the input onto the uniform grid, then normalize to
        # [0, 1] before handing it to the underlying colormap.
        mapped = np.interp(xi, self._x, self.transformed_levels)
        return self.cmap(mapped / self.levmax, alpha)
    mpl.colors.LogNorm(vmin=10, vmax=3000)  # NOTE(review): RHS of "norm =", split across chunks
    cb_extend = cb_extend  # no-op self-assignment
    colorbar_label = "m yr$^{-1}$"  # NOTE(review): unconditionally overrides --colorbar_label
    format = "%2.0f"  # shadows the builtin "format"
    ticks = [0, 10, 100, 300, 1000, 3000]  # NOTE(review): 0 is not representable on a LogNorm axis
elif colorbar_type in ("log_speed_10_1500"):
    # NOTE(review): `in ("...")` with a single parenthesized string is a
    # SUBSTRING test, not tuple membership; it only behaves like equality
    # because argparse `choices` restricts colorbar_type. The same pattern
    # recurs in every branch below.
    data_values = np.linspace(vmin, vmax, N)
    norm = mpl.colors.LogNorm(vmin=10, vmax=1500)
    cb_extend = cb_extend
    if colorbar_label is None:  # unlike 10_3000 above, respects a user-given label
        colorbar_label = "m yr$^{-1}$"
    format = "%2.0f"
    ticks = [0, 10, 100, 300, 1000, 3000]
elif colorbar_type in ("gris_bath_topo"):
    # Greenland bathymetry/topography presets: fixed range, ignores --vmin/--vmax.
    vmin = -800
    vmax = 3000
    data_values = np.linspace(vmin, vmax, N)
    N = len(data_values)
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    cb_extend = "both"
    format = "%i"
    ticks = [vmin, 0, 1000, 2000, vmax]
elif colorbar_type in ("gris_bath_topo_2"):
    vmin = -1500
    vmax = 2000
    data_values = np.linspace(vmin, vmax, N)
    N = len(data_values)
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    cb_extend = "both"
    if colorbar_label is None:
        colorbar_label = "m a.s.l."
    format = "%i"
    ticks = [vmin, -750, 0, 1000, 2000, vmax]
elif colorbar_type in ("gris_bath_topo_3"):
    vmin = -2000
    vmax = 2000
    data_values = np.linspace(vmin, vmax, N)
    N = len(data_values)
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    cb_extend = "both"
    if colorbar_label is None:
        colorbar_label = "m a.s.l."
    format = "%i"
    ticks = [vmin, -1000, 0, 1000, 2000, vmax]
elif colorbar_type in ("gris_topo"):
    vmin = 0
    vmax = 2000
    data_values = np.linspace(vmin, vmax, N)
    N = len(data_values)
    norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
    cb_extend = "both"
    if colorbar_label is None:
        colorbar_label = "m a.s.l."
    format = "%i"
    ticks = [vmin, 0, 1000, 2000, vmax]  # NOTE(review): vmin == 0 here, so 0 appears twice
    cmap.set_under("#2171b5")  # below-range values rendered in this blue
elif colorbar_type in ("log_speed_j", "log_speed_3"):
    # NOTE(review): magic slice [0:889] -- presumably tuned to the default
    # N=1022 to truncate the ramp near 10^2.5; TODO confirm intent.
    data_values = np.logspace(-1, 3, N)[0:889]
    data_values[-1] = vmax
    N = len(data_values)
    norm = mpl.colors.LogNorm(vmin=1, vmax=vmax)
    cb_extend = "both"
    format = "%i"
    ticks = [1, 3, 10, 30, 100, 300, 1000, 3000]
elif colorbar_type in ("log_speed_2"):
    data_values = np.logspace(-1, 2, N)[0:889]
    data_values[-1] = vmax
    N = len(data_values)
    norm = mpl.colors.LogNorm(vmin=1, vmax=vmax)
    cb_extend = "both"
    format = "%i"
    ticks = [1, 3, 10, 30, 100, 300]
elif colorbar_type in ("log_speed_4"):
    data_values = np.logspace(-1, 4, N)[0:889]
    data_values[-1] = vmax
    N = len(data_values)
    norm = mpl.colors.LogNorm(vmin=1, vmax=vmax)
    cb_extend = "both"
    format = "%i"
    ticks = [1, 3, 10, 30, 100, 300, 1000, 3000, 10000]
elif colorbar_type in ("log_speed_m_day"):
    # NOTE(review): vmin/vmax are used as EXPONENTS by logspace but as data
    # limits by LogNorm -- inconsistent; confirm the intended usage.
    data_values = np.logspace(vmin, vmax, N)[0:889]
    data_values[-1] = vmax
    N = len(data_values)
    norm = mpl.colors.LogNorm(vmin=vmin, vmax=vmax)
    cb_extend = "both"
    format = "%2.2f"
    ticks = [0.01, 0.1, 0.5, 1, 5, 10]
else:
    # NOTE(review): unreachable via argparse choices; if it ever ran,
    # data_values/norm/ticks would stay undefined and the ColorbarBase call
    # further below would raise NameError.
    pass

if tick_format is not None:
    format = tick_format


# you could apply a function to the colormap, e.g.
# to desaturate the colormap:
# cmap = cmap_map(lambda x: x/2+0.5, cmap)
matplotlib.rc("font", **{"sans-serif": ["Helvetica"]})  # "size": fontsize}
matplotlib.rc("font", **{"size": font_size})
# create the colorbar
fig = plt.figure()
if orientation == "horizontal":
    ax1 = fig.add_axes([0.0, 0.5, 0.5, 0.03])
else:
    ax1 = fig.add_axes([0.05, 0.05, 0.03, 0.65])
# --ticks overrides any branch-specific tick positions chosen above
if fticks is not None:
    ticks = fticks

cb1 = mpl.colorbar.ColorbarBase(
    ax1,
    cmap=cmap,
    norm=norm,
    ticks=ticks,
    format=format,
    extend=cb_extend,
    spacing="proportional",
    orientation=orientation,
)

if colorbar_label:
    cb1.set_label(colorbar_label)


# save high-res colorbar as png
# NOTE(review): the loop variable shadows the tick-format variable "format"
# (harmless only because "format" is not used again), and "prefix" is
# mutated each iteration, so more than one entry would nest suffixes.
for format in ["png"]:
    prefix = prefix + "_" + orientation
    out_file = ".".join([prefix, format])
    print((" writing colorbar %s ..." % out_file))
    fig.savefig(out_file, bbox_inches="tight", dpi=1200, transparent=True)

# convert to RGBA array
rgba = cb1.to_rgba(data_values, alpha=None)
# QGIS wants 0..255
rgba *= 255

# create an output array combining data values and rgb values
# Columns: value, R, G, B, alpha (alpha fixed at 255).
if extend:
    # --extend adds one row before and after that repeat the end colors.
    qgis_array = np.zeros((N + 2, 5))
    for k in range(0, N):
        qgis_array[k + 1, 0] = data_values[k]
        qgis_array[k + 1, 1:4] = rgba[k, 0:3]
        qgis_array[k + 1, 4] = 255
    # repeat first color
    qgis_array[0, 0] = extend[0]
    qgis_array[0, 1:4] = rgba[0, 0:3]
    qgis_array[0, 4] = 255
    # repeat last color
    qgis_array[-1, 0] = extend[1]
    qgis_array[-1, 1:4] = rgba[-1, 0:3]
    qgis_array[-1, 4] = 255
else:
    qgis_array = np.zeros((N, 5))
    for k in range(N):
        qgis_array[k, 0] = data_values[k]
        qgis_array[k, 1:4] = rgba[k, 0:3]
        qgis_array[k, 4] = 255

# save as ascii file
out_file = ".".join([prefix, "txt"])
# (the print call below continues in the next chunk)
print((" 
writing colorramp %s ..." % out_file)) 363 | np.savetxt(out_file, qgis_array, delimiter=",", fmt=["%10.5f", "%i", "%i", "%i", "%i,"]) 364 | -------------------------------------------------------------------------------- /colormaps/BlueYellowRed.cpt: -------------------------------------------------------------------------------- 1 | # ../cpt/ncar/BlueYellowRed.cpt 2 | # autogenerated GMT palette "BlueYellowRed.txt" 3 | # cptutils version 1.41, Fri Jan 20 21:02:00 2012 4 | # COLOR_MODEL = RGB 5 | 0.000000e+00 5 35 80 1.000000e+00 5 35 80 6 | 1.000000e+00 5 37 83 2.000000e+00 5 37 83 7 | 2.000000e+00 5 39 86 3.000000e+00 5 39 86 8 | 3.000000e+00 5 40 89 4.000000e+00 5 40 89 9 | 4.000000e+00 5 42 92 5.000000e+00 5 42 92 10 | 5.000000e+00 6 44 95 6.000000e+00 6 44 95 11 | 6.000000e+00 6 46 98 7.000000e+00 6 46 98 12 | 7.000000e+00 6 48 100 8.000000e+00 6 48 100 13 | 8.000000e+00 6 49 103 9.000000e+00 6 49 103 14 | 9.000000e+00 6 51 106 1.000000e+01 6 51 106 15 | 1.000000e+01 6 53 109 1.100000e+01 6 53 109 16 | 1.100000e+01 6 55 112 1.200000e+01 6 55 112 17 | 1.200000e+01 6 57 115 1.300000e+01 6 57 115 18 | 1.300000e+01 7 59 118 1.400000e+01 7 59 118 19 | 1.400000e+01 7 60 121 1.500000e+01 7 60 121 20 | 1.500000e+01 7 62 124 1.600000e+01 7 62 124 21 | 1.600000e+01 7 64 127 1.700000e+01 7 64 127 22 | 1.700000e+01 7 66 130 1.800000e+01 7 66 130 23 | 1.800000e+01 7 68 133 1.900000e+01 7 68 133 24 | 1.900000e+01 7 69 136 2.000000e+01 7 69 136 25 | 2.000000e+01 7 71 138 2.100000e+01 7 71 138 26 | 2.100000e+01 7 73 141 2.200000e+01 7 73 141 27 | 2.200000e+01 8 75 144 2.300000e+01 8 75 144 28 | 2.300000e+01 8 77 147 2.400000e+01 8 77 147 29 | 2.400000e+01 8 78 150 2.500000e+01 8 78 150 30 | 2.500000e+01 8 82 156 2.600000e+01 8 82 156 31 | 2.600000e+01 10 84 158 2.700000e+01 10 84 158 32 | 2.700000e+01 12 87 159 2.800000e+01 12 87 159 33 | 2.800000e+01 15 89 161 2.900000e+01 15 89 161 34 | 2.900000e+01 17 92 163 3.000000e+01 17 92 163 35 | 3.000000e+01 19 94 164 3.100000e+01 
19 94 164 36 | 3.100000e+01 21 97 166 3.200000e+01 21 97 166 37 | 3.200000e+01 24 99 168 3.300000e+01 24 99 168 38 | 3.300000e+01 26 102 169 3.400000e+01 26 102 169 39 | 3.400000e+01 28 104 171 3.500000e+01 28 104 171 40 | 3.500000e+01 30 107 173 3.600000e+01 30 107 173 41 | 3.600000e+01 33 109 174 3.700000e+01 33 109 174 42 | 3.700000e+01 35 112 176 3.800000e+01 35 112 176 43 | 3.800000e+01 37 114 178 3.900000e+01 37 114 178 44 | 3.900000e+01 39 116 179 4.000000e+01 39 116 179 45 | 4.000000e+01 41 119 181 4.100000e+01 41 119 181 46 | 4.100000e+01 44 121 182 4.200000e+01 44 121 182 47 | 4.200000e+01 46 124 184 4.300000e+01 46 124 184 48 | 4.300000e+01 48 126 186 4.400000e+01 48 126 186 49 | 4.400000e+01 50 129 187 4.500000e+01 50 129 187 50 | 4.500000e+01 53 131 189 4.600000e+01 53 131 189 51 | 4.600000e+01 55 134 191 4.700000e+01 55 134 191 52 | 4.700000e+01 57 136 192 4.800000e+01 57 136 192 53 | 4.800000e+01 59 139 194 4.900000e+01 59 139 194 54 | 4.900000e+01 62 141 196 5.000000e+01 62 141 196 55 | 5.000000e+01 66 146 199 5.100000e+01 66 146 199 56 | 5.100000e+01 68 147 200 5.200000e+01 68 147 200 57 | 5.200000e+01 69 149 200 5.300000e+01 69 149 200 58 | 5.300000e+01 71 150 201 5.400000e+01 71 150 201 59 | 5.400000e+01 72 152 201 5.500000e+01 72 152 201 60 | 5.500000e+01 74 153 202 5.600000e+01 74 153 202 61 | 5.600000e+01 76 155 202 5.700000e+01 76 155 202 62 | 5.700000e+01 77 156 203 5.800000e+01 77 156 203 63 | 5.800000e+01 79 157 204 5.900000e+01 79 157 204 64 | 5.900000e+01 81 159 204 6.000000e+01 81 159 204 65 | 6.000000e+01 82 160 205 6.100000e+01 82 160 205 66 | 6.100000e+01 84 162 205 6.200000e+01 84 162 205 67 | 6.200000e+01 85 163 206 6.300000e+01 85 163 206 68 | 6.300000e+01 87 165 207 6.400000e+01 87 165 207 69 | 6.400000e+01 89 166 207 6.500000e+01 89 166 207 70 | 6.500000e+01 90 167 208 6.600000e+01 90 167 208 71 | 6.600000e+01 92 169 208 6.700000e+01 92 169 208 72 | 6.700000e+01 93 170 209 6.800000e+01 93 170 209 73 | 6.800000e+01 95 172 209 
6.900000e+01 95 172 209 74 | 6.900000e+01 97 173 210 7.000000e+01 97 173 210 75 | 7.000000e+01 98 174 211 7.100000e+01 98 174 211 76 | 7.100000e+01 100 176 211 7.200000e+01 100 176 211 77 | 7.200000e+01 102 177 212 7.300000e+01 102 177 212 78 | 7.300000e+01 103 179 212 7.400000e+01 103 179 212 79 | 7.400000e+01 105 180 213 7.500000e+01 105 180 213 80 | 7.500000e+01 106 182 213 7.600000e+01 106 182 213 81 | 7.600000e+01 108 183 214 7.700000e+01 108 183 214 82 | 7.700000e+01 110 184 215 7.800000e+01 110 184 215 83 | 7.800000e+01 113 186 215 7.900000e+01 113 186 215 84 | 7.900000e+01 115 187 216 8.000000e+01 115 187 216 85 | 8.000000e+01 118 189 216 8.100000e+01 118 189 216 86 | 8.100000e+01 120 190 217 8.200000e+01 120 190 217 87 | 8.200000e+01 122 192 218 8.300000e+01 122 192 218 88 | 8.300000e+01 125 193 218 8.400000e+01 125 193 218 89 | 8.400000e+01 127 194 219 8.500000e+01 127 194 219 90 | 8.500000e+01 129 196 220 8.600000e+01 129 196 220 91 | 8.600000e+01 132 197 220 8.700000e+01 132 197 220 92 | 8.700000e+01 134 199 221 8.800000e+01 134 199 221 93 | 8.800000e+01 137 200 221 8.900000e+01 137 200 221 94 | 8.900000e+01 139 202 222 9.000000e+01 139 202 222 95 | 9.000000e+01 141 203 223 9.100000e+01 141 203 223 96 | 9.100000e+01 144 204 223 9.200000e+01 144 204 223 97 | 9.200000e+01 146 206 224 9.300000e+01 146 206 224 98 | 9.300000e+01 149 207 224 9.400000e+01 149 207 224 99 | 9.400000e+01 151 209 225 9.500000e+01 151 209 225 100 | 9.500000e+01 153 210 226 9.600000e+01 153 210 226 101 | 9.600000e+01 156 211 226 9.700000e+01 156 211 226 102 | 9.700000e+01 158 213 227 9.800000e+01 158 213 227 103 | 9.800000e+01 160 214 228 9.900000e+01 160 214 228 104 | 9.900000e+01 163 216 228 1.000000e+02 163 216 228 105 | 1.000000e+02 165 217 229 1.010000e+02 165 217 229 106 | 1.010000e+02 170 220 230 1.020000e+02 170 220 230 107 | 1.020000e+02 172 221 231 1.030000e+02 172 221 231 108 | 1.030000e+02 174 222 232 1.040000e+02 174 222 232 109 | 1.040000e+02 176 223 233 1.050000e+02 
176 223 233 110 | 1.050000e+02 178 224 234 1.060000e+02 178 224 234 111 | 1.060000e+02 179 225 235 1.070000e+02 179 225 235 112 | 1.070000e+02 181 226 236 1.080000e+02 181 226 236 113 | 1.080000e+02 183 227 237 1.090000e+02 183 227 237 114 | 1.090000e+02 185 228 238 1.100000e+02 185 228 238 115 | 1.100000e+02 187 229 239 1.110000e+02 187 229 239 116 | 1.110000e+02 189 230 240 1.120000e+02 189 230 240 117 | 1.120000e+02 191 231 241 1.130000e+02 191 231 241 118 | 1.130000e+02 193 232 242 1.140000e+02 193 232 242 119 | 1.140000e+02 195 233 243 1.150000e+02 195 233 243 120 | 1.150000e+02 196 233 243 1.160000e+02 196 233 243 121 | 1.160000e+02 198 234 244 1.170000e+02 198 234 244 122 | 1.170000e+02 200 235 245 1.180000e+02 200 235 245 123 | 1.180000e+02 202 236 246 1.190000e+02 202 236 246 124 | 1.190000e+02 204 237 247 1.200000e+02 204 237 247 125 | 1.200000e+02 206 238 248 1.210000e+02 206 238 248 126 | 1.210000e+02 208 239 249 1.220000e+02 208 239 249 127 | 1.220000e+02 210 240 250 1.230000e+02 210 240 250 128 | 1.230000e+02 211 241 251 1.240000e+02 211 241 251 129 | 1.240000e+02 213 242 252 1.250000e+02 213 242 252 130 | 1.250000e+02 215 243 253 1.260000e+02 215 243 253 131 | 1.260000e+02 219 245 255 1.270000e+02 219 245 255 132 | 1.270000e+02 255 255 200 1.280000e+02 255 255 200 133 | 1.280000e+02 255 254 197 1.290000e+02 255 254 197 134 | 1.290000e+02 255 253 193 1.300000e+02 255 253 193 135 | 1.300000e+02 255 252 190 1.310000e+02 255 252 190 136 | 1.310000e+02 255 251 187 1.320000e+02 255 251 187 137 | 1.320000e+02 255 250 184 1.330000e+02 255 250 184 138 | 1.330000e+02 255 249 180 1.340000e+02 255 249 180 139 | 1.340000e+02 255 248 177 1.350000e+02 255 248 177 140 | 1.350000e+02 255 247 174 1.360000e+02 255 247 174 141 | 1.360000e+02 255 246 171 1.370000e+02 255 246 171 142 | 1.370000e+02 255 245 167 1.380000e+02 255 245 167 143 | 1.380000e+02 255 244 164 1.390000e+02 255 244 164 144 | 1.390000e+02 255 243 161 1.400000e+02 255 243 161 145 | 1.400000e+02 255 243 
158 1.410000e+02 255 243 158 146 | 1.410000e+02 255 242 154 1.420000e+02 255 242 154 147 | 1.420000e+02 255 241 151 1.430000e+02 255 241 151 148 | 1.430000e+02 255 240 148 1.440000e+02 255 240 148 149 | 1.440000e+02 255 239 144 1.450000e+02 255 239 144 150 | 1.450000e+02 255 238 141 1.460000e+02 255 238 141 151 | 1.460000e+02 255 237 138 1.470000e+02 255 237 138 152 | 1.470000e+02 255 236 135 1.480000e+02 255 236 135 153 | 1.480000e+02 255 235 131 1.490000e+02 255 235 131 154 | 1.490000e+02 255 234 128 1.500000e+02 255 234 128 155 | 1.500000e+02 255 233 125 1.510000e+02 255 233 125 156 | 1.510000e+02 255 232 122 1.520000e+02 255 232 122 157 | 1.520000e+02 255 230 115 1.530000e+02 255 230 115 158 | 1.530000e+02 255 229 113 1.540000e+02 255 229 113 159 | 1.540000e+02 254 227 111 1.550000e+02 254 227 111 160 | 1.550000e+02 254 226 109 1.560000e+02 254 226 109 161 | 1.560000e+02 253 225 108 1.570000e+02 253 225 108 162 | 1.570000e+02 253 224 106 1.580000e+02 253 224 106 163 | 1.580000e+02 252 222 104 1.590000e+02 252 222 104 164 | 1.590000e+02 252 221 102 1.600000e+02 252 221 102 165 | 1.600000e+02 251 220 100 1.610000e+02 251 220 100 166 | 1.610000e+02 251 219 98 1.620000e+02 251 219 98 167 | 1.620000e+02 250 217 97 1.630000e+02 250 217 97 168 | 1.630000e+02 250 216 95 1.640000e+02 250 216 95 169 | 1.640000e+02 249 215 93 1.650000e+02 249 215 93 170 | 1.650000e+02 249 214 91 1.660000e+02 249 214 91 171 | 1.660000e+02 248 212 89 1.670000e+02 248 212 89 172 | 1.670000e+02 248 211 87 1.680000e+02 248 211 87 173 | 1.680000e+02 247 210 85 1.690000e+02 247 210 85 174 | 1.690000e+02 247 208 84 1.700000e+02 247 208 84 175 | 1.700000e+02 246 207 82 1.710000e+02 246 207 82 176 | 1.710000e+02 246 206 80 1.720000e+02 246 206 80 177 | 1.720000e+02 245 205 78 1.730000e+02 245 205 78 178 | 1.730000e+02 245 203 76 1.740000e+02 245 203 76 179 | 1.740000e+02 244 202 74 1.750000e+02 244 202 74 180 | 1.750000e+02 244 201 73 1.760000e+02 244 201 73 181 | 1.760000e+02 243 200 71 
1.770000e+02 243 200 71 182 | 1.770000e+02 242 197 67 1.780000e+02 242 197 67 183 | 1.780000e+02 242 194 65 1.790000e+02 242 194 65 184 | 1.790000e+02 241 191 63 1.800000e+02 241 191 63 185 | 1.800000e+02 241 188 62 1.810000e+02 241 188 62 186 | 1.810000e+02 240 185 60 1.820000e+02 240 185 60 187 | 1.820000e+02 240 182 58 1.830000e+02 240 182 58 188 | 1.830000e+02 239 179 56 1.840000e+02 239 179 56 189 | 1.840000e+02 239 176 54 1.850000e+02 239 176 54 190 | 1.850000e+02 238 173 53 1.860000e+02 238 173 53 191 | 1.860000e+02 238 170 51 1.870000e+02 238 170 51 192 | 1.870000e+02 237 167 49 1.880000e+02 237 167 49 193 | 1.880000e+02 237 164 47 1.890000e+02 237 164 47 194 | 1.890000e+02 236 161 45 1.900000e+02 236 161 45 195 | 1.900000e+02 236 159 44 1.910000e+02 236 159 44 196 | 1.910000e+02 236 156 42 1.920000e+02 236 156 42 197 | 1.920000e+02 235 153 40 1.930000e+02 235 153 40 198 | 1.930000e+02 235 150 38 1.940000e+02 235 150 38 199 | 1.940000e+02 234 147 36 1.950000e+02 234 147 36 200 | 1.950000e+02 234 144 34 1.960000e+02 234 144 34 201 | 1.960000e+02 233 141 33 1.970000e+02 233 141 33 202 | 1.970000e+02 233 138 31 1.980000e+02 233 138 31 203 | 1.980000e+02 232 135 29 1.990000e+02 232 135 29 204 | 1.990000e+02 232 132 27 2.000000e+02 232 132 27 205 | 2.000000e+02 231 129 25 2.010000e+02 231 129 25 206 | 2.010000e+02 231 126 24 2.020000e+02 231 126 24 207 | 2.020000e+02 230 123 22 2.030000e+02 230 123 22 208 | 2.030000e+02 230 120 20 2.040000e+02 230 120 20 209 | 2.040000e+02 229 118 19 2.050000e+02 229 118 19 210 | 2.050000e+02 228 116 18 2.060000e+02 228 116 18 211 | 2.060000e+02 227 114 18 2.070000e+02 227 114 18 212 | 2.070000e+02 225 112 17 2.080000e+02 225 112 17 213 | 2.080000e+02 224 110 16 2.090000e+02 224 110 16 214 | 2.090000e+02 223 108 15 2.100000e+02 223 108 15 215 | 2.100000e+02 222 107 15 2.110000e+02 222 107 15 216 | 2.110000e+02 221 105 14 2.120000e+02 221 105 14 217 | 2.120000e+02 220 103 13 2.130000e+02 220 103 13 218 | 2.130000e+02 218 101 12 
2.140000e+02 218 101 12 219 | 2.140000e+02 217 99 12 2.150000e+02 217 99 12 220 | 2.150000e+02 216 97 11 2.160000e+02 216 97 11 221 | 2.160000e+02 215 95 10 2.170000e+02 215 95 10 222 | 2.170000e+02 214 93 9 2.180000e+02 214 93 9 223 | 2.180000e+02 213 91 8 2.190000e+02 213 91 8 224 | 2.190000e+02 212 89 8 2.200000e+02 212 89 8 225 | 2.200000e+02 210 87 7 2.210000e+02 210 87 7 226 | 2.210000e+02 209 85 6 2.220000e+02 209 85 6 227 | 2.220000e+02 208 83 5 2.230000e+02 208 83 5 228 | 2.230000e+02 207 82 5 2.240000e+02 207 82 5 229 | 2.240000e+02 206 80 4 2.250000e+02 206 80 4 230 | 2.250000e+02 205 78 3 2.260000e+02 205 78 3 231 | 2.260000e+02 203 76 2 2.270000e+02 203 76 2 232 | 2.270000e+02 202 74 2 2.280000e+02 202 74 2 233 | 2.280000e+02 200 70 0 2.290000e+02 200 70 0 234 | 2.290000e+02 198 69 1 2.300000e+02 198 69 1 235 | 2.300000e+02 195 68 2 2.310000e+02 195 68 2 236 | 2.310000e+02 193 67 3 2.320000e+02 193 67 3 237 | 2.320000e+02 190 65 4 2.330000e+02 190 65 4 238 | 2.330000e+02 188 64 4 2.340000e+02 188 64 4 239 | 2.340000e+02 185 63 5 2.350000e+02 185 63 5 240 | 2.350000e+02 183 62 6 2.360000e+02 183 62 6 241 | 2.360000e+02 180 61 7 2.370000e+02 180 61 7 242 | 2.370000e+02 178 60 8 2.380000e+02 178 60 8 243 | 2.380000e+02 175 58 9 2.390000e+02 175 58 9 244 | 2.390000e+02 173 57 10 2.400000e+02 173 57 10 245 | 2.400000e+02 170 56 11 2.410000e+02 170 56 11 246 | 2.410000e+02 168 55 12 2.420000e+02 168 55 12 247 | 2.420000e+02 165 54 12 2.430000e+02 165 54 12 248 | 2.430000e+02 163 53 13 2.440000e+02 163 53 13 249 | 2.440000e+02 160 52 14 2.450000e+02 160 52 14 250 | 2.450000e+02 158 50 15 2.460000e+02 158 50 15 251 | 2.460000e+02 155 49 16 2.470000e+02 155 49 16 252 | 2.470000e+02 153 48 17 2.480000e+02 153 48 17 253 | 2.480000e+02 150 47 18 2.490000e+02 150 47 18 254 | 2.490000e+02 148 46 19 2.500000e+02 148 46 19 255 | 2.500000e+02 145 45 19 2.510000e+02 145 45 19 256 | 2.510000e+02 143 43 20 2.520000e+02 143 43 20 257 | 2.520000e+02 140 42 21 2.530000e+02 
def timeprofile():
    """Factory returning a fresh TimeProfiler instance."""
    return TimeProfiler()


class TimeProfiler:

    """Utility class for measuring wall-clock time between named marks."""

    def __init__(self):
        # Maps slot name -> wall-clock timestamp (seconds) when it was marked.
        self.timedict = {}

    def mark(self, slot=""):
        """Record the current time under the name 'slot'."""
        # 'slot' is expected to be a string; not validated here.
        self.timedict[slot] = time.time()

    def unmark(self, slot=""):
        """Forget the mark 'slot' (no-op when it was never marked)."""
        self.timedict.pop(slot, None)

    def lastdiff(self):
        """Seconds elapsed since the most recent mark."""
        return time.time() - max(self.timedict.values())

    def elapsed(self, slot=""):
        """Seconds elapsed since 'slot' was marked."""
        # NOTE(review): if 'slot' was never marked, dict.get returns None and
        # the subtraction raises TypeError; callers always mark first.
        return time.time() - self.timedict.get(slot)

    def diff(self, slot1, slot2):
        """Seconds between the marks 'slot1' and 'slot2'."""
        return self.timedict.get(slot2) - self.timedict.get(slot1)

    def maxdiff(self):
        """Largest time spread between any two marks."""
        stamps = list(self.timedict.values())
        return max(stamps) - min(stamps)

    def timegap(self):
        """Seconds elapsed since the earliest mark."""
        return time.time() - min(self.timedict.values())

    def cleanup(self):
        """Drop all marks."""
        self.timedict.clear()
def permute(variable, output_order=("time", "z", "zb", "y", "x")):
    """
    Permute dimensions of a NetCDF variable to match the output
    storage order.

    Parameters
    ----------
    variable : a netcdf variable
        e.g. thk = nc.variables['thk']
    output_order: dimension tuple (optional)
        default ordering is ('time', 'z', 'zb', 'y', 'x')

    Returns
    -------
    var_perm : array_like
    """
    in_dims = variable.dimensions

    # Keep only the names that occur in this variable, in the requested
    # output order, then translate them into input axis indices.
    wanted = [name for name in output_order if name in in_dims]
    axes = [in_dims.index(name) for name in wanted]

    if not axes:
        # No recognized dimensions: hand the data back unchanged so that
        # downstream processing of "axes" does not break.
        return variable[:]
    return np.transpose(variable[:], axes)


def output_dimensions(input_dimensions):
    """Build a list of dimension names used to define a variable in the
    output file.

    NOTE(review): 'stationdim' and 'profiledim' are not defined anywhere in
    this script (this function looks copied from a profile-extraction
    script); calling it would raise NameError -- confirm before use.
    """
    _, _, zdim, tdim = get_dims_from_variable(input_dimensions)

    if tdim:
        result = [stationdim, tdim, profiledim]
    else:
        result = [stationdim, profiledim]

    if zdim:
        result.append(zdim)

    return result


def get_dims_from_variable(var_dimensions):
    """
    Gets dimensions from netcdf variable

    Parameters:
    -----------
    var: netCDF variable

    Returns:
    --------
    xdim, ydim, zdim, tdim: dimensions
    """

    def first_present(candidates, collection):
        """Return the first candidate found in collection, else None."""
        for candidate in candidates:
            if candidate in collection:
                return candidate
        return None

    name_lists = (
        ["x", "x1"],  # possible x-dimension names
        ["y", "y1"],  # possible y-dimension names
        ["z", "zb"],  # possible z-dimension names
        ["t", "time"],  # possible time-dimension names
    )
    return [first_present(names, var_dimensions) for names in name_lists]
def copy_dimensions(in_file, out_file, exclude_list):
    """Copy dimensions from in_file to out_file, excluding ones in
    exclude_list.

    Unlimited dimensions stay unlimited; dimensions that already exist in
    out_file are left untouched.
    """
    for name, dim in in_file.dimensions.items():
        if name not in exclude_list and name not in out_file.dimensions:
            if dim.isunlimited():
                out_file.createDimension(name, None)
            else:
                out_file.createDimension(name, len(dim))


def copy_time_dimension(in_file, out_file, name):
    """Copy time dimension, the corresponding coordinate variable, and the
    corresponding time bounds variable (if present) from an in_file to
    an out_file.
    """
    var_in = in_file.variables[name]
    var_out = create_variable_like(in_file, name, out_file)
    var_out[:] = in_file.variables[name][:]

    try:
        bounds_name = var_in.bounds
    except AttributeError:
        # var_in has no 'bounds' attribute: nothing more to copy
        return

    # BUG FIX: the call was previously create_variable_like(bounds_name,
    # in_file, out_file), which does not match the signature
    # create_variable_like(in_file, var_name, out_file).  The resulting
    # AttributeError was silently swallowed by the enclosing except clause,
    # so the time-bounds variable was never actually copied.
    var_out = create_variable_like(in_file, bounds_name, out_file)
    var_out[:] = in_file.variables[bounds_name][:]
def copy_attributes(var_in, var_out):
    """Copy attributes from var_in to var_out. Give special treatment to
    _FillValue and coordinates.
    """
    _, _, _, tdim = get_dims_from_variable(var_in.dimensions)
    for attr_name in var_in.ncattrs():
        if attr_name == "_FillValue":
            # _FillValue can only be set at variable-creation time
            continue
        if attr_name == "coordinates":
            # rewrite coordinates to reference lat/lon (plus time if present)
            prefix = "{0} ".format(tdim) if tdim else ""
            setattr(var_out, "coordinates", prefix + "lat lon")
        else:
            setattr(var_out, attr_name, getattr(var_in, attr_name))


def copy_global_attributes(in_file, out_file):
    "Copy global attributes from in_file to out_file."
    for attr_name in in_file.ncattrs():
        setattr(out_file, attr_name, getattr(in_file, attr_name))


def create_variable_like(in_file, var_name, out_file, dimensions=None, fill_value=-2e9):
    """Create a variable in an out_file that is the same var_name in
    in_file, except possibly depending on different dimensions,
    provided in dimensions.
    """
    var_in = in_file.variables[var_name]
    # prefer the fill value recorded on the input variable, if any
    fill_value = getattr(var_in, "_FillValue", fill_value)

    out_dimensions = var_in.dimensions if dimensions is None else dimensions

    var_out = out_file.createVariable(
        var_name, var_in.dtype, dimensions=out_dimensions, fill_value=fill_value
    )
    copy_attributes(var_in, var_out)
    return var_out
if __name__ == "__main__":
    # Set up the option parser.
    # NOTE: the previous description mentioned profiles and ESRI shape
    # files; it was copied from another script and did not describe what
    # this script actually does.
    description = """A script to extract 3D variables on levels of constant
    normalized depth (sigma levels) and to compute isochrone depths."""
    parser = ArgumentParser()
    parser.description = description

    parser.add_argument("INPUTFILE", nargs=1, help="input NetCDF file name")
    parser.add_argument("OUTPUTFILE", nargs=1, help="output NetCDF file name", default="out.nc")
    # BUG FIX: type=int added.  Without it a command-line value arrives as a
    # string and "n_levels + 1" below raises TypeError (only the default 25
    # was an int).
    parser.add_argument("-n", "--n_levels", dest="n_levels", type=int, help="no. of levels", default=25)
    parser.add_argument(
        "-a",
        "--age_iso",
        dest="age_iso",
        help="list of increasing iso age levels",
        default="9000,11700,29000,57000,115000",
    )
    parser.add_argument(
        "-v", "--variable", dest="variables", help="comma-separated list with variables", default="age"
    )

    options = parser.parse_args()
    fill_value = -2e9
    variables = options.variables.split(",")
    n_levels = options.n_levels
    age_iso = np.fromstring(options.age_iso, dtype=float, sep=",")
    n_age_iso = len(age_iso)

    print("-----------------------------------------------------------------")
    print(("Running script %s ..." % __file__.split("/")[-1]))
    print("-----------------------------------------------------------------")
    print(("Opening NetCDF file %s ..." % options.INPUTFILE[0]))
    try:
        # open netCDF file in 'read' mode
        nc_in = NC(options.INPUTFILE[0], "r")
    except Exception:
        # was a bare "except:", which also swallowed KeyboardInterrupt
        print(("ERROR: file '%s' not found or not NetCDF format ... ending ..." % options.INPUTFILE[0]))
        import sys

        # exit with a non-zero status so callers can detect the failure
        sys.exit(1)

    # use the same on-disk format for the output file
    file_format = nc_in.file_format
    # get the dimensions
    xdim, ydim, zdim, tdim = ppt.get_dims(nc_in)
    # read projection information
    projection = ppt.get_projection_from_file(nc_in)
    # new sigma coordinate with n_levels (exclude the zero-depth level)
    depth_out = np.linspace(0, 1, n_levels + 1)[1:]
    nd = len(depth_out)

    nt = len(nc_in.dimensions[tdim])
    nx = len(nc_in.dimensions[xdim])
    ny = len(nc_in.dimensions[ydim])

    # We should bail here if no z-dim is found
    if zdim is not None:
        z = nc_in.variables[zdim][:]

    print("Creating dimensions")
    nc_out = NC(options.OUTPUTFILE[0], "w", format=file_format)
    copy_global_attributes(nc_in, nc_out)

    # re-create dimensions from an input file in an output file, but
    # skip vertical dimension
    copy_dimensions(nc_in, nc_out, zdim)
    ddim = "depth"
    nc_out.createDimension(ddim, nd)
    isodim = "ni"
    nc_out.createDimension(isodim, n_age_iso)

    out_dims = (tdim, zdim, ydim, xdim)

    # copy mapplane dimension variables
    for var_name in (xdim, ydim):
        var_out = create_variable_like(nc_in, var_name, nc_out, dimensions=(var_name,))
        var_out[:] = nc_in.variables[var_name][:]

    # create new sigma coordinate
    sigma_var = nc_out.createVariable(ddim, "d", dimensions=(ddim,))
    sigma_var.long_name = "depth below surface"
    sigma_var.axis = "Z"
    sigma_var.positive = "down"
    sigma_var[:] = depth_out

    if tdim is not None:
        copy_time_dimension(nc_in, nc_out, tdim)

    # locate the ice-thickness variable by its CF standard_name; the loop
    # scans all variables, so the last match wins (as before)
    standard_name = "land_ice_thickness"
    for name in list(nc_in.variables.keys()):
        v = nc_in.variables[name]
        if getattr(v, "standard_name", "") == standard_name:
            print(("variable {0} found by its standard_name {1}".format(name, standard_name)))
            myvar = name
thickness = permute(nc_in.variables[myvar], output_order=out_dims)
thk_min = 500  # m (minimum ice thickness)

iso_name = "depth_iso"
iso_name_norm = "depth_iso_norm"

print((" - reading variable %s" % (myvar)))

print("Copying variables")

for var_name in variables:

    print((" Reading variable %s" % var_name))
    profiler = timeprofile()
    var_in = nc_in.variables[var_name]
    in_dims = var_in.dimensions
    datatype = var_in.dtype
    var_in_data = permute(var_in, output_order=out_dims)

    profiler.mark("interpolation")
    if tdim is not None:
        out_var = create_variable_like(nc_in, var_name, nc_out, dimensions=(tdim, ddim, ydim, xdim))
        iso_var = nc_out.createVariable(
            iso_name, datatype="double", dimensions=(tdim, isodim, ydim, xdim), fill_value=fill_value
        )
        iso_var_norm = nc_out.createVariable(
            iso_name_norm, datatype="double", dimensions=(tdim, isodim, ydim, xdim), fill_value=fill_value
        )

        for t in range(nt):
            for m in range(ny):
                for n in range(nx):
                    thk = thickness[t, m, n]
                    v = var_in_data[t, :, m, n]
                    if thk > thk_min:
                        # highest model level still inside the ice
                        z_surf = z[z < thk][-1]
                        # normalized depth: 0 at the surface, 1 at the base
                        depth_in = 1 - z[z < thk] / z_surf
                        v_in = v[z < thk]
                        f = interp1d(depth_in, v_in)
                        v_out = f(depth_out)
                        # clamp negative interpolation overshoot to zero
                        v_out[np.nonzero(v_out < 0)] = 0
                        out_var[t, :, m, n] = v_out
                        # invert the profile: normalized depth as a
                        # function of the variable (e.g. age)
                        f = interp1d(v_in[2:], depth_in[2:], fill_value=fill_value)
                        for k, age_level in enumerate(age_iso):
                            try:
                                d_out = f(age_level)
                            except ValueError:
                                # interp1d raises ValueError outside the
                                # data range (bounds_error defaults to True)
                                d_out = fill_value
                            iso_var_norm[t, k, m, n] = d_out
                        # absolute depth of isochrone (m below surface)
                        depth_in = z_surf - z[z < thk]
                        f = interp1d(v_in[2:], depth_in[2:], fill_value=fill_value)
                        for k, age_level in enumerate(age_iso):
                            try:
                                d_out = f(age_level)
                            except ValueError:
                                d_out = fill_value
                            iso_var[t, k, m, n] = d_out

    else:
        out_var = create_variable_like(nc_in, var_name, nc_out, dimensions=(ddim, ydim, xdim))
        iso_var = nc_out.createVariable(
            iso_name, datatype="double", dimensions=(isodim, ydim, xdim), fill_value=fill_value
        )
        iso_var_norm = nc_out.createVariable(
            iso_name_norm, datatype="double", dimensions=(isodim, ydim, xdim), fill_value=fill_value
        )

        for m in range(ny):
            for n in range(nx):
                thk = thickness[m, n]
                v = var_in_data[:, m, n]
                if thk > thk_min:
                    z_surf = z[z < thk][-1]
                    depth_in = 1 - z[z < thk] / z_surf
                    v_in = v[z < thk]
                    f = interp1d(depth_in, v_in)
                    v_out = f(depth_out)
                    v_out[np.nonzero(v_out < 0)] = 0
                    out_var[:, m, n] = v_out
                    f = interp1d(v_in[2:], depth_in[2:], fill_value=fill_value)
                    for k, age_level in enumerate(age_iso):
                        try:
                            d_out = f(age_level)
                        except ValueError:
                            d_out = fill_value
                        # BUG FIX: was iso_var_norm[t, k, m, n]; there is no
                        # time loop in this branch, so 't' is undefined here
                        # (NameError) and the variable has no time dimension.
                        iso_var_norm[k, m, n] = d_out
                    # depth of isochrone
                    depth_in = z_surf - z[z < thk]
                    f = interp1d(v_in[2:], depth_in[2:], fill_value=fill_value)
                    for k, age_level in enumerate(age_iso):
                        try:
                            d_out = f(age_level)
                        except ValueError:
                            d_out = fill_value
                        iso_var[k, m, n] = d_out

    iso_var.grid_mapping = "mapping"
    iso_var_norm.grid_mapping = "mapping"

    p = profiler.elapsed("interpolation")
    print((" - interpolated in %3.4f s" % p))

for var in ("run_stats", "pism_config", "mapping"):
    if var in nc_in.variables:
        create_variable_like(nc_in, var, nc_out)

# writing global attributes
import time
import sys

script_command = " ".join([time.ctime(), ":", __file__.split("/")[-1], " ".join([str(l) for l in sys.argv[1:]])])
if hasattr(nc_in, "history"):
    history = nc_in.history
    nc_out.history = script_command + "\n " + history
else:
    nc_out.history = script_command
| 482 | nc_in.close() 483 | nc_out.close() 484 | print(("Extracted 3D variable(s) {} to file {}".format(variables, options.OUTPUTFILE[0]))) 485 | -------------------------------------------------------------------------------- /pypismtools.py: -------------------------------------------------------------------------------- 1 | """ 2 | pypismtools: Tools to evaluate PISM parameter studies 3 | 4 | pypismtools is a module to facilitate evaluation of PISM parameter 5 | studies. It mainly comprises two classes, Observation and Experiment, 6 | which act as containers for observational data and PISM model 7 | simulations, along with helper functions. The experiment class 8 | determines information about an experiment from the netcdf file 9 | directly, especially from the "pism_overrides" flag. Such information 10 | can then be used for labeling, plotting, evaluation, etc. The indend 11 | is to provide a robust tool to evaluate data, and to avoid common mistakes 12 | such as mis-labeling plots. Additional functions include routines to 13 | permute (netcdf) dimension, convert units using udunits, to estimate 14 | trends, and to import GMT colormaps. 
class GeoTIFF(object):

    """
    A class to read a GeoTIFF

    Parameters
    ----------

    filename: a valid geotiff file
    """

    def __init__(self, file_name):

        self.file_name = file_name

        print(("\n opening GeoTIFF file %s" % file_name))
        # BUG FIX: gdal.Open returns None on failure instead of raising, so
        # the previous try/except never caught anything and the code crashed
        # later with an unrelated AttributeError.  Fail early and clearly.
        self.gtiff = gdal.Open(file_name)
        if self.gtiff is None:
            raise IOError("could not open file %s" % file_name)

        self.RasterArray = self.gtiff.ReadAsArray()
        self.gtiff_projection = self.gtiff.GetProjection()

        osr_gtiff = osr.SpatialReference()
        osr_gtiff.ImportFromWkt(self.gtiff_projection)
        self.proj4 = osr_gtiff.ExportToProj4()

        # GDAL geotransform: (ulx, xres, xskew, uly, yskew, yres)
        geoT = self.gtiff.GetGeoTransform()
        pxwidth = self.gtiff.RasterXSize
        pxheight = self.gtiff.RasterYSize
        ulx = geoT[0]
        uly = geoT[3]
        rezX = geoT[1]
        rezY = geoT[5]  # negative for north-up rasters
        rx = ulx + pxwidth * rezX
        ly = uly + pxheight * rezY
        self.width = np.abs(pxwidth * rezX)
        self.height = np.abs(pxheight * rezY)
        self.center_x = ulx + pxwidth * rezX / 2
        self.center_y = uly + pxheight * rezY / 2
        self.easting = np.arange(ulx, rx + rezX, rezX)
        self.northing = np.arange(ly, uly - rezY, -rezY)
        self.X, self.Y = np.meshgrid(self.easting, self.northing)

        # projected center and grid, converted to lon/lat
        p_osr_gtiff = Proj(self.proj4)
        self.lon_0, self.lat_0 = p_osr_gtiff(self.center_x, self.center_y, inverse=True)
        self.lon, self.lat = p_osr_gtiff(self.X, self.Y, inverse=True)
def get_dims(nc):
    """
    Gets dimensions from netcdf instance

    Parameters:
    -----------
    nc: netCDF instance

    Returns:
    --------
    xdim, ydim, zdim, tdim: dimensions
    """

    def last_present(candidates):
        """Return the last candidate present among the file's dimensions,
        or None when none of them occurs (preserves last-match-wins
        behavior when several candidates are present)."""
        found = None
        for name in candidates:
            if name in list(nc.dimensions.keys()):
                found = name
        return found

    xdim = last_present(["x", "x1"])
    ydim = last_present(["y", "y1"])
    zdim = last_present(["z", "z1"])
    tdim = last_present(["t", "time"])
    return xdim, ydim, zdim, tdim


def get_projection_from_file(nc):
    """
    Gets a Proj projection instance from a pointer to a netCDF file

    Parameters
    ----------
    nc : a netCDF object instance

    Returns
    -------
    p : Proj4 projection instance
    """

    from pyproj import Proj

    # First, check if we have a global attribute 'proj4' which contains a
    # Proj4 string; fall back to 'projection', then to a grid-mapping
    # variable, then to an empty string.
    # NOTE(review): the bare excepts are kept as-is; they also swallow
    # unrelated errors raised while building the projection.
    try:
        p = Proj(str(nc.proj4))
        print("Found projection information in global attribute proj4, using it")
    except:
        try:
            p = Proj(str(nc.projection))
            print("Found projection information in global attribute projection, using it")
        except:
            try:
                # go through variables and look for 'grid_mapping' attribute
                for var in list(nc.variables.keys()):
                    if hasattr(nc.variables[var], "grid_mapping"):
                        mappingvarname = nc.variables[var].grid_mapping
                        print(('Found projection information in variable "%s", using it' % mappingvarname))
                        break
                var_mapping = nc.variables[mappingvarname]
                p = Proj(
                    proj="stere",
                    ellps=var_mapping.ellipsoid,
                    datum=var_mapping.ellipsoid,
                    units="m",
                    lat_ts=var_mapping.standard_parallel,
                    lat_0=var_mapping.latitude_of_projection_origin,
                    lon_0=var_mapping.straight_vertical_longitude_from_pole,
                    x_0=var_mapping.false_easting,
                    y_0=var_mapping.false_northing,
                )
            except:
                print("No mapping information found, return empy string.")
                p = ""

    return p
| proj="stere", 164 | ellps=var_mapping.ellipsoid, 165 | datum=var_mapping.ellipsoid, 166 | units="m", 167 | lat_ts=var_mapping.standard_parallel, 168 | lat_0=var_mapping.latitude_of_projection_origin, 169 | lon_0=var_mapping.straight_vertical_longitude_from_pole, 170 | x_0=var_mapping.false_easting, 171 | y_0=var_mapping.false_northing, 172 | ) 173 | except: 174 | print("No mapping information found, return empy string.") 175 | p = "" 176 | 177 | return p 178 | 179 | 180 | def add_inner_title(ax, title, loc, size=None, **kwargs): 181 | """ 182 | Adds an inner title to a given axis, with location loc. 183 | 184 | from http://matplotlib.sourceforge.net/examples/axes_grid/demo_axes_grid2.html 185 | """ 186 | from matplotlib.offsetbox import AnchoredText 187 | from matplotlib.patheffects import withStroke 188 | 189 | if size is None: 190 | size = dict(size=plt.rcParams["legend.fontsize"]) 191 | at = AnchoredText(title, loc=loc, prop=size, pad=0.0, borderpad=0.5, frameon=False, **kwargs) 192 | ax.add_artist(at) 193 | return at 194 | 195 | 196 | def get_golden_mean(): 197 | """ 198 | Returns golden mean (sqrt(5) - 1.0) / 2.0 199 | """ 200 | return (np.sqrt(5) - 1.0) / 2.0 201 | 202 | 203 | def set_mode(mode, aspect_ratio=0.95): 204 | """ 205 | Set the print mode, i.e. document and font size. Options are: 206 | - onecol: width=80mm, font size=8pt. Appropriate for 1-column figures 207 | - twocol: width=160mm, font size=8pt. Default. 208 | Appropriate for 2-column figures 209 | - medium: width=121mm, font size=7pt. 210 | - small_font: width=121mm, font size=6pt. 211 | - height: height=2.5in. 212 | - small: width=80mm, font size=6pt 213 | - presentation: width=85mm, font size=10pt. For presentations. 
214 | """ 215 | 216 | linestyle = "-" 217 | 218 | def set_onecol(): 219 | """ 220 | Define parameters for "publish" mode and return value for pad_inches 221 | """ 222 | 223 | fontsize = 6 224 | lw = 0.5 225 | markersize = 2 226 | fig_width = 3.15 # inch 227 | fig_height = aspect_ratio * fig_width # inch 228 | fig_size = [fig_width, fig_height] 229 | 230 | params = { 231 | "backend": "ps", 232 | "axes.linewidth": 0.5, 233 | "lines.linewidth": lw, 234 | "axes.labelsize": fontsize, 235 | "font.size": fontsize, 236 | "xtick.labelsize": fontsize, 237 | "ytick.labelsize": fontsize, 238 | "legend.fontsize": fontsize, 239 | "lines.linestyle": linestyle, 240 | "lines.markersize": markersize, 241 | "font.size": fontsize, 242 | "figure.figsize": fig_size, 243 | } 244 | 245 | plt.rcParams.update(params) 246 | 247 | return lw, 0.30 248 | 249 | def set_small(): 250 | """ 251 | Define parameters for "publish" mode and return value for pad_inches 252 | """ 253 | 254 | fontsize = 6 255 | lw = 0.5 256 | markersize = 2 257 | fig_width = 3.15 # inch 258 | fig_height = aspect_ratio * fig_width # inch 259 | fig_size = [fig_width, fig_height] 260 | 261 | params = { 262 | "backend": "ps", 263 | "axes.linewidth": 0.5, 264 | "lines.linewidth": lw, 265 | "axes.labelsize": fontsize, 266 | "font.size": fontsize, 267 | "xtick.labelsize": fontsize, 268 | "ytick.labelsize": fontsize, 269 | "legend.fontsize": fontsize, 270 | "lines.linestyle": linestyle, 271 | "lines.markersize": markersize, 272 | "lines.markeredgewidth": 0.2, 273 | "font.size": fontsize, 274 | "figure.figsize": fig_size, 275 | } 276 | 277 | plt.rcParams.update(params) 278 | 279 | return lw, 0.20 280 | 281 | def set_72mm(): 282 | """ 283 | Define parameters for "72mm" mode and return value for pad_inches 284 | """ 285 | 286 | fontsize = 6 287 | markersize = 3 288 | lw = 0.7 289 | fig_width = 2.8 # inch 290 | fig_height = aspect_ratio * fig_width # inch 291 | fig_size = [fig_width, fig_height] 292 | 293 | params = { 294 | 
"backend": "ps", 295 | "axes.linewidth": 0.35, 296 | "lines.linewidth": lw, 297 | "axes.labelsize": fontsize, 298 | "font.size": fontsize, 299 | "xtick.labelsize": fontsize, 300 | "ytick.labelsize": fontsize, 301 | "legend.fontsize": fontsize, 302 | "lines.linestyle": linestyle, 303 | "lines.markersize": markersize, 304 | "font.size": fontsize, 305 | "figure.figsize": fig_size, 306 | } 307 | 308 | plt.rcParams.update(params) 309 | 310 | return lw, 0.20 311 | 312 | def set_50mm(): 313 | """ 314 | Define parameters for "72mm" mode and return value for pad_inches 315 | """ 316 | 317 | fontsize = 5 318 | markersize = 2.5 319 | lw = 0.6 320 | fig_width = 2.0 # inch 321 | fig_height = aspect_ratio * fig_width # inch 322 | fig_size = [fig_width, fig_height] 323 | 324 | params = { 325 | "backend": "ps", 326 | "axes.linewidth": 0.3, 327 | "lines.linewidth": lw, 328 | "axes.labelsize": fontsize, 329 | "font.size": fontsize, 330 | "xtick.labelsize": fontsize, 331 | "ytick.labelsize": fontsize, 332 | "legend.fontsize": fontsize, 333 | "lines.linestyle": linestyle, 334 | "lines.markersize": markersize, 335 | "font.size": fontsize, 336 | "figure.figsize": fig_size, 337 | } 338 | 339 | plt.rcParams.update(params) 340 | 341 | return lw, 0.10 342 | 343 | def set_medium(): 344 | """ 345 | Define parameters for "medium" mode and return value for pad_inches 346 | """ 347 | 348 | fontsize = 8 349 | markersize = 3 350 | lw = 0.75 351 | fig_width = 3.15 # inch 352 | fig_height = aspect_ratio * fig_width # inch 353 | fig_size = [fig_width, fig_height] 354 | 355 | params = { 356 | "backend": "ps", 357 | "axes.linewidth": 0.5, 358 | "lines.linewidth": lw, 359 | "axes.labelsize": fontsize, 360 | "font.size": fontsize, 361 | "xtick.labelsize": fontsize, 362 | "ytick.labelsize": fontsize, 363 | "legend.fontsize": fontsize, 364 | "lines.linestyle": linestyle, 365 | "lines.markersize": markersize, 366 | "font.size": fontsize, 367 | "figure.figsize": fig_size, 368 | } 369 | 370 | 
plt.rcParams.update(params) 371 | 372 | return lw, 0.10 373 | 374 | def set_small_font(): 375 | """ 376 | Define parameters for "small_font" mode and return value for pad_inches 377 | """ 378 | 379 | fontsize = 6 380 | markersize = 2 381 | lw = 0.6 382 | fig_width = 3.15 # inch 383 | fig_height = aspect_ratio * fig_width # inch 384 | fig_size = [fig_width, fig_height] 385 | 386 | params = { 387 | "backend": "ps", 388 | "axes.linewidth": 0.5, 389 | "lines.linewidth": lw, 390 | "axes.labelsize": fontsize, 391 | "font.size": fontsize, 392 | "xtick.labelsize": fontsize, 393 | "ytick.labelsize": fontsize, 394 | "legend.fontsize": fontsize, 395 | "lines.linestyle": linestyle, 396 | "lines.markersize": markersize, 397 | "font.size": fontsize, 398 | "figure.figsize": fig_size, 399 | } 400 | 401 | plt.rcParams.update(params) 402 | 403 | return lw, 0.10 404 | 405 | def set_large_font(): 406 | """ 407 | Define parameters for "large_font" mode and return value for pad_inches 408 | """ 409 | 410 | fontsize = 10 411 | markersize = 9 412 | lw = 0.75 413 | fig_width = 6.2 # inch 414 | fig_height = aspect_ratio * fig_width # inch 415 | fig_size = [fig_width, fig_height] 416 | 417 | params = { 418 | "backend": "ps", 419 | "axes.linewidth": 0.5, 420 | "lines.linewidth": lw, 421 | "axes.labelsize": fontsize, 422 | "font.size": fontsize, 423 | "xtick.labelsize": fontsize, 424 | "ytick.labelsize": fontsize, 425 | "legend.fontsize": fontsize, 426 | "lines.linestyle": linestyle, 427 | "lines.markersize": markersize, 428 | "font.size": fontsize, 429 | "figure.figsize": fig_size, 430 | } 431 | 432 | plt.rcParams.update(params) 433 | 434 | return lw, 0.20 435 | 436 | def set_presentation(): 437 | """ 438 | Define parameters for "presentation" mode and return value 439 | for pad_inches 440 | """ 441 | 442 | fontsize = 8 443 | lw = 1.5 444 | markersize = 3 445 | fig_width = 6.64 # inch 446 | fig_height = aspect_ratio * fig_width # inch 447 | fig_size = [fig_width, fig_height] 448 | 449 | 
params = { 450 | "backend": "ps", 451 | "axes.linewidth": 0.75, 452 | "lines.linewidth": lw, 453 | "axes.labelsize": fontsize, 454 | "font.size": fontsize, 455 | "xtick.labelsize": fontsize, 456 | "ytick.labelsize": fontsize, 457 | "lines.linestyle": linestyle, 458 | "lines.markersize": markersize, 459 | "legend.fontsize": fontsize, 460 | "font.size": fontsize, 461 | "figure.figsize": fig_size, 462 | } 463 | 464 | plt.rcParams.update(params) 465 | 466 | return lw, 0.2 467 | 468 | def set_twocol(): 469 | """ 470 | Define parameters for "twocol" mode and return value for pad_inches 471 | """ 472 | 473 | fontsize = 7 474 | lw = 0.75 475 | markersize = 3 476 | fig_width = 6.3 # inch 477 | fig_height = aspect_ratio * fig_width # inch 478 | fig_size = [fig_width, fig_height] 479 | 480 | params = { 481 | "backend": "ps", 482 | "axes.linewidth": 0.5, 483 | "lines.linewidth": lw, 484 | "axes.labelsize": fontsize, 485 | "font.size": fontsize, 486 | "xtick.labelsize": fontsize, 487 | "ytick.labelsize": fontsize, 488 | "lines.linestyle": linestyle, 489 | "lines.markersize": markersize, 490 | "legend.fontsize": fontsize, 491 | "font.size": fontsize, 492 | "figure.figsize": fig_size, 493 | } 494 | 495 | plt.rcParams.update(params) 496 | 497 | return lw, 0.35 498 | 499 | def set_height(): 500 | """ 501 | Define parameters for "twocol" mode and return value for pad_inches 502 | """ 503 | fontsize = 8 504 | lw = 1.1 505 | markersize = 1.5 506 | fig_height = 2.5 # inch 507 | fig_width = fig_height / aspect_ratio # inch 508 | fig_size = [fig_width, fig_height] 509 | 510 | params = { 511 | "backend": "ps", 512 | "axes.linewidth": 0.65, 513 | "lines.linewidth": lw, 514 | "axes.labelsize": fontsize, 515 | "font.size": fontsize, 516 | "xtick.labelsize": fontsize, 517 | "ytick.labelsize": fontsize, 518 | "lines.linestyle": linestyle, 519 | "lines.markersize": markersize, 520 | "legend.fontsize": fontsize, 521 | "font.size": fontsize, 522 | "figure.figsize": fig_size, 523 | } 524 | 525 | 
plt.rcParams.update(params) 526 | 527 | return lw, 0.025 528 | 529 | if mode == "onecol": 530 | return set_onecol() 531 | elif mode == "small": 532 | return set_small() 533 | elif mode == "medium": 534 | return set_medium() 535 | elif mode == "72mm": 536 | return set_72mm() 537 | elif mode == "50mm": 538 | return set_50mm() 539 | elif mode == "small_font": 540 | return set_small_font() 541 | elif mode == "large_font": 542 | return set_large_font() 543 | elif mode == "presentation": 544 | return set_presentation() 545 | elif mode == "twocol": 546 | return set_twocol() 547 | elif mode == "height": 548 | return set_height() 549 | else: 550 | print(("%s mode not recognized, using onecol instead" % mode)) 551 | return set_twocol() 552 | 553 | 554 | def trend_estimator(x, y): 555 | """ 556 | Trend estimator 557 | 558 | Simultaneous estimation of bias, trend, annual, semi-annual and 559 | 161-day sinusoid (alias period S2 tide errors). 560 | 561 | Parameters 562 | ---------- 563 | x, y : array_like, x must have units "years" 564 | 565 | Returns 566 | ------- 567 | x : ndarray 568 | The solution (or the result of the last iteration for an unsuccessful 569 | call). 570 | cov_x : ndarray 571 | Uses the fjac and ipvt optional outputs to construct an 572 | estimate of the jacobian around the solution. ``None`` if a 573 | singular matrix encountered (indicates very flat curvature in 574 | some direction). This matrix must be multiplied by the 575 | residual standard deviation to get the covariance of the 576 | parameter estimates -- see curve_fit. 577 | infodict : dict 578 | a dictionary of optional outputs with the key s:: 579 | 580 | - 'nfev' : the number of function calls 581 | - 'fvec' : the function evaluated at the output 582 | - 'fjac' : A permutation of the R matrix of a QR 583 | factorization of the final approximate 584 | Jacobian matrix, stored column wise. 585 | Together with ipvt, the covariance of the 586 | estimate can be approximated. 
587 | - 'ipvt' : an integer array of length N which defines 588 | a permutation matrix, p, such that 589 | fjac*p = q*r, where r is upper triangular 590 | with diagonal elements of nonincreasing 591 | magnitude. Column j of p is column ipvt(j) 592 | of the identity matrix. 593 | - 'qtf' : the vector (transpose(q) * fvec). 594 | 595 | mesg : str 596 | A string message giving information about the cause of failure. 597 | ier : int 598 | An integer flag. If it is equal to 1, 2, 3 or 4, the solution was 599 | found. Otherwise, the solution was not found. In either case, the 600 | optional output variable 'mesg' gives more information. 601 | 602 | Notes 603 | ----- 604 | Code snipplet provided by Anthony Arendt, March 13, 2011. 605 | Uses scipy.optimize.leastsq, see documentation of 606 | scipy.optimize.leastsq for details. 607 | """ 608 | 609 | try: 610 | from scipy import optimize 611 | except: 612 | print("scipy.optimize not found. Please install.") 613 | exit(1) 614 | 615 | def fitfunc(p, x): 616 | return ( 617 | p[0] 618 | + p[1] * x 619 | + p[2] * np.cos(2.0 * np.pi * (x - p[3]) / 1.0) 620 | + p[4] * np.cos(2.0 * np.pi * (x - p[5]) / 0.5) 621 | + p[6] * np.cos(2.0 * np.pi * (x - p[7]) / 0.440794) 622 | ) 623 | 624 | def errfunc(p, x, y): 625 | return fitfunc(p, x) - y 626 | 627 | p0 = [0.0, -80.0, 40.0, 0.0, 10.0, 0.0, 1.0, 0.0] 628 | 629 | return optimize.leastsq(errfunc, p0[:], args=(x, y), full_output=1) 630 | 631 | 632 | def colorList(): 633 | """ 634 | Returns a list with colors, e.g for line plots. etc. 635 | """ 636 | colors = [ 637 | "#084594", # dark blue 638 | "#FF7F00", # orange 639 | "#984EA3", # violet 640 | "#E41A1C", # red 641 | "#4DAF4A", # green 642 | "#377EB8", # light blue 643 | "#FB9A99", # light red 644 | "#FB9A99", # light orange 645 | "#CAB2D6", # light violet 646 | "brown", 647 | "pink", 648 | ] 649 | return colors 650 | 651 | 652 | def gmtColormap(fileName, log_color=False, reverse=False): 653 | """ 654 | Import a CPT colormap from GMT. 
655 | 656 | Parameters 657 | ---------- 658 | fileName : a cpt file. 659 | 660 | Example 661 | ------- 662 | >>> cdict = gmtColormap("mycolormap.cpt") 663 | >>> gmt_colormap = colors.LinearSegmentedColormap("my_colormap", cdict) 664 | 665 | Notes 666 | ----- 667 | This code snipplet modified after 668 | http://www.mail-archive.com/matplotlib-users@lists.sourceforge.net/msg09547.html 669 | """ 670 | import colorsys 671 | import os 672 | 673 | try: 674 | try: 675 | f = open(fileName) 676 | except: 677 | # Check if it's a colormap provided in colormaps/ 678 | basedir, fname = os.path.split(__file__) 679 | my_file = os.path.join(basedir, "colormaps", fileName) 680 | f = open(my_file) 681 | except: 682 | print("file ", fileName, "not found") 683 | return None 684 | 685 | lines = f.readlines() 686 | f.close() 687 | 688 | x = [] 689 | r = [] 690 | g = [] 691 | b = [] 692 | colorModel = "RGB" 693 | for l in lines: 694 | ls = l.split() 695 | if l[0] == "#": 696 | if ls[-1] == "HSV": 697 | colorModel = "HSV" 698 | continue 699 | else: 700 | continue 701 | if ls[0] == "B" or ls[0] == "F" or ls[0] == "N": 702 | pass 703 | else: 704 | x.append(float(ls[0])) 705 | r.append(float(ls[1])) 706 | g.append(float(ls[2])) 707 | b.append(float(ls[3])) 708 | xtemp = float(ls[4]) 709 | rtemp = float(ls[5]) 710 | gtemp = float(ls[6]) 711 | btemp = float(ls[7]) 712 | 713 | x.append(xtemp) 714 | r.append(rtemp) 715 | g.append(gtemp) 716 | b.append(btemp) 717 | 718 | if reverse: 719 | r.reverse() 720 | g.reverse() 721 | b.reverse() 722 | 723 | x = np.array(x, np.float32) 724 | r = np.array(r, np.float32) 725 | g = np.array(g, np.float32) 726 | b = np.array(b, np.float32) 727 | if colorModel == "HSV": 728 | for i in range(r.shape[0]): 729 | rr, gg, bb = colorsys.hsv_to_rgb(r[i] / 360.0, g[i], b[i]) 730 | r[i] = rr 731 | g[i] = gg 732 | b[i] = bb 733 | if colorModel == "HSV": 734 | for i in range(r.shape[0]): 735 | rr, gg, bb = colorsys.hsv_to_rgb(r[i] / 360.0, g[i], b[i]) 736 | r[i] = rr 737 | 
g[i] = gg 738 | b[i] = bb 739 | if colorModel == "RGB": 740 | r = r / 255.0 741 | g = g / 255.0 742 | b = b / 255.0 743 | 744 | if log_color: 745 | xNorm = np.zeros((len(x),)) 746 | xNorm[1::] = np.logspace(-1, 0, len(x) - 1) 747 | xNorm[1::-2] /= 4 748 | else: 749 | xNorm = (x - x[0]) / (x[-1] - x[0]) 750 | 751 | red = [] 752 | blue = [] 753 | green = [] 754 | for i in range(len(x)): 755 | red.append([xNorm[i], r[i], r[i]]) 756 | green.append([xNorm[i], g[i], g[i]]) 757 | blue.append([xNorm[i], b[i], b[i]]) 758 | colorDict = {"red": red, "green": green, "blue": blue} 759 | return colorDict 760 | 761 | 762 | def smooth(x, window_len=11, window="hanning"): 763 | """ 764 | Smooth the data using a window with requested size (running mean, 765 | moving average, low pass filtering). 766 | 767 | This method is based on the convolution of a scaled window with the signal. 768 | The signal is prepared by introducing reflected copies of the signal 769 | (with the window size) in both ends so that transient parts are minimized 770 | in the begining and end part of the output signal. 771 | 772 | Parameters 773 | ---------- 774 | x : array_like, the input signal 775 | window_len : the dimension of the smoothing window; should be an odd integer 776 | window : the type of window from "flat", "hanning", "hamming", 777 | "bartlett", "blackman" flat window will produce a moving average smoothing. 778 | 779 | Returns 780 | ------- 781 | y : the smoothed signal 782 | 783 | Example 784 | ------- 785 | t = np.linspace(-2,2,0.1) 786 | x = np.sin(t) + np.randn(len(t))*0.1 787 | y = smooth(x) 788 | 789 | See also 790 | -------- 791 | numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, 792 | numpy.convolve 793 | scipy.signal.lfilter 794 | 795 | Notes 796 | ----- 797 | Downloaded from http://www.scipy.org/Cookbook/SignalSmooth. 
798 | 799 | TODO 800 | ---- 801 | the window parameter could be the window itself if an array instead 802 | of a string 803 | """ 804 | 805 | if x.ndim != 1: 806 | raise ValueError("smooth only accepts 1 dimension arrays.") 807 | 808 | if x.size < window_len: 809 | raise ValueError("Input vector needs to be bigger than window size.") 810 | 811 | if window_len < 3: 812 | return x 813 | 814 | if not window in ["flat", "hanning", "hamming", "bartlett", "blackman"]: 815 | raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'") 816 | 817 | s = np.r_[2 * x[0] - x[window_len:1:-1], x, 2 * x[-1] - x[-1:-window_len:-1]] 818 | 819 | if window == "flat": # moving average 820 | w = np.ones(window_len, "d") 821 | else: 822 | w = eval("np." + window + "(window_len)") 823 | 824 | y = np.convolve(w / w.sum(), s, mode="same") 825 | return y[window_len - 1 : -window_len + 1] 826 | 827 | 828 | def fftsmooth(x, window_len=11, window="hanning"): 829 | """ 830 | Smooth the data using a window with requested size (running mean, 831 | moving average, low pass filtering). 832 | 833 | This method is based on the convolution of a scaled window with the signal. 834 | The signal is prepared by introducing reflected copies of the signal 835 | (with the window size) in both ends so that transient parts are minimized 836 | in the begining and end part of the output signal. 837 | 838 | Parameters 839 | ---------- 840 | x : array_like, the input signal 841 | window_len : the dimension of the smoothing window; should be an odd integer 842 | window : the type of window from "flat", "hanning", "hamming", 843 | "bartlett", "blackman" flat window will produce a moving average smoothing. 
844 | 845 | Returns 846 | ------- 847 | y : the smoothed signal 848 | 849 | Example 850 | ------- 851 | t = np.linspace(-2,2,0.1) 852 | x = np.sin(t) + np.randn(len(t))*0.1 853 | y = smooth(x) 854 | 855 | See also 856 | -------- 857 | numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, 858 | numpy.convolve 859 | scipy.signal.lfilter 860 | 861 | Notes 862 | ----- 863 | Downloaded from http://www.scipy.org/Cookbook/SignalSmooth, but replaced 864 | np.convovle with faster scipy.signal.fftconvolve 865 | 866 | TODO 867 | ---- 868 | the window parameter could be the window itself if an array instead 869 | of a string 870 | """ 871 | 872 | from scipy.signal import fftconvolve 873 | 874 | if x.ndim != 1: 875 | raise ValueError("smooth only accepts 1 dimension arrays.") 876 | 877 | if x.size < window_len: 878 | raise ValueError("Input vector needs to be bigger than window size.") 879 | 880 | if window_len < 3: 881 | return x 882 | 883 | if not window in ["flat", "hanning", "hamming", "bartlett", "blackman"]: 884 | raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'") 885 | 886 | s = np.r_[2 * x[0] - x[window_len:1:-1], x, 2 * x[-1] - x[-1:-window_len:-1]] 887 | 888 | if window == "flat": # moving average 889 | w = np.ones(window_len, "d") 890 | else: 891 | w = eval("np." + window + "(window_len)") 892 | 893 | y = fftconvolve(w / w.sum(), s, mode="same") 894 | return y[window_len - 1 : -window_len + 1] 895 | 896 | 897 | def get_rmse(a, b, N, w=None): 898 | """ 899 | Returns the (weighted) root mean square error of differences between a and b. 
900 | 901 | Parameters 902 | ---------- 903 | a, b : array_like 904 | N : number of valid values 905 | w : weights 906 | 907 | Returns 908 | ------- 909 | rmse : scalar 910 | """ 911 | 912 | if w is None: 913 | w = np.ones_like(a) 914 | c = (a.ravel() - b.ravel()) / w.ravel() 915 | if isinstance(c, np.ma.MaskedArray): 916 | return np.sqrt(np.linalg.norm(np.ma.compressed(c), 2) ** 2.0 / N) 917 | else: 918 | return np.sqrt(np.linalg.norm(c, 2) ** 2.0 / N) 919 | 920 | 921 | def get_avg(a, b, N, relative=False): 922 | """ 923 | Returns the average difference between a and b. 924 | 925 | Parameters 926 | ---------- 927 | a,b : array_like 928 | N : number of values 929 | 930 | Returns 931 | ------- 932 | avg : scalar 933 | 934 | Notes 935 | ----- 936 | The average is the sum of elements of the difference (a - b) 937 | divided by the number of elements N. 938 | """ 939 | if relative is False: 940 | c = a.ravel() - b.ravel() 941 | else: 942 | c = a.ravel() - b.ravel() / b.ravel() 943 | if isinstance(c, np.ma.MaskedArray): 944 | return np.linalg.norm(np.ma.compressed(c), 1) / N 945 | else: 946 | return np.linalg.norm(c, 1) / N 947 | 948 | 949 | def unit_converter(data, inunit, outunit): 950 | """ 951 | Unit converter. Takes an (numpy) array, valid udunits inunits and outunits 952 | as strings, and returns the array in outunits. 
953 | 954 | Parameters 955 | ---------- 956 | data : array_like 957 | inunit : string 958 | unit to convert from, must be UDUNITS-compatible string 959 | outunit : string 960 | unit to conver to, must be UDUNITS-compatible string 961 | 962 | Returns 963 | ------- 964 | out : array_like 965 | 966 | Example 967 | ------- 968 | >>> import numpy as np 969 | >>> c = Converter("kg","Gt") 970 | >>> out = c(np.array([1,2])*1e12) 971 | >>> out = array([ 1., 2.]) 972 | """ 973 | 974 | inunit = str(inunit) 975 | outunit = str(outunit) 976 | if isinstance(data, np.ma.MaskedArray): 977 | mask = data.mask 978 | else: 979 | mask = None 980 | data = np.array(data) 981 | if not (inunit == outunit): 982 | try: 983 | try: 984 | from cf_units import Unit 985 | 986 | in_unit = Unit(inunit) 987 | out_unit = Unit(outunit) 988 | outdata = in_unit.convert(data, out_unit) 989 | except: 990 | from udunits2 import Converter, System, Unit 991 | 992 | sys = System() 993 | c = Converter((Unit(sys, inunit), Unit(sys, outunit))) 994 | outdata = c(data) 995 | except: 996 | print("Neither cf_units or udunits2 module found, you're on your own.") 997 | c = 1.0 / 1e3 998 | outdata = c * data 999 | else: 1000 | outdata = data 1001 | 1002 | if mask is not None: 1003 | return np.ma.array(outdata, mask=mask) 1004 | else: 1005 | return outdata 1006 | 1007 | 1008 | def permute(variable, output_order=("time", "z", "zb", "y", "x")): 1009 | """ 1010 | Permute dimensions of a NetCDF variable to match the output 1011 | storage order. 1012 | 1013 | Parameters 1014 | ---------- 1015 | variable : a netcdf variable 1016 | e.g. 
thk = nc.variables['thk'] 1017 | output_order: dimension tuple (optional) 1018 | default ordering is ('time', 'z', 'zb', 'y', 'x') 1019 | 1020 | Returns 1021 | ------- 1022 | var_perm : array_like 1023 | """ 1024 | 1025 | input_dimensions = variable.dimensions 1026 | 1027 | # filter out irrelevant dimensions 1028 | dimensions = [x for x in output_order if x in input_dimensions] 1029 | 1030 | # create the mapping 1031 | mapping = [dimensions.index(x) for x in input_dimensions] 1032 | 1033 | if mapping: 1034 | return np.transpose(variable[:], mapping) 1035 | else: 1036 | return variable[:] # so that it does not break processing "mapping" 1037 | -------------------------------------------------------------------------------- /scripts/basemap_plot.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (C) 2011-2013 Andy Aschwanden 3 | # 4 | # Script creates a basemap plot of a variable in a netCDF file 5 | # with a geotiff background (if given). 
# Does a 1x2, 1x3, 2x2, 3x2 grid plots

import sys

from mpl_toolkits.basemap import Basemap, cm
from mpl_toolkits.axes_grid1 import ImageGrid
import numpy as np
import pylab as plt
from matplotlib import colors
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

from pyproj import Proj

try:
    from netCDF4 import Dataset as NC
except ImportError:
    from netCDF3 import Dataset as NC

try:
    import pypismtools.pypismtools as ppt
except ImportError:
    import pypismtools as ppt


class Variable(object):

    """
    A class containing variable-specific stuff such as colorbars,
    tickmarks, etc.
    """

    # keyword arguments recognized and copied onto the instance
    _expected_args = ("ticks", "cmap", "norm", "vmin", "vmax", "extend", "format", "colorbar_label")

    def __init__(self, var_name, kwargs):
        # BUG FIX: the original assigned the module-level global "varname"
        # instead of the constructor argument "var_name".
        self.var_name = var_name

        # set only the recognized keys as attributes, exactly as the
        # original if-chain did
        for key, value in kwargs.items():
            if key in self._expected_args:
                setattr(self, key, value)


# Set up the option parser
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.description = "A script to plot a variable in a netCDF file over a GeoTiff. Uses GDAL python bindings, Proj4, and Basemap. Script is fine-tuned for whole Greenland plots, but can be adapted for other needs."
parser.add_argument("FILE", nargs="*")
parser.add_argument("--alpha", dest="alpha", help="transparency of overlay", default=1.0)
parser.add_argument(
    "--background", dest="background", help="Draw a background (bluemarble, etopo, shadedrelief", default=None
)
parser.add_argument(
    "--bounds", dest="bounds", nargs=2, type=float, help="lower and upper bound for colorbar, eg. -1 1", default=None
)
parser.add_argument(
    "--boundary_tol",
    dest="boundary_tol",
    nargs=1,
    type=float,
    help="""if set, color areas brown where obs <= boundary_tol but data >= boundary_tol,
    works for difference plots only.""",
    default=None,
)
parser.add_argument(
    "--colorbar_position",
    dest="colorbar_position",
    choices=["bottom", "right", "upper", "left"],
    help="position of the colorbar for n x m plots",
    default="bottom",
)
parser.add_argument(
    "--obs_file",
    dest="obs_file",
    help="""
file with observations for difference plot,
experiment - observation. Must be on same grid as experiments. Default is None""",
    default=None,
)
parser.add_argument(
    "--colormap",
    dest="colormap",
    help="""path to a cpt colormap, or a pylab colormap,
e.g. Blues""",
    default=None,
)
parser.add_argument("--coastlines", dest="coastlines", action="store_true", help="adds a coastlines", default=False)
parser.add_argument(
    "-c", "--colorbar", dest="colorbar", action="store_true", help="saves a colorbar seperately", default=False
)
parser.add_argument(
    "--colorbar_label", dest="colorbar_label", action="store_true", help="saves a colorbar seperately", default=False
)
parser.add_argument(
    "--drawmapscale",
    dest="drawmapscale",
    action="store_true",
    help="draws a map scale in the lower left corner",
    default=False,
)
parser.add_argument(
    "--inner_titles",
    dest="inner_titles",
    help="add an inner title, give a list like --inner_title 'a),b),c)'",
    default=None,
)
parser.add_argument(
    "--singlerow", dest="singlerow", action="store_true", help="all plots on a single row", default=False
)
parser.add_argument(
    "--singlecolumn", dest="singlecolumn", action="store_true", help="all plots on a single column", default=False
)
parser.add_argument(
    "--map_resolution",
    dest="map_res",
    choices=["l", "i", "h", "f"],
    help="Resolution of boundary database (see Basemap), default = 'l' (low)",
    default="l",
)
parser.add_argument(
    "-o",
    "--output_filename",
    dest="out_file",
    help="Name of the output file. Suffix defines output format",
    default="foo.png",
)
parser.add_argument("--geotiff_file", dest="geotiff_filename", help="GeoTIFF filename", default=None)
parser.add_argument("--shape_file", dest="shape_filename", nargs="+", help="Shapefile filename", default=None)
parser.add_argument("--out_unit", dest="outunit", help="Output unit, default is unit in file", default=None)
parser.add_argument(
    "-p",
    "--print_size",
    dest="print_mode",
    choices=["onecol", "medium", "twocol", "height", "presentation", "small_font"],
    help="sets figure size and font size, available options are: \
    'onecol','medium','twocol','presentation'",
    default="twocol",
)
parser.add_argument(
    "-r",
    "--output_resolution",
    dest="out_res",
    help="""
Graphics resolution in dots per inch (DPI), default
= 300""",
    default=300,
)
parser.add_argument("--relative", dest="relative", action="store_true", help="do relative differences.", default=False)
parser.add_argument(
    "--no_rasterize", dest="rasterized", action="store_false", help="Don't rasterize plot. Slow.", default=True
)
parser.add_argument("--tol", dest="tol", type=float, help="tolerance", default=None)
parser.add_argument("--level", dest="level", type=int, help="level, for 3D data only. Default = 0", default=0)
parser.add_argument(
    "-s",
    "--shaded",
    dest="shaded",
    action="store_true",
    help="""Shaded topography. CAREFUL, this options is experimental.
    It uses imshow, which does not support masked arrays,
    and we also get the projection slighly wrong.""",
    default=False,
)
parser.add_argument(
    "-v", "--variable", dest="varname", help="""Variable to plot, default = 'csurf'.""", default="csurf"
)

options = parser.parse_args()
args = options.FILE

nt = len(args)
required_no_args = 0
max_no_args = 24
if nt < required_no_args:
    # BUG FIX: "$i" is not a printf placeholder (the two-item tuple raised
    # TypeError), "import sys.exit" is not importable, and a bare
    # "sys.exit" reference never actually exited.
    print("received %i arguments, at least %i expected" % (nt, required_no_args))
    sys.exit(1)
elif nt > max_no_args:
    print("received %i arguments, no more than %i accepted" % (nt, max_no_args))
    sys.exit(1)

alpha = float(options.alpha)
background = options.background
bounds = options.bounds
boundary_tol = options.boundary_tol
colormap = options.colormap
coastlines = options.coastlines
colorbar = options.colorbar
colorbar_label = options.colorbar_label
colorbar_position = options.colorbar_position
drawmapscale = options.drawmapscale
if options.inner_titles is not None:
    inner_titles = options.inner_titles.split(",")
else:
    inner_titles = None
level = options.level
map_res = options.map_res
geotiff_filename = options.geotiff_filename
print_mode = options.print_mode
obs_file = options.obs_file
outunit = options.outunit
out_res = int(options.out_res)
out_file = options.out_file
shaded = options.shaded
singlerow = options.singlerow
singlecolumn = options.singlecolumn
relative = options.relative
rasterized = options.rasterized
tol = options.tol
varname = options.varname
shape_filename = options.shape_filename

cmap = None
if colormap is not None:
    try:
        # a pylab colormap name?
        cdict = plt.cm.datad[colormap]
    except KeyError:
        # no: import and convert a GMT cpt colormap instead
        cdict = ppt.gmtColormap(colormap)
    cmap = colors.LinearSegmentedColormap("my_colormap", cdict)
# check output format
suffix = out_file.split(".")[-1]
if suffix not in ("png", "pdf", "ps", "eps", "svg"):
    # BUG FIX: "import sys.exit" is not importable and a bare "sys.exit"
    # reference never called the function, so the script kept running.
    import sys

    print("Requested output format %s not supported, try png, pdf, svg, ps, eps" % suffix)
    sys.exit(1)

# set constants and other stuff
geotiff_rasterized = True

# Variable-name groups. All groups are tuples so that "varname in vars_*"
# is an exact membership test; the original defined several groups as plain
# strings, which made "in" a *substring* test (e.g. "melt" in "bmelt" is
# True) and could mis-classify variables.
vars_speed = (
    "csurf",
    "cbase",
    "cbar",
    "magnitude",
    "balvelmag",
    "surfvelmag",
    "velbase_mag",
    "velsurf_mag",
    "velshear_mag",
)
vars_dem = ("thk", "usurf", "usrf", "surface_altitude", "surface", "land_ice_thickness")
vars_topo = ("topg", "bedrock_altitude", "bed")
vars_dh = ("dhdt", "climatic_mass_balance_cumulative")
vars_cmb = ("climatic_mass_balance", "climatic_mass_balance_original")
vars_temp = ("ice_surface_temp", "temppabase", "temppa", "temp_pa")
vars_melt = ("bmelt",)
vars_heat = ("bheatflx",)
vars_div = ("divQ", "divHU", "divUH", "divHU_umt", "divHU_cresis", "divHU_searise", "res_flux")
vars_tempice = ("tempicethk_basal",)
vars_stress = ("tauc", "tauc_mag", "taub_mag")
vars_hydro = ("tillwat",)
vars_hydro_log = ("bwat",)
vars_ratio_1 = ("sliding_r",)
vars_rel = ("tau_rel",)
vars_rel_log = ("tau_r",)

# Pick colormap, normalization, ticks and colorbar settings for the
# requested variable; "variable" carries them to the plotting code below.
if varname in vars_speed:

    if cmap is None:
        try:
            # use the speed colormap shipped with pypismtools
            basedir = ppt.__file__.split(ppt.__package__)
            cdict = ppt.gmtColormap(
                basedir[0] + ppt.__package__ + "/colormaps/Full_saturation_spectrum_CCW_orange.cpt"
            )
            cmap = colors.LinearSegmentedColormap("my_colormap", cdict)
        except Exception:
            cmap = plt.cm.Blues

    vmin = 1.0
    vmax = 3e3
    norm = colors.LogNorm(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": [1, 3, 10, 30, 100, 300, 1000, 3000],
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "both",
        "format": "%d",
        "colorbar_label": "m yr$^{\mathregular{-1}}$",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_melt:

    if cmap is None:
        cmap = plt.cm.OrRd

    vmin = 0.001
    vmax = 1
    norm = colors.LogNorm(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": [0.001, 0.01, 0.1, 1],
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "max",
        "format": None,
        "colorbar_label": "m yr$^{\mathregular{-1}}$",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_heat:

    if cmap is None:
        cmap = plt.cm.jet

    vmin = 10
    vmax = 150
    norm = colors.Normalize(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "both",
        "format": None,
        "colorbar_label": "W m$^{\mathregular{-2}}$",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_stress:

    if cmap is None:
        cmap = plt.cm.jet

    vmin = 1e4
    vmax = 1.25e6
    norm = colors.LogNorm(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": [0, 1e3, 1e4, 1e5, 1e6, 1e7],
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "both",
        "format": "%1.0e",
        "colorbar_label": "Pa",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_tempice:

    if cmap is None:
        cmap = plt.cm.OrRd

    vmin = 0.1
    vmax = 100
    norm = colors.Normalize(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": [25, 50, 75, 100],
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "max",
        "format": "%i",
        "colorbar_label": "m",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_dem:

    if cmap is None:
        cmap = plt.cm.Blues

    vmin = 0.1
    vmax = None
    norm = colors.Normalize(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "max",
        "format": "%d",
        "colorbar_label": "m",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_topo:

    if cmap is None:
        cmap = plt.cm.Blues

    ## vmin = -5000
    ## vmax = 1400
    vmin = -1000
    vmax = 2100
    norm = colors.Normalize(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "both",
        "format": "%d",
        "colorbar_label": "m a.s.l.",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_dh:

    if cmap is None:
        cmap = plt.cm.RdBu

    vmin = None
    vmax = None
    norm = None

    var_dict = {
        "ticks": None,
        "vmin": vmin,
        "vmax": vmax,
        "norm": norm,
        "cmap": cmap,
        "extend": "both",
        "format": None,
        "colorbar_label": "m",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_cmb:

    if cmap is None:
        cmap = plt.cm.RdBu

    vmin = None
    vmax = None
    norm = None

    # NOTE(review): the label exponent "m^2" looks like it was meant to be
    # "m^-2" for a mass flux -- confirm before changing the string.
    var_dict = {
        "ticks": None,
        "vmin": vmin,
        "vmax": vmax,
        "norm": norm,
        "cmap": cmap,
        "extend": "both",
        "format": None,
        "colorbar_label": "kg m$^{\mathregular{2}}$ yr$^{\mathregular{-1}}$",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_temp:

    if cmap is None:
        cmap = plt.cm.gist_rainbow_r

    vmin = None
    vmax = None
    norm = None

    var_dict = {
        "ticks": None,
        "vmin": vmin,
        "vmax": vmax,
        "norm": norm,
        "cmap": cmap,
        "extend": "both",
        "format": None,
        "colorbar_label": "\u00B0C",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_div:

    if cmap is None:
        cmap = plt.cm.gist_ncar

    vmin = None
    vmax = None
    norm = None

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "both",
        "format": None,
        "colorbar_label": "m yr$^{\mathregular{-1}}$",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_hydro:

    if cmap is None:
        cmap = plt.cm.jet

    vmin = 0.001
    vmax = 2
    norm = colors.Normalize(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "max",
        "format": None,
        "colorbar_label": "m",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_hydro_log:

    if cmap is None:
        cmap = plt.cm.jet

    vmin = 0.001
    vmax = 10
    norm = colors.LogNorm(vmin=vmin, vmax=vmax)

    var_dict = {
        "ticks": None,
        "cmap": cmap,
        "norm": norm,
        "vmin": vmin,
        "vmax": vmax,
        "extend": "max",
        "format": None,
        "colorbar_label": "m",
    }
    variable = Variable(varname, var_dict)

elif varname in vars_ratio_1:

    if cmap is None:
        cmap = plt.cm.OrRd

vmin = 0 495 | vmax = 1 496 | norm = colors.Normalize(vmin=vmin, vmax=vmax) 497 | 498 | attr_keys = ("ticks", "cmap", "norm", "vmin", "vmax", "extend", "format", "colorbar_label") 499 | attr_vals = ([-10, -1, 0, 1, 10], cmap, norm, vmin, vmax, "neither", "%i", "1") 500 | var_dict = dict(list(zip(attr_keys, attr_vals))) 501 | variable = Variable(varname, var_dict) 502 | 503 | elif varname in vars_rel_log: 504 | 505 | if cmap is None: 506 | cmap = plt.cm.gist_ncar_r 507 | 508 | vmin = 1 509 | vmax = 1000 510 | norm = colors.LogNorm(vmin=vmin, vmax=vmax) 511 | 512 | attr_keys = ("ticks", "cmap", "norm", "vmin", "vmax", "extend", "format", "colorbar_label") 513 | attr_vals = ([1, 10, 100, 1000], cmap, norm, vmin, vmax, "both", "%i", "1") 514 | var_dict = dict(list(zip(attr_keys, attr_vals))) 515 | variable = Variable(varname, var_dict) 516 | 517 | elif varname in vars_rel: 518 | 519 | if cmap is None: 520 | cmap = plt.cm.PRGn 521 | 522 | vmin = -10 523 | vmax = 10 524 | norm = colors.Normalize(vmin=vmin, vmax=vmax) 525 | 526 | attr_keys = ("ticks", "cmap", "norm", "vmin", "vmax", "extend", "format", "colorbar_label") 527 | attr_vals = ([-10, -1, 0, 1, 10], cmap, norm, vmin, vmax, "both", "%i", "1") 528 | var_dict = dict(list(zip(attr_keys, attr_vals))) 529 | variable = Variable(varname, var_dict) 530 | 531 | else: 532 | 533 | if cmap is None: 534 | cmap = plt.cm.gist_ncar 535 | 536 | vmin = None 537 | vmax = None 538 | norm = None 539 | 540 | attr_keys = ("ticks", "cmap", "norm", "vmin", "vmax", "extend", "format") 541 | attr_vals = (None, cmap, norm, vmin, vmax, "both", None) 542 | var_dict = dict(list(zip(attr_keys, attr_vals))) 543 | variable = Variable(varname, var_dict) 544 | 545 | bounds_min = -1 546 | bounds_max = 1 547 | if bounds is not None: 548 | bounds_min = bounds[0] 549 | bounds_max = bounds[1] 550 | variable.vmin = bounds_min 551 | variable.vmax = bounds_max 552 | variable.norm = colors.Normalize(vmin=variable.vmin, vmax=variable.vmax) 553 | 554 | if 
obs_file is not None: 555 | variable.vmin = bounds_min 556 | variable.vmax = bounds_max 557 | variable.norm = colors.Normalize(vmin=variable.vmin, vmax=variable.vmax) 558 | variable.ticks = None 559 | 560 | if geotiff_filename is not None: 561 | geotiff = ppt.GeoTIFF(geotiff_filename) 562 | width = geotiff.width 563 | height = geotiff.height 564 | lat_0 = geotiff.lat_0 565 | lon_0 = geotiff.lon_0 566 | lat = geotiff.lat 567 | lon = geotiff.lon 568 | else: 569 | filename = args[0] 570 | print((" opening NetCDF file %s ..." % filename)) 571 | try: 572 | nc = NC(filename, "r") 573 | except: 574 | print(("ERROR: file '%s' not found or not NetCDF format ... ending ..." % filename)) 575 | import sys 576 | 577 | sys.exit() 578 | 579 | xdim, ydim, zdim, tdim = ppt.get_dims(nc) 580 | 581 | # coordinate variable in x-direction 582 | x_var = np.squeeze(nc.variables[xdim][:]) 583 | # coordinate variable in y-direction 584 | y_var = np.squeeze(nc.variables[ydim][:]) 585 | 586 | center_x = (x_var[0] + x_var[-1]) / 2 587 | center_y = (y_var[0] + y_var[-1]) / 2 588 | nc_projection = ppt.get_projection_from_file(nc) 589 | lon_0, lat_0 = nc_projection(center_x, center_y, inverse=True) 590 | width = 1.2 * (np.max(x_var) - np.min(x_var)) 591 | height = 1.0 * (np.max(y_var) - np.min(y_var)) 592 | 593 | nc.close() 594 | 595 | 596 | if obs_file is not None: 597 | print((" opening NetCDF file %s ..." % obs_file)) 598 | try: 599 | # open netCDF file in 'append' mode 600 | nc = NC(obs_file, "r") 601 | except: 602 | print(("ERROR: file '%s' not found or not NetCDF format ... ending ..." 
% obs_file)) 603 | import sys 604 | 605 | sys.exit() 606 | 607 | # get the dimensions 608 | xdim, ydim, zdim, tdim = ppt.get_dims(nc) 609 | # set up dimension ordering 610 | dim_order = (tdim, zdim, ydim, xdim) 611 | 612 | myvar = varname 613 | for name in list(nc.variables.keys()): 614 | v = nc.variables[name] 615 | if getattr(v, "standard_name", "") == varname: 616 | print(("variabe {0} found by its standard_name {1}".format(name, varname))) 617 | myvar = name 618 | print((" - reading variable %s from file %s" % (myvar, obs_file))) 619 | try: 620 | data = np.squeeze(ppt.permute(nc.variables[myvar], dim_order)) 621 | except: 622 | print(("ERROR: unknown or not-found variable '%s' in file %s ... ending ..." % (variable.var_name, obs_file))) 623 | exit(2) 624 | 625 | try: 626 | inunit = str(nc.variables[myvar].units) 627 | except: 628 | print(("ERROR: units not found in variable '%s' in file %s ... ending ..." % (variable.var_name, obs_file))) 629 | exit(2) 630 | 631 | if outunit is not None: 632 | data = ppt.unit_converter(data, inunit, outunit) 633 | 634 | if variable.var_name in vars_dem: 635 | mask = data <= variable.vmin 636 | obs_values = np.ma.array(data, mask=mask) 637 | elif variable.var_name in vars_topo: 638 | obs_values = data 639 | else: 640 | try: 641 | fill = nc.variables[var]._FillValue 642 | mask = data == fill 643 | except: 644 | mask = np.zeros_like(data) 645 | mask[data <= tol] = 1 646 | if tol: 647 | mask[data <= tol] = 1 648 | obs_values = np.ma.array(data, mask=mask) 649 | 650 | nc.close() 651 | 652 | 653 | print(" creating Basemap ...") 654 | m = Basemap(width=width, height=height, resolution=map_res, projection="stere", lat_0=lat_0, lon_0=lon_0) 655 | 656 | if geotiff_filename is not None: 657 | xx_gtiff, yy_gtiff = m(lon, lat) 658 | 659 | lats = [] 660 | lons = [] 661 | values = [] 662 | ocean_mask = [] 663 | 664 | for k in range(0, nt): 665 | 666 | filename = args[k] 667 | print((" opening NetCDF file %s ..." 
% filename)) 668 | try: 669 | # open netCDF file in 'append' mode 670 | nc = NC(filename, "r") 671 | except: 672 | print(("ERROR: file '%s' not found or not NetCDF format ... ending ..." % filename)) 673 | import sys 674 | 675 | sys.exit(1) 676 | 677 | # get the dimensions 678 | xdim, ydim, zdim, tdim = ppt.get_dims(nc) 679 | # set up dimension ordering 680 | dim_order = (tdim, zdim, ydim, xdim) 681 | # add lat/lon values 682 | lats.append(np.squeeze(ppt.permute(nc.variables["lat"], dim_order))) 683 | lons.append(np.squeeze(ppt.permute(nc.variables["lon"], dim_order))) 684 | 685 | myvar = varname 686 | for name in list(nc.variables.keys()): 687 | v = nc.variables[name] 688 | if getattr(v, "standard_name", "") == varname: 689 | print(("variabe {0} found by its standard_name {1}".format(name, varname))) 690 | myvar = name 691 | pass 692 | print((" - reading variable %s from file %s" % (myvar, filename))) 693 | try: 694 | data = np.squeeze(ppt.permute(nc.variables[myvar], dim_order)) 695 | if data.ndim == 3: 696 | data = data[level, :] 697 | except: 698 | print(("ERROR: unknown or not-found variable '%s' in file %s ... ending ..." % (variable.var_name, filename))) 699 | import sys 700 | 701 | sys.exit(1) 702 | 703 | try: 704 | inunit = str(nc.variables[myvar].units) 705 | except: 706 | print(("ERROR: units not found in variable '%s' in file %s ... ending ..." 
% (myvar, filename))) 707 | import sys 708 | 709 | sys.exit(1) 710 | 711 | if outunit is not None: 712 | data = ppt.unit_converter(data, inunit, outunit) 713 | 714 | if variable.var_name in vars_dem: 715 | mask = data <= variable.vmin 716 | values.append(np.ma.array(data, mask=mask)) 717 | else: 718 | try: 719 | fill = nc.variables[var]._FillValue 720 | mask = data == fill 721 | values.append(np.ma.array(data, mask=mask)) 722 | except: 723 | values.append(data) 724 | 725 | ocean_mask_varname = "oceanmask" 726 | if ocean_mask_varname in list(nc.variables.keys()): 727 | ocean_mask.append(np.squeeze(ppt.permute(nc.variables["mask"]))) 728 | else: 729 | ocean_mask.append(np.zeros_like(data)) 730 | nc.close() 731 | 732 | 733 | # set the print mode 734 | if print_mode in "height": 735 | ntn = nt 736 | if ntn == 2: 737 | lw, pad_inches = ppt.set_mode(print_mode, aspect_ratio=0.75) 738 | if ntn == 3: 739 | lw, pad_inches = ppt.set_mode(print_mode, aspect_ratio=0.55) 740 | elif ntn == 4: 741 | lw, pad_inches = ppt.set_mode(print_mode, aspect_ratio=0.35) 742 | elif ntn == 5: 743 | lw, pad_inches = ppt.set_mode(print_mode, aspect_ratio=0.25) 744 | else: 745 | lw, pad_inches = ppt.set_mode(print_mode) 746 | else: 747 | lw, pad_inches = ppt.set_mode(print_mode) 748 | 749 | # make a separate colorbar (if requested) 750 | if colorbar: 751 | 752 | fig = plt.figure() 753 | ax = fig.add_axes([0.05, 0.05, 0.05, 0.9]) 754 | 755 | plt.matplotlib.colorbar.ColorbarBase( 756 | ax, 757 | cmap=variable.cmap, 758 | norm=variable.norm, 759 | extend=variable.extend, 760 | drawedges=False, 761 | ticks=variable.ticks, 762 | format=variable.format, 763 | ) 764 | 765 | OUTNAME = var + "_colorbar." + suffix 766 | print((" writing colorbar %s ..." 
% OUTNAME)) 767 | plt.savefig(OUTNAME, bbox_inches="tight") 768 | 769 | 770 | # create the figure 771 | fig = plt.figure() 772 | if singlerow: 773 | grid = ImageGrid( 774 | fig, 775 | 111, # similar to subplot(111) 776 | nrows_ncols=(1, nt), # creates 1 x nt grid of axes 777 | axes_pad=0.05, # pad between axes in inch. 778 | cbar_mode="single", 779 | cbar_size=0.115, 780 | cbar_location="right", 781 | share_all=True, 782 | ) 783 | elif singlecolumn: 784 | grid = ImageGrid( 785 | fig, 786 | 111, # similar to subplot(111) 787 | nrows_ncols=(nt, 1), # creates nt x 1 grid of axes 788 | axes_pad=0.05, # pad between axes in inch. 789 | cbar_mode="single", 790 | cbar_size=0.115, 791 | cbar_location="bottom", 792 | share_all=True, 793 | ) 794 | else: 795 | grid = ImageGrid( 796 | fig, 797 | 111, # similar to subplot(111) 798 | nrows_ncols=(nt / 3, 3), # creates 2 x nt/2 grid of axes 799 | axes_pad=0.1, # pad between axes in inch. 800 | cbar_mode="single", 801 | cbar_size=0.115, 802 | cbar_location=colorbar_position, 803 | share_all=True, 804 | ) 805 | 806 | 807 | if variable.var_name not in (vars_speed, vars_dem, vars_topo) and (bounds is None): 808 | variable.vmin = data.min() 809 | variable.vmax = data.max() 810 | 811 | if bounds: 812 | variable.norm = colors.Normalize(vmin=variable.vmin, vmax=variable.vmax) 813 | variable.extend = "both" 814 | variable.ticks = None 815 | variable.format = None 816 | 817 | for k in range(0, nt): 818 | ax = grid[k] 819 | m.ax = ax 820 | xx, yy = m(lons[k], lats[k]) 821 | 822 | # Draw a background if given 823 | if background == "bluemable": 824 | m.bluemarble() 825 | elif background == "etopo": 826 | m.etopo() 827 | elif background == "shadedrelief": 828 | m.shadedrelief() 829 | else: 830 | pass 831 | 832 | # Plot GeoTIFF file if given 833 | if geotiff_filename is not None: 834 | if shaded: 835 | m.imshow(np.flipud(geotiff.RasterArray), cmap=plt.cm.gray, rasterized=geotiff_rasterized) 836 | else: 837 | m.pcolormesh( 838 | xx_gtiff, 
yy_gtiff, np.flipud(geotiff.RasterArray), cmap=plt.cm.gray, rasterized=geotiff_rasterized 839 | ) 840 | 841 | # Draw a boundary mask. Areas where 842 | # obs_values <= boundary_tol and values > boundary_tol 843 | # are colored brown. 844 | if boundary_tol and obs_file: 845 | boundary_mask = np.zeros_like(data) 846 | b_mask = np.ones_like(data) 847 | b_mask[np.logical_and((obs_values <= boundary_tol), (values[k] > boundary_tol))] = 0 848 | b_mask[ocean_mask[k] == 4] = 1 849 | boundary_mask = np.ma.array(data=boundary_mask, mask=b_mask) 850 | b = m.pcolormesh(xx, yy, boundary_mask, cmap=plt.cm.BrBG, alpha=alpha, rasterized=rasterized) 851 | 852 | # If observations are given, calculate absolute or relative differences 853 | if obs_file: 854 | if relative: 855 | data = (values[k] - obs_values) / obs_values 856 | cs = m.pcolormesh(xx, yy, data, cmap=variable.cmap, alpha=alpha, norm=variable.norm, rasterized=rasterized) 857 | else: 858 | data = values[k] - obs_values 859 | cs = m.pcolormesh(xx, yy, data, cmap=variable.cmap, alpha=alpha, norm=variable.norm, rasterized=rasterized) 860 | else: 861 | # otherwise just plot data 862 | data = values[k] 863 | if shaded: 864 | from matplotlib.colors import LightSource 865 | 866 | # create light source object. 867 | lightsource = LightSource(hsv_min_val=0.1, hsv_max_val=0.9, hsv_min_sat=0.85, hsv_max_sat=0.15) 868 | # convert data to rgba array including shading from light source. 
869 | # (must specify color map) 870 | data = lightsource.shade(data, variable.cmap) 871 | cs = m.imshow(data, cmap=variable.cmap, alpha=alpha, norm=variable.norm, rasterized=rasterized) 872 | else: 873 | cs = m.pcolormesh(xx, yy, data, cmap=variable.cmap, alpha=alpha, norm=variable.norm, rasterized=rasterized) 874 | 875 | dlat = np.abs(lats[k][-1, -1] - lats[k][0, 0]) 876 | dlon = np.abs(lons[k][-1, -1] - lons[k][0, 0]) 877 | if dlat > 20: 878 | parallels_spacing = 5 879 | elif (dlat > 10) and (dlat <= 20): 880 | parallels_spacing = 2 881 | elif (dlat > 5) and (dlat <= 10): 882 | parallels_spacing = 1 883 | elif (dlat > 5) and (dlat <= 1): 884 | parallels_spacing = 1 885 | elif (dlat > 5) and (dlat <= 1): 886 | parallels_spacing = 1 887 | else: 888 | parallels_spacing = 0.5 889 | if dlon > 20: 890 | meridian_spacing = 10 891 | elif (dlon > 11) and (dlon <= 20): 892 | meridian_spacing = 5 893 | elif (dlon > 6) and (dlon <= 11): 894 | meridian_spacing = 2 895 | elif (dlon > 2) and (dlon <= 6): 896 | meridian_spacing = 1 897 | elif (dlon > 0.6) and (dlon <= 2): 898 | meridian_spacing = 0.5 899 | else: 900 | meridian_spacing = 0.5 901 | 902 | if singlerow: 903 | m.drawmeridians(np.arange(-175.0, 175.0, meridian_spacing), labels=[0, 0, 0, 1], linewidth=0.5) 904 | if k == 0: 905 | m.drawparallels(np.arange(-90.0, 90.0, parallels_spacing), labels=[1, 0, 0, 0], linewidth=0.5) 906 | else: 907 | m.drawparallels(np.arange(-90.0, 90.0, parallels_spacing), labels=[0, 0, 0, 0], linewidth=0.5) 908 | elif singlecolumn: 909 | m.drawparallels(np.arange(-90.0, 90.0, parallels_spacing), labels=[1, 0, 0, 0], linewidth=0.5) 910 | if k == nt - 1: 911 | m.drawmeridians(np.arange(-175.0, 175.0, meridian_spacing), labels=[0, 0, 0, 1], linewidth=0.5) 912 | else: 913 | m.drawmeridians(np.arange(-175.0, 175.0, meridian_spacing), labels=[0, 0, 0, 0], linewidth=0.5) 914 | else: 915 | if (k == 0) or (k == 3): 916 | m.drawparallels(np.arange(-90.0, 90.0, parallels_spacing), labels=[1, 0, 0, 0], 
linewidth=0.5) 917 | else: 918 | m.drawparallels(np.arange(-90.0, 90.0, parallels_spacing), labels=[0, 0, 0, 0], linewidth=0.5) 919 | if k >= 3: 920 | m.drawmeridians(np.arange(-175.0, 175.0, meridian_spacing), labels=[0, 0, 0, 1], linewidth=0.5) 921 | else: 922 | m.drawmeridians(np.arange(-90.0, 90.0, meridian_spacing), labels=[0, 0, 0, 0], linewidth=0.5) 923 | 924 | # add coastlines if requested (default is False) 925 | if coastlines: 926 | m.drawcoastlines(linewidth=0.25) 927 | 928 | if inner_titles: 929 | for ax in range(0, nt): 930 | t = ppt.add_inner_title(fig.axes[ax], inner_titles[ax], loc=2) 931 | t.patch.set_ec("none") 932 | 933 | if drawmapscale: 934 | x_c = m.llcrnrx + np.abs(m.urcrnrx - m.llcrnrx) * 0.15 935 | y_c = m.llcrnry + np.abs(m.urcrnry - m.llcrnry) * 0.075 936 | lon_c, lat_c = m(x_c, y_c, inverse=True) 937 | ms_width = np.abs(m.urcrnrx - m.llcrnrx) * 0.2 / 1e3 938 | m.drawmapscale( 939 | lon_c, lat_c, lon_0, lat_0, ms_width, units="km", fontsize=plt.rcParams["font.size"], barstyle="fancy" 940 | ) 941 | 942 | contour_colors = ["white", "black"] 943 | if shape_filename: 944 | for index, shpfile in enumerate(shape_filename): 945 | shpfile = shpfile.split(".shp")[0] 946 | m.readshapefile(shpfile, "my_shapefile", linewidth=0.75, color=contour_colors[index]) 947 | # m.readshapefile(shape_filename.split('.shp')[0], 948 | # 'my_shapefile', linewidth=1.1) 949 | 950 | 951 | if singlerow: 952 | cbar = plt.matplotlib.colorbar.ColorbarBase( 953 | fig.axes[nt], 954 | cmap=variable.cmap, 955 | norm=variable.norm, 956 | extend=variable.extend, 957 | orientation="vertical", 958 | drawedges=False, 959 | ticks=variable.ticks, 960 | format=variable.format, 961 | ) 962 | elif singlecolumn: 963 | cbar = plt.matplotlib.colorbar.ColorbarBase( 964 | fig.axes[nt], 965 | cmap=variable.cmap, 966 | norm=variable.norm, 967 | extend=variable.extend, 968 | orientation="horizontal", 969 | drawedges=False, 970 | ticks=variable.ticks, 971 | ) 972 | else: 973 | if 
colorbar_position in ("bottom", "upper"): 974 | orientation = "horizontal" 975 | else: 976 | orientation = "vertical" 977 | cbar = plt.matplotlib.colorbar.ColorbarBase( 978 | fig.axes[nt], 979 | cmap=variable.cmap, 980 | norm=variable.norm, 981 | extend=variable.extend, 982 | orientation=orientation, 983 | drawedges=False, 984 | ticks=variable.ticks, 985 | format=variable.format, 986 | ) 987 | 988 | # to prevent the pdf file having white lines 989 | cbar.solids.set_edgecolor("face") 990 | if colorbar_label: 991 | cbar.set_label(variable.colorbar_label) 992 | 993 | print((" writing image %s ..." % out_file)) 994 | # fig.savefig(out_file, bbox_inches='tight', dpi=out_res, pad_inches=pad_inches) 995 | fig.savefig(out_file, bbox_inches="tight", dpi=out_res) 996 | 997 | plt.close() 998 | del fig 999 | --------------------------------------------------------------------------------