├── .gitignore
├── .hgignore
├── .project
├── .pydevproject
├── Docs
└── Thesis Excerpt
│ ├── geopy.kilepr
│ ├── geopy.pdf
│ ├── geopy.tex
│ └── references.bib
├── LICENSE
├── README.md
└── src
├── archive
├── atmdyn
│ ├── __init__.py
│ ├── constants.py
│ ├── etaVar.py
│ ├── f2pyVar.py
│ ├── meteoVar.py
│ └── srfcVar.py
├── atmdyn_test.py
├── plotting_test_old.py
├── read_snowdas.py
└── runoff_FRB_ARB.py
├── datasets
├── C1W.py
├── CESM.py
├── CFSR.py
├── CORDEX.py
├── CRU.py
├── CaSPAr.py
├── ClimateStations.py
├── EC.py
├── ERA5.py
├── GHCN.py
├── GPCC.py
├── MergedForcing.py
├── NARR.py
├── NRCan.py
├── PCIC.py
├── PRISM.py
├── SnoDAS.py
├── Unity.py
├── WRF.py
├── WSC.py
├── __init__.py
├── common.py
└── misc.py
├── geodata
├── __init__.py
├── base.py
├── gdal.py
├── misc.py
├── netcdf.py
├── station.py
└── stats.py
├── geodata_test.py
├── legacy_plotting
├── __init__.py
├── areastats.py
├── legacy.py
└── multimap.py
├── misc_test.py
├── plotting
├── __init__.py
├── archive
│ ├── lineplots.py
│ ├── mapplots.py
│ ├── misc.py
│ └── old_plots.py
├── axes.py
├── colormaps
│ ├── HTML_Colors.png
│ ├── __init__.py
│ ├── cmap_data
│ │ ├── cbathy.dat
│ │ ├── coolavhrrmap.dat
│ │ ├── ctopo.dat
│ │ ├── odv.dat
│ │ ├── redblue_dark.dat
│ │ ├── redblue_light.dat
│ │ ├── redgreen.dat
│ │ └── rscolmap.dat
│ ├── cmap_samples.png
│ ├── colormaps.ipynb
│ └── colormaps.py
├── figure.py
├── mapsetup.py
├── misc.py
├── properties.py
├── stylesheets
│ ├── default.mplstyle
│ ├── myggplot.mplstyle
│ ├── presentation.mplstyle
│ └── publication.mplstyle
└── taylor.py
├── plotting_test.py
├── processing
├── __init__.py
├── bc_methods.py
├── biascorrection.py
├── export.py
├── exstns.py
├── misc.py
├── multiprocess.py
├── newvars.py
├── process.py
├── regrid.py
├── shpavg.py
├── wrfavg.py
└── yaml_samples
│ ├── export.yaml
│ ├── exstns.yaml
│ ├── regrid.yaml
│ ├── shpavg.yaml
│ └── wrfavg.yaml
└── utils
├── __init__.py
├── ascii.py
├── constants.py
├── fix_time.py
├── misc.py
├── nanfunctions.py
├── nctools.py
├── signalsmooth.py
├── simple_regrid.py
└── stats.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.orig
2 | *~
3 | *.o
4 | *.so
5 | *.mod
6 | *.pyc
7 | *.kate-swp
8 |
9 | *.aux
10 | *.log
11 | *.out
12 | *.toc
13 | *.lof
14 | *.fff
15 | *.ttt
16 | *.nav
17 | *.snm
18 | *.bbl
19 | *.blg
20 | *.backup
21 |
22 | *.eps
23 | *.ps
24 | *.pdf
25 | *.jpg
26 | *.tif
27 | *.tiff
28 | *.png
29 | *.zip
30 | *.gz
31 | *.bz2
32 | *.bz
33 | *.tgz
34 | *.tar
35 |
36 | *.nc
37 |
38 | data/
39 | figures/
40 | .settings/
41 | bin/
42 | test/
43 | gmon.out
44 |
45 | .directory
46 | .vscode/sftp.json
47 |
--------------------------------------------------------------------------------
/.hgignore:
--------------------------------------------------------------------------------
1 | syntax: glob
2 | *.orig
3 | *~
4 | *.o
5 | *.so
6 | *.mod
7 | *.pyc
8 | *.kate-swp
9 |
10 | syntax: glob
11 | *.aux
12 | *.log
13 | *.out
14 | *.toc
15 | *.lof
16 | *.fff
17 | *.ttt
18 | *.nav
19 | *.snm
20 | *.bbl
21 | *.blg
22 | *.backup
23 |
24 | syntax: glob
25 | *.eps
26 | *.ps
27 | *.pdf
28 | *.jpg
29 | *.tif
30 | *.tiff
31 | *.png
32 | *.zip
33 | *.gz
34 | *.bz2
35 | *.bz
36 | *.tgz
37 | *.tar
38 |
39 | syntax: glob
40 | *.nc
41 |
42 | syntax: regexp
43 | ^Articles$
44 | ^archive$
45 | ^data$
46 | ^figures$
47 | ^.settings$
48 | ^bin$
49 | ^test$
50 | ^gmon\.out$
51 |
52 | syntax: glob
53 | datasets/wrfavg
54 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | GeoPy
4 |
5 |
6 | LazyArray
7 | Projects
8 | WAFO
9 | WRF Tools
10 |
11 |
12 |
13 | org.python.pydev.PyDevBuilder
14 |
15 |
16 |
17 |
18 | de.loskutov.FileSync.FSBuilder
19 |
20 |
21 |
22 |
23 |
24 | org.python.pydev.pythonNature
25 |
26 |
27 |
--------------------------------------------------------------------------------
/.pydevproject:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | /GeoPy/src
7 |
8 |
9 |
10 | python interpreter
11 |
12 | geospatial
13 |
14 |
15 |
--------------------------------------------------------------------------------
/Docs/Thesis Excerpt/geopy.kilepr:
--------------------------------------------------------------------------------
1 | [General]
2 | def_graphic_ext=pdf
3 | img_extIsRegExp=false
4 | img_extensions=.eps .jpg .jpeg .png .pdf .ps .fig .gif
5 | kileprversion=2
6 | kileversion=2.1.3
7 | lastDocument=geopy.tex
8 | masterDocument=
9 | name=geopy
10 | pkg_extIsRegExp=false
11 | pkg_extensions=.cls .sty .bbx .cbx .lbx
12 | src_extIsRegExp=false
13 | src_extensions=.tex .ltx .latex .dtx .ins
14 |
15 | [Tools]
16 | MakeIndex=
17 | QuickBuild=
18 |
19 | [document-settings,item:geopy.tex]
20 | Bookmarks=
21 | Encoding=UTF-8
22 | Highlighting=LaTeX
23 | Indentation Mode=latex
24 | Mode=LaTeX
25 |
26 | [document-settings,item:references.bib]
27 | Bookmarks=
28 | Encoding=UTF-8
29 | Highlighting=BibTeX
30 | Indentation Mode=latex
31 | Mode=BibTeX
32 |
33 | [item:geopy.kilepr]
34 | archive=true
35 | column=27
36 | encoding=
37 | highlight=
38 | line=0
39 | mode=
40 | open=false
41 | order=-1
42 |
43 | [item:geopy.tex]
44 | archive=true
45 | column=0
46 | encoding=UTF-8
47 | highlight=LaTeX
48 | line=164
49 | mode=LaTeX
50 | open=true
51 | order=0
52 |
53 | [item:references.bib]
54 | archive=true
55 | column=0
56 | encoding=UTF-8
57 | highlight=BibTeX
58 | line=0
59 | mode=BibTeX
60 | open=true
61 | order=1
62 |
63 | [view-settings,view=0,item:geopy.tex]
64 | CursorColumn=0
65 | CursorLine=164
66 | JumpList=
67 | ViMarks=.,164,0,[,164,0,],164,0
68 |
69 | [view-settings,view=0,item:references.bib]
70 | CursorColumn=0
71 | CursorLine=0
72 | JumpList=
73 | ViMarks=
74 |
--------------------------------------------------------------------------------
/Docs/Thesis Excerpt/geopy.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aerler/GeoPy/319016a7f9340b29fdc3c6d4b9df5fbe78378da8/Docs/Thesis Excerpt/geopy.pdf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GeoPy
2 | The backbone of my geo-spatial analysis code.
3 |
4 |
5 | If you find my code useful, please consider citing one of my papers:
6 |
7 | [Erler, Andre R., W. Richard Peltier, 2017: Projected Hydro-climatic Changes in Two Major River Basins at the Canadian West Coast Based on High-resolution Regional Climate Simulations, J. Climate, 30, 8081-8105.](http://journals.ametsoc.org/doi/abs/10.1175/JCLI-D-16-0870.1)
8 |
9 | [Erler, Andre R., W. Richard Peltier, 2016: Projected Changes in Precipitation Extremes for Western Canada based on High-resolution Regional Climate Simulations, J. Climate, 29, 8841-8863.](http://journals.ametsoc.org/doi/abs/10.1175/JCLI-D-15-0530.1)
10 |
11 | [Erler, Andre R., W. Richard Peltier, Marc d'Orgeville, 2015: Dynamically Downscaled High Resolution Hydro-Climate Projections for Western Canada, J. Climate, 28, 423-450.](http://journals.ametsoc.org/doi/abs/10.1175/JCLI-D-14-00174.1)
12 |
13 |
14 | A high-level description of different modules and their functionality is available in PDF format (excerpt from my thesis):
15 | [Docs/Thesis Excerpt/geopy.pdf](https://github.com/aerler/GeoPy/blob/master/Docs/Thesis%20Excerpt/geopy.pdf)
16 |
--------------------------------------------------------------------------------
/src/archive/atmdyn/__init__.py:
--------------------------------------------------------------------------------
'''
Created on 2013-08-24

This package contains functions that provide variables and functions commonly used in atmospheric dynamics.
Most of the code was adapted from modules originally developed for the PyGeode plugin AtmDyn.

@author: Andre R. Erler, GPL v3
'''
--------------------------------------------------------------------------------
/src/archive/atmdyn/constants.py:
--------------------------------------------------------------------------------
'''
Created on 2010-11-26, adapted on 2013-08-24

Some default physical constants (mostly used in meteoVar).

@author: Andre R. Erler, GPL v3
'''

from numpy import pi, sin

# actual constants
R = 8.31447215 # J/(mol K), universal gas constant (Wikipedia; CODATA 2018 lists 8.314462618 -- value left unchanged to preserve results)
cp = 1005.7 # J/(kg K), specific heat of dry air per mass (AMS Glossary)
g0 = 9.80665 # m/s**2, standard gravity, for geopotential altitude (else actually y-dependent g(y))
Mair = 0.0289644 # kg/mol, Molecular mass of dry air
Re = 6371229 # m, Radius of planet earth
T0 = 273.15 # K, Temperature at 0 deg C, i.e. negative absolute zero in Celsius
Omega = 2*pi/((23*60+56)*60+4.1) # 1/s, Earth's rotation rate (using the sidereal day: 23 h 56 min 4.1 s)
# some derived constants, for convenience
Cp = cp*Mair # J/(mol K), specific heat of dry air per mole
Rd = R/Mair # J/(kg K), gas constant for dry air
kappa = R/Cp # ~2/7 (~0.286, = Rd/cp), adiabatic exponent for dry air (NB: earlier comment said "~7/2", which is the inverse)
# not exactly physical constants
fc = 2*Omega*sin(pi/4) # 1/s, Coriolis parameter at 45 deg N
p0 = 1e5 # Pa, reference pressure (e.g. for potential temperature)
--------------------------------------------------------------------------------
/src/archive/atmdyn/etaVar.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2011-02-24
3 |
4 | compute some quantities from hybrid vertical coordinates
5 | everything here is specific to hybrid coordinates
6 |
7 | @author: Andre R. Erler
8 | '''
9 |
10 | from pygeode.atmdyn.constants import Rd, g0
11 | from pygeode.atmdyn.properties import variablePlotatts # import plot properties from different file
12 | from pygeode.var import Var
13 |
14 | # surface pressure from log-surface-pressure
def SurfacePressure(lnsp):
    """Convert log-surface-pressure to surface pressure (Pa) and attach metadata."""
    ps = lnsp.exp()  # invert the logarithm
    ps.name = 'ps'
    # assemble the attribute dictionary in one go
    ps.atts = {'name': ps.name,
               'long_name': 'surface pressure',
               'standard_name': 'surface pressure',
               'units': 'Pa'}  # currently units are Pa (as in surface pressure)
    # plot attributes (defaults)
    ps.plotatts = variablePlotatts['ps']
    return ps
29 |
30 | # pressure from eta coordinates (on half levels)
def Pressure(ps, eta):
    '''
    Compute the 3D pressure field on full model levels from surface pressure ps
    and a hybrid (eta) axis carrying 'A' and 'B' coefficients as auxiliary
    arrays (p = A + B*ps).
    NOTE(review): this function overwrites eta.auxarrays['A']/['B'] in place
    with half-level averages; calling it twice on the same axis would average
    twice -- confirm callers invoke it only once per axis.
    '''
    from numpy import empty
    from pygeode.axis import TAxis
    from pygeode.varoperations import transpose
    # Note: the 'A' and 'B' coefficients are assumed to be stored as
    # auxiliary arrays with the eta-axis
    # for now just extend A and B
    A = empty(len(eta)+1); B = empty(len(eta)+1);
    A[1:] = eta.auxarrays['A']; B[1:] = eta.auxarrays['B']
    A[0] = 0; B[0] = 0; # this is correct: highest half-level has zero pressure
    # compute A and B on full levels and substitute back into eta axis
    eta.auxarrays['A'] = (A[1:] + A[0:-1]) /2
    eta.auxarrays['B'] = (B[1:] + B[0:-1]) /2
    # compute pressure values from parameters stored with axis eta
    p = eta.auxasvar('A') + eta.auxasvar('B')*ps
    # transpose into common order: time first, then eta
    if p.hasaxis(TAxis):
        p = transpose(p,p.axes[p.whichaxis(TAxis)].name,eta.name)
    # short name
    p.name = 'p'
    # attributes
    p.atts['name'] = p.name
    p.atts['long_name'] = 'pressure'
    p.atts['standard_name'] = 'pressure'
    p.atts['units'] = 'Pa' # currently units are Pa (as in surface pressure)
    # plot attributes (defaults)
    p.plotatts = variablePlotatts['p']
    return p
59 |
60 | # return a geopotential variable
def GeopotHeight(T, ps, phis):
    """Geopotential height variable with its axes arranged in the same order as T."""
    Z = GeopotHeightVar(T, ps, phis)
    # transpose the result so its axis order mirrors that of T
    order = [Z.whichaxis(axis) for axis in T.axes]
    return Z.transpose(*order)
68 |
69 | # geopotential height in eta coordinates (on full model levels)
class GeopotHeightVar(Var):
    '''
    Geopotential height on hybrid (eta) model levels, computed lazily from
    temperature T, surface pressure ps, and surface geopotential phis.
    '''
    # geopotential height is geopotential divided by standard gravity g0
    # geopotential can be computed from temperature and pressure
    # initialization
    # Note: currently the pressure field is computed on the fly from eta-coefficients
    # and surface pressure; this is faster than using a pre-computed 3D pressure field.
    def __init__(self, T, ps, phis):
        '''
        Precondition the inputs (axis order, sorted eta axis) and register
        output axes and metadata with the Var base class; no data is computed here.
        '''
        from numpy import diff
        from pygeode.axis import Hybrid
        from pygeode.varoperations import sorted
        # precondition input and store internally
        assert T.hasaxis(Hybrid), 'this function only computes geopotential from hybrid coordinates'
        # T: make vertical axis varying the slowest (default is 2nd slowest, after time)
        ietaT = T.whichaxis(Hybrid)
        inOrder = [ietaT] + list(range(0,ietaT)) + list(range(ietaT+1,T.naxes))
        self.T = T.transpose(*inOrder)
        # surface fields
        self.ps = ps # surface pressure
        self.phis = phis # surface geopotential
        # get vertical coefficients for hybrid coordinate
        self.A = T.axes[ietaT].auxarrays['A']
        self.B = T.axes[ietaT].auxarrays['B']
        # construct output axes: make eta varying the fastest, to prevent break-up in loop-over routine
        outAxes = T.axes[0:ietaT] + T.axes[ietaT+1:] + (T.axes[ietaT],)
        # ensure eta axis is ordered properly
        if not all(diff(self.T.eta.values)>0):
            self.T = sorted(self.T, eta=1)
            from warnings import warn
            warn('The vertical axis (eta) was not in the expected order - the sorted-fct. has been applied.')
        # attributes
        atts = {}
        atts['name'] = 'z'
        atts['long_name'] = 'geopotential height'
        atts['standard_name'] = 'geopotential'
        atts['units'] = 'm'
        atts['g0'] = g0
        atts['Rd'] = Rd
        # plot attributes (defaults)
        plotatts = variablePlotatts['z']
        # make proper Var-instance
        Var.__init__(self, axes=outAxes, dtype=self.T.dtype, name='z', values=None, atts=atts, plotatts=plotatts)
        self.ieta = self.whichaxis(Hybrid)
        # make sure axes are assigned properly and eta is the innermost axis
        assert self.naxes == T.naxes
        assert self.ieta == T.naxes-1
    # actual computation
    def getview(self, view, pbar):
        '''
        Integrate the hydrostatic relation level by level (downward in index,
        upward physically) to obtain geopotential height on full levels.
        '''
        from numpy import empty, prod, log #, arange, min
        # Geopotential requires the integration of pressure differences and temperature;
        # technically T has to be virtual temperature, but I'm ignoring that for now.
        # The computation is performed explicitly, level by level.
        # I believe the temperature data is actually on full levels.
        # Geopotential is computed on full levels as well.
        # source: IFS (31r1) Documentation, Part 3, pp. 6-8
        # url: http://www.ecmwf.int/research/ifsdocs/CY31r1/index.html
        # detect size and shape
        lev = view.integer_indices[self.ieta] # actually requested levels
        ie = prod(view.shape[0:self.ieta]+view.shape[self.ieta+1:]) # all non-vertical coordinates
        # construct new view for input variables
        #TODO: implement slicing more efficiently: only extend axis towards bottom (top is not necessary)
        # minLev = min(lev); lev = lev - minLev # adjust lev to use for indexing of extended field
        # newLev = arange(minLev,self.shape[self.ieta])
        # inView = view.modify_slice(self.ieta, newLev)
        # NOTE: it entirely escapes my comprehension, why the above does not work, but it gives ridiculous results
        inView = view.unslice(self.ieta) # just request entire axis... actually not necessary but simpler
        ke = inView.shape[self.ieta] # length of vertical coordinate
        # get data and cast into 2D array
        T = inView.get(self.T).reshape(ke,ie) # map_to(self.T.axes).
        T = (Rd/g0) * T # scale T (avoids some unnecessary operations)
        # allocate output data
        phi = empty((ke,ie), dtype=self.dtype)
        # ps & phi0 have different axes (2D)
        ps = view.get(self.ps).reshape(1,ie)
        phis = view.get(self.phis).reshape((1,ie))
        # initial conditions on half-levels
        hlPhi = phis.copy()/g0 # convert to height (divide by g0)
        # compute half-level pressures adjacent to first model level
        pp = self.A[ke-1] + self.B[ke-1]*ps
        pm = self.A[ke-2] + self.B[ke-2]*ps
        # special treatment of first model level (full level)
        tmp = log(pp/pm) # used later
        phi[ke-1,:] = hlPhi + T[ke-1,:]*(1 - tmp*pm/(pp-pm));
        # loop over levels in reverse order
        for k in range(ke-2,0,-1):
            # compute half-level geopotential
            hlPhi += T[k,:] * tmp
            # advance pressure calculation
            pp = pm.copy(); pm = self.A[k-1] + self.B[k-1]*ps
            tmp = log(pp/pm)
            # correction has to be applied to get full levels
            phi[k,:] = hlPhi + T[k,:]*(1 - tmp*pm/(pp-pm)) # apply correction, store value
        # last step requires special treatment (to avoid index out of bounds in pm)
        # NOTE(review): this reuses T[k,:] with the loop's final k (k==1) for the
        # last half-level increment -- confirm against the IFS formulation
        hlPhi += T[k,:] * log(pp/pm)
        phi[0,:] = hlPhi + T[0,:]*log(2) # apply different correction
        # extract the requested slice
        phi = phi[lev,:]
        # return value in correct shape: transpose to make eta innermost axis, apply desired shape
        return phi.transpose().reshape(view.shape)
168 |
169 | # compute geopotential height using the integrate function
170 | #def GeopotHeight(T, p, phi0):
171 | # from pygeode.intgr import integrate
172 | # from pygeode.deriv import deriv
173 | # eta = T.getaxis(Hybrid)
174 | # dphi = Rd/g0 * T * deriv(p.log(), eta)
175 | # phi = integrate(dphi, eta, v0=phi0, order=-1)
176 | # phi.name = 'phi'
177 | # # attributes
178 | # phi.atts = {}
179 | # phi.atts['name'] = 'phi'
180 | # phi.atts['long_name'] = 'geopotential height'
181 | # phi.atts['standard_name'] = 'geopotential'
182 | # phi.atts['units'] = 'm'
183 | # return phi
--------------------------------------------------------------------------------
/src/archive/atmdyn/f2pyVar.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2011-05-17
3 |
4 | f2py wrappers for Fortran routines
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | from pygeode.atmdyn.properties import variablePlotatts # import plot properties from different file
10 | from pygeode.var import Var
11 |
12 | class RelativeVorticity(Var):
13 | '''
14 | Relative Vorticity (vertical component).
15 | This is a wrapper class that overloads getview and calls a Fortran library via f2py.
16 | '''
    def __init__(self, u, v, perix=True, name=None, atts=None, plotatts=None):
        '''
        relativeVorticity(u, v, perix=True, name=None, atts=None, plotatts=None)

        u, v: wind components with (..., lat, lon) as the last two axes;
        perix: treat the x-boundary as periodic in the Fortran kernel.
        '''
        from pygeode.axis import Lat, Lon
        from pygeode.atmdyn.constants import Re
        ## check axes
        # order: latitude and longitude have to be the last two
        assert u.whichaxis(Lat)==u.naxes-2 and u.whichaxis(Lon)==u.naxes-1, 'Unsupported axes order.'
        # homogeneity
        assert u.axes==v.axes, 'Axes are incompatible!'
        # handle meta data
        if name is None: name = 'ze'
        # parameter (set defaults, if not present)
        # NOTE(review): the default variable name is 'ze' but atts['name'] is 'zeta';
        # confirm this mismatch is intended
        defatts = {'name': 'zeta', 'units': r'$s^{-1}$', 'long_name': 'Relative Vorticity (vertical)',
                   'standard_name': 'Relative Vorticity', 'Re': Re, 'perix':perix}
        if atts: defatts.update(atts)
        # plot parameter
        defplot = variablePlotatts['ze']
        if plotatts: defplot.update(plotatts)
        # make proper Var-instance
        Var.__init__(self, axes=u.axes, dtype=u.dtype, name=name, values=None, atts=defatts, plotatts=defplot)
        # save variables and parameters
        self.perix = perix
        self.u = u; self.v = v
        self.lon = u.getaxis(Lon); self.lat = u.getaxis(Lat)
        self.Re = self.atts['Re']
44 |
45 | def getview(self, view, pbar):
46 | # Here the call to the Fortran library happens in all its glory.
47 | from numpy import empty, append, product
48 | from warnings import warn
49 | from f2py import meteo
50 | from pygeode.axis import XAxis
51 | # print(" >>>>> Hi, I'm a zeta instance! <<<<<") # to see how often this is called
52 | # extend view
53 | extView = view
54 | lperix = self.perix # switch for x-boundary handling for fortran code
55 | # extend view in lat and lon direction
56 | for i in [self.naxes-2, self.naxes-1]:
57 | # check if extension is possible
58 | if not view.shape[i]==self.shape[i]:
59 | intidx = extView.integer_indices[i] # get all indices
60 | if min(intidx)>0: intidx = append((min(intidx)-1),intidx) # extend below
61 | if max(intidx)max(idx): upper = upper-1 # cut off last
94 | # trim axis
95 | zeta = zeta.take(list(range(lower,upper)),axis=i)
96 | # return output
97 | return zeta
98 |
99 |
100 | class PotentialVorticity(Var):
101 | '''
102 | Potential Vorticity.
103 | This is a wrapper class that overloads getview and calls a Fortran library via f2py.
104 | '''
105 | def __init__(self, u, v, th, rho, w=None, z=None, perix=True, name=None, atts=None, plotatts=None):
106 | '''
107 | PotentialVorticity(u, v, th, rho, w=None, z=None, name=None, atts=None, plotatts=None)
108 | '''
109 | from pygeode.axis import Lat, Lon, Height, ZAxis, TAxis
110 | from pygeode.atmdyn.constants import Re, Omega
111 | ## check axes
112 | # order
113 | assert u.whichaxis(TAxis)==0 and u.whichaxis(Lat)==2 and u.whichaxis(Lon)==3, 'Unsupported axes order.'
114 | # homogeneity
115 | assert u.axes==v.axes, 'Axes are incompatible!'
116 | assert th.axes==rho.axes, 'Axes are incompatible!'
117 | assert u.axes==th.axes, 'Axes are incompatible!'
118 | if w: assert u.axes==w.axes, 'Axes are incompatible!' # should have same axes as u & v
119 | if z: # z is a field, e.g. geopotential on hybrid axis
120 | zField = True
121 | assert u.whichaxis(ZAxis)==1, 'Position of vertical axis is not supported.'
122 | if not z.axes==th.axes: # sort z's axes if necessary
123 | order = []
124 | for ax in th.axes:
125 | order.append(ax.name)
126 | z = z.transpose(*order)
127 | # assert z.axes==th.axes, 'Axes are incompatible!' # should have same axes as th & rho
128 | else: # just use height axis as z-field
129 | zField = False
130 | u.whichaxis(Height)==1, 'Position of vertical axis is not supported.'
131 | z = u.getaxis(Height) # expand to field later
132 | # handle meta data
133 | if th.name=='th':
134 | if name is None: name = 'PV'
135 | # parameter (set defaults, if not present
136 | defatts = {'name': 'PV', 'units': r'$K m^2 (s kg)^{-1}$', 'long_name': 'Ertel Potential Vorticity',
137 | 'standard_name': 'isentropic PV', 'Re': Re, 'Omega': Omega, 'perix':perix}
138 | elif th.name=='s':
139 | if name is None: name = 'PVs'
140 | # parameter (set defaults, if not present
141 | defatts = {'name': 'PVs', 'units': r'$J m^2 (K s)^{-1} kg^{-2}$', 'long_name': 'Entropy Potential Vorticity',
142 | 'standard_name': 'Entropy PV', 'Re': Re, 'Omega': Omega, 'perix':perix}
143 | if atts: defatts.update(atts)
144 | # plot parameter
145 | defplot = variablePlotatts[name]
146 | if plotatts: defplot.update(plotatts)
147 | # make proper Var-instance
148 | Var.__init__(self, axes=th.axes, dtype=th.dtype, name=name, values=None, atts=defatts, plotatts=defplot)
149 | # save variables and parameters
150 | self.perix = perix
151 | self.zField = zField
152 | self.u = u; self.v = v; self.w = w
153 | self.th = th; self.rho = rho; self.z = z
154 | self.lon = u.getaxis(Lon); self.lat = u.getaxis(Lat)
155 | self.Re = self.atts['Re']; self.Omega = self.atts['Omega']
156 |
157 | def getview(self, view, pbar):
158 | '''
159 | Here the call to the Fortran library happens in all its glory.
160 | Note: My own Fortran/f2py implementation is more than twice as fast as the NumPy version!
161 | '''
162 | from numpy import empty, zeros, append
163 | from warnings import warn
164 | from f2py import meteo
165 | from pygeode.axis import XAxis
166 | # print(" >>>>> Hi, I'm a PV instance! <<<<<") # to see how often this is called
167 | # extend view
168 | extView = view
169 | lperix = self.perix # switch for x-boundary handling for fortran code
170 | # extend view in vertical, lat, and lon direction (but not in time!)
171 | for i in range(1,self.naxes):
172 | # check if extension is possible
173 | if not view.shape[i]==self.shape[i]:
174 | intidx = extView.integer_indices[i] # get all indices
175 | if min(intidx)>0: intidx = append((min(intidx)-1),intidx) # extend below
176 | if max(intidx)max(idx): upper = upper-1 # cut off last
216 | # trim axis
217 | PV = PV.take(list(range(lower,upper)),axis=i)
218 | # return output
219 | return PV
220 |
class Theta(Var):
    '''
    Potential Temperature.
    This is a wrapper class that overloads getview and calls a Fortran library via f2py.
    '''
    def __init__(self, T, p, name=None, atts=None, plotatts=None):
        '''
        Theta(T, p, name=None, atts=None, plotatts=None)

        T: temperature variable; p: pressure variable (transposed to T's axis
        order if necessary).
        '''
        from pygeode.atmdyn.constants import p0, kappa
        # input checks
        if not T.axes==p.axes:
            # need to transpose p to fix axes order between T and p
            iaxes = list(range(len(T.axes)))
            for i in range(len(T.axes)):
                assert p.hasaxis(T.axes[i]), 'Axes of T and p are incompatible!'
                iaxes[p.whichaxis(T.axes[i])] = i # order of axes in p
            p = p.transpose(*iaxes)
        # handle meta data
        if name is None: name = 'th'
        # parameter (set defaults, if not present)
        defatts = {'name': 'th', 'units': 'K', 'long_name': 'potential temperature',
                   'standard_name': r'$\theta$', 'p0': p0, 'kappa': kappa}
        if atts: defatts.update(atts)
        # plot parameter
        defplot = {'plottitle': 'theta', 'plotunits': 'K'}
        if plotatts: defplot.update(plotatts)
        # make proper Var-instance
        # BUG FIX: previously the raw 'plotatts' argument was passed through,
        # discarding the merged defaults in 'defplot' (and passing None when no
        # plotatts were given); pass the merged dictionary instead
        Var.__init__(self, axes=T.axes, dtype=T.dtype, name=name, values=None, atts=defatts, plotatts=defplot)
        # save T & p and parameters
        self.T = T; self.p = p
        self.p0 = self.atts['p0']; self.kappa = self.atts['kappa']

    def getview(self, view, pbar):
        '''
        Here the call to the Fortran library happens in all its glory.
        Note: My own Fortran/f2py implementation is more than twice as fast as the NumPy version!
        '''
        from f2py import meteo
        # get input data
        inview = view # trivial here
        T = inview.get(self.T)
        p = inview.get(self.p)
        # reshape to 1D
        viewShape = T.shape # old shape (used to reshape output)
        T = T.reshape(T.size)
        p = p.reshape(p.size)
        # do number crunching (in Fortran)
        th = meteo.potentialtemperature(T,p,self.p0,self.kappa)
        # th = T*(self.p0/p)**self.kappa # NumPy
        # reshape output
        th = th.reshape(viewShape)
        # return output
        return th
--------------------------------------------------------------------------------
/src/archive/atmdyn/meteoVar.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2010-11-17
3 |
4 | Module that contains functions to compute common meteorological quantities.
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | from pygeode.atmdyn.constants import Rd, kappa, g0, cp, p0, T0
10 | from pygeode.atmdyn.properties import variablePlotatts # import plot properties from different file
11 |
12 | # function to look for available vertical axis
def findZAxis(var):
    """Return the vertical axis of var, preferring a geometric Height axis."""
    from pygeode.axis import Height, ZAxis
    # a Height axis is preferred; otherwise fall back to any vertical axis
    axis_type = Height if var.hasaxis(Height) else ZAxis
    return var.axes[var.whichaxis(axis_type)]
20 |
21 | # figure out proper vertical axis and perform some checks
def findAxis(var, z, ax):
    '''
    Resolve a height field/vector z and a vertical axis ax for var, filling in
    whichever was not supplied, and validate their consistency.
    '''
    from pygeode.axis import Axis, ZAxis, Height
    ## fill in missing arguments
    if not z:
        # no height field given: use the supplied axis, or detect one on var
        if not ax: ax = findZAxis(var)
        z = ax
    elif not ax:
        # height given but no axis: reuse z if it is itself an axis
        ax = z if isinstance(z,Axis) else findZAxis(var)
    # else: everything was given explicitly
    ## perform error checking
    assert var.hasaxis(ax), 'variable %s does not have axis %s'%(var.name,ax.name)
    if isinstance(z,Axis):
        # only an axis (no separate field/vector): both must be the same Height axis
        assert ax==z, 'inconsistent input axes'
        assert isinstance(ax,Height), 'axis %s is not an Height axis'%ax.name
    else:
        # z is a field/vector: ax must be a vertical axis of that field
        assert isinstance(ax,ZAxis), 'no vertical (ZAxis) axis found'
        assert z.hasaxis(ax), 'field/vector %s does not have axis %s'%(z.name,ax.name)
    # return the resolved pair
    return z,ax
48 |
# vertical velocity interpolated to a new vertical axis
# (the previous header comment, 'density of dry air', was a copy-paste error)
def verticalVelocity(w, z, interpType='linear', **kwargs):
    '''
    Interpolate vertical velocity w from its current vertical axis onto the
    (Height) axis z and attach default metadata.
    '''
    from pygeode.axis import Height, ZAxis
    from pygeode.interp import interpolate
    from warnings import warn
    # figure out axes
    oldZ = findZAxis(w)
    assert isinstance(z,ZAxis), 'interpolation only works along vertical axis'
    if (not isinstance(z,Height)) or (not isinstance(oldZ,Height)):
        warn('The current implementation of verticalVelocity is designed for interpolation between Height coordinates only.')
    assert oldZ.__class__ == z.__class__, 'old and new axes are not of the same type (class)'
    # interpolate values
    vv = interpolate(w, oldZ, z, interp_type=interpType)
    # attributes (defaults)
    vv.atts['name'] = 'w'
    vv.atts['units'] = 'm/s'
    vv.atts['long_name'] = 'vertical velocity on full model levels'
    vv.atts['standard_name'] = 'vertical velocity'
    vv.atts['interpolation'] = interpType
    # plot attributes (defaults)
    vv.plotatts = variablePlotatts['w']
    # apply user-defined attributes (from kwargs; override defaults)
    vv.atts.update(kwargs)
    # assign short name
    vv.name = vv.atts['name']
    vv.units = vv.atts['units']
    return vv
76 |
77 | # density of dry air
def Rho(T, p, **kwargs):
    '''Density of dry air via the ideal gas law: rho = p / (Rd*T).'''
    # compute field values
    rho = p/(Rd*T)
    # default attributes, assigned in one batch
    rho.atts.update({'name': 'rho',
                     'units': r'$kg/m^3$',
                     'long_name': 'density of dry air',
                     'standard_name': r'$\rho$',
                     'Rd': Rd})  # constant used in the computation
    # default plot attributes
    rho.plotatts = variablePlotatts['rho']
    # user-supplied attributes override the defaults
    rho.atts.update(kwargs)
    # propagate short name and units
    rho.name = rho.atts['name']
    rho.units = rho.atts['units']
    return rho
95 |
96 | # potential temperature
def Theta(T, p, **kwargs):
    '''Potential temperature: th = T * (p0/p)**kappa, with default metadata.'''
    # compute field values
    th = T*(p0/p)**kappa
    # default attributes, assigned in one batch
    th.atts.update({'name': 'th',
                    'units': 'K',
                    'long_name': 'Potential Temperature',
                    'standard_name': r'$\theta$',
                    'p0': p0,         # constants used in the computation
                    'kappa': kappa})
    # default plot attributes
    th.plotatts = variablePlotatts['th']
    # user-supplied attributes override the defaults
    th.atts.update(kwargs)
    # propagate short name and units
    th.name = th.atts['name']
    th.units = th.atts['units']
    return th
115 |
116 | # entropy (of an ideal gas)
def Entropy(t, p=None, **kwargs):
    '''
    Entropy of dry air (ideal gas), relative to the reference state (T0, p0).
    If p is given, t is interpreted as (absolute) temperature; if p is None,
    t is interpreted as potential temperature.
    '''
    from pygeode.ufunc import log
    # compute values
    # BUG FIX: test 'p is not None' explicitly instead of truth-testing p;
    # truth-testing a variable object is ambiguous and could silently treat
    # a valid-but-falsy pressure argument as absent
    if p is not None: s = cp*log(t/T0) - Rd*log(p/p0) # pressure given: t is normal temperature
    else: s = cp*log(t/T0) # no p given: t is potential temperature
    # attributes (defaults)
    s.atts['name'] = 's'
    s.atts['units'] = 'J/(kg K)'
    s.atts['long_name'] = 'Entropy' # change name
    s.atts['standard_name'] = 's'
    s.atts['cp'] = cp # constants
    s.atts['T0'] = T0
    s.atts['Rd'] = Rd
    s.atts['p0'] = p0
    # plot attributes (defaults)
    s.plotatts = variablePlotatts['s']
    # apply user-defined attributes (from kwargs; override defaults)
    s.atts.update(kwargs)
    # assign short name
    s.name = s.atts['name']
    s.units = s.atts['units']
    return s
139 |
140 | # lapse-rate
def LR(T, z=None, ax=None, **kwargs):
    ''' Temperature lapse-rate: -dT/dz along the vertical axis.
        If height is available as a variable, pass it explicitly as z. '''
    # determine vertical coordinate and axis
    (z, ax) = findAxis(T,z,ax)
    # negative vertical derivative of temperature
    lr = -1*T.deriv(ax, dx=z)
    # default attributes, then user-supplied overrides from kwargs
    defatts = dict(name='lr',
                   units=T.atts['units']+'/'+z.atts['units'], # derived units
                   long_name='temperature lapse-rate',
                   standard_name=r'$\gamma$')
    lr.atts.update(defatts)
    lr.atts.update(kwargs)
    # plot attributes (defaults)
    lr.plotatts = variablePlotatts['lr']
    # short name and units come from the (possibly overridden) attributes
    lr.name = lr.atts['name']
    lr.units = lr.atts['units']
    return lr
160 |
161 | # potential temperature lapse-rate
def ThetaLR(th, z=None, ax=None, **kwargs):
    ''' Vertical gradient of potential temperature: d(theta)/dz.
        If height is available as a variable, pass it explicitly as z. '''
    # figure out axis
    (z, ax) = findAxis(th,z,ax)
    # compute values
    thlr = th.deriv(ax,dx=z)
    # attributes (defaults)
    thlr.atts['name'] = 'thlr'
    thlr.atts['units'] = th.atts['units']+'/'+z.atts['units'] # change units
    thlr.atts['long_name'] = 'potential temperature lapse-rate' # change name
    thlr.atts['standard_name'] = r'$\partial_z\theta$'
    thlr.atts['p0'] = p0 # theta constants
    thlr.atts['kappa'] = kappa
    # plot attributes (defaults)
    # NOTE: fixed lookup key typo 'thle' -> 'thlr' (consistent with the short
    # name assigned above and the lookup convention of the sibling functions)
    thlr.plotatts = variablePlotatts['thlr']
    # apply user-defined attributes (from kwargs; override defaults)
    thlr.atts.update(kwargs)
    # assign short name
    thlr.name = thlr.atts['name']
    thlr.units = thlr.atts['units']
    return thlr
183 |
184 | # Brunt-Vaeisaelae Frequency Squared N2
def N2(th, z=None, ax=None, entropy=False, **kwargs):
    ''' Brunt-Vaisala frequency squared, computed from potential temperature,
        or from entropy when entropy=True.
        If height is available as a variable, pass it explicitly as z. '''
    # determine vertical coordinate and axis
    (z, ax) = findAxis(th,z,ax)
    # compute values
    if entropy: # first argument is in fact entropy, not theta
        nn = (g0/cp)*th.deriv(ax,dx=z)
    else: # standard formula based on theta
        nn = g0*th.deriv(ax,dx=z)/th
    # default attributes, then user-supplied overrides from kwargs
    defatts = dict(name='N2', units=r'$1/s^2$',
                   long_name='Brunt-V\"ais\"al\"a Frequency Squared',
                   standard_name=r'$N^2$',
                   g0=g0, # gravitational acceleration
                   cp=cp, T0=T0, Rd=Rd, p0=p0) # entropy constants
    nn.atts.update(defatts)
    nn.atts.update(kwargs)
    # plot attributes (defaults)
    nn.plotatts = variablePlotatts['N2']
    # short name and units come from the (possibly overridden) attributes
    nn.name = nn.atts['name']
    nn.units = nn.atts['units']
    return nn
212 |
--------------------------------------------------------------------------------
/src/archive/atmdyn/srfcVar.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2011-05-30
3 |
4 | a module containing functions to compute common atmospheric 2D/surface variables
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | from pygeode.atmdyn.properties import variablePlotatts # import plot properties from different file
10 |
11 | # new axes for multiple surface values
12 | from pygeode.axis import ZAxis
13 |
14 | ## WMO-Tropopause
15 |
16 | # axis for TP height
class TPdef(ZAxis):
    ''' Degenerate vertical axis enumerating tropopause definitions. '''
    name = 'TPdef' # default name
    units = '' # dimensionless enumerator
    # formatting attributes for axis labels and ticks (see formatter method)
    plotatts = ZAxis.plotatts.copy()
    plotatts.update(formatstr='%d', # just print integers
                    plottitle='TP Def. #', # name displayed in plots (axis label)
                    plotunits='') # displayed units (after offset and scalefactor)
25 |
26 | # a variable class that computes parameters computed from a single column
27 | from pygeode.var import Var
class WMOTP(Var):
    ''' Variable that diagnoses the WMO tropopause height from temperature
        columns; the actual column computation is delegated to the Fortran
        routine f2py.meteo.tropopausewmo in getview(). '''
    # initialization
    def __init__(self, T, z=None,axis=ZAxis,bnd=None,threshold=2e-3,deltaZ=2e3,revIdx=False,index=1,name='WMOTP',atts=None,plotatts=None,**kwargs):
        # T: temperature variable; z: optional height field on the same axes
        # axis: vertical axis (class or instance); bnd: vertical boundaries
        # threshold/deltaZ: WMO lapse-rate criterion parameters (passed to
        #   the Fortran routine; presumably K/m and m -- TODO confirm units)
        # revIdx: set True if the vertical axis starts at the highest level
        # index: enumerator value for the degenerate TPdef output axis
        from numpy import asarray # NOTE(review): imported but unused here
        # some checks and defaults
        assert T.hasaxis(axis), 'target variable is not defined along fit axis'
        if z: assert z.hasaxis(axis), 'source variable is not defined along fit axis'
        ## precondition target variable
        iaxis = T.whichaxis(axis)
        #TODO: defaults for bnd, depending on type of coordinate
        # if vertical boundaries are given, apply first
        if bnd: T = T(**{T.axes[iaxis].name: bnd})
        # make vertical axis varying the fastest
        order = list(range(0,iaxis)) + list(range(iaxis+1,T.naxes)) + [iaxis]
        T = T.transpose (*order)
        ## precondition height data
        # if a source field is given
        if z:
            iaxis = z.whichaxis(axis)
            # if vertical boundaries are given, apply first
            if bnd: z = z(**{z.axes[iaxis].name: bnd})
            # make vertical axis varying the fastest
            order = list(range(0,iaxis)) + list(range(iaxis+1,z.naxes)) + [iaxis]
            z = z.transpose (*order)
        # shorten axis, check again
        axis = T.getaxis(axis.name) # get new (shortened) axis
        assert T.axes[-1]==axis, 'there is a problem with the vertical axes in T'
        if z: assert z.axes[-1]==axis, 'there is a problem with the vertical axes in z'
        ## create axis for TP height (currently degenerate)
        paxis = TPdef(index) # variable index serves as enumerator for TP definitions
        axes = T.axes[:-1] + (paxis,) # vertical axis is replaced by TPdef axis
        ## set attributes
        # fit attributes (record the inputs of the computation)
        defatts = dict()
        defatts['T'] = T.name
        defatts['tgtUnits'] = T.atts['units']
        defatts['threshold'] = threshold
        defatts['deltaZ'] = deltaZ
        if z:
            defatts['z'] = z.name
            defatts['srcUnits'] = z.atts['units']
        else:
            # no height field: vertical coordinate values serve as height
            defatts['z'] = axis.name
            defatts['srcUnits'] = axis.atts['units']
        defatts['axis'] = axis.name
        defatts['bnd'] = bnd or ''
        defatts['revIdx'] = revIdx
        # variable attributes
        defatts['name'] = name
        defatts['units'] = defatts['srcUnits'] # TP height inherits the height units
        defatts['long_name'] = 'WMO Tropopause Height' # change name
        defatts['standard_name'] = 'TP Height'
        if atts: defatts.update(atts) # user-supplied attributes override defaults
        # plotatts
        defplotatts = variablePlotatts['z'] # plot like a height variable
        if plotatts: defplotatts.update(plotatts)
        Var.__init__(self, name=name, axes=axes, dtype=T.dtype, atts=defatts, plotatts=defplotatts)
        # save references
        self.T = T # the fit variable
        self.threshold = threshold # lapse-rate threshold that defines the tropopause
        self.deltaZ = deltaZ # vertical depth for which the threshold criterion has to hold
        self.z = z # the domain on which the model fct. is defined
        self.axis = axis # the axis along which the model operates
        self.bnd = bnd # upper and lower profile boundaries (axis indices or coordinate values)
        self.revIdx = revIdx # reverse index order of target and source field
        self.args = kwargs # keyword arguments that can be passed to fct (static)
    # method to compute parameters
    def getview(self, view, pbar):
        # standard PyGeode data-request hook: returns TP height for the
        # requested view by looping over temperature profiles in Fortran
        from numpy import empty, prod, flipud, fliplr
        from f2py import meteo
        # from multiprocessing import Pool
        ## construct new view
        # replace the degenerate TPdef axis with the full vertical axis
        inView = view.replace_axis(self.naxes-1, self.axis)
        ## get data
        T = inView.map_to(self.T).get(self.T)
        te = prod(T.shape[:-1]) # number of profiles
        ke = T.shape[-1] # vertical levels
        if self.z:
            z = inView.map_to(self.z).get(self.z)
            assert z.shape==T.shape
            ze = te # one height column per profile
        else:
            z = self.axis.values # same height column for all profiles
            ze = 1
        # cast arrays into two-dimensional array to loop over profiles
        T = T.reshape((te,ke))
        z = z.reshape((ze,ke))
        # allocate output data
        # NOTE(review): this allocation is discarded below, where zTP is
        # re-bound to the Fortran routine's return value
        zTP = empty(view.shape, dtype=self.dtype)
        zTP = zTP.reshape((te,zTP.shape[-1])) # TP-axis is degenerate anyway, but well...
        # if array starts with highest level, flip array
        if self.revIdx:
            T = fliplr(T)
            if self.z: z = fliplr(z)
            else: z = flipud(z)
        # determine tropopause height according to WMO definition
        zTP = meteo.tropopausewmo(T, z, self.threshold, self.deltaZ, te, ze, ke)
        return zTP.reshape(view.shape)
126 |
127 |
128 | ## Isentropic Coordinates
129 |
130 | # axis for isentropic coordinates
class Isentrope(ZAxis):
    ''' Vertical axis of constant potential temperature (isentropic levels). '''
    name = 'theta' # default name
    units = 'K'
    plotatts = ZAxis.plotatts.copy()
    plotatts['formatstr'] = '%d' # just print integers
    # Formatting attributes for axis labels and ticks (see formatter method for application)
    plotatts['plottitle'] = 'Potential Temperature' # name displayed in plots (axis label); typo fixed
    plotatts['plotunits'] = 'K' # displayed units (after offset and scalefactor have been applied)
139 |
140 | # interpolate to isentropic surface
def interp2theta(var, theta, values, interp='linear', **kwargs):
    ''' Interpolate a variable onto isentropic surface(s).

        var: variable to interpolate (must have a vertical ZAxis)
        theta: potential temperature on the same vertical axis (assumed to be
               monotonically increasing along that axis)
        values: isentropic level(s) [K] to interpolate to
        interp: interpolation scheme passed on to pygeode.interp.interpolate
        kwargs: passed on to the Isentrope axis constructor
    '''
    # NOTE: removed the unused 'sorted' import, which shadowed the builtin
    from pygeode.interp import interpolate
    # input (vertical) axis
    iaxis = var.whichaxis(ZAxis)
    inaxis = var.axes[iaxis]
    assert theta.hasaxis(inaxis), 'vertical axis of var and theta are incompatible'
    # create new isentropic output axis
    outaxis = Isentrope(values=values, **kwargs)
    # interpolate to isentropic levels (theta serves as the source coordinate)
    ivar = interpolate(var, inaxis, outaxis, inx=theta, interp_type=interp)
    # return variable interpolated to isentropic level(s)
    return ivar
157 |
158 | ## Dynamical Tropopause
159 |
160 | # axis for PV iso-surfaces (dynamical tropopause)
class DynamicalTP(ZAxis):
    ''' Axis of PV iso-surfaces, defining the dynamical tropopause. '''
    name = 'PViso' # default name
    units = '(K m^2)/(s kg)' # SI units of potential vorticity
    # formatting attributes for axis labels and ticks (see formatter method)
    plotatts = ZAxis.plotatts.copy()
    plotatts.update(formatstr='%3.1f', # one digit behind decimal
                    plottitle='Dynamical TP', # name displayed in plots (axis label)
                    plotunits='PVU', # displayed units (after offset and scalefactor)
                    scalefactor=1e6) # conversion factor from SI PV units to PVU
--------------------------------------------------------------------------------
/src/archive/atmdyn_test.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2010-11-12, adapted on 2013-08-24
3 |
4 | Unittest for the atmdyn package; mainly for meteoVar, etaVar, f2pyVar, and srfcVar
5 | (the code was adapted from the PyGeode plugin AtmDyn).
6 |
7 | @author: Andre R. Erler, GPL v3
8 | '''
9 |
import unittest
import matplotlib as mpl
#from matplotlib.pyplot import rcParams
#mpl.rc('lines', linewidth=1.5)
#mpl.rc('font', size=22)
# configure matplotlib before the pylab import: larger axis/tick labels
axlbl = dict(labelsize='large')
mpl.rc('axes', **axlbl)
mpl.rc('xtick', **axlbl)
mpl.rc('ytick', **axlbl)
#mpl.rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
mpl.rc('text', usetex=True) # render text with LaTeX
import matplotlib.pylab as pyl
# my own imports

from atmdyn import meteoVar, etaVar, f2pyVar, srfcVar
25 |
26 |
class Variables(unittest.TestCase):
    ''' Diagnostic tests for derived atmospheric variables (meteoVar, etaVar,
        f2pyVar, srfcVar); requires an externally loaded model dataset. '''
    all = False # whether or not to test lengthy computations
    plot = False # whether or not to display plots
    dataset = 'CRU' # dataset to use (some tests require a specific dataset)
    slice = {'time':1, 'lon':(20,30),'lat':(30,40)} # default diagnostic slice

    def setUp(self):
        # load the dataset selected by the 'dataset' class attribute
        if self.dataset == 'CRU':
            from myDatasets.loadLM import openLM, allLM, LMroot, LCs, hostname
            # time-step/file definition for COSMO data
            LC = 'LC1' # smallest data set
            # NOTE(review): 'sliceLM' is not defined anywhere in this class;
            # this line would raise AttributeError if executed -- verify intent
            self.slice = self.sliceLM
            # load COSMO data set
            if hostname=='komputer': self.data = openLM(LC)
            elif hostname=='erlkoenig':
                self.data = allLM(LC=LC,tag='test',specialVars=[],LMdata=True) # 'PV','PVs','s'
            # save root folder for later
            self.rootFolder = LMroot + LCs[LC] + '/'
#        # add potential temperature (because it's so ubiquitous)
#        if ('T' in self.data.vardict) and ('p' in self.data.vardict):
#            self.data += f2pyVar.Theta(self.data.T, self.data.p) # use Fortran implementation

    def tearDown(self):
        # nothing to clean up
        pass

    def output(self, var, all, slice=None):
        # function to generate diagnostic output for a new variable:
        # prints long_name, units and axes; with all=True also min/mean/max
        # statistics over a slice (default: self.slice)
        print()
        print()
        print((' * '+var.atts['long_name']+' ['+var.atts['units']+']'))
        print((var.axes))
        if all:
            # compute slice only once and load in memory
            if slice is None: slice = self.slice
            slicedVar = var(**slice).load()
            # compute some stats (NOTE: min/max shadow the builtins locally)
            min = slicedVar.min()
            mean = slicedVar.mean()
            max = slicedVar.max()
            # print output
            print()
            print((' min: %g'%(min)))
            print((' mean: %g'%(mean)))
            print((' max: %g'%(max)))

    def makePlot(self, var, z=None, plotAxis='', slice=None, xlim=None, ylim=None):
        # plot a 1D section of var along plotAxis; all other sliced dimensions
        # are collapsed to their mean coordinate value
        from numpy import mean
        # figure out slice
        if not slice: slice = self.slice
        # determine plot dimension
        if not plotAxis:
            vertical = True
            if self.dataset=='lm': slice['z'] = (0,20e3); plotAxis = 'z'
            elif self.dataset=='ifs': slice['eta'] = (30,91); plotAxis = 'eta'
        else:
            if plotAxis=='z' or plotAxis=='eta': vertical = True
            else: vertical = False
        # collapse all dimensions except plotAxis
        # NOTE(review): this mutates the slice dict in place (i.e. self.slice
        # when the default is used), so later tests see the collapsed values
        for dim,lim in slice.items():
            if not dim==plotAxis: slice[dim] = mean(lim)
        # construct plot
        ax = pyl.axes()
#        if not vertical:
#            if self.dataset=='lm':
#                plotvar(var(**self.slice), ax=ax, lblx=True)
#                if self.dataset=='lm': ax.set_ylim((0,20))
#            elif self.dataset=='ifs':
#                if not z: z = self.data.z
#                vv = var(**self.slice).squeeze().get()*var.plotatts['scalefactor']+var.plotatts['offset']
#                if var.hasaxis(z.name):
#                    z(**{z.name:self.slice[z.name]}).values*z.plotatts['scalefactor']+z.plotatts['offset']
#                else: zz = z(**self.slice).squeeze().get()*z.plotatts['scalefactor']+z.plotatts['offset']
#                pyl.plot(vv,zz)
#                if z.name=='z': ax.set_ylim((0,20))
#        else:
        var.plotatts['plotname'] = r'$\theta$'
        # NOTE(review): 'plotvar' is not defined or imported in this module
        plotvar(var(**self.slice), ax=ax, lblx=True, lbly=True)
        # axes limits
        if xlim: ax.set_xlim(xlim)
        if ylim: ax.set_ylim(ylim)
        pyl.show()
        # return axes for modification...
        return ax

    ## test cases

    def testLoad(self):
        # print final dataset
        print((self.data))

    def testIntersect(self):
        from pygeode.formats.netcdf import open
        from myDatasets.utils import intersectDatasets
        # load new test data
        file = "TPfittest.nc"
        newData = open(self.rootFolder+file)
        # merge datasets with different slicing
        mergedData = intersectDatasets(newData,self.data)
        print(mergedData)

    def testTemperature(self):
        # print diagnostics
        self.output(self.data.T,self.all)
        # test plot
        if self.plot: self.makePlot(self.data.T, xlim=(150,350))

    def testPressure(self):
        # this method is specific to hybrid coordinates
        if self.dataset == 'ifs':
            # print pressure on eta levels diagnostics
            self.output(self.data.p,self.all)
            # test plot: either as function of eta or z
            # if self.plot: self.makePlot(self.data.p,z=self.data.eta,plotAxis='eta')
            if self.plot: self.makePlot(self.data.p)

    def testGeopotHeight(self):
        # this method is specific to hybrid coordinates
        if self.dataset == 'ifs':
            # compute geopotential on eta levels
            z = etaVar.GeopotHeight(self.data.T, self.data.ps, self.data.zs)
            # print diagnostics
            self.output(self.data.z,self.all)
            # test plot: this should be a strait 45 degree line!
            if self.plot: self.makePlot(self.data.z, xlim=(0,20))

    def testVerticalVelocity(self):
        # print vertical velocity diagnostics
        if self.dataset == 'lm':
            # # interpolate vertical velocity to full levels
            # vv = meteoVar.verticalVelocity(self.data.w, self.data.z)
            assert self.data.w.hasaxis('z')
            assert self.data.w.axes == self.data.u.axes
            # w is added to dataset upon load
            self.output(self.data.w,self.all)

    def testRho(self):
        # print density diagnostics
        rho = meteoVar.Rho(self.data.T, self.data.p)
        self.output(rho,self.all)

    def testF2pyTheta(self):
        # Fortran implementation of theta (mainly for test purpose)
        theta = f2pyVar.Theta(self.data.T, self.data.p)
        # print potential temperature diagnostics
        self.output(theta,self.all)
        # test plot
        if self.plot: self.makePlot(theta, xlim=(250,550))

    def testIsentropicSrfc(self):
        # trim to mid-troposphere / lower-stratosphere height range
        z = self.data.p(z=(4e3,16e3))
        th = self.data.th(z=(4e3,16e3))
        # interpolate Z to isentrope
        Z320K = srfcVar.interp2theta(z, th, [320])
        # print potential temperature diagnostics
        self.output(Z320K,self.all)
        # test plot
        if self.plot: self.makePlot(Z320K, plotAxis='lat', ylim=(0,20))

    def testTheta(self):
        # print potential temperature diagnostics
        th = meteoVar.Theta(self.data.T, self.data.p)
        self.output(th,self.all)

    def testEntropy(self):
        # compute entropy
        s = meteoVar.Entropy(self.data.T, self.data.p)
        # print entropy diagnostics
        self.output(s,self.all)

    def testLR(self):
        # compute temperature lapse-rate
        lr = meteoVar.LR(self.data.T,z=self.data.z)
        self.output(lr,self.all)

    def testThetaLR(self):
        # compute potential temperature lapse-rate
        thlr = meteoVar.ThetaLR(self.data.th,z=self.data.z)
        self.output(thlr,self.all)

    def testN2(self):
        # compute Brunt-Vaisaila Frequency Squared
        nn = meteoVar.N2(self.data.th,z=self.data.z)
        self.output(nn,self.all)
        # test plot
        if self.plot: self.makePlot(nn, xlim=(0,7))

    def testF2pyZeta(self):
        # compute relative vorticity
        zeta = f2pyVar.RelativeVorticity(self.data.u, self.data.v)
        # print diagnostics
        self.output(zeta,self.all, self.slice)
        # test plot
        if self.plot: self.makePlot(zeta, xlim=(-1,1))

    def testF2pyPV(self):
        # some required fields
        rho = meteoVar.Rho(self.data.T, self.data.p)
        # Fortran implementation of PV (arguments depend on the dataset's
        # vertical coordinate: w for COSMO/lm, z for hybrid/ifs)
        if self.dataset=='lm':
            PV = f2pyVar.PotentialVorticity(self.data.u, self.data.v, self.data.th, rho, w=self.data.w)
        elif self.dataset=='ifs':
            PV = f2pyVar.PotentialVorticity(self.data.u, self.data.v, self.data.th, rho, z=self.data.z)
        # print potential temperature diagnostics
        self.output(PV,self.all, self.slice)
        # test plot
        if self.plot: self.makePlot(PV, xlim=(0,10))

    def testF2pyWMOTP(self):
        # compute TP height
        slice = {'lat':(30,70),'lon':(0,1)}
        if self.dataset=='lm':
            bnd=(4e3,18e3)
            zTP = srfcVar.WMOTP(self.data.T(z=bnd,**slice), axis=self.data.z(z=bnd), bnd=bnd)
        elif self.dataset=='ifs':
            bnd = (35,75)
            zTP = srfcVar.WMOTP(self.data.T(eta=bnd,**slice), z=self.data.z(eta=bnd,**slice), bnd=bnd)
        # print potential temperature diagnostics
        self.output(zTP,self.all,slice)
        # test plot
        if self.plot: self.makePlot(zTP, plotAxis='lat', ylim=(0,20))
248 |
def suite(tests=None):
    ''' Create a test suite; if a non-empty list of test names is given,
        only those tests are included, otherwise all tests in Variables
        are loaded. (Default changed from a mutable [] to None.) '''
    if tests: # only include tests given in the list
        s = unittest.TestSuite(list(map(Variables,tests)))
    else: # test everything
        s = unittest.TestLoader().loadTestsFromTestCase(Variables)
    return s
256 |
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testLoad']
    #unittest.main(verbosity=1)
    # list of test cases:
    # 'testLoad', 'testPressure', 'testGeopotHeight', 'testLR', 'testTheta', 'testThetaLR', 'testEntropy', 'testN2'
    tests = ['testF2pyPV'] # subset of test cases to run
    # run tests
    unittest.TextTestRunner(verbosity=2).run(suite(tests))
--------------------------------------------------------------------------------
/src/archive/plotting_test_old.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2010-11-12
3 |
4 | Unittest for TPdef package (tropopause analysis tools and plot functions)
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | import unittest
10 | from matplotlib.pylab import show
11 |
12 |
def modelOutput(var, all):
    ''' Print diagnostic output for a variable: long name, units and axes;
        if ``all`` is true, also min/mean/max statistics over a test slice.
        (The parameter name 'all' shadows the builtin, but is kept for
        interface compatibility.) '''
    print()
    print(' * ' + var.atts['long_name'] + ' [' + var.atts['units'] + ']')
    print(var.axes)
    if all:
        # compute slice only once and load in memory
        slicedVar = var(lon=(20,25),lat=(40,45)).load()
        # compute some stats (renamed so as not to shadow builtins min/max)
        vmin = slicedVar.min()
        vmean = slicedVar.mean()
        vmax = slicedVar.max()
        # print output
        print()
        print(' min: %.4f' % vmin)
        print(' mean: %.4f' % vmean)
        print(' max: %.4f' % vmax)
30 |
class TPanalysis(unittest.TestCase):
    ''' Tests for tropopause analysis tools and the associated plot functions;
        requires an externally loaded model dataset ('lm' or 'ifs'). '''
    # whether or not to test lengthy computations
    all = True
    show = True # NOTE: shadows the module-level pylab 'show' import
    dataset = 'ifs' # which dataset to load in setUp ('lm' or 'ifs')

    def setUp(self):
        from pygeode.atmdyn import meteoVar
        # load main dataset
        if self.dataset == 'lm':
            from myDatasets.loadLM import openLM, allLM, LMroot, LCs, hostname
            # time-step/file definition for COSMO data
            LC = 'LC1' # smallest data set
            self.rootFolder = LMroot + LCs[LC] + '/' # path to data set
            # load COSMO data set
            if hostname=='komputer':
                data = openLM(LC)
                data += meteoVar.Entropy(data.T, data.p) # add entropy
            elif hostname=='erlkoenig':
                data = allLM(LC=LC,tag='test',specialVars=['PV','PVs','s'],LMdata=False)
                # add more variables to dataset
                # NOTE(review): indentation reconstructed -- verify that these
                # additions belong to the 'erlkoenig' branch only
                data += meteoVar.Theta(data.T, data.p)
                data += meteoVar.N2(data.th)
        elif self.dataset == 'ifs':
            from myDatasets.loadIFS import allIFS, IFSroot
            # # some pre-computed data
            # file = 'ecmwfTestData183JanNH.nc'
            # load ECMWF-IPY data set
            data = allIFS('JanNH', serverData=False)
            # add N2
            data += meteoVar.N2(data.s, z=data.z, entropy=True)
            self.rootFolder = IFSroot
        # save dataset
        self.data = data

    def tearDown(self):
        # nothing to clean up
        pass

    def testOutput(self):
        # select a variable
        var = self.data.T # Entropy
        # print some diagnostics
        print(); print()
        print((' --- '+var.name+' --- '))
        modelOutput(var, self.all)

    def testProfile(self):
        # plot a profile
        from myPlots.plots import linePlot
        # select slice (dataset-specific vertical coordinate)
        if self.dataset == 'lm':
            slice1 = {'time':8*86400, 'lon':0,'lat':55,'z':(0,20e3)}
            slice2 = {'time':9*86400, 'lon':0,'lat':55,'z':(0,20e3)}
            zAxis = self.data.z
        elif self.dataset == 'ifs':
            slice1 = {'time':3.5, 'lon':0,'lat':45,'eta':(30,91)}
            slice2 = {'time':3.5, 'lon':5,'lat':55,'eta':(30,91)}
            zAxis = self.data.phi
        ## first example
        # select some options
        kwargs = {}
        kwargs['clevs'] = [None, None, (150,450,10), None] # data plot limits / (0,7,7)
        kwargs['labels'] = [None, '', ['Temperature', 'Pot. Temp.'], 'Pressure Profile'] # for legend / [ , ]
        kwargs['legends'] = [4, 3, 4, 1] # location; can also be kwargs /
        # plot
        # f1 = linePlot([[self.data.T, self.data.th], [self.data.p]], [slice1, slice2], axis=zAxis, **kwargs)
        ## second example
        # select some options
        kwargs = {}
        kwargs['clevs'] = [None, None] # data plot limits / (0,7,7)
        kwargs['labels'] = [None, ['Day 8', 'Day 9']] # for legend / [ , ]
        kwargs['legends'] = [3, 1] # location; can also be kwargs /
        # make new axis (logarithmic pressure, decreasing upward)
        pAxis = self.data.p; pAxis.plotatts['plotorder'] = -1; pAxis.plotatts['plotscale'] = 'log'
        # plot
        f2 = linePlot([[self.data.T], [self.data.p]], [[slice1, slice2]], axis=[[pAxis],[pAxis]], **kwargs)
        show()

    def testLine(self):
        # plot a a horizontal line
        from myPlots.plots import linePlot
        # select slice (dataset-specific vertical coordinate)
        if self.dataset == 'lm':
            tSlice = {'time':(0,15*86400),'lon':0,'lat':55,'z':0}
            ySlice = {'time':9*86400, 'lon':0,'lat':(25,75),'z':10e3}
        elif self.dataset == 'ifs':
            tSlice = {'time':(3,14), 'lon':0,'lat':45,'eta':91}
            ySlice = {'time':3.5, 'lon':5,'lat':(25,75),'eta':60}
        ## line plot example
        # select some options
        kwargs = {}
        kwargs['clevs'] = [None, None, None, None] # data plot limits / (0,7,7)
        kwargs['labels'] = ['T, meridional', 'T, time-series', 'u, meridional', 'u, time-series'] # for legend / [ , ]
        kwargs['legends'] = [1, 3, 2, 4] # location; can also be kwargs /
        # plot
        f = linePlot([[self.data.T], [self.data.u]], [ySlice, tSlice], axis=None, transpose=True, **kwargs)
        show()

    def testSynopCombi(self):
        # slice and projection setup
        days = 3
        lambConv = {'projection':'gall', 'llcrnrlon':-15, 'llcrnrlat':30, 'urcrnrlon':45, 'urcrnrlat':70,
                    'parallels':[30,40,50,60,70], 'meridians':[-10,0,10,20,30,40]}
        # plot setup
        cbar = {'manual':False,'orientation':'horizontal'}
        cbls = [(4,16,7),(0,5,6)]
        clevs = [(4,16,10),(10,50,10),(00,300,10),(0,20,10)]
        sbplt = (2,2)
        # plot a map of TP height and sharpness
        f = self.data.synop(['TPhgt','TPsharp','TPval','RMSE'], days ,clevs=clevs, subplot=sbplt, geos=lambConv)
        # folder='/home/me/Research/Tropopause Definition/Figures/'
        # sf = dict(dpi=600,transparent=True)
        # f.savefig(folder+'HTPdN2day0304.pdf',**sf)
        show()

    def testSynopSingle(self):
        # slice and projection setup
        days=[3.0,3.25,3.5,3.75]
        lambConv = {'projection':'lcc', 'lat_0':50, 'lat_2':50, 'lon_0':15, 'width':4e7/6, 'height':4e7/8,
                    'parallels':[20,40,60,80], 'meridians':[-30,0,30,60], 'labels':[1,0,0,1]}
        # plot setup
        colorbar = {'manual':True,'location':'right'}
        margins = {}#{'left':0.025,'wspace':0.025,'right': 0.95}
        cbls = 7
        clevs = (4,16,50)
        # plot a map of TP height and sharpness
        f = self.data.synop('TPhgt', days, clevs=clevs,cbls=cbls,colorbar=colorbar,margins=margins,geos=lambConv)
        # folder='/home/me/Research/Tropopause Definition/Figures/'
        # sf = dict(dpi=600,transparent=True)
        # f.savefig(folder+'HTPdN2day0304.pdf',**sf)
        show()

    def testHovmoeller(self):
        # make hovmoeller plot of TP height and TP sharpness
        # plot setup
        cbar = {'manual':True, 'location':'right', 'orientation':'vertical'}
        cbls = [(4,16,7),(0,50,6)]
        clevs = [(4,16,50),(0,50,50)]
        sbplt = (1,2)
        # plot a map of TP height and sharpness
        f = self.data.synop(['TPhgt','TPsharp'], 0 ,clevs=clevs, cbls=cbls, subplot=sbplt, colorbar=cbar)
        # f = self.data.hovmoeller(['TPhgt','TPsharp'], slice={'lat':(30,70)},transpose=True,clevs=clevs,cbls=cbls,colorbar=colorbar)
        # folder='/home/me/Research/Tropopause Definition/Figures/'
        # sf = dict(dpi=600,transparent=True)
        # f.savefig(folder+'HTPdN2hovmoeller.pdf',**sf)
        show()
177 |
def suite(tests=None):
    ''' Create a test suite; if a non-empty list of test names is given,
        only those tests are included, otherwise all tests in TPanalysis
        are loaded. (Default changed from a mutable [] to None.) '''
    if tests: # only include tests given in the list
        s = unittest.TestSuite(list(map(TPanalysis,tests)))
    else: # test everything
        s = unittest.TestLoader().loadTestsFromTestCase(TPanalysis)
    return s
185 |
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testLoad']
    #unittest.main(verbosity=1)
    # list of test cases:
    # 'testProfile', 'testTPana', 'testSynop', 'testHovmoeller'
    tests = ['testSynopSingle'] # subset of test cases to run
    # run tests
    unittest.TextTestRunner(verbosity=2).run(suite(tests))
    # show output
    show()
--------------------------------------------------------------------------------
/src/archive/read_snowdas.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from osgeo import gdal
3 | from pylab import *
4 | from ftplib import FTP
5 | import os as os
6 | import subprocess
7 | from matplotlib.colors import LogNorm
8 |
9 | #import matplotlib as mpl
10 | #mpl.rcParams['text.usetex']=True
11 | #mpl.rcParams['text.dvipnghack'] = True
12 |
13 |
14 |
15 |
## change working directory
processing_dir = '/Users/menounos/snowdas/tmp/'

## polygon file
poly_file = '/Users/menounos/snowdas/shapefiles/totalmica_finalboundary.shp'


os.chdir(processing_dir) # Change current working directory


## clean out old files
os.system('rm ' + processing_dir + '*')


# GDAL data-directory option passed to gdalwarp later on
gdal_config = '--config GDAL_DATA /Library/Frameworks/GDAL.framework/Versions/1.10/Resources/gdal/ '

path = 'DATASETS/NOAA/G02158/unmasked/'

# date of the SNODAS product (first two characters of 'month' are the number)
day = '15'
month = '08_Aug'
year = '2014'

tar_file ='SNODAS_unmasked_' + year + month[0:2] + day+ '.tar'

# name of the SWE product file inside the tar archive
product = 'zz_ssmv11034tS__T0001TTNATS' + year + month[0:2] + day + '05HP001.dat'

print(tar_file)
print(product)

snowdas_path = path + '/' + year + '/' + month


ftp_site = 'sidads.colorado.edu'

ftp = FTP(ftp_site) # connect to host, default port
ftp.login('anonymous', 'anonymous@')

ftp.cwd(snowdas_path)
ftp.retrlines('LIST')

# download the archive; the context manager guarantees the local file is
# closed (the original passed an open().write handle that was never closed)
with open(tar_file, 'wb') as tar:
    ftp.retrbinary("RETR " + tar_file, tar.write)

print('Done downloading file', tar_file, '...')
60 |
61 |
## untar file

print(processing_dir + tar_file)

os.system('ls -l /Users/menounos/snowdas/tmp')

os.system('tar -xvf ' + processing_dir + tar_file)

os.system('gunzip *')

files = os.listdir(processing_dir)

## find the file of interest and then write the appropriate ESRI BIL header

# header fields describing the unmasked SNODAS grid
# (8192x4096 cells, 16-bit big-endian, band-interleaved)
header_fields = [
    'byteorder M',
    'layout bil',
    'nbands 1',
    'nbits 16',
    'ncols 8192',
    'nrows 4096',
    'ulxmap -130.517083333332',
    'ulymap 58.232916666654',
    'xdim 0.0083333333333',
    'ydim 0.0083333333333',
]

for file in files:
    if file == product:
        print('writing header for file', file, 'now')
        # open in text mode: the original opened with 'wb' and wrote str,
        # which raises TypeError under Python 3; the context manager also
        # guarantees the header file is closed
        with open(processing_dir + file[:-4] + '.hdr', 'w') as f_out:
            f_out.write('\n'.join(header_fields))
            f_out.write('\n')
104 |
105 |
print('warping and clipping')


## project grid to BC Albers (EPSG:3005)
## WGS84 lat lon EPSG:4326


# gdalwarp command-line fragments (note the surrounding spaces)
epsg_wgs84 = ' -s_srs EPSG:4326 ' # source projection: WGS84 lat/lon

epsg_bcalb = ' -t_srs EPSG:3005 ' # target projection: BC Albers

bcalb_resample = ' -r near ' # nearest-neighbor resampling

f_name = processing_dir + product # input SNODAS grid (with .hdr written above)

f_name2 = processing_dir + 'SWE_unclipped.tif' # reprojected output


os.system('/Users/menounos/anaconda/bin/gdalwarp ' + \
epsg_wgs84 + gdal_config + epsg_bcalb + bcalb_resample + f_name + " " + f_name2)


print('here!')

## now do clipping (external shell script clips to the polygon boundary)

os.system('/Users/menounos/snowdas/clip.sh')

#subprocess.call(['./Users/menounos/snowdas/clip'])

print('here!!!')
137 |
# open the clipped SWE raster produced by the clip script
ds = gdal.Open(processing_dir + 'swe_clip.tif', gdal.GA_ReadOnly)


band = ds.GetRasterBand(1)
print(band.GetNoDataValue())
# None ## normally this would have a finite value, e.g. 1e+20
ar = band.ReadAsArray()
print(np.isnan(ar).all())
# False
print('%.1f%% masked' % (np.isnan(ar).sum() * 100.0 / ar.size))
# 43.0% masked

print(ar.max())
print(ar.min())

# zero out values above 20000 (presumably no-data sentinels -- TODO confirm)
ar[ar>20000] = 0


# total volume; divided by 1e6, labeled km^3 in the plot title below
print(ar.sum()/1e6)


# mask zero-SWE cells so they are rendered as white ('bad' color)
masked_array=np.ma.masked_where(ar == 0, ar)
cmap = matplotlib.cm.jet # 'matplotlib' comes in via the pylab star import
cmap.set_bad('w',1.)

#cmap = plt.get_cmap('jet', 5)

plt.figure()
#plt.rc( 'text', usetex=True )


# plot SWE divided by 1000 (title says meters; source presumably mm -- verify)
plt.contourf(np.flipud(masked_array/1000.0), cmap=cmap, vmin=0.1, vmax=4)
plt.title(r'Snow Water Equivalent (m): ' + year + '/' + month[0:2] + '/' + day + '\n' +
'Total Volume (km$^{3}$): ' + repr(round(ar.sum()/1e6, 2)))
plt.colorbar()

plt.savefig('/Users/menounos/snowdas/Snowdas_' + year + '_' + month[0:2] + '_' + day + '.jpg', dpi=600)

plt.show()
--------------------------------------------------------------------------------
/src/archive/runoff_FRB_ARB.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-02-14
3 |
4 | Script to generate plots for my first downscaling paper!
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 | # external imports
9 | # from types import NoneType
10 | # from warnings import warn
11 | # import numpy as np
12 | # use common MPL instance
13 | # import matplotlib as mpl
14 | import matplotlib.pylab as pyl
15 | # from plotting.legacy import loadMPL
16 | # mpl,pyl = loadMPL(linewidth=.75)
17 | # from plotting.misc import loadStyleSheet
18 | # loadStyleSheet(stylesheet='myggplot', lpresentation=False, lpublication=True)
19 | # from mpl_toolkits.axes_grid1 import ImageGrid
20 | # internal imports
21 | # PyGeoDat stuff
22 | # from utils.signalsmooth import smooth
23 | from plotting.figure import getFigAx
24 | # from plotting.misc import getPlotValues
25 | # from geodata.base import Variable
26 | # from geodata.misc import AxisError, ListError
27 | from datasets.WRF import loadWRF_ShpEns
28 | from datasets.Unity import loadUnity_ShpTS
29 | from datasets.WSC import basins as basin_dict
30 | # ARB project related stuff
31 | from projects.ARB_settings import figure_folder
32 |
33 | # def linePlot(varlist, ax=None, fig=None, linestyles=None, varatts=None, legend=None,
34 | # xline=None, yline=None, title=None, flipxy=None, xlabel=None, ylabel=None, xlim=None,
35 | # ylim=None, lsmooth=False, lprint=False, **kwargs):
36 | # ''' A function to draw a list of 1D variables into an axes, and annotate the plot based on variable properties. '''
37 | # warn('Deprecated function: use Figure or Axes class methods.')
38 | # # create axes, if necessary
39 | # if ax is None:
40 | # if fig is None: fig,ax = getFigAx(1) # single panel
41 | # else: ax = fig.axes[0]
42 | # # varlist is the list of variable objects that are to be plotted
43 | # #print varlist
44 | # if isinstance(varlist,Variable): varlist = [varlist]
45 | # elif not isinstance(varlist,(tuple,list)) or not all([isinstance(var,Variable) for var in varlist]): raise TypeError
46 | # for var in varlist: var.squeeze() # remove singleton dimensions
47 | # # linestyles is just a list of line styles for each plot
48 | # if isinstance(linestyles,(basestring,NoneType)): linestyles = [linestyles]*len(varlist)
49 | # elif not isinstance(linestyles,(tuple,list)):
50 | # if not all([isinstance(linestyles,basestring) for var in varlist]): raise TypeError
51 | # if len(varlist) != len(linestyles): raise ListError, "Failed to match linestyles to varlist!"
52 | # # varatts are variable-specific attributes that are parsed for special keywords and then passed on to the
53 | # if varatts is None: varatts = [dict()]*len(varlist)
54 | # elif isinstance(varatts,dict):
55 | # tmp = [varatts[var.name] if var.name in varatts else dict() for var in varlist]
56 | # if any(tmp): varatts = tmp # if any variable names were found
57 | # else: varatts = [varatts]*len(varlist) # assume it is one varatts dict, which will be used for all variables
58 | # elif not isinstance(varatts,(tuple,list)): raise TypeError
59 | # if not all([isinstance(atts,dict) for atts in varatts]): raise TypeError
60 | # # check axis: they need to have only one axes, which has to be the same for all!
61 | # if len(varatts) != len(varlist): raise ListError, "Failed to match varatts to varlist!"
62 | # for var in varlist:
63 | # if var.ndim > 1: raise AxisError, "Variable '{}' has more than one dimension; consider squeezing.".format(var.name)
64 | # elif var.ndim == 0: raise AxisError, "Variable '{}' is a scalar; consider display as a line.".format(var.name)
65 | # # loop over variables
66 | # plts = []; varname = None; varunits = None; axname = None; axunits = None # list of plot handles
67 | # for var,linestyle,varatt in zip(varlist,linestyles,varatts):
68 | # varax = var.axes[0]
69 | # # scale axis and variable values
70 | # axe, axunits, axname = getPlotValues(varax, checkunits=axunits, checkname=None)
71 | # val, varunits, varname = getPlotValues(var, checkunits=varunits, checkname=None)
72 | # # variable and axis scaling is not always independent...
73 | # if var.plot is not None and varax.plot is not None:
74 | # if 'preserve' in var.plot and 'scalefactor' in varax.plot:
75 | # if varax.units != axunits and var.plot.preserve == 'area':
76 | # val /= varax.plot.scalefactor
77 | # # figure out keyword options
78 | # kwatts = kwargs.copy(); kwatts.update(varatt) # join individual and common attributes
79 | # if 'label' not in kwatts: kwatts['label'] = var.name # default label: variable name
80 | # # N.B.: other scaling behavior could be added here
81 | # if lprint: print varname, varunits, val.mean()
82 | # if lsmooth: val = smooth(val)
83 | # # figure out orientation
84 | # if flipxy: xx,yy = val, axe
85 | # else: xx,yy = axe, val
86 | # # call plot function
87 | # if linestyle is None: plts.append(ax.plot(xx, yy, **kwatts)[0])
88 | # else: plts.append(ax.plot(xx, yy, linestyle, **kwatts)[0])
89 | # # set axes limits
90 | # if isinstance(xlim,(list,tuple)) and len(xlim)==2: ax.set_xlim(*xlim)
91 | # elif xlim is not None: raise TypeError
92 | # if isinstance(ylim,(list,tuple)) and len(ylim)==2: ax.set_ylim(*ylim)
93 | # elif ylim is not None: raise TypeError
94 | # # set title
95 | # if title is not None:
96 | # ax.set_title(title, dict(fontsize='x-large'))
97 | # pos = ax.get_position()
98 | # pos = pos.from_bounds(x0=pos.x0, y0=pos.y0, width=pos.width, height=pos.height-0.03)
99 | # ax.set_position(pos)
100 | # # set axes labels
101 | # if flipxy: xname,xunits,yname,yunits = varname,varunits,axname,axunits
102 | # else: xname,xunits,yname,yunits = axname,axunits,varname,varunits
103 | # if not xlabel: xlabel = '{0:s} [{1:s}]'.format('Seasonal Cycle',xunits) if xunits else '{0:s}'.format(xname)
104 | # else: xlabel = xlabel.format(xname,xunits)
105 | # if not ylabel: ylabel = '{0:s} [{1:s}]'.format(yname,yunits) if yunits else '{0:s}'.format(yname)
106 | # else: ylabel = ylabel.format(yname,yunits)
107 | # # a typical custom label that makes use of the units would look like this: 'custom label [{1:s}]',
108 | # # where {} will be replaced by the appropriate default units (which have to be the same anyway)
109 | # xpad = 2; xticks = ax.get_xaxis().get_ticklabels()
110 | # ypad = -2; yticks = ax.get_yaxis().get_ticklabels()
111 | # # len(xticks) > 0 is necessary to avoid errors with AxesGrid, which removes invisible tick labels
112 | # if len(xticks) > 0 and xticks[-1].get_visible(): ax.set_xlabel(xlabel, labelpad=xpad)
113 | # elif len(yticks) > 0 and not title: yticks[0].set_visible(False) # avoid overlap
114 | # if len(yticks) > 0 and yticks[-1].get_visible(): ax.set_ylabel(ylabel, labelpad=ypad)
115 | # elif len(xticks) > 0: xticks[0].set_visible(False) # avoid overlap
116 | # # make monthly ticks
117 | # if axname == 'time' and axunits == 'month':
118 | # ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) # ax.minorticks_on()
119 | # # add legend
120 | # if legend:
121 | # legatts = dict()
122 | # if ax.get_yaxis().get_label():
123 | # legatts['fontsize'] = ax.get_yaxis().get_label().get_fontsize()
124 | # if isinstance(legend,dict): legatts.update(legend)
125 | # elif isinstance(legend,(int,np.integer,float,np.inexact)): legatts['loc'] = legend
126 | # ax.legend(**legatts)
127 | # # add orientation lines
128 | # if isinstance(xline,(int,np.integer,float,np.inexact)): ax.axhline(y=xline, color='black')
129 | # elif isinstance(xline,dict): ax.axhline(**xline)
130 | # if isinstance(yline,(int,np.integer,float,np.inexact)): ax.axvline(x=yline, color='black')
131 | # elif isinstance(xline,dict): ax.axvline(**yline)
132 | # # return handle
133 | # return plts
134 |
135 |
if __name__ == '__main__':

  ## runoff differences plot
  # Compares WRF-simulated runoff with gaged river discharge for two basins;
  # surface runoff can optionally be scaled to match observed discharge
  # (ldisc) or by the observed/model precipitation ratio (lprecip).

  #settings
  basins = ['FRB','ARB'] # Fraser River Basin and Athabasca River Basin
  # basins.reverse()
  exp = 'max-ens' # WRF ensemble experiment name
  period = 15 # NOTE(review): defined but not used below
  grid = 'arb2_d02' # NOTE(review): defined but not used below
  variables = ['precip','runoff','sfroff'] # variables to load from WRF
  # figure
  lprint = True # save figure to file
  lfield = True # load model fields (WRF and Unity)
  lgage = True # load gage (observed discharge) data
  ldisc = False # scale sfroff to discharge
  lprecip = True # scale sfroff by precip bias
  # figure parameters for saving
  # sf, figformat, margins, subplot, figsize = getFigureSettings(2, cbar=False, sameSize=False)
  # make figure and axes
  # fig, axes = pyl.subplots(*subplot, sharex=True, sharey=False, facecolor='white', figsize=figsize)
  # margins = dict(bottom=0.11, left=0.11, right=.975, top=.95, hspace=0.05, wspace=0.05)
  # fig.subplots_adjust(**margins) # hspace, wspace
  nax = len(basins) # number of panels (one per basin)
  paper_folder = '/home/data/Figures/Basins/' # figure output folder
  # fig = pyl.figure(1, figsize=(6.25,3.75))
  # axes = ImageGrid(fig, (0.07,0.11,0.91,0.82), nrows_ncols = (1, nax), axes_pad = 0.2, aspect=False, label_mode = "L")
  fig, axes = getFigAx((1,2), name=None, title='IC Ensemble Average (Hist., Mid-, End-Century)',
                       title_font='x-large', figsize=(6.25,3.75),
                       stylesheet='myggplot', lpublication=True, yright=False, xtop=False,
                       variable_plotargs=None, dataset_plotargs=None, plot_labels=None,
                       sharex=True, AxesGrid=False, direction='row',
                       axes_pad = 0., aspect=False, margins=(0.075,0.11,0.95,0.81),)
  # loop over panels/basins
  for n,ax,basin in zip(range(nax),axes,basins):
  # for basin in basins:

    # load meteo data
    if lfield:
      print(' - loading Data')
      # merged observational precipitation (Unity), used for the precip-bias scaling
      unity = loadUnity_ShpTS(varlist=['precip'], shape='shpavg')
      unity = unity(shape_name=basin).load().climMean()
      # unity['precip'][:] *= 86400. # scale with basin area
      wrf = loadWRF_ShpEns(name=exp, domains=2, shape='shpavg', filetypes=['srfc','lsm'],
                           varlist=variables[:]) # WRF
      wrf = wrf(shape_name=basin).load().climMean()
      # for varname in variables: wrf[varname][:] *= 86400. # scale with basin area
    # load basin data
    # N.B.: 'basin' is re-bound here from the basin name (str) to the Basin object
    basin = basin_dict[basin] # Basin(basin=basin, basins_dict=)
    if lgage: gage = basin.getMainGage()
    # load/compute variables
    varlist = []
    if lgage:
      # discharge = gage.discharge
      # print discharge.mean()
      # discharge[:] /= ( unity.atts.shp_area * 86400)
      # discharge.plot = wrf.runoff.plot
      # discharge.units = wrf.runoff.units
      # print discharge.mean()
      discharge = gage.runoff
      discharge.name = 'Observed River Runoff'
      varlist += [discharge]
    if lfield:
      runoff = wrf.runoff; runoff.name = 'Total Runoff'
      sfroff = wrf.sfroff; sfroff.name = 'Surface Runoff'
      varlist += [runoff, sfroff]
    if ldisc:
      # scale surface runoff so its mean matches the observed discharge mean
      # (assumes lfield and lgage are both enabled -- TODO confirm)
      s_sfroff = sfroff.copy(deepcopy=True)
      s_sfroff.name = 'Scaled Sfroff'
      s_sfroff *= discharge.getArray().mean()/sfroff.getArray().mean()
      print(s_sfroff)
      varlist += [s_sfroff]
    elif lprecip:
      # scale both runoff fields by the observed/model precipitation ratio
      assert unity.precip.units == wrf.precip.units
      scale = unity.precip.getArray().mean()/wrf.precip.getArray().mean()
      s_sfroff = sfroff.copy(deepcopy=True); s_sfroff *= scale; s_sfroff.name = 'Scaled Surface Runoff'
      s_runoff = runoff.copy(deepcopy=True); s_runoff *= scale; s_runoff.name = 'Scaled Total Runoff'
      varlist += [s_sfroff, s_runoff]
    if lfield and lgage:
      difference = sfroff - discharge
      difference.name = 'Difference'
      varlist += [difference]
      if ldisc or lprecip:
        # N.B.: s_sfroff is only defined when ldisc or lprecip is set above
        s_difference = s_sfroff - discharge
        s_difference.name = 'Scaled Difference'
        varlist += [s_difference]
    for var in varlist: var.plot = discharge.plot # harmonize plotting
    #print sfroff.plot.name, sfroff.plot.units

    # plot properties
    varatts = dict()
    varatts['Total Runoff'] = dict(color='purple', linestyle='--')
    varatts['Surface Runoff'] = dict(color='green', linestyle='--')
    varatts['Observed River Runoff'] = dict(color='green', linestyle='', marker='o', markersize=5)
    varatts['Difference'] = dict(color='red', linestyle='--')
    # add scaled variables (same colors, but solid lines)
    satts = {}
    for key,val in varatts.items():
      val = val.copy(); val['linestyle'] = '-'
      satts['Scaled '+key] = val
    varatts.update(satts)
    # determine legend (only drawn on panels after the first)
    if n == 0: legend = None
    else: legend = dict(loc=1, labelspacing=0.125, handlelength=2.5, handletextpad=0.5, fancybox=True)
    # plot runoff
    print(' - creating plot')
    ax.title_size = 'large'
    ax.title_height = 0.04
    plts = ax.linePlot(varlist, plotatts=varatts, title=basin.long_name, hline=0,
                       xlim=(0.5,12.5), lperi=True, lparasiteMeans=True,
                       ylim=(-2.5,5), xlabel='Seasonal Cycle [{1:s}]', legend=legend, lprint=True)

  if lprint:
    print(' - writing file')
    # filename reflects which scaling was applied
    if ldisc: filename = 'runoff_discharge.pdf'
    elif lprecip: filename = 'runoff_precip.pdf'
    else: filename = 'runoff_test.pdf'
    print(('\nSaving figure in '+filename))
    fig.savefig(paper_folder+filename, dpi=300) # save figure to pdf
    print(figure_folder)

  ## show plots after all iterations
  pyl.show()
259 |
--------------------------------------------------------------------------------
/src/datasets/C1W.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on Nov. 07, 2020
3 |
A module to read C1W data; this includes meta data for the C1W grids
as well as functions to load the converted and aggregated data.
6 |
7 | @author: Andre R. Erler, GPL v3
8 | '''
9 |
10 | # external imports
11 | import os.path as osp
12 | import pandas as pd
13 | import numpy as np
14 | import netCDF4 as nc # netCDF4-python module
15 | import xarray as xr
16 | from collections import namedtuple
17 | # internal imports
18 | from datasets.common import getRootFolder
19 | from geodata.gdal import GridDefinition
20 | from datasets.misc import loadXRDataset, getFolderFileName
21 |
22 |
## Meta-vardata

dataset_name = 'C1W'
root_folder = getRootFolder(dataset_name=dataset_name, fallback_name='NRCan') # get dataset root folder based on environment variables

# C1W projection: Albers Equal-Area over North America (GRS80 ellipsoid)
projdict = dict(proj='aea', lat_0=40, lon_0=-96, lat_1=20, lat_2=60, x_0=0, y_0=0, ellps='GRS80', towgs84='0,0,0,0,0,0,0', units='m', name=dataset_name)
proj4_string = '+proj=aea +lat_0=40 +lon_0=-96 +lat_1=20 +lat_2=60 +x_0=0 +y_0=0 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +name={} +no_defs'.format(dataset_name)
# Carcajou Watershed, 5 km
# geotransform is (x_origin, dx, 0, y_origin, 0, dy) in projection units (m)
ccj1_geotransform = (-1618000, 5000, 0, 3058000, 0, 5000)
ccj1_size = (22, 33) # (x,y) grid size
# NOTE(review): projection=None creates this grid without the AEA projection
# defined above -- presumably attached elsewhere; verify
ccj1_grid = GridDefinition(name=dataset_name, projection=None, geotransform=ccj1_geotransform, size=ccj1_size)

varatts_list = dict()
# attributes of soil variables from reanalysis
varatts_list['C1W_Soil'] = dict(
    # state variables: layered soil temperatures and snow water equivalent
    soil_temp_0cm_50cm = dict(name='Tsl1',units='K', long_name='Soil Temperature, Layer 1'),
    soil_temp_50cm_100cm = dict(name='Tsl2',units='K', long_name='Soil Temperature, Layer 2'),
    soil_temp_100cm_200cm = dict(name='Tsl3',units='K', long_name='Soil Temperature, Layer 3'),
    swe = dict(name='snow', units='kg/m^2', scalefactor=1, long_name='Snow Water Equivalent'), # water equivalent
    # axes (don't have their own file)
    time_stamp = dict(name='time_stamp', units='', long_name='Time Stamp'), # readable time stamp (string)
    time = dict(name='time', units='days', long_name='Days'), # time coordinate
    lon = dict(name='lon', units='deg', long_name='Longitude'), # geographic longitude
    lat = dict(name='lat', units='deg', long_name='Latitude'), # geographic latitude
    )
# list of (translated) variable names to load, per sub-dataset
default_varlists = {name:[atts['name'] for atts in varatts.values()] for name,varatts in varatts_list.items()}
# list of sub-datasets/subsets with titles
DSNT = namedtuple(typename='Dataset', field_names=['name','interval','resolution','title',])
dataset_attributes = dict(C1W_Soil_1deg = DSNT(name='C1W_Soil_1deg',interval='1M', resolution=1.0, title='1 deg. Soil Ensemble',), # 1 degree Ensemble
                          C1W_Soil = DSNT(name='C1W_Soil',interval='1M', resolution=0.05, title='0.05 deg. Soil Ensemble',),) # 0.05 degree Ensemble

# settings for NetCDF-4 files
avgfolder = root_folder + dataset_name.lower()+'avg/'
avgfile = 'c1w{0:s}_clim{1:s}.nc' # the filename needs to be extended: biascorrection, grid and period
tsfile = 'c1w_{0:s}{1:s}{2:s}_monthly.nc' # extend with biascorrection, variable and grid type
daily_folder = root_folder + dataset_name.lower()+'_daily/'
netcdf_filename = 'c1w_{:s}_daily.nc' # extend with variable name
63 | netcdf_dtype = np.dtype('180, lon_data-360, lon_data) })
91 | # update name and title with sub-dataset
92 | xds.attrs['name'] = subset
93 | xds.attrs['title'] = dataset_attributes[subset].title + xds.attrs['title'][len(subset)-1:]
94 | return xds
95 |
96 |
## Dataset API
# standardized module-level attributes and loader hooks expected by the
# batch-processing framework (unavailable loaders are set to None)

dataset_name # dataset name
root_folder # root folder of the dataset
orig_file_pattern = netcdf_filename # filename pattern: variable name (daily)
ts_file_pattern = tsfile # filename pattern: variable name and grid
clim_file_pattern = avgfile # filename pattern: grid and period
data_folder = avgfolder # folder for user data
grid_def = {'CCJ1':ccj1_grid} # just one for now...
LTM_grids = [] # grids that have long-term mean data
TS_grids = ['',] # grids that have time-series data
grid_res = {res:0.25 for res in TS_grids} # no special name, since there is only one...
default_grid = None
# functions to access specific datasets
loadLongTermMean = None # climatology provided by publisher
loadDailyTimeSeries = None # daily time-series data
# monthly time-series data for batch processing
loadTimeSeries = loadC1W # sort of... with defaults
loadClimatology = None # pre-processed, standardized climatology
loadStationClimatology = None # climatologies without associated grid (e.g. stations)
loadStationTimeSeries = None # time-series without associated grid (e.g. stations)
loadShapeClimatology = None # climatologies without associated grid (e.g. provinces or basins)
loadShapeTimeSeries = None # time-series without associated grid (e.g. provinces or basins)
120 |
121 |
## abuse for testing
if __name__ == '__main__':

  import time, gc, os

  #print('xarray version: '+xr.__version__+'\n')
  # preserve variable/dataset attributes through xarray operations
  xr.set_options(keep_attrs=True)

  # import dask
  # from dask.distributed import Client, LocalCluster
  # # force multiprocessing (4 cores)
  # cluster = LocalCluster(n_workers=2, memory_limit='1GB')
  # cluster = LocalCluster(n_workers=4, memory_limit='6GB')
  # cluster = LocalCluster(n_workers=1)
  # client = Client(cluster)

  # select which test modes to run
  modes = []
  modes += ['load_TimeSeries']
  # modes += ['simple_test']

  grid = None; resampling = None

  dataset = 'C1W_Soil' # sub-dataset/subset name
  resolution = 'NA005' # resolution tag used in the filenames

  # variable list
  varlist = ['Tsl1'] # soil temperature, layer 1

  # period = (2010,2019)
  # period = (1997,2018)
  # period = (1980,2018)

  # loop over modes
  for mode in modes:


    if mode == 'simple_test':

      # open a monthly file directly with xarray, bypassing the dataset API
      # (the redundant local 'import xarray as xr' was removed; xarray is
      # already imported at module level)
      folder = root_folder + dataset.lower()+'avg/'
      filename = '{}_{}_monthly.nc'.format(dataset.lower(), resolution.lower())

      xds = xr.load_dataset(folder + filename, decode_times=False, chunks={'time':1, 'lat':64, 'lon':64})
      print(xds)

    elif mode == 'load_TimeSeries':

      # load monthly time-series via the dataset API (stray 'pass' removed)
      lxarray = False # toggle extra xarray-specific diagnostics
      varname = varlist[0]
      xds = loadC1W(varlist=varlist, resolution=resolution, dataset=dataset, )
      print(xds)
      print('')
      xv = xds[varname]
      print(xv)
      print(xv.name)
      if lxarray:
        print(('Size in Memory: {:6.1f} MB'.format(xv.nbytes/1024./1024.)))
181 |
--------------------------------------------------------------------------------
/src/datasets/PCIC.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-07-09
3 |
4 | This module contains meta data and access functions for the updated PRISM climatology as distributed by
5 | PCIC (Pacific Climate Impact Consortium).
6 |
7 | @author: Andre R. Erler, GPL v3
8 | '''
9 |
10 | # external imports
11 | import numpy as np
12 | import os
13 | # internal imports
14 | from geodata.netcdf import DatasetNetCDF
15 | from geodata.gdal import addGDALtoDataset
16 | from utils.nctools import writeNetCDF
17 | from datasets.common import getRootFolder, grid_folder, transformPrecip
18 | from datasets.common import loadObservations, addLandMask, addLengthAndNamesOfMonth, getFileName
19 | # from geodata.utils import DatasetError
20 | from warnings import warn
21 | from geodata.gdal import GridDefinition
22 |
## PCIC PRISM Meta-data

dataset_name = 'PCIC'
root_folder = getRootFolder(dataset_name=dataset_name) # get dataset root folder based on environment variables

# PRISM grid definition (geographic lat/lon at ~1/120 degree resolution)
dlat = dlon = 1./120. # 0.0083333333
dlat2 = dlon2 = 1./240. # half step
nlat = 1680 # slat = 14 deg
nlon = 3241 # slon = 27 deg
# N.B.: coordinates refer to grid points (CF convention), commented values refer to box edges (GDAL convention)
llclat = 48. # 48.0000000000553
# llclat = 48.0000000000553 # 48.
llclon = -140. # -140.0

# shift the lower-left corner by half a grid box to convert the grid-point
# (CF) origin to the box-edge (GDAL) origin
geotransform = (llclon-dlon2, dlon, 0.0, llclat-dlat2, 0.0, dlat)
size = (nlon,nlat) # (x,y) map size of PRISM grid
# make GridDefinition instance
PCIC_grid = GridDefinition(name=dataset_name, projection=None, geotransform=geotransform, size=size)
42 |
43 |
## Functions that handle access to the original PCIC NetCDF files

# variable attributes and names in original PCIC files
# (the offset of 273.15 converts degC to K; 'transform' applies a unit
# conversion function to precipitation)
ltmvaratts = dict(tmin = dict(name='Tmin', units='K', atts=dict(long_name='Minimum 2m Temperature'), offset=273.15), # 2m minimum temperature
                  tmax = dict(name='Tmax', units='K', atts=dict(long_name='Maximum 2m Temperature'), offset=273.15), # 2m maximum temperature
                  pr = dict(name='precip', units='mm/month', atts=dict(long_name='Total Precipitation'), transform=transformPrecip), # total precipitation
                  # axes (don't have their own file; listed in axes)
                  time = dict(name='time', units='days', atts=dict(long_name='days since beginning of year'), offset=-5493), # time coordinate
                  lon = dict(name='lon', units='deg E', atts=dict(long_name='Longitude')), # geographic longitude field
                  lat = dict(name='lat', units='deg N', atts=dict(long_name='Latitude'))) # geographic latitude field
# N.B.: the time-series time offset is chosen such that 1979 begins with the origin (time=0)
# list of variables to load
ltmvarlist = list(ltmvaratts.keys()) # also includes coordinate fields

# loads data from original PCIC NetCDF files
# climatology
ltmfolder = root_folder + 'climatology/' # climatology subfolder
ltmfile = '{0:s}_monClim_PRISM_historical_run1_197101-200012.nc' # expand with variable name
def loadPCIC_LTM(name=dataset_name, varlist=None, varatts=ltmvaratts, filelist=None, folder=ltmfolder):
  ''' Get a properly formatted dataset of the monthly PCIC PRISM climatology.

      Arguments:
        name:     dataset name (default: 'PCIC')
        varlist:  variables to load; defaults to all keys in varatts
        varatts:  attribute/translation dictionary for the source variables
        filelist: explicit list of NetCDF files; by default inferred from varlist
        folder:   folder containing the original climatology files
  '''
  # translate varlist
  if varlist is None: varlist = list(varatts.keys())
  #if varlist and varatts: varlist = translateVarNames(varlist, varatts)
  # generate file list from variable names, unless an explicit list was passed
  # (the original code always overwrote 'filelist', silently ignoring the argument)
  if filelist is None:
    filelist = [ltmfile.format(var) for var in varlist if var not in ('time','lat','lon')]
  # load variables separately
  dataset = DatasetNetCDF(name=name, folder=folder, filelist=filelist, varlist=varlist, varatts=varatts, ncformat='NETCDF4')
  dataset = addGDALtoDataset(dataset, projection=None, geotransform=None, gridfolder=grid_folder)
  # N.B.: projection should be auto-detected as geographic
  # return formatted dataset
  return dataset
75 |
76 |
## Functions that provide access to well-formatted PCIC PRISM NetCDF files

# pre-processed climatology files (varatts etc. should not be necessary)
avgfile = 'pcic{0:s}_clim{1:s}.nc' # formatted NetCDF file; placeholders are expanded with grid and period strings
avgfolder = root_folder + 'pcicavg/' # prefix
# function to load the pre-processed (gridded) climatology files
def loadPCIC(name=dataset_name, period=None, grid=None, resolution=None, varlist=None, varatts=None,
             folder=None, filelist=None, lautoregrid=True):
  ''' Load the pre-processed monthly PCIC PRISM climatology as a DatasetNetCDF instance. '''
  # fall back to the standard climatology folder
  if folder is None:
    folder = avgfolder
  # PCIC distributes a single full climatology, so a specific period cannot be honored
  if period is not None:
    warn('Only the full climatology is currently available: setting \'period\' to None.')
    period = None
  # delegate to the generic observational loader with PRISM-specific defaults
  # and hand the resulting dataset straight back to the caller
  return loadObservations(name=name, folder=folder, projection=None, period=period, grid=grid,
                          varlist=varlist, varatts=varatts, filepattern=avgfile, filelist=filelist,
                          lautoregrid=lautoregrid, mode='climatology')
100 |
# function to load station data
def loadPCIC_Stn(name=dataset_name, period=None, station=None, resolution=None, varlist=None,
                 varatts=None, folder=avgfolder, filelist=None):
  ''' Load the pre-processed monthly PCIC PRISM climatology at station locations as a DatasetNetCDF instance. '''
  # a specific period is not supported: only the single full climatology exists
  if period is not None:
    warn('Only the full climatology is currently available: setting \'period\' to None.')
    period = None
  # station-based loading: no grid, no shape, and no automatic regridding
  return loadObservations(name=name, folder=folder, grid=None, station=station, shape=None,
                          varlist=varlist, varatts=varatts, filepattern=avgfile, projection=None,
                          filelist=filelist, lautoregrid=False, period=period, mode='climatology')
115 |
# function to load averaged data
def loadPCIC_Shp(name=dataset_name, period=None, shape=None, resolution=None, varlist=None,
                 varatts=None, folder=avgfolder, filelist=None, lencl=False):
  ''' Load the pre-processed monthly PCIC PRISM climatology averaged over regions as a DatasetNetCDF instance. '''
  # a specific period is not supported: only the single full climatology exists
  if period is not None:
    warn('Only the full climatology is currently available: setting \'period\' to None.')
    period = None
  # shape-averaged loading: no grid, no stations, and no automatic regridding
  return loadObservations(name=name, folder=folder, grid=None, station=None, shape=shape, lencl=lencl,
                          varlist=varlist, varatts=varatts, filepattern=avgfile, projection=None,
                          filelist=filelist, lautoregrid=False, period=period, mode='climatology')
130 |
131 |
## Dataset API
# standardized module-level attributes and loader hooks expected by the
# batch-processing framework (unavailable loaders are set to None)

dataset_name # dataset name
root_folder # root folder of the dataset
ts_file_pattern = None # no time-series data available for PCIC
clim_file_pattern = avgfile # filename pattern
data_folder = avgfolder # folder for user data
grid_def = {'':PCIC_grid} # no special name, since there is only one...
LTM_grids = [''] # grids that have long-term mean data
TS_grids = [] # grids that have time-series data
grid_res = {'':0.008} # approximate resolution in degrees at 45 degrees latitude
default_grid = PCIC_grid
# functions to access specific datasets
loadLongTermMean = loadPCIC_LTM # climatology provided by publisher
loadTimeSeries = None # time-series data
loadClimatology = loadPCIC # pre-processed, standardized climatology
loadStationClimatology = loadPCIC_Stn # climatologies without associated grid (e.g. stations or basins)
loadShapeClimatology = loadPCIC_Shp # climatologies averaged over regions (e.g. provinces or basins)
150 |
151 |
if __name__ == '__main__':

  # select the test or conversion mode to run
  mode = 'test_climatology'
  # mode = 'test_point_climatology'
  # mode = 'convert_climatology'
  pntset = 'shpavg' # 'ecprecip

  # do some tests
  if mode == 'test_climatology':

    # load NetCDF dataset (regridded to the 'wc2_d01' grid)
    dataset = loadPCIC(grid='wc2_d01')
    # dataset = loadPCIC()
    print(dataset)
    print('')
    stnds = loadPCIC_Stn(station='ecprecip')
    print(stnds)
    print('')
    print((dataset.geotransform))
    print((dataset.precip.masked))
    print((dataset.precip.getArray().mean()))
    print('')
    # display the data mask as a quick visual check
    import pylab as pyl
    pyl.imshow(np.flipud(dataset.datamask.getArray()[:,:]))
    pyl.colorbar(); pyl.show(block=True)


  elif mode == 'test_point_climatology':

    # load point climatology (shape-averaged or station-based)
    print('')
    if pntset in ('shpavg',): dataset = loadPCIC_Shp(shape=pntset)
    else: dataset = loadPCIC_Stn(station=pntset)
    print(dataset)
    print('')
    print((dataset.time))
    print((dataset.time.coord))

  ## convert PCIC NetCDF files to proper climatology
  elif mode == 'convert_climatology':

    # load dataset
    source = loadPCIC_LTM().load() # load, otherwise masking does not work!
    # change meta-data
    source.name = 'PCIC'
    source.title = 'PCIC PRISM Climatology'
    # load data into memory (and ignore last time step, which is just the annual average)
    # source.load(time=(0,12)) # exclusive the last index
    # N.B.: now we need to trim the files beforehand...
    # make normal dataset
    dataset = source.copy()
    source.close()

    ## add new variables
    # add landmask (it's not really a landmask, though)
    dataset.precip.mask(maskValue=-9999.) # mask all fields using the missing value flag
    maskatts = dict(name='datamask', units='', long_name='Mask for Climatology Fields',
                    description='where this mask is non-zero, no data is available')
    addLandMask(dataset, maskname='datamask',atts=maskatts) # create mask from precip mask
    # add length and names of month
    addLengthAndNamesOfMonth(dataset, noleap=False)
    # add mean temperature
    T2 = dataset.Tmin + dataset.Tmax # average temperature is just the average between min and max
    T2 /= 2.
    T2.name = 'T2'; T2.atts.long_name='Average 2m Temperature'
    print(T2)
    dataset += T2 # add to dataset
    # rewrite time axis
    time = dataset.time
    time.load(data=np.arange(1,13, dtype=time.dtype)) # 1 to 12 (incl.) for climatology
    time.units = 'month'; time.atts.long_name='Month of the Year'
    print(time)
    # print diagnostic
    print(dataset)
    print('')
    for var in dataset:
      #print(var)
      if not var.strvar:
        print(('Mean {0:s}: {1:s} {2:s}'.format(var.atts.long_name, str(var.mean()), var.units)))
      #print('')
    print('')

    # clean some offending attributes (HDF5 artifacts on the axes)
    for var in dataset.axes.values():
      for name in ('NAME','CLASS'):
        if name in var.atts: del var.atts[name]


    ## create new NetCDF file
    # figure out a different filename
    filename = getFileName(name='PCIC', filepattern=avgfile)
    if os.path.exists(avgfolder+filename): os.remove(avgfolder+filename)
    # write data and some annotation
    sink = writeNetCDF(dataset, avgfolder+filename, close=False)
    # add_strvar(sink,'name_of_month', name_of_month, 'time', # add names of month
    #            atts=dict(name='name_of_month', units='', long_name='Name of the Month'))
    sink.close() # close...
    print(('Saving Climatology to: '+filename))
    print(avgfolder)
252 |
253 |
--------------------------------------------------------------------------------
/src/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2013-09-08
3 |
4 | A package that provides access to a variety of datasets for use with the geodata package.
The package itself exposes functions to load certain datasets, while the
submodules also contain metadata and projection parameters.
7 |
8 | @author: Andre R. Erler, GPL v3
9 | '''
10 |
11 | dataset_list = ['NARR','CFSR','GPCC','CRU','PRISM','PCIC','EC','WSC','Unity']
12 | gridded_datasets = ['NARR','CFSR','GPCC','CRU','PRISM','PCIC','Unity']
13 |
14 | # from datasets.NARR import loadNARR_LTM, loadNARR_TS, loadNARR
15 | # from datasets.CFSR import loadCFSR_TS, loadCFSR
16 | # from datasets.GPCC import loadGPCC_LTM, loadGPCC_TS, loadGPCC
17 | # from datasets.CRU import loadCRU_TS, loadCRU
18 | # from datasets.PRISM import loadPRISM
19 | # from datasets.Unity import loadUnity
20 |
--------------------------------------------------------------------------------
/src/geodata/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2013-08-19
3 |
4 | A package that provides a unified interface to various datasets (including netcdf datasets)
5 | and adds additional functionality, such as regridding and time averaging.
6 |
7 | The package is primarily intended for use with climatological datasets.
8 |
9 | @author: Andre R. Erler, GPL v3
10 | '''
11 |
12 | #from geodata.base import * # can cause import errors with graphics, even if graphics are not needed
--------------------------------------------------------------------------------
/src/geodata/station.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-02-13
3 |
4 | A module that introduces a special class intended for station datasets (i.e. time-series only).
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
9 | # internal imports
10 | from geodata.base import Dataset, Variable, Axis
11 | from geodata.netcdf import DatasetNetCDF
12 |
13 | ## the basic station class, without any geographic information
14 | class StationDataset(Dataset):
15 | '''
16 | A Dataset class that is intended for station time-series data (usually one-dimensional), at one
17 | particular location; this class also holds additional meta data.
18 | '''
19 |
20 | def __init__(self, name=None, title=None, ID=None, varlist=None, atts=None, **kwargs):
21 | '''
22 | This class can be initialized simply with a name and a (optionally) a set of variables.
23 |
24 | Station Attributes:
25 | ID = @property # station ID
26 | Basic Attributes:
27 | name = @property # short name (links to name in atts)
28 | title = @property # descriptive name (links to name in atts)
29 | variables = dict() # dictionary holding Variable instances
30 | axes = dict() # dictionary holding Axis instances (inferred from Variables)
31 | atts = AttrDict() # dictionary containing global attributes / meta data
32 | '''
33 | # initialize Dataset using parent constructor (kwargs are necessary, in order to support multiple inheritance)
34 | super(StationDataset,self).__init__(name=name, title=title, varlist=varlist, atts=atts, **kwargs)
35 | # set remaining attibutes
36 | if ID is not None or 'ID' not in self.atts: self.atts['ID'] = ID
37 |
38 | @property
39 | def ID(self):
40 | ''' The station ID, usually an alphanumerical code. '''
41 | return self.atts['ID']
42 | @ID.setter
43 | def ID(self, ID):
44 | self.atts['ID'] = ID
45 |
46 |
47 | ## the NetCDF version of the station dataset
48 | class StationNetCDF(StationDataset,DatasetNetCDF):
49 | '''
50 | A StationDataset, associated with a NetCDF file, inheriting the properties of DatasetNetCDF.
51 | WARNING: this class has not been tested!
52 | '''
53 |
54 | def __init__(self, name=None, title=None, ID=None, dataset=None, filelist=None, varlist=None, varatts=None,
55 | atts=None, axes=None, multifile=False, check_override=None, folder='', mode='r', ncformat='NETCDF4',
56 | squeeze=True):
57 | '''
58 | Create a Dataset from one or more NetCDF files; Variables are created from NetCDF variables.
59 |
60 | Station Attributes:
61 | ID = @property # station ID
62 | NetCDF Attributes:
63 | mode = 'r' # a string indicating whether read ('r') or write ('w') actions are intended/permitted
64 | datasets = [] # list of NetCDF datasets
65 | dataset = @property # shortcut to first element of self.datasets
66 | filelist = [] # files used to create datasets
67 | Basic Attributes:
68 | variables = dict() # dictionary holding Variable instances
69 | axes = dict() # dictionary holding Axis instances (inferred from Variables)
70 | atts = AttrDict() # dictionary containing global attributes / meta data
71 | '''
72 | # call parent constructor
73 | super(StationNetCDF,self).__init__(self,self, name=None, title=None, ID=None, dataset=None, filelist=None,
74 | varlist=None, varatts=None, atts=None, axes=None, multifile=False,
75 | check_override=None, folder='', mode='r', ncformat='NETCDF4', squeeze=True)
76 |
77 |
--------------------------------------------------------------------------------
/src/legacy_plotting/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on Apr 15, 2016
3 |
4 | A retirement home for old plotting functions that are still used, but should not be developed further.
5 | Mainly used by the 'areastats' and 'multimap' scripts and associated modules.
6 |
7 | @author: Andre R. Erler, GPL v3
8 | '''
9 |
10 |
--------------------------------------------------------------------------------
/src/plotting/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2013-10-10
3 |
4 | A package providing plotting functionality for common climatological applications, such as projected maps of
5 | surface or pressure level data.
6 |
7 | The package is intended for use with climatological datasets in the form of Dataset classes.
8 |
9 | @author: Andre R. Erler, GPL v3
10 | '''
11 |
12 | # from plotting.figure import getFigAx # creates circular reference
--------------------------------------------------------------------------------
/src/plotting/archive/lineplots.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-03-16
3 |
4 | some useful plotting functions that take advantage of variable meta data
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
9 | # external imports
10 | from types import NoneType
11 | from warnings import warn
12 | import numpy as np
13 | import matplotlib as mpl
14 | # import matplotlib.pylab as pyl
15 | # #from mpl_toolkits.axes_grid1 import ImageGrid
16 | # linewidth = .75
17 | # mpl.rc('lines', linewidth=linewidth)
18 | # if linewidth == 1.5: mpl.rc('font', size=12)
19 | # elif linewidth == .75: mpl.rc('font', size=8)
20 | # else: mpl.rc('font', size=10)
21 | # # prevent figures from closing: don't run in interactive mode, or plt.show() will not block
22 | # pyl.ioff()
23 | # internal imports
24 | from utils.signalsmooth import smooth
25 | from plotting.misc import getPlotValues, getFigAx, updateSubplots
26 | from geodata.base import Variable
27 | from geodata.misc import AxisError, ListError
28 |
29 | #import pdb
30 | #pdb.set_trace()
31 |
32 | def linePlot(varlist, ax=None, fig=None, linestyles=None, varatts=None, legend=None,
33 | xline=None, yline=None, title=None, flipxy=None, xlabel=None, ylabel=None, xlim=None,
34 | ylim=None, lsmooth=False, lprint=False, **kwargs):
35 | ''' A function to draw a list of 1D variables into an axes, and annotate the plot based on variable properties. '''
36 | warn('Deprecated function: use Figure or Axes class methods.')
37 | # create axes, if necessary
38 | if ax is None:
39 | if fig is None: fig,ax = getFigAx(1) # single panel
40 | else: ax = fig.axes[0]
41 | # varlist is the list of variable objects that are to be plotted
42 | #print varlist
43 | if isinstance(varlist,Variable): varlist = [varlist]
44 | elif not isinstance(varlist,(tuple,list)) or not all([isinstance(var,Variable) for var in varlist]): raise TypeError
45 | for var in varlist: var.squeeze() # remove singleton dimensions
46 | # linestyles is just a list of line styles for each plot
47 | if isinstance(linestyles,(str,NoneType)): linestyles = [linestyles]*len(varlist)
48 | elif not isinstance(linestyles,(tuple,list)):
49 | if not all([isinstance(linestyles,str) for var in varlist]): raise TypeError
50 | if len(varlist) != len(linestyles): raise ListError("Failed to match linestyles to varlist!")
51 | # varatts are variable-specific attributes that are parsed for special keywords and then passed on to the
52 | if varatts is None: varatts = [dict()]*len(varlist)
53 | elif isinstance(varatts,dict):
54 | tmp = [varatts[var.name] if var.name in varatts else dict() for var in varlist]
55 | if any(tmp): varatts = tmp # if any variable names were found
56 | else: varatts = [varatts]*len(varlist) # assume it is one varatts dict, which will be used for all variables
57 | elif not isinstance(varatts,(tuple,list)): raise TypeError
58 | if not all([isinstance(atts,dict) for atts in varatts]): raise TypeError
59 | # check axis: they need to have only one axes, which has to be the same for all!
60 | if len(varatts) != len(varlist): raise ListError("Failed to match varatts to varlist!")
61 | for var in varlist:
62 | if var.ndim > 1: raise AxisError("Variable '{}' has more than one dimension; consider squeezing.".format(var.name))
63 | elif var.ndim == 0: raise AxisError("Variable '{}' is a scalar; consider display as a line.".format(var.name))
64 | # loop over variables
65 | plts = []; varname = None; varunits = None; axname = None; axunits = None # list of plot handles
66 | for var,linestyle,varatt in zip(varlist,linestyles,varatts):
67 | varax = var.axes[0]
68 | # scale axis and variable values
69 | axe, axunits, axname = getPlotValues(varax, checkunits=axunits, checkname=None)
70 | val, varunits, varname = getPlotValues(var, checkunits=varunits, checkname=None)
71 | # variable and axis scaling is not always independent...
72 | if var.plot is not None and varax.plot is not None:
73 | if 'preserve' in var.plot and 'scalefactor' in varax.plot:
74 | if varax.units != axunits and var.plot.preserve == 'area':
75 | val /= varax.plot.scalefactor
76 | # figure out keyword options
77 | kwatts = kwargs.copy(); kwatts.update(varatt) # join individual and common attributes
78 | if 'label' not in kwatts: kwatts['label'] = var.name # default label: variable name
79 | # N.B.: other scaling behavior could be added here
80 | if lprint: print(varname, varunits, val.mean())
81 | if lsmooth: val = smooth(val)
82 | # figure out orientation
83 | if flipxy: xx,yy = val, axe
84 | else: xx,yy = axe, val
85 | # call plot function
86 | if linestyle is None: plts.append(ax.plot(xx, yy, **kwatts)[0])
87 | else: plts.append(ax.plot(xx, yy, linestyle, **kwatts)[0])
88 | # set axes limits
89 | if isinstance(xlim,(list,tuple)) and len(xlim)==2: ax.set_xlim(*xlim)
90 | elif xlim is not None: raise TypeError
91 | if isinstance(ylim,(list,tuple)) and len(ylim)==2: ax.set_ylim(*ylim)
92 | elif ylim is not None: raise TypeError
93 | # set title
94 | if title is not None:
95 | ax.set_title(title, dict(fontsize='medium'))
96 | pos = ax.get_position()
97 | pos = pos.from_bounds(x0=pos.x0, y0=pos.y0, width=pos.width, height=pos.height-0.03)
98 | ax.set_position(pos)
99 | # set axes labels
100 | if flipxy: xname,xunits,yname,yunits = varname,varunits,axname,axunits
101 | else: xname,xunits,yname,yunits = axname,axunits,varname,varunits
102 | if not xlabel: xlabel = '{0:s} [{1:s}]'.format(xname,xunits) if xunits else '{0:s}'.format(xname)
103 | else: xlabel = xlabel.format(xname,xunits)
104 | if not ylabel: ylabel = '{0:s} [{1:s}]'.format(yname,yunits) if yunits else '{0:s}'.format(yname)
105 | else: ylabel = ylabel.format(yname,yunits)
106 | # a typical custom label that makes use of the units would look like this: 'custom label [{1:s}]',
107 | # where {} will be replaced by the appropriate default units (which have to be the same anyway)
108 | xpad = 2; xticks = ax.get_xaxis().get_ticklabels()
109 | ypad = -2; yticks = ax.get_yaxis().get_ticklabels()
110 | # len(xticks) > 0 is necessary to avoid errors with AxesGrid, which removes invisible tick labels
111 | if len(xticks) > 0 and xticks[-1].get_visible(): ax.set_xlabel(xlabel, labelpad=xpad)
112 | elif len(yticks) > 0 and not title: yticks[0].set_visible(False) # avoid overlap
113 | if len(yticks) > 0 and yticks[-1].get_visible(): ax.set_ylabel(ylabel, labelpad=ypad)
114 | elif len(xticks) > 0: xticks[0].set_visible(False) # avoid overlap
115 | # make monthly ticks
116 | if axname == 'time' and axunits == 'month':
117 | ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) # ax.minorticks_on()
118 | # add legend
119 | if legend:
120 | legatts = dict()
121 | if ax.get_yaxis().get_label():
122 | legatts['fontsize'] = ax.get_yaxis().get_label().get_fontsize()
123 | if isinstance(legend,dict): legatts.update(legend)
124 | elif isinstance(legend,(int,np.integer,float,np.inexact)): legatts['loc'] = legend
125 | ax.legend(**legatts)
126 | # add orientation lines
127 | if isinstance(xline,(int,np.integer,float,np.inexact)): ax.axhline(y=xline, color='black')
128 | elif isinstance(xline,dict): ax.axhline(**xline)
129 | if isinstance(yline,(int,np.integer,float,np.inexact)): ax.axvline(x=yline, color='black')
130 | elif isinstance(xline,dict): ax.axvline(**yline)
131 | # return handle
132 | return plts
133 |
134 |
135 | # add common/shared legend to a multi-panel plot
136 | def addSharedLegend(fig, plts=None, legs=None, fontsize=None, **kwargs):
137 | ''' add a common/shared legend to a multi-panel plot '''
138 | # complete input
139 | warn('Deprecated function: use Figure or Axes class methods.')
140 | if legs is None: legs = [plt.get_label() for plt in plts]
141 | elif not isinstance(legs, (list,tuple)): raise TypeError
142 | if not isinstance(plts, (list,tuple,NoneType)): raise TypeError
143 | # figure out fontsize and row numbers
144 | fontsize = fontsize or fig.axes[0].get_yaxis().get_label().get_fontsize() # or fig._suptitle.get_fontsize()
145 | nlen = len(plts) if plts else len(legs)
146 | if fontsize > 11: ncols = 2 if nlen == 4 else 3
147 | else: ncols = 3 if nlen == 6 else 4
148 | # make room for legend
149 | leghgt = np.ceil(nlen/ncols) * fontsize + 0.055
150 | ax = fig.add_axes([0, 0, 1,leghgt]) # new axes to hold legend, with some attributes
151 | ax.set_frame_on(False); ax.axes.get_yaxis().set_visible(False); ax.axes.get_xaxis().set_visible(False)
152 | fig = updateSubplots(fig, mode='shift', bottom=leghgt) # shift bottom upwards
153 | # define legend parameters
154 | legargs = dict(loc=10, ncol=ncols, borderaxespad=0., fontsize=fontsize, frameon=True,
155 | labelspacing=0.1, handlelength=1.3, handletextpad=0.3, fancybox=True)
156 | legargs.update(kwargs)
157 | # create legend and return handle
158 | if plts: legend = ax.legend(plts, legs, **legargs)
159 | else: legend = ax.legend(legs, **legargs)
160 | return legend
161 |
162 |
163 |
164 | # plots with error shading
165 | def addErrorPatch(ax, var, err, color=None, axis=None, xerr=True, alpha=0.25, check=False, cap=-1):
166 | from numpy import append, where, isnan
167 | from matplotlib.patches import Polygon
168 | warn('Deprecated function: use Figure or Axes class methods.')
169 | if isinstance(var,Variable):
170 | if axis is None and var.ndim > 1: raise AxisError
171 | x = var.getAxis(axis).getArray()
172 | y = var.getArray();
173 | if isinstance(err,Variable): e = err.getArray()
174 | else: e = err
175 | else:
176 | if axis is None: raise ValueError
177 | y = axis; x = var; e = err
178 | if check:
179 | e = where(isnan(e),0,e)
180 | if cap > 0: e = where(e>cap,0,e)
181 | if xerr:
182 | ix = append(x-e,(x+e)[::-1])
183 | iy = append(y,y[::-1])
184 | else:
185 | ix = append(y,y[::-1])
186 | iy = append(x-e,(x+e)[::-1])
187 | if color is None: raise NotImplementedError # should take color from plot line (variable)
188 | patch = Polygon(list(zip(ix,iy)), alpha=alpha, facecolor=color, edgecolor=color)
189 | ax.add_patch(patch)
190 | return patch
191 |
192 |
193 | if __name__ == '__main__':
194 | pass
--------------------------------------------------------------------------------
/src/plotting/archive/mapplots.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-03-19
3 |
4 | some useful functions to make map and surface plots that take advantage of variable meta data
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
9 | # external imports
10 | import matplotlib.pylab as pyl
11 | import matplotlib as mpl
12 | #from mpl_toolkits.axes_grid1 import ImageGrid
13 | linewidth = .75
14 | mpl.rc('lines', linewidth=linewidth)
15 | if linewidth == 1.5: mpl.rc('font', size=12)
16 | elif linewidth == .75: mpl.rc('font', size=8)
17 | else: mpl.rc('font', size=10)
18 | # prevent figures from closing: don't run in interactive mode, or plt.show() will not block
19 | pyl.ioff()
20 | # internal imports
21 | from plotting.misc import expandLevelList
22 |
23 |
24 | # function to plot
25 | def srfcPlot():
26 | raise NotImplementedError
27 | return
28 |
29 | # function to place (shared) colorbars at a specified figure margins
30 | def sharedColorbar(fig, cf, clevs, colorbar, cbls, subplot, margins):
31 | loc = colorbar.pop('location','bottom')
32 | # determine size and spacing
33 | if loc=='top' or loc=='bottom':
34 | orient = colorbar.pop('orientation','horizontal') # colorbar orientation
35 | je = subplot[1] # number of colorbars: number of rows
36 | ie = subplot[0] # number of plots per colorbar: number of columns
37 | cbwd = colorbar.pop('cbwd',0.025) # colorbar height
38 | sp = margins['wspace']
39 | wd = (margins['right']-margins['left'] - sp*(je-1))/je # width of each colorbar axis
40 | else:
41 | orient = colorbar.pop('orientation','vertical') # colorbar orientation
42 | je = subplot[0] # number of colorbars: number of columns
43 | ie = subplot[1] # number of plots per colorbar: number of rows
44 | cbwd = colorbar.pop('cbwd',0.025) # colorbar width
45 | sp = margins['hspace']
46 | wd = (margins['top']-margins['bottom'] - sp*(je-1))/je # width of each colorbar axis
47 | shrink = colorbar.pop('shrinkFactor',1)
48 | # shift existing subplots
49 | if loc=='top': newMargin = margins['top']-margins['hspace'] -cbwd
50 | elif loc=='right': newMargin = margins['right']-margins['left']/2 -cbwd
51 | else: newMargin = 2*margins[loc] + cbwd
52 | fig.subplots_adjust(**{loc:newMargin})
53 | # loop over variables (one colorbar for each)
54 | for i in range(je):
55 | if dir=='vertical': ii = je-i-1
56 | else: ii = i
57 | offset = (wd+sp)*float(ii) + wd*(1-shrink)/2 # offset due to previous colorbars
58 | # horizontal colorbar(s) at the top
59 | if loc == 'top': ci = i; cax = [margins['left']+offset, newMargin+margins['hspace'], shrink*wd, cbwd]
60 | # horizontal colorbar(s) at the bottom
61 | elif loc == 'bottom': ci = i; cax = [margins['left']+offset, margins[loc], shrink*wd, cbwd]
62 | # vertical colorbar(s) to the left (get axes reference right!)
63 | elif loc == 'left': ci = i*ie; cax = [margins[loc], margins['bottom']+offset, cbwd, shrink*wd]
64 | # vertical colorbar(s) to the right (get axes reference right!)
65 | elif loc == 'right': ci = i*ie; cax = [newMargin+margins['wspace'], margins['bottom']+offset, cbwd, shrink*wd]
66 | # make colorbar
67 | fig.colorbar(mappable=cf[ci],cax=fig.add_axes(cax),ticks=expandLevelList(cbls[i],clevs[i]),
68 | orientation=orient,**colorbar)
69 | # return figure with colorbar (just for the sake of returning something)
70 | return fig
71 |
72 | if __name__ == '__main__':
73 | pass
--------------------------------------------------------------------------------
/src/plotting/archive/misc.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2011-02-28
3 |
4 | utility functions, mostly for plotting, that are not called directly
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | # external imports
10 | from types import ModuleType
11 | import numpy as np
12 | from warnings import warn
13 | # internal imports
14 | from geodata.misc import VariableError, AxisError, isInt
15 | from utils.signalsmooth import smooth # commonly used in conjunction with plotting...
16 |
17 | # import matplotlib as mpl
18 | # import matplotlib.pylab as pyl
19 |
20 | # load matplotlib with some custom defaults
21 | def loadMPL(linewidth=None, mplrc=None, backend='QT4Agg', lion=False):
22 | import matplotlib as mpl
23 | mpl.use(backend) # enforce QT4
24 | import matplotlib.pylab as pyl
25 | # some custom defaults
26 | if linewidth is not None:
27 | mpl.rc('lines', linewidth=linewidth)
28 | if linewidth == 1.5: mpl.rc('font', size=12)
29 | elif linewidth == .75: mpl.rc('font', size=8)
30 | else: mpl.rc('font', size=10)
31 | # apply rc-parameters from dictionary (override custom defaults)
32 | if (mplrc is not None) and isinstance(mplrc,dict):
33 | # loop over parameter groups
34 | for (key,value) in mplrc.items():
35 | mpl.rc(key,**value) # apply parameters
36 | # prevent figures from closing: don't run in interactive mode, or pyl.show() will not block
37 | if lion: pyl.ion()
38 | else: pyl.ioff()
39 | # return matplotlib instance with new parameters
40 | return mpl, pyl
41 |
42 |
43 | # method to check units and name, and return scaled plot value (primarily and internal helper function)
44 | def getPlotValues(var, checkunits=None, checkname=None):
45 | ''' Helper function to check variable/axis, get (scaled) values for plot, and return appropriate units. '''
46 | if var.plot is not None:
47 | varname = var.plot.name
48 | if checkname is not None and varname != checkname: # only check plotname!
49 | raise VariableError("Expected variable name '{}', found '{}'.".format(checkname,varname))
50 | else: varname = var.atts['name']
51 | val = var.getArray(unmask=True, copy=True) # the data to plot
52 | if var.plot is not None:
53 | if var.units != var.plot.units:
54 | val = val * var.plot.scalefactor
55 | varunits = var.plot.units
56 | else:
57 | varunits = var.atts['units']
58 | if var.plot is not None and 'offset' in var.plot: val += var.plot.offset
59 | if checkunits is not None and varunits != checkunits:
60 | raise VariableError("Units for variable '{}': expected {}, found {}.".format(var.name,checkunits,varunits))
61 | # return values, units, name
62 | return val, varunits, varname
63 |
64 |
65 | # Log-axis ticks
66 | def logTicks(ticks, base=None, power=0):
67 | ''' function to generate ticks for a given power of 10 based on a template '''
68 | if not isinstance(ticks, (list,tuple)): raise TypeError
69 | # translate base into power
70 | if base is not None:
71 | if not isinstance(base,(int,np.number,float,np.inexact)): raise TypeError
72 | power = int(np.round(np.log(base)/np.log(10)))
73 | if not isinstance(power,(int,np.integer)): raise TypeError
74 | print(power)
75 | # generate ticks and apply template
76 | strtck = ['']*8
77 | for i in ticks:
78 | if not isinstance(i,(int,np.integer)) or i >= 8: raise ValueError
79 | idx = i-2
80 | if i in ticks: strtck[idx] = str(i)
81 | # adjust order of magnitude
82 | if power > 0: strtck[idx] += '0'*power
83 | elif power < 0: strtck[idx] = '0.' + '0'*(-1-power) + strtck[idx]
84 | # return ticks
85 | return strtck
86 |
87 | # special version for wave numbers
88 | # N, returns ['2','','4','','6','','','']
89 | def nTicks(**kwargs): return logTicks([2,4,6],**kwargs)
90 |
91 | # special version for pressure levelse
92 | # p, returns ['2','3','','5','','7','','']
93 | def pTicks(**kwargs): return logTicks([2,3,5,7],**kwargs)
94 |
95 |
96 | # function to expand level lists and colorbar ticks
97 | def expandLevelList(levels, data=None):
98 | ''' figure out level list based on level parameters and actual data '''
99 | # trivial case: already numpy array
100 | if isinstance(levels,np.ndarray):
101 | return levels
102 | # tuple with three or two elements: use as argument to linspace
103 | elif isinstance(levels,tuple) and (len(levels)==3 or len(levels)==2):
104 | return np.linspace(*levels)
105 | # list or long tuple: recast as array
106 | elif isinstance(levels,(list,tuple)):
107 | return np.asarray(levels)
108 | # use additional info in data to determine limits
109 | else:
110 | # figure out vector limits
111 | # use first two elements, third is number of levels
112 | if isinstance(data,(tuple,list)) and len(data)==3:
113 | minVec = min(data[:2]); maxVec = max(data[:2])
114 | # just treat as level list
115 | else:
116 | minVec = min(data); maxVec = max(data)
117 | # interpret levels as number of levels in given interval
118 | # only one element: just number of levels
119 | if isinstance(levels,(tuple,list,np.ndarray)) and len(levels)==1:
120 | return np.linspace(minVec,maxVec,levels[0])
121 | # numerical value: use as number of levels
122 | elif isinstance(levels,(int,float)):
123 | return np.linspace(minVec,maxVec,levels)
124 |
125 |
126 | ## legacy functions
127 |
128 | # method to return a figure and an array of ImageGrid axes
129 | def getFigAx(subplot, name=None, title=None, figsize=None, mpl=None, margins=None,
130 | sharex=None, sharey=None, AxesGrid=False, ngrids=None, direction='row',
131 | axes_pad = None, add_all=True, share_all=None, aspect=False,
132 | label_mode='L', cbar_mode=None, cbar_location='right',
133 | cbar_pad=None, cbar_size='5%', axes_class=None, lreduce=True):
134 | # configure matplotlib
135 | warn('Deprecated function: use Figure or Axes class methods.')
136 | if mpl is None: import matplotlib as mpl
137 | elif isinstance(mpl,dict): mpl = loadMPL(**mpl) # there can be a mplrc, but also others
138 | elif not isinstance(mpl,ModuleType): raise TypeError
139 | from plotting.figure import MyFigure # prevent circular reference
140 | # figure out subplots
141 | if isinstance(subplot,(np.integer,int)):
142 | if subplot == 1: subplot = (1,1)
143 | elif subplot == 2: subplot = (1,2)
144 | elif subplot == 3: subplot = (1,3)
145 | elif subplot == 4: subplot = (2,2)
146 | elif subplot == 6: subplot = (2,3)
147 | elif subplot == 9: subplot = (3,3)
148 | else: raise NotImplementedError
149 | elif not (isinstance(subplot,(tuple,list)) and len(subplot) == 2) and all(isInt(subplot)): raise TypeError
150 | # create figure
151 | if figsize is None:
152 | if subplot == (1,1): figsize = (3.75,3.75)
153 | elif subplot == (1,2) or subplot == (1,3): figsize = (6.25,3.75)
154 | elif subplot == (2,1) or subplot == (3,1): figsize = (3.75,6.25)
155 | else: figsize = (6.25,6.25)
156 | #elif subplot == (2,2) or subplot == (3,3): figsize = (6.25,6.25)
157 | #else: raise NotImplementedError
158 | # figure out margins
159 | if margins is None:
160 | # N.B.: the rectangle definition is presumably left, bottom, width, height
161 | if subplot == (1,1): margins = (0.09,0.09,0.88,0.88)
162 | elif subplot == (1,2) or subplot == (1,3): margins = (0.06,0.1,0.92,0.87)
163 | elif subplot == (2,1) or subplot == (3,1): margins = (0.09,0.11,0.88,0.82)
164 | elif subplot == (2,2) or subplot == (3,3): margins = (0.055,0.055,0.925,0.925)
165 | else: margins = (0.09,0.11,0.88,0.82)
166 | #elif subplot == (2,2) or subplot == (3,3): margins = (0.09,0.11,0.88,0.82)
167 | #else: raise NotImplementedError
168 | if title is not None: margins = margins[:3]+(margins[3]-0.03,) # make room for title
169 | if AxesGrid:
170 | if share_all is None: share_all = True
171 | if axes_pad is None: axes_pad = 0.05
172 | # create axes using the Axes Grid package
173 | fig = mpl.pylab.figure(facecolor='white', figsize=figsize, FigureClass=MyFigure)
174 | if axes_class is None:
175 | from plotting.axes import MyLocatableAxes
176 | axes_class=(MyLocatableAxes,{})
177 | from mpl_toolkits.axes_grid1 import ImageGrid
178 | # AxesGrid: http://matplotlib.org/mpl_toolkits/axes_grid/users/overview.html
179 | grid = ImageGrid(fig, margins, nrows_ncols = subplot, ngrids=ngrids, direction=direction,
180 | axes_pad=axes_pad, add_all=add_all, share_all=share_all, aspect=aspect,
181 | label_mode=label_mode, cbar_mode=cbar_mode, cbar_location=cbar_location,
182 | cbar_pad=cbar_pad, cbar_size=cbar_size, axes_class=axes_class)
183 | # return figure and axes
184 | axes = tuple([ax for ax in grid]) # this is already flattened
185 | if lreduce and len(axes) == 1: axes = axes[0] # return a bare axes instance, if there is only one axes
186 | else:
187 | # create axes using normal subplot routine
188 | if axes_pad is None: axes_pad = 0.03
189 | wspace = hspace = axes_pad
190 | if share_all:
191 | sharex='all'; sharey='all'
192 | if sharex is True or sharex is None: sharex = 'col' # default
193 | if sharey is True or sharey is None: sharey = 'row'
194 | if sharex: hspace -= 0.015
195 | if sharey: wspace -= 0.015
196 | # create figure
197 | from matplotlib.pyplot import subplots
198 | # GridSpec: http://matplotlib.org/users/gridspec.html
199 | fig, axes = subplots(subplot[0], subplot[1], sharex=sharex, sharey=sharey,
200 | squeeze=lreduce, facecolor='white', figsize=figsize, FigureClass=MyFigure)
201 | # there is also a subplot_kw=dict() and fig_kw=dict()
202 | # just adjust margins
203 | margin_dict = dict(left=margins[0], bottom=margins[1], right=margins[0]+margins[2],
204 | top=margins[1]+margins[3], wspace=wspace, hspace=hspace)
205 | fig.subplots_adjust(**margin_dict)
206 | # add figure title
207 | if name is not None: fig.canvas.set_window_title(name) # window title
208 | if title is not None: fig.suptitle(title) # title on figure (printable)
209 | # return Figure/ImageGrid and tuple of axes
210 | #if AxesGrid: fig = grid # return ImageGrid instead of figure
211 | return fig, axes
212 |
213 |
214 | # function to adjust subplot parameters
215 | def updateSubplots(fig, mode='shift', **kwargs):
216 | ''' simple helper function to move (relocate), shift, or scale subplot margins '''
217 | warn('Deprecated function: use Figure or Axes class methods.')
218 | pos = fig.subplotpars
219 | margins = dict() # original plot margins
220 | margins['left'] = pos.left; margins['right'] = pos.right
221 | margins['top'] = pos.top; margins['bottom'] = pos.bottom
222 | margins['wspace'] = pos.wspace; margins['hspace'] = pos.hspace
223 | # update subplot margins
224 | if mode == 'move': margins.update(kwargs)
225 | else:
226 | for key,val in kwargs.items():
227 | if key in margins:
228 | if mode == 'shift': margins[key] += val
229 | elif mode == 'scale': margins[key] *= val
230 | # finally, actually update figure
231 | fig.subplots_adjust(**margins)
232 | # and now repair damage: restore axes
233 | for ax in fig.axes:
234 | if ax.get_title():
235 | pos = ax.get_position()
236 | pos = pos.from_bounds(x0=pos.x0, y0=pos.y0, width=pos.width, height=pos.height-0.03)
237 | ax.set_position(pos)
238 |
239 |
240 | ## add subplot/axes label
def addLabel(ax, label=None, loc=1, stroke=False, size=None, prop=None, **kwargs):
    ''' Add a label (e.g. "(a)", "(b)", ...) to one or more axes.

    Arguments:
      ax     : a single axes instance or a list/tuple of axes
      label  : label string, integer index into the alphabet, or None for
               automatic alphabetic labels; may also be a list (one per axes)
      loc    : legend-style location code (scalar or list)
      stroke : draw a white stroke around the text for contrast (scalar or list)
      size   : font size (only applied when prop is not given; defaults to 18)
      prop   : font property dict passed to AnchoredText
      kwargs : additional keyword arguments for AnchoredText

    Returns the list of AnchoredText artists that were added to the axes.
    '''
    from matplotlib.offsetbox import AnchoredText
    from matplotlib.patheffects import withStroke
    from string import ascii_lowercase  # fix: 'string.lowercase' is Python 2 only
    warn('Deprecated function: use Figure or Axes class methods.')
    # expand scalar arguments to one entry per axes
    if not isinstance(ax, (list, tuple)): ax = [ax]
    l = len(ax)
    if not isinstance(label, (list, tuple)): label = [label] * l
    else: label = list(label)  # copy, so the caller's sequence is not modified below
    if not isinstance(loc, (list, tuple)): loc = [loc] * l
    if not isinstance(stroke, (list, tuple)): stroke = [stroke] * l
    # settings
    if prop is None:
        # fix: an explicit 'size' argument used to be silently ignored
        prop = dict(size=size or 18)
    args = dict(pad=0., borderpad=1.5, frameon=False)
    args.update(kwargs)
    # cycle over axes
    at = []  # list of text artists
    for i in range(l):
        # automatic alphabetic labels: None -> running index, int -> explicit index
        if label[i] is None:
            label[i] = '(' + ascii_lowercase[i] + ')'
        elif isinstance(label[i], int):
            label[i] = '(' + ascii_lowercase[label[i]] + ')'
        # create label and add it to the axes
        at.append(AnchoredText(label[i], loc=loc[i], prop=prop, **args))
        ax[i].add_artist(at[i])
        if stroke[i]:
            # white outline to keep the label legible on busy backgrounds
            at[i].txt._text.set_path_effects([withStroke(foreground="w", linewidth=3)])
    return at
271 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/HTML_Colors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aerler/GeoPy/319016a7f9340b29fdc3c6d4b9df5fbe78378da8/src/plotting/colormaps/HTML_Colors.png
--------------------------------------------------------------------------------
/src/plotting/colormaps/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from .colormaps import get_color, cm
4 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/cbathy.dat:
--------------------------------------------------------------------------------
1 | 8, 241, 255
2 | 8, 237, 255
3 | 8, 234, 255
4 | 8, 231, 255
5 | 8, 228, 255
6 | 8, 225, 255
7 | 8, 222, 255
8 | 8, 218, 255
9 | 8, 215, 255
10 | 8, 212, 255
11 | 8, 209, 255
12 | 8, 204, 255
13 | 8, 199, 255
14 | 9, 194, 255
15 | 9, 190, 255
16 | 9, 185, 255
17 | 9, 180, 255
18 | 9, 175, 255
19 | 9, 171, 255
20 | 9, 166, 255
21 | 9, 161, 255
22 | 9, 160, 255
23 | 9, 158, 255
24 | 9, 157, 255
25 | 9, 156, 255
26 | 9, 154, 255
27 | 9, 153, 255
28 | 9, 152, 255
29 | 9, 150, 255
30 | 9, 149, 255
31 | 9, 148, 255
32 | 9, 146, 255
33 | 9, 145, 255
34 | 9, 144, 255
35 | 9, 142, 255
36 | 9, 141, 255
37 | 10, 140, 255
38 | 10, 134, 255
39 | 10, 129, 255
40 | 10, 124, 255
41 | 10, 119, 255
42 | 10, 114, 255
43 | 10, 109, 255
44 | 10, 103, 255
45 | 10, 98, 255
46 | 10, 93, 255
47 | 10, 88, 255
48 | 11, 83, 255
49 | 11, 77, 255
50 | 10, 72, 247
51 | 9, 67, 240
52 | 8, 62, 232
53 | 8, 57, 225
54 | 7, 51, 217
55 | 6, 46, 210
56 | 5, 41, 202
57 | 5, 36, 195
58 | 4, 31, 188
59 | 3, 25, 180
60 | 2, 20, 173
61 | 2, 15, 165
62 | 1, 10, 158
63 | 0, 5, 150
64 | 0, 0, 143
65 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/coolavhrrmap.dat:
--------------------------------------------------------------------------------
1 | 255, 45, 142
2 | 255, 41, 155
3 | 255, 37, 168
4 | 255, 34, 181
5 | 255, 30, 194
6 | 255, 26, 206
7 | 255, 23, 219
8 | 255, 19, 232
9 | 255, 30, 194
10 | 255, 19, 232
11 | 250, 30, 194
12 | 250, 19, 232
13 | 245, 30, 194
14 | 245, 19, 232
15 | 237, 30, 194
16 | 237, 19, 232
17 | 212, 30, 194
18 | 212, 19, 232
19 | 187, 30, 194
20 | 187, 19, 232
21 | 162, 30, 194
22 | 162, 19, 232
23 | 137, 30, 194
24 | 137, 19, 232
25 | 112, 30, 194
26 | 112, 19, 232
27 | 87, 30, 194
28 | 87, 19, 232
29 | 62, 30, 194
30 | 62, 19, 232
31 | 0, 0, 247
32 | 0, 0, 251
33 | 0, 0, 255
34 | 0, 0, 255
35 | 0, 3, 255
36 | 0, 7, 255
37 | 0, 11, 255
38 | 0, 15, 255
39 | 0, 19, 255
40 | 0, 23, 255
41 | 0, 27, 255
42 | 0, 31, 255
43 | 0, 35, 255
44 | 0, 39, 255
45 | 0, 43, 255
46 | 0, 47, 255
47 | 0, 51, 255
48 | 0, 55, 255
49 | 0, 59, 255
50 | 0, 63, 255
51 | 0, 67, 255
52 | 0, 71, 255
53 | 0, 75, 255
54 | 0, 79, 255
55 | 0, 83, 255
56 | 0, 87, 255
57 | 0, 91, 255
58 | 0, 95, 255
59 | 0, 99, 255
60 | 0, 103, 255
61 | 0, 107, 255
62 | 0, 111, 255
63 | 0, 115, 255
64 | 0, 119, 255
65 | 0, 123, 255
66 | 0, 127, 255
67 | 0, 131, 255
68 | 0, 135, 255
69 | 0, 139, 255
70 | 0, 143, 255
71 | 0, 147, 255
72 | 0, 151, 255
73 | 0, 155, 255
74 | 0, 159, 255
75 | 0, 163, 255
76 | 0, 167, 255
77 | 0, 171, 255
78 | 0, 175, 255
79 | 0, 179, 255
80 | 0, 183, 255
81 | 0, 187, 255
82 | 0, 191, 255
83 | 0, 195, 255
84 | 0, 199, 255
85 | 0, 203, 255
86 | 0, 207, 255
87 | 0, 211, 255
88 | 0, 215, 255
89 | 0, 219, 255
90 | 0, 223, 255
91 | 0, 227, 255
92 | 0, 231, 255
93 | 0, 235, 255
94 | 0, 239, 255
95 | 0, 243, 255
96 | 0, 247, 255
97 | 0, 251, 255
98 | 0, 255, 255
99 | 0, 255, 255
100 | 3, 255, 251
101 | 7, 255, 247
102 | 11, 255, 243
103 | 15, 255, 239
104 | 19, 255, 235
105 | 23, 255, 231
106 | 27, 255, 227
107 | 31, 255, 223
108 | 35, 255, 219
109 | 39, 255, 215
110 | 43, 255, 211
111 | 47, 255, 207
112 | 51, 255, 203
113 | 55, 255, 199
114 | 59, 255, 195
115 | 63, 255, 191
116 | 67, 255, 187
117 | 71, 255, 183
118 | 75, 255, 179
119 | 79, 255, 175
120 | 83, 255, 171
121 | 87, 255, 167
122 | 91, 255, 163
123 | 95, 255, 159
124 | 99, 255, 155
125 | 103, 255, 151
126 | 107, 255, 147
127 | 111, 255, 143
128 | 115, 255, 139
129 | 119, 255, 135
130 | 123, 255, 131
131 | 127, 255, 127
132 | 131, 255, 123
133 | 135, 255, 119
134 | 139, 255, 115
135 | 143, 255, 111
136 | 147, 255, 107
137 | 151, 255, 103
138 | 155, 255, 99
139 | 159, 255, 95
140 | 163, 255, 91
141 | 167, 255, 87
142 | 171, 255, 83
143 | 175, 255, 79
144 | 179, 255, 75
145 | 183, 255, 71
146 | 187, 255, 67
147 | 191, 255, 63
148 | 195, 255, 59
149 | 199, 255, 55
150 | 203, 255, 51
151 | 207, 255, 47
152 | 211, 255, 43
153 | 215, 255, 39
154 | 219, 255, 35
155 | 223, 255, 31
156 | 227, 255, 27
157 | 231, 255, 23
158 | 235, 255, 19
159 | 239, 255, 15
160 | 243, 255, 11
161 | 247, 255, 7
162 | 251, 255, 3
163 | 255, 255, 0
164 | 255, 251, 0
165 | 255, 247, 0
166 | 255, 243, 0
167 | 255, 239, 0
168 | 255, 235, 0
169 | 255, 231, 0
170 | 255, 227, 0
171 | 255, 223, 0
172 | 255, 219, 0
173 | 255, 215, 0
174 | 255, 211, 0
175 | 255, 207, 0
176 | 255, 203, 0
177 | 255, 199, 0
178 | 255, 195, 0
179 | 255, 191, 0
180 | 255, 187, 0
181 | 255, 183, 0
182 | 255, 179, 0
183 | 255, 175, 0
184 | 255, 171, 0
185 | 255, 167, 0
186 | 255, 163, 0
187 | 255, 159, 0
188 | 255, 155, 0
189 | 255, 151, 0
190 | 255, 147, 0
191 | 255, 143, 0
192 | 255, 139, 0
193 | 255, 135, 0
194 | 255, 131, 0
195 | 255, 127, 0
196 | 255, 123, 0
197 | 255, 119, 0
198 | 255, 115, 0
199 | 255, 111, 0
200 | 255, 107, 0
201 | 255, 103, 0
202 | 255, 99, 0
203 | 255, 95, 0
204 | 255, 91, 0
205 | 255, 87, 0
206 | 255, 83, 0
207 | 255, 79, 0
208 | 255, 75, 0
209 | 255, 71, 0
210 | 255, 67, 0
211 | 255, 63, 0
212 | 255, 59, 0
213 | 255, 55, 0
214 | 255, 51, 0
215 | 255, 47, 0
216 | 255, 43, 0
217 | 255, 39, 0
218 | 255, 35, 0
219 | 255, 31, 0
220 | 255, 27, 0
221 | 255, 23, 0
222 | 255, 19, 0
223 | 255, 15, 0
224 | 255, 11, 0
225 | 255, 7, 0
226 | 255, 3, 0
227 | 255, 0, 0
228 | 250, 0, 0
229 | 246, 0, 0
230 | 241, 0, 0
231 | 237, 0, 0
232 | 233, 0, 0
233 | 228, 0, 0
234 | 224, 0, 0
235 | 219, 0, 0
236 | 215, 0, 0
237 | 211, 0, 0
238 | 206, 0, 0
239 | 202, 0, 0
240 | 197, 0, 0
241 | 193, 0, 0
242 | 189, 0, 0
243 | 184, 0, 0
244 | 180, 0, 0
245 | 175, 0, 0
246 | 171, 0, 0
247 | 167, 0, 0
248 | 162, 0, 0
249 | 158, 0, 0
250 | 153, 0, 0
251 | 149, 0, 0
252 | 145, 0, 0
253 | 140, 0, 0
254 | 136, 0, 0
255 | 131, 0, 0
256 | 131, 0, 0
257 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/ctopo.dat:
--------------------------------------------------------------------------------
1 | 255, 253, 145
2 | 251, 251, 143
3 | 247, 249, 140
4 | 243, 246, 138
5 | 240, 244, 136
6 | 236, 242, 134
7 | 232, 240, 131
8 | 228, 238, 129
9 | 225, 236, 127
10 | 221, 234, 125
11 | 217, 232, 122
12 | 213, 230, 120
13 | 210, 227, 118
14 | 206, 225, 116
15 | 202, 223, 114
16 | 198, 221, 111
17 | 195, 219, 109
18 | 191, 217, 107
19 | 187, 215, 105
20 | 183, 213, 102
21 | 180, 211, 100
22 | 176, 208, 98
23 | 172, 206, 96
24 | 168, 204, 93
25 | 165, 202, 91
26 | 161, 200, 89
27 | 157, 198, 87
28 | 153, 196, 84
29 | 150, 194, 82
30 | 146, 192, 80
31 | 142, 190, 78
32 | 138, 187, 76
33 | 135, 185, 73
34 | 131, 183, 71
35 | 127, 181, 69
36 | 124, 179, 67
37 | 120, 177, 64
38 | 116, 175, 62
39 | 112, 173, 60
40 | 109, 171, 58
41 | 105, 168, 55
42 | 101, 166, 53
43 | 97, 164, 51
44 | 94, 162, 49
45 | 90, 160, 46
46 | 86, 158, 44
47 | 82, 156, 42
48 | 79, 154, 40
49 | 75, 152, 38
50 | 71, 149, 35
51 | 67, 147, 33
52 | 64, 145, 31
53 | 60, 143, 29
54 | 56, 141, 26
55 | 52, 139, 24
56 | 49, 137, 22
57 | 45, 135, 20
58 | 41, 133, 17
59 | 37, 131, 15
60 | 34, 128, 13
61 | 30, 126, 11
62 | 26, 124, 8
63 | 22, 122, 6
64 | 19, 120, 4
65 | 15, 118, 2
66 | 11, 116, 0
67 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/odv.dat:
--------------------------------------------------------------------------------
1 | 238, 198, 239
2 | 234, 176, 233
3 | 230, 146, 226
4 | 226, 121, 220
5 | 222, 100, 214
6 | 218, 79, 208
7 | 215, 72, 204
8 | 211, 81, 198
9 | 207, 105, 193
10 | 202, 124, 191
11 | 197, 143, 191
12 | 188, 154, 194
13 | 176, 156, 201
14 | 158, 150, 211
15 | 138, 134, 223
16 | 114, 112, 233
17 | 89, 86, 245
18 | 70, 67, 253
19 | 51, 60, 255
20 | 37, 62, 252
21 | 27, 68, 246
22 | 24, 80, 240
23 | 26, 96, 236
24 | 33, 114, 235
25 | 43, 136, 237
26 | 55, 153, 242
27 | 69, 175, 248
28 | 83, 192, 252
29 | 95, 206, 252
30 | 106, 222, 247
31 | 114, 232, 239
32 | 119, 239, 228
33 | 120, 241, 218
34 | 117, 240, 207
35 | 111, 236, 194
36 | 102, 227, 179
37 | 91, 218, 162
38 | 78, 208, 146
39 | 64, 199, 131
40 | 51, 193, 118
41 | 38, 190, 106
42 | 27, 191, 96
43 | 18, 196, 87
44 | 11, 204, 78
45 | 5, 215, 67
46 | 3, 223, 60
47 | 4, 228, 53
48 | 7, 231, 48
49 | 13, 230, 44
50 | 21, 226, 42
51 | 32, 220, 42
52 | 43, 214, 44
53 | 55, 208, 48
54 | 67, 203, 55
55 | 79, 200, 62
56 | 90, 198, 71
57 | 101, 198, 79
58 | 111, 200, 87
59 | 121, 204, 94
60 | 131, 210, 100
61 | 141, 217, 103
62 | 151, 224, 104
63 | 163, 232, 103
64 | 174, 237, 100
65 | 187, 241, 94
66 | 200, 241, 87
67 | 212, 238, 79
68 | 226, 230, 68
69 | 237, 220, 59
70 | 244, 209, 50
71 | 249, 201, 43
72 | 252, 193, 37
73 | 253, 186, 31
74 | 254, 181, 27
75 | 254, 173, 23
76 | 255, 167, 19
77 | 255, 161, 16
78 | 254, 154, 13
79 | 253, 147, 10
80 | 250, 141, 7
81 | 246, 135, 5
82 | 241, 129, 4
83 | 235, 122, 2
84 | 228, 114, 1
85 | 223, 106, 0
86 | 220, 97, 0
87 | 220, 88, 0
88 | 224, 77, 0
89 | 231, 68, 0
90 | 240, 57, 2
91 | 248, 43, 3
92 | 252, 30, 5
93 | 254, 18, 7
94 | 251, 7, 10
95 | 246, 1, 13
96 | 237, 0, 16
97 | 227, 0, 20
98 | 220, 1, 24
99 | 215, 4, 30
100 | 213, 8, 35
101 | 213, 13, 42
102 | 214, 21, 49
103 | 217, 31, 57
104 | 221, 44, 65
105 | 225, 59, 73
106 | 230, 76, 81
107 | 236, 96, 89
108 | 243, 117, 98
109 | 249, 139, 109
110 | 254, 161, 120
111 | 255, 182, 132
112 | 255, 197, 143
113 | 255, 199, 144
114 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/redblue_dark.dat:
--------------------------------------------------------------------------------
1 | 0, 0, 255
2 | 0, 0, 252
3 | 0, 0, 250
4 | 0, 0, 248
5 | 0, 0, 246
6 | 0, 0, 244
7 | 0, 0, 242
8 | 0, 0, 240
9 | 0, 0, 238
10 | 0, 0, 236
11 | 0, 0, 234
12 | 0, 0, 232
13 | 0, 0, 230
14 | 0, 0, 228
15 | 0, 0, 226
16 | 0, 0, 224
17 | 0, 0, 222
18 | 0, 0, 220
19 | 0, 0, 218
20 | 0, 0, 216
21 | 0, 0, 214
22 | 0, 0, 212
23 | 0, 0, 210
24 | 0, 0, 208
25 | 0, 0, 206
26 | 0, 0, 204
27 | 0, 0, 202
28 | 0, 0, 200
29 | 0, 0, 198
30 | 0, 0, 196
31 | 0, 0, 194
32 | 0, 0, 192
33 | 0, 0, 190
34 | 0, 0, 188
35 | 0, 0, 186
36 | 0, 0, 184
37 | 0, 0, 182
38 | 0, 0, 180
39 | 0, 0, 178
40 | 0, 0, 176
41 | 0, 0, 174
42 | 0, 0, 172
43 | 0, 0, 170
44 | 0, 0, 167
45 | 0, 0, 165
46 | 0, 0, 163
47 | 0, 0, 161
48 | 0, 0, 159
49 | 0, 0, 157
50 | 0, 0, 155
51 | 0, 0, 153
52 | 0, 0, 151
53 | 0, 0, 149
54 | 0, 0, 147
55 | 0, 0, 145
56 | 0, 0, 143
57 | 0, 0, 141
58 | 0, 0, 139
59 | 0, 0, 137
60 | 0, 0, 135
61 | 0, 0, 133
62 | 0, 0, 131
63 | 0, 0, 129
64 | 0, 0, 127
65 | 0, 0, 125
66 | 0, 0, 123
67 | 0, 0, 121
68 | 0, 0, 119
69 | 0, 0, 117
70 | 0, 0, 115
71 | 0, 0, 113
72 | 0, 0, 111
73 | 0, 0, 109
74 | 0, 0, 107
75 | 0, 0, 105
76 | 0, 0, 103
77 | 0, 0, 101
78 | 0, 0, 99
79 | 0, 0, 97
80 | 0, 0, 95
81 | 0, 0, 93
82 | 0, 0, 91
83 | 0, 0, 89
84 | 0, 0, 87
85 | 0, 0, 85
86 | 0, 0, 82
87 | 0, 0, 80
88 | 0, 0, 78
89 | 0, 0, 76
90 | 0, 0, 74
91 | 0, 0, 72
92 | 0, 0, 70
93 | 0, 0, 68
94 | 0, 0, 66
95 | 0, 0, 64
96 | 0, 0, 62
97 | 0, 0, 60
98 | 0, 0, 58
99 | 0, 0, 56
100 | 0, 0, 54
101 | 0, 0, 52
102 | 0, 0, 50
103 | 0, 0, 48
104 | 0, 0, 46
105 | 0, 0, 44
106 | 0, 0, 42
107 | 0, 0, 40
108 | 0, 0, 38
109 | 0, 0, 36
110 | 0, 0, 34
111 | 0, 0, 32
112 | 0, 0, 30
113 | 0, 0, 28
114 | 0, 0, 26
115 | 0, 0, 24
116 | 0, 0, 22
117 | 0, 0, 20
118 | 0, 0, 18
119 | 0, 0, 16
120 | 0, 0, 14
121 | 0, 0, 12
122 | 0, 0, 10
123 | 0, 0, 8
124 | 0, 0, 6
125 | 0, 0, 4
126 | 0, 0, 2
127 | 0, 0, 0
128 | 0, 0, 0
129 | 2, 0, 0
130 | 4, 0, 0
131 | 6, 0, 0
132 | 8, 0, 0
133 | 10, 0, 0
134 | 12, 0, 0
135 | 14, 0, 0
136 | 16, 0, 0
137 | 18, 0, 0
138 | 20, 0, 0
139 | 22, 0, 0
140 | 24, 0, 0
141 | 26, 0, 0
142 | 28, 0, 0
143 | 30, 0, 0
144 | 32, 0, 0
145 | 34, 0, 0
146 | 36, 0, 0
147 | 38, 0, 0
148 | 40, 0, 0
149 | 42, 0, 0
150 | 44, 0, 0
151 | 46, 0, 0
152 | 48, 0, 0
153 | 50, 0, 0
154 | 52, 0, 0
155 | 54, 0, 0
156 | 56, 0, 0
157 | 58, 0, 0
158 | 60, 0, 0
159 | 62, 0, 0
160 | 64, 0, 0
161 | 66, 0, 0
162 | 68, 0, 0
163 | 70, 0, 0
164 | 72, 0, 0
165 | 74, 0, 0
166 | 76, 0, 0
167 | 78, 0, 0
168 | 80, 0, 0
169 | 82, 0, 0
170 | 85, 0, 0
171 | 87, 0, 0
172 | 89, 0, 0
173 | 91, 0, 0
174 | 93, 0, 0
175 | 95, 0, 0
176 | 97, 0, 0
177 | 99, 0, 0
178 | 101, 0, 0
179 | 103, 0, 0
180 | 105, 0, 0
181 | 107, 0, 0
182 | 109, 0, 0
183 | 111, 0, 0
184 | 113, 0, 0
185 | 115, 0, 0
186 | 117, 0, 0
187 | 119, 0, 0
188 | 121, 0, 0
189 | 123, 0, 0
190 | 125, 0, 0
191 | 127, 0, 0
192 | 129, 0, 0
193 | 131, 0, 0
194 | 133, 0, 0
195 | 135, 0, 0
196 | 137, 0, 0
197 | 139, 0, 0
198 | 141, 0, 0
199 | 143, 0, 0
200 | 145, 0, 0
201 | 147, 0, 0
202 | 149, 0, 0
203 | 151, 0, 0
204 | 153, 0, 0
205 | 155, 0, 0
206 | 157, 0, 0
207 | 159, 0, 0
208 | 161, 0, 0
209 | 163, 0, 0
210 | 165, 0, 0
211 | 167, 0, 0
212 | 170, 0, 0
213 | 172, 0, 0
214 | 174, 0, 0
215 | 176, 0, 0
216 | 178, 0, 0
217 | 180, 0, 0
218 | 182, 0, 0
219 | 184, 0, 0
220 | 186, 0, 0
221 | 188, 0, 0
222 | 190, 0, 0
223 | 192, 0, 0
224 | 194, 0, 0
225 | 196, 0, 0
226 | 198, 0, 0
227 | 200, 0, 0
228 | 202, 0, 0
229 | 204, 0, 0
230 | 206, 0, 0
231 | 208, 0, 0
232 | 210, 0, 0
233 | 212, 0, 0
234 | 214, 0, 0
235 | 216, 0, 0
236 | 218, 0, 0
237 | 220, 0, 0
238 | 222, 0, 0
239 | 224, 0, 0
240 | 226, 0, 0
241 | 228, 0, 0
242 | 230, 0, 0
243 | 232, 0, 0
244 | 234, 0, 0
245 | 236, 0, 0
246 | 238, 0, 0
247 | 240, 0, 0
248 | 242, 0, 0
249 | 244, 0, 0
250 | 246, 0, 0
251 | 248, 0, 0
252 | 250, 0, 0
253 | 252, 0, 0
254 | 255, 0, 0
255 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/redblue_light.dat:
--------------------------------------------------------------------------------
1 | 127, 0, 0
2 | 136, 0, 0
3 | 145, 0, 0
4 | 154, 0, 0
5 | 163, 0, 0
6 | 173, 0, 0
7 | 182, 0, 0
8 | 191, 0, 0
9 | 200, 0, 0
10 | 209, 0, 0
11 | 218, 0, 0
12 | 227, 0, 0
13 | 236, 0, 0
14 | 245, 0, 0
15 | 255, 0, 0
16 | 255, 0, 0
17 | 255, 8, 8
18 | 255, 16, 16
19 | 255, 24, 24
20 | 255, 32, 32
21 | 255, 41, 41
22 | 255, 49, 49
23 | 255, 57, 57
24 | 255, 65, 65
25 | 255, 74, 74
26 | 255, 82, 82
27 | 255, 90, 90
28 | 255, 98, 98
29 | 255, 106, 106
30 | 255, 115, 115
31 | 255, 123, 123
32 | 255, 131, 131
33 | 255, 139, 139
34 | 255, 148, 148
35 | 255, 156, 156
36 | 255, 164, 164
37 | 255, 172, 172
38 | 255, 180, 180
39 | 255, 189, 189
40 | 255, 197, 197
41 | 255, 205, 205
42 | 255, 213, 213
43 | 255, 222, 222
44 | 255, 230, 230
45 | 255, 238, 238
46 | 255, 246, 246
47 | 255, 255, 255
48 | 255, 255, 255
49 | 246, 246, 255
50 | 238, 238, 255
51 | 230, 230, 255
52 | 222, 222, 255
53 | 213, 213, 255
54 | 205, 205, 255
55 | 197, 197, 255
56 | 189, 189, 255
57 | 180, 180, 255
58 | 172, 172, 255
59 | 164, 164, 255
60 | 156, 156, 255
61 | 148, 148, 255
62 | 139, 139, 255
63 | 131, 131, 255
64 | 123, 123, 255
65 | 115, 115, 255
66 | 106, 106, 255
67 | 98, 98, 255
68 | 90, 90, 255
69 | 82, 82, 255
70 | 74, 74, 255
71 | 65, 65, 255
72 | 57, 57, 255
73 | 49, 49, 255
74 | 41, 41, 255
75 | 32, 32, 255
76 | 24, 24, 255
77 | 16, 16, 255
78 | 8, 8, 255
79 | 0, 0, 255
80 | 0, 0, 255
81 | 0, 0, 245
82 | 0, 0, 236
83 | 0, 0, 227
84 | 0, 0, 218
85 | 0, 0, 209
86 | 0, 0, 200
87 | 0, 0, 191
88 | 0, 0, 182
89 | 0, 0, 173
90 | 0, 0, 163
91 | 0, 0, 154
92 | 0, 0, 145
93 | 0, 0, 136
94 | 0, 0, 127
95 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/redgreen.dat:
--------------------------------------------------------------------------------
1 | 127, 255, 127
2 | 128, 253, 126
3 | 128, 252, 126
4 | 129, 251, 125
5 | 129, 250, 125
6 | 130, 249, 124
7 | 130, 248, 124
8 | 131, 247, 123
9 | 131, 246, 123
10 | 132, 245, 122
11 | 132, 244, 122
12 | 133, 243, 121
13 | 133, 242, 121
14 | 134, 241, 120
15 | 134, 240, 120
16 | 135, 239, 119
17 | 135, 238, 119
18 | 136, 237, 118
19 | 136, 236, 118
20 | 137, 235, 117
21 | 137, 234, 117
22 | 138, 233, 116
23 | 138, 232, 116
24 | 139, 231, 115
25 | 139, 230, 115
26 | 140, 229, 114
27 | 140, 228, 114
28 | 141, 227, 113
29 | 141, 226, 113
30 | 142, 225, 112
31 | 142, 224, 112
32 | 143, 223, 111
33 | 143, 222, 111
34 | 144, 221, 110
35 | 144, 220, 110
36 | 145, 219, 109
37 | 145, 218, 109
38 | 146, 217, 108
39 | 146, 216, 108
40 | 147, 215, 107
41 | 147, 214, 107
42 | 148, 213, 106
43 | 148, 212, 106
44 | 149, 211, 105
45 | 149, 210, 105
46 | 150, 209, 104
47 | 150, 208, 104
48 | 151, 207, 103
49 | 151, 206, 103
50 | 152, 205, 102
51 | 152, 204, 102
52 | 153, 203, 101
53 | 153, 202, 101
54 | 154, 201, 100
55 | 154, 200, 100
56 | 155, 199, 99
57 | 155, 198, 99
58 | 156, 197, 98
59 | 156, 196, 98
60 | 157, 195, 97
61 | 157, 194, 97
62 | 158, 193, 96
63 | 158, 192, 96
64 | 159, 191, 95
65 | 159, 190, 95
66 | 160, 189, 94
67 | 160, 188, 94
68 | 161, 187, 93
69 | 161, 186, 93
70 | 162, 185, 92
71 | 162, 184, 92
72 | 163, 183, 91
73 | 163, 182, 91
74 | 164, 181, 90
75 | 164, 180, 90
76 | 165, 179, 89
77 | 165, 178, 89
78 | 166, 177, 88
79 | 166, 176, 88
80 | 167, 175, 87
81 | 167, 174, 87
82 | 168, 173, 86
83 | 168, 172, 86
84 | 169, 171, 85
85 | 169, 170, 85
86 | 170, 169, 84
87 | 170, 168, 84
88 | 171, 167, 83
89 | 171, 166, 83
90 | 172, 165, 82
91 | 172, 164, 82
92 | 173, 163, 81
93 | 173, 162, 81
94 | 174, 161, 80
95 | 174, 160, 80
96 | 175, 159, 79
97 | 175, 158, 79
98 | 176, 157, 78
99 | 176, 156, 78
100 | 177, 155, 77
101 | 177, 154, 77
102 | 178, 153, 76
103 | 178, 152, 76
104 | 179, 151, 75
105 | 179, 150, 75
106 | 180, 149, 74
107 | 180, 148, 74
108 | 181, 147, 73
109 | 181, 146, 73
110 | 182, 145, 72
111 | 182, 144, 72
112 | 183, 143, 71
113 | 183, 142, 71
114 | 184, 141, 70
115 | 184, 140, 70
116 | 185, 139, 69
117 | 185, 138, 69
118 | 186, 137, 68
119 | 186, 136, 68
120 | 187, 135, 67
121 | 187, 134, 67
122 | 188, 133, 66
123 | 188, 132, 66
124 | 189, 131, 65
125 | 189, 130, 65
126 | 190, 129, 64
127 | 190, 128, 64
128 | 191, 127, 63
129 | 191, 126, 63
130 | 192, 125, 62
131 | 192, 124, 62
132 | 193, 123, 61
133 | 193, 122, 61
134 | 194, 121, 60
135 | 194, 120, 60
136 | 195, 119, 59
137 | 195, 118, 59
138 | 196, 117, 58
139 | 196, 116, 58
140 | 197, 115, 57
141 | 197, 114, 57
142 | 198, 113, 56
143 | 198, 112, 56
144 | 199, 111, 55
145 | 199, 110, 55
146 | 200, 109, 54
147 | 200, 108, 54
148 | 201, 107, 53
149 | 201, 106, 53
150 | 202, 105, 52
151 | 202, 104, 52
152 | 203, 103, 51
153 | 203, 102, 51
154 | 204, 101, 50
155 | 204, 100, 50
156 | 205, 99, 49
157 | 205, 98, 49
158 | 206, 97, 48
159 | 206, 96, 48
160 | 207, 95, 47
161 | 207, 94, 47
162 | 208, 93, 46
163 | 208, 92, 46
164 | 209, 91, 45
165 | 209, 90, 45
166 | 210, 89, 44
167 | 210, 88, 44
168 | 211, 87, 43
169 | 211, 86, 43
170 | 212, 85, 42
171 | 212, 84, 42
172 | 213, 83, 41
173 | 213, 82, 41
174 | 214, 81, 40
175 | 214, 80, 40
176 | 215, 79, 39
177 | 215, 78, 39
178 | 216, 77, 38
179 | 216, 76, 38
180 | 217, 75, 37
181 | 217, 74, 37
182 | 218, 73, 36
183 | 218, 72, 36
184 | 219, 71, 35
185 | 219, 70, 35
186 | 220, 69, 34
187 | 220, 68, 34
188 | 221, 67, 33
189 | 221, 66, 33
190 | 222, 65, 32
191 | 222, 64, 32
192 | 223, 63, 31
193 | 223, 62, 31
194 | 224, 61, 30
195 | 224, 60, 30
196 | 225, 59, 29
197 | 225, 58, 29
198 | 226, 57, 28
199 | 226, 56, 28
200 | 227, 55, 27
201 | 227, 54, 27
202 | 228, 53, 26
203 | 228, 52, 26
204 | 229, 51, 25
205 | 229, 50, 25
206 | 230, 49, 24
207 | 230, 48, 24
208 | 231, 47, 23
209 | 231, 46, 23
210 | 232, 45, 22
211 | 232, 44, 22
212 | 233, 43, 21
213 | 233, 42, 21
214 | 234, 41, 20
215 | 234, 40, 20
216 | 235, 39, 19
217 | 235, 38, 19
218 | 236, 37, 18
219 | 236, 36, 18
220 | 237, 35, 17
221 | 237, 34, 17
222 | 238, 33, 16
223 | 238, 32, 16
224 | 239, 31, 15
225 | 239, 30, 15
226 | 240, 29, 14
227 | 240, 28, 14
228 | 241, 27, 13
229 | 241, 26, 13
230 | 242, 25, 12
231 | 242, 24, 12
232 | 243, 23, 11
233 | 243, 22, 11
234 | 244, 21, 10
235 | 244, 20, 10
236 | 245, 19, 9
237 | 245, 18, 9
238 | 246, 17, 8
239 | 246, 16, 8
240 | 247, 15, 7
241 | 247, 14, 7
242 | 248, 13, 6
243 | 248, 12, 6
244 | 249, 11, 5
245 | 249, 10, 5
246 | 250, 9, 4
247 | 250, 8, 4
248 | 251, 7, 3
249 | 251, 6, 3
250 | 252, 5, 2
251 | 252, 4, 2
252 | 253, 3, 1
253 | 253, 2, 1
254 | 254, 1, 0
255 | 255, 0, 0
256 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_data/rscolmap.dat:
--------------------------------------------------------------------------------
1 | 0, 0, 123
2 | 0, 0, 124
3 | 0, 0, 125
4 | 0, 0, 126
5 | 0, 0, 128
6 | 0, 0, 129
7 | 0, 0, 131
8 | 0, 0, 132
9 | 0, 0, 134
10 | 0, 0, 135
11 | 0, 0, 137
12 | 0, 0, 139
13 | 0, 0, 140
14 | 0, 0, 142
15 | 0, 0, 144
16 | 0, 3, 146
17 | 0, 6, 148
18 | 0, 9, 150
19 | 0, 12, 152
20 | 0, 15, 154
21 | 0, 18, 156
22 | 0, 21, 158
23 | 0, 24, 160
24 | 0, 27, 162
25 | 0, 30, 165
26 | 0, 33, 167
27 | 0, 36, 170
28 | 0, 39, 172
29 | 0, 42, 175
30 | 0, 45, 177
31 | 0, 49, 180
32 | 0, 52, 183
33 | 0, 56, 186
34 | 0, 60, 190
35 | 0, 64, 193
36 | 0, 68, 196
37 | 0, 72, 200
38 | 0, 76, 203
39 | 0, 80, 207
40 | 0, 83, 210
41 | 0, 87, 214
42 | 0, 91, 217
43 | 0, 95, 221
44 | 0, 98, 224
45 | 0, 102, 228
46 | 0, 106, 231
47 | 0, 110, 235
48 | 0, 114, 237
49 | 0, 119, 239
50 | 0, 124, 241
51 | 0, 129, 244
52 | 0, 133, 246
53 | 0, 138, 248
54 | 0, 143, 250
55 | 0, 148, 253
56 | 0, 151, 253
57 | 0, 155, 253
58 | 0, 158, 253
59 | 0, 162, 254
60 | 0, 166, 254
61 | 0, 169, 254
62 | 0, 173, 254
63 | 0, 177, 255
64 | 0, 180, 254
65 | 0, 183, 252
66 | 0, 186, 251
67 | 0, 190, 249
68 | 0, 193, 247
69 | 0, 196, 246
70 | 0, 199, 244
71 | 0, 203, 242
72 | 0, 204, 237
73 | 0, 205, 232
74 | 0, 207, 227
75 | 0, 208, 222
76 | 0, 209, 217
77 | 0, 211, 212
78 | 0, 212, 207
79 | 0, 214, 202
80 | 0, 213, 198
81 | 0, 212, 193
82 | 0, 211, 188
83 | 0, 209, 183
84 | 0, 208, 179
85 | 0, 207, 174
86 | 0, 206, 169
87 | 0, 204, 164
88 | 0, 203, 160
89 | 0, 202, 155
90 | 0, 200, 150
91 | 0, 199, 145
92 | 0, 198, 141
93 | 0, 196, 136
94 | 0, 195, 131
95 | 0, 193, 126
96 | 0, 193, 122
97 | 0, 192, 118
98 | 0, 191, 113
99 | 0, 190, 109
100 | 0, 189, 105
101 | 0, 188, 100
102 | 0, 187, 96
103 | 0, 186, 91
104 | 0, 186, 88
105 | 0, 186, 85
106 | 0, 186, 81
107 | 0, 186, 78
108 | 0, 186, 75
109 | 0, 186, 71
110 | 0, 186, 68
111 | 0, 187, 64
112 | 0, 188, 61
113 | 0, 189, 58
114 | 0, 190, 55
115 | 0, 191, 52
116 | 0, 192, 49
117 | 0, 193, 46
118 | 0, 194, 43
119 | 0, 196, 40
120 | 0, 198, 38
121 | 0, 200, 35
122 | 0, 202, 32
123 | 0, 204, 29
124 | 0, 206, 27
125 | 0, 208, 24
126 | 0, 210, 21
127 | 0, 213, 18
128 | 0, 214, 16
129 | 0, 216, 14
130 | 0, 218, 12
131 | 0, 220, 9
132 | 0, 221, 7
133 | 0, 223, 5
134 | 0, 225, 3
135 | 0, 227, 0
136 | 15, 228, 0
137 | 30, 230, 0
138 | 45, 232, 0
139 | 60, 234, 0
140 | 75, 236, 0
141 | 90, 238, 0
142 | 105, 240, 0
143 | 120, 242, 0
144 | 128, 243, 0
145 | 136, 245, 0
146 | 144, 246, 0
147 | 152, 248, 0
148 | 160, 249, 0
149 | 168, 251, 0
150 | 176, 252, 0
151 | 184, 254, 0
152 | 188, 254, 0
153 | 192, 254, 0
154 | 196, 254, 0
155 | 200, 254, 0
156 | 204, 254, 0
157 | 208, 254, 0
158 | 212, 254, 0
159 | 217, 254, 0
160 | 219, 253, 0
161 | 221, 251, 0
162 | 223, 250, 0
163 | 226, 248, 0
164 | 228, 247, 0
165 | 230, 245, 0
166 | 232, 244, 0
167 | 235, 242, 0
168 | 236, 240, 0
169 | 238, 238, 0
170 | 239, 236, 0
171 | 241, 233, 0
172 | 243, 231, 0
173 | 244, 229, 0
174 | 246, 227, 0
175 | 248, 224, 0
176 | 248, 222, 0
177 | 249, 220, 0
178 | 250, 218, 0
179 | 251, 216, 0
180 | 251, 214, 0
181 | 252, 212, 0
182 | 253, 210, 0
183 | 254, 208, 0
184 | 254, 206, 0
185 | 254, 203, 0
186 | 254, 201, 0
187 | 254, 198, 0
188 | 254, 196, 0
189 | 254, 193, 0
190 | 254, 191, 0
191 | 255, 188, 0
192 | 255, 186, 0
193 | 255, 183, 0
194 | 255, 180, 0
195 | 254, 177, 0
196 | 254, 175, 0
197 | 254, 172, 0
198 | 254, 169, 0
199 | 253, 166, 0
200 | 252, 164, 0
201 | 251, 161, 0
202 | 250, 159, 0
203 | 249, 156, 0
204 | 248, 153, 0
205 | 247, 151, 0
206 | 246, 148, 0
207 | 244, 145, 0
208 | 243, 143, 0
209 | 241, 140, 0
210 | 240, 138, 0
211 | 238, 135, 0
212 | 237, 132, 0
213 | 235, 130, 0
214 | 234, 127, 0
215 | 232, 124, 0
216 | 231, 122, 0
217 | 229, 119, 0
218 | 228, 117, 0
219 | 226, 114, 0
220 | 224, 111, 0
221 | 223, 109, 0
222 | 221, 106, 0
223 | 219, 103, 0
224 | 218, 101, 0
225 | 216, 98, 0
226 | 214, 95, 0
227 | 212, 92, 0
228 | 210, 89, 0
229 | 208, 86, 0
230 | 206, 83, 0
231 | 204, 80, 0
232 | 202, 77, 0
233 | 200, 74, 0
234 | 198, 71, 0
235 | 196, 67, 0
236 | 194, 64, 0
237 | 192, 61, 0
238 | 190, 58, 0
239 | 188, 54, 0
240 | 187, 51, 0
241 | 186, 48, 0
242 | 185, 44, 0
243 | 183, 41, 0
244 | 182, 38, 0
245 | 181, 34, 0
246 | 180, 31, 0
247 | 178, 27, 0
248 | 177, 24, 0
249 | 176, 21, 0
250 | 175, 17, 0
251 | 173, 14, 0
252 | 172, 11, 0
253 | 171, 7, 0
254 | 170, 4, 0
255 | 168, 0, 0
256 |
--------------------------------------------------------------------------------
/src/plotting/colormaps/cmap_samples.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aerler/GeoPy/319016a7f9340b29fdc3c6d4b9df5fbe78378da8/src/plotting/colormaps/cmap_samples.png
--------------------------------------------------------------------------------
/src/plotting/colormaps/colormaps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # colormaps.py
5 | #
6 | # purpose: Extra colormaps for matplotlib
7 | # author: Filipe P. A. Fernandes
8 | # e-mail:     ocefpaf@gmail.com
9 | # web: http://ocefpaf.tiddlyspot.com/
10 | # created: 11-Oct-2010
11 | # modified: Fri 16 Aug 2013 11:47:32 PM BRT
12 | #
13 | # obs:
14 | #
15 |
16 |
17 |
18 | import os
19 |
20 | from glob import glob
21 | from colorsys import hsv_to_rgb
22 |
23 | import numpy as np
24 | import matplotlib.pyplot as plt
25 |
26 | from matplotlib import colors
27 | from scipy.signal import sawtooth
28 |
29 | cmap_path = os.path.join(os.path.dirname(__file__), 'cmap_data')
30 |
31 |
class Bunch(dict):
    """Dictionary whose entries are also accessible as attributes."""

    def __init__(self, **kw):
        super().__init__(kw)
        # the attribute namespace and the mapping share the same storage,
        # so item and attribute access stay in sync
        self.__dict__ = self
36 |
37 |
def get_color(color):
    """Yield `color` evenly spaced hues as HTML hex strings.

    http://stackoverflow.com/questions/10254207/
    color-and-line-writing-using-matplotlib"""
    for step in range(color):
        fraction = step / float(color)
        # value 230 (not 1.0) so hsv_to_rgb returns channels in 0..230 directly
        r, g, b = (int(channel) for channel in hsv_to_rgb(fraction, 1.0, 230))
        yield "#{0:02x}{1:02x}{2:02x}".format(r, g, b)
45 |
46 |
def cmat2cmpl(rgb, reverse=False):
    """Convert an RGB table (rows of r, g, b in [0, 1]) into a matplotlib ListedColormap,
    optionally with the row order reversed."""
    table = np.asanyarray(rgb)
    if reverse:
        table = table[::-1]  # equivalent to np.flipud for a 2-D table
    return colors.ListedColormap(table)
53 |
54 |
def phasemap_cm(m=256):
    """Colormap for periodic/circular data (phase).

    Returns an m-by-3 RGB array; hue varies continuously around the circle."""
    # m points on the unit circle in the complex plane
    circ = np.exp(2j * np.pi * np.arange(m) / m)

    # vertices of the colour triangle
    vred, vgreen, vblue = -2, 1 - np.sqrt(3) * 1j, 1 + np.sqrt(3) * 1j

    # each channel is proportional to |Im| of the product of the two vectors
    # from the circle point to the opposite vertices
    red = np.abs(np.imag((vgreen - circ) * np.conj(vblue - circ)))
    green = np.abs(np.imag((vblue - circ) * np.conj(vred - circ)))
    blue = np.abs(np.imag((vred - circ) * np.conj(vgreen - circ)))

    # normalization constant derived from the triangle itself
    norm = np.abs(np.imag((vred - vgreen) * np.conj(vred - vblue)))
    return 1.5 * np.c_[red, green, blue] / norm
74 |
75 |
def zebra_cm(a=4, m=0.5, n=256):
    """Zebra palette colormap with `a` broad bands and `n` rows in the color map.

    The default is 4 broad bands:  cmap = zebra(nbands, nentries)

    References
    ----------
    [1] Hooker, S. B. et al, Detecting Dipole Ring Separatrices with Zebra
    Palettes, IEEE Transactions on Geosciences and Remote Sensing, vol. 33,
    1306-1312, 1995

    Notes
    -----
    Saturation and value go from m to 1; don't use m = 0.
    """
    idx = np.arange(n)
    # hue decays exponentially across the palette
    hue = np.exp(-3. * idx / n)
    # sawtooth modulates saturation over the `a` broad bands; the cosine
    # modulates value at twice that frequency; both range from m to 1
    sat = m + (1. - m) * 0.5 * (1. + sawtooth(2. * np.pi * idx / (n / a)))
    val = m + (1. - m) * 0.5 * (1. + np.cos(2. * np.pi * idx / (n / a / 2.)))
    rgb = [hsv_to_rgb(*hsv) for hsv in zip(hue, sat, val)]
    return np.array(rgb)
100 |
101 |
def ctopo_pos_neg_cm(m=256):
    """Gray-scale colormap for positive/negative data
    (original from the Cushman-Roisin book CD-ROM).

    Returns an m-by-3 RGB array of m evenly spaced gray levels from
    black (0.0) to white (1.0), inclusive.
    """
    # np.linspace includes both end points and always yields exactly m values;
    # the previous np.arange(0., 1., 1./(m-1)) produced only m-1 rows and
    # never reached white (float-step arange is also rounding-fragile)
    values = np.linspace(0., 1., m)
    return np.c_[values, values, values]
108 |
109 |
def avhrr_cm(m=256):
    """AVHRR colormap used by NOAA Coastwatch.

    Returns an m-by-3 RGB array built from piecewise-linear channel ramps."""
    # m sample positions spanning [0, 1] inclusive
    x = np.arange(0.0, m) / (m - 1)

    # (breakpoint positions, channel levels) for each channel
    red_pts = ([0.0, 0.2, 0.4, 0.5, 0.6, 0.8, 1.0],
               [0.5, 1.0, 1.0, 0.5, 0.5, 0.0, 0.5])
    green_pts = ([0.0, 0.4, 0.6, 1.0],
                 [0.0, 1.0, 1.0, 0.0])
    blue_pts = ([0.0, 0.2, 0.4, 0.5, 0.6, 0.8, 1.0],
                [0.0, 0.0, 0.5, 0.5, 1.0, 1.0, 0.5])

    channels = [np.interp(x, xp, fp) for xp, fp in (red_pts, green_pts, blue_pts)]
    # the table is defined top-down, so reverse the row order
    return np.flipud(np.column_stack(channels))
129 |
130 |
def load_cmap(fname):
    """Read a comma-separated 0-255 RGB table from `fname` and rescale it to [0, 1]."""
    rgb255 = np.loadtxt(fname, delimiter=',')
    return rgb255 / 255
133 |
134 |
# Function-generated colormaps: name -> m-by-3 RGB array with values in [0, 1].
arrays = dict(zebra=zebra_cm(),
              avhrr=avhrr_cm(),
              phasemap=phasemap_cm(),
              ctopo_pos_neg=ctopo_pos_neg_cm())

# Data-file colormaps: every .dat file in cmap_data holds a comma-separated
# 0-255 RGB table; the file's base name becomes the colormap name.
for fname in glob('%s/*.dat' % cmap_path):
    cmap = os.path.basename(fname).split('.')[0]
    data = load_cmap(fname)
    arrays.update({cmap: data})

# Public registry of matplotlib colormaps (attribute- and key-accessible):
# each entry 'name' also gets a reversed companion 'name_r'.
cm = Bunch()
for key, value in arrays.items():
    cm.update({key: cmat2cmpl(value)})
    cm.update({'%s_r' % key: cmat2cmpl(value, reverse=True)})
151 |
152 |
if __name__ == '__main__':
    # Visual self-test: render a vertical gradient through every forward
    # colormap (reversed '_r' variants are skipped), one narrow column each.
    data = np.outer(np.arange(0, 1, 0.01), np.ones(10))
    fig = plt.figure(figsize=(10, 5))
    fig.subplots_adjust(top=0.8, bottom=0.05, left=0.01, right=0.99)
    cmaps = sorted([m for m in list(cm.keys()) if not m.endswith("_r")])
    length = len(cmaps)
    for k, cmap in enumerate(cmaps):
        # one subplot column per colormap; extra slot keeps columns narrow
        plt.subplot(1, length + 1, k + 1)
        plt.axis("off")
        plt.imshow(data, aspect='auto', cmap=cm.get(cmap), origin="lower")
        # rotated title so long colormap names fit above the column
        plt.title(cmap, rotation=90, fontsize=10)
164 |
--------------------------------------------------------------------------------
/src/plotting/mapsetup.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2013-11-08
3 |
4 | This module defines a MapSetup Class that carries parameters concerning map setup and annotation.
5 | The class is intended for use the with plotting functions in this package.
6 |
7 | @author: Andre R. Erler, GPL v3
8 | '''
9 |
10 | import pickle, os
11 | from mpl_toolkits.basemap import Basemap
12 |
13 | rsphere = (6378137.00, 6356752.3142)
14 |
class MapSetup(object):
  ''' The MapSetup class that carries parameters concerning map setup and annotation and contains methods
      to annotate map or axes objects with these data. The stored basemap instance is only a template:
      the annotation methods take the basemap to draw on as an argument. '''

  def __init__(self, name=None, projection=None, resolution=None, grid=None, scale=None, point_markers=None,
               lat_full=None, lat_half=None, lon_full=None, lon_half=None):
    ''' Construct a MapSetup instance from input parameters.
        name          : name of this map setup (defaults to 'N/A')
        projection    : dict of Basemap keyword arguments (required; TypeError otherwise)
        resolution    : coastline/mask resolution; taken from the projection dict if not given
        grid          : grid resolution passed to drawlsmask
        scale         : positional argument tuple for Basemap.drawmapscale, or None
        point_markers : list of (name, lon, lat) tuples, or a dict mapping a point-set
                        name to such a list (see markPoints)
        lat_full/lat_half : parallels drawn with full/half line width, or None
        lon_full/lon_half : meridians drawn with full/half line width, or None '''
    self.name = name or 'N/A'
    # projection parameters
    if not isinstance(projection,dict): raise TypeError
    self.projection = projection
    # resolution falls back to the entry in the projection dict, if present
    if resolution is None and 'resolution' in projection:
      self.resolution = projection['resolution']
    else: self.resolution = resolution
    self.grid = grid
    # initialize basemap object
    self.basemap = Basemap(**projection) # make just one basemap with dummy axes handle
    # N.B.: class methods will still take a basemap object as an argument, because the class instance
    # basemap is just a template and plots are generated from copies of this basemap.
    # map grid etc.
    self.lat_full = lat_full
    self.lat_half = lat_half
    self.lon_full = lon_full
    self.lon_half = lon_half
    self.scale = scale
    # more annotation
    self.point_markers = point_markers

  # get projection
  def getProjectionSettings(self):
    ''' return elements of the projection dict and a bit more; mostly legacy '''
    # return values
    return self.projection, self.grid, self.resolution

  # draw lat/lon grid
  def drawGrid(self, basemap, left=True, bottom=True, minor=True):
    ''' add meridians and parallels; 'left' and 'bottom' indicate whether parallels and meridians
        are labeled at the left/bottom edge; 'minor' additionally labels the half-width lines '''
    # labels = [left,right,top,bottom]
    if self.lat_full is not None:
      basemap.drawparallels(self.lat_full,linewidth=1, labels=[left,False,False,False])
    if self.lat_half is not None:
      basemap.drawparallels(self.lat_half,linewidth=0.5, labels=[left and minor,False,False,False])
    if self.lon_full is not None:
      basemap.drawmeridians(self.lon_full,linewidth=1, labels=[False,False,False,bottom])
    if self.lon_half is not None:
      basemap.drawmeridians(self.lon_half,linewidth=0.5, labels=[False,False,False,bottom and minor])

  # draw map scale
  def drawScale(self, basemap):
    ''' add a map scale to the map axes; no-op if no scale argument tuple was given '''
    if self.scale is not None:
      basemap.drawmapscale(*self.scale, barstyle='fancy', fontsize=8, yoffset=0.01*(basemap.ymax-basemap.ymin))

  # utils annotations that I usually do
  def miscAnnotation(self, basemap, maskland=False, ocean_color='blue', land_color='green'):
    ''' add coastlines, countries, color ocean and background etc.;
        if maskland, continents are filled white instead of drawing thin country borders '''
    # land/sea mask
    basemap.drawlsmask(ocean_color=ocean_color, land_color=land_color,resolution=self.resolution,grid=self.grid)
    if maskland: basemap.fillcontinents(color='white',lake_color='white') # mask land
    else: basemap.drawcountries(linewidth=0.5)
    # add general map stuff
    # NOTE(review): country borders are drawn a second time here (linewidth=1.), overdrawing the
    #               linewidth=0.5 borders from the else-branch above — possibly redundant; confirm intent
    basemap.drawcountries(linewidth=1., linestyle='solid', color='k', antialiased=1, ax=None, zorder=None)
    basemap.drawcoastlines(linewidth=0.5)
    basemap.drawmapboundary(fill_color='k',linewidth=1.)


  # mark stations
  def markPoints(self, ax, basemap, pointset='default'):
    ''' mark points and label them; if point_markers is a dict of point sets,
        'pointset' selects which list of (name, lon, lat) tuples to draw '''
    if self.point_markers is not None:
      if isinstance(self.point_markers,dict):
        point_markers = self.point_markers[pointset]
      else:
        point_markers = self.point_markers
      # loop over points
      for name,lon,lat in point_markers:
        xx,yy = basemap(lon, lat) # convert lon/lat to map (projection) coordinates
        basemap.plot(xx,yy,'ko',markersize=3)
        # label offset to the lower right of the marker (offset in map units)
        ax.text(xx+1.5e4,yy-1.5e4,name,ha='left',va='top',fontsize=8)
94 |
95 |
## function that serves a MapSetup instance with complementary pickles
def getMapSetup(lpickle=False, folder=None, name=None, lrm=False, **kwargs):
  ''' Return a MapSetup instance; optionally cache the instance as a pickle file.
      lpickle : if True, load the MapSetup from '<folder>/<name>.pickle' when present,
                otherwise create it and write the pickle (simple caching)
      folder  : folder for the pickle file (required if lpickle=True)
      name    : name of the MapSetup; also the pickle file basename
      lrm     : if True, remove an existing pickle and regenerate it
      kwargs  : passed on to the MapSetup constructor
      Raises TypeError if lpickle is requested without a string folder, and
      IOError if the folder does not exist. '''
  # handle pickling
  if lpickle:
    if not isinstance(folder,str):
      raise TypeError("Argument 'folder' must be a folder path (string), not '{}'.".format(type(folder)))
    if not os.path.exists(folder): raise IOError(folder)
    filename = '{0:s}/{1:s}.pickle'.format(folder,name)
    if lrm and os.path.exists(filename): os.remove(filename) # force regeneration
    mapSetup = None
    if os.path.exists(filename):
      # open existing MapSetup from pickle
      try:
        with open(filename, 'rb') as filehandle:
          mapSetup = pickle.load(filehandle)
      except Exception:
        # a corrupt or unreadable cache file should not be fatal: discard and regenerate
        os.remove(filename); mapSetup = None
    if mapSetup is None:
      # create new MapSetup and also pickle it
      mapSetup = MapSetup(name=name, **kwargs)
      with open(filename, 'wb') as filehandle:
        pickle.dump(mapSetup, filehandle)
  else:
    # instantiate object without caching
    mapSetup = MapSetup(name=name, **kwargs)
  # return MapSetup instance
  return mapSetup
119 |
--------------------------------------------------------------------------------
/src/plotting/stylesheets/default.mplstyle:
--------------------------------------------------------------------------------
1 | # adapted from Matplotlib's ggplot stylesheet
2 | # Andre R. Erler, GPL v3, 12/03/2015
3 |
4 | patch.linewidth: 1.0
5 | patch.facecolor: blue # blue
6 | patch.edgecolor: black
7 | patch.antialiased: True
8 |
9 | font.size: 10
10 |
11 | axes.facecolor: white
12 | axes.edgecolor: black
13 | axes.linewidth: 1
14 | axes.grid: False
15 | axes.titlesize: x-large
16 | axes.labelsize: large
17 | axes.labelcolor: black
18 | axes.axisbelow: False
19 |
20 | axes.prop_cycle: cycler('color', 'bgrcmyk')
21 |
22 | xtick.color: black
23 | xtick.direction: in
24 |
25 | ytick.color: black
26 | ytick.direction: in
27 |
28 | grid.color: black
29 | grid.linestyle: : # dotted line
30 |
31 | figure.facecolor: white
32 | figure.edgecolor: white
33 |
34 | legend.frameon: True
35 | legend.fancybox: True
36 | legend.shadow: False
37 | legend.framealpha: 0.5
38 |
39 |
40 | # 'xx-large': 1.728
41 | # 'x-large': 1.44
42 | # 'large': 1.2
43 | # 'larger': 1.2
44 | # 'medium': 1.0
45 | # 'smaller': 0.833
46 | # 'small': 0.833
47 | # 'x-small': 0.694
48 | # 'xx-small': 0.579
49 |
--------------------------------------------------------------------------------
/src/plotting/stylesheets/myggplot.mplstyle:
--------------------------------------------------------------------------------
1 | # adapted from Matplotlib's ggplot stylesheet
2 | # Andre R. Erler, GPL v3, 20/01/2015
3 |
4 | patch.linewidth: 0.5
5 | patch.facecolor: 348ABD # blue
6 | patch.edgecolor: EEEEEE
7 | patch.antialiased: True
8 |
9 | font.size: 10.0
10 |
11 | axes.facecolor: E2E2E2 # E5E5E5
12 | axes.edgecolor: white
13 | axes.linewidth: 1
14 | axes.grid: True
15 | axes.titlesize: x-large
16 | axes.labelsize: large
17 | axes.labelcolor: 555555
18 | axes.axisbelow: True # grid/ticks are below elements (eg lines, text)
19 |
20 | axes.prop_cycle: cycler('color', ['348ABD', '988ED5', 'E24A33', '777777', 'FBC15E', '8EBA42', 'FFB5B8'])
21 | # 348ABD : blue
22 | # 988ED5 : purple
23 | # E24A33 : red
24 | # 777777 : gray
25 | # FBC15E : yellow
26 | # 8EBA42 : green
27 | # FFB5B8 : pink
28 |
29 | xtick.color: 555555
30 | xtick.direction: out
31 |
32 | ytick.color: 555555
33 | ytick.direction: out
34 |
35 | grid.color: white
36 | grid.linestyle: - # solid line
37 |
38 | figure.facecolor: white
39 | figure.edgecolor: 0.50
40 |
41 | legend.frameon: True
42 | legend.fancybox: True
43 | legend.shadow: False
44 | legend.framealpha: 0.7 # to better hide grid lines
--------------------------------------------------------------------------------
/src/plotting/stylesheets/presentation.mplstyle:
--------------------------------------------------------------------------------
1 | # Matplotlib Stylesheet for Presentations
2 | # this stylesheet mainly defines item sizes suitable for presentations
3 | # it is meant to be used in conjunction with other stylesheets
4 | # Andre R. Erler, GPL v3, 01/08/2015
5 |
6 | font.size: 9
7 |
8 | axes.titlesize : medium
9 | axes.labelsize : large
10 |
11 | lines.linewidth : 1.5
12 | lines.markersize : 7
13 |
14 | xtick.labelsize : small
15 | ytick.labelsize : small
16 |
17 | legend.fontsize: medium
--------------------------------------------------------------------------------
/src/plotting/stylesheets/publication.mplstyle:
--------------------------------------------------------------------------------
1 | # Matplotlib Stylesheet for Publication
2 | # this stylesheet mainly defines item sizes suitable for print publications
3 | # it is meant to be used in conjunction with other stylesheets
4 | # Andre R. Erler, GPL v3, 31/07/2015
5 |
6 | font.size: 8
7 |
8 | axes.titlesize : large
9 | axes.labelsize : large
10 |
11 | lines.linewidth : 1
12 | lines.markersize : 3
13 |
14 | xtick.labelsize : medium
15 | ytick.labelsize : medium
16 |
17 | legend.shadow: False
18 | legend.fontsize: medium
19 | #legend.framealpha: 0.7
--------------------------------------------------------------------------------
/src/plotting/taylor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | __version__ = "Time-stamp: <2010-12-27 17:42 yannick@lyopc469>"
4 | __author__ = "Yannick Copin "
5 |
6 | """
7 | Taylor diagram (Taylor, 2001) test implementation.
8 |
9 | http://www-pcmdi.llnl.gov/about/staff/Taylor/CV/Taylor_diagram_primer.htm
10 | """
11 |
12 | import numpy as NP
13 |
class TaylorDiagram(object):
    """Taylor diagram: plot model standard deviation and correlation
    to reference (data) sample in a single-quadrant polar plot, with
    r=stddev and theta=arccos(correlation).
    """

    def __init__(self, refsample):
        """Store the reference (data) sample that models are compared against."""
        self.ref = NP.asarray(refsample)

    def setup_axes(self, fig, rect=111):
        """Set up the Taylor-diagram axes (single-quadrant polar plot) on *fig*
        using mpl_toolkits.axisartist.floating_axes, draw the reference point
        and stddev contour, and return the polar (auxiliary) axes.

        Wouldn't the ideal be to define its own non-linear transformation,
        so that coordinates are directly r=stddev and theta=correlation?
        """
        from matplotlib.projections import PolarAxes
        import mpl_toolkits.axisartist.floating_axes as FA
        import mpl_toolkits.axisartist.grid_finder as GF

        transform = PolarAxes.PolarTransform()

        # correlation tick values, converted to polar angles
        corr_ticks = NP.concatenate((NP.arange(10)/10., [0.95, 0.99]))
        angle_ticks = NP.arccos(corr_ticks)
        locator = GF.FixedLocator(angle_ticks)
        formatter = GF.DictFormatter(dict(zip(angle_ticks, map(str, corr_ticks))))

        helper = FA.GridHelperCurveLinear(
            transform,
            extremes=(0, NP.pi/2,            # 1st quadrant
                      0, 1.5*self.ref.std()),
            grid_locator1=locator,
            tick_formatter1=formatter,
            )

        floating_ax = FA.FloatingSubplot(fig, rect, grid_helper=helper)
        fig.add_subplot(floating_ax)

        # angle axis (top): correlation labels
        top = floating_ax.axis["top"]
        top.set_axis_direction("bottom")
        top.toggle(ticklabels=True, label=True)
        top.major_ticklabels.set_axis_direction("top")
        top.label.set_axis_direction("top")
        top.label.set_text("Correlation")

        # "X axis" (left): standard deviation
        floating_ax.axis["left"].set_axis_direction("bottom")
        floating_ax.axis["left"].label.set_text("Standard deviation")

        # "Y axis" (right)
        right = floating_ax.axis["right"]
        right.set_axis_direction("top")
        right.toggle(ticklabels=True)
        right.major_ticklabels.set_axis_direction("left")

        floating_ax.axis["bottom"].set_visible(False)  # unused quadrant edge

        floating_ax.grid()

        self._ax = floating_ax                          # graphical axes
        self.ax = floating_ax.get_aux_axes(transform)   # polar coordinates

        # add reference point and stddev contour
        print("Reference std:", self.ref.std())
        self.ax.plot([0], self.ref.std(), 'ko', label='_')
        angles = NP.linspace(0, NP.pi/2)
        radii = NP.zeros_like(angles) + self.ref.std()
        self.ax.plot(angles, radii, 'k--', label='_')

        return self.ax

    def get_coords(self, sample):
        """Return (theta, stddev) of *sample* wrt. the reference sample,
        where theta = arccos(correlation)."""
        stddev = NP.std(sample)
        rho = NP.corrcoef(self.ref, sample)[0, 1]  # off-diagonal of [[1,rho],[rho,1]]
        theta = NP.arccos(rho)
        print("Sample std,rho:", stddev, rho)
        return theta, stddev

    def plot_sample(self, sample, *args, **kwargs):
        """Add *sample* to the Taylor diagram; args and kwargs are
        directly propagated to the plot command."""
        theta, radius = self.get_coords(sample)
        line, = self.ax.plot(theta, radius, *args, **kwargs)  # (theta, radius)
        return line
107 |
108 |
if __name__=='__main__':

    import matplotlib.pyplot as PLT

    # synthetic reference data and three imperfect "models"
    # (the two randn() calls occur in the same order as before, so the
    #  global RNG state is consumed identically)
    t = NP.linspace(0, 4*NP.pi, 100)
    ref = NP.sin(t)                               # Data
    model1 = ref + 0.2*NP.random.randn(len(t))    # Model 1
    model2 = 0.8*ref + .1*NP.random.randn(len(t)) # Model 2
    model3 = NP.sin(t - NP.pi/10)                 # Model 3

    diagram = TaylorDiagram(ref)

    fig = PLT.figure(figsize=(10, 4))
    ax_series = fig.add_subplot(1, 2, 1, xlabel='X', ylabel='Y')
    ax_taylor = diagram.setup_axes(fig, 122)

    # time series on the left
    for series, style, label in ((ref, 'ko', 'Data'), (model1, 'b-', 'Model 1'),
                                 (model2, 'r-', 'Model 2'), (model3, 'g-', 'Model 3')):
        ax_series.plot(t, series, style, label=label)

    # Taylor diagram on the right
    for series, marker in ((model1, 'bo'), (model2, 'ro'), (model3, 'go')):
        diagram.plot_sample(series, marker)

    ax_series.legend(numpoints=1, prop=dict(size='small'), loc='best')

    PLT.show()
137 |
--------------------------------------------------------------------------------
/src/processing/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2013-11-03
3 |
A module to facilitate processing of datasets. Parallel processing is also supported.
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
--------------------------------------------------------------------------------
/src/processing/yaml_samples/export.yaml:
--------------------------------------------------------------------------------
1 | # YAML configuration file for conversion to ASCII raster (processing.ascii_raster.py)
2 | # 21/04/2016, Andre R. Erler
3 |
4 | NP: 1 # environment variable has precedence; likely I/O limited
5 | loverwrite: true # this is usually run manually anyway...
6 | modes: [climatology,]
7 | load_list: ['lat2D','lon2D','zs','snow','pet_wrf','waterflx','liqprec','solprec','precip','evap','snwmlt',
8 | 'ps','u10','v10','Q2','Tmin','Tmax','T2','TSmin','TSmax','grdflx','A','SWD','e','GLW','SWDNB','SWUPB','LWDNB','LWUPB']
9 | periods: [15,] # climatology periods to process
10 | # Datasets
11 | datasets: [] # this will generally not work, because we don't have snow/-melt...
12 | resolutions: Null # process all applicable
13 | lLTM: false # not relevant anyway
14 | # CESM
15 | CESM_project: Null # all available experiments
16 | load3D: false
17 | CESM_experiments: Null
18 | CESM_filetypes: ['atm','lnd'] # used to load dataset; both are necessary
19 | # WRF
20 | WRF_project: Null # all available experiments
21 | WRF_experiments: Null # all available experiments
22 | WRF_domains: Null # inner domain onto inner domain
23 | WRF_filetypes: ['srfc','xtrm','hydro','lsm','rad'] # used to load dataset; not processed individually
24 | # bias correction parameters
25 | bc_method: Null
26 | bc_tag: ''
27 | #bc_method: 'AABC' # bias correction method (Null: no bias correction)
28 | #bc_tag: 'AABC_'
29 | #obs_dataset: 'NRCan' # the observational dataset
30 | #bc_reference: Null # reference experiment (Null: auto-detect based on name)
31 | #bc_args: { grid: Null, domain: Null, lgzip: True, # missing/Null parameters are inferred from experiment
32 | # varmap: [ Tmin: ['Tmin','TSmin'], Tmax: ['Tmax','TSmax'], T2: ['T2','Tmean'], pet_wrf: ['pet_wrf','evap'],
33 | # SWDNB: ['SWDNB','SWUPB','SWD'], SWD: ['SWDNB','SWUPB','SWD'], ] }
34 | #grids: # mapping with list of resolutions
35 | # - can1 # Continental Scale (mostly Canada), 5km resolution
36 | ## export parameters for ASCII
37 | #export_parameters:
38 | # project: 'CAN' # project tag, mainly for folder
39 | # format: ASCII_raster # ArcInfo ASCII raster format
40 | # compute_list: ['waterflx','liqwatflx','pet'], # variables that should be (re-)computed
41 | # exp_list: ['lat2D','lon2D','zs','waterflx','liqwatflx','pet','pet_wrf'], # varlist for export
42 | # folder: '/data/HGS/{PROJECT:s}/{GRID:s}/{EXPERIMENT:s}/{PERIOD:s}/climate_forcing/' # destination folder
43 | # prefix: '{GRID:s}' # file prefix
44 | # noDataValue: -9999 # masked/missing values
45 | # fillValue: 0 # in case we interpolate across a missing value...
46 | # lm3: true # convert water flux from kg/m^2/s to m^3/s
47 | grids: # mapping with list of resolutions
48 | - Null # native grid
49 | # export parameters for NetCDF
50 | export_parameters:
51 | project: 'AUX' # project tag, mainly for folder
    format: 'NetCDF' # NetCDF export format (the commented-out block above uses ArcInfo ASCII raster)
53 | compute_list: ['waterflx','liqwatflx','pet'] # variables that should be (re-)computed
54 | exp_list: ['netrad','netrad_bb0','netrad_bb','vapdef','pet','pet_wrf','petrad','petwnd','Tmin','Tmax','T2','Tmean','TSmin','TSmax',
55 | 'SWD','SWDNB','SWUPB','zs','lat2D','lon2D','waterflx','liqwatflx','liqprec','solprec','precip','snow','snowh','snwmlt',]
56 | filetype: 'aux' # filetype name of export file
57 | lm3: false # convert water flux from kg/m^2/s to m^3/s
--------------------------------------------------------------------------------
/src/processing/yaml_samples/exstns.yaml:
--------------------------------------------------------------------------------
1 | # YAML configuration file for batch station extraction (processing.exstns.py)
2 | # 20/04/2016, Andre R. Erler
3 |
4 | NP: 2 # environment variable has precedence
5 | # N.B.: station extraction tends to be relatively fast, but I/O limited
6 | loverwrite: false # only recompute if source is newer
7 | modes: ['time-series',]
8 | varlist: Null # process all variables
9 | periods: Null # climatology periods to process
10 | # Datasets
11 | datasets: Null # process all applicable
12 | resolutions: Null # process all applicable
13 | lLTM: false
14 | # CESM
15 | CESM_project: Null # all available experiments
16 | load3D: false
17 | CESM_experiments: Null
18 | CESM_filetypes: ['atm','lnd']
19 | # WRF
20 | WRF_project: Null # all available experiments (fast enough)
21 | WRF_experiments: Null # all available experiments
22 | domains: Null # inner domain onto inner domain
23 | WRF_filetypes: ['srfc','xtrm','hydro','lsm','rad','plev3d','aux'] # process all filetypes except snow
24 | # define station data
25 | stations:
26 | EC: # all Environment Canada weather stations
27 | - precip # precip stations from EC module
28 | - temp # temperature stations from EC module
--------------------------------------------------------------------------------
/src/processing/yaml_samples/regrid.yaml:
--------------------------------------------------------------------------------
1 | # YAML configuration file for batch regridding (processing.regrid.py)
2 | # 20/04/2016, Andre R. Erler
3 |
4 | NP: 3 # environment variable has precedence
5 | loverwrite: false # only recompute if source is newer
6 | modes: ['climatology',]
7 | varlist: Null # process all variables
8 | periods: [15,] # climatology periods to process
9 | # Datasets
10 | datasets: [] # process all applicable
11 | resolutions: Null # process all applicable
12 | lLTM: true
13 | # CESM
14 | CESM_project: Null # all available experiments
15 | load3D: false
16 | CESM_experiments: Null
17 | CESM_filetypes: ['atm','lnd']
18 | # WRF
19 | WRF_project: Null # all available experiments
20 | WRF_experiments: Null # all available experiments
21 | domains: Null # inner domain onto inner domain
22 | WRF_filetypes: ['srfc','xtrm','hydro','lsm','rad','plev3d','aux'] # process all filetypes except snow
23 | # grid to project onto
24 | grids: # mapping with list of resolutions
25 | arb2: ['d02',] # inner Western Canada
26 | glb1: ['d02',] # inner Great Lakes
27 |
--------------------------------------------------------------------------------
/src/processing/yaml_samples/shpavg.yaml:
--------------------------------------------------------------------------------
1 | # YAML configuration file for batch shape averaging (processing.shpavg.py)
2 | # 20/04/2016, Andre R. Erler
3 |
4 | NP: 3 # environment variable has precedence
5 | loverwrite: false # only recompute if source is newer
6 | lappend: true # append to existing file, instead of recompute all
7 | modes: ['time-series',]
8 | varlist: Null # process all variables
9 | periods: Null # climatology periods to process
10 | # Datasets
11 | datasets: Null # process all applicable
12 | resolutions: Null # process all applicable
13 | lLTM: false
14 | # CESM
15 | CESM_project: Null # all available experiments
16 | load3D: false
17 | CESM_experiments: Null
18 | CESM_filetypes: ['atm','lnd']
19 | # WRF
20 | WRF_experiments: Null # all available experiments
21 | domains: Null # inner domain onto inner domain
22 | WRF_filetypes: ['srfc','xtrm','hydro','lsm','rad','plev3d','aux'] # process all filetypes except snow
23 | # define shape data
24 | #WRF_project: Null # all available experiments
25 | #shape_name: 'shpavg'
26 | #shapes:
27 | # provinces: Null # all Canadian provinces from EC module
28 | # basins: Null # all river basins (in Canada) from WSC module
29 | WRF_project: 'WesternCanada' # just western Canada
30 | shape_name: 'wcavg' # Western Canadian shapes
31 | shapes:
32 | provinces: ['BC','AB'] # Canadian provinces from EC module
33 | basins: ['PSB','NorthernPSB','SouthernPSB','FRB','UpperFRB','LowerFRB','CRB',
34 | 'ARB','UpperARB','LowerARB','SSR','NRB',] # river basins (in Canada) from WSC module
35 | #WRF_project: 'GreatLakes' # just Great Lakes simulations
36 | #shape_name: 'glbshp' # only Canadian river basins
37 | #shapes:
38 | # provinces: ['MB','ON','QC'] # Canadian provinces from EC module
39 | # basins: ['LandGLB','GLB','GRW','UpperGRW','LowerGRW','NorthernGRW','SouthernGRW','WesternGRW','SNW'] # river basins (in Canada) from WSC module
40 | #shape_name: 'glakes' # Great Lakes
41 | #shapes:
42 | # great_lakes: None # the Great Lakes of North America
43 |
44 | # N.B.: averaging over many shapes is computationally very expensive
--------------------------------------------------------------------------------
/src/processing/yaml_samples/wrfavg.yaml:
--------------------------------------------------------------------------------
1 | # YAML configuration file for batch averaging of WRF time-series (processing.wrfavg.py)
2 | # 20/04/2016, Andre R. Erler
3 |
4 | NP: 3 # environment variable has precedence
5 | loverwrite: false # only recompute if source is newer
6 | varlist: Null # process all variables
7 | periods: [15,] # climatology periods to process
8 | offset: 0 # number of years from simulation start
9 | WRF_project: Null # all available experiments
10 | WRF_experiments: Null # all available experiments
11 | domains: Null # inner domain onto inner domain
12 | WRF_filetypes: ['srfc','xtrm','hydro','lsm','plev3d','rad'] # process all output filetypes, except 'snow'
13 | # grid to project onto
14 | grid: Null # no on-the-fly regridding
15 |
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2014-07-30
3 |
4 | A package that contains various utility modules, including modules that were adapted from other sources.
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
--------------------------------------------------------------------------------
/src/utils/constants.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 2010-11-26, adapted from PyGeode on 2013-08-24
3 |
4 | Some default physical constants.
5 |
6 | @author: Andre R. Erler, GPL v3
7 | '''
8 |
from numpy import pi, sin

# actual constants
R = 8.31447215 # J/(mol K), universal gas constant (Wikipedia)
cp = 1005.7 # J/(kg K), specific heat of dry air per mass (AMS Glossary)
g0 = 9.80665 # m/s**2, for geopotential altitude (else actually y-dependent g(y))
Mair = 0.0289644 # kg/mol, Molecular mass of dry air
Re = 6371229 # m, Radius of planet earth
T0 = 273.15 # K, Temperature at 0 deg C, i.e. negative absolute zero in Celsius
Omega = 2*pi/((23*60+56)*60+4.1) # 1/s, Earth's rotation rate (using sidereal day)
# some derived constants, for convenience
Cp = cp*Mair # J/(mol K), specific heat of dry air per mole
Rd = R/Mair # J/(kg K), gas constant for dry air
kappa = R/Cp # ~2/7, adiabatic exponent R/Cp for dry air (Cp/R is ~7/2)
# not exactly physical constants
fc = 2*Omega*sin(pi/4) # 1/s, Coriolis parameter at 45 deg N
p0 = 1e5 # Pa, reference pressure (e.g. for potential temperature)
sig = 5.67051e-8 # W/(m**2 K**4), Stefan-Boltzmann constant (AMS Glossary, WRF)
lw = 2.45e6 # latent heat of vaporization [J/kg] of water at 20 deg Celsius and mean sealevel pressure (FAO)

# thresholds for wet-day variables (from AMS glossary and ETCCDI Climate Change Indices;
# presumably in mm/day — confirm against usage in wrfavg)
precip_thresholds = [0.2, 1., 10., 20.]
# N.B.: importing from wrfavg in WRF Tools causes a name collision; the variable is placed here
# to avoid further import errors, because this module does not import anything non-standard
33 |
--------------------------------------------------------------------------------
/src/utils/fix_time.py:
--------------------------------------------------------------------------------
1 | #!/usr/local/bin/python3.6
2 | # encoding: utf-8
3 | '''
4 | A simple script to fix the time coordinate/index in concatenated NetCDF files; the script replaces the time
5 | coordinate with the number of month since a reference time (1979-01) by default; it also reads the NetCDF
attribute 'begin_date' and changes 'end_date' based on the length of the time axis (assuming monthly steps).
7 |
8 | @author: Andre R. Erler
9 |
10 | @copyright: 2019 Aquanty Inc. All rights reserved.
11 |
12 | @license: GPL v3
13 |
14 | @contact: aerler@aquanty.com
15 | @deffield updated: 30/05/2019
16 | '''
17 |
18 | import os, sys
19 | import numpy as np
20 | import netCDF4 as nc
21 | import pandas as pd
22 |
# find reference date (the time axis will count months since this date)
ref_date = os.getenv('NC_REFERENCE_DATE', '1979-01')
print("Using reference date: "+ref_date)
ref_dt = pd.to_datetime(ref_date)

# read start date option; if set, it overrides the 'begin_date' attribute of each file
# N.B.: this used to (erroneously) read NC_REFERENCE_DATE again, which made the override
#       always identical to the reference date; it now reads NC_START_DATE
master_start_date = os.getenv('NC_START_DATE', None)
if master_start_date is not None:
    master_start_dt = pd.to_datetime(master_start_date)

# get file list
file_list = sys.argv[1:] # first is script name
# print("Looping over file list:")
# print(file_list)
# print("")

# loop over file list
for ncfile in file_list:

    if not os.path.exists(ncfile):
        raise IOError(ncfile)

    print("Opening file: '{}'".format(ncfile))
    # open file in append mode, so attributes and the time axis can be modified in place
    ds = nc.Dataset(ncfile,'a')
    if master_start_date is None:
        # take the start date from the file's own 'begin_date' attribute
        start_date = ds.getncattr('begin_date')
        print(" Start date ('begin_date'): "+start_date)
        start_dt = pd.to_datetime(start_date)
    else:
        # use the global override and record it in the file
        start_date = master_start_date
        start_dt = master_start_dt
        ds.setncattr('begin_date',start_date)

    # compute offset of the start date to the reference date (in months)
    start_month = (start_dt.year - ref_dt.year)*12 + (start_dt.month - ref_dt.month)

    # fix time axis: consecutive month indices relative to the reference date
    tax = ds['time']
    tax_len = len(tax)
    print(' New time index: {} - {}'.format(start_month,start_month+tax_len-1)) # inclusive range
    tax[:] = np.arange(start_month,start_month+tax_len, dtype=tax.dtype)
    # change time units to match the new index
    tax.setncattr('units','month since '+ref_date)

    # compute and set end date (the inclusive last month of the time axis)
    # N.B.: the arithmetic is done with zero-based months; the previous formula
    #       ( (start_dt.month + tax_len - 1) % 12 ) mapped December to month 0
    #       and bumped the year, e.g. Jan-start with 12 steps gave '1980-00'
    end_month0 = start_dt.month - 1 + tax_len - 1 # zero-based month of last time step
    end_year = start_dt.year + end_month0//12
    end_month = end_month0%12 + 1
    end_date = '{YEAR:04d}-{MON:02d}'.format(YEAR=end_year,MON=end_month)
    print(" End date ('end_date'): "+end_date)
    ds.setncattr('end_date',end_date)

    # save and close file
    ds.sync(); ds.close()
77 |
--------------------------------------------------------------------------------
/src/utils/signalsmooth.py:
--------------------------------------------------------------------------------
1 | """
2 | cookb_signalsmooth.py
3 |
4 | from: http://scipy.org/Cookbook/SignalSmooth
5 | """
6 |
7 | import numpy as np
8 |
class UnmaskAndPad(object):
    ''' decorator class to preprocess arrays for smoothing: masked or NaN entries are
        temporarily replaced by pad_value, the wrapped smoother is applied, and the
        mask/NaN pattern is restored on the result '''

    def __init__(self, smoother):
        ''' store the smoothing operation we are going to apply '''
        self.smoother = smoother

    def __call__(self, data, pad_value=0, **kwargs):
        ''' unmask and pad data, execute smoother, and restore mask;
            the caller's input array is left unmodified '''

        if not isinstance(data,np.ndarray):
            raise TypeError(data)

        # remove mask (filled() returns a new array, so the caller's data is safe)
        if isinstance(data, np.ma.MaskedArray):
            mask = data.mask; fill_value = data.fill_value
            data = data.filled(pad_value)
        else:
            mask = None
        # remove NaN (only inexact/float dtypes can hold NaN)
        if np.issubdtype(data.dtype, np.inexact):
            nan_mask = np.isnan(data)
            if nan_mask.any():
                # copy before padding: the previous implementation overwrote NaNs in the
                # caller's array in-place and restored them only on the smoother's output,
                # permanently clobbering the input
                data = data.copy()
                data[nan_mask] = pad_value
            if np.isinf(data).any():
                raise NotImplementedError("Non-finite values except NaN are currently not handled in smoothing.")
        else:
            nan_mask = None

        # apply smoother
        data = self.smoother(data, **kwargs)

        # restore NaN (on the smoother's output array)
        if nan_mask is not None:
            data[nan_mask] = np.nan
        # restore mask (use the public fill_value argument instead of poking _fill_value)
        if mask is not None:
            data = np.ma.masked_array(data, mask=mask, fill_value=fill_value)

        # return
        return data
50 |
51 |
@UnmaskAndPad
def smooth(x, window_len=11, window='hanning'):
    """smooth the data using a window with requested size.

    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the appropriate size) in both ends so that transient parts are minimized
    in the begining and end part of the output signal.

    input:
        x: the input signal (1D array)
        window_len: the dimension of the smoothing window
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
            flat window will produce a moving average smoothing.

    output:
        the smoothed signal (same length as the input)

    example:

    import numpy as np
    t = np.linspace(-2,2,50)
    x = np.sin(t)+np.random.randn(len(t))*0.1
    y = smooth(x)

    see also:

    numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
    scipy.signal.lfilter

    TODO: the window parameter could be the window itself if an array instead of a string
    """

    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")

    if x.size < window_len:
        raise ValueError("Input vector needs to be of equal size or bigger than window size.")

    if window_len < 3:
        return x # nothing to do for trivial window sizes

    if window not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
        raise ValueError("Window should be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")

    half = window_len//2 # pad length at each end
    # pad both ends with reflected copies (point-mirrored about the end values)
    s = np.r_[ 2*x[0]-x[half:0:-1], x, 2*x[-1]-x[-2:-half-2:-1] ]
    #print(len(s))

    # build the window and normalize it to unit sum
    if window == 'flat': #moving average
        w = np.ones(window_len,'d')
    else:
        w = getattr(np, window)(window_len)
    # convolve, then trim the padding so the output has the input's length
    y = np.convolve(w/w.sum(), s, mode='same')
    return y[half:-half]
106 |
107 |
108 | #*********** part2: 2d
109 |
110 | from scipy import signal
111 |
def twoDim_kern(size, window, sizey=None):
    """ Build a normalized 2D kernel array for convolutions.

    'gauss' and 'flat' kernels are generated directly in 2D; the separable
    windows ('hanning', 'hamming', 'bartlett', 'blackman') are formed as
    the square root of the outer product of the matching 1D numpy windows.
    The result is normalized so that its elements sum to one.
    """
    size = int(size)
    sizey = int(sizey) if sizey else size
    # coordinate grids; only the Gaussian branch actually uses them
    x, y = np.mgrid[-size:size+1, -sizey:sizey+1]
    if window == 'gauss':
        kern = np.exp(-(x**2/float(size) + y**2/float(sizey)))
    elif window == 'flat':
        # N.B.: the flat kernel is (size, sizey), unlike the Gaussian's (2*size+1, 2*sizey+1)
        kern = np.ones((size, sizey))
    elif window in ('hanning', 'hamming', 'bartlett'):
        # separable window: outer product of the 1D windows along each axis
        w1x = getattr(np, window)(size)
        w1y = getattr(np, window)(sizey)
        kern = np.sqrt(np.outer(w1x, w1y))
    elif window == 'blackman':
        outer = np.outer(np.blackman(size), np.blackman(sizey))
        # the Blackman window yields tiny negative values that would become
        # NaN under the square root, so they are zeroed out first
        outer[np.abs(outer) < 1e-15] = 0
        kern = np.sqrt(outer)
    return kern/kern.sum()
146 |
@UnmaskAndPad
def smooth_image(im, window='gauss', n=10, ny=None):
    """ Blur a 2D image by convolving it with a kernel of typical size n.

    The image is first embedded in a larger array whose margins are filled
    with point-reflected copies of the edge rows/columns (and extrapolated
    corners), so that the convolution does not suffer from zero-padding
    artifacts near the boundary; the margins are trimmed off again before
    returning.

    input:
        im: 2D input image
        window: kernel type, passed to twoDim_kern
        n: typical kernel size (also half-width of the boundary padding)
        ny: optional different kernel size in the y direction
    output:
        the smoothed image, same shape as the input
    """
    n = int(n)
    if not ny:
        ny = n  # default: same kernel size in both directions
    else:
        ny = int(ny)
    # build the convolution kernel
    g = twoDim_kern(size=n,window=window,sizey=ny)
    [mx,my] = im.shape
    # padded array size: n//2 (resp. ny//2) extra cells on each side
    ox = 2*(n//2)+mx
    oy = 2*(ny//2)+my
    S = np.zeros((ox,oy))
    # place the original image in the interior of the padded array
    S[n//2:-(n//2),ny//2:-(ny//2)] = im
    # fill each interior row with a point-reflected extension of the image row
    for i in np.arange(n//2,ox-(n//2)):
        S[i,:] = np.r_[ 2*im[i-(n//2),0]-im[i-(n//2),ny//2:0:-1], im[i-(n//2),:],
                        2*im[i-(n//2),-1]-im[i-(n//2),-2:-(ny//2)-2:-1] ]
    # fill each interior column likewise (overwrites the interior consistently)
    for j in np.arange(ny//2,oy-(ny//2)):
        S[:,j] = np.r_[ 2*im[0,j-(ny//2)]-im[n//2:0:-1,j-(ny//2)],
                        im[:,j-(ny//2)], 2*im[-1,j-(ny//2)]-im[-2:-(n//2)-2:-1,j-(ny//2)] ]
    # corner patches, extrapolated by point-reflection from the filled margins
    # NOTE(review): the names suggest top-left/top-right etc., but TL/TR are
    # computed from the highest row indices and BL/BR from the lowest — the
    # labels appear swapped relative to the assignment targets below, although
    # the fill pattern itself is self-consistent; confirm intent before renaming.
    TL = np.zeros((n//2,ny//2))
    TR = np.zeros((n//2,ny//2))
    BL = np.zeros((n//2,ny//2))
    BR = np.zeros((n//2,ny//2))
    for i in np.arange(ox-(n//2),ox):
        TL[i-ox+(n//2),:] = 2*S[i,ny//2]-S[i,2*(ny//2):ny//2:-1]
        TR[i-ox+(n//2),:] = 2*S[i,-1-(ny//2)]-S[i,-2-(ny//2):-2*(ny//2)-2:-1]
    for i in np.arange(n//2):
        BL[i,:] = 2*S[i,ny//2]-S[i,2*(ny//2):ny//2:-1]
        BR[i,:] = 2*S[i,-1-(ny//2)]-S[i,-2-(ny//2):-2*(ny//2)-2:-1]
    # insert the corner patches into the padded array
    S[0:n//2,0:ny//2] = BL
    S[ox-(n//2):ox,0:ny//2] = TL
    S[0:n//2,oy-(ny//2):oy] = BR
    S[ox-(n//2):ox,oy-(ny//2):oy] = TR
    # convolve and trim the padding off again
    improc = signal.convolve(S,g,mode='same')
    return(improc[n//2:-(n//2),ny//2:-(ny//2)])
186 |
187 |
def smooth_demo():
    """Demonstrate the 1D smoothing windows: plot the window shapes (top)
    and the effect of each window on a noisy sine signal (bottom)."""
    import matplotlib.pyplot as plt

    t = np.linspace(-4, 4, 100)
    x = np.sin(t)
    xn = x + np.random.randn(len(t)) * 0.1  # noisy version of the signal
    ws = 31  # window size for the window-shape plots

    # top panel: the window functions themselves
    plt.subplot(211)
    plt.plot(np.ones(ws))  # 'flat' window

    windows = ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']
    for w in windows[1:]:  # 'flat' was already plotted above
        plt.plot(getattr(np, w)(ws))

    plt.axis([0, 30, 0, 1.1])
    plt.legend(windows)
    plt.title("The smoothing windows")

    # bottom panel: smoothing of the noisy signal with each window
    plt.subplot(212)
    plt.plot(x)
    plt.plot(xn)
    for w in windows:
        plt.plot(smooth(xn, window_len=10, window=w))
    labels = ['original signal', 'signal with noise']
    labels.extend(windows)
    plt.legend(labels)
    plt.title("Smoothing a noisy signal")
221 |
222 |
def smooth_image_demo():
    """Demonstrate 2D image smoothing: show a noisy test pattern, then, for
    each kernel type, the kernel weights and the smoothed image."""
    import matplotlib.pyplot as plt

    windows = ['gauss', 'flat', 'hanning', 'hamming', 'bartlett', 'blackman']

    # noisy radial test pattern
    X, Y = np.mgrid[-70:70, -70:70]
    Z = np.cos((X**2+Y**2)/200.) + np.random.normal(size=X.shape)

    plt.figure()
    plt.subplot(121)
    plt.imshow(Z)
    plt.title("The perturbed signal")

    for w in windows:
        g = twoDim_kern(size=31, window=w)  # large kernel, for display only
        Z2 = smooth_image(Z, window=w, n=5)
        plt.figure()
        plt.subplot(121)
        plt.imshow(g)
        plt.colorbar(orientation="horizontal")
        plt.title("Weight function "+w)
        plt.subplot(122)
        plt.imshow(Z2)
        plt.colorbar(orientation="horizontal")
        plt.title("Smoothed using window "+w)
250 |
if __name__=='__main__':

    # NOTE(review): plt is imported here but not used directly; the demo
    # functions import pyplot themselves and their plt.show() calls are
    # commented out — confirm whether figures are meant to display interactively
    import matplotlib.pyplot as plt

    # part 1: 1d smoothing windows demo
    smooth_demo()

    # part 2: 2d image smoothing demo
    smooth_image_demo()
260 |
261 |
--------------------------------------------------------------------------------
/src/utils/simple_regrid.py:
--------------------------------------------------------------------------------
'''
Created on 2013-07-29

Geo-referencing container classes and a function to reproject and resample
(regrid) gridded arrays on regular lat/lon grids, using GDAL as the backend

@author: Andre R. Erler, GPL v3
'''
8 |
9 | import numpy as np
10 | import netCDF4 as nc
11 | from osgeo import gdal, osr
12 |
13 | # register RAM driver
14 | ramdrv = gdal.GetDriverByName('MEM')
15 |
## geo-reference base class for datasets
class ProjDataset(object):
    '''
    A container class for GDAL geo-referencing information with regriding functionality
    '''

    # constructor routine
    def __init__(self, projection=None, geotransform=None, size=None):
        '''
        initialize dataset (GDAL projection objects are passed explicitly);
        this function should be overloaded to load projections for specific datasets
        '''
        # store GDAL meta data and objects
        self.projection = projection # GDAL projection object
        self.geotransform = geotransform # GDAL geotransform vector
        self.size = size # x/y size tuple, can be None
        ## GeoTransform Vector definition:
        # GT(2) & GT(4) are zero for North-up
        # GT(1) & GT(5) are pixel width and height
        # GT(0) & GT(3) are the (x/y) coordinates of the top left corner

    # function to return a GDAL dataset
    def getProj(self, bands, dtype='float32', size=None):
        '''
        Create and return an in-memory GDAL dataset with the given number of
        bands, ready for use; 'size' defaults to the dataset's own grid size.
        Raises ValueError for unsupported dtype names.
        '''
        # map numpy-style dtype names to GDAL data types
        # (previously only 'float32' was handled and any other value caused a NameError)
        gdal_types = {'float32': gdal.GDT_Float32, 'float64': gdal.GDT_Float64,
                      'int16': gdal.GDT_Int16, 'int32': gdal.GDT_Int32}
        try:
            gdt = gdal_types[dtype]
        except KeyError:
            raise ValueError("Unsupported data type: '%s'"%dtype)
        # determine size
        if not size: size = self.size # should be default
        # create GDAL dataset in RAM
        dset = ramdrv.Create('', int(size[0]), int(size[1]), int(bands), int(gdt))
        # N.B.: for some reason a dataset is always initialized with 6 bands
        # set projection parameters
        dset.SetGeoTransform(self.geotransform) # does the order matter?
        dset.SetProjection(self.projection.ExportToWkt()) # is .ExportToWkt() necessary?
        # return dataset
        return dset
54 |
## simple lat/lon geo-referencing system
class LatLonProj(ProjDataset):
    '''
    Geo-referencing container for a regular (equidistant) latitude/longitude grid.
    '''
    def __init__(self, lon, lat):
        '''
        construct projection metadata from regular lon/lat coordinate vectors
        '''
        epsg = 4326 # EPSG code for regular lat/long grid
        # grid dimensions (x/lon first, then y/lat)
        size = (len(lon), len(lat))
        # grid spacing, inferred from the first two coordinates
        dlon = lon[1] - lon[0]
        dlat = lat[1] - lat[0]
        # coordinates of the upper left corner: half a cell outward from the first grid point
        x0 = lon[0] - dlon/2.
        y0 = lat[0] - dlat/2.
        # GDAL geotransform: (x0, dx, 0, y0, 0, dy); rotation terms GT(2) & GT(4) are zero (North-up)
        geotransform = (x0, dlon, 0., y0, 0., dlat)
        # build the spatial reference from the EPSG code
        projection = osr.SpatialReference()
        projection.ImportFromEPSG(epsg)
        # delegate storage to the generic container class
        super(LatLonProj,self).__init__(projection=projection, geotransform=geotransform, size=size)
        self.epsg = epsg # keep the EPSG code for reference
78 |
## function to reproject and resample a 2D array
def regridArray(data, srcprj, tgtprj, interpolation='bilinear', missing=None):
    '''
    A function that regrids (reproject and resample) an array based on a source and target
    projection object (using GDAL as a backend); the two inner-most dimensions have to be
    latitude/y and longitude/x. 'missing' is used as the no-data/fill value; 'interpolation'
    is one of 'bilinear', 'nearest', 'lanczos', 'convolution' (cubic), 'cubicspline'.
    Raises ValueError for unknown interpolation methods.
    '''
    # condition data (assuming a numpy array)
    dshape = data.shape[0:-2]; ndim = data.ndim
    assert ndim > 1, 'data array needs to have at least two dimensions'
    sxe = data.shape[-1]; sye = data.shape[-2] # (bnd,lat,lon)
    # collapse all leading dimensions into one band dimension
    bnds = 1 if ndim == 2 else np.prod(dshape)
    data = data.reshape(bnds,sye,sxe)
    # resolve the interpolation method first, so an invalid argument fails
    # before any GDAL datasets are created (previously an unknown method was
    # only printed and then caused a NameError further down)
    gdal_interps = {'bilinear': gdal.GRA_Bilinear,
                    'nearest': gdal.GRA_NearestNeighbour,
                    'lanczos': gdal.GRA_Lanczos,
                    'convolution': gdal.GRA_Cubic, # cubic convolution
                    'cubicspline': gdal.GRA_CubicSpline} # cubic spline
    try:
        gdal_interp = gdal_interps[interpolation]
    except KeyError:
        raise ValueError('Unknown interpolation method: '+interpolation)
    ## create source and target dataset
    assert srcprj.size == (sxe, sye), 'data array and data grid have to be of compatible size'
    srcdata = srcprj.getProj(bnds); tgtdata = tgtprj.getProj(bnds)
    txe, tye = tgtprj.size
    fill = np.zeros((tye,txe))
    # N.B.: use 'is not None' so that a fill value of 0 is honored as well
    if missing is not None: fill += missing
    # assign data
    for i in range(bnds):
        srcdata.GetRasterBand(i+1).WriteArray(data[i,:,:])
        tgtdata.GetRasterBand(i+1).WriteArray(fill.copy())
        if missing is not None:
            srcdata.GetRasterBand(i+1).SetNoDataValue(missing)
            tgtdata.GetRasterBand(i+1).SetNoDataValue(missing)
    ## reproject and resample (projections are taken from the datasets themselves)
    err = gdal.ReprojectImage(srcdata, tgtdata, None, None, gdal_interp)
    if err != 0: print(('ERROR CODE %i'%err))
    # get data field
    if bnds == 1: outdata = tgtdata.ReadAsArray()[:,:] # for 2D fields
    else: outdata = tgtdata.ReadAsArray(0,0,txe,tye)[0:bnds,:,:] # ReadAsArray(0,0,xe,ye)
    # restore the original leading dimensions
    if ndim == 2: outdata = outdata.squeeze()
    else: outdata = outdata.reshape(dshape+outdata.shape[-2:])
    # return data
    return outdata
125 |
126 |
# run a test
if __name__ == '__main__':

    # input
    folder = '/media/tmp/' # RAM disk
    infile = 'prismavg/prism_clim.nc'

    # load input dataset and build its lat/lon projection
    inData = nc.Dataset(filename=folder+infile)
    lon = inData.variables['lon'][:]; lat = inData.variables['lat'][:]
    inProj = LatLonProj(lon=lon, lat=lat)

    # define new (target) grid covering the same domain at 0.125 deg resolution
    dlon = dlat = 0.125
    slon = np.floor(lon[0]); elon = np.ceil(lon[-1])
    slat = np.floor(lat[0]); elat = np.ceil(lat[-1])
    # N.B.: the number of grid points passed to np.linspace has to be an
    # integer (passing the float quotient directly is a TypeError in numpy)
    nlon = int(round((elon-slon)/dlon))
    nlat = int(round((elat-slat)/dlat))
    newlon = np.linspace(slon+dlon/2, elon-dlon/2, nlon)
    newlat = np.linspace(slat+dlat/2, elat-dlat/2, nlat)
    likeProj = LatLonProj(lon=newlon, lat=newlat)

    # regrid onto the new lat/lon grid
    outdata = regridArray(inData.variables['rain'][:], inProj, likeProj, interpolation='convolution', missing=-9999)

    # display the first band (flipped so north is up)
    import pylab as pyl
    for i in range(1):
        pyl.imshow(np.flipud(outdata[i,:,:])); pyl.colorbar(); pyl.show(block=True)
165 |
--------------------------------------------------------------------------------
/src/utils/stats.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on Mar 30, 2015
3 |
4 | Modified statistical functions from scipy.stats
5 |
6 | @author: Andre R. Erler
7 | '''
8 |
9 | import numpy as np
10 | # imports from scipy's internal stats-helper module
11 | from scipy.stats.stats import rankdata, distributions
12 | from scipy.stats._stats_py import _chk_asarray
13 | from scipy.special import betainc
14 |
15 | # helper function
16 | def _sum_of_squares(x):
17 | return np.sum(x**2)
18 |
19 | # helper function
20 | def _betai(a, b, x):
21 | x = np.asarray(x)
22 | x = np.where(x < 1.0, x, 1.0) # if x > 1 then return 1.0
23 | return betainc(a, b, x)
24 |
## Pearson's linear correlation coefficient
def pearsonr(x, y, dof=None):
    """
    Compute Pearson's correlation coefficient between two samples and the
    two-tailed p-value for the null hypothesis of no correlation.

    Pearson's r measures the linear relationship between two datasets and
    varies between -1 and +1, with 0 implying no correlation; -1 or +1 imply
    an exact linear relationship. The p-value is the approximate probability
    that an uncorrelated system produces data with a correlation at least as
    extreme as the one observed; it is most reliable for large samples.

    This is a modified version that supports an optional argument to set the
    degrees of freedom (dof) manually.

    Parameters
    ----------
    x : (N,) array_like
        Input
    y : (N,) array_like
        Input
    dof : int or None, optional
        Degrees of freedom; defaults to N-2 when None.

    Returns
    -------
    (Pearson's correlation coefficient,
     2-tailed p-value)

    References
    ----------
    http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation
    """
    x = np.asarray(x)
    y = np.asarray(y)
    # deviations from the respective means
    xdev = x - x.mean()
    ydev = y - y.mean()
    # correlation: cross-product over the geometric mean of the sums of squares
    numerator = np.add.reduce(xdev * ydev)
    denominator = np.sqrt(np.sum(xdev**2) * np.sum(ydev**2))
    r = numerator / denominator
    # clip tiny floating-point excursions outside [-1, 1]
    if r > 1.0:
        r = 1.0
    elif r < -1.0:
        r = -1.0
    # degrees of freedom: N-2 unless overridden by the caller
    df = len(x) - 2 if dof is None else dof
    if abs(r) == 1.0:
        prob = 0.0  # perfect (anti-)correlation
    else:
        # transform r to a t-statistic and evaluate the two-sided tail
        # probability via the regularized incomplete beta function
        t2 = r * r * (df / ((1.0 - r) * (1.0 + r)))
        arg = df / (df + t2)
        prob = betainc(0.5 * df, 0.5, arg if arg < 1.0 else 1.0)
    return r, prob
87 |
88 |
## Spearman's rank correlation coefficient
def spearmanr(a, b=None, axis=0, dof=None):
    """
    Calculates a Spearman rank-order correlation coefficient and the p-value
    to test for non-correlation.

    The Spearman correlation is a nonparametric measure of the monotonicity
    of the relationship between two datasets. Unlike the Pearson correlation,
    the Spearman correlation does not assume that both datasets are normally
    distributed. Like other correlation coefficients, this one varies
    between -1 and +1 with 0 implying no correlation. Correlations of -1 or
    +1 imply an exact monotonic relationship. Positive correlations imply that
    as x increases, so does y. Negative correlations imply that as x
    increases, y decreases.

    The p-value roughly indicates the probability of an uncorrelated system
    producing datasets that have a Spearman correlation at least as extreme
    as the one computed from these datasets. The p-values are not entirely
    reliable but are probably reasonable for datasets larger than 500 or so.

    Parameters
    ----------
    a, b : 1D or 2D array_like, b is optional
        One or two 1-D or 2-D arrays containing multiple variables and
        observations. Each column of `a` and `b` represents a variable, and
        each row entry a single observation of those variables. See also
        `axis`. Both arrays need to have the same length in the `axis`
        dimension.
    axis : int or None, optional
        If axis=0 (default), then each column represents a variable, with
        observations in the rows. If axis=0, the relationship is transposed:
        each row represents a variable, while the columns contain observations.
        If axis=None, then both arrays will be raveled.
    dof : int or None, optional
        If dof=None (default), the degrees of freedom will be inferred from
        the array length.

    Returns
    -------
    rho : float or ndarray (2-D square)
        Spearman correlation matrix or correlation coefficient (if only 2
        variables are given as parameters. Correlation matrix is square with
        length equal to total number of variables (columns or rows) in a and b
        combined.
    p-value : float
        The two-sided p-value for a hypothesis test whose null hypothesis is
        that two sets of data are uncorrelated, has same dimension as rho.

    Notes
    -----
    Changes in scipy 0.8.0: rewrite to add tie-handling, and axis.

    References
    ----------
    [CRCProbStat2000]_ Section 14.7

    .. [CRCProbStat2000] Zwillinger, D. and Kokoska, S. (2000). CRC Standard
       Probability and Statistics Tables and Formulae. Chapman & Hall: New
       York. 2000.

    Examples
    --------
    >>> spearmanr([1,2,3,4,5],[5,6,7,8,7])
    (0.82078268166812329, 0.088587005313543798)
    >>> np.random.seed(1234321)
    >>> x2n=np.random.randn(100,2)
    >>> y2n=np.random.randn(100,2)
    >>> spearmanr(x2n)
    (0.059969996999699973, 0.55338590803773591)
    >>> spearmanr(x2n[:,0], x2n[:,1])
    (0.059969996999699973, 0.55338590803773591)
    >>> rho, pval = spearmanr(x2n,y2n)
    >>> rho
    array([[ 1.        ,  0.05997   ,  0.18569457,  0.06258626],
           [ 0.05997   ,  1.        ,  0.110003  ,  0.02534653],
           [ 0.18569457,  0.110003  ,  1.        ,  0.03488749],
           [ 0.06258626,  0.02534653,  0.03488749,  1.        ]])
    >>> pval
    array([[ 0.        ,  0.55338591,  0.06435364,  0.53617935],
           [ 0.55338591,  0.        ,  0.27592895,  0.80234077],
           [ 0.06435364,  0.27592895,  0.        ,  0.73039992],
           [ 0.53617935,  0.80234077,  0.73039992,  0.        ]])
    >>> rho, pval = spearmanr(x2n.T, y2n.T, axis=1)
    >>> rho
    array([[ 1.        ,  0.05997   ,  0.18569457,  0.06258626],
           [ 0.05997   ,  1.        ,  0.110003  ,  0.02534653],
           [ 0.18569457,  0.110003  ,  1.        ,  0.03488749],
           [ 0.06258626,  0.02534653,  0.03488749,  1.        ]])
    >>> spearmanr(x2n, y2n, axis=None)
    (0.10816770419260482, 0.1273562188027364)
    >>> spearmanr(x2n.ravel(), y2n.ravel())
    (0.10816770419260482, 0.1273562188027364)

    >>> xint = np.random.randint(10,size=(100,2))
    >>> spearmanr(xint)
    (0.052760927029710199, 0.60213045837062351)

    """
    # rank the observations along the requested axis (ties get average ranks)
    a, axisout = _chk_asarray(a, axis)
    ar = np.apply_along_axis(rankdata,axisout,a)

    br = None
    if b is not None:
        b, axisout = _chk_asarray(b, axis)
        br = np.apply_along_axis(rankdata,axisout,b)
    # effective sample size; NOTE(review): here 'dof' replaces n itself, so the
    # t-test below uses dof-2 degrees of freedom — unlike pearsonr, where 'dof'
    # sets the degrees of freedom directly; confirm intended semantics.
    n = a.shape[axisout] if dof is None else dof
    # Pearson correlation of the ranks; passing axisout as 'rowvar' selects
    # the variables-in-rows vs. variables-in-columns orientation
    rs = np.corrcoef(ar,br,rowvar=axisout)

    olderr = np.seterr(divide='ignore')    # rs can have elements equal to 1
    try:
        # transform the rank correlation to a t-statistic
        t = rs * np.sqrt((n-2) / ((rs+1.0)*(1.0-rs)))
    finally:
        np.seterr(**olderr)
    # two-sided p-value from the t-distribution's survival function
    prob = distributions.t.sf(np.abs(t),n-2)*2

    # for exactly two variables, return scalars instead of 2x2 matrices
    if rs.shape == (2,2):
        return rs[1,0], prob[1,0]
    else:
        return rs, prob
208 |
209 |
if __name__ == '__main__':
    # no command-line functionality; this module is meant to be imported
    pass
212 |
--------------------------------------------------------------------------------