├── blocks.md
├── plots.md
├── io.md
├── maps.md
├── misc.md
├── data_types.md
├── compile.sh
├── introduction.md
├── oop.md
├── .replit
├── avatar.png
├── io
│   ├── data
│   │   ├── UV500storm.nc
│   │   ├── GYRE_OOPE_Y1950D000.nc.000
│   │   ├── GYRE_OOPE_Y1950D000.nc.001
│   │   ├── GYRE_OOPE_Y1950D000.nc.002
│   │   ├── GYRE_OOPE_Y1950D000.nc.003
│   │   ├── ISAS13_20120115_fld_TEMP.nc
│   │   ├── ISAS13_20120215_fld_TEMP.nc
│   │   ├── ISAS13_20120315_fld_TEMP.nc
│   │   ├── ISAS13_20120415_fld_TEMP.nc
│   │   ├── nao.txt
│   │   └── nina34.csv
│   ├── text.py
│   ├── pand.py
│   └── xar.py
├── misc
│   ├── figs
│   │   └── nemo-index.png
│   ├── data
│   │   ├── .gitattributes
│   │   ├── surface_thetao.nc
│   │   ├── mesh_mask_eORCA1_v2.2.nc
│   │   └── oni.data
│   ├── interpol.py
│   ├── shapefiles.py
│   ├── mean_sst_dask.py
│   ├── practical_session.py
│   ├── nemo.py
│   ├── dask_covariance.py
│   ├── eof_analysis.py
│   └── dask_examples.py
├── data_types
│   ├── figs
│   │   ├── corder.pdf
│   │   ├── corder.png
│   │   ├── forder.pdf
│   │   ├── forder.png
│   │   ├── tikz_dict.pdf
│   │   ├── tikz_dict.png
│   │   ├── tikz_list.pdf
│   │   ├── tikz_list.png
│   │   ├── static_typing.png
│   │   ├── dynamic_typing.png
│   │   ├── compile.sh
│   │   ├── corder.py
│   │   ├── forder.py
│   │   ├── tikz_list.tex
│   │   └── tikz_dict.tex
│   ├── numerics.py
│   ├── vars.py
│   ├── dict.py
│   ├── list.py
│   └── strings.py
├── introduction
│   ├── figs
│   │   ├── spyder.png
│   │   ├── console.png
│   │   ├── ipython.png
│   │   ├── notebook.png
│   │   ├── run_file.png
│   │   ├── run_sel.png
│   │   └── args_spyder.png
│   ├── intro.py
│   ├── start.py
│   └── libinstall.py
├── maps
│   ├── figs
│   │   ├── pyngl_examples.000001.png
│   │   ├── pyngl_examples.000002.png
│   │   ├── pyngl_examples.000003.png
│   │   ├── pyngl_examples.000004.png
│   │   ├── pyngl_examples.000005.png
│   │   ├── pyngl_examples.000006.png
│   │   ├── pyngl_examples.000007.png
│   │   └── pyngl_examples.000008.png
│   ├── pyngl.py
│   └── carto.py
├── requirements.txt
├── .gitignore
├── convert.sh
├── _config.yml
├── plots
│   ├── scatters.py
│   ├── quivers.py
│   ├── pyplot_settings.py
│   ├── geometrical_shapes.py
│   ├── text.py
│   ├── axes.py
│   ├── xy.py
│   ├── panels.py
│   └── contours.py
├── _toc.yml
├── .github
│   └── workflows
│       └── publish.yml
├── README.md
└── blocks
    ├── ifsta.py
    ├── loops.py
    └── functions.py
/blocks.md:
--------------------------------------------------------------------------------
1 | # Blocks
2 |
--------------------------------------------------------------------------------
/plots.md:
--------------------------------------------------------------------------------
1 | # Graphics
2 |
--------------------------------------------------------------------------------
/io.md:
--------------------------------------------------------------------------------
1 | # Input/Outputs
2 |
--------------------------------------------------------------------------------
/maps.md:
--------------------------------------------------------------------------------
1 | # Cartography
2 |
--------------------------------------------------------------------------------
/misc.md:
--------------------------------------------------------------------------------
1 | # Miscellaneous
2 |
--------------------------------------------------------------------------------
/data_types.md:
--------------------------------------------------------------------------------
1 | # Data types
2 |
--------------------------------------------------------------------------------
/compile.sh:
--------------------------------------------------------------------------------
1 | jupyter-book build .
2 |
--------------------------------------------------------------------------------
/introduction.md:
--------------------------------------------------------------------------------
1 | # Introduction
2 |
--------------------------------------------------------------------------------
/oop.md:
--------------------------------------------------------------------------------
1 | # Object-oriented programming
2 |
--------------------------------------------------------------------------------
/.replit:
--------------------------------------------------------------------------------
1 | language = "python3"
2 | run = "python"
--------------------------------------------------------------------------------
/avatar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/avatar.png
--------------------------------------------------------------------------------
/io/data/UV500storm.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/UV500storm.nc
--------------------------------------------------------------------------------
/misc/figs/nemo-index.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/misc/figs/nemo-index.png
--------------------------------------------------------------------------------
/data_types/figs/corder.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/corder.pdf
--------------------------------------------------------------------------------
/data_types/figs/corder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/corder.png
--------------------------------------------------------------------------------
/data_types/figs/forder.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/forder.pdf
--------------------------------------------------------------------------------
/data_types/figs/forder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/forder.png
--------------------------------------------------------------------------------
/introduction/figs/spyder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/spyder.png
--------------------------------------------------------------------------------
/data_types/figs/tikz_dict.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/tikz_dict.pdf
--------------------------------------------------------------------------------
/data_types/figs/tikz_dict.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/tikz_dict.png
--------------------------------------------------------------------------------
/data_types/figs/tikz_list.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/tikz_list.pdf
--------------------------------------------------------------------------------
/data_types/figs/tikz_list.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/tikz_list.png
--------------------------------------------------------------------------------
/introduction/figs/console.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/console.png
--------------------------------------------------------------------------------
/introduction/figs/ipython.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/ipython.png
--------------------------------------------------------------------------------
/introduction/figs/notebook.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/notebook.png
--------------------------------------------------------------------------------
/introduction/figs/run_file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/run_file.png
--------------------------------------------------------------------------------
/introduction/figs/run_sel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/run_sel.png
--------------------------------------------------------------------------------
/data_types/figs/static_typing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/static_typing.png
--------------------------------------------------------------------------------
/introduction/figs/args_spyder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/introduction/figs/args_spyder.png
--------------------------------------------------------------------------------
/data_types/figs/dynamic_typing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/data_types/figs/dynamic_typing.png
--------------------------------------------------------------------------------
/io/data/GYRE_OOPE_Y1950D000.nc.000:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/GYRE_OOPE_Y1950D000.nc.000
--------------------------------------------------------------------------------
/io/data/GYRE_OOPE_Y1950D000.nc.001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/GYRE_OOPE_Y1950D000.nc.001
--------------------------------------------------------------------------------
/io/data/GYRE_OOPE_Y1950D000.nc.002:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/GYRE_OOPE_Y1950D000.nc.002
--------------------------------------------------------------------------------
/io/data/GYRE_OOPE_Y1950D000.nc.003:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/GYRE_OOPE_Y1950D000.nc.003
--------------------------------------------------------------------------------
/io/data/ISAS13_20120115_fld_TEMP.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/ISAS13_20120115_fld_TEMP.nc
--------------------------------------------------------------------------------
/io/data/ISAS13_20120215_fld_TEMP.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/ISAS13_20120215_fld_TEMP.nc
--------------------------------------------------------------------------------
/io/data/ISAS13_20120315_fld_TEMP.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/ISAS13_20120315_fld_TEMP.nc
--------------------------------------------------------------------------------
/io/data/ISAS13_20120415_fld_TEMP.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/io/data/ISAS13_20120415_fld_TEMP.nc
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000001.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000001.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000002.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000002.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000003.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000003.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000004.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000004.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000005.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000005.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000006.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000006.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000007.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000007.png
--------------------------------------------------------------------------------
/maps/figs/pyngl_examples.000008.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umr-marbec/python-training/HEAD/maps/figs/pyngl_examples.000008.png
--------------------------------------------------------------------------------
/misc/data/.gitattributes:
--------------------------------------------------------------------------------
1 | surface_thetao.nc filter=lfs diff=lfs merge=lfs -text
2 | mesh_mask_eORCA1_v2.2.nc filter=lfs diff=lfs merge=lfs -text
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | jupyter-book
2 | xarray
3 | dask
4 | pandas
5 | netcdf4
6 | numpy
7 | scipy
8 | cartopy
9 | matplotlib
10 | jupytext
11 |
--------------------------------------------------------------------------------
/misc/data/surface_thetao.nc:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:09762ffb9fd9e9b21604686d478e2f63791c09fa8d4c6e928bd122def2326a39
3 | size 141541027
4 |
--------------------------------------------------------------------------------
/misc/data/mesh_mask_eORCA1_v2.2.nc:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:c0f9b999c03ef3ac3ff089a2d994fdb8f82fd3344a88febd152ceba5a18634cf
3 | size 3085625
4 |
--------------------------------------------------------------------------------
/data_types/figs/compile.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | for f in tikz*tex
4 | do
5 | fin=`echo $f | sed s/.tex/.pdf/`
6 | fout=`echo $f | sed s/.tex/.svg/`
7 | pdflatex $f
8 | pdf2svg $fin $fout
9 | done
10 |
11 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Removes latex temporary files
2 | **/*.ipynb
3 | **.aux
4 | **.log
5 | **.nav
6 | **.out
7 | **.snm
8 | **.toc
9 | **.html
10 | **.swp
11 | **.vrb
12 | *.DS_Store
13 | **.npz
14 | **todo**
15 | **__pycache__**
16 | **/weights.nc
17 |
18 |
19 | **outfile.txt**
20 | **example.nc**
21 | **example.csv**
22 |
23 | # Remove R files
24 | .Rhistory
25 |
26 | **/.ipynb_checkpoints
27 | **/dask-worker-space
28 | _build
29 |
30 | # gimp files
31 | **xcf
32 | link.sh
33 |
34 | **/pacific_mask.nc
35 |
36 | **/*LogFile
37 | **/bilinear*
38 | misc/*png
39 | clean.sh
40 |
41 | **/ipbes_regions_subregions_shape*
42 | **/IPBES_Regions_Subregions2*
43 |
--------------------------------------------------------------------------------
/data_types/figs/corder.py:
--------------------------------------------------------------------------------
1 | import pylab as plt
2 | import numpy as np
3 | plt.rcParams['font.size'] = 15
4 |
5 | nx, ny = 3, 4
6 |
7 | x = np.arange(nx) + 0.5
8 | y = np.arange(ny) + 0.5
9 |
10 | plt.figure()
11 | ax = plt.gca()
12 | ax.set_aspect('equal')
13 | ax.set_facecolor('lightgray')
14 | ax.set_xticks(np.arange(nx + 1))
15 | ax.set_yticks(np.arange(ny + 1))
16 | plt.grid(linestyle='--')
17 | plt.setp(ax.get_xticklabels(), visible=False)
18 | plt.setp(ax.get_yticklabels(), visible=False)
19 | plt.xlim(0, nx)
20 | plt.ylim(0, ny)
21 |
22 | cpt = 0
23 | for i in range(0, nx):
24 | for j in range(ny):
25 | plt.text(x[i], y[j], cpt, ha='center', va='center')
26 | cpt += 1
27 |
28 | plt.savefig('corder.png', bbox_inches='tight')
29 |
--------------------------------------------------------------------------------
/data_types/figs/forder.py:
--------------------------------------------------------------------------------
1 | import pylab as plt
2 | import numpy as np
3 | plt.rcParams['font.size'] = 15
4 |
5 | nx, ny = 3, 4
6 |
7 | x = np.arange(nx) + 0.5
8 | y = np.arange(ny) + 0.5
9 |
10 | plt.figure()
11 | ax = plt.gca()
12 | ax.set_aspect('equal')
13 | ax.set_facecolor('lightgray')
14 | ax.set_xticks(np.arange(nx + 1))
15 | ax.set_yticks(np.arange(ny + 1))
16 | plt.grid(linestyle='--')
17 | plt.setp(ax.get_xticklabels(), visible=False)
18 | plt.setp(ax.get_yticklabels(), visible=False)
19 | plt.xlim(0, nx)
20 | plt.ylim(0, ny)
21 |
22 | cpt = 0
23 | for j in range(ny):
24 | for i in range(0, nx):
25 | plt.text(x[i], y[j], cpt, ha='center', va='center')
26 | cpt += 1
27 |
28 | plt.savefig('forder.png', bbox_inches='tight')
29 |
30 |
--------------------------------------------------------------------------------
/convert.sh:
--------------------------------------------------------------------------------
1 | for dir in blocks data_types introduction io oop plots maps
2 | do
3 | cd $dir
4 | for f in *py
5 | do
6 | echo $f
7 | if [ $f = "pyngl.py" ]; then
8 | echo "============================== PyNGL not processed"
9 | continue
10 | fi
11 |
12 | # Convert .py to ipynb
13 | jupytext --to notebook $f
14 | fout=`echo $f | sed "s/.py/.ipynb/"`
15 |
16 | # execute notebooks
17 | jupyter nbconvert --execute --to notebook $fout
18 |
19 | final_file=`echo $fout | sed "s/ipynb/nbconvert.ipynb/"`
20 | mv $final_file $fout
21 |
22 | done
23 | cd ..
24 | done
25 |
26 | cd misc
27 | for f in nemo.py eof_analysis.py interpol.py practical_session.py shapefiles.py
28 | do
29 |
30 | echo $f
31 |
32 | # Convert .py to ipynb
33 | jupytext --to notebook $f
34 | fout=`echo $f | sed "s/.py/.ipynb/"`
35 |
36 | # execute notebooks
37 | jupyter nbconvert --execute --to notebook $fout
38 |
39 | final_file=`echo $fout | sed "s/ipynb/nbconvert.ipynb/"`
40 | mv $final_file $fout
41 | done
42 |
43 | for f in dask_covariance.py dask_examples.py mean_sst_dask.py
44 | do
45 | echo $f
46 | # Convert .py to ipynb
47 | jupytext --to notebook $f
48 | done
49 |
50 | cd ..
51 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | # Book settings
2 | # Learn more at https://jupyterbook.org/customize/config.html
3 |
4 | title: Introduction to Python
5 | author: Nicolas Barrier
6 | logo: avatar.png
7 |
8 | parse:
9 | myst_enable_extensions:
10 | # don't forget to list any other extensions you want enabled,
11 | # including those that are enabled by default!
12 | - html_image
13 |
14 | # Force re-execution of notebooks on each build.
15 | # See https://jupyterbook.org/content/execute.html
16 | execute:
17 | execute_notebooks: off
18 | timeout: 1000
19 |
20 | # Define the name of the latex output file for PDF builds
21 | latex:
22 | latex_documents:
23 | targetname: book.tex
24 |
25 | # Add a bibtex file so that we can create citations
26 | #bibtex_bibfiles:
27 | # - references.bib
28 |
29 | # Information about where the book exists on the web
30 | repository:
31 | url: https://github.com/umr-marbec/python-training
32 | path_to_book: ./ # Optional path to your book, relative to the repository root
33 | branch: master # Which branch of the repository should be used when creating links (optional)
34 |
35 | # Add GitHub buttons to your book
36 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository
37 | html:
38 | use_issues_button: true
39 | use_repository_button: true
40 |
--------------------------------------------------------------------------------
/plots/scatters.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Scatter plots
17 | #
18 | # Scatter plots can be obtained by using the [scatter](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.scatter.html) method.
19 |
20 | # +
21 | import numpy as np
22 | import pylab as plt
23 |
24 | np.random.seed(seed=1)
25 | N = 50
26 | x = np.random.rand(N)
27 | y = np.random.rand(N)
28 | z = np.pi * (15 * np.random.rand(N))**2 # 0 to 15 point radiuses
29 | # -
30 |
31 | # Scatter plots with the size as a function of data value
32 | plt.figure()
33 | plt.scatter(x, y, s=z, c='k')
34 | plt.show()
35 |
36 | # Scatter plots with color as a function of data value
37 | plt.figure()
38 | cs = plt.scatter(x, y, s=600, c=z,
39 | cmap=plt.cm.jet, alpha=0.5, marker='o')
40 | plt.colorbar(cs)
41 | plt.show()
42 |
43 | # Scatter plots with color and size as a function of data value
44 | plt.figure()
45 | cs = plt.scatter(x, y, s=z, c=z,
46 | cmap=plt.cm.jet, alpha=0.5, marker='o')
47 | plt.colorbar(cs)
48 | plt.show()
49 |
--------------------------------------------------------------------------------
/_toc.yml:
--------------------------------------------------------------------------------
1 | format: jb-article
2 | root: README.md
3 | sections:
4 | - file: introduction.md
5 | sections:
6 | - file: introduction/intro.ipynb
7 | - file: introduction/start.ipynb
8 | - file: introduction/libinstall.ipynb
9 | - file: data_types.md
10 | sections:
11 | - file: data_types/vars.ipynb
12 | - file: data_types/numerics.ipynb
13 | - file: data_types/list.ipynb
14 | - file: data_types/dict.ipynb
15 | - file: data_types/strings.ipynb
16 | - file: data_types/nmp.ipynb
17 | - file: blocks.md
18 | sections:
19 | - file: blocks/ifsta.ipynb
20 | - file: blocks/loops.ipynb
21 | - file: blocks/functions.ipynb
22 | - file: plots.md
23 | sections:
24 | - file: plots/xy.ipynb
25 | - file: plots/contours.ipynb
26 | - file: plots/geometrical_shapes.ipynb
27 | - file: plots/panels.ipynb
28 | - file: plots/quivers.ipynb
29 | - file: plots/scatters.ipynb
30 | - file: plots/text.ipynb
31 | - file: plots/axes.ipynb
32 | - file: plots/pyplot_settings.ipynb
33 | - file: oop.md
34 | sections:
35 | - file: oop/oop.ipynb
36 | - file: io.md
37 | sections:
38 | - file: io/text.ipynb
39 | - file: io/pand.ipynb
40 | - file: io/xar.ipynb
41 | - file: maps.md
42 | sections:
43 | - file: maps/carto.ipynb
44 | - file: misc.md
45 | sections:
46 | - file: misc/nemo.ipynb
47 | - file: misc/interpol.ipynb
48 | - file: misc/eof_analysis.ipynb
49 | - file: misc/dask_examples.ipynb
50 | - file: misc/mean_sst_dask.ipynb
51 | - file: misc/dask_covariance.ipynb
52 | - file: misc/shapefiles.ipynb
53 | - file: misc/practical_session.ipynb
54 |
--------------------------------------------------------------------------------
/plots/quivers.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Quiver plots
17 | #
18 | # Quiver plots are obtained by using the [quiver](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.quiver.html) function.
19 |
20 | # +
21 | import scipy.io.netcdf as nc
22 | import numpy as np
23 | import matplotlib.pyplot as plt
24 |
25 | f = nc.netcdf_file('../io/data/UV500storm.nc', mmap=False)
26 | u = f.variables['u'][0]
27 | v = f.variables['v'][0]
28 | u = np.ma.masked_where(np.abs(u)>=999, u)
29 | v = np.ma.masked_where(np.abs(v)>=999, v)
30 | x = f.variables['lon'][:]
31 | y = f.variables['lat'][:]
32 | vel = np.sqrt(u*u + v*v, where=(np.ma.getmaskarray(u) == False))
33 |
34 | f.close()
35 | # -
36 |
37 | # ## Using colormap
38 |
39 | plt.figure()
40 | q = plt.quiver(x, y, u, v, vel, cmap=plt.cm.get_cmap('hsv'), scale=1000)
41 | q.set_clim(0, 50)
42 | cb = plt.colorbar(q)
43 | cb.set_label('Wind speed (m/s)')
44 | plt.show()
45 |
46 | # ## Using reference arrow
47 | #
48 | # Adding a reference arrow is done by using the [quiverkey](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.quiverkey.html) function
49 |
50 | plt.figure()
51 | q = plt.quiver(x, y, u, v, scale=1000)
52 | keys = plt.quiverkey(q, -131, 21, 70, 'Wind speed\n(50 m/s)', coordinates='data')
53 | plt.show()
54 |
--------------------------------------------------------------------------------
/data_types/figs/tikz_list.tex:
--------------------------------------------------------------------------------
1 | \documentclass[convert={outext=.png}]{standalone}
2 | \usepackage{tikz}
3 |
4 |
5 | \usetikzlibrary{arrows,decorations.pathmorphing,backgrounds,positioning,fit,matrix}
6 | \usetikzlibrary{patterns}
7 | \usepackage{subcaption}
8 | \tikzstyle{hatch1}=[pattern=north west lines, pattern color=green]
9 | \tikzstyle{hatch2}=[pattern=north west lines, pattern color=orange]
10 | \tikzstyle{hatch3}=[pattern=north west lines, pattern color=purple]
11 | \tikzstyle{hatch4}=[pattern=north east lines, pattern color=blue]
12 | \tikzstyle{hatch5}=[pattern=north east lines, pattern color=yellow]
13 | \tikzstyle{hatch6}=[pattern=north east lines, pattern color=magenta]
14 | \usepackage[margin=1in]{geometry}
15 | \tikzstyle{nodeleg}=[right, midway, inner sep = 0.7cm]
16 |
17 | \begin{document}
18 |
19 | \begin{tikzpicture}
20 | \filldraw[step=1.0,black, thin, fill=blue!20] (0., 0. ) grid (4, 1) rectangle(0, 0);
21 | \draw (0.5, 0.5 ) node {L};
22 | \draw (1.5, 0.5 ) node {I};
23 | \draw (2.5, 0.5 ) node {S};
24 | \draw (3.5, 0.5 ) node {T};
25 |
26 | \def\wpos{-0.3}
27 | \def\wneg{-1}
28 | \def\lab{-0.5}
29 |
30 | \draw (0.5, \wpos ) node {0};
31 | \draw (1.5, \wpos ) node {1};
32 | \draw (2.5, \wpos ) node {2};
33 | \draw (3.5, \wpos ) node {3};
34 | \draw (\lab, \wpos ) node [anchor=east, align=right]{Positive indexes};
35 |
36 | \draw (0.5, \wneg ) node {-4};
37 | \draw (1.5, \wneg ) node {-3};
38 | \draw (2.5, \wneg ) node {-2};
39 | \draw (3.5, \wneg ) node {-1};
40 | \draw (\lab, \wneg ) node [anchor=east, align=right]{Negative indexes};
41 | \end{tikzpicture}
42 | \end{document}
43 |
--------------------------------------------------------------------------------
/data_types/figs/tikz_dict.tex:
--------------------------------------------------------------------------------
1 | \documentclass[convert={outext=.png}]{standalone}
2 | \usepackage{tikz}
3 |
4 |
5 | \usetikzlibrary{arrows,decorations.pathmorphing,backgrounds,positioning,fit,matrix}
6 | \usetikzlibrary{patterns}
7 | \usepackage{subcaption}
8 | \tikzstyle{hatch1}=[pattern=north west lines, pattern color=green]
9 | \tikzstyle{hatch2}=[pattern=north west lines, pattern color=orange]
10 | \tikzstyle{hatch3}=[pattern=north west lines, pattern color=purple]
11 | \tikzstyle{hatch4}=[pattern=north east lines, pattern color=blue]
12 | \tikzstyle{hatch5}=[pattern=north east lines, pattern color=yellow]
13 | \tikzstyle{hatch6}=[pattern=north east lines, pattern color=magenta]
14 | \usepackage[margin=1in]{geometry}
15 | \tikzstyle{nodeleg}=[right, midway, inner sep = 0.7cm]
16 |
17 | \begin{document}
18 | \begin{tikzpicture}
19 | \filldraw[step=1.0,black, thin, fill=red!20] (0., 0. ) grid (4, 1) rectangle(0, 0);
20 | \draw (0.5, 0.5 ) node {C};
21 | \draw (1.5, 0.5 ) node {D};
22 | \draw (2.5, 0.5 ) node {I};
23 | \draw (3.5, 0.5 ) node {T};
24 |
25 | \def\wpos{-0.3}
26 | \def\wneg{-1}
27 | \def\lab{-0.5}
28 |
29 | \draw (0.5, \wpos ) node {key2};
30 | \draw (1.5, \wpos ) node {key0};
31 | \draw (2.5, \wpos ) node {key1};
32 | \draw (3.5, \wpos ) node {key3};
33 | \draw (\lab, \wpos ) node [anchor=east, align=right]{Keys};
34 |
35 | % \draw (0.5, \wneg ) node {-4};
36 | % \draw (1.5, \wneg ) node {-3};
37 | % \draw (2.5, \wneg ) node {-2};
38 | % \draw (3.5, \wneg ) node {-1};
39 | % \draw (\lab, \wneg ) node [anchor=east, align=right]{Negative indexes};
40 | \end{tikzpicture}
41 | \end{document}
42 |
43 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | # Simple workflow for deploying static content to GitHub Pages
2 | name: Deploy static content to Pages
3 |
4 | on:
5 | # Runs on pushes targeting the default branch
6 | push:
7 | branches: ["master"]
8 |
9 | # Allows you to run this workflow manually from the Actions tab
10 | workflow_dispatch:
11 |
12 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
13 | permissions:
14 | contents: read
15 | pages: write
16 | id-token: write
17 |
18 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
19 | # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
20 | concurrency:
21 | group: "pages"
22 | cancel-in-progress: false
23 |
24 | jobs:
25 | # Single deploy job since we're just deploying
26 | deploy:
27 | environment:
28 | name: github-pages
29 | url: ${{ steps.deployment.outputs.page_url }}
30 | runs-on: ubuntu-latest
31 | steps:
32 | - name: Checkout
33 | uses: actions/checkout@v4
34 | with:
35 | lfs: 'true'
36 |
37 | - name: Update apt
38 | run: sudo apt update -yq
39 |
40 | - name: Install NetCDF and Latex
41 | run: sudo apt install -yq python3-pip
42 |
43 | - name: Install dependencies
44 | run: pip install -r requirements.txt
45 |
46 | - name: Convert Python into Notebook
47 | run: bash convert.sh
48 |
49 | - name: Create website
50 | run: jupyter-book build .
51 |
52 | - name: Setup Pages
53 | uses: actions/configure-pages@v5
54 |
55 | - name: Upload artifact
56 | uses: actions/upload-pages-artifact@v3
57 | with:
58 | # Upload entire repository
59 | path: '_build/html'
60 |
61 | - name: Deploy to GitHub Pages
62 | id: deployment
63 | uses: actions/deploy-pages@v4
64 |
--------------------------------------------------------------------------------
/misc/interpol.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.11.4
10 | # kernelspec:
11 | # display_name: Python 3 (ipykernel)
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Data interpolation
17 | #
18 | # Data interpolation is achieved by using the [xesmf](https://xesmf.readthedocs.io/en/latest/) library.
19 | #
20 | # It works easily with `xarray` and `dask` and therefore can manage parallel computation. As a start, let's try to interpolate our global `SST` temperature from the ORCA grid to a regular one.
21 | #
22 | # ## Reading of the SST
23 |
24 | # +
25 | import xarray as xr
26 | import numpy as np
27 | import xesmf as xe
28 |
29 | data = xr.open_dataset("data/surface_thetao.nc")
30 | data = data['thetao']
31 | data
32 | # -
33 |
34 | # ## Initialisation of the output grid
35 | #
36 | # Then, a `Dataset` object that contains the output grid must be created
37 |
38 | dsout = xr.Dataset()
39 | dsout['lon'] = (['lon'], np.arange(-179, 179 + 1))
40 | dsout['lat'] = (['lat'], np.arange(-89, 89 + 1))
41 | dsout
42 |
43 | # ## Renaming the input coordinates
44 | #
45 | # We also need to insure that the coordinates variables have the same names.
46 |
47 | data = data.rename({'nav_lon' : 'lon', 'nav_lat' : 'lat'})
48 | data
49 |
50 | # ## Creating the interpolator
51 | #
52 | # When this is done, the interpolator object can be created as follows:
53 |
54 | regridder = xe.Regridder(data, dsout, 'bilinear', ignore_degenerate=True, reuse_weights=False, periodic=True, filename='weights.nc')
55 | regridder
56 |
57 | # Note that the `ignore_degenerate` argument is necessary for handling the ORCA grid.
58 | #
59 | # ## Interpolating the data set
60 |
61 | dataout = regridder(data)
62 |
63 | # ## Comparing the results
64 | #
65 | # Let's display the original SST values for the first time-step
66 |
67 | # +
68 | import matplotlib.pyplot as plt
69 |
70 | mesh = xr.open_dataset("data/mesh_mask_eORCA1_v2.2.nc")
71 | lonf = mesh['glamf'].data[0]
72 | latf = mesh['gphif'].data[0]
73 |
74 | toplot = data.isel(time_counter=0, olevel=0).data
75 | cs = plt.pcolormesh(lonf, latf, toplot[1:, 1:], cmap=plt.cm.jet)
76 | cs.set_clim(-2, 30)
77 | # -
78 |
79 | toplot = dataout.isel(time_counter=0, olevel=0).data
80 | cs = plt.pcolormesh(dataout['lon'], dataout['lat'], toplot[1:, 1:], cmap=plt.cm.jet)
81 | cs.set_clim(-2, 30)
82 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Python training
2 |
3 | ## Author
4 |
5 | Nicolas Barrier, IRD, UMR Marbec ([www.nicolasbarrier.fr/](https://www.nicolasbarrier.fr/))
6 |
7 | ## How to use class materials
8 |
9 | Download and install [Anaconda](https://www.anaconda.com/products/individual) for **Python 3**.
10 |
11 | Create a Conda environment as follows:
12 |
13 | ```
14 | conda create --name python-training
15 | conda activate python-training
16 | ```
17 |
18 | When done, open a Terminal and install the libraries:
19 |
20 | ```
21 | conda install -c conda-forge -y netCDF4
22 | conda install -c conda-forge -y xarray
23 | conda install -c conda-forge -y dask
24 | conda install -c conda-forge -y pandas
25 | conda install -c conda-forge -y jupytext
26 | conda install -c conda-forge -y jupyter
27 | conda install -c conda-forge -y cartopy
28 | conda install -c conda-forge -y pyshp
29 | conda install -c conda-forge -y eofs
30 | conda install -c conda-forge -y nc-time-axis
31 | conda install -c conda-forge -y python-graphviz
32 | ```
33 |
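Alternatively, the libraries listed in the repository's `requirements.txt` file (the same file used by the GitHub Pages workflow) can be installed with pip:

```
pip install -r requirements.txt
```
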
34 | Now, navigate to the folder containing the training sessions and type:
35 |
36 | ```
37 | jupyter notebook &
38 | ```
39 |
40 | Then open/run/annotate the file at will.
41 |
42 | ## Program
43 |
44 | ### Introduction
45 | - Introduction (introduction/intro.py) (description, applications)
46 | - Getting started (introduction/start.py) (install, running)
47 | - Installing libraries (introduction/libinstall.py)
48 |
49 | ### Data types
50 | - Variable definition (data_types/vars.py)
51 | - Numerical variables (data_types/numerics.py)
52 | - List (data_types/list.py)
53 | - Dictionaries (data_types/dict.py)
54 | - Strings (data_types/strings.py)
55 | - Numpy arrays (data_types/nmp.py) (Scipy, Numpy libraries)
56 |
57 | ### Conditional statements, loops, functions (blocks)
58 | - Conditional statements (blocks/ifsta.py)
59 | - Loops (blocks/loops.py)
60 | - Functions (blocks/functions.py)
61 |
62 | ### Graphics ([Matplotlib](https://matplotlib.org/))
63 |
64 | - XY (plots/xy.py)
65 | - Contours (plots/contours.py)
66 | - Geometrical shapes (plots/geometrical_shapes.py)
67 | - Panels (plots/panels.py)
68 | - Quivers (plots/quivers.py)
69 | - Scatter plots (plots/scatters.py)
70 | - Text and maths (plots/text.py)
71 | - Axes management (plots/axes.py)
72 | - Configuration (plots/pyplot_settings.py)
73 |
74 | ### Object oriented programming
75 | - Class definition (oop/oop.py)
76 |
77 | ### IO
78 | - Text (io/text.py)
79 | - CSV (io/pand.py) (*pandas*)
80 | - NetCDF (io/xar.py) (*xarray*)
81 |
82 | ### Maps
83 | - Cartopy (maps/carto.py)
84 |
--------------------------------------------------------------------------------
/io/data/nao.txt:
--------------------------------------------------------------------------------
1 | Hurrell PC-Based North Atlantic Oscillation Index (DJFM)
2 | 1899. -0.69
3 | 1900. -1.32
4 | 1901. -0.37
5 | 1902. -0.69
6 | 1903. 1.33
7 | 1904. -0.01
8 | 1905. 0.83
9 | 1906. 0.83
10 | 1907. 1.11
11 | 1908. 0.56
12 | 1909. -0.50
13 | 1910. 0.43
14 | 1911. 0.47
15 | 1912. -0.43
16 | 1913. 1.40
17 | 1914. 0.47
18 | 1915. -0.70
19 | 1916. -0.22
20 | 1917. -1.18
21 | 1918. 0.03
22 | 1919. -0.87
23 | 1920. 1.58
24 | 1921. 0.69
25 | 1922. 0.59
26 | 1923. 0.36
27 | 1924. -0.65
28 | 1925. 1.09
29 | 1926. -0.31
30 | 1927. 0.50
31 | 1928. 0.16
32 | 1929. -0.59
33 | 1930. 0.42
34 | 1931. -0.68
35 | 1932. -0.09
36 | 1933. -0.10
37 | 1934. 0.97
38 | 1935. 0.07
39 | 1936. -1.94
40 | 1937. -0.15
41 | 1938. 0.54
42 | 1939. -0.13
43 | 1940. -1.94
44 | 1941. -1.23
45 | 1942. -1.07
46 | 1943. 0.62
47 | 1944. 0.52
48 | 1945. -0.08
49 | 1946. -0.27
50 | 1947. -1.86
51 | 1948. 0.26
52 | 1949. 1.02
53 | 1950. 0.48
54 | 1951. -0.93
55 | 1952. -0.14
56 | 1953. -0.01
57 | 1954. 0.15
58 | 1955. -1.25
59 | 1956. -1.04
60 | 1957. 0.04
61 | 1958. -1.26
62 | 1959. 0.10
63 | 1960. -1.81
64 | 1961. 0.74
65 | 1962. -0.92
66 | 1963. -1.29
67 | 1964. -0.88
68 | 1965. -0.81
69 | 1966. -1.56
70 | 1967. 0.62
71 | 1968. -0.15
72 | 1969. -2.57
73 | 1970. -1.15
74 | 1971. -0.66
75 | 1972. 0.12
76 | 1973. 1.20
77 | 1974. 0.37
78 | 1975. 0.74
79 | 1976. 1.22
80 | 1977. -1.34
81 | 1978. -0.55
82 | 1979. -1.44
83 | 1980. -0.35
84 | 1981. 0.29
85 | 1982. -0.21
86 | 1983. 1.29
87 | 1984. 0.43
88 | 1985. -0.80
89 | 1986. -0.01
90 | 1987. -0.66
91 | 1988. -0.17
92 | 1989. 2.46
93 | 1990. 1.91
94 | 1991. 0.56
95 | 1992. 1.76
96 | 1993. 1.76
97 | 1994. 0.87
98 | 1995. 1.29
99 | 1996. -1.44
100 | 1997. 0.71
101 | 1998. -0.01
102 | 1999. 0.66
103 | 2000. 1.61
104 | 2001. -1.01
105 | 2002. 0.62
106 | 2003. 0.05
107 | 2004. -0.12
108 | 2005. 0.36
109 | 2006. -0.90
110 | 2007. 1.16
111 | 2008. 1.06
112 | 2009. 0.04
113 | 2010. -2.57
114 | 2011. -0.68
115 | 2012. 1.60
116 | 2013. -1.68
117 | 2014. 0.91
118 | 2015. 1.87
119 | 2016. 1.00
120 | 2017. 0.89
121 | 2018. -0.16
122 | 2019. 0.76
123 |
--------------------------------------------------------------------------------
/plots/pyplot_settings.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Changing default Matplotlib settings
17 | #
18 | # In order to change the default Matplotlib settings (for instance default colors, default linewidth, etc.), the user needs to modify the values of the `matplotlib.pyplot.rcParams` dictionary.
19 | #
20 | # For a detailed description of all the customization options, visit the [matplotlib](https://matplotlib.org/3.1.3/tutorials/introductory/customizing.html) webpage.
21 |
22 | # +
23 | import matplotlib as mpl
24 | import matplotlib.pyplot as plt
25 | import numpy as np
26 |
27 | x = np.linspace(0, 2*np.pi, 200)
28 | y = [np.cos(x), np.sin(x), np.cos(2*x), np.cos(x/2.)]
29 | y = np.array(y).T
30 | # -
31 |
32 | plt.figure()
33 | plt.plot(x, y)
34 | plt.show()
35 |
36 | # ## In a script
37 | #
38 | # If the user wants to change the default values only within a given script, this is done as follows:
39 |
40 | from cycler import cycler # used to define color cycles
41 | plt.rcParams['lines.linewidth'] = 5
42 | plt.rcParams['axes.prop_cycle'] = cycler('color', ['darkorange', 'plum', 'gold'])
43 | plt.figure()
44 | plt.plot(x, y)
45 | plt.show()
46 |
47 | # ## Changing the parameters for all sessions
48 | #
49 | # Default matplotlib parameters are stored in a
50 | # [matplotlibrc](https://matplotlib.org/3.1.3/tutorials/introductory/customizing.html#the-matplotlibrc-file) configuration file. Matplotlib searches for this file in three different locations, in the following order:
51 | #
52 | # 1. in the current working directory (usually used for specific customizations that you do not want to apply elsewhere)
53 | # 2. `$MATPLOTLIBRC` if it is a file, else `$MATPLOTLIBRC/matplotlibrc`
54 | # 3. In `.config/matplotlib/matplotlibrc` (Linux) or `.matplotlib/matplotlibrc` (other platforms)
55 | #
56 | # If no `matplotlibrc` file is found, the default values are used. The path of the `matplotlibrc` file used in a given session can be obtained with the `matplotlib_fname` function.
57 |
58 | print(mpl.matplotlib_fname())
59 |
60 | # To change your Matplotlib default parameters, download the [sample matplotlibrc](https://matplotlib.org/3.1.3/tutorials/introductory/customizing.html#matplotlibrc-sample) file and put it in any of the three directories described above.
61 | #
62 | # Then, uncomment the lines you are interested in and change the values.
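#
# As a minimal sketch, a customized `matplotlibrc` could contain lines such as the following (these keys exist in the sample file; the values are purely illustrative):
#
# ```
# lines.linewidth : 3.0
# axes.grid       : True
# font.size       : 12.0
# ```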
63 |
--------------------------------------------------------------------------------
/data_types/numerics.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Numerics
17 | #
18 | # ## Comparisons
19 | #
20 | # The following expressions can be used to compare numerical values (source: [python3](https://docs.python.org/3/library/stdtypes.html))
21 | #
22 | # | Python expression | Meaning
23 | # |:-----------------:|:-----------------------:
24 | # | ```not(a)``` | not a
25 | # | ```a == b``` | a equal b
26 | # | ```a != b``` | a not equal b
27 | # | ```a & b``` | a and b
28 | # | ```a \| b``` | a or b
29 | # | ```a >= b``` | a greater equal b
30 | # | ```a > b``` | a greater b
31 | # | ```a <= b``` | a less equal b
32 | # | ```a < b``` | a less b
33 | #
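#
# As a quick illustration of some of these comparison expressions (a minimal sketch with arbitrary values):

a = 13
b = 5
print(a == b)        # False
print(a != b)        # True
print(a > b)         # True
print(not(a == b))   # True
print((a > 0) & (b > 0))  # True: both conditions hold
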
34 | # ## Operations
35 | #
36 | # Binary operations are listed below (source: [python3](https://docs.python.org/3/library/stdtypes.html))
37 | #
38 | # |Python expression | Meaning
39 | # |:------------------------------:|:-------------------------------------:
40 | # | ```x + y``` | sum of x and y
41 | # | ```x - y``` | difference of x and y
42 | # | ```x * y``` | product of x and y
43 | # | ```x / y``` |quotient of x and y
44 | # | ```x // y``` |floored quotient of x and y
45 | # | ```x % y``` |remainder of x / y
46 | # | ```-x``` | x negated
47 | # | ```abs(x)``` | absolute value or magnitude of x
48 | # | ```complex(re, im)``` | a complex $re + i\times im$
49 | # | ```c.conjugate()``` | conjugate of the complex number c
50 | # | ```divmod(x, y)``` | the pair ```(x // y, x % y)```
51 | # | ```pow(x, y)``` | x to the power y
52 | # | ```x ** y``` | x to the power y
53 |
54 | x = 13
55 | y = 5
56 |
57 | print(x / y) # division
58 | print(x // y) # floored quotient
59 | print(x % y) # remainder
60 | print(divmod(x, y))
61 |
62 | print(pow(x, y))
63 | print(x**y)
64 |
65 | # +
66 | c = complex(x, y)
67 | print(c)
68 | print(c.real)
69 | print(c.imag)
70 |
71 | cc = c.conjugate()
72 | print(cc)
73 |
74 | print(c * cc)
75 | print(abs(c))
76 |
77 | # +
78 | x = 11
79 | print(x)
80 |
81 | # x = x + 1
82 | x += 1
83 | print(x)
84 |
85 | x *= 2
86 | # x = x * 2
87 | print(x)
88 |
89 | # x = x / 3
90 | x /= 3
91 | print(x)
92 |
93 | # x = x - 2
94 | x -= 2
95 | print(x)
96 |
--------------------------------------------------------------------------------
/io/text.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Text files
17 | #
18 | # ## Opening files
19 | #
20 | # To open a text file, use the `with` statement. It ensures that the file is properly opened and properly closed, even if an error is encountered (see [the-with-statement](https://docs.python.org/3/reference/compound_stmts.html#the-with-statement) for a detailed description).
21 |
22 | # +
23 | # defining the name of the file to read
24 | filename = 'data/nao.txt'
25 |
26 | with open(filename,'r') as f:
27 | print(f.closed)
28 | print(f.closed)
29 | # -
30 |
31 | # ## Reading
32 |
33 | # read all the lines and store them in a list
34 | with open(filename,'r') as f:
35 | lines = f.readlines()
36 | print(len(lines))
37 | print('@%s@' %lines[0])
38 | print('#%s#' %lines[0].strip()) # removes the \n char
39 |
40 | # Read all the file and store as 1 string
41 | with open(filename,'r') as f:
42 | data = f.read()
43 | print(len(data))
44 | print(data[:10])
45 |
46 | # Read the first 200 chars of the file and store as 1 string
47 | with open(filename,'r') as f:
48 | data = f.read(200)
49 | print(len(data))
50 | print(data)
51 |
52 | # loop over all the lines of the file, printing each one
53 | # the loop stops when the end of the file is reached
54 | with open(filename,'r') as f:
55 | for line in f:
56 | print(line.strip())
57 |
58 | # to parse a certain range of the file, one
59 | # way is to use the itertools package
60 | import itertools
61 | with open(filename,'r') as f:
62 | for line in itertools.islice(f, 5, 10):
63 | print(line.strip())
64 |
65 | # ## Writing
66 | #
67 | # Files are written out line by line.
68 |
69 | # +
70 | # Generates some data
71 | import numpy as np
72 |
73 | xdata = np.linspace(0, np.pi/4., 5)
74 | cosx = np.cos(xdata)
75 | sinx = np.sin(xdata)
76 | tanx = np.tan(xdata)
77 | # -
78 |
79 | # opening the file
80 | with open('data/outfile.txt', 'w') as fout:
81 |
82 |     # writing the header: 4 strings separated by tabs.
83 | header = ['x', 'cos', 'sin', 'tan']
84 | string='%s\t%s\t%s\t%s\n' % (header[0], header[1], header[2], header[3]) # writes the header
85 | print(string)
86 | fout.write(string)
87 |
88 |
89 | # looping over all the data
90 | for x, c, s, t in zip(xdata, cosx, sinx, tanx):
91 |         # writing the string associated with the data
92 | string = '%.4f\t%.8f\t%.8f\t%.8f\n' %(x, c, s, t) # writes the data
93 | print(string)
94 | fout.write(string)
95 |
--------------------------------------------------------------------------------
/blocks/ifsta.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Code blocks
17 | #
18 | # In Python, **conditional statements, functions and loops** are defined within **code blocks**, which have in common:
19 | #
20 | # - The block definition must end with `:`
21 | # - The code inside the block must be *indented to the right*
22 | # - When leaving the block, the code *must be indented to the left*
23 | #
 24 | # **There are no begin/end statements; everything is handled by the indentation.**
25 | #
26 | #
27 | # Common errors
28 | #
 29 | # Indentation issues are very common at the beginning, so be careful.
30 | #
31 | #
32 | #
33 | #
34 | # Text Editor issues
35 | #
36 | # With some text editors, the tab keys would make your code incompatible with other systems.
37 | # Use 4 spaces instead.
38 | #
39 | #
40 | # # Conditional statements
41 | #
42 | # ## Definition
43 | #
 44 | # Conditional statements allow you to perform predefined actions depending on certain conditions.
45 | #
46 | # ## Defining conditional statements
47 | #
48 | # The general structure of a conditional statement is:
49 | #
50 | # ```
51 | # if(cond1):
52 | # action1
53 | # elif(cond2):
54 | # action2
55 | # else:
56 | # action3
57 | # ```
58 |
59 | # +
60 | x = -10
61 | y = -11
62 | z = -12
63 |
 64 | # Careful with the indentation
65 | if ((x==y) & (x==z)):
66 | print('Equality')
67 | elif ((x <= y) & (y <= z)):
68 | print('Increasing order')
69 | elif((x >= y) & (y >= z)):
70 | print('Decreasing order')
71 | else:
72 | print('No order')
73 | # -
74 |
 75 | # The use of brackets is not always necessary:
76 |
77 | # +
78 | x = ['a', 'b', 'c']
79 |
80 |
81 | if 'a' in x:
82 | print('a in list')
83 | else:
84 | print('a not in list')
85 |
86 | if ('z' in x):
87 | print('z in list')
88 | else:
89 | print('z not in list')
90 | # -
91 |
92 | # There is the possibility to write conditional statements in one line:
93 |
94 | # possibility to write if statements in one line
95 | print('a in list') if 'a' in x else print('a not in list')
96 | print('z in list') if 'z' in x else print('z not in list')
97 | if('a' in x): print('match')
98 |
 99 | # For nested conditions, the code needs to be indented one extra time. **Again, be careful with the indentation.**
100 |
101 | x = y = -10
102 | if(x == y):
103 | # starts a new block with new conditional testing
104 | if(x > 0):
105 | print('x == y and positive')
106 | else:
107 | print('x == y and negative')
108 |
109 |
--------------------------------------------------------------------------------
/introduction/intro.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Introduction
17 | #
18 | # ## What is Python?
19 | #
20 | # Python is an interpreted, object-oriented, high-level programming language with dynamic semantics. Source: [python.org](https://www.python.org/doc/essays/blurb/)
21 | #
22 | # - *interpreted*: no compilation (contrary to Java, C/C++, Fortran)
23 | # - *object-oriented*: based on the concept of objects, associated with attributes (object's data) and methods (object's procedures).
24 | # - *high-level*: close to human languages (contrary to C, which is a middle-level language)
25 | # - *dynamic semantics*: no variable declarations, and the possibility to mix data types.
26 | #
27 | # ## Why use Python?
28 | #
29 | # The main advantages of using Python are:
30 | #
31 | # - Presence of third-party modules ([Python Package Index](https://pypi.org/))
32 | # - Extensive support libraries ([NumPy](https://numpy.org/) for numerical calculations, [Pandas](https://pandas.pydata.org/) for data analytics)
33 | # - Open source and community development
34 | # - Easy to learn
35 | # - User-friendly data structures (lists, dictionaries, etc.)
36 | # - Interactive (`ipython`, `notebook`)
37 | # - Portable across Operating Systems
38 | #
39 | # Source: [geeksforgeeks.org](https://www.geeksforgeeks.org/python-language-advantages-applications/).
40 | #
41 | # ## Applications
42 | #
43 | # The main applications are:
44 | #
45 | # - Web Development: creation of web frameworks ([Django](https://www.djangoproject.com/), [Flask](https://www.palletsprojects.com/p/flask/))
46 | # - Machine learning ([scikit-learn](https://scikit-learn.org/), [TensorFlow](https://www.tensorflow.org/))
47 | # - Data analysis ([scipy](https://www.scipy.org/), [pandas](https://pandas.pydata.org/), [xarray](http://xarray.pydata.org/en/stable/))
48 | # - Data visualization ([matplotlib](https://matplotlib.org/), [basemap](https://matplotlib.org/basemap/), [PyNgl](https://www.pyngl.ucar.edu/), [cartopy](https://scitools.org.uk/cartopy/docs/latest/))
49 | # - Documentation ([Sphinx](https://www.sphinx-doc.org/en/master/), [Read the Docs](https://readthedocs.org/))
50 | # - Data download ([cdsapi](https://pypi.org/project/cdsapi/) for *Climate Data Store*, [motuclient](https://pypi.org/project/motuclient/) for *Copernicus Marine Products*)
51 | #
52 | # Source: [freecodecamp.org](https://www.freecodecamp.org/news/what-can-you-do-with-python-the-3-main-applications-518db9a68a78/)
53 | #
54 | # ## They use Python
55 | #
56 | # Famous Python users:
57 | #
58 | # - Dropbox: desktop client
59 | # - Instagram: website
60 | # - Netflix, Spotify: analytics for suggestions
61 | # - Facebook: 21% of the codebase
62 | # - Google: *Python where we can, C++ where we must*
63 | #
64 | # Source: [netguru.com](https://www.netguru.com/blog/8-top-companies-that-use-python-for-their-apps-examples-of-top-notch-python-applications)
65 |
--------------------------------------------------------------------------------
/misc/shapefiles.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Reading shapefiles
17 | #
18 | # ## Downloading the file
19 | #
20 | # First, we will download a sample shape file:
21 |
22 | # +
23 | import wget
24 | import zipfile
25 | import os.path
26 |
27 | if not os.path.isfile('data/IPBES_Regions_Subregions2.shp'):
28 | url = 'https://zenodo.org/record/3928281/files/ipbes_regions_subregions_shape_1.1.zip'
29 | wget.download(url, 'data/ipbes_regions_subregions_shape_1.1.zip')
30 | with zipfile.ZipFile("data/ipbes_regions_subregions_shape_1.1.zip","r") as zip_ref:
31 | zip_ref.extractall("data")
32 | # -
33 |
34 | # ## Reading the shapefile
35 |
36 | import shapefile as pyshp
37 | import numpy as np
38 | import matplotlib.pyplot as plt
39 | import cartopy.crs as ccrs
40 | import cartopy.feature as cfeature
41 |
42 | data = pyshp.Reader('data/IPBES_Regions_Subregions2.shp', encoding='ISO8859-1')
43 | data
44 |
45 | shapes = data.shapes()
46 |
47 | nshapes = len(shapes)
48 |
49 | single = shapes[0]
50 |
51 | single.shapeType
52 |
53 | single.shapeTypeName
54 |
55 | parts = single.parts
56 | parts
57 |
 58 | points = np.array(single.points)  # shape (npoints, 2): x and y coordinates
59 | points.shape
60 |
61 | fields = data.fields
62 | fields
63 |
64 | records = data.records()
65 | singlerec = records[4]
66 | singlerec
 67 | i = 0  # will hold the index of the 'France' record
 68 | for a in records:
 69 |     name = a[2]  # record field holding the area name
 70 |     if(name == 'France'):
 71 |         break
 72 |     i += 1
73 |
74 | # +
75 | ax = plt.axes(projection=ccrs.PlateCarree())
76 |
77 | cmap = plt.cm.jet
78 |
79 | # get the shapes and extract the points
80 | single = shapes[i]
81 | points = np.array(single.points)
82 | npoints = len(points)
83 |
84 | # get the number of parts
85 | parts = list(single.parts)
86 | nparts = len(parts)
87 |
88 | # get the record
89 | singlerec = records[i]
90 | xmin, xmax = points[:, 0].min(), points[:, 0].max()
91 | ymin, ymax = points[:, 1].min(), points[:, 1].max()
92 |
93 | if nparts == 1:
94 | plt.plot(points[:, 0], points[:, 1])
95 | else:
96 | # if parts does not start with 0:
97 | # we add 0 at the beginning of the list
98 | if parts[0] != 0:
99 | parts = [0] + parts
100 | # if parts does not end with npoints
101 | # it is added at the end.
102 | if parts[-1] != npoints:
103 | parts = parts + [npoints]
104 | nparts = len(parts) - 1
105 | for p in range(nparts):
106 | # get the colour
107 | color = cmap(p / (nparts - 1))
108 | start = parts[p]
109 | end = parts[p + 1]
110 | iii = range(start, end)
111 | plt.plot(points[iii, 0], points[iii, 1], color=color, transform=ccrs.PlateCarree(), linewidth=2)
112 | #l = ax.add_feature(cfeature.LAND)
113 | #l = ax.add_feature(cfeature.COASTLINE)
114 | ax.set_extent([xmin, xmax, ymin, ymax])
115 | plt.show()
116 | # -
117 |
118 |
--------------------------------------------------------------------------------
/misc/mean_sst_dask.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3 (ipykernel)
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Computation of global mean SST
17 | #
18 | # In this example, we illustrate possible bad choices of chunks when computing the horizontal-mean SST time-series and the time-averaged SST.
19 |
20 | # ## Extraction of data
21 | #
22 | # First, the SST data is extracted, as well as the cell surfaces.
23 |
24 | from dask.diagnostics import Profiler, ResourceProfiler, CacheProfiler, visualize
25 | import xarray as xr
26 | import matplotlib.pyplot as plt
27 |
28 | data = xr.open_dataset('data/surface_thetao.nc')
29 | data = data.isel(olevel=0)
30 |
31 | mesh = xr.open_dataset('data/mesh_mask_eORCA1_v2.2.nc')
32 | mesh = mesh.isel(z=0, t=0)
33 |
34 | surface = mesh['e2t'] * mesh['e1t']
35 | surface
36 |
37 | # ## Bad choice of chunks
38 | #
39 | # First, let's try dividing our dataset into spatial tiles:
40 |
41 | chunk = {'x': 150, 'y':100}
42 | thetao = data['thetao'].chunk(chunk)
43 | thetao
44 |
45 | # The surface array is also decomposed into the same chunks
46 |
47 | surfbis = surface.chunk(chunk)
48 | surfbis
49 |
50 | # Now, the mean time-series is computed by weighting with the cell surface:
51 |
52 | tmean = (thetao * surfbis).sum(dim=['x', 'y']) / surfbis.sum(dim=['x', 'y'])
53 |
54 | # %%time
55 | with Profiler() as prof, ResourceProfiler(dt=0.25) as rprof, CacheProfiler() as cprof:
56 | tmean.compute()
57 |
58 | visualize([prof, rprof, cprof], show=False)
59 |
60 | tmean.data.visualize()
61 |
62 | l = tmean.plot()
63 |
64 | # Now, the time-average map is computed:
65 |
66 | time_mean = thetao.mean(dim='time_counter')
67 |
68 | # %%time
69 | with Profiler() as prof, ResourceProfiler(dt=0.25) as rprof, CacheProfiler() as cprof:
70 | time_mean.compute()
71 |
72 | visualize([prof, rprof, cprof], show=False)
73 |
74 | time_mean.data.visualize()
75 |
76 | l = time_mean.plot(robust=True, cmap=plt.cm.jet)
77 |
78 | # ## Better choice of chunks
79 | #
80 | # The performance in the above is disappointing. This is due to a bad chunking choice. The SST is now chunked along the time dimension only.
81 |
82 | chunk = {'time_counter': 70}
83 | thetao = data['thetao'].chunk(chunk)
84 | thetao
85 |
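# To check the layout, the chunk sizes can be inspected directly on the chunked array
# (a quick sketch, not in the original notebook):

# chunk sizes along each dimension of the DataArray
print(thetao.chunks)

# number of dask blocks along each dimension
print(thetao.data.numblocks)
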
86 | tmean = (thetao * surface).sum(dim=['x', 'y']) / surface.sum(dim=['x', 'y'])
87 |
88 | # %%time
89 | with Profiler() as prof, ResourceProfiler(dt=0.25) as rprof, CacheProfiler() as cprof:
90 | tmean.compute()
91 |
92 | visualize([prof, rprof, cprof], show=False)
93 |
94 | tmean.data.visualize()
95 |
96 | l = tmean.plot()
97 |
98 | time_mean = thetao.mean(dim='time_counter')
99 |
100 | # %%time
101 | with Profiler() as prof, ResourceProfiler(dt=0.25) as rprof, CacheProfiler() as cprof:
102 | time_mean.compute()
103 |
104 | visualize([prof, rprof, cprof], show=False)
105 |
106 | time_mean.data.visualize()
107 |
108 | l = time_mean.plot(robust=True, cmap=plt.cm.jet)
109 |
110 |
111 |
--------------------------------------------------------------------------------
/plots/geometrical_shapes.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Geometrical forms
17 | #
18 | # The drawing of geometrical forms is achieved by using the objects of the [matplotlib.patches](https://matplotlib.org/3.1.1/api/patches_api.html) module.
19 | #
20 | # Once a `Patch` object is created, it must be added to the current axes by using the `add_artist` method.
21 | #
22 | #
23 | # Warning! The limits of the axes must be set manually using the `set_xlim` and `set_ylim` methods.
24 | #
25 |
26 | import numpy as np
27 | import matplotlib.patches as patch
28 | import matplotlib.pyplot as plt
29 |
30 | # ## Polygons
31 | #
32 | # A polygon can be drawn by providing a `(N, 2)` array containing the X and Y coordinates.
33 |
34 | # +
35 | px = np.array([-21.065, -34.685, -35.459, -29.887, -29., -29, -9])
36 | py = np.array([64.86, 56.189, 53.036, 52.011, 43., 43,52])
37 | xy = np.transpose(np.array([px, py])) # 7 x 2
38 |
39 | fig = plt.figure()
40 | ax = plt.gca()
41 | ax.set_aspect('equal')
42 | p = patch.Polygon(xy, closed=True,
43 | fill=True, hatch='/', alpha=0.4, facecolor='c',
44 | edgecolor='r', linewidth=2)
45 | ax.set_xlim(px.min(), px.max())
46 | ax.set_ylim(py.min(), py.max())
47 | ax.add_artist(p)
48 | plt.show()
49 | # -
50 |
51 | # ## Ellipse
52 |
53 | xy_center = [0, 0]
54 | fig = plt.figure()
55 | ax = plt.gca()
56 | ax.set_aspect('equal')
57 | plt.plot(xy_center[0], xy_center[1], marker='o', color='k', linestyle='none')
58 | p = patch.Ellipse(xy_center, height=5, width=10, angle=45,
59 | fill=True, linewidth=2)
60 | ax.set_xlim(-10, 10)
61 | ax.set_ylim(-5, 5)
62 | ax.add_artist(p)
63 | plt.show()
64 |
65 | # ## Circles
66 |
67 | # +
68 | xy_center = [0, 0]
69 |
70 | plt.figure()
71 | ax = plt.gca()
72 | ax.set_aspect('equal', 'box')
73 | plt.plot(xy_center[0], xy_center[1], marker='o', color='k', linestyle='none')
74 | p = patch.Circle(xy_center, radius=5, fill=True, color='Gold')
75 | ax.add_artist(p)
76 | ax.set_xlim(-5, 5)
77 | ax.set_ylim(-5, 5)
78 | plt.show()
79 | # -
80 |
81 | # ## Rectangles
82 |
83 | # +
84 | xy_center = [0, 0]
85 |
86 | plt.figure()
87 | ax = plt.gca()
88 | ax.set_aspect('equal')
89 | plt.plot(xy_center[0], xy_center[1], marker='o', color='k', linestyle='none')
90 | # here, the xy coordinate is the lower-left corner of the rectangle
91 | p = patch.Rectangle(xy_center, height=5, width=10, angle=0,
92 | fill=True, linewidth=2)
93 | plt.xlim(0, 10)
94 | plt.ylim(0, 5)
95 | ax.add_artist(p)
96 | plt.show()
97 | # -
98 |
99 | # ## Arcs
100 |
101 | # +
102 | xy_center = [0, 0]
103 |
104 | plt.figure()
105 | ax = plt.gca()
106 | ax.set_aspect('equal')
107 | # arcs cannot be filled
108 | plt.plot(xy_center[0], xy_center[1], marker='o', color='k', linestyle='none')
109 | hyp = np.sqrt(10**2 + 20**2)
110 | p = patch.Arc(xy_center, height=10, width=20, angle=0, theta1=0, theta2=180, linewidth=2)
111 | plt.xlim(-10, 10)
112 | plt.ylim(0, 5)
113 | ax.add_artist(p)
114 | plt.show()
115 |
--------------------------------------------------------------------------------
/introduction/start.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Getting started
17 | #
18 | # ## Python Install
19 | #
20 | # ### Anaconda
21 | #
22 | # It is strongly advised to install Python by using [Anaconda](https://www.anaconda.com/):
23 | #
24 | # - Ready-to-go Python, with the main libraries installed (Numpy, Scipy, Matplotlib)
25 | # - Possibility to create multiple environments with different versions of Python and packages ([conda](https://conda.io/en/latest/)).
26 | #
27 | # In practice:
28 | #
29 | # - Download the distribution corresponding to your system (cf. [Download](https://www.anaconda.com/distribution/#download-section))
30 | # - Install it in a place where you have read and write access.
31 | #
32 | #
33 | #
34 | #
35 | # ## Running Python
36 | #
37 | # ### Python console
38 | #
39 | # To run Python in normal mode, type in a terminal:
40 | #
41 | # ```
42 | # python
43 | # ```
44 | #
45 | #
46 | #
47 | # ### Interactive Python console
48 | #
49 | # To run Python in interactive mode, type in a terminal:
50 | #
51 | # ```
52 | # ipython
53 | # ```
54 | #
55 | #
56 | #
57 | # ### Spyder (IDE)
58 | #
59 | # To run the Python IDE, type in a terminal:
60 | #
61 | # ```
62 | # spyder &
63 | # ```
64 | #
65 | #
66 | #
67 | #
68 | # ### Jupyter Notebook
69 | #
70 | # To run the Jupyter Notebook, type in a terminal:
71 | #
72 | # ```
73 | # jupyter notebook &
74 | # ```
75 | #
76 | #
77 |
78 | # ## Running scripts
79 | #
80 | # Open a text editor and type in:
81 | #
82 | # ```
83 | # import sys
84 | #
85 | # # my first program (comment)
86 | # print('hello ', sys.argv)
87 | # ```
88 | #
89 | # Save as ```hello.py```
90 | #
91 | # ### Running using python
92 | #
93 | # From the terminal type:
94 | #
95 | # ```
96 | # python hello.py arg1 arg2 arg3
97 | # ```
98 | #
99 | # You should see:
100 | #
101 | # ```
102 | # hello ['hello.py', 'arg1', 'arg2', 'arg3']
103 | # ```
104 | #
105 | #
106 | # Note: The `sys.argv` statement returns the list of arguments, with the first element being the name of the script (a small example is sketched below).
107 | #
108 | #
109 | #
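# For illustration, here is a small variant of `hello.py` (saved, say, as `sum_args.py`; the
# file name is only an example) that converts its arguments to numbers before using them:
#
# ```
# # sum_args.py: sum the numeric command-line arguments
# import sys
#
# # sys.argv[0] is the script name; the remaining elements are the arguments
# numbers = [float(a) for a in sys.argv[1:]]
# print('sum =', sum(numbers))
# ```
#
# Running `python sum_args.py 1 2 3` should print `sum = 6.0`.
#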
110 | # ### Running using ipython
111 | #
112 | # Open `ipython` from the terminal, then type:
113 | #
114 | # ```
115 | # run hello.py arg1 arg2 arg3
116 | # ```
117 | #
118 | # To check the environment, type `whos`. You should see:
119 | #
120 | # ```
121 | # In [2]: whos
122 | # Variable Type Data/Info
123 | # ------------------------------
124 | # sys module
125 | # ```
126 | #
127 | # ### Running from Spyder
128 | #
129 | # Open `spyder`, open the file and click on the **Run -> Configuration per file** menu. Add arguments to the program as follows:
130 | #
131 | #
132 | #
133 | # Then, click on the **Run file** button to run the whole program, or on the **Run selection** button to run the current line or selection.
134 | #
135 | # **Run file button:**
136 | #
137 | #
138 | #
139 | # **Run selection button:**
140 | #
141 | #
142 |
--------------------------------------------------------------------------------
/plots/text.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Text display
17 |
18 | # +
19 | import matplotlib.pyplot as plt
20 | import numpy as np
21 | import matplotlib as mp
22 |
23 | plt.rcParams['text.usetex'] = False
24 |
25 | x = np.linspace(0, 2*np.pi, 100)
26 | y = np.cos(x)
27 | # -
28 |
29 | # ## Text in data coordinates
30 | #
31 | # Text in data coordinates is written by using the [text](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.text.html) method.
32 |
33 | plt.figure()
34 | ax = plt.gca()
35 | plt.plot(x, y)
36 | # text in data coordinates
37 | plt.text(np.pi, 0.5, 'text ex', fontsize=20,
38 | ha='center', va='center')
39 | plt.show()
40 |
41 | # ## Text in figure coordinates
42 | #
43 | # Text in figure coordinates is written by using the [figtext](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.figtext.html) method.
44 |
45 | # +
46 | plt.figure()
47 | ax = plt.gca()
48 | plt.plot(x, y)
49 | # text in figure coordinates (ranging from 0 to 1)
50 |
51 | from matplotlib.font_manager import FontProperties
52 | myfont = FontProperties()
53 | myfont.set_family('monospace')
54 |
55 | plt.figtext(0.5, 0.5, 'text ex', fontsize=20,
56 | ha='center', va='center', fontproperties=myfont)
57 | plt.show()
58 | # -
59 |
60 | # ## Bounding box
61 | #
62 | # To enclose your text within a fancy box, set the `bbox` argument, which is a dictionary of box settings.
63 |
64 | # +
65 | # property for bounding box
66 | bbox_prop = dict(boxstyle="rarrow, pad=0.0", fc="cyan", ec="b", lw=2)
67 |
68 | plt.figure()
69 | ax = plt.gca()
70 | plt.plot(x, y)
71 | # text in data coordinates
72 | plt.text(np.pi, 0.5, 'text ex', fontsize=20, family='monospace',
73 | ha='center', va='center', rotation=45, bbox=bbox_prop)
74 | plt.show()
75 | # -
76 |
77 | # ## Changing fonts
78 | #
79 | # To change the font property, set the `fontproperties` argument, which takes a `matplotlib.font_manager.FontProperties` object.
80 |
81 | # +
82 | import matplotlib.font_manager as mpfm
83 |
84 | plt.figure()
85 | ax = plt.gca()
86 | plt.plot(x, y)
87 |
88 | myfont = mpfm.FontProperties()
89 | myfont.set_family('monospace')
90 |
91 | plt.figtext(0.5, 0.5, 'text ex', fontsize=20,
92 | ha='center', va='center', fontproperties=myfont)
93 |
94 | plt.show()
95 | # -
96 |
97 | # ## Mathematical text
98 | #
99 | # To add mathematical formulae, put your text between `$` symbols.
100 |
101 | plt.figure()
102 | ax = plt.gca()
103 | plt.plot(x, y)
104 | plt.text(np.pi, 0.5, r'$\sum_{i=0}^\infty x_i$', fontsize=20,
105 | ha='center', va='center')
106 | plt.show()
107 |
108 | # You can also use TeX rendering by setting to True the `text.usetex` key of the Matplotlib parameters.
109 | #
110 | #
111 | # Warning! To use TeX rendering, latex and dvipng need to be installed.
112 | #
113 | #
114 | # Note that in case of TeX rendering, mathematical formulae start with `$\displaystyle` and end with `$`.
115 |
116 | # +
117 | plt.rcParams['text.usetex'] = True
118 |
119 | plt.figure()
120 | ax = plt.gca()
121 | plt.plot(x, y)
122 | plt.text(np.pi, 0.5, r'$\displaystyle \sum_{i=0}^\infty x_i$', fontsize=20,
123 | ha='center', va='center')
124 | plt.show()
125 |
--------------------------------------------------------------------------------
/data_types/vars.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Variables
17 | #
18 | # ## Variable assignments
19 | #
20 | # Python is a *dynamically* bound and typed language, contrary to C/C++, Java and Fortran, which are *statically* bound and typed (source: [pythonconquerstheuniverse](https://pythonconquerstheuniverse.wordpress.com/2009/10/03/static-vs-dynamic-typing-of-programming-languages/))
21 | #
22 | # **Static typing:**
23 | #
24 | #
25 | #
26 | # **Dynamic typing:**
27 | #
28 | #
29 | #
30 | # Therefore, one variable name can be reused for different objects. Variable assignment is done with the ```=``` sign:
31 |
32 | x = 1
33 | type(x)
34 |
35 | x = 2.3 # float
36 | type(x)
37 |
38 | x = 'string' # string
39 | type(x)
40 |
41 | # ## Variables as objects
42 | #
43 | # Python is object-oriented. Therefore, each assigned variable is an object. Information about an object is accessible via the `type` function:
44 |
45 | type(x) # class of the object
46 |
47 | # The list of attributes and methods associated with the object is accessible via the `dir` function.
48 |
49 | dir(x) # list of methods/attributes
50 |
51 | # ### Object's attribute
52 | #
53 | # An object's attribute is a piece of data associated with the object.
54 | #
55 | # To obtain an object's attribute, the general syntax is `object.attribute`. For instance, multidimensional arrays, which are `numpy.array` objects, have the following attributes:
56 |
57 | import numpy as np
58 | x = np.array([0, 1, 2, 3, 4, 5, 6])
59 |
60 | x.dtype
61 |
62 | x.ndim
63 |
64 | x.shape
65 |
66 | # ### Object's method
67 | #
68 | # Methods are functions that are associated with an object, which use and possibly modify the object's attributes.
69 | #
70 | # To call an object's method, the general syntax is `object.method(arg1, arg2, ...)`. For instance, to compute the mean of the `numpy.array` defined in the above:
71 |
72 | m = x.mean(keepdims=True)
73 | m
74 |
75 | s = x.std()
76 | s
77 |
78 | # To get some help about a method or a function, use the `help` function:
79 |
80 | help(x.mean)
81 |
82 | # ### Method vs. function
83 | #
84 | # It should be noted that an object's methods are not called in the same way as a module's functions. For instance, there are two ways to compute the mean of a numpy array.
85 | #
86 | # It can be done by using the `mean` *method* of the `x` object:
87 |
88 | x.mean()
89 |
90 | # Or by using the `mean` *function* of the `numpy` module applied on the `x` object:
91 |
92 | np.mean(x)
93 |
94 | #
95 | # Note: In this case, the numpy function simply calls the object's method.
96 | #
97 |
98 | # ## Transtyping
99 |
100 | # To convert an object to another type:
101 |
102 | # convert the numpy array to a list
103 | xlist = list(x)
104 | type(xlist)
105 |
106 | # ## Testing object's class
107 | #
108 | # A useful function is `isinstance`, which determines whether an object belongs to a given class (or to one of a tuple of classes).
109 |
110 | x = [0, 1, 2]
111 | print(isinstance(x, (tuple)))
112 | print(isinstance(x, (list, tuple)))
113 |
114 | # ## Object's types: mutable and immutable
115 | #
116 | # There are **two** main categories of objects (source: [geeksforgeeks](https://www.geeksforgeeks.org/mutable-vs-immutable-objects-in-python/)):
117 | # - *Mutable objects*: Can change their contents: **list, dict, set and custom objects** (*numpy.arrays* for instance)
118 | # - *Immutable objects*: Can't change their contents: **int, float, bool, string, unicode, tuple**
119 | #
120 | # For instance, the following statement will raise an error:
121 |
122 | x = 'string'
123 | # x[0] = 1
124 |
125 | # since strings are immutable, even though they otherwise behave much like lists.
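#
# As a quick illustration (this sketch is not part of the original notebook), mutating a
# list works, while the same operation on a string raises a `TypeError`:

# +
y = [0, 1, 2]
y[0] = 10       # works: lists are mutable
print(y)

s = 'string'
try:
    s[0] = 'S'  # fails: strings are immutable
except TypeError as err:
    print(err)
# -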
126 |
--------------------------------------------------------------------------------
/plots/axes.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.11.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Managing axes
17 | #
18 | # In this section, some features about axes manipulation will be shown. First, let's create some data.
19 |
20 | # +
21 | import matplotlib.pyplot as plt
22 | import matplotlib as mp
23 | import numpy as np
24 |
25 | x = np.linspace(0, 2*np.pi, 30)
26 | y = np.sin(x)
27 | # -
28 |
29 | # ## Changing the plot limits
30 |
31 | # +
32 | # Setting the limits of the figure
33 | fig = plt.figure()
34 | ax = plt.gca()
35 | plt.plot(x, y)
36 | plt.xlim(x.min(), x.max())
37 | plt.ylim(y.min(), y.max())
38 |
39 | plt.show()
40 | # -
41 |
42 | # ## Changing the ticks and tick labels
43 | #
44 | # Changing the tick locations and tick labels are achieved by using the `set_xticks` and `set_xticklabels` methods.
45 |
46 | # +
47 | # Tick handling
48 | fig = plt.figure()
49 | ax = plt.gca()
50 | plt.plot(x, y)
51 |
52 | # Defines location of xticks and sets the xticklabels
53 | # $...$ stands for mathematical writing
54 | xticks = np.arange(0, 2*np.pi+np.pi/2., np.pi/2)
55 | xticklabels = ["$0$", r"$\frac{\pi}{2}$", r"$\pi$", r"$\frac{3\pi}{2}$", r'$2\pi$']
56 |
57 | # Length of xticks and xticklabels should be the same
58 | ax.set_xticks(xticks)
59 | ax.set_xticklabels(xticklabels, rotation=45, ha='center', fontsize=20)
60 |
61 | plt.show()
62 | # -
63 |
64 | # The same thing can be done for the `y-axis`:
65 |
66 | # +
67 | fig = plt.figure()
68 | ax = plt.gca()
69 | plt.plot(x, y)
70 |
71 | # Defines location of yticks and sets the yticklabels
72 | yticks = np.arange(-1, 1 + 0.25, 0.25)
73 | yticklabels = ['%.1f' %y for y in yticks]
74 |
75 | # Length of yticks and yticklabels should be the same
76 | ax.set_yticks(yticks)
77 | ax.set_yticklabels(yticklabels)
78 |
79 | plt.show()
80 | # -
81 |
82 | # ## Axis aspect
83 | #
84 | # By default, the aspect of the axes follows the shape of the figure. To overcome this, use the `ax.axis` or the `ax.set_aspect` method (as shown [here](https://matplotlib.org/3.1.0/gallery/subplots_axes_and_figures/axis_equal_demo.html)).
85 | #
86 | # For instance, a circle may look like an ellipse:
87 |
88 | # +
89 | import matplotlib.pyplot as plt
90 | import numpy as np
91 |
92 | # Plot circle of radius 3.
93 | an = np.linspace(0, 2 * np.pi, 100)
94 | x = 3 * np.cos(an)
95 | y = 3 * np.sin(an)
96 | # -
97 |
98 | # The figure display will show the data as an ellipse instead of a circle:
99 |
100 | fig = plt.figure()
101 | plt.plot(x, y)
102 | plt.show()
103 |
104 | # This can be corrected by using the `ax.axis` method:
105 |
106 | fig = plt.figure()
107 | ax = plt.gca()
108 | plt.plot(x, y)
109 | ax.axis('equal') # must be put before the xlim/ylim
110 | ax.set_xlim(-3, 3)
111 | ax.set_ylim(-3, 3)
112 | plt.show()
113 |
114 | # It can also be achieved by using the `set_aspect` method, which controls whether the axes limits or the axes shape should be modified to ensure a proper aspect.
115 | #
116 | # Modifying the data limits:
117 |
118 | fig = plt.figure()
119 | ax = plt.gca()
120 | plt.plot(x, y)
121 | ax.set_aspect('equal', 'datalim') # changes the axis limits to have the good aspect
122 | plt.show()
123 |
124 | # Modifying the axis shape:
125 |
126 | fig = plt.figure()
127 | ax = plt.gca()
128 | plt.plot(x, y)
129 | ax.set_aspect('equal', 'box') # changes the axes shape to have the good aspect
130 | plt.show()
131 |
132 | # ## Hiding tick labels
133 | #
134 | # Tick labels can be hidden by using the `set_visible` method:
135 |
136 | fig = plt.figure()
137 | ax = plt.gca()
138 | ax.plot(x, y)
139 | ax.axis('equal')
140 | ax.get_xaxis().set_visible(False) # removes xlabels
141 | ax.get_yaxis().set_visible(False)  # removes ylabels
142 | plt.show()
143 |
144 | # ## Removing axis box
145 | #
146 | # The removal of the axis box is done by calling the `axis` method with a `False` argument:
147 |
148 | fig = plt.figure()
149 | ax = plt.gca()
150 | ax.set_aspect('equal')
151 | ax.plot(x, y)
152 | ax.axis(False)
153 | plt.show()
154 |
--------------------------------------------------------------------------------
/misc/practical_session.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Practical session on `xarray`
17 | #
18 | # ## Instructions
19 |
20 | # - Load the `python-training/misc/data/mesh_mask_eORCA1_v2.2.nc` file using `open_dataset`.
21 |
22 | # - Extract the surface (`z=0`) and first time step (`t=0`) using `isel`
23 |
24 | # - Plot the land-sea mask (`tmask`) variable.
25 |
26 | # - Compute the cell surface (`e1t x e2t`)
27 |
28 | # - Load the `python-training/misc/data/surface_thato.nc` file using `open_dataset`.
29 |
30 | # - Extract the SST (`thetao` variable) at the surface (`olevel=0`)
31 |
32 | # - Compute and display the time-average SST
33 |
34 | # - Compute the mean SST over years 1958-1962
35 |
36 | # - Compute the mean over years 2014-2018
37 |
38 | # - Plot the SST difference between 2014-2018 and 1958-1962
39 |
40 | # - Compute the SST global mean time-series (weight by cell surface $e1t \times e2t$)
41 |
42 | # - Remove the monthly climatology from the time-series using `groupby` on `time_counter.month`
43 |
44 | # - Compute the rolling mean of the time-series, using a 3-year window. Plot the raw and smoothed anomalies
45 |
46 | # ## Corrections
47 |
48 | # - Load the `python-training/misc/data/mesh_mask_eORCA1_v2.2.nc` file using `open_dataset`.
49 |
50 | # +
51 | import xarray as xr
52 |
53 | mesh = xr.open_dataset('data/mesh_mask_eORCA1_v2.2.nc')
54 | mesh
55 | # -
56 |
57 | # - Extract the surface (`z=0`) and first time step (`t=0`) using `isel`
58 |
59 | mesh = mesh.isel(z=0, t=0)
60 | mesh
61 |
62 | # - Plot the land-sea mask (`tmask`) variable.
63 |
64 | tmask = mesh['tmask']
65 | tmask.plot()
66 |
67 | # - Compute the cell surface (`e1t x e2t`)
68 |
69 | surface = mesh['e1t'] * mesh['e2t']
70 | surface
71 |
72 | # Here the output DataArray has no name. You can give it one as follows:
73 |
74 | surface.name = 'surface'
75 | surface
76 |
77 | # - Load the `python-training/misc/data/surface_thato.nc` file using `open_dataset`.
78 |
79 | data = xr.open_dataset('data/surface_thetao.nc')
80 | data
81 |
82 | # - Extract the SST (`thetao` variable) at the surface (`olevel=0`)
83 |
84 | thetao = data['thetao'].isel(olevel=0)
85 | thetao
86 |
87 | # - Compute and display the time-average SST
88 |
89 | # +
90 | import matplotlib.pyplot as plt
91 | plt.rcParams['text.usetex'] = False
92 |
93 | theta_mean = thetao.mean(dim='time_counter')
94 | theta_mean.plot(robust=True, cmap=plt.cm.jet)
95 | # -
96 |
97 | # - Compute the mean SST over years 1958-1962
98 |
99 | sst_early = thetao.sel(time_counter=slice('1958-01-01', '1962-12-31')).mean(dim='time_counter')
100 | sst_early
101 |
102 | # - Compute the mean over years 2014-2018
103 |
104 | sst_late = thetao.sel(time_counter=slice('2014-01-01', '2018-12-31')).mean(dim='time_counter')
105 | sst_late
106 |
107 | # - Plot the SST difference between 2014-2018 and 1958-1962
108 |
109 | (sst_late - sst_early).plot(robust=True)
110 |
111 | # - Compute the SST global mean time-series (weight by cell surface $e1t \times e2t$)
112 |
113 | # A first possibility would be to compute it using `sum`:
114 |
115 | ts1 = (thetao * surface * tmask).sum(dim=['x', 'y']) / ((surface * tmask).sum(dim=['x', 'y']))
116 | ts1.plot()
117 |
118 | # Another solution would be to use the `weighted` method of `xarray`:
119 |
120 | theta_weighted = thetao.weighted(surface * tmask)
121 | ts2 = theta_weighted.mean(dim=['x', 'y'])
122 | ts2.plot()
123 |
124 | # - Remove the monthly climatology from the time-series using `groupby` on `time_counter.month`
125 |
126 | clim = ts1.groupby('time_counter.month').mean(dim='time_counter')
127 | clim.plot()
128 |
129 | anom = ts1.groupby('time_counter.month') - clim
130 | anom.plot()
131 |
132 | # - Compute the rolling mean of the time-series, using a 3-year window. Plot the raw and smoothed anomalies
133 |
134 | tsroll = anom.rolling(time_counter=3*12 + 1, center=True).mean().dropna('time_counter')
135 | tsroll
136 |
137 | anom.plot(label='raw')
138 | tsroll.plot(label='smoothed')
139 |
--------------------------------------------------------------------------------
/blocks/loops.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Loops
17 | #
18 | # ## Definition
19 | #
20 | # Loops should be used when a set of actions must be repeated a certain number of times (for instance on each element of a list).
21 | #
22 | # There are mainly two ways to perform a loop: by using a ```for``` statement, or by using a ```while``` statement.
23 | #
24 | # ## Loops in Python
25 | #
26 | # In Python, the general structure of a loop is:
27 | #
28 | # for v in iterable:
29 | # action
30 | #
31 | # while(condition):
32 | # action
33 | #
34 | #
35 | # You can always replace a for loop by a while loop, and conversely.
36 | #
37 |
38 | # First, let's create a list, which is an iterable
39 |
40 | # creates a list
41 | x = ['a', 'b', 'c', 'd', 'e']
42 | x
43 |
44 | # You can loop over a list by using its `index` as follows:
45 |
46 | # loop using the list index
47 | for p in range(0, len(x)):
48 | print('iterable ', p, 'element', x[p])
49 |
50 | # Here, the iteration is done on the index `p`, which is an integer.
51 |
52 | # But the iteration can also be performed on the elements themselves:
53 |
54 | # loop using the list elements (works for iterables, such as list, tuples)
55 | for v in x:
56 | print('iterable ', v) # temp: element itself
57 |
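# If you need both the index and the element at the same time, the built-in `enumerate`
# function provides them together (a small sketch added for illustration):

# loop over both the index and the element
for p, v in enumerate(x):
    print('index ', p, 'element', v)
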
58 | # ## Pairwise loops
59 | # There is the possibility to loop simultaneously over several iterables using the `zip` function, which returns tuples of paired elements.
60 |
61 | x = ['a', 'b', 'c', 'd']
62 | x
63 |
64 | y = ['w', 'x', 'y', 'z']
65 | y
66 |
67 | list(zip(x, y))
68 |
69 | for i, j in zip(x, y):
70 | print(i, j)
71 |
72 | # The `zip` function stops when the end of the shortest iterable has been reached.
73 |
74 | z = ['alpha', 'beta']
75 |
76 | for val in zip(x, y, z):
77 | print(val)
78 |
79 | # Any `for` loop can be converted into a `while` loop, and conversely. For instance, to iterate over a list:
80 |
81 | # +
82 | # any for loop can be converted into while loop, and conversely
83 | x = ['a', 'b', 'c', 'd']
84 |
85 |
86 | p = 0
87 | while p < len(x):
88 | print('index ', p, 'value', x[p]) # p: index of the element
89 | p += 1 # iteration of counter
90 | # -
91 |
92 | # To iterate over a list starting from the end:
93 |
94 | p = len(x) - 1
95 | while (p >= 0):
96 | print('index ', p, 'value', x[p]) # p: index of the element
97 | p -= 1 # iteration of counter
98 |
99 | # ## Nested loops
100 | #
101 | # Nested loops are achieved by indenting the code as many times as necessary:
102 |
103 | for i in range(0, 2):
104 | for j in range(0, 3):
105 | for k in range(0, 1):
106 | print('i', i, 'j', j, 'k', k)
107 |
108 | # ## List comprehensions
109 | #
110 | # Python allows writing loops in a very concise way, using *list comprehensions*. For instance, the following loop:
111 |
112 | # equivalent to (but much shorter and more elegant)
113 | combs2 = []
114 | for x in [1, 2, 3]:
115 | for y in [3, 1, 4]:
116 | if x != y:
117 | combs2.append((x, y))
118 | combs2
119 |
120 | # can be written as follows:
121 |
122 | combs1 = [(x, y) for x in [1, 2, 3] for y in [3, 1, 4] if x != y]
123 | combs1
124 |
125 | # If you have 2 lists, `x` and `y`:
126 |
127 | x = list(range(1, 10))
128 | y = [3, 5, 6]
129 | list(x)
130 |
131 | # If you want to extract the elements of `x` which are not contained in `y`:
132 |
133 | z = [a for a in x if a not in y]
134 | z
135 |
136 | # ## Loop controls: break and continue
137 | #
138 | # `break` allows leaving a loop when a condition is met:
139 |
140 | # Break: leaves the loop when condition is met
141 | for p in range(0, 10):
142 |
143 | print('===== ', p)
144 |
145 | if(p > 3):
146 | break
147 |
148 | # On the other hand, `continue` does not leave the loop: it skips to the next iteration, so the statements that come after it are not reached.
149 |
150 | # Continue: skip the end of the block when condition is met
151 | for p in range(0, 10):
152 |
153 | print('++++++ ', p)
154 |
155 | if(p > 3):
156 | continue
157 |
158 | print('below')
159 |
--------------------------------------------------------------------------------
/misc/nemo.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Working with NEMO files
17 | #
18 | # Here are some examples for plotting outputs on a NEMO grid.
19 | #
20 | # ## Building variable
21 | #
22 | # Let's first build a bathymetric field using the vertical scale factors.
23 |
24 | # +
25 | import numpy as np
26 | import matplotlib.pyplot as plt
27 | import xarray as xr
28 | import cartopy.feature as cfeature
29 | import cartopy.crs as ccrs
30 |
31 | data = xr.open_dataset('data/mesh_mask_eORCA1_v2.2.nc')
32 | data = data.isel(t=0)
33 | tmask = data['tmask'].values
34 | e3t = data['e3t_0'].values
35 | lon = data['glamt'].values
36 | lat = data['gphit'].values
37 |
38 | bathy = np.sum(e3t * tmask, axis=0)
39 | bathy = np.ma.masked_where(bathy == 0, bathy)
40 | # -
41 |
42 | # ## First try
43 | #
44 | # If we first try to use the `pcolormesh` as we learned, here is what comes out:
45 |
46 | fig = plt.figure(figsize=(12, 15))
47 | ax = plt.axes(projection=ccrs.PlateCarree())
48 | cs = ax.pcolormesh(lon, lat, bathy, transform=ccrs.PlateCarree())
49 | ax.add_feature(cfeature.LAND, zorder=50)
50 | ax.add_feature(cfeature.COASTLINE, zorder=51)
51 | cb = plt.colorbar(cs, shrink=0.3)
52 |
53 | # We have an error message saying that the longitudes and latitudes are not monotonic. Let's improve our figure.
54 |
55 | # ## Better way
56 | #
57 | # As indicated in [the documentation](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.pcolormesh.html), the `pcolormesh` method assumes the following layout:
58 | #
59 | # ```
60 | # (X[i+1, j], Y[i+1, j]) (X[i+1, j+1], Y[i+1, j+1])
61 | # +--------+
62 | # | C[i,j] |
63 | # +--------+
64 | # (X[i, j], Y[i, j]) (X[i, j+1], Y[i, j+1])
65 | # ```
66 | #
67 | # Therefore, the proper way to draw the field is to provide the coordinates of the `F` points (the upper-right cell corners), and to give a sub-array of `T` points.
68 | #
69 | # **Grid layout of NEMO outputs:**
70 | #
71 | #
72 |
73 | lonf = data['glamf'].data
74 | latf = data['gphif'].data
75 | fig = plt.figure()
76 | ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=0))
77 | cs = ax.pcolormesh(lonf, latf, bathy[1:, 1:], transform=ccrs.PlateCarree(), shading='flat')
78 | ax.set_extent([-20, 20, -20, 20], crs=ccrs.PlateCarree())
79 | ax.add_feature(cfeature.COASTLINE, zorder=2)
80 | ax.add_feature(cfeature.LAND, zorder=1)
81 | cb = plt.colorbar(cs, shrink=0.5)
82 |
83 | # ## Contour plots
84 |
85 | # However, the drawing of contour plots is not simple on an irregular grid. Instead, we need to use the
86 | # `tricontour` method, as indicated [here](https://matplotlib.org/stable/gallery/images_contours_and_fields/irregulardatagrid.html).
87 |
88 | # First, we recover the coordinates on the `T` points, not on the `F` points as for `pcolormesh`.
89 |
90 | lont = data['glamt'].data
91 | latt = data['gphit'].data
92 |
93 | # Then, we extract the data mask.
94 |
95 | mask = (np.ma.getmaskarray(bathy))
96 |
97 | # Now, we extract the bathymetry, longitudes and latitudes on wet points and convert them into 1D arrays:
98 |
99 | lon1d = np.ravel(lont[~mask])
100 | lat1d = np.ravel(latt[~mask])
101 | bat1d = np.ravel(bathy[~mask])
102 | bat1d
103 |
104 | # The next step is to convert our 1D geographical coordinates (lon/lat) into the coordinates of the output map. If we want to draw our contours on a Mollweide projection:
105 |
106 | # +
107 | projin = ccrs.PlateCarree()
108 | projout = ccrs.Mollweide(central_longitude=180)
109 | #projout = ccrs.PlateCarree(central_longitude=0)
110 |
111 | output = projout.transform_points(projin, lon1d, lat1d)
112 | lonout = output[..., 0]
113 | latout = output[..., 1]
114 | latout.shape
115 | # -
116 |
117 | # Now, we can add contours using the `tricontour` method:
118 |
119 | fig = plt.figure(figsize=(12, 12))
120 | ax = plt.axes(projection=projout)
121 | cs = ax.pcolormesh(lonf, latf, bathy[1:, 1:], transform=projin)
122 | cl = ax.tricontour(lonout, latout, bat1d, levels=np.arange(0, 6000 + 1000, 1000), colors='k', linewidths=0.5)
123 | ax.add_feature(cfeature.LAND, zorder=100)
124 | l = ax.add_feature(cfeature.COASTLINE, zorder=101, linewidth=2)
--------------------------------------------------------------------------------
/io/pand.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # CSV
17 | #
18 | # Reading, writing and analysing CSV files is achieved by using the [pandas](https://pandas.pydata.org) library.
19 | #
20 | # ## Opening a CSV
21 | #
22 | # The reading of CSV files is done by using the [read_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.read_csv.html) method.
23 |
24 | # +
25 | import pandas as pd
26 |
27 | data = pd.read_csv('./data/nina34.csv',
28 | delim_whitespace=True, # use spaces as delimiter
29 |                    skipfooter=3,  # skips the last 3 lines
30 | na_values=-99.99, # sets missing values
31 | engine='python' # sets engine to Python (default C does not support skip footer)
32 | )
33 | data
34 | # -
35 |
36 | # It returns a [pandas.DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html) object.
37 |
38 | # To get the names of the rows and columns:
39 |
40 | data.index
41 |
42 | data.columns
43 |
44 | # To display some lines at the beginning or at the end:
45 |
46 | data.head(3)
47 |
48 | data.tail(3)
49 |
50 | # ## Data extraction
51 | #
52 | # To extract data from the DataFrame, you can either
53 | #
54 | # - extract one column
55 | # - use column/row names
56 | # - use column/row indexes
57 | #
58 | # ### Extracting one column
59 | #
60 | # To extract whole columns, we can provide a list of column names as follows:
61 |
62 | col = data[['JAN', 'FEB']]
63 | col
64 |
65 | # ### Using names
66 | #
67 | # Extracting data using column and row names is done by using the [loc](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.loc.html#pandas-dataframe-loc) method.
68 |
69 | dataex = data.loc[:, ['JAN', 'FEB']]
70 | dataex
71 |
72 | dataex = data.loc[[1950, 1960], :]
73 | dataex
74 |
75 | dataex = data.loc[1950:1953, ['JAN', 'FEB']]
76 | dataex
77 |
78 | # ### Using indexes
79 | #
80 | # Extracting data using column and row indexes is done by using the [iloc](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.iloc.html#pandas-dataframe-iloc) method.
81 |
82 | dataex = data.iloc[:5, 0]
83 | dataex
84 |
85 | dataex = data.iloc[2, :]
86 | dataex
87 |
88 | dataex = data.iloc[slice(2, 6), [0, 1]]
89 | dataex
90 |
91 | dataex = data.iloc[slice(2, 6), :].loc[:, ['OCT', 'NOV']]
92 | dataex
93 |
94 | # ## Extracting data arrays
95 | #
96 | # To extract the underlying data array, use the `values` attribute.
97 |
98 | array = data.values
99 | array.shape
100 |
101 | # ## Plotting
102 | #
103 | # `pandas` comes with some functions to draw quick plots.
104 |
105 | # +
106 | import matplotlib.pyplot as plt
107 |
108 | l = data.loc[:, ['JAN', 'FEB']].plot()
109 | # -
110 |
111 | l = data.loc[1970, :].plot()
112 |
113 | l = data.T.loc[:, 1995:2000].plot()
114 |
115 | # ## Creating dataframes
116 | #
117 | # Creating a data frame is done by using the [pandas.DataFrame](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html) constructor.
118 |
119 | # +
120 | import numpy as np
121 |
122 | # init a date object: 10 elements with a 1h interval
123 | date = pd.date_range('1/1/2012', periods=10, freq='H')
124 |
125 | x = np.arange(10)
126 | y = np.arange(10)*0.5
127 | cat = ['A']*2 + ['C'] + ['A'] + 3*['B'] + ['C'] + ['D'] + ['A']
128 |
129 | data = pd.DataFrame({'xvalue': x,
130 | 'yvalue': y,
131 | 'cat': cat},
132 | index=date)
133 | data
134 | # -
135 |
136 | # ## Mathematical operations
137 | #
138 | # Mathematical operations can be done by using the available pandas methods. Note that they apply only to numerical types. By default, the mean is computed over all the rows:
139 |
140 | datam = data.loc[:, ['xvalue', 'yvalue']].mean()
141 | datam
142 |
143 | # But you can also compute means over columns:
144 |
145 | # mean over the second dimension (columns)
146 | datam = data.loc[:, ['xvalue', 'yvalue']].mean(axis=1)
147 | datam
148 |
149 | # There is also the possibility to perform computations depending on the value of a categorical variable (here, the column called `cat`).
150 |
151 | data_sorted = data.sort_values(by="cat")
152 | data_sorted
153 |
154 | # You can count the occurrences:
155 |
156 | data.groupby("cat").size()
157 |
158 | data.groupby("cat").mean()
159 |
160 | data.groupby("cat").std()
161 |
162 | # ## Writing a CSV
163 | #
164 | # Writing a CSV file is done by calling the [DataFrame.to_csv](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_csv.html) method.
165 |
166 | data.to_csv('data/example.csv', sep=';')
167 |
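# To check the round trip, the file written above can be read back with `read_csv`
# (a short sketch, assuming the `data/example.csv` file created in the previous cell):

# read the file back, using the same separator and the first column as index
check = pd.read_csv('data/example.csv', sep=';', index_col=0)
check.head(3)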
--------------------------------------------------------------------------------
/data_types/dict.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Dictionaries
17 | #
18 | # ## Definition
19 | #
20 | # A dictionary can be viewed as a collection of elements (called *values*), which are *not necessarily of the same type* and are each accessible via a unique label, which must be an immutable object (called a *key*). Note that since Python 3.7, dictionaries preserve insertion order.
21 | #
22 | #
23 | #
24 | # ## Usage
25 | #
26 | # Dictionaries are used:
27 | #
28 | # - To manipulate the global environment (```globals()``` dictionary)
29 | # - To handle function arguments (```**kwargs``` arguments)
30 | # - Class objects are associated with a dictionary (```__dict__``` attribute)
31 | # - To manipulate some objects (```pandas.DataFrame```, ```xarray.Dataset```)
32 | #
33 | # ## Manipulation
34 | #
35 | # To have more about dictionaries, visit [python.org](https://docs.python.org/3/tutorial/datastructures.html#dictionaries)
36 | #
37 | # ### Creating dictionaries
38 | #
39 | # Creating dictionaries is done by using `{}`. To create an empty one:
40 |
41 | data = {} # empty dictionary
42 | data
43 |
44 | # To create one with values:
45 |
46 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
47 | data
48 |
49 | # You can also initialize a dictionary by giving the same value to all keys using the `fromkeys` method:
50 |
51 | data = dict.fromkeys(['key0', 'key1', 3], 'new val')
52 | data
53 |
54 | # ### Accessing elements
55 | #
56 | # Accessing elements is done by using the `get` method:
57 |
58 | # Getting and replacing dict. elements
59 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
60 | data.get('datstr')
61 |
62 | # If the key does not exist, `None` is returned (so nothing is displayed):
63 |
64 | data.get('toto')
65 |
66 | # except if you provide an additional argument, which is the value returned when the key is not found:
67 |
68 | data.get('toto', 0)
69 |
70 | # Getting a dictionary element can also be done using the `dict[key]` syntax. However, this way is less safe, since the program stops with a `KeyError` if the key is not found:
71 |
72 | data['datstr']
73 | # data['toto'] # fails!
74 |
75 | # ### Changing/adding values
76 |
77 | # To add or overwrite a value in a dict:
78 |
79 | data['datlist'] = [0, 1, 2] # add a new element to the dict (key=datlist)
80 | data
81 |
82 | data['datstr'] = 'new string' # overwrites a given value (key=datstr)
83 | data
84 |
85 | # In the above, the existing key is overwritten. In order to prevent overwriting, adding elements can be achieved by using the `setdefault` method. If the key already exists, nothing is done and the function returns the old value. If the key does not exist, the dictionary is updated and the associated value is returned.
86 |
87 | # If we create a dictionary:
88 |
89 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
90 |
91 | # If we try to overwrite an existing key of the dict:
92 |
93 | added = data.setdefault('datstr', 'final string')
94 | added, data
95 |
96 | # In this case, the dictionary is not updated and the function returns the value that was already in the dictionary.
97 | #
98 | # If now we try to add a new key to the dict:
99 |
100 | # datstrbis not found, value is set
101 | added = data.setdefault('datstrbis', 'final string')
102 | added, data
103 |
104 | # In this case, the value provided in the argument is returned and the dictionary is updated.
105 | #
106 | # To check if a key is in a dictionary, use the `in` statement:
107 |
108 | # check whether dict contains a given key
109 | iskey = 'datstr' in data
110 | istoto = ('toto' in data)
111 | print(iskey)
112 | print(istoto)
113 |
114 | # To recover the list of keys:
115 |
116 | data.keys()
117 |
118 | # To recover the list of values:
119 |
120 | data.values()
121 |
122 | # To recover the key/value pairs as tuples:
123 |
124 | data.items()
125 |
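# Since `keys`, `values` and `items` are iterable, they are commonly used to loop over a
# dictionary (a short illustrative sketch added here):

# loop over the key/value pairs of the dictionary
for key, value in data.items():
    print(key, '->', value)
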
126 | # ### Removing elements
127 |
128 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
129 |
130 | # To empty a dictionary:
131 |
132 | data.clear() # Removes all the elements
133 | data
134 |
135 | # To remove an element based on the value of the key, use the `pop` method (it returns the removed value):
136 |
137 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
138 | removed = data.pop("dataint")
139 | removed, data
140 |
141 | # ### Concatenation
142 |
143 | # Concatenation is done by using the `update` method. If we have two dictionaries:
144 |
145 | data = {'dataint': 10, 'datstr': 'This is a dictionary'}
146 | data2 = {'dataint': 14, 'datflt': 0.5}
147 |
148 | # To send `data2` into `data`:
149 |
150 | data.update(data2)
151 | data
152 |
153 | # Note that in this case, the `dataint` value of the destination dict has been overwritten by the value of the source dict. You can also use the following syntax:
154 |
155 | data.update(keytoto='toto', keylala='lala')  # equivalent to data.update({'keytoto': 'toto', 'keylala': 'lala'})
156 | data
157 |
159 |
160 | # One usage of `update` is to make all the variables defined in a dictionary accessible in the global working environment, which is described by the `globals()` dictionary. For instance, to use `dataint`, which is defined in the `data` dict, we send the content of `data` into `globals()`:
161 |
162 | globals().update(data)
163 |
164 | dataint
165 |
--------------------------------------------------------------------------------
/introduction/libinstall.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Python libraries
17 | #
18 | # ## Install libraries
19 | #
20 | # To install libraries, you can either use [conda](https://docs.conda.io/en/latest/), [pip](https://pip.pypa.io/en/stable/) or install from source files.
21 | #
22 | # *Pip* only installs Python packages. The user needs to manually install the required external tools (compilers, C libraries, etc.).
23 | #
24 | # *Conda* installs everything that is needed (compiler, libraries) and has the ability to create isolated environments, which may contain different versions of the packages.
25 | #
26 | #
27 | #
28 | # Pip vs. Conda! For further details about the difference between Pip and Conda, visit anaconda.com.
29 | #
30 | #
31 | # ### Install from Pip
32 | #
33 | # All the packages available on Pip are listed on the [pip](https://pypi.org/) webpage. To install any of them, type on a terminal:
34 | #
35 | # ```
36 | # pip install package_name
37 | # ```
38 | #
39 | # For instance, to manipulate NetCDF files:
40 | #
41 | # ```
42 | # pip install netCDF4 xarray
43 | # ```
44 | #
45 | # ### Install from Conda
46 | #
47 | # All the packages available on the Conda default channel are listed on the [Conda repository](https://repo.anaconda.com/pkgs/). To install any of them, type on a terminal:
48 | #
49 | # ```
50 | # conda install package_name
51 | # ```
52 | #
53 | # For instance, to manipulate NetCDF files:
54 | #
55 | # ```
56 | # conda install netCDF4 xarray
57 | # ```
58 | #
59 | # #### Conda virtual environments
60 | #
61 | # To create virtual environments (for instance for spatial representation), type in a terminal:
62 | #
63 | # ```
64 | # # PyNGL env. for Python 3
65 | # conda create --name pyngl3 --channel conda-forge pynio pyngl
66 | #
67 | # # PyNGL env. for Python 2
68 | # conda create --name pyngl2 --channel conda-forge pynio pyngl python=2.7
69 | # ```
70 | #
71 | # In this case, the packages are downloaded from a community channel, the [conda-forge](https://conda-forge.org/).
72 | #
73 | # To change environment, type
74 | #
75 | # ```
76 | # conda activate pyngl2 # switch env. to pygnl2
77 | # conda activate pyngl3 # switch env. to pygnl3
78 | # conda activate base # go back to default env.
79 | # ```
80 | #
81 | # To list all the environments, type:
82 | #
83 | # ```
84 | # conda env list
85 | # ```
86 | #
87 | #
88 | # Default environment! The default environment is named `base`.
89 | #
90 | #
91 | # #### Export environment
92 | #
93 | # Conda allows exporting an environment to a text file as follows:
94 | #
95 | # ```
96 | # conda env export > env.yaml
97 | # ```
98 | #
99 | # #### Import environment
100 | #
101 | # You can also import an environment as follows:
102 | #
103 | # ```
104 | # conda env create -f env.yaml
105 | # ```
106 | #
107 | # #### Connect environments to Jupyter
108 | #
109 | # You can also allow Jupyter to access your environments as follows:
110 | #
111 | # ```
112 | # conda activate pyngl
113 | # conda install ipython ipykernel
114 | # ipython kernel install --name "pyngl" --user
115 | # ```
116 | #
117 | # ### Install from source
118 | #
119 | # To install a package from sources, unzip the archive and type:
120 | #
121 | # ```
122 | # python setup.py install --home=/my/directory/
123 | # ```
124 | #
125 | #
126 | # Note: at the beginning, you are unlikely to need this.
127 | #
128 | #
129 | # ## Loading libraries
130 | #
131 | # Libraries are loaded by using the `import` statement (generally at the beginning of the scripts) as follows:
132 |
133 | # +
134 | # loading the numpy library
135 | import numpy
136 |
137 | # loading matplotlib with the shortname mp
138 | import matplotlib as mp
139 | # -
140 |
141 | # In this case, the objects of the imported modules are stored in dedicated **namespaces**, which prevents conflicts among object names.
142 |
143 | # ### Calling functions
144 | #
145 | # Using namespaces, a module's function is called as follows:
146 | #
147 | # `module.function(arg1, arg2, arg3, ...)`
148 | #
149 | # For instance:
150 |
151 | numpy.mean([0, 1, 2], keepdims=True)
152 |
153 | mp.is_interactive()
154 |
155 | # To get some help about a function, use the `help` function:
156 |
157 | help(mp.is_interactive)
158 |
159 | # ### Bad practice!
160 | #
161 | # Libraries can also be loaded as follows:
162 |
163 | from pandas import DataFrame
164 |
165 | # Here, we import the `DataFrame` from the `pandas` module.
166 |
167 | from scipy.stats import *
168 |
169 | # Here, we import all the content of the `scipy.stats` module into the current namespace.
170 |
171 | #
172 | # Warning! I strongly recommend never using this way of importing modules, since the imported names may conflict with other objects.
173 | #
174 |
175 | import numpy as np
176 | x = np.array([1e4, 1e6])
177 | x
178 |
179 | from numpy import *
180 | log10(x)
181 |
182 | from math import *
183 | # log10(x)  # fails: math.log10 does not accept numpy arrays
184 |
185 | # Here, the `numpy.log10` function has been overwritten by the `math.log10` one, which works on `float` objects, not on `arrays`.
186 |
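# Using explicit namespaces avoids this kind of silent shadowing. A short sketch of the
# safer style (added for illustration):

# +
import numpy as np
import math

x = np.array([1e4, 1e6])

# explicit namespaces make it clear which log10 is being called
print(np.log10(x))       # numpy version: works on arrays
print(math.log10(x[0]))  # math version: works on a single float
# -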
--------------------------------------------------------------------------------
/misc/dask_covariance.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3 (ipykernel)
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Example of covariance using Dask
17 | #
18 | # In this notebook, the use of Dask in parallel mode is illustrated in order to compute lead-lag covariances.
19 | #
20 | # ## Import of data
21 | #
22 | # ### SST anomalies
23 | #
24 | # First, SST data are extracted.
25 |
26 | # +
27 | import matplotlib.pyplot as plt
28 | import xarray as xr
29 | import scipy.signal as sig
30 | import numpy as np
31 | import pandas as pd
32 | from dask.diagnostics import Profiler, ResourceProfiler, CacheProfiler, visualize
33 |
34 | data = xr.open_dataset('data/surface_thetao.nc')
35 | data = data.isel(olevel=0)
36 | data = data['thetao']
37 | data
38 | # -
39 |
40 | # Then, monthly anomalies are computed:
41 |
42 | clim = data.groupby('time_counter.month').mean(dim='time_counter')
43 |
44 | anom = data.groupby('time_counter.month') - clim
45 | anom
46 |
47 | # ## ONI index
48 | #
49 | # Now, the ONI index is extracted from the CSV file.
50 |
51 | nino = pd.read_csv('data/oni.data', skiprows=1, skipfooter=8, engine='python', header=None, index_col=0, delim_whitespace=True, na_values=-99.9)
52 | nino
53 |
54 | # It needs to be converted into a 1D array. This is done by combining the years (rows) and the months (columns):
55 |
56 | years = nino.index.values
57 | years
58 |
59 | months = nino.columns.values
60 | months
61 |
62 | mm, yy = np.meshgrid(months, years)
63 | yy = np.ravel(yy)
64 | mm = np.ravel(mm)
65 | date = yy * 100 + mm
66 | date
67 |
68 | nino = np.ravel(nino.values)
69 |
70 | # Now, we extract the values that correspond to the length of the SST time-series (1958-2018)
71 |
72 | iok = np.nonzero((date >= 195801) & (date <= 201812))[0]
73 | date[iok]
74 |
75 | # Finally, the time-series is converted into a data array.
76 |
77 | tmean = xr.DataArray(
78 | data = nino[iok],
79 | name = 'oni',
80 | coords={'time_counter' : data['time_counter']}
81 | )
82 | l = tmean.plot()
83 | tmean
84 |
85 | # ## First test on covariance analysis
86 | #
87 | # Here, the covariance is computed using numpy arrays.
88 |
89 | nt, ny, nx = data.shape
90 | nt, ny, nx
91 |
92 | # Now, the correlation lags are extracted.
93 |
94 | lags = sig.correlation_lags(nt, nt)
95 | lags
96 |
97 | # The index of the $0$-lag covariance is extracted.
98 |
99 | izero = np.nonzero(lags == 0)[0][0]
100 | izero
101 |
102 | # Now, the covariance is computed with a `for` loop over Numpy arrays.
103 |
104 | # %%time
105 | covariance = np.zeros((ny, nx, len(lags)))
106 | dataval = anom.values # t, y, x
107 | tmeanval = tmean.values
108 | for s in range(ny):
109 | for i in range(nx):
110 | temp = dataval[:, s, i]
111 | covariance[s, i, :] = sig.correlate(temp, tmeanval) / nt
112 | covariance.shape
113 |
114 | cs = plt.pcolormesh(covariance[:, :, izero])
115 | cs.set_clim(-1, 1)
116 | plt.colorbar(cs)
117 |
118 | # ## Using user-defined functions in parallel.
119 | #
120 | # To compute the covariance in parallel mode, a function that works on Numpy arrays must be created. It basically does the same thing as in the above, except that in this case `time` becomes the rightmost dimension.
121 |
122 | # +
123 | import scipy.signal as sig
124 | import numpy as np
125 |
126 | def gufunc_cov(x, y):
127 |     print("input chunk shapes: ", x.shape, y.shape)  # debug output showing the shapes received by the function
128 | nx = x.shape[0]
129 | ny = x.shape[1]
130 | ntime = x.shape[-1]
131 | lags = sig.correlation_lags(ntime, ntime)
132 | nlags = len(lags)
133 | output = np.zeros((nx, ny, nlags))
134 | for s in range(nx):
135 | for i in range(ny):
136 | temp = x[s, i]
137 | output[s, i, :] = sig.correlate(temp, y) / ntime
138 | return output
139 |
140 |
141 | # -
142 |
143 | # Now that it is done, we create a new function that wraps `xr.apply_ufunc`. The first argument is the above function, the second argument is the SST `DataArray`, the third argument is the `Nino` index. The `input_core_dims` argument provides the names of the dimensions that will not be broadcast (here, the time dimension). Since the `correlate` function returns an array of dimensions (`y, x, lags`), we need to specify the new lag dimension using the `output_core_dims` and the `dask_gufunc_kwargs` arguments:
144 |
145 | def xarray_cov(x, y, dim):
146 | return xr.apply_ufunc(
147 | gufunc_cov,
148 | x,
149 | y,
150 | input_core_dims=[[dim], [dim]],
151 | output_core_dims=[['lags']],
152 | dask="parallelized",
153 | output_dtypes=[np.float32],
154 | dask_gufunc_kwargs = {'output_sizes' : {'lags': 1463}}
155 | )
156 |
157 |
158 | # Now, we rechunk our data using a specific chunk layout. **Note that the `time` dimension must remain unchunked.**
159 |
160 | anom = anom.chunk({'y':50, 'x': 50})
161 | anom
162 |
163 | # %%time
164 | with Profiler() as prof, ResourceProfiler(dt=0.25) as rprof, CacheProfiler() as cprof:
165 | calc = xarray_cov(anom, tmean, dim='time_counter').compute()
166 |
167 | # We see that the calculation time is shorter than the original one. We can now visualize the resource usage:
168 |
169 | visualize([prof, rprof, cprof], show=False)
170 |
171 | # Finally, we can verify that both calculations (Numpy vs. Dask) return the same results. First, `NaN` values are replaced by 0 in both results.
172 |
173 | calc = calc.fillna(0)
174 | covariance[np.isnan(covariance)] = 0
175 |
176 | np.all(covariance == calc.values)
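# Note (added remark, not in the original): strict equality can be sensitive to floating-point
# round-off when the two computations follow different code paths (here, `float64` vs `float32`).
# A more tolerant check could use `np.allclose`, for instance:

np.allclose(covariance, calc.values, atol=1e-6)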
177 |
--------------------------------------------------------------------------------
/misc/eof_analysis.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.11.4
10 | # kernelspec:
11 | # display_name: Python 3 (ipykernel)
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # EOF analysis
17 | #
18 | # EOF analysis is performed using the [Eofs](https://ajdawson.github.io/eofs/latest/index.html) package. In the following, the steps for the computation of an EOF decomposition are provided. The objective is to compute an El Niño index based on the SST of the Northern Pacific.
19 | #
20 | # ## Extraction of Pacific mask
21 | #
22 | # The Pacific mask can first be extracted based on coordinates (longitudes and latitudes) as follows:
23 |
24 | # +
25 | import xarray as xr
26 | import numpy as np
27 | import matplotlib.pyplot as plt
28 | plt.rcParams['text.usetex'] = False
29 |
30 | data = xr.open_dataset('data/mesh_mask_eORCA1_v2.2.nc')
31 | data = data.isel(z=0, t=0)
32 | lon = data['glamt'].values
33 | lat = data['gphit'].values
34 | mask = data['tmask'].values
35 |
36 | # converts lon from Atl to Pac.
37 | lon[lon < 0] += 360
38 |
39 | # mask based on latitudes
40 | ilat = (lat <= 60) & (lat >= -20)
41 | mask[~ilat] = 0
42 |
43 | # mask based on longitudes
44 | ilon = (lon >= 117 ) & (lon <= 260)
45 | mask[~ilon] = 0
46 |
47 | # extracting the domain using the slices
48 | ilat, ilon = np.nonzero(mask == 1)
49 | ilat = slice(ilat.min(), ilat.max() + 1)
50 | ilon = slice(ilon.min(), ilon.max() + 1)
51 | mask = mask[ilat, ilon]
52 |
53 | plt.figure()
54 | cs = plt.imshow(mask, interpolation='none')
55 | cb = plt.colorbar(cs)
56 | # -
57 |
58 | # ## Computation of seasonal anomalies
59 | #
60 | # Now, we need to compute the seasonal anomalies of the SST fields. First, we read the SST values and remove the spurious `olevel` dimension.
61 |
62 | data = xr.open_dataset("data/surface_thetao.nc")
63 | data = data.isel(olevel=0, x=ilon, y=ilat)
64 | ntime = data.dims['time_counter']
65 | data = data['thetao']
66 |
67 | # Now, we compute the anomalies using the `groupby` method:
68 |
69 | clim = data.groupby('time_counter.month').mean()
70 | anoms = data.groupby('time_counter.month') - clim
71 |
72 | # ## Detrending the time-series
73 | #
74 | # Now that the anomalies have been computed, the linear trend is removed using the `detrend` function. Since the `detrend` function does not handle NaNs, the masked values are first replaced by 0s.
75 |
76 | # +
77 | import scipy.signal as sig
78 | import time
79 |
80 | anoms = anoms.fillna(0)
81 | anoms_detrend = sig.detrend(anoms, axis=0)
82 | print(type(anoms_detrend))
83 | # -
84 |
85 | # Note that the `detrend` function returns a `numpy.ndarray` object. Hence, the `xarray` structure will not be leveraged in the EOF calculation.
86 |
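# As a side remark (not in the original), if the `xarray` structure should be kept, a minimal
# sketch is to wrap the detrended array back into a `DataArray`, reusing the coordinates and
# dimensions of `anoms`:

anoms_detrend_xr = xr.DataArray(anoms_detrend, coords=anoms.coords, dims=anoms.dims)
anoms_detrend_xr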
87 | # ## Extracting the weights
88 | #
89 | # Now, the next step is to extract the weights for the EOFs, based on the cell surface and mask.
90 |
91 | mesh = xr.open_dataset('data/mesh_mask_eORCA1_v2.2.nc')
92 | mesh = mesh.isel(t=0, x=ilon, y=ilat)
93 | surf = mesh['e1t'] * mesh['e2t']
94 | surf = surf.data * mask # surf in Pacific, 0 elsewhere
95 | weights = surf / np.sum(surf) # normalization of weights
96 |
97 | # **Since EOFs are based on covariance, the square root of the weights must be used.**
98 |
99 | weights = np.sqrt(weights)
100 |
101 | # ## Computation of EOFs (standard mode)
102 | #
103 | # The EOFs can now be computed. First, an EOF solver must be initialized. **The `time` dimension must always be the first one when using a numpy.array as input.**
104 |
105 | # +
106 | import eofs
107 | from eofs.standard import Eof
108 |
109 | solver = Eof(anoms_detrend, weights=weights)
110 | # -
111 |
112 | # Now, EOF components can be extracted. First, the covariance maps are extracted.
113 |
114 | # +
115 | neofs = 2
116 | nlat, nlon = surf.shape
117 | covmaps = solver.eofsAsCovariance(neofs=neofs)
118 | print(type(covmaps))
119 |
120 | plt.figure()
121 | plt.subplot(211)
122 | cs = plt.imshow(covmaps[0], cmap=plt.cm.RdBu_r)
123 | cs.set_clim(-1, 1)
124 | cb = plt.colorbar(cs)
125 | plt.subplot(212)
126 | cs = plt.imshow(covmaps[1], cmap=plt.cm.RdBu_r)
127 | cs.set_clim(-1, 1)
128 | cb = plt.colorbar(cs)
129 | # -
130 |
131 | # Then, we can recover the explained variance:
132 |
133 | eofvar = solver.varianceFraction(neigs=neofs) * 100
134 | eofvar
135 |
136 | # Finally, we can obtain the principal components. To obtain normalized time-series, the `pcscaling` argument must be equal to 1.
137 |
138 | pcs = solver.pcs(pcscaling=1, npcs=neofs).T
139 | plt.figure()
140 | plt.plot(pcs[0], label='pc1')
141 | plt.plot(pcs[1], label='pc2')
142 | leg = plt.legend()
143 | plt.gca().set_ylim(-3, 3)
144 | plt.gca().set_xlim(0, len(pcs[0]) - 1)
145 | plt.savefig('ts1')
146 |
147 | # ## EOF computation (xarray mode)
148 | #
149 | # In order to have the EOFs as `xarray` objects with all their features, the `Eof` class of the `eofs.xarray` submodule must be used.
150 |
151 | from eofs.xarray import Eof
152 |
153 | # Since it uses named labels, the `time_counter` dimension must first be renamed to `time`:
154 |
155 | anoms = anoms.rename({'time_counter': 'time'})
156 | anoms
157 |
158 | # To make sure it works, the non-dimension coordinate variables need to be removed.
159 |
160 | anoms = anoms.drop_vars(['nav_lat', 'nav_lon', 'time_centered', 'month'])
161 | anoms
162 |
163 | solver = Eof(anoms, weights=weights)
164 |
165 | neofs = 2
166 | covmaps = solver.eofsAsCovariance(neofs=neofs)
167 | covmaps
168 |
169 | plt.figure()
170 | cs = covmaps.isel(mode=0).plot()
171 | cs.set_clim(-1, 1)
172 |
173 | pcs = solver.pcs(pcscaling=1, npcs=neofs)
174 | pcs
175 |
176 | plt.figure()
177 | l = pcs.plot.line(x='time')
178 | plt.savefig('ts2')
179 |
180 |
181 |
--------------------------------------------------------------------------------
/plots/xy.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # XY plots
17 | #
18 | # XY plots are achieved by using the [plot](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.plot.html) function
19 | #
20 | # ## Simple XY plots
21 |
22 | # +
23 | import matplotlib as mp
24 | import matplotlib.pyplot as plt
25 | import numpy as np
26 |
27 | x = np.linspace(0, 2*np.pi, 20)
28 | y = np.sin(x)
29 | z = np.cos(x)
30 | t = np.tan(x)
31 | t = np.ma.masked_where(np.abs(t) > 100, t)
32 |
33 | # +
34 | fig = plt.figure() # initialize figure
35 | ax = plt.gca() # initialize axis (here, optional)
36 | plt.plot(x, y, color='Plum', linestyle='--', linewidth=2,
37 | marker='o', markeredgewidth=2, markerfacecolor='FireBrick',
38 | markeredgecolor='black', markersize=12)
39 | plt.title('First plot')
40 | plt.xlabel('X label')
41 | plt.ylabel('Y label')
42 |
43 | # plt.savefig('figs/xy.pdf', bbox_inches='tight') # save the figure
44 | # tight: remove the white spaces around the figure
45 |
46 | plt.show() # display the figure
47 |
48 | plt.close(fig)
49 | # -
50 |
51 | # ## Multiline XY plots
52 | #
53 | # There are several ways to draw multiline XY plots.
54 | #
55 | # First, this can be achieved by using the `label` argument of the `plot` function:
56 |
57 | fig = plt.figure()
58 | ax = plt.gca()
59 | l0 = plt.plot(x, y, label='sin')
60 | l1 = plt.plot(x, z, label='cos')
61 | leg = plt.legend(fontsize=10, loc=0, ncol=1)
62 | plt.show()
63 |
64 | # You can also set the legend by providing a list of `matplotlib.lines.Line2D` objects and their corresponding labels. However, it is up to the user to make sure that the right label goes with the right line.
65 |
66 | fig = plt.figure()
67 | ax = plt.gca()
68 | l0 = plt.plot(x, y)
69 | l1 = plt.plot(x, z)
70 | leg = plt.legend([l0[0], l1[0]], ['sin', 'cos'], loc=0, ncol=1, fontsize=10)  # l0 is the sin line, l1 the cos line
71 | plt.show()
72 |
73 | # 1D arrays can be combined into a multidimensional one prior to plotting. The first dimension of the `y` array must have the same number
74 | # of elements as the `x` array.
75 | #
76 | # In this case, the user needs to set the legend by providing the list of the corresponding labels (the user must ensure that the right label goes with the right line).
77 |
78 | # +
79 | arr = np.array([y, z]).T
80 | print(arr.shape)
81 |
82 | fig = plt.figure()
83 | ax = plt.gca()
84 | plt.plot(x, arr)
85 | leg = plt.legend(['sin', 'cos'], loc=0, ncol=1, fontsize=10)
86 | # -
87 |
88 | # If several legends should be plotted, they must be added to the current graph by using the `matplotlib.axes.Axes.add_artist` function:
89 |
90 | # +
91 | fig = plt.figure() #initialize figure
92 | ax = plt.gca()
93 |
94 | l0 = plt.plot(x, y, marker='o')
95 | l1 = plt.plot(x, z, color='0.8')
96 | l2 = plt.plot(x, -z, linestyle='--')
97 |
98 | # Draw the legend for the first two lines
99 | leg1 = plt.legend([l0[0], l1[0]], ['sin', 'cos'],
100 | loc=(0.5, 0), ncol=2)
101 | # add legend to the axes
102 | ax.add_artist(leg1)
103 |
104 | # Draw second legend
105 | leg2 = plt.legend([l2[0]], ['-cos'], loc=1)
106 | plt.show()
107 | # -
108 |
109 | # ## Using colortable colors
110 | #
111 | # To use colormap colors to draw xy plots, you first need to define a colormap object:
112 |
113 | cmap = plt.cm.jet # Defining colormap (cf. matplotlib website)
114 | print(cmap(0))   # picks the first color of the colormap (blue)
115 | print(cmap(1.))  # picks the last color of the colormap (red)
116 |
117 | plt.figure()
118 | cmap = plt.cm.jet # Defining colormap (cf. matplotlib website)
119 | for p in range(0, len(x)):
120 | cindex = p / (len(x) - 1) # value between 0 and 1
121 | color = cmap(cindex)
122 | plt.plot(x[p:p+1], y[p:p+1], color=color, linestyle='none', marker='o')
123 | plt.show()
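# As a side note (not in the original), the same result can be obtained more compactly with
# `scatter`, which accepts an array of values and a colormap directly, so the per-point loop is
# not needed; a minimal sketch:

# +
plt.figure()
sc = plt.scatter(x, y, c=np.arange(len(x)), cmap=plt.cm.jet, marker='o')
plt.colorbar(sc)
plt.show()
# -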
124 |
125 | # ## Using twin axis
126 | #
127 | # Using twin axis (shared x or shared y) is achieved by using the `twinx` or `twiny` methods.
128 |
129 | # +
130 | # Shared x and y axis
131 | plt.figure()
132 | ax1 = plt.gca()
133 | ax1.plot(x, y, 'k')
134 | ax1.set_ylabel('sin', color='k')
135 |
136 | ax2 = ax1.twinx()
137 | ax2.plot(x, t, color='FireBrick')
138 | ax2.set_ylim(-10, 10)
139 | ax2.set_ylabel('tan', color='FireBrick')
140 | plt.setp(ax2.get_yticklabels(), color='FireBrick')
141 | ax2.spines['right'].set_color('FireBrick')
142 |
143 | plt.show()
144 |
145 | # +
146 | plt.figure()
147 | ax1 = plt.gca()
148 | ax1.plot(y, x, 'k')
149 | ax1.set_xlabel('sin', color='k')
150 |
151 | ax2 = ax1.twiny()
152 | ax2.plot(t, x, color='FireBrick')
153 | ax2.set_xlabel('tan', color='FireBrick')
154 | ax2.set_xlim(-10, 10)
155 | plt.setp(ax2.get_xticklabels(), color='FireBrick')
156 | ax2.spines['top'].set_color('FireBrick')
157 |
158 | plt.show()
159 | # -
160 |
161 | #
162 | # Note. To know how to manage multiple spines, visit the Matplotlib documentation.
163 | #
164 | #
165 | # ## Filled XY plots
166 | #
167 | # Filled XY plots are achieved by using the [fill_between](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.fill_between.html) function.
168 | #
169 | # It is highly recommended to set `interpolate` to `True` to ensure a proper layout.
170 |
171 | # +
172 | y = np.cos(3 * x)
173 |
174 | fig = plt.figure()
175 | ax = plt.gca()
176 | ax.set_title('no interpolation')
177 | ax.fill_between(x, 0, y, facecolor='r', where=y>0,
178 | interpolate=False, edgecolor='k')
179 | ax.fill_between(x, 0, y, facecolor='b', where=y<0,
180 | interpolate=False, edgecolor='k')
181 | plt.show()
182 | # -
183 |
184 | fig = plt.figure()
185 | ax = plt.gca()
186 | ax.set_title('interpolation')
187 | ax.fill_between(x, 0, y, facecolor='r', where=y>0,
188 | interpolate=True, edgecolor='k')
189 | ax.fill_between(x, 0, y, facecolor='b', where=y<0,
190 | interpolate=True, edgecolor='k')
191 | plt.show()
192 |
--------------------------------------------------------------------------------
/plots/panels.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Panelling
17 |
18 | import matplotlib.pyplot as plt
19 | plt.rcParams['text.usetex'] = False
20 | plt.rcParams['xtick.direction'] = 'out'
21 | plt.rcParams['ytick.direction'] = 'out'
22 |
23 | # ## Drawing panels
24 | #
25 | # Panels are obtained by using the [subplot](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.subplot.html) method, which returns an axes object.
26 | #
27 | #
28 | # Remark: the `plt.gca()` method is equivalent to `plt.subplot(1, 1, 1)`.
29 | #
30 |
31 | fig = plt.figure()
32 | for p in range(1, 3*3+1):
33 | ax = plt.subplot(3, 3, p) # number of rows, number of columns, subplot index (starts at 1!)
34 | plt.text(0.5, 0.5, 'Axes number '+str(p), ha='center', va='center')
35 | ax.get_xaxis().set_visible(False)
36 | ax.get_yaxis().set_visible(False)
37 |
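# As a side note (not in the original), the `plt.subplots` function creates the figure and the
# full grid of axes in a single call; a minimal sketch:

# +
fig, axes = plt.subplots(nrows=3, ncols=3)
for p, ax in enumerate(axes.ravel(), start=1):
    ax.text(0.5, 0.5, 'Axes number ' + str(p), ha='center', va='center')
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
# -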
38 | # ## Setting panel properties
39 | #
40 | # The [subplots_adjust](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.subplots_adjust.html) method allows you to control some panelling properties (horizontal and vertical spacing, margins, etc.).
41 |
42 | fig = plt.figure()
43 | plt.subplots_adjust(wspace=0.4, hspace=0.1,
44 | left = 0.05, right=0.95,
45 | bottom=0.05, top=0.95)
46 | for p in range(1, 3*3+1):
47 | ax = plt.subplot(3, 3, p)
48 | plt.text(0.5, 0.5, 'Axes number '+str(p), ha='center', va='center')
49 | ax.get_xaxis().set_visible(False)
50 | ax.get_yaxis().set_visible(False)
51 | plt.show()
52 |
53 | # ## Managing properties
54 | #
55 | # It is possible to store the outcomes of the `subplot` method into a list, to manage some axis properties *a posteriori*
56 |
57 | # +
58 | fig = plt.figure()
59 |
60 | listax = []
61 | for p in range(1, 3*3+1):
62 | ax = plt.subplot(3, 3, p)
63 | listax.append(ax)
64 | plt.text(0.5, 0.5, 'Axes number '+str(p), ha='center', va='center')
65 | ax.get_xaxis().set_visible(False)
66 | ax.get_yaxis().set_visible(False)
67 |
68 | for ax in [listax[6], listax[7], listax[8]]:
69 | ax.get_xaxis().set_visible(True)
70 |
71 | for ax in [listax[0], listax[3], listax[6]]:
72 | ax.get_yaxis().set_visible(True)
73 |
74 | for ax in [listax[0], listax[1], listax[2]]:
75 | ax.set_title('Title name')
76 |
77 | plt.show()
78 | # -
79 |
80 | # ## Defining panel grid
81 | #
82 | # To define grid panels, use the [subplot2grid](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.subplot2grid.html) method.
83 |
84 | # +
85 | fig = plt.figure()
86 | # size of the subplot: 3 by 3
87 | # location of the plot: 0(top), 0(left), spans 3 columns (spans 1 row, default)
88 | ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
89 | ax1.get_xaxis().set_visible(False)
90 | ax1.get_yaxis().set_visible(False)
91 | plt.text(0.5, 0.5, 'Axes 1', ha='center', va='center')
92 |
93 | ax2 = plt.subplot2grid((3, 3), (1, 1), rowspan=2)
94 | ax2.get_xaxis().set_visible(False)
95 | ax2.get_yaxis().set_visible(False)
96 | plt.text(0.5, 0.5, 'Axes 2', ha='center', va='center')
97 |
98 | plt.show()
99 | # -
100 |
101 | # ## Manual position of subplots
102 | #
103 | # The [axes](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.pyplot.axes.html) method allows you to create axes by providing their position in figure coordinates.
104 |
105 | fig = plt.figure()
106 | ax = plt.gca() # add a first axes
107 | ax.text(0.5, 0.5, 'First axes', ha='center', va='center')
108 | ax = plt.axes([0.7, 0.7, 0.25, 0.25])
109 | plt.text(0.5, 0.5, 'Small panel', ha='center', va='center')
110 | ax.get_xaxis().set_visible(False)
111 | ax.get_yaxis().set_visible(False)
112 | plt.show()
113 |
114 | # This can be useful for instance to **manually position colorbars**.
115 |
116 | # +
117 | import numpy as np
118 |
119 | delta = 0.01
120 | x = np.arange(-3.0, 3.0, delta)
121 | y = np.arange(-2.0, 2.0, delta)
122 | xx, yy = np.meshgrid(x, y)
123 | zz = xx * yy
124 |
125 | plt.figure()
126 | plt.subplots_adjust(bottom=0.25)
127 | cs = plt.pcolormesh(zz)
128 | cax = plt.axes([0.25, 0.05, 0.5, 0.1]) # define the position of the colorbar
129 | plt.colorbar(cs, cax, orientation='horizontal')
130 | plt.show()
131 | # -
132 |
133 | # ## Displaying plots on identical axes
134 | #
135 | # For aligning several plots, the `ImageGrid` function can be used. For contour plots with individual colorbars:
136 |
137 | # +
138 | from mpl_toolkits.axes_grid1 import AxesGrid, ImageGrid
139 |
140 | fig = plt.figure(figsize=(12, 8))
141 | axgr = ImageGrid(fig, 111, nrows_ncols=(2, 2),
142 | label_mode='L', aspect=False, share_all=True, axes_pad=[1, 0.5],
143 | cbar_mode='each', cbar_size="5%", cbar_pad='5%')
144 |
145 | # recover the list of cbar axes
146 | cbar_axes = axgr.cbar_axes
147 |
148 | # Loop over all the axes within the image grid
149 | for i, ax in enumerate(axgr):
150 | print(i, ax)
151 | cs = ax.pcolormesh(zz * (i + 1))
152 | cb = cbar_axes[i].colorbar(cs)
153 | cb.set_label('label')
154 | # -
155 |
156 | # Note that `xticklabels` appear only on the bottom panels, while `yticklabels` only appear on the left panels.
157 | #
158 | # For plots with a single colorbar, set `cbar_mode='single'`.
159 |
160 | # +
161 | fig = plt.figure(figsize=(12, 8))
162 | axgr = ImageGrid(fig, 111, nrows_ncols=(2, 2),
163 | label_mode='L', aspect=False, share_all=True, axes_pad=[1, 0.5],
164 | cbar_mode='single', cbar_size="5%", cbar_pad='5%')
165 |
166 | # recover the list of cbar axes
167 | cbar_axes = axgr.cbar_axes
168 |
169 | # Loop over all the axes within the image grid
170 | for i, ax in enumerate(axgr):
171 | cs = ax.pcolormesh(zz)
172 | cb = cbar_axes[i].colorbar(cs)
173 | cb.set_label('label')
174 | # -
175 |
176 | # Same thing for time series:
177 |
178 | # +
179 | fig = plt.figure(figsize=(12, 8))
180 | axgr = ImageGrid(fig, 111, nrows_ncols=(2, 1),
181 | label_mode='L', aspect=False, share_all=True, axes_pad=[1, 0.5])
182 |
183 | x = np.linspace(0, 4*np.pi, 1000)
184 | y0 = np.cos(x)
185 | y1 = np.sin(x)
186 | y = [y0, y1]
187 | label = ['cos', 'sin']
188 |
189 | # Loop over all the axes within the image grid
190 | for i, ax in enumerate(axgr):
191 | cs = ax.plot(x, y[i])
192 | ax.set_title(label[i])
193 | ax.set_xticks(np.arange(0, 4*np.pi, np.pi))
194 | ax.set_xticklabels(['0', r'$\pi$', r'$2\pi$', r'$3\pi$', ])
195 | ax.grid(True)
--------------------------------------------------------------------------------
/io/data/nina34.csv:
--------------------------------------------------------------------------------
1 | JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC
2 | 1948 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99
3 | 1949 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99
4 | 1950 24.55 25.06 25.87 26.28 26.18 26.46 26.29 25.88 25.74 25.69 25.47 25.29
5 | 1951 25.24 25.71 26.90 27.58 27.92 27.73 27.60 27.02 27.23 27.20 27.25 26.91
6 | 1952 26.67 26.74 27.17 27.80 27.79 27.18 26.53 26.30 26.36 26.26 25.92 26.21
7 | 1953 26.74 27.00 27.57 28.04 28.28 28.12 27.43 26.94 27.01 26.87 26.88 27.00
8 | 1954 26.98 27.03 26.90 26.64 27.12 26.80 26.11 25.43 25.12 25.23 25.57 25.26
9 | 1955 25.61 25.81 26.22 26.60 26.66 26.55 26.15 25.51 25.28 24.41 24.25 24.57
10 | 1956 25.34 25.76 26.46 26.85 27.13 26.81 26.23 25.68 25.73 25.75 25.56 25.71
11 | 1957 26.04 26.54 27.46 28.23 28.55 28.36 28.17 27.69 27.44 27.42 27.62 27.90
12 | 1958 28.33 28.24 28.27 28.27 28.31 27.99 27.32 26.85 26.40 26.45 26.75 26.62
13 | 1959 27.07 27.18 27.47 27.88 27.70 27.37 26.44 26.09 25.92 26.24 26.04 26.18
14 | 1960 26.27 26.29 26.98 27.49 27.68 27.24 26.88 26.70 26.44 26.22 26.26 26.22
15 | 1961 26.23 26.56 26.94 27.36 27.75 27.67 26.89 26.19 25.78 25.71 26.07 25.97
16 | 1962 25.96 26.19 26.80 27.13 27.05 27.08 26.76 26.33 25.94 25.97 25.75 25.67
17 | 1963 25.77 26.22 27.18 27.78 27.63 27.62 27.78 27.48 27.40 27.36 27.47 27.62
18 | 1964 27.34 27.13 27.02 26.95 26.82 26.59 26.33 25.60 25.32 25.37 25.26 25.23
19 | 1965 25.66 26.19 26.94 27.38 27.99 28.09 27.90 27.97 28.01 28.17 28.12 27.96
20 | 1966 27.67 27.55 28.21 28.16 27.55 27.64 27.33 26.48 26.27 26.22 26.23 26.03
21 | 1967 25.88 26.11 26.50 26.74 27.35 27.47 26.97 26.44 25.86 25.97 26.08 25.95
22 | 1968 25.69 25.68 26.33 27.10 27.19 27.88 27.58 27.01 26.72 26.75 27.20 27.27
23 | 1969 27.50 27.86 27.82 28.13 28.29 27.69 27.08 27.02 27.15 27.34 27.10 26.98
24 | 1970 26.83 26.95 27.14 27.74 27.63 27.04 26.21 25.60 25.65 25.60 25.57 25.27
25 | 1971 24.81 25.18 25.92 26.63 26.95 26.60 26.13 25.75 25.72 25.47 25.56 25.37
26 | 1972 25.62 26.30 27.09 27.89 28.32 28.18 28.14 27.95 27.95 28.26 28.61 28.69
27 | 1973 28.34 27.95 27.55 27.24 26.96 26.55 25.76 25.22 25.06 24.73 24.33 24.33
28 | 1974 24.46 25.10 25.84 26.46 26.64 26.53 26.39 26.33 26.08 25.78 25.61 25.63
29 | 1975 26.09 26.07 26.19 26.86 26.80 26.23 25.90 25.33 25.05 24.89 25.05 24.67
30 | 1976 24.54 25.49 26.46 26.88 27.20 27.35 27.13 26.98 27.02 27.46 27.41 27.09
31 | 1977 27.32 27.13 27.47 27.44 27.72 27.74 27.38 26.85 27.12 27.35 27.19 27.29
32 | 1978 27.17 27.00 27.09 27.12 27.20 27.02 26.74 26.07 26.01 26.25 26.33 26.54
33 | 1979 26.41 26.53 27.27 27.83 27.69 27.43 26.82 26.75 26.99 26.83 26.99 27.11
34 | 1980 27.08 26.97 27.31 27.75 28.01 27.94 27.23 26.48 26.44 26.46 26.60 26.65
35 | 1981 26.18 26.11 26.64 27.29 27.36 27.27 26.65 26.33 26.53 26.42 26.29 26.40
36 | 1982 26.67 26.59 27.41 28.03 28.39 28.26 27.66 27.58 28.21 28.71 28.62 28.80
37 | 1983 28.89 28.69 28.66 28.77 28.84 28.27 27.18 26.59 26.20 25.56 25.41 25.57
38 | 1984 25.88 26.56 26.77 27.14 27.20 26.83 26.79 26.61 26.38 26.04 25.52 25.25
39 | 1985 25.38 26.03 26.50 26.64 26.90 26.81 26.56 26.30 26.02 26.23 26.33 26.19
40 | 1986 25.89 26.05 26.87 27.49 27.40 27.42 27.18 27.17 27.24 27.53 27.71 27.72
41 | 1987 27.68 27.88 28.27 28.39 28.56 28.65 28.59 28.42 28.36 27.96 27.77 27.54
42 | 1988 27.45 27.03 27.38 27.38 26.68 25.99 25.56 25.66 25.72 24.82 24.65 24.63
43 | 1989 24.58 25.28 26.08 26.74 27.06 27.14 26.72 26.33 26.42 26.32 26.25 26.46
44 | 1990 26.56 26.96 27.33 27.90 28.02 27.64 27.38 27.07 26.94 26.93 26.81 26.95
45 | 1991 27.03 27.07 27.32 28.01 28.20 28.25 28.05 27.53 27.14 27.58 27.90 28.29
46 | 1992 28.38 28.53 28.66 29.02 28.98 28.30 27.51 26.91 26.65 26.42 26.42 26.44
47 | 1993 26.69 27.16 27.67 28.41 28.71 28.08 27.52 26.99 27.07 26.77 26.71 26.76
48 | 1994 26.64 26.81 27.39 28.08 28.24 28.04 27.54 27.38 27.20 27.47 27.81 27.85
49 | 1995 27.57 27.49 27.75 28.10 27.82 27.59 27.08 26.23 25.88 25.84 25.60 25.65
50 | 1996 25.69 25.89 26.67 27.35 27.55 27.29 26.85 26.64 26.27 26.27 26.29 25.95
51 | 1997 26.01 26.38 27.04 27.98 28.58 28.82 28.86 28.75 28.85 29.08 29.12 28.89
52 | 1998 28.93 28.78 28.62 28.60 28.51 27.34 26.30 25.57 25.46 25.25 25.24 24.91
53 | 1999 24.86 25.43 26.33 26.70 26.79 26.53 26.13 25.63 25.63 25.48 25.12 24.86
54 | 2000 24.78 25.21 26.30 26.95 27.07 26.94 26.63 26.37 26.20 26.02 25.92 25.68
55 | 2001 25.81 26.12 26.80 27.33 27.60 27.54 27.25 26.80 26.52 26.57 26.33 26.19
56 | 2002 26.39 26.71 27.29 27.84 28.24 28.44 28.03 27.72 27.81 27.96 28.17 27.97
57 | 2003 27.15 27.39 27.65 27.68 27.32 27.44 27.50 27.11 26.99 27.14 27.04 27.03
58 | 2004 26.81 26.98 27.40 27.89 28.00 27.86 27.78 27.62 27.53 27.44 27.36 27.34
59 | 2005 27.21 27.11 27.73 28.08 28.24 27.77 27.06 26.80 26.68 26.65 26.11 25.68
60 | 2006 25.63 26.08 26.57 27.50 27.85 27.73 27.30 27.16 27.32 27.41 27.69 27.74
61 | 2007 27.24 26.88 27.10 27.50 27.46 27.37 26.71 26.14 25.61 25.32 25.17 25.01
62 | 2008 24.86 25.08 26.07 26.83 27.09 27.04 26.99 26.72 26.47 26.37 26.25 25.74
63 | 2009 25.66 25.96 26.59 27.47 27.99 28.04 27.78 27.42 27.40 27.60 28.16 28.34
64 | 2010 28.07 28.01 28.18 28.20 27.71 27.04 26.22 25.47 25.12 25.03 25.06 25.00
65 | 2011 25.00 25.64 26.36 27.05 27.41 27.35 26.87 26.21 25.92 25.67 25.52 25.54
66 | 2012 25.67 26.08 26.67 27.32 27.61 27.75 27.54 27.32 27.10 26.98 26.86 26.34
67 | 2013 26.01 26.24 27.03 27.57 27.53 27.30 26.91 26.47 26.54 26.52 26.56 26.43
68 | 2014 26.05 26.14 27.00 27.90 28.25 27.96 27.23 26.82 27.01 27.16 27.46 27.31
69 | 2015 27.05 27.17 27.75 28.52 28.85 28.90 28.75 28.79 28.93 29.08 29.42 29.26
70 | 2016 29.11 29.01 28.90 28.72 28.23 27.69 26.82 26.28 26.14 25.98 25.94 26.10
71 | 2017 26.12 26.67 27.32 28.03 28.30 28.06 27.54 26.70 26.29 26.15 25.74 25.62
72 | 2018 25.57 25.97 26.48 27.31 27.73 27.77 27.42 26.94 27.19 27.62 27.61 27.49
73 | 2019 27.19 27.46 28.09 28.44 28.48 28.18 27.64 26.90 26.75 27.20 27.22 27.12
74 | 2020 27.18 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99 -99.99
75 | -99.99
76 | Nino 3.4 Index using ersstv5 from CPC
77 | https://www.esrl.noaa.gov/psd/data/climateindices/list/for info
78 |
--------------------------------------------------------------------------------
/misc/data/oni.data:
--------------------------------------------------------------------------------
1 | 1950 2021
2 | 1950 -1.53 -1.34 -1.16 -1.18 -1.07 -0.85 -0.54 -0.42 -0.39 -0.44 -0.60 -0.80
3 | 1951 -0.82 -0.54 -0.17 0.18 0.36 0.58 0.70 0.89 0.99 1.15 1.04 0.81
4 | 1952 0.53 0.37 0.34 0.29 0.20 0.00 -0.08 0.00 0.15 0.10 0.04 0.15
5 | 1953 0.40 0.60 0.63 0.66 0.75 0.77 0.75 0.73 0.78 0.84 0.84 0.81
6 | 1954 0.76 0.47 -0.05 -0.41 -0.54 -0.50 -0.64 -0.84 -0.90 -0.77 -0.73 -0.66
7 | 1955 -0.68 -0.62 -0.69 -0.80 -0.79 -0.72 -0.68 -0.75 -1.09 -1.42 -1.67 -1.47
8 | 1956 -1.11 -0.76 -0.63 -0.54 -0.52 -0.51 -0.57 -0.55 -0.46 -0.42 -0.43 -0.43
9 | 1957 -0.25 0.06 0.41 0.72 0.92 1.11 1.25 1.32 1.33 1.39 1.53 1.74
10 | 1958 1.81 1.66 1.27 0.93 0.74 0.64 0.57 0.43 0.39 0.44 0.50 0.61
11 | 1959 0.61 0.62 0.52 0.33 0.20 -0.07 -0.18 -0.28 -0.09 -0.03 0.05 -0.04
12 | 1960 -0.10 -0.10 -0.07 0.03 0.02 0.03 0.13 0.24 0.27 0.20 0.12 0.05
13 | 1961 0.04 0.03 0.04 0.09 0.23 0.27 0.14 -0.13 -0.30 -0.26 -0.19 -0.16
14 | 1962 -0.24 -0.22 -0.20 -0.26 -0.28 -0.20 -0.04 -0.07 -0.11 -0.22 -0.31 -0.43
15 | 1963 -0.40 -0.15 0.15 0.27 0.31 0.52 0.86 1.14 1.22 1.29 1.37 1.31
16 | 1964 1.07 0.62 0.12 -0.33 -0.58 -0.58 -0.60 -0.66 -0.76 -0.80 -0.82 -0.78
17 | 1965 -0.59 -0.28 -0.07 0.18 0.46 0.83 1.22 1.54 1.85 1.98 1.97 1.72
18 | 1966 1.37 1.17 0.98 0.66 0.35 0.24 0.24 0.12 -0.05 -0.10 -0.18 -0.30
19 | 1967 -0.41 -0.48 -0.53 -0.45 -0.24 -0.00 0.05 -0.16 -0.30 -0.38 -0.34 -0.44
20 | 1968 -0.64 -0.74 -0.62 -0.44 -0.04 0.28 0.58 0.53 0.45 0.55 0.73 0.98
21 | 1969 1.13 1.09 0.95 0.77 0.61 0.43 0.36 0.51 0.79 0.86 0.81 0.63
22 | 1970 0.51 0.34 0.29 0.19 0.04 -0.30 -0.63 -0.76 -0.77 -0.74 -0.86 -1.15
23 | 1971 -1.36 -1.38 -1.12 -0.85 -0.73 -0.74 -0.80 -0.77 -0.82 -0.85 -0.96 -0.90
24 | 1972 -0.71 -0.35 0.06 0.41 0.67 0.92 1.13 1.37 1.58 1.84 2.09 2.12
25 | 1973 1.84 1.25 0.54 -0.10 -0.54 -0.87 -1.11 -1.28 -1.45 -1.71 -1.95 -2.03
26 | 1974 -1.84 -1.55 -1.23 -1.03 -0.91 -0.77 -0.53 -0.37 -0.41 -0.61 -0.75 -0.64
27 | 1975 -0.54 -0.57 -0.65 -0.73 -0.83 -0.98 -1.13 -1.20 -1.37 -1.43 -1.55 -1.65
28 | 1976 -1.56 -1.17 -0.73 -0.47 -0.28 -0.05 0.18 0.35 0.62 0.81 0.86 0.85
29 | 1977 0.71 0.64 0.34 0.23 0.21 0.34 0.35 0.42 0.57 0.73 0.81 0.79
30 | 1978 0.69 0.42 0.06 -0.18 -0.31 -0.29 -0.36 -0.42 -0.42 -0.29 -0.08 0.00
31 | 1979 0.03 0.07 0.20 0.28 0.23 0.05 0.04 0.17 0.33 0.45 0.52 0.64
32 | 1980 0.59 0.46 0.34 0.38 0.48 0.46 0.25 0.03 -0.07 0.02 0.11 -0.01
33 | 1981 -0.26 -0.50 -0.47 -0.37 -0.26 -0.29 -0.30 -0.25 -0.16 -0.13 -0.15 -0.08
34 | 1982 -0.05 0.07 0.19 0.47 0.66 0.72 0.79 1.07 1.58 1.97 2.18 2.23
35 | 1983 2.18 1.92 1.54 1.29 1.06 0.72 0.31 -0.08 -0.46 -0.81 -1.00 -0.91
36 | 1984 -0.60 -0.42 -0.34 -0.43 -0.51 -0.45 -0.30 -0.16 -0.24 -0.56 -0.92 -1.14
37 | 1985 -1.04 -0.85 -0.77 -0.78 -0.78 -0.63 -0.49 -0.46 -0.40 -0.35 -0.27 -0.36
38 | 1986 -0.49 -0.47 -0.31 -0.20 -0.12 -0.04 0.22 0.44 0.71 0.94 1.14 1.22
39 | 1987 1.23 1.19 1.06 0.95 0.97 1.22 1.51 1.70 1.65 1.48 1.25 1.11
40 | 1988 0.81 0.54 0.14 -0.31 -0.88 -1.30 -1.30 -1.11 -1.19 -1.48 -1.80 -1.85
41 | 1989 -1.69 -1.43 -1.08 -0.83 -0.58 -0.40 -0.31 -0.27 -0.24 -0.22 -0.16 -0.05
42 | 1990 0.14 0.21 0.28 0.29 0.29 0.31 0.33 0.38 0.39 0.35 0.40 0.41
43 | 1991 0.41 0.26 0.22 0.26 0.45 0.64 0.73 0.64 0.62 0.79 1.21 1.53
44 | 1992 1.71 1.63 1.48 1.29 1.06 0.73 0.37 0.09 -0.13 -0.25 -0.28 -0.13
45 | 1993 0.09 0.30 0.50 0.67 0.70 0.57 0.32 0.25 0.15 0.10 0.04 0.06
46 | 1994 0.06 0.07 0.17 0.31 0.42 0.41 0.44 0.43 0.55 0.74 1.01 1.09
47 | 1995 0.96 0.72 0.53 0.30 0.14 -0.03 -0.24 -0.54 -0.81 -0.97 -1.00 -0.98
48 | 1996 -0.90 -0.75 -0.59 -0.39 -0.31 -0.30 -0.27 -0.32 -0.35 -0.40 -0.45 -0.49
49 | 1997 -0.50 -0.36 -0.10 0.28 0.75 1.22 1.60 1.90 2.14 2.33 2.40 2.39
50 | 1998 2.24 1.93 1.44 0.99 0.45 -0.13 -0.78 -1.12 -1.31 -1.35 -1.48 -1.57
51 | 1999 -1.55 -1.30 -1.07 -0.98 -1.02 -1.04 -1.10 -1.11 -1.16 -1.26 -1.46 -1.65
52 | 2000 -1.66 -1.41 -1.07 -0.81 -0.71 -0.64 -0.55 -0.51 -0.55 -0.63 -0.75 -0.74
53 | 2001 -0.68 -0.52 -0.44 -0.34 -0.25 -0.12 -0.08 -0.13 -0.19 -0.29 -0.35 -0.31
54 | 2002 -0.15 0.03 0.09 0.20 0.43 0.65 0.79 0.86 1.01 1.21 1.31 1.14
55 | 2003 0.92 0.63 0.38 -0.04 -0.26 -0.16 0.08 0.21 0.26 0.29 0.35 0.35
56 | 2004 0.37 0.31 0.23 0.17 0.17 0.28 0.47 0.64 0.70 0.67 0.66 0.69
57 | 2005 0.64 0.58 0.45 0.43 0.29 0.11 -0.06 -0.14 -0.11 -0.29 -0.57 -0.84
58 | 2006 -0.85 -0.77 -0.57 -0.37 -0.14 -0.03 0.10 0.30 0.54 0.77 0.94 0.94
59 | 2007 0.66 0.22 -0.12 -0.32 -0.38 -0.47 -0.56 -0.81 -1.07 -1.34 -1.50 -1.60
60 | 2008 -1.64 -1.52 -1.29 -1.01 -0.84 -0.61 -0.37 -0.23 -0.24 -0.35 -0.55 -0.73
61 | 2009 -0.85 -0.79 -0.61 -0.33 0.01 0.28 0.45 0.58 0.71 1.01 1.36 1.56
62 | 2010 1.50 1.22 0.84 0.35 -0.17 -0.66 -1.05 -1.35 -1.56 -1.64 -1.64 -1.59
63 | 2011 -1.42 -1.19 -0.93 -0.73 -0.55 -0.44 -0.48 -0.62 -0.83 -1.01 -1.09 -1.04
64 | 2012 -0.86 -0.72 -0.59 -0.47 -0.26 -0.01 0.25 0.37 0.37 0.27 0.05 -0.21
65 | 2013 -0.43 -0.43 -0.34 -0.30 -0.36 -0.41 -0.40 -0.32 -0.26 -0.18 -0.17 -0.27
66 | 2014 -0.42 -0.46 -0.27 0.04 0.21 0.16 0.05 0.07 0.23 0.49 0.64 0.66
67 | 2015 0.55 0.47 0.53 0.70 0.93 1.18 1.52 1.86 2.16 2.42 2.57 2.64
68 | 2016 2.48 2.14 1.58 0.94 0.39 -0.07 -0.36 -0.54 -0.63 -0.69 -0.67 -0.56
69 | 2017 -0.34 -0.16 0.05 0.20 0.30 0.31 0.14 -0.11 -0.38 -0.65 -0.84 -0.97
70 | 2018 -0.92 -0.85 -0.70 -0.50 -0.22 -0.01 0.09 0.23 0.49 0.76 0.90 0.81
71 | 2019 0.75 0.72 0.71 0.66 0.54 0.45 0.28 0.14 0.19 0.35 0.51 0.55
72 | 2020 0.50 0.48 0.40 0.19 -0.08 -0.30 -0.41 -0.57 -0.89 -1.17 -1.27 -1.19
73 | 2021 -1.05 -0.93 -0.84 -0.66 -0.48 -0.35 -99.90 -99.90 -99.90 -99.90 -99.90 -99.90
74 | -99.9
75 | ONI from CPC
76 | Provided by NOAA/PSL
77 | From http://www.cpc.ncep.noaa.gov/data/indices/oni.ascii.txt
78 | As of 09/20/2015, uses NOAA ERSST V5 from V3
79 | represents 3 month running mean of ERSST.v5 SST
80 | anomalies in the Niño 3.4 region
81 | (5N-5oS, 120o-170oW)]
82 |
--------------------------------------------------------------------------------
/data_types/list.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Lists
17 | #
18 | # ## Definition
19 | #
20 | # A list is an *ordered* sequence of elements, which are *not necessarily of the same type* and are accessible via a unique *index* (integer), which is the element's position within the list.
21 | #
22 | #
23 | #
24 | #
25 | # Note. A Python tuple can be viewed as an immutable list; `[]` are replaced by `()`.
26 | #
27 | #
28 | #
29 | # ## Usage
30 | #
31 | # Lists are used in many places ([python.org](https://docs.python.org/fr/3/tutorial/datastructures.html)):
32 | #
33 | # - The script arguments are stored in a list of strings (```sys.argv```)
34 | # - The Python path is stored in a list (```sys.path```)
35 | # - Used in loops (repeat operations over a list of objects)
36 | # - The `dir` function returns methods/attributes as a list of string
37 | # - Might be used as *stacks* (last-in, first-out). Not optimized for first-in, first-out.
38 | # - To handle function arguments (```*args``` arguments)
39 | #
40 | # ## Manipulation
41 | #
42 | # To have more about lists, visit [python.org](https://docs.python.org/3/tutorial/datastructures.html#more-on-lists)
43 |
44 | # ### List creation
45 | #
46 | # The creation of a list is done by using `[]`. To create an empty list:
47 |
48 | x = [] # empty list
49 | x
50 |
51 | # To create a list with elements:
52 |
53 | x = [1, 'string', map]
54 | x
55 |
56 | # ### Adding elements
57 | #
58 | # Two methods are available for adding elements to a list. First, the `append` method, which adds an element to a list:
59 |
60 | x = [1]
61 | x.append([1, 2, 3, 4]) # append -> add list in list
62 | x.append('String')
63 | x
64 |
65 | # To get the length of a list:
66 |
67 | len(x)
68 |
69 | # The `x` list now contains 3 elements, one `int`, one `list` and one `string`.
70 | #
71 | # The `extend` method, on the other hand, adds **the elements** of an object to a list. Let's repeat the above operations but replacing `append` by `extend`.
72 |
73 | y = [1]
74 | y.extend([1, 2, 3, 4]) # extend -> add list elements in a list
75 | y.extend('String') # !!! Strings are considered as a list of char!
76 | y
77 |
78 | len(y)
79 |
80 | # The `y` list now contains `int` and `char` elements. The elements of the first list (`1, 2, 3, 4`) have been added, as well as the characters of the `'String'` argument (in this case, the string behaves like a list of `char`). `extend` can thus be used for list concatenation.
81 | #
82 | # Another way of concatenating lists is to use the `+` symbol; the `*` symbol repeats a list.
83 |
84 | x = [0, 1, 2]
85 | y = [3, 4, 5]
86 | x + y
87 |
88 | 2 * x
89 |
90 | 2 * y
91 |
92 | # **As you see, `+` and `*` should not be used for mathematical operations on `list`!**
93 |
94 | # ### Removing elements
95 | #
96 | # There are two methods to remove elements from a `list`. First, `pop` removes the element whose index is given as argument (if no argument is given, the last element is removed). The function returns the value of the removed element.
97 |
98 | x = [15, 16, 17, 18, 19]
99 | x
100 |
101 | removed_val = x.pop(2)
102 | removed_val, x
103 |
104 | removed_last = x.pop()
105 | removed_last, x
106 |
107 | # The `remove` method removes the *first* element within the list that matches the *value* provided as argument. This method does not return anything, contrary to `pop`.
108 |
109 | x = [16, 17, 16, 20]
110 | x
111 |
112 | x.remove(16)
113 | x
114 |
115 | # If the value is not found, the `remove` function raises an error.
116 |
117 | # ### List copy
118 | #
119 | # The copy of mutable objects, like `list`, must be done carefully. First, let's create one list `x`:
120 |
121 | x = [1, 2, 3, 4, 5]
122 |
123 | # To copy the `x` list inside a new variable `y`, one natural thing to do would be:
124 |
125 | y = x
126 |
127 | # However, looking at memory addresses using the `id` function shows that both variables share the same memory address.
128 |
129 | id(x), id(y)
130 |
131 | # Therefore, if you modify `x`, modifications will also be visible on `y`, and conversely:
132 |
133 | x[1] = 30
134 | y[3] = 1000
135 | x, y
136 |
137 | # This is because assignment of a mutable object copies the reference (i.e. the memory address), not the values.
138 | #
139 | # The right way to copy a mutable object is by using the `copy` method:
140 |
141 | x = [1, 2, 3, 4, 5]
142 | y = x.copy() # make a (shallow) copy of x and store it in a new object y
143 | x[1] = 30
144 | y[3] = 1000
145 | x, y
146 |
147 | # In this case, the `x` and `y` objects are completely different objects and are therefore independent.
148 |
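# Note (added remark, not in the original): `copy` makes a *shallow* copy. With nested lists,
# the inner lists are still shared; the `copy.deepcopy` function can be used to obtain a fully
# independent copy. A minimal sketch:

# +
import copy

x = [[1, 2], [3, 4]]
y = x.copy()          # shallow copy: the inner lists are shared with x
z = copy.deepcopy(x)  # deep copy: the inner lists are duplicated

x[0][0] = 100
y[0][0], z[0][0]  # (100, 1)
# -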
149 | # ### Count, reverse, sort
150 | #
151 | # Some methods allow to investigate and manipulate lists.
152 | #
153 | # To count the number occurrences of an element:
154 |
155 | x = [5, 6, 15, 7, 2, 15]
156 | x.count(15)
157 |
158 | # To reverse a list:
159 |
160 | x.reverse()
161 | x
162 |
163 | # Note that the `reverse` method works **in place** and returns nothing. Therefore, the following code is wrong:
164 |
165 | x = (x.reverse())
166 | x
167 |
168 | # Here, the call to `x.reverse()` on the right-hand side effectively reverses the `x` list. But the output of the `reverse()` method (which is `None`) is assigned to a new `x` variable, which overwrites the `x` list.
169 |
170 | # Sorting elements is done in the same way (also in place)
171 |
172 | x = [5, 6, 15, 7, 2, 15]
173 | x.sort()
174 | x
175 |
176 | # ### Check for existence
177 | #
178 | # To check if an element is in a list, you can use the `in` operator:
179 |
180 | (2 in x)
181 |
182 | # To get the index of an element (first occurrence), use the `index` method. For instance, to get the index of element `7`:
183 |
184 | ind7 = x.index(7)
185 | ind7
186 |
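# Note (added remark, not in the original): if the value is absent from the list, `index`
# raises a `ValueError`, which can be caught as follows:

# +
try:
    x.index(1000)
except ValueError:
    print('1000 is not in the list')
# -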
187 | # ### List indexing
188 | #
189 | # The elements can be accessed using their index within the list. **In Python, indexes start at 0.** Some examples are shown below with the following `x` list:
190 |
191 | x = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
192 | x
193 |
194 | x[0]
195 |
196 | x[2:5] # getting elements from index 2 to index 4(!)
197 |
198 | x[-1] # getting last element
199 |
200 | x[-5:-3] # getting the elements from 5th to last to 4th(!) to last
201 |
202 | x[6:] # getting all the elements starting from index 6
203 |
204 | x[:3] # getting all elements from 0 to index 2(!)
205 |
206 | # getting elements starting from index 8
207 | # going backward down to index 3 (index 2 excluded) with a stride of -1
208 | x[8:2:-1]
209 |
210 | x[::4] # getting all the elements with a stride of 4
211 |
212 | # Note that the `start:end:stride` syntax can be replaced by `slice(start, end, stride)`.
213 |
214 | start = 0
215 | end = 6
216 | stride = 2
217 | x[start:end:stride]
218 |
219 | x[slice(start, end, stride)]
220 |
221 | x[slice(5)] # equivalent to slice(None, 5, None)
222 |
223 | x[slice(3, 7)] # equivalent to slice(3, 7, None)
224 |
--------------------------------------------------------------------------------
/plots/contours.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # 2D plots
17 | #
18 | # ## Contour lines
19 | #
20 | # Contour plots are done by using the [contour](https://matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.contour.html) function.
21 |
22 | # +
23 | import matplotlib.pyplot as plt
24 | import numpy as np
25 | import matplotlib.mlab as mlab
26 | import matplotlib as mp
27 |
28 | delta = 0.025
29 | x = np.arange(-3.0, 3.0, delta)
30 | y = np.arange(-2.0, 2.0, delta)
31 | xx, yy = np.meshgrid(x, y)
32 | zz = xx * yy
33 | # -
34 |
35 | # ### Simple contour plots
36 | #
37 | # A simple contour plot can be done by providing the number of levels to draw (the levels themselves are determined from the minimum and maximum values of the data).
38 |
39 | plt.figure()
40 | ax = plt.gca()
41 | cs = plt.contour(xx, yy, zz, 21, cmap=plt.cm.jet) # 21 contours drawn
42 | cb = plt.colorbar(cs, orientation='horizontal')
43 | plt.show()
44 |
45 | # To specify the levels to draw, you can provide a `levels` argument:
46 |
47 | plt.figure()
48 | cs = plt.contour(xx, yy, zz, levels=np.arange(-6, 7, 1), # specify the levels to draw
49 | linewidths=0.5, colors='k')
50 | plt.contour(xx, yy, zz, levels=0, colors='r') # draw 0 contour
51 | plt.show()
52 |
53 | # ### Adding labels
54 | #
55 | # Adding contour labels can be done by providing a list of labels to draw and a string formatting.
56 | #
57 | # If the `manual` argument is set to `True`, the user can choose where the labels are put.
58 |
59 | plt.figure()
60 | cs = plt.contour(xx, yy, zz, levels=np.arange(-6, 7, 1), # specify the levels to draw
61 | linewidths=0.5, colors='k')
62 | plt.clabel(cs, cs.levels, fmt="%.1d", manual=False)
63 | plt.show()
64 |
65 | # ## Filled contours
66 | #
67 | # Filled contours are achieved by using the [contourf](https://matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.contourf.html) function.
68 |
69 | plt.figure()
70 | ax = plt.gca()
71 | cs = plt.contourf(xx, yy, zz,
72 | levels = np.arange(-5, 6, 1),
73 | cmap=plt.cm.get_cmap('Spectral'), # a '_r' suffix would reverse the cmap
74 | )
75 | cb = plt.colorbar(cs,orientation='horizontal', drawedges=True) # add the colorbar
76 | cb.set_ticks(cs.levels[::2])
77 | cb.set_label('Colorbar label')
78 | plt.show()
79 |
80 | # By default, data outside the level range are not shown (appear as background). To overcome this, set the
81 | # `extend` argument to `'both'`
82 |
83 | plt.figure()
84 | ax = plt.gca()
85 | cs = plt.contourf(xx, yy, zz,
86 | levels=np.arange(-5, 6, 1),
87 | cmap=plt.cm.get_cmap('Spectral_r'), #_r means reverse cmap
88 | extend='both') # extend means to saturate the colors (instead of masking)
89 | cb = plt.colorbar(cs, orientation='vertical', drawedges=False) # add the colorbar
90 | cb.set_ticks(cs.levels[::2])
91 | cb.set_label('Colorbar label')
92 | plt.show()
93 |
94 | # To overlay contour lines over a filled contour, simply call the `contour` function after the `contourf` one. To recover the plotted levels, use the `levels` attribute.
95 | #
96 | # To add the contour lines on the filled contour colorbar, use the `add_lines` method
97 |
98 | plt.figure()
99 | ax = plt.gca()
100 | cs = plt.contourf(xx, yy, zz,
101 | levels = np.arange(-5, 6, 1),
102 | cmap=plt.cm.get_cmap('Spectral'),
103 | extend='both')
104 | cb = plt.colorbar(cs, orientation='horizontal')
105 | cb.set_ticks(cs.levels[::2])
106 | cb.set_label('Colorbar label')
107 | cl = plt.contour(xx, yy, zz, levels=0, colors='k', linewidths=5)
108 | cb.add_lines(cl)
109 | plt.show()
110 |
111 | # ## Pcolors
112 | #
113 | # Colored meshes are achieved by using the [pcolor](https://matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.pcolor.html) and the [pcolormesh](https://matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.pcolormesh.html) functions.
114 | #
115 | # The latter is faster than the former.
116 |
117 | plt.figure()
118 | ax = plt.gca()
119 | cs = plt.pcolor(xx, yy, zz, cmap=plt.cm.get_cmap('hsv'), edgecolors='none')
120 | plt.colorbar(cs)
121 | plt.xlim(np.min(xx), np.max(xx))
122 | plt.ylim(np.min(yy), np.max(yy))
123 | plt.show()
124 |
125 | # To change the color limits, use the `set_clim` method.
126 |
127 | plt.figure()
128 | ax = plt.gca()
129 | cs = plt.pcolormesh(xx, yy, zz, cmap=plt.cm.get_cmap('hsv'))
130 | cs.set_clim(-4, 4)
131 | plt.colorbar(cs)
132 | plt.xlim(np.min(xx), np.max(xx))
133 | plt.ylim(np.min(yy), np.max(yy))
134 | plt.show()
135 |
136 | # ## Imshow
137 |
138 | # +
139 | # Preparing some data and functions to show how imshow works
140 | bbox = dict(boxstyle="round,pad=0.3", fc="lightgray", ec="k", lw=1)
141 |
142 | def add_text():
143 |
144 | for i in range(0, len(x)):
145 | for j in range(0, len(y)):
146 | plt.text(x[i], y[j], data[i, j], ha='center', va='center', bbox=bbox)
147 |
148 | data = np.arange(0, 15)
149 | data = np.reshape(data, (3, 5))
150 | x = np.arange(3) + 100
151 | y = np.arange(5) + 300
152 | # -
153 |
154 | # `pcolor` and `pcolormesh` interpret the `x` and `y` coordinates as cell corners. Therefore, plotting the `(3, 5)` array with them only shows `(2, 4)` pixels.
155 |
156 | fig = plt.figure()
157 | plt.gca()
158 | cs = plt.pcolormesh(x, y, data.T)
159 | plt.colorbar(cs)
160 | cs.set_clim(data.min(), data.max())
161 | add_text()
162 | plt.xlim(x.min()-0.5, x.max()+0.5)
163 | plt.ylim(y.min()-0.5, y.max()+0.5)
164 | plt.title('pcolormesh')
165 | plt.show()
166 |
167 | # To display all the data cells without any interpolation, use the [imshow](https://matplotlib.org/3.1.3/api/_as_gen/matplotlib.pyplot.imshow.html) function as follows:
168 |
169 | fig = plt.figure()
170 | ax = plt.gca()
171 | plt.title('imshow')
172 | cs = ax.imshow(data.T, interpolation='none') # int = none fails on PDF
173 | plt.colorbar(cs)
174 | plt.show()
175 |
176 | # By default, the `imshow` function displays the figure with `x` and `y` as pixel indices. Furthermore, one can notice that by default the `imshow` function places the `[0, 0]` point in the upper left corner.
177 | #
178 | # In order to draw the figure in the right way:
179 | # - set the `origin` argument as equal to `lower`, in order to have the `[0, 0]` point at the lower left corner.
180 | # - Add the data limits by using the `extent` argument, which should be a 4-element sequence (`xmin, xmax, ymin, ymax`).
181 |
182 | plt.figure()
183 | ax = plt.gca()
184 | extent = [x.min()-0.5, x.max()+0.5, y.min()-0.5, y.max()+0.5]
185 | cs = ax.imshow(data.T, interpolation="none", extent=extent, origin='lower') # int = none fails on PDF
186 | plt.colorbar(cs)
187 | add_text()
188 | plt.show()
189 |
190 | # ## Masked values
191 | #
192 | # By default, masked values are shown in the background color (white by default).
193 |
194 | # +
195 | zzm = np.ma.masked_where(np.abs(zz) >= 3, zz)
196 |
197 | plt.figure()
198 | cs = plt.imshow(zzm, origin='lower')
199 | plt.show()
200 | # -
201 |
202 | # To change the default background color, use the [set_facecolor](https://matplotlib.org/3.1.1/api/_as_gen/matplotlib.axes.Axes.set_facecolor.html) method.
203 |
204 | plt.figure()
205 | ax = plt.gca()
206 | ax.set_facecolor('DarkGray')
207 | cs = plt.imshow(zzm, origin='lower')
208 | plt.show()
209 |
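# As an alternative sketch (not in the original), the masked color can also be set on a copy of
# the colormap itself with the `set_bad` method:

# +
import copy

cmap = copy.copy(plt.cm.viridis)
cmap.set_bad('DarkGray')  # color used for masked (bad) values

plt.figure()
cs = plt.imshow(zzm, origin='lower', cmap=cmap)
plt.colorbar(cs)
plt.show()
# -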
--------------------------------------------------------------------------------
/misc/dask_examples.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: ipynb,py:light
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Parallel programing with Dask
17 | #
18 | # [Dask](http://xarray.pydata.org/en/stable/user-guide/dask.html) is a library that allows an array to be decomposed into small objects (`chunks`). Therefore, big datasets are not loaded into memory at once. Additionally, parallel computing is possible on these `chunk` objects. Let's see an example.
19 | #
20 | # First, the SST field is loaded using `xarray`
21 |
22 | # +
23 | import matplotlib.pyplot as plt
24 | import xarray as xr
25 |
26 | data = xr.open_dataset('data/surface_thetao.nc')
27 | data = data.isel(olevel=0)
28 | data = data['thetao']
29 | data
30 | # -
31 |
32 | # Let's compute the time-mean of the given dataset, and load in memory the output.
33 |
34 | # %%time
35 | datamean = data.mean(dim='time_counter')
36 | datamean = datamean.compute() # does the calculation and store in memory
37 | datamean.min()
38 |
39 | # Now, let's spatially divide our dataset into squares of 150x150 pixels. This is done using the `chunk` method (you can also provide a `chunks` argument in the `open_dataset` method).
40 |
41 | data = xr.open_dataset('data/surface_thetao.nc')
42 | data = data.isel(olevel=0)
43 | data = data['thetao']
44 | data = data.chunk({'x': 150, 'y': 150})
45 | data
46 |
47 | # %%time
48 | datamean2 = data.mean(dim='time_counter')
49 | datamean2 = datamean2.compute()
50 | datamean2.min()
51 |
52 | # The computation time is much shorter with the chunked data array, and memory usage is reduced. To see how dask manages the computation, you can use the `visualize` method. It first requires that the `dask` object is extracted from the `xarray` object.
53 |
54 | datamean2 = data.mean(dim='time_counter')
55 | dask_obj = datamean2.data
56 | dask_obj.visualize()
57 |
58 | # In the above graph, it can be seen that each chunk has its own `(y, x)` map.
59 | #
60 | # If now the mean over all dimensions is computed:
61 |
62 | datamean3 = data.mean()
63 | dask_obj = datamean3.data
64 | dask_obj.visualize()
65 |
66 | # In this case, the mean maps are first computed for each chunk. Then the mean maps for some chunks are recombined together. The last 4 objects are finally aggregated together to give the final output.
67 | #
68 | # Many functions implemented in `xarray` will work with `dask` (cf. [the list of available functions](https://numpy.org/doc/stable/reference/ufuncs.html#available-ufuncs)).
69 | #
70 | # However, if the function that you want to use is missing, user-defined `ufunc` can be created.
71 |
72 | # ## Using user-defined functions in parallel.
73 | #
74 | # In order to use a function which is not implemented in the `xarray` list of universal functions, the `xarray.apply_ufunc` method should be used.
75 | #
76 | # For instance, in order to apply the `scipy.signal.detrend` function in a parallel manner,
77 | # first create a function that takes **a `numpy.array`** as argument. Note that the dimension on which you will operate (for detrending, that would be `time`) will be the last one.
78 |
79 | # +
80 | import scipy.signal as sig
81 | import numpy as np
82 |
83 | def gufunc_detrend(x):
84 | x[np.isnan(x)] = 0
85 | return sig.detrend(x)
86 |
87 |
88 | # -
89 |
90 | # Now that it is done, create a new function that returns an `xr.apply_ufunc` object. The first argument is the above function, the second argument is the `DataArray`. The `input_core_dims` argument provides the names of the core dimensions, the ones on which the operations will be performed (in this case, `time`). Since the `detrend` function returns an array of the same size as the input, the `output_core_dims` argument should be provided as well.
91 |
92 | def xarray_detrend(x, dim):
93 | return xr.apply_ufunc(
94 | gufunc_detrend,
95 | x,
96 | input_core_dims=[[dim]],
97 | output_core_dims=[[dim]],
98 | dask="parallelized",
99 | output_dtypes=[np.float32],
100 | )
101 |
102 |
103 | # Now, we read our data based on a specific chunk layout. **Note that the `time` dimension must remain unchunked, hence the `-1`**.
104 |
105 | data = xr.open_dataset('data/surface_thetao.nc',
106 | chunks={'time_counter': -1, 'x': 150, 'y' : 150})
107 | data = data['thetao']
108 | data = data.isel(olevel=0)
109 | data
110 |
111 | # Now we compute the monthly anomalies:
112 |
113 | dataclim = data.groupby('time_counter.month').mean(dim='time_counter')
114 | dataclim
115 |
116 | data = data.groupby('time_counter.month') - dataclim
117 | data = data.chunk({'time_counter': -1, 'x': 150, 'y': 150})
118 | data
119 |
120 | # %%time
121 | calc = xarray_detrend(data, dim='time_counter').compute()
122 |
123 | # Note that you can call the `compute` method in association with a progress bar as follows:
124 |
125 | from dask.diagnostics import ProgressBar
126 | with ProgressBar():
127 | calc = xarray_detrend(data, dim='time_counter').compute()
128 | calc
129 |
130 | # Now let's check whether the trend seems OK. First, we extract the detrended time-series at a specific location.
131 |
132 | coords = dict(x=90, y=165)
133 | calcts = calc.isel(**coords) # detrended time series
134 | calcts
135 |
136 | # Then, we extract the raw anomalies at the same location:
137 |
138 | datats = data.isel(**coords)
139 |
140 | # Now, we can plot the raw anomalies and the associated trend:
141 |
142 | plt.plot(datats, label='anomalies')
143 | plt.plot(datats - calcts, label='trend')
144 | plt.legend()
145 |
146 | # ## Use on HPCs
147 | #
148 | # It is theoretically possible to parallelize Dask operations on HPCs, such as Datarmor. This is achieved by using the [dask-jobqueue](https://jobqueue.dask.org) package in association with the `dask.distributed` module. For instance, to run a computation on a `PBS` cluster such as Datarmor, the `PBSCluster` class should be used.
149 | #
150 | # The first step is to create a `jobqueue.yaml` file in the `~/.config/dask` directory. This file contains all the PBS settings for the cluster you are working on (cf. [here](https://jobqueue.dask.org/en/latest/configurations.html#ifremer-datarmor) for the Datarmor configuration). **These are the settings for a single job.**
151 | #
152 | # There must be also a `distributed.yaml` configuration file, which contains the settings for the HPC server. An example is available [here](https://github.com/apatlpo/lops-array/blob/master/datarmor/distributed.yaml).
153 | #
154 | # When done, create your Python script as shown below (taken from [dask example page](https://jobqueue.dask.org/en/latest/index.html?highlight=client#example)).
155 | #
156 | # ```
157 | # from dask_jobqueue import PBSCluster
158 | # cluster = PBSCluster()
159 | # cluster.scale(jobs=10) # launch 10 jobs
160 | # ```
161 | #
162 | # To see the job script:
163 | #
164 | # ```
165 | # print(cluster.job_script())
166 | # ```
167 | #
168 | # Now init the `Client` object using the cluster defined in the above:
169 | #
170 | # ```
171 | # from dask.distributed import Client
172 | # client = Client(cluster) # Connect this local process to remote workers
173 | # ```
174 | #
175 | # When done, run your Dask operations
176 | #
177 | # ```
178 | # # wait for jobs to arrive, depending on the queue, this may take some time
179 | # import dask.array as da
180 | # ```
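#
# As a minimal sketch (added remark, assuming the cluster and client above are running), a Dask
# array computation would then be distributed over the remote workers:
#
# ```
# x = da.random.random((10000, 10000), chunks=(1000, 1000))
# x.mean().compute()
# ```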
181 | #
182 | # **This is not entirely clear to me either, so use with caution!**
183 | #
184 | # For more information:
185 | # - https://docs.dask.org/en/latest/
186 | # - [presentation-dask.pdf](http://osr-cesbio.ups-tlse.fr/gitlab_cesbio/activites-ia/ds-cb/raw/22e64eeed3cd6fa5643381b3ae928e7d73b3d15e/Julien_Dask/presentation-dask.pdf)
187 | # - [lops-array](https://github.com/apatlpo/lops-array/) for some examples.
188 |
--------------------------------------------------------------------------------
/data_types/strings.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Strings
17 | #
18 | # ## Usage
19 | #
20 | # String objects are very common in Python. They are especially useful when reading and writing text files.
21 | #
22 | # They are defined between simple quotes (```'```) or double quotes (```"```).
23 | #
24 | #
25 | #
26 | # Caution! Opening and closing quotes must be the same
27 | #
28 | #
29 | # ## Special characters
30 | #
31 | # Python contains a set of predefined characters, which are listed below (source: [python.org](https://docs.python.org/3/reference/lexical_analysis.html))
32 | #
33 | # |Character | Definition
34 | # |:--------:|:------------------------------:
35 | # |```\a``` | ASCII Bell (BEL)
36 | # |```\b``` | ASCII Backspace (BS)
37 | # |```\f``` | ASCII Formfeed (FF)
38 | # |```\n``` | ASCII Linefeed (LF)
39 | # |```\r``` | ASCII Carriage Return (CR)
40 | # |```\t``` | ASCII Horizontal Tab (TAB)
41 | # |```\v``` | ASCII Vertical Tab (VT)
42 | #
43 | # In order to prevent these special characters from being interpreted, add ```r``` before the first quote:
44 |
45 | str1 = '@this is \n@' # \n interpreted as line break
46 | print(str1)
47 | str2 = r'%this is \n%'
48 | print(str2)
49 |
50 | # ## String manipulation
51 | #
52 | # String objects have a lot of methods to manipulate them.
53 | #
54 | # Since they are *immutable*, these methods return new string objects, contrary to list methods, which
55 | # modify the list in place.
56 |
57 | # ### Extracting characters
58 |
59 | # +
60 | string1 = 'char1 char2 char3'
61 | print(len(string1)) # length of a string
62 |
63 | chars = list(string1) # returns a list of char
64 | print(chars)
65 | # -
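
# Individual characters and substrings can also be extracted by indexing and slicing (a small complement to the example above):

print(string1[0])    # first character
print(string1[-1])   # last character
print(string1[0:5])  # substring made of characters 0 to 4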
66 |
67 | # ### Changing case
68 |
69 | # +
70 | # change case
71 | # sets the string to lower case
72 | stringl = string1.lower()
73 | print(stringl)
74 |
75 | # sets the string to upper case
76 | stringu = string1.upper()
77 | print(stringu)
78 |
79 | stringc = string1.capitalize()
80 | print(stringc)
81 |
82 | # note here that syntax is different from list
83 | # output of the method is a new string object since strings are immutable
84 | # -
85 |
86 | # ### Replacement and word splitting
87 |
88 | # +
89 | string2 = string1.replace('char2', 'toto')  # note: replace is case sensitive
90 | print(string2)
91 |
92 | words = string1.split(' ')  # ['char1', 'char2', 'char3']
93 | print(words)
94 |
95 | words = string1.split(',')  # no comma in the string: returns ['char1 char2 char3']
96 | print(words)
97 | # -
98 |
99 | # ### String formatting
100 |
101 | sep = ',\n'
102 | # merges a list of strings in one string providing a separator
103 | string3 = sep.join(['toto1', 'toto2','toto3'])
104 | print(string3)
105 |
106 | # strings behave a bit like lists: no maths with them!
107 | # + and * are used for string concatenation and repetition
108 | string4 = 2 * 'toto1' + '\t' + 'toto2' + \
109 | '\n' + 'toto3 ' + str(10)
110 | print(string4)
111 |
112 | # +
113 | # variables to display
114 | x = 10
115 | y = 0.5
116 | z = 0.005
117 |
118 | # string formatting. There should be as many format specifiers as variables to display
119 | string5 = '%04d, %.5f, %.3e\n' %(x, y, z)
120 | # string5 = '%04d, %.5f\n' %(x, y, z) # fails because of an inconsistent number of variables
121 | print(string5)
122 | # -
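
# The same result can be obtained with an f-string (available from Python 3.6 onwards); this is a minimal equivalent of the %-formatting example above:

string5f = f'{x:04d}, {y:.5f}, {z:.3e}\n'
print(string5f)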
123 |
124 | # If you want to write a percentage symbol, use %s (string format) or double the symbol (%%)
125 | string5 = 'Percentage:\n%d%s' %(x, '%')
126 | print(string5)
127 |
128 | string5 = ' test string '
129 | # removing leading and trailing whitespace (useful when reading a file)
130 | string6 = string5.strip()
131 | print('#' + string5 + '#')
132 | print('#' + string6 + '#')
133 |
134 | # ## Regular expressions
135 | #
136 | # A very powerful feature is the use of *regular expressions*, which allow matching strings against given patterns ([re](https://docs.python.org/3/library/re.html) library).
137 |
138 | # ### Creating regular expressions
139 |
140 | # +
141 | # load the regular expression package
142 | import re
143 |
144 | # match string that starts (^) with a number ranging from 0 to 9
145 | pattern1 = r'^[0-9]'
146 | reg1 = re.compile(pattern1) # creates an object that will be used to match string
147 |
148 | # match string that ends ($) with a number ranging from 0 to 9
149 | pattern2 = r'\d$' # \d is a shortcut for [0-9]
150 | reg2 = re.compile(pattern2) # creates an object that will be used to match string
151 | # -
152 |
153 | # ### Matching regular expressions
154 |
155 | string1 = r'2-start'
156 | string2 = r'end-3'
157 |
158 | reg1.match(string1) # match: returns a re.Match object
159 |
160 | reg2.match(string2) # no match: returns None
161 |
162 | reg2.match(string1) # returns None (no match)
163 |
164 | reg1.match(string2) # no match: returns None (string2 does not start with a digit)
165 |
166 | # note: you should compile a regular expression if it will be
167 | # used often. For isolated cases, you can use:
168 | re.match(pattern1, string1)
169 |
170 | # ### Extracting values
171 | #
172 | # To extract the values matched by a regular expression, use the `groups` method. For that, the
173 | # parts of the pattern to extract must be enclosed between `()`.
174 |
175 | string1 = r'2-start'
176 | string2 = r'04304-end'
177 | string3 = r'04304-END'
178 | string4 = r'-END'
179 |
180 |
181 | # Function that returns the groups if a match is not None
182 | def test(match):
183 | if(match):
184 | print(match.groups())
185 | else:
186 | print('none')
187 |
188 |
189 | # +
190 | # to get the integer value, use the groups method of the re package
191 | # use () to encompass the elements you want to extract
192 | pattern1 = r'^([0-9]+)-([a-z]+)$' # + = 1 or more match of the preceding pattern
193 | reg1 = re.compile(pattern1)
194 |
195 | test(reg1.match(string1)) # Match
196 | test(reg1.match(string2)) # Match
197 | test(reg1.match(string3)) # No match ('END' is upper case, pattern expects lower case)
198 | test(reg1.match(string4)) # No match (doesn't start with a digit)
199 |
200 | # +
201 | pattern2 = r'^([0-9]+)-([a-zA-Z]+)$' # + = 1 or more match of the preceding pattern
202 | reg2 = re.compile(pattern2)
203 |
204 | test(reg2.match(string1)) # Match
205 | test(reg2.match(string2)) # Match
206 | test(reg2.match(string3)) # Match (pattern now accepts both lower and upper case letters)
207 | test(reg2.match(string4)) # No match (doesn't start with a digit)
208 |
209 | # +
210 | pattern3 = r'^([0-9]*)-([a-zA-Z]+)$' # * = 0 or more match of the preceding pattern
211 | reg3 = re.compile(pattern3)
212 |
213 | # All matches
214 | test(reg3.match(string1))
215 | test(reg3.match(string2))
216 | test(reg3.match(string3))
217 | test(reg3.match(string4))
218 | # -
219 |
220 | pattern4 = r'^([0-9]?)-([a-zA-Z]+)$' # ? = 0 or 1 match of the preceding pattern
221 | reg4 = re.compile(pattern4)
222 | test(reg4.match(string1)) # Match
223 | test(reg4.match(string2)) # No match (more than 0 or 1 digit at the beginning)
224 | test(reg4.match(string3)) # No match (more than 0 or 1 digit at the beginning)
225 | test(reg4.match(string4)) # Match (0 digits allowed)
226 |
227 | # ### Splitting using regular expressions
228 |
229 | # How to split this string into the three names?
230 | string1 = r'lala toto titi'
231 | sp1 = string1.split(' ')
232 | print(sp1)
233 | reg = re.compile(' +') # split based on regular expressions: splits with separator = 1 or more spaces
234 | sp2 = reg.split(string1)
235 | print(sp2)
236 |
237 | # +
238 | string1 = r'01 0304 02 45 509 2950 204'  # could also try: r'302 01 2030 39393 50505 s0304 43df'
239 |
240 | # list all the digits of the string
241 | reg = re.compile(r'[0-9]')
242 | print(string1)
243 | print(reg.findall(string1))
244 | # -
245 |
246 | # List all the groups of 2 to 3 digits. However, 0304 is matched as 030
247 | reg = re.compile(r'[0-9]{2,3}')
248 | print(string1)
249 | print(reg.findall(string1))
250 |
251 | # better but 0304 is still matched as 304, and 204 is not matched
252 | reg = re.compile(r'[0-9]{2,3} ') # adding white space at the end
253 | print(string1)
254 | print(reg.findall(string1))
255 |
256 | # we get close using OR statements (|), but the matches still contain white spaces
257 | reg = re.compile(r' [0-9]{2,3} |^[0-9]{2,3} | [0-9]{2,3}$')
258 | print(string1)
259 | print(reg.findall(string1))
260 |
261 | # Solution: use \b (word boundary)
262 | reg = re.compile(r'\b[0-9]{2,3}\b')
263 | print(string1)
264 | print(reg.findall(string1))
265 |
--------------------------------------------------------------------------------
/io/xar.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # NetCDF
17 | #
18 | # A very efficient way to read, analyze and write NetCDF files is to use the [xarray](http://xarray.pydata.org/en/stable/) Python library, which can be viewed as an N-dimensional counterpart of the [pandas](http://pandas.pydata.org) library.
19 | #
20 | # ## Reading NetCDF
21 | #
22 | # ### Reading single file
23 | #
24 | # Reading NetCDF files is done by using the `xarray.open_dataset` method, which returns an [xarray.Dataset](http://xarray.pydata.org/en/stable/data-structures.html#dataset) object.
25 |
26 | # +
27 | import xarray as xr
28 | import numpy as np
29 |
30 | data = xr.open_dataset('data/UV500storm.nc')
31 | data
32 | # -
33 |
34 | # ### Reading multiple files
35 | #
36 | # Often, a variable is stored in multiple NetCDF files (one file per year for instance). The `xarray.open_mfdataset` method allows all the files to be opened at once and concatenated along the *record* dimension (the `UNLIMITED` dimension, usually time) and the spatial dimensions.
37 | #
38 | # Below, the four `ISAS13` files are opened at once and automatically concatenated along the record dimension, hence leading to a dataset with 4 time steps.
39 |
40 | data = xr.open_mfdataset("data/*ISAS*nc", combine='by_coords')
41 | data
42 |
43 | # Furthermore, complex models are often parallelized using the [Message Passing Interface (MPI)](https://fr.wikipedia.org/wiki/Message_Passing_Interface), in which each processor manages a subdomain. If each processor saves outputs over its own subdomain, there will be as many output files as there are processors.
44 | # `xarray` allows the global dataset to be reconstructed by concatenating the subregional files according to their coordinates.
45 | #
46 | #
47 | # Warning! This actually works only if the decomposition into subregions is regular, and if the subfiles contain coordinates.
48 | #
49 |
50 | data = xr.open_mfdataset("data/GYRE_OOPE*", combine='by_coords', engine='netcdf4')
51 | data['OOPE']
52 |
53 | # In the two previous examples, a `chunksize` attribute appeared for the variables. This is because opening multiple datasets automatically generates `dask` arrays, which are ready for parallel computing. These are discussed in a dedicated section.
54 |
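# As a quick check (a small addition), the chunk layout of a dask-backed variable can be inspected through its `chunks` attribute:

data['OOPE'].chunks
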
55 | # ### Accessing dimensions, variables, attributes
56 |
57 | data = xr.open_dataset("data/UV500storm.nc")
58 | data
59 |
60 | # #### Dimensions
61 | #
62 | # Dimensions are recovered by accessing the `dims` attribute of the dataset, which returns a `dictionary` whose `keys` are the dimension names and whose values are the number of elements along each dimension.
63 |
64 | data.dims
65 |
66 | data.dims['lat']
67 |
68 | # #### Variables
69 | #
70 | # Variables can be accessed by using the `data_vars` attribute, which returns a `dictionary`, the `keys` of which are the dataset variable names.
71 |
72 | data.data_vars
73 |
74 | data.data_vars['u']
75 |
76 | # Note that data variables can also be accessed by using the variable name as a key of the dataset object, as follows:
77 |
78 | data['v']
79 |
80 | # Note that variables are returned as `xarray.DataArray`.
81 |
82 | # To recover the variable as a `numpy` array, the `values` attribute can be used. In this case, missing values are set to `NaN`.
83 |
84 | v = data['v']
85 | v = v.values
86 | v
87 |
88 | # In order to obtain a masked array instead, use the `to_masked_array()` method:
89 |
90 | v = data['v']
91 | v = v.to_masked_array()
92 | v
93 |
94 | # #### Time management
95 | #
96 | # By default, the time variable is detected by `xarray` from the NetCDF attributes and is converted into human-readable dates. This is done by xarray using the [cftime](https://pypi.org/project/cftime/) module.
97 |
98 | data = xr.open_mfdataset("data/*ISAS*", combine='by_coords')
99 | data['time']
100 |
101 | # Then, the user can access the `year`, `month`, `day`, `hour`, `minute`, `second`, `microsecond`, `nanosecond`, `date`, `time`, `dayofyear`, `weekofyear`, `dayofweek`, `quarter` as follows:
102 |
103 | data['time.year']
104 |
105 | data['time.month']
106 |
107 | data['time.day']
108 |
109 | data['time.dayofyear']
110 |
111 | #
112 | # Warning! Replace `time` by the name of your time variable (`time_counter` in NEMO outputs, for instance)
113 | #
114 | #
115 | # If the user does not want `xarray` to convert time into human-readable dates, set the `decode_times` argument to False.
116 |
117 | data = xr.open_mfdataset("data/*ISAS*", combine='by_coords', decode_times=False)
118 | data['time']
119 |
120 | # **In this case, years, months, etc. cannot be extracted**
121 |
122 | # #### Attributes
123 | #
124 | # To get variable attributes, use the `attrs` attribute, which exists for `xarray.Dataset` (global attributes) and `xarray.DataArray` objects (variable's attributes). It returns a `dictionary` containing the attribute names and values.
125 |
126 | data.attrs
127 |
128 | data.attrs['NCO']
129 |
130 | time = data['time']
131 | time.attrs
132 |
133 | time.attrs['units']
134 |
135 | # ## Indexing
136 | #
137 | # As in `pandas`, there are two ways to extract part of a dataset. Let's consider the ISAS dataset, which contains 152 vertical levels unevenly distributed from 0 to 2000 m.
138 |
139 | data = xr.open_mfdataset('data/*ISAS*', combine='by_coords')
140 | data
141 |
142 | # ### Extracting using indexes
143 | #
144 | # To extract the first ten levels and the first two time steps, the `isel` method should be used, which can be applied on either a `Dataset` or a `DataArray`.
145 | #
146 | #
147 | # Note: it is the xarray counterpart of the Pandas `iloc` method.
148 | #
149 |
150 | data.isel(depth=range(10), time=0)
151 |
152 | data.isel(time=slice(0, 2), depth=slice(0, 10))
153 |
154 | data['TEMP'].isel(time=slice(0, 2), depth=range(0, 10))
155 |
156 | # ### Extracting using values
157 | #
158 | # To extract the data between 100 m and 1000 m and for a given period, the `sel` method should be used, which can be applied on either a `Dataset` or a `DataArray`. It allows values (labels) to be used rather than indexes.
159 | #
160 | #
161 | # Note: it is the xarray counterpart of the Pandas `loc` method.
162 | #
163 |
164 | data.sel(time=slice('2012-01-15', '2012-02-15'))
165 |
166 | zmin = 100
167 | zmax = 1000
168 | data.sel(time=slice('2012-01-15', '2012-02-15'), depth=slice(zmin, zmax))
169 |
170 | # ### Plotting
171 | #
172 | # As for `pandas`, `xarray` comes with plotting functions. The type of plot depends on the number of dimensions of the field:
173 | #
174 | # - 1D: curve
175 | # - 2D: pcolormesh
176 | # - 3D, 4D, ... : histogram
177 |
178 | data = xr.open_dataset('data/UV500storm.nc')
179 | data
180 |
181 | l = data['u'].isel(timestep=0).plot()
182 |
183 | l = data['u'].isel(timestep=0, lat=15).plot()
184 |
185 | # ## Mathematical operations
186 | #
187 | # As for `pandas`, `xarray` comes with mathematical operations.
188 |
189 | data = xr.open_mfdataset('data/*ISAS*', combine='by_coords')
190 |
191 | # To compute the mean over the entire dataset:
192 |
193 | data.mean()
194 |
195 | # To compute the mean along time dimension:
196 |
197 | data.mean(dim='time')
198 |
199 | # Mean over the depth dimension:
200 |
201 | data.mean(dim='depth')
202 |
203 | # **Contrary to `numpy`, which evaluates operations eagerly, `xarray` (when backed by `dask` arrays) performs lazy operations.** As indicated on the `xarray` website:
204 | #
205 | # ```
206 | # Operations queue up a series of tasks mapped over blocks, and no computation is performed until you actually ask values to be computed (e.g., to print results to your screen or write to disk)
207 | # ```
208 | #
209 | # To force the computation, the `compute` and/or `load` methods must be used. Let's compare the outputs below:
210 |
211 | data['TEMP'].mean(dim='time')
212 |
213 | data['TEMP'].mean(dim='time').compute()
214 |
215 | # In the first output, no values are displayed. The `mean` has not been computed yet. In the second output, the effective mean values are shown because computation has been forced using `compute`.
216 |
217 | # ## Group-by operations
218 | #
219 | # The [groupby](http://xarray.pydata.org/en/stable/groupby.html) method allows operations to be easily performed on independent groups. For instance, to compute temporal (yearly, monthly, seasonal) means:
220 |
221 | data.groupby('time.month').mean(dim='time')
222 |
223 | data.groupby('time.year').mean(dim='time')
224 |
225 | data.groupby('time.season').mean(dim='time')
226 |
227 | # Defining discrete binning (for depth intervals for instance) is done by using the
228 | # [groupby_bins](http://xarray.pydata.org/en/stable/generated/xarray.Dataset.groupby_bins.html#xarray.Dataset.groupby_bins) method.
229 |
230 | depth_bins = np.arange(0, 1000 + 250, 250)
231 | depth_bins
232 |
233 | zmean = data.groupby_bins('depth', depth_bins).mean(dim='depth')
234 | zmean
235 |
236 | import matplotlib.pyplot as plt
237 | plt.rcParams['text.usetex'] = False
238 | cs = zmean['TEMP'].plot()
239 |
240 | # Let's reload the ISAS dataset
241 |
242 | data = xr.open_mfdataset('data/*ISAS*', combine='by_coords').isel(time=0)
243 | data
244 |
245 | # There is the possibility to compute rolling means along the depth dimension as follows:
246 |
247 | datar = data.rolling({'depth': 31}, center=True).mean()  # rolling mean over a 31-level window
248 | datar
249 |
250 | data['TEMP'].plot(label='original')
251 | datar['TEMP'].plot(label='rolling', marker='o', linestyle='none')
252 | plt.legend()
253 |
254 | # ## Creating NetCDF
255 | #
256 | # An easy way to write a NetCDF file is to create a `Dataset` object. First, let's define some dummy variables:
257 |
258 | # +
259 | import numpy as np
260 | import cftime
261 |
262 | nx = 10
263 | ny = 20
264 | ntime = 5
265 | x = np.arange(nx)
266 | y = np.arange(ny)
267 |
268 | data = np.random.rand(ntime, ny, nx) - 0.5
269 | data = np.ma.masked_where(data < 0, data)
270 |
271 | # converts time into date
272 | time = np.arange(ntime)
273 | date = cftime.num2date(time, 'days since 1900-01-01 00:00:00')
274 | # -
275 |
276 | date
277 |
278 | data.shape
279 |
280 | # First, init an empty `Dataset` object by calling the [xarray.Dataset](http://xarray.pydata.org/en/stable/generated/xarray.Dataset.html) method.
281 |
282 | ds = xr.Dataset()
283 |
284 | # Then, add to the dataset the variables and coordinates. Note that they should be provided as a tuple that contains two elements:
285 | # - A list of dimension names
286 | # - The numpy array
287 |
288 | ds['data'] = (['time', 'y', 'x'], data)
289 | ds['x'] = (['x'], x)
290 | ds['y'] = (['y'], y)
291 | ds['time'] = (['time'], date)
292 | ds
293 |
294 | # Then, add global and variable attributes to the dataset as follows:
295 |
296 | # +
297 | import os
298 | from datetime import datetime
299 |
300 | # Set file global attributes (file directory name + date)
301 | ds.attrs['script'] = os.getcwd()
302 | ds.attrs['date'] = str(datetime.today())
303 |
304 | ds['data'].attrs['description'] = 'Random draft'
305 | ds
306 | # -
307 |
308 | # Finally, create the NetCDF file by using the [to_netcdf](http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html) method.
309 |
310 | ds.to_netcdf('data/example.nc', unlimited_dims='time', format='NETCDF4')
311 |
312 | # Note that xarray automatically writes the `_FillValue` and `time:units` attributes.
313 |
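# As a quick sanity check (a small addition), the newly written file can be re-opened to verify its content:

check = xr.open_dataset('data/example.nc')
check
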
--------------------------------------------------------------------------------
/blocks/functions.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Functions
17 | #
18 | # ## Definitions
19 | #
20 | # The creation of functions is achieved with the ```def``` keyword.
21 | #
22 | # Compulsory arguments are put at the beginning of the function definition. The order matters.
23 | #
24 | # Optional arguments are put at the end of the function definition, along with their default values (defined by an `=` sign). Their order does not matter here, since they are passed by keyword when the function is called.
25 | #
26 | # Output values are specified using the `return` statement. Several variables can be returned (they will be returned as a `tuple`).
27 |
28 | # +
29 | import numpy as np
30 |
31 | def myfunc(arg1, arg2, arg3=0, arg4=-1):
32 | print('args = ', arg1, arg2, arg3, arg4)
33 | output = arg1 + arg2 + arg3 * (arg1 + arg2) \
34 | + arg4*(arg1 - arg2)
35 |
36 |     return output, np.power(output, 2) # returns a 2-element tuple
37 |
38 |
39 | # -
40 |
41 | x1, x2 = myfunc(1, 2)
42 | x1, x2
43 |
44 | x1, x2 = myfunc(1, 2, arg4=1)
45 | x1, x2
46 |
47 | x1, x2 = myfunc(1, 2, arg3=1)
48 | x1, x2
49 |
50 | x1, x2 = myfunc(1, 2, arg3=1, arg4=1)
51 | x1, x2
52 |
53 | x = myfunc(1, 2, arg3=1, arg4=1) # output returned as a tuple
54 | x
55 |
56 |
57 | # ## Arguments as reference or values
58 | #
59 | # Function arguments that are *immutable* (```int```,
60 | # ```float```, etc.) are provided as *values*, i.e. a local copy is made in
61 | # the function. The argument value is not changed after function return.
62 | #
63 | # Function arguments that are *mutable* (```list```,
64 | # ```array```, ```dict```, etc.) are provided as *references*, i.e. memory addresses. They can
65 | # therefore be modified from within the function. The argument value may change after function call.
66 |
67 | # For instance, let's define a function that adds a `y` variable to a `x` variable:
68 |
69 | def update(x, y):
70 | x += y
71 |
72 |
73 | # Let's apply the method on an immutable type, for instance an `int`:
74 |
75 | x = 1
76 | update(x, 10)
77 | print(x)
78 |
79 | # In this case, the `x` value is the same as before. This is because for immutable types, the `+=` statement ultimately creates a new instance (in this case, it is equivalent to `x = x + y`). The same thing holds for a `string` argument:
80 |
81 | x = 'string arg'
82 | update(x, ' final string')
83 | print(x)
84 |
85 | # Now, let's try to use the function on a mutable type. If we convert the above strings into lists:
86 |
87 | x = list('string arg')
88 | update(x, list(' final string'))
89 | print(x)
90 |
91 | # In this example, after the function call, the value of `x` is changed. Same holds for arrays:
92 |
93 | x = np.array([1, 2, 3])
94 | update(x, 2) # x has been updated in the function
95 | print(x)
96 |
97 | # **Therefore, be careful when modifying a mutable argument inside a function.**
98 |
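# One way to avoid this side effect (a suggestion, not part of the original example) is to work on a copy of the argument inside the function:

# +
def update_safe(x, y):
    out = np.array(x).copy()  # local copy: the caller's array is left untouched
    out += y
    return out

x = np.array([1, 2, 3])
print(update_safe(x, 2))  # [3 4 5]
print(x)                  # [1 2 3]: unchanged
# -
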
99 | # ## Scope of variables
100 | #
101 | # All the variables assigned within the function (arguments included)
102 | # are *local* variables. When leaving the function, the variables are removed.
103 |
104 | # +
105 | x = np.arange(5)
106 | print('x before ', x)
107 |
108 | def function(x):
109 | print('x begin func ', x)
110 |
111 | x += 5
112 |
113 | z = x + 10
114 | print('x end func', x, 'z', z)
115 |
116 | function(np.arange(5, 10))
117 |
118 | print('x after ', x)
119 | #print(z) # causes an error: z undefined
120 | # -
121 |
122 | # In the above example, the first print in the function shows the values of the array provided in the function call (`np.arange(5, 10)`). The `x` variable defined above cannot be accessed anymore because it is overwritten by the local one. When the function ends, the local `x` is destroyed, and the originally defined `x` is shown by the last print. `z` is only accessible from inside the function.
123 |
124 | # *Global* variables, defined outside of a function, can be accessed from inside a function if they are not overwritten by local ones. **But they are accessed in read-only mode only!**
125 |
126 | # +
127 | # by default, global variable
128 | y = 20
129 |
130 | def function2():
131 | print(y)
132 |
133 | function2()
134 | # -
135 |
136 | # To overwrite the value of a global variable from inside a function, it must be declared as `global`:
137 | #
138 |
139 | # +
140 | z = 30
141 | x = 10
142 |
143 | def function3():
144 |
145 | global z
146 |
147 | z = z + 10
148 | #x = x + 5 # will crash because x not declared global
149 |
150 | function3()
151 | z, x
152 |
153 |
154 | # -
155 |
156 | # In the above example, we can assign a value to the `z` variable although it is not a function argument, because it is declared `global`. However, the same thing on `x` will fail.
157 |
158 | # ### Functions: the ```*args``` argument
159 | #
160 | # When the number of arguments is variable, you can use the ```*args``` argument in the function definition. It allows any number of additional positional arguments to be passed, which will be provided to the function as a `tuple`.
161 |
162 | def function2(x, y, *args):
163 | print('x = ', x)
164 | print('y = ', y)
165 | print('args = ', args)
166 |
167 |
168 | function2(3, 'toto')
169 |
170 | # Here, no additional argument is provided. Therefore, an empty tuple is stored in the `args` variable.
171 |
172 | function2(3, 'toto', 5.4)
173 |
174 | # Here, one additional argument has been provided. `args` is therefore a tuple containing one `float` element.
175 |
176 | function2(3, 'toto', 5.4, 'z', [0, 3, 4])
177 |
178 | # Here, 3 additional arguments are provided. Note that the additional arguments can be of any type, as done in the above.
179 |
180 | # ### The ```**kwargs``` argument
181 | #
182 | # The `**kwargs` argument allows additional keyword arguments to be provided; they are collected in a `dict`, contrary to `*args`, which collects positional arguments in a `tuple`. In this case, the arguments are provided using the `key=value` syntax.
183 | #
184 | # Imagine you want to define a function that normalizes a time-series, i.e. removes the mean and divides by the standard-deviation:
185 | #
186 | # $Y = \frac{X - \overline{X}}{\sigma_X}$
187 | #
188 | # Your function should be able to accept all the possible arguments of the ```numpy.mean``` function. You could copy/paste
189 | # the full list of the ```numpy.mean``` arguments into your function definition. However:
190 | #
191 | # - This is time-consuming
192 | # - This is error prone (misspelling, updates)
193 | #
194 | # A better way is to use the ```**kwargs``` argument, which is a dictionary of arguments.
195 |
196 | import numpy as np
197 | def stand(x, **kwargs):
198 | print('kwargs = ', kwargs)
199 | m = np.mean(x, **kwargs)
200 | s = np.std(x)
201 | print(m.shape)
202 | return (x - m) / s
203 |
204 |
205 | # In your function, you allow a list of keyword arguments to be included in the function call. These keyword arguments are passed on to the `np.mean` function. Therefore, all the keyword arguments of the `numpy.mean` function can be used in your `stand` function. Let's give it a try by creating a dummy variable.
206 |
207 | x = np.random.normal(loc=0.0, scale=1.0, size=(1000, 100))
208 | x.shape
209 |
210 | # Now, we call the `stand` function on this array:
211 |
212 | out = stand(x)
213 | out.shape
214 |
215 | # Here, no keyword argument has been provided, so the call to the `np.mean` function computes the mean over the entire array, hence returning a scalar. `kwargs` is an empty `dict` object.
216 | #
217 | # Now, if we want to specify the axis along which the mean must be computed:
218 |
219 | out = stand(x, axis=0)
220 |
221 | # In this case, `kwargs` is a dictionary containing one `key/value` pair, and `mean` returns an array of size $100$. We can use other arguments that are available in the `np.mean` function:
222 |
223 | out = stand(x, keepdims=True, axis=0) # keeping the dimensions
224 |
225 |
226 | # However, if you provide a keyword argument that does not exist in the `numpy.mean`, the code will crash.
227 |
228 | # +
229 | #out = stand(x, ddof=1) # crashes since ddof is not a numpy.mean argument
230 | # -
231 |
232 | # It would be even better if all the arguments of the `np.std` function could be used as well. This is possible by using dictionaries as arguments (instead of `**kwargs`), as follows:
233 |
234 | def stand(x, argsmean={}, argsstd={}):
235 | m = np.mean(x, **argsmean)
236 | s = np.std(x, **argsstd)
237 | print('mean', m.shape)
238 | print('std' , s.shape)
239 | return (x - m) / s
240 |
241 |
242 | # In this case, we add two optional dictionary arguments (empty by default), one dictionary containing the arguments that will be used in `np.mean` function (`argsmean`), and one dictionary containing the arguments that will be used in `np.std` function (`argsstd`).
243 |
244 | # Now we can create the dictionary arguments as follows:
245 |
246 | # +
247 | # extra arguments for the np.mean function
248 | args_mean = {'keepdims':True, 'axis':0}
249 |
250 | # extra arguments for the np.std function
251 | args_std = {'keepdims':True, 'axis':0, 'ddof':1}
252 | # -
253 |
254 | # Now we can see what happens when we call the new `stand` function, with or without the dict arguments:
255 |
256 | out = stand(x)
257 |
258 | out = stand(x, argsmean=args_mean) # mean computed over the 1st dimension
259 |
260 | out = stand(x, argsstd=args_std) # std computed over 1st dimension, removes one dof
261 |
262 | out = stand(x, argsmean=args_mean, argsstd=args_std)
263 |
264 | # ## Lambda functions
265 | #
266 | # Lambda functions, also called anonymous functions, are not defined by using
267 | # the ```def``` statement but the ```lambda``` one.
268 | #
269 | # More on lambda functions can be found in [w3schools](https://www.w3schools.com/python/python_lambda.asp)
270 |
271 | y = lambda x: x**2
272 | print(y(2))
273 | print(y(3))
274 |
275 | z = lambda x, y : x * y
276 | print(z(3, 5))
277 | print(z(5, 7))
278 |
279 |
280 | # A good example of the use of lambda functions is provided on [stackoverflow](https://stackoverflow.com/questions/890128/why-are-python-lambdas-useful). In order to generate a multiplicator function, one can use a combination of a standard and a lambda function.
281 |
282 | # Takes as argument the value by which variables should be multiplied
283 | def mulgenerator(n):
284 | return (lambda x : x * n)
285 |
286 |
287 | # Here, the `mulgenerator` function returns a lambda function, i.e. an object that is callable.
288 |
289 | # +
290 | # doubler
291 | doubler = mulgenerator(2)
292 | print(type(doubler))
293 | print(doubler(10))
294 |
295 | # quadrupler
296 | quadrupler = mulgenerator(4)
297 | print(quadrupler(10))
298 | # -
299 |
300 | # ## Loading your libraries
301 | #
302 | # If you want to load your own function, stored in a `mylib.py` file, you first need to add your library directory to the `PYTHONPATH`. At the beginning of your script, add:
303 | #
304 | # ```
305 | # import sys
306 | # sys.path.append('/add/other/directory/')
307 | # import mylib
308 | # ```
309 | #
310 | # **When the PYTHONPATH is modified this way, it is only valid for the current Python script.**
311 | #
312 | # In order to change the Python default paths permanently, you need to define a `PYTHONPATH` environment variable. On macOS/Linux, edit your `.bashrc` or `.cshrc` file and add:
313 | #
314 | # ```
315 | # # bashrc
316 | # export PYTHONPATH=${PYTHONPATH}:/add/other/directory
317 | # # cshrc
318 | # setenv PYTHONPATH /add/other/directory:${PYTHONPATH}
319 | # ```
320 | #
321 | # In Windows, see for instance [oracle.com](https://docs.oracle.com/en/database/oracle/r-enterprise/1.5.1/oread/creating-and-modifying-environment-variables-on-windows.html#GUID-DD6F9982-60D5-48F6-8270-A27EC53807D0)
322 | #
323 | # Then, in your Python script, you can use your function as follows:
324 | #
325 | # ```
326 | # mylib.function1
327 | # ```
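#
# As a minimal illustration (the `mylib.py` file and the `function1` name are hypothetical, used only for this example), the library file could simply contain:
#
# ```
# # mylib.py
# def function1(x):
#     """Dummy function: returns twice its argument."""
#     return 2 * x
# ```
#
# and, once the directory containing `mylib.py` is on the PYTHONPATH, it can be used as:
#
# ```
# import mylib
# print(mylib.function1(10))  # 20
# ```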
328 |
--------------------------------------------------------------------------------
/maps/pyngl.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.10.3
10 | # kernelspec:
11 | # display_name: pyngl
12 | # language: python
13 | # name: pyngl
14 | # ---
15 |
16 | # # PyNGL
17 | #
18 | # The [PyNGL](https://www.pyngl.ucar.edu/) library is a very powerful tool for mapping.
19 | #
20 | #
21 | # Note. I strongly discourage the use of PyNGL for simple plots such as time series,
22 | # since Matplotlib does the job as well with less effort.
23 | #
24 | #
25 | # ## Install
26 | #
27 | # To install PyNGL, it is strongly recommended to set-up a virtual environment, as described on the [Download](https://www.pyngl.ucar.edu/Download/) section:
28 | #
29 | # ```
30 | # conda create --name pyngl
31 | # conda activate pyngl
32 | # conda install pyngl
33 | # conda install pynio
34 | # conda install xarray
35 | # ```
36 | #
37 | # To use Jupyter Notebook with this environment, type in a terminal:
38 | #
39 | # ```
40 | # conda activate pyngl
41 | # conda install ipython ipykernel
42 | # ipython kernel install --name "pyngl" --user
43 | # jupyter notebook &
44 | # ```
45 | #
46 | # (source: [Medium.com](https://medium.com/@nrk25693/how-to-add-your-conda-environment-to-your-jupyter-notebook-in-just-4-steps-abeab8b8d084))
47 | #
48 | # ## General concepts
49 | #
50 | # ### Workspace
51 | #
52 | # In PyNGL, a figure is called a `Workspace`. It is opened by using the [Ngl.open_wks](https://www.pyngl.ucar.edu/Functions/Ngl.open_wks.shtml) method.
53 | #
54 | # ### Draw and Frame
55 | #
56 | # A plot is referred to as a `Draw`, while a figure page (for instance PDF page) is referred to as a `Frame`.
57 | #
58 | # Any time a plot is done, a `Draw` is created on a `Frame`, then a `Frame` is added to the `Workspace`, unless the user decides to keep control over when these actions are performed (**which is highly recommended**).
59 | #
60 | # Finally, figures are finalized by calling the [Ngl.end](https://www.pyngl.ucar.edu/Functions/Ngl.end.shtml) method.
61 | #
62 | # ### Resources
63 | #
64 | # To set-up the plot, resources need to be defined. They can be viewed as a dictionary of plot parameters.
65 | # Resources are initialized by calling `Ngl.Resources()`. There are resources for nearly everything.
66 | #
67 | # The complete list of resources is available [here](https://www.pyngl.ucar.edu/Resources/).
68 | #
69 | # ## First map
70 | #
71 |
72 | # +
73 | import Ngl
74 | import xarray as xr
75 | import numpy as np
76 |
77 | # load the NetCDF file, using the PyNio engine
78 | data = xr.open_dataset('../io/data/UV500storm.nc', engine='pynio')
79 | data = data.isel(timestep=0)
80 |
81 | lon = data['lon'].values
82 | lat = data['lat'].values
83 | u = data['u'].to_masked_array()
84 | v = data['v'].to_masked_array()
85 | speed = np.sqrt(u*u+v*v, where=(np.ma.getmaskarray(u) == False))
86 |
87 | # open document
88 | wks = Ngl.open_wks("png", "figs/pyngl_examples.png")
89 |
90 | # initialisation of the plot resources
91 | res = Ngl.Resources()
92 |
93 | # not necessary, just a good habit
94 | res.nglDraw = False # deactivate drawing
95 | res.nglFrame = False # deactivate page generation
96 |
97 | # Set map resources.
98 | res.mpProjection = "LambertConformal" # proj
99 | res.nglMaskLambertConformal = True # masked lamb
100 | res.mpLimitMode = "LatLon" # limit map via lat/lon
101 | res.mpMinLatF = 10. # map area
102 | res.mpMaxLatF = 75. # latitudes
103 | res.mpMinLonF = -80. # and
104 | res.mpMaxLonF = 40. # longitudes
105 | res.mpFillOn = True # fill map
106 | res.mpLandFillColor = "LightGray"
107 | res.mpOceanFillColor = -1 # oceans are transparent
108 | res.mpInlandWaterFillColor = "LightBlue" # lakes are light blue
109 | res.tiMainString = "A masked Lambert Conformal map" # plot title
110 | res.tiMainFontHeightF = 0.010 # Font size
111 |
112 | # makes the map
113 | m = Ngl.map(wks, res)
114 |
115 | # draws the map
116 | Ngl.draw(m)
117 |
118 | # add a page to the pdf output
119 | Ngl.frame(wks)
120 |
121 | #Ngl.end()
122 | # -
123 |
124 | #
125 | #
126 | # ## Contour plots
127 |
128 | # +
129 | # init the plot resources
130 | res = Ngl.Resources()
131 |
132 | # not necessary, just a good habit
133 | res.nglDraw = False
134 | res.nglFrame = False
135 |
136 | # Set map resources.
137 | res.mpLimitMode = "LatLon" # limit map via lat/lon
138 | res.mpMinLatF = lat.min() # map area
139 | res.mpMaxLatF = lat.max() # latitudes
140 | res.mpMinLonF = lon.min() # and
141 | res.mpMaxLonF = lon.max() # longitudes
142 | res.mpFillOn = True
143 | res.mpLandFillColor = "LightGray"
144 | res.mpOceanFillColor = -1
145 | res.mpInlandWaterFillColor = "LightBlue"
146 |
147 | # coordinates for contour plots
148 | res.sfXArray = lon
149 | res.sfYArray = lat
150 |
151 | # Set properties for contour lines
152 | res.cnFillOn = False # no filled contour
153 | res.cnLinesOn = True # contour lines
154 | res.cnLineLabelsOn = True # line labels
155 | res.cnLineThicknessF = 4 # contour lines thickness
156 | res.cnLevelSelectionMode = "ExplicitLevels" # plotted levels are set explicitely
157 | res.cnLevels = np.arange(-8, 48+8, 8) # levels to plot
158 | res.cnInfoLabelOn = True # add the contour info
159 |
160 | # draw the contour maps
161 | m = Ngl.contour_map(wks, u, res)
162 |
163 | # draws the map
164 | Ngl.draw(m)
165 |
166 | # add a page to the pdf output
167 | Ngl.frame(wks)
168 |
169 | # ends the plot
170 | # Ngl.end()
171 | # -
172 |
173 | #
174 | #
175 | # ## Filled contours
176 |
177 | # +
178 | # set the document colormap
179 | resngl = Ngl.Resources()
180 | resngl.wkColorMap = 'WhiteBlueGreenYellowRed'
181 | Ngl.set_values(wks, resngl)
182 |
183 | # init the plot resources
184 | res = Ngl.Resources()
185 |
186 | # not necessary, just a good habit
187 | res.nglDraw = False
188 | res.nglFrame = False
189 |
190 | # Set map resources.
191 | res.mpLimitMode = "LatLon" # limit map via lat/lon
192 | res.mpMinLatF = lat.min() # map area
193 | res.mpMaxLatF = lat.max() # latitudes
194 | res.mpMinLonF = lon.min() # and
195 | res.mpMaxLonF = lon.max() # longitudes
196 | res.mpFillOn = True
197 | res.mpLandFillColor = "LightGray"
198 | res.mpOceanFillColor = -1
199 | res.mpInlandWaterFillColor = "LightBlue"
200 |
201 | # coordinates for contour plots
202 | res.sfXArray = lon
203 | res.sfYArray = lat
204 |
205 | # Set properties for filled contours
206 | res.cnFillOn = True     # filled contours
207 | res.cnLinesOn = False   # no contour lines
208 | res.cnLineLabelsOn = False  # no line labels
209 | res.cnLineThicknessF = 4 # contour lines thickness
210 | res.cnLevelSelectionMode = "ExplicitLevels" # plotted levels are set explicitely
211 | res.cnFillMode = "CellFill"
212 | res.cnLevels = np.linspace(0, 40, 21) # levels to plot
213 |
214 | res.nglSpreadColors = True
215 | res.nglSpreadColorStart = 2
216 | res.nglSpreadColorEnd = 255
217 |
218 | # draw the contour maps
219 | m = Ngl.contour_map(wks, speed, res)
220 |
221 | # draws the map
222 | Ngl.draw(m)
223 |
224 | # add a page to the pdf output
225 | Ngl.frame(wks)
226 |
227 | # add the colormap to see the colors
228 | Ngl.draw_colormap(wks)
229 |
230 | # Ngl.end()
231 | # -
232 |
233 | #
234 | #
235 | # ## Quivers
236 | #
237 | # ### Quivers with key
238 |
239 | # +
240 | # init documents colormap
241 | resngl = Ngl.Resources()
242 | resngl.wkColorMap = 'precip2_15lev'
243 | Ngl.set_values(wks, resngl)
244 |
245 | # init plot resources
246 | res = Ngl.Resources()
247 |
248 | # not necessary, just a good habit
249 | res.nglDraw = False
250 | res.nglFrame = False
251 |
252 | # Set map resources.
253 | res.mpLimitMode = "LatLon" # limit map via lat/lon
254 | res.mpMinLatF = lat.min() # map area
255 | res.mpMaxLatF = lat.max() # latitudes
256 | res.mpMinLonF = lon.min() # and
257 | res.mpMaxLonF = lon.max() # longitudes
258 | res.mpFillOn = True
259 | res.mpLandFillColor = "LightGray"
260 | res.mpOceanFillColor = -1
261 | res.mpInlandWaterFillColor = "LightBlue"
262 |
263 | res.nglSpreadColors = True
264 | res.nglSpreadColorEnd = 17        # index of last color for contourf
265 | res.nglSpreadColorStart = 3        # index of first color for contourf
266 |
267 | # coord arrays for vector plots
268 | res.vfXArray = lon
269 | res.vfYArray = lat
270 |
271 | # set the annotation string. ~C~ is line break
272 | res.vcRefAnnoString1 = "Wind Speed~C~ (50 m/s)"
273 | res.vcRefAnnoArrowSpaceF = 0.65 # reduces white space
274 | res.vcRefAnnoString2On = False # remove the string "Reference vector"
275 | res.vcRefMagnitudeF = 50.0 # speed of the reference arrow
276 | res.vcRefLengthF = 0.08 # length of the reference arrow
277 | res.vcMinDistanceF = 0.02 # min. dist. between arrows
278 |
279 | # draw the contour maps
280 | vc = Ngl.vector_map(wks, u, v, res)  # Draw a vector plot over a map
281 |
282 | # draws the map
283 | Ngl.draw(vc)
284 |
285 | # add a page to the pdf output
286 | Ngl.frame(wks)
287 | # -
288 |
289 | #
290 | #
291 | # ### Quivers with colors
292 |
293 | # +
294 | res = Ngl.Resources()
295 |
296 | # not necessary, just a good habit
297 | res.nglDraw = False
298 | res.nglFrame = False
299 |
300 | # Set map resources.
301 | res.mpLimitMode = "LatLon" # limit map via lat/lon
302 | res.mpMinLatF = lat.min() # map area
303 | res.mpMaxLatF = lat.max() # latitudes
304 | res.mpMinLonF = lon.min() # and
305 | res.mpMaxLonF = lon.max() # longitudes
306 | res.mpFillOn = True
307 | res.mpLandFillColor = "LightGray"
308 | res.mpOceanFillColor = -1
309 | res.mpInlandWaterFillColor = "LightBlue"
310 |
311 | # settings for the colorbar
312 | res.lbOrientation = "Vertical" # vertical colorbar
313 | res.lbTitleString = "Wind Speed (m/s)" # cbar title string
314 |
315 | # the last three resources are to put the title in the right
316 | # position for vertical cbar
317 | res.lbTitlePosition = "Right" # cbar title position
318 | res.lbTitleAngleF = 90
319 | res.lbTitleDirection = "Across"
320 |
321 | res.pmLabelBarWidthF = 0.06 # cbar width
322 | res.lbTitleFontHeightF = 0.01
323 | res.lbLabelFontHeightF = 0.01
324 |
325 | res.vfXArray = lon
326 | res.vfYArray = lat
327 |
328 | res.nglSpreadColorEnd = 17        # index of last color for contourf
329 | res.nglSpreadColorStart = 3        # index of first color for contourf
330 |
331 | res.vcRefMagnitudeF = 50.0
332 | res.vcRefLengthF = 0.08
333 | res.vcMinDistanceF = 0.00
334 | res.vcGlyphStyle = 'CurlyVector'
335 | res.vcMonoLineArrowColor = False # Draw vectors in colors
336 | res.vcRefAnnoOn = False # no reference arrow
337 |
338 | # draw the contour maps
339 | vc = Ngl.vector_map(wks, u, v, res)  # Draw a vector plot over a map
340 |
341 | # draws the map
342 | Ngl.draw(vc)
343 |
344 | # add a page to the pdf output
345 | Ngl.frame(wks)
346 |
347 | # add the colormap to see the colors
348 | # Ngl.draw_colormap(wks)
349 |
350 | # Ngl.end()
351 | # -
352 |
353 | #
354 | #
355 | # ## Overlays
356 | #
357 | # Overlays are achieved by using the [Ngl.overlay](https://www.pyngl.ucar.edu/Functions/Ngl.overlay.shtml) method, which takes as arguments two Ngl plot objects (the second object is put over the first one).
358 | #
359 | #
360 | # Note. For map overlays, the base object must have been created with a map method (Ngl.contour_map or Ngl.vector_map for instance), while the object that is overlaid must have been created using standard functions (Ngl.contour or Ngl.vector for instance)
361 | #
362 |
363 | # +
364 | # set the document colormap
365 | resngl = Ngl.Resources()
366 | resngl.wkColorMap = 'WhiteBlueGreenYellowRed'
367 | Ngl.set_values(wks, resngl)
368 |
369 | ################################## Create the map contour plot
370 | # init the plot resources
371 | res = Ngl.Resources()
372 |
373 | # not necessary, just a good habit
374 | res.nglDraw = False
375 | res.nglFrame = False
376 |
377 | # Set map resources.
378 | res.mpLimitMode = "LatLon" # limit map via lat/lon
379 | res.mpMinLatF = lat.min() # map area
380 | res.mpMaxLatF = lat.max() # latitudes
381 | res.mpMinLonF = lon.min() # and
382 | res.mpMaxLonF = lon.max() # longitudes
383 | res.mpFillOn = True
384 | res.mpLandFillColor = "LightGray"
385 | res.mpOceanFillColor = -1
386 | res.mpInlandWaterFillColor = "LightBlue"
387 |
388 | # coordinates for contour plots
389 | res.sfXArray = lon
390 | res.sfYArray = lat
391 |
392 | # Set properties for filled contours
393 | res.cnFillOn = True     # filled contours
394 | res.cnLinesOn = False   # no contour lines
395 | res.cnLineLabelsOn = False  # no line labels
396 | res.cnInfoLabelOn = False   # no contour info label
397 | res.cnLineThicknessF = 4 # contour lines thickness
398 | res.cnLevelSelectionMode = "ExplicitLevels" # plotted levels are set explicitely
399 | res.cnFillMode = "CellFill"
400 | res.cnLevels = np.linspace(0, 40, 21) # levels to plot
401 |
402 | res.nglSpreadColors = True
403 | res.nglSpreadColorStart = 2
404 | res.nglSpreadColorEnd = 255
405 |
406 | # draw the contour maps
407 | m = Ngl.contour_map(wks, speed, res)
408 |
409 | ################################## Create the vector plot
410 | resv = Ngl.Resources()
411 |
412 | resv.nglDraw = False
413 | resv.nglFrame = False
414 |
415 | # coord arrays for vector plots
416 | resv.vfXArray = lon
417 | resv.vfYArray = lat
418 |
419 | # set the annotation string. ~C~ is line break
420 | resv.vcRefAnnoString1 = "Wind Speed~C~ (50 m/s)"
421 | resv.vcRefAnnoArrowSpaceF = 0.65 # reduces white space
422 | resv.vcRefAnnoString2On = False # remove the string "Reference vector"
423 | resv.vcRefMagnitudeF = 50.0 # speed of the reference arrow
424 | resv.vcRefLengthF = 0.08 # length of the reference arrow
425 | resv.vcMinDistanceF = 0.02 # min. dist. between arrows
426 |
427 | # Draw a vector plot. Note that here, the vector_map method is not used,
428 | # since the map projection is handled by the contour_map object onto which the vectors are overlaid
429 | vplot = Ngl.vector(wks, u, v, resv)
430 |
431 | Ngl.overlay(m, vplot)
432 |
433 | # draws the map
434 | Ngl.draw(m)
435 |
436 | # add a page to the pdf output
437 | Ngl.frame(wks)
438 |
439 | # add the colormap to see the colors
440 | # Ngl.draw_colormap(wks)
441 |
442 | # Ngl.end()
443 | # -
444 |
445 | #
446 |
447 | # Finish the plot
448 | Ngl.end()
449 |
450 | # # More examples
451 | #
452 | # To have some examples, don't hesitate to navigate through the [PyNGL](https://www.pyngl.ucar.edu/Examples/gallery.shtml) and [NCL](https://www.ncl.ucar.edu/gallery.shtml) galleries. Some examples are also available [here](http://www.nicolasbarrier.fr/gallery).
453 |
--------------------------------------------------------------------------------
/maps/carto.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # jupytext:
4 | # formats: py:light,ipynb
5 | # text_representation:
6 | # extension: .py
7 | # format_name: light
8 | # format_version: '1.5'
9 | # jupytext_version: 1.11.5
10 | # kernelspec:
11 | # display_name: Python 3
12 | # language: python
13 | # name: python3
14 | # ---
15 |
16 | # # Maps
17 | #
18 | # Drawing maps can be achieved by using the [cartopy](https://scitools.org.uk/cartopy/docs/latest/#) library, which can be used with matplotlib.
19 | #
20 | #
21 | #
22 | # **Cartopy vs. Basemap.** The Cartopy library is planned to take over the Basemap one. Therefore, the latter should not be used anymore, except if the desired Basemap functionality is not yet integrated into cartopy.
23 | #
24 | #
25 | # ## Install
26 | #
27 | # Since it is not a standard library, it needs to be installed. It is best to do it using `conda`, since it needs external C libraries. To install it, type in a terminal:
28 | #
29 | # `conda install cartopy`
30 | #
31 | # ## Map initialisation
32 | #
33 | # Maps are initialized by using the `pyplot.axes` with a `projection` argument that defines the Coordinate Reference System, i.e. a projection system. The list of available projections can be found [here](https://scitools.org.uk/cartopy/docs/v0.15/crs/projections.html).
34 |
35 | import cartopy.crs as ccrs
36 | import matplotlib.pyplot as plt
37 | import numpy as np
38 | plt.rcParams['text.usetex'] = False
39 |
40 | fig = plt.figure()
41 | ax = plt.axes(projection=ccrs.Mollweide())
42 | l = ax.coastlines() # add coastlines
43 |
44 | # +
45 | import cartopy.crs as ccrs
46 | import matplotlib.pyplot as plt
47 |
48 | fig = plt.figure()
49 | ax = plt.axes(projection=ccrs.PlateCarree())
50 | l = ax.stock_img()
51 | # -
52 |
53 | # ## Specifying map limits
54 | #
55 | # Map limits can be specified using the `set_extent` method. It takes as arguments the limits of the map and, optionally, a `crs` object specifying the coordinate system in which the limits are expressed.
56 | #
57 | # **In most cases, limits are defined in geographical coordinates. Thus the `crs` argument must be equal to `ccrs.PlateCarree()`**
58 |
59 | fig = plt.figure()
60 | ax = plt.axes(projection=ccrs.PlateCarree())
61 | ax.set_extent([-150, 20, -20, 20], crs=ccrs.PlateCarree())
62 | l = ax.coastlines()
63 |
64 | # ## Adding map features
65 | #
66 | # In order to add features to the map (land color, ocean colors, etc.), use the [cartopy.feature](https://scitools.org.uk/cartopy/docs/v0.14/matplotlib/feature_interface.html) interface.
67 | #
68 | # Features should be added to the current axes by using the `add_feature` method.
69 | #
70 | # ### Using predefined features
71 | #
72 | # Some features, such as land and ocean masks, coastlines, country borders and lakes, are available. For instance, the land mask is accessed as `cfeature.LAND`.
73 |
74 | # +
75 | import cartopy.crs as ccrs
76 | import cartopy.feature as cfeature
77 | import matplotlib.pyplot as plt
78 |
79 | fig = plt.figure()
80 | ax = plt.axes(projection=ccrs.Mollweide())
81 | ax.add_feature(cfeature.LAND, facecolor=cfeature.COLORS['land'])
82 | ax.add_feature(cfeature.COASTLINE, edgecolor='k')
83 | ax.add_feature(cfeature.BORDERS)
84 | ax.add_feature(cfeature.OCEAN, color='SteelBlue')
85 | plt.show()
86 | # -
87 |
88 | # You can control the resolution of the default features by using the `with_scale` method.
89 |
90 | # +
91 | latc = -18 + 56/60.+ 15/(60 * 60)
92 | lonc = 148 + 5/60 + 45/(60*60)
93 |
94 | fig = plt.figure()
95 | ax = plt.axes(projection=ccrs.PlateCarree())
96 | ax.set_extent([lonc - 5, lonc + 5, latc - 5, latc + 5], ccrs.PlateCarree())
97 | ax.add_feature(cfeature.LAND.with_scale('10m'), facecolor=cfeature.COLORS['land'])
98 | ax.add_feature(cfeature.COASTLINE.with_scale('10m'), edgecolor='k')
99 | plt.show()
100 | # -
101 |
102 | # ### Using Natural Earth Data data
103 | #
104 | # It is also possible to include other features from [naturalearthdata](https://www.naturalearthdata.com/) by using the
105 | # `cartopy.feature.NaturalEarthFeature` interface. For instance, one can add coral reefs as follows:
106 |
107 | # +
108 | # Create a feature for reefs image
109 | reefs = cfeature.NaturalEarthFeature(
110 | category='physical',
111 | name='reefs',
112 | scale='10m',
113 | edgecolor='face',
114 | facecolor='FireBrick'
115 | )
116 |
117 | latc = -18 + 56/60.+ 15/(60 * 60)
118 | lonc = 148 + 5/60 + 45/(60*60)
119 |
120 | fig = plt.figure()
121 | ax = plt.axes(projection=ccrs.PlateCarree())
122 | ax.set_extent([lonc - 5, lonc + 5, latc - 5, latc + 5], ccrs.PlateCarree())
123 | ax.add_feature(cfeature.LAND)
124 | ax.add_feature(cfeature.OCEAN)
125 | ax.add_feature(reefs)
126 | ax.coastlines(resolution='50m')
127 | plt.show()
128 | # -
129 |
130 | # ### Using GSHSS features
131 |
132 | # There is also the possibility to use [GSHSS](https://www.ngdc.noaa.gov/mgg/shorelines/gshhs.html) features:
133 |
134 | fig = plt.figure(figsize=(6, 6))
135 | ax = plt.axes(projection=ccrs.PlateCarree())
136 | ax.set_xlim(lonc - 5, lonc + 5)
137 | ax.set_ylim(latc - 5, latc + 5)
138 | ax.add_feature(cfeature.GSHHSFeature(scale='intermediate', levels=[1], facecolor='gray', edgecolor='k'))
139 | plt.show()
140 |
141 | # ## Labeling
142 | #
143 | # Information about how to add grid labels is provided here: https://scitools.org.uk/cartopy/docs/v0.13/matplotlib/gridliner.html
144 | #
145 | # Here is a small example on how to do it.
146 |
147 | # First, you need to import some modules and to define a dictionary containing the map parameters:
148 |
149 | # +
150 | from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
151 | import matplotlib.ticker as mticker
152 |
153 | # definition of grid params
154 | gridparams = {'crs': ccrs.PlateCarree(central_longitude=0),
155 | 'draw_labels':True, 'linewidth':0.5,
156 | 'color':'k', 'alpha':1, 'linestyle':'--'}
157 | # -
158 |
159 | # Then you can use the `ax.gridlines` method with the `**gridparams` dictionary to add the grid lines.
160 |
161 | fig = plt.figure()
162 | ax = plt.axes(projection=ccrs.PlateCarree())
163 | ax.add_feature(cfeature.LAND, zorder=1)
164 | ax.add_feature(cfeature.COASTLINE, zorder=2)
165 | gl = ax.gridlines(**gridparams, zorder=0)
166 |
167 | # If you want to control which labels are drawn, and where, you can modify the values of the `gl` object as follows:
168 |
169 | fig = plt.figure()
170 | ax = plt.axes(projection=ccrs.PlateCarree())
171 | ax.add_feature(cfeature.LAND, zorder=1)
172 | ax.add_feature(cfeature.COASTLINE, zorder=2)
173 | gl = ax.gridlines(**gridparams, zorder=0)
174 | gl.top_labels = False
175 | gl.right_labels = False
176 | gl.xlocator = mticker.FixedLocator(np.arange(-180, 180 + 40, 40))
177 | gl.ylocator = mticker.FixedLocator(np.arange(-90, 90 + 20, 20))
178 | gl.xformatter = LONGITUDE_FORMATTER
179 | gl.yformatter = LATITUDE_FORMATTER
180 |
181 | # ## Plotting data
182 | #
183 | # Examples on how to plot data using Cartopy are provided [here](https://scitools.org.uk/cartopy/docs/latest/matplotlib/advanced_plotting.html).
184 | #
185 | # Basically, the same methods as in Matplotlib are used, except that a `transform` argument must be set to the projection in which the input data are provided.
186 | #
187 | # **In most cases, the data are provided in geographical coordinates. Therefore, the `transform` argument must be set to `ccrs.PlateCarree()`.**
188 | #
189 | # First, let's load a dataset:
190 |
191 | # +
192 | import xarray as xr
193 | import numpy as np
194 |
195 | data = xr.open_dataset('../io/data/UV500storm.nc')
196 | lon = data['lon'].values
197 | lat = data['lat'].values
198 | u = data['u'].values[0]
199 | v = data['v'].values[0]
200 | u = np.ma.masked_where(np.abs(u) > 999, u)
201 | v = np.ma.masked_where(np.abs(v) > 999, v)
202 | vel = np.sqrt(u*u + v*v, where=(np.ma.getmaskarray(u) == False))
203 | # -
204 |
205 | # ### Lines
206 | #
207 | # In order to draw lines that follow geodetic distance, set the `transform` argument as equal to `ccrs.Geodetic()`.
208 |
209 | fig = plt.figure()
210 | ax = plt.axes(projection=ccrs.PlateCarree())
211 | x = [lon.min(), lon.max()]
212 | y = [lat.min(), lat.max()]
213 | ax.plot(x, y, transform=ccrs.Geodetic(), label='Geodetic')
214 | ax.plot(x, y, transform=ccrs.PlateCarree(), label='PlateCarree')
215 | plt.legend()
216 | ax.coastlines()
217 |
218 |
219 | # ### Contours
220 |
221 | fig = plt.figure()
222 | ax = plt.axes(projection=ccrs.Mollweide())
223 | cs = plt.contourf(lon, lat, u, transform=ccrs.PlateCarree())
224 | cl = plt.contour(lon, lat, u, transform=ccrs.PlateCarree(), colors='k', linewidths=0.5)
225 | plt.clabel(cl)
226 | ax.coastlines() # add coastlines
227 | plt.show()
228 |
229 | # ### Quivers
230 |
231 | # +
232 | fig = plt.figure(figsize=(10, 10))
233 |
234 | ax = plt.axes(projection=ccrs.Mollweide())
235 | cs = plt.quiver(lon, lat, u, v, vel, cmap=plt.cm.jet, transform=ccrs.PlateCarree())
236 | cb = plt.colorbar(cs, shrink=0.5)
237 | plt.quiverkey(cs, 0.9, 0.1, 50, '50 m/s')
238 |
239 | ax.coastlines() # add coastlines
240 | ax.add_feature(cfeature.LAND)
241 | # -
242 |
243 | # Drawing quiver plots in Cartopy is facilitated by the `regrid_shape` option, which interpolates the data onto a regular grid; this is useful when the data are dense.
244 |
245 | # +
246 | fig = plt.figure(figsize=(10, 10))
247 |
248 | ax = plt.axes(projection=ccrs.Mollweide())
249 | cs = plt.quiver(lon, lat, u, v, vel, cmap=plt.cm.jet, transform=ccrs.PlateCarree(), regrid_shape=(100, 100), scale=1000)
250 | cb = plt.colorbar(cs, shrink=0.5)
251 | ax.set_extent([lon.min(), lon.max(), lat.min(), lat.max()], crs=ccrs.PlateCarree())
252 |
253 | ax.coastlines() # add coastlines
254 | ax.add_feature(cfeature.LAND)
255 | # -
256 |
257 | # ## Paneling
258 | #
259 | # Proper paneling can be obtained by using the `ImageGrid` Matplotlib function in combination with the `GeoAxes` class (cf. https://scitools.org.uk/cartopy/docs/v0.16/gallery/axes_grid_basic.html)
260 |
261 | # +
262 | from mpl_toolkits.axes_grid1 import ImageGrid
263 | from cartopy.mpl.geoaxes import GeoAxes
264 | import cartopy.feature as cfeature
265 |
266 | def add_labels(ax, projout):
267 | gridparams = {'crs': projout,
268 | 'draw_labels':True, 'linewidth':0.5,
269 | 'color':'gray', 'alpha':0.5, 'linestyle':'--'}
270 | gl = ax.gridlines(**gridparams)
271 | gl.xlabels_top = False
272 | gl.ylabels_right = False
273 | gl.xformatter = LONGITUDE_FORMATTER
274 | gl.yformatter = LATITUDE_FORMATTER
275 | gl.xlocator = mticker.FixedLocator(np.arange(-180, 180 + 40, 40))
276 | gl.ylocator = mticker.FixedLocator(np.arange(-90, 90 + 20, 20))
277 |
278 | projout = ccrs.PlateCarree(central_longitude=180)
279 | projin = ccrs.PlateCarree()
280 |
281 | fig = plt.figure(figsize=(12, 8))
282 | axes_class = (GeoAxes, dict(map_projection=projout))
283 |
284 | axgr = ImageGrid(fig, 111, axes_class=axes_class, nrows_ncols=(1, 3), axes_pad=(0.7, 0.5), label_mode='',
285 | cbar_mode='each', cbar_size=0.1, cbar_pad=0.3, cbar_location="bottom", share_all=True)
286 |
287 | axcbar = axgr.cbar_axes
288 |
289 | for i, ax in enumerate(axgr):
290 | if i == 0:
291 | cs = ax.contourf(lon, lat, u, transform=projin)
292 | ax.set_title('U')
293 | cb = axcbar[i].colorbar(cs)
294 | elif i == 1:
295 | cs = ax.contourf(lon, lat, v, transform=projin)
296 | cb = axcbar[i].colorbar(cs)
297 | ax.set_title('V')
298 | else:
299 | cs = ax.quiver(lon, lat, u, v, vel, cmap=plt.cm.jet, transform=projin)
300 | cb = axcbar[i].colorbar(cs)
301 | ax.set_title('Wind')
302 | ax.add_feature(cfeature.LAND)  # add land
303 | ax.add_feature(cfeature.COASTLINE) # add coastlines
304 | cb.set_label('m/s')
305 | add_labels(ax, projout)
306 | # -
307 |
308 | # ## Switching from one system to another
309 | #
310 | # The `transform_points` method can be used to convert point coordinates from one coordinate system to another. It is called as follows:
311 | #
312 | # ```
313 | # destCRS.transform_points(sourceCRS, longitudes, latitudes)
314 | # ```
315 | #
316 | # For instance, to convert longitudes/latitudes into Mollweide map coordinates:
317 |
318 | # +
319 | projin = ccrs.PlateCarree()
320 | projout = ccrs.Mollweide()
321 |
322 | N = 10
323 | lone = np.linspace(-180, 180, N)
324 | late = np.full((N), 0)
325 |
326 | convert = projout.transform_points(projin, lone, late)
327 | xe = convert[:, 0]
328 | ye = convert[:, 1]
329 | ze = convert[:, 2]
330 | xe, ye
331 | # -
332 |
333 | # Note that the `transform_vectors` method can be used to convert vector components from one CRS to another.
334 |
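# For instance, assuming that the coordinates `lon`, `lat` are 1-D arrays and that `u`, `v` are the 2-D wind components used above, the vectors can be re-expressed in Mollweide map coordinates. Note that `transform_vectors` expects 2-D coordinate arrays defined in the source CRS:

# +
lon2d, lat2d = np.meshgrid(lon, lat)
# fill masked values so that plain ndarrays are passed to transform_vectors
ut, vt = projout.transform_vectors(projin, lon2d, lat2d,
                                   np.ma.filled(u, 0), np.ma.filled(v, 0))
ut.shape, vt.shape
# -
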
335 | # ## Changing map boundary
336 | #
337 | # It is possible to change the map boundary, i.e. the outline of the map. This is especially useful if you want a masked Lambert Conformal projection, in which the map is clipped to a longitude/latitude box. First, let's have a look at the standard `LambertConformal` (LCC) projection:
338 |
339 | # +
340 | lonw = -80
341 | lone = 40
342 | lats = 10
343 | latn = 75
344 |
345 | lon0 = 0.5 * (lone + lonw)
346 | lat0 = 0.5 * (lats + latn)
347 |
348 | plt.figure()
349 | proj = ccrs.LambertConformal(central_longitude=lon0)
350 | ax = plt.axes(projection=proj)
351 | ax.set_extent([lonw, lone, lats, latn], ccrs.PlateCarree())
352 | i = ax.coastlines()
353 | ax.add_feature(cfeature.LAND)
354 | # -
355 |
356 | # In order to clip the map to the longitude/latitude box, the boundary polygon needs to be defined in geographical coordinates as a `Path` object:
357 |
358 | # +
359 | import matplotlib.path as mpath
360 | import numpy as np
361 |
362 | N = 100
363 | # -
364 |
365 | # First, we create the array of coordinates for the southern border:
366 |
367 | xsouth = np.linspace(lonw, lone, N)
368 | ysouth = np.full((N), lats)
369 |
370 | # Then, we create the coordinates for the eastern border:
371 |
372 | yeast = np.linspace(lats, latn, N)
373 | xeast = np.full((N), lone)
374 |
375 | # The same thing is done for the northern and western borders, except that the point order is reversed so that the boundary polygon is traced continuously (counter-clockwise).
376 |
377 | # +
378 | xnorth = np.linspace(lonw, lone, N)[::-1]
379 | ynorth = np.full((N), latn)
380 |
381 | ywest = np.linspace(lats, latn, N)[::-1]
382 | xwest = np.full((N), lonw)
383 | # -
384 |
385 | # Then, we combine all these coordinates into 1D arrays:
386 |
387 | x = np.concatenate([xsouth, xeast, xnorth, xwest])
388 | y = np.concatenate([ysouth, yeast, ynorth, ywest])
389 |
390 | # We can check that the domain is properly defined:
391 |
392 | plt.figure()
393 | proj = ccrs.LambertConformal(central_longitude=lon0)
394 | ax = plt.axes(projection=proj)
395 | ax.set_extent([x.min(), x.max(), y.min(), y.max()], ccrs.PlateCarree())
396 | ax.plot(x, y, transform=ccrs.PlateCarree())
397 | l = ax.coastlines()
398 | l = ax.add_feature(cfeature.LAND)
399 |
400 | # Now, we convert the `x, y` arrays into a `matplotlib.path.Path` object:
401 |
402 | path = mpath.Path(np.array([x, y]).T)
403 |
404 | # Now, the map boundary can be specified using the `set_boundary` method:
405 |
406 | plt.figure()
407 | proj = ccrs.LambertConformal(central_longitude=lon0)
408 | ax = plt.axes(projection=proj)
409 | ax.set_extent([x.min(), x.max(), y.min(), y.max()], ccrs.PlateCarree())
410 | ax.set_boundary(path, transform=ccrs.PlateCarree())
411 | l = ax.coastlines()
412 | l = ax.add_feature(cfeature.LAND)
413 |
414 | # **The features must be added after the new boundary has been set.**
415 |
416 | # ## Geodesic calculations
417 | #
418 | # Geodesic calculations (distances, azimuths and circles on the ellipsoid) can be performed with Cartopy's `cartopy.geodesic` module (cf. [geodesic](https://scitools.org.uk/cartopy/docs/latest/cartopy/geodesic.html)).
419 |
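# For instance, the distance between two cities can be computed with the `Geodesic.inverse` method, which returns, for each pair of points, the geodesic distance (in meters) and the forward and back azimuths. The coordinates below are approximate and given for illustration only:

# +
import cartopy.geodesic as cgeo

geod = cgeo.Geodesic()  # WGS84 ellipsoid by default

paris = (2.35, 48.85)  # lon, lat
new_york = (-74.00, 40.71)  # lon, lat

# each row of the result contains (distance in m, start azimuth, end azimuth)
dist, azi1, azi2 = np.asarray(geod.inverse(paris, new_york))[0]
dist * 1e-3  # distance in km
# -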
--------------------------------------------------------------------------------