├── .circleci
└── config.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── README.md
├── archive
├── assets
│ └── images
│ │ ├── agg.jpg
│ │ ├── broadcasting.jpg
│ │ ├── column-major.jpg
│ │ ├── row-major.jpg
│ │ ├── row-major.png
│ │ └── xarray-data-structures.png
├── experimental
│ ├── .intake_esm
│ │ └── config.yaml
│ ├── cesm-le-global-integral-Copy2.ipynb
│ ├── cesm-le-global-integral-casper.ipynb
│ ├── cesm-le-global-integral.ipynb
│ └── cesm1-le-collection.yml
├── numpy
│ └── 01-numpy-guide.ipynb
├── python
│ └── 00-python-guide-for-people-in-a-hurry.ipynb
└── xarray
│ ├── 01-xarray-introduction.ipynb
│ ├── 02-xarray-advanced-plotting.ipynb
│ └── data
│ ├── air_temperature.nc
│ ├── rasm.nc
│ └── rasm_test.nc
├── assets
├── dask-array-black-text.svg
├── dask-components.jpeg
├── dask-dag.gif
├── dask_horizontal.svg
├── noaa-nino-sst.gif
└── notebook-user-interface.png
├── curricula
├── beginner.md
├── self-guided.txt
├── self_paced
│ ├── outline.ipynb
│ ├── part_1
│ │ ├── z230_p1a.ipynb
│ │ ├── z230_p1b.ipynb
│ │ └── z230_p1c.ipynb
│ └── requirements.ipynb
└── z230-pt2.md
├── data
└── README.md
├── notebooks
├── bytopic
│ ├── ObjectOrientedProgramming
│ │ └── Object Oriented Programming In Python.ipynb
│ ├── TestingInPython
│ │ ├── CodeCoverage.png
│ │ ├── CodeCoverage.xml
│ │ ├── PyCharmIDECode.png
│ │ ├── PyCharmTestResults.png
│ │ └── Testing In Python.ipynb
│ ├── conda
│ │ └── 01_managing_conda.ipynb
│ ├── dask
│ │ ├── 01_overview.ipynb
│ │ ├── 02_dask_arrays.ipynb
│ │ ├── 03_distributed.ipynb
│ │ ├── 04_dask_and_xarray.ipynb
│ │ ├── 05_dask_hpc.ipynb
│ │ └── solutions
│ │ │ └── 02_dask_arrays_mean.py
│ ├── data-visualization
│ │ ├── 00_intro.ipynb
│ │ ├── 01_matplotlib.ipynb
│ │ ├── 02_cartopy.ipynb
│ │ ├── img
│ │ │ ├── anatomy-of-a-figure.png
│ │ │ ├── hv-gv-bk-hvplot.png
│ │ │ ├── landscape_hv_nx.png
│ │ │ ├── landscape_hv_nx_pyviz.png
│ │ │ └── workflow.png
│ │ └── solutions
│ │ │ ├── color_scatter.py
│ │ │ ├── contourf_contour.py
│ │ │ ├── map.py
│ │ │ └── subplots.py
│ ├── git-workflows
│ │ ├── git
│ │ │ ├── 01_what_is_git.ipynb
│ │ │ ├── 02_repositories.ipynb
│ │ │ ├── 03_branches.ipynb
│ │ │ ├── 04_conflicts.ipynb
│ │ │ └── 05_clones.ipynb
│ │ ├── github
│ │ │ ├── 01_what_is_github.ipynb
│ │ │ ├── 02_getting_started_with_github.ipynb
│ │ │ ├── 03_repositories.ipynb
│ │ │ ├── 04_clones.ipynb
│ │ │ ├── 05_forks.ipynb
│ │ │ └── 06_integrations.ipynb
│ │ ├── images
│ │ │ ├── create_a_repository.png
│ │ │ ├── example_repo.png
│ │ │ ├── git-logo.png
│ │ │ ├── github-logo.png
│ │ │ ├── github_log.png
│ │ │ ├── github_plus_sign.png
│ │ │ ├── github_prs.png
│ │ │ └── two_commits.png
│ │ └── intro.ipynb
│ ├── metpy
│ │ ├── Introduction to MetPy.ipynb
│ │ ├── MetPy_breakdown.png
│ │ ├── QG Analysis.ipynb
│ │ ├── SkewT_and_Hodograph.ipynb
│ │ └── solutions
│ │ │ ├── QG_data.py
│ │ │ ├── distance.py
│ │ │ ├── hodograph_preprocessing.py
│ │ │ ├── hodograph_segmented.py
│ │ │ ├── qg_omega_total_fig.py
│ │ │ ├── skewt_cape_cin.py
│ │ │ ├── skewt_get_data.py
│ │ │ ├── skewt_make_figure.py
│ │ │ ├── skewt_thermo.py
│ │ │ ├── skewt_wind_fiducials.py
│ │ │ ├── temperature_change.py
│ │ │ ├── term_B_calc.py
│ │ │ └── wind_speed.py
│ ├── numpy
│ │ ├── 01_getting_started_with_numpy.ipynb
│ │ ├── 02_memory_layout.ipynb
│ │ ├── 03_array_creation.ipynb
│ │ ├── 04_indexing_and_slicing.ipynb
│ │ ├── 05_reshaping_and_resizing.ipynb
│ │ ├── 06_vectorized_expressions.ipynb
│ │ └── images
│ │ │ ├── agg.jpeg
│ │ │ ├── broadcasting.jpeg
│ │ │ ├── column-major.jpeg
│ │ │ └── row-major.jpeg
│ ├── packaging
│ │ ├── demo_hello_cesm_package.ipynb
│ │ ├── hello-cesm-package
│ │ │ ├── .gitignore
│ │ │ ├── CHANGELOG.rst
│ │ │ ├── LICENSE
│ │ │ ├── MANIFEST.in
│ │ │ ├── README.md
│ │ │ ├── cesm_package
│ │ │ │ ├── __init__.py
│ │ │ │ ├── climatologies.py
│ │ │ │ └── statistics.py
│ │ │ ├── docs
│ │ │ │ └── overview.rst
│ │ │ ├── setup.py
│ │ │ └── tests
│ │ │ │ └── test_statistics.py
│ │ └── intro_to_packaging.ipynb
│ ├── python-basics
│ │ ├── 00_introduction.ipynb
│ │ ├── 01_objects.ipynb
│ │ ├── 02_operators.ipynb
│ │ ├── 03_builtin_types.ipynb
│ │ ├── 04_flow_control.ipynb
│ │ ├── 05_builtin_functions.ipynb
│ │ ├── 06_one_liners.ipynb
│ │ ├── 07_functions.ipynb
│ │ └── 08_modules_and_scripts.ipynb
│ ├── test-driven-development
│ │ ├── TDD.ipynb
│ │ ├── img
│ │ │ ├── TDD.png
│ │ │ ├── ci-job.png
│ │ │ ├── ci-workflow.png
│ │ │ └── coverage.png
│ │ ├── my_cesm_package
│ │ │ ├── __init__.py
│ │ │ └── statistics.py
│ │ └── tests
│ │ │ ├── __init__.py
│ │ │ └── test_statistics.py
│ └── xarray
│ │ ├── 01_getting_started_with_xarray.ipynb
│ │ ├── 02_io.ipynb
│ │ ├── 03_indexing.ipynb
│ │ ├── 04_agg.ipynb
│ │ ├── 05_arithmetic.ipynb
│ │ ├── 06_alignment.ipynb
│ │ ├── 07_groupby_resampling_rolling.ipynb
│ │ ├── data
│ │ ├── 1980.nc
│ │ ├── 1981.nc
│ │ ├── 1982.nc
│ │ ├── 1983.nc
│ │ ├── air_temperature.nc
│ │ └── rasm.nc
│ │ └── images
│ │ └── xarray-data-structures.png
├── welcome.ipynb
└── workflows
│ ├── cesm
│ └── oxygen-trend-computation
│ │ ├── O2_forced_unforced.ipynb
│ │ ├── all-trends-O2-200m-NPac.png
│ │ ├── all-trends-internal-O2-200m-NPac.png
│ │ ├── trend-decomp-O2-200m-NPac.png
│ │ └── util.py
│ ├── gmet
│ └── gmet_ensemble.ipynb
│ ├── noaa_ersst_variability
│ └── noaa_ersst_variability.ipynb
│ └── ocean_heat_content
│ ├── 00_intro.ipynb
│ ├── 01_modules_and_xarray_datasets.ipynb
│ ├── 02_subselecting_and_indexing_data.ipynb
│ ├── 03_units.ipynb
│ ├── 04_calculation_and_plotting.ipynb
│ └── solutions
│ ├── solution_1_1.py
│ ├── solution_1_2.py
│ ├── solution_1_3.py
│ ├── solution_2_1.py
│ ├── solution_2_2.py
│ ├── solution_2_3.py
│ ├── solution_2_4.py
│ ├── solution_2_5.py
│ ├── solution_2_6.py
│ ├── solution_3_1.py
│ ├── solution_3_2.py
│ ├── solution_3_3.py
│ └── solution_4_1.py
├── setup
├── check_setup
├── conda
│ ├── README.md
│ ├── build-matlab-api
│ ├── check_env_conflict
│ ├── install_conda
│ ├── post_build_base
│ ├── post_build_tutorial
│ ├── update_base_env
│ └── update_tutorial_env
├── configure
├── download_cartopy_assets.py
├── download_data.py
├── environments
│ ├── env-conda-base.yml
│ ├── env-py-matlab.yml
│ └── env-tutorial.yml
└── jlab
│ ├── jlab-ch
│ ├── jlab-dav
│ └── jlab-hobart
└── site
├── conf.py
├── pages
└── fall2019
│ ├── agenda.md
│ ├── index.md
│ ├── instructions.md
│ └── sprints.md
├── posts
└── fall-2019-tutorial.md
└── themes
└── ncar
├── assets
├── css
│ └── custom.css
└── img
│ └── background.png
├── ncar.theme
└── templates
└── base_helper.tmpl
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 |
3 | jobs:
4 |
5 | build_site:
6 |
7 | docker:
8 | - image: circleci/python:latest
9 |
10 | steps:
11 | - checkout
12 | - add_ssh_keys:
13 | fingerprints:
14 | - "2f:46:d4:bf:53:bd:96:3c:e6:b3:aa:68:5e:42:0b:b9"
15 | - restore_cache:
16 | key: deps1-{{ .Branch }}-{{ checksum "site/conf.py" }}
17 | - run:
18 | name: Install Nikola
19 | command: sudo pip install nikola[extras]
20 | - save_cache:
21 | key: deps1-{{ .Branch }}-{{ checksum "site/conf.py" }}
22 | paths:
23 | - "$HOME/.cache/pip"
24 | - run:
25 | name: Build site with Nikola
26 | command: cd site && nikola build
27 | - store_artifacts:
28 | path: ./site/output/
29 |
30 | deploy_site:
31 |
32 | docker:
33 | - image: circleci/python:latest
34 |
35 | steps:
36 | - checkout
37 | - add_ssh_keys:
38 | fingerprints:
39 | - "2f:46:d4:bf:53:bd:96:3c:e6:b3:aa:68:5e:42:0b:b9"
40 | - restore_cache:
41 | key: deps1-{{ .Branch }}-{{ checksum "site/conf.py" }}
42 | - run:
43 | name: Install Nikola
44 | command: sudo pip install nikola[extras]
45 | - save_cache:
46 | key: deps1-{{ .Branch }}-{{ checksum "site/conf.py" }}
47 | paths:
48 | - "$HOME/.cache/pip"
49 | - run:
50 | name: Build site with Nikola and deploy to GitHub Pages
51 | command: |
52 | git config --global user.name 'Xdev Bot'
53 | git config --global user.email '47308153+xdev-bot@users.noreply.github.com'
54 | cd site
55 | nikola build
56 | nikola github_deploy -m '[ci skip] Nikola auto deploy'
57 |
58 | workflows:
59 | version: 2
60 | build:
61 | jobs:
62 | - build_site:
63 | filters:
64 | branches:
65 | ignore:
66 | - gh-pages
67 | - master
68 |
69 | - deploy_site:
70 | filters:
71 | branches:
72 | only:
73 | - master
74 | ignore:
75 | - gh-pages
76 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # Mac Files
29 | .DS_Store
30 |
31 | # Jupyter Notebooks
32 | .ipynb_checkpoints
33 |
34 | # Sphinx documentation
35 | docs/_build/
36 |
37 | # VSCode
38 | .vscode/
39 |
40 | # Data directory contents
41 | data/
42 |
43 | # Nikola site
44 | site/output
45 | site/cache
46 | .doit.*
47 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 |
3 | - repo: https://github.com/kynan/nbstripout
4 | rev: master
5 | hooks:
6 | - id: nbstripout
7 | files: ".ipynb"
8 |
9 | - repo: https://github.com/pre-commit/pre-commit-hooks
10 | rev: v2.3.0
11 | hooks:
12 | - id: trailing-whitespace
13 | - id: end-of-file-fixer
14 | - id: check-docstring-first
15 | - id: check-yaml
16 |
17 | - repo: https://github.com/ambv/black
18 | rev: 19.3b0
19 | hooks:
20 | - id: black
21 | args: ["--line-length", "100"]
22 |
23 |
24 |
25 | # - repo: https://github.com/pre-commit/pre-commit-hooks
26 | # rev: v2.3.0
27 | # hooks:
28 | # - id: flake8
29 | # args: ["--max-line-length", "100"]
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://circleci.com/gh/NCAR/ncar-python-tutorial/tree/master)
2 |
3 | # NCAR Python Tutorial
4 |
5 | - [NCAR Python Tutorial](#ncar-python-tutorial)
6 | - [Setup](#setup)
7 | - [Step 1: Clone NCAR Python Tutorial Repository](#step-1-clone-ncar-python-tutorial-repository)
8 | - [Step 2: Install Miniconda and Create Environments](#step-2-install-miniconda-and-create-environments)
9 | - [Step 3: Close and re-open your current shell](#step-3-close-and-re-open-your-current-shell)
10 | - [Step 4: Run the Setup Verification Script](#step-4-run-the-setup-verification-script)
11 | - [Launch Jupyter Lab](#launch-jupyter-lab)
12 | - [1. Cheyenne or DAV via JupyterHub (Recommended)](#1-cheyenne-or-dav-via-jupyterhub-recommended)
13 | - [2. Cheyenne or DAV via SSH Tunneling](#2-cheyenne-or-dav-via-ssh-tunneling)
14 | - [3. Hobart via SSH Tunneling](#3-hobart-via-ssh-tunneling)
15 | - [4. Personal Laptop](#4-personal-laptop)
16 |
17 | ----
18 |
19 | ## Setup
20 |
21 | This tutorial covers the installation and setup of a Python environment on:
22 |
23 | - Cheyenne
24 | - Casper
25 | - CGD's Hobart
26 | - Personal laptop/desktop with a UNIX-variant Operating System
27 |
28 | **NOTE:** For Windows users: the setup scripts provided in this repository do not currently work on Windows machines.
29 |
30 | ### Step 1: Clone NCAR Python Tutorial Repository
31 |
32 | Run the following command to clone this repo to your system (e.g. cheyenne, casper, your laptop, etc...):
33 |
34 | ```bash
35 | git clone https://github.com/NCAR/ncar-python-tutorial.git
36 | ```
37 |
38 | ### Step 2: Install Miniconda and Create Environments
39 |
40 | - Change directory to the cloned repository
41 |
42 | ```bash
43 | cd ncar-python-tutorial
44 | ```
45 |
46 | - Run the [`configure`](./setup/configure) script:
47 |
48 | **NOTE**: Be prepared for the script to take up to 15 minutes to complete.
49 |
50 | ```bash
51 | ./setup/configure
52 | ```
53 |
54 | ```bash
55 | $ ./setup/configure --help
56 | usage: configure [-h] [--clobber] [--download] [--prefix PREFIX]
57 |
58 | Set up tutorial environment.
59 |
60 | optional arguments:
61 | -h, --help show this help message and exit
62 | --clobber, -c Whether to clobber existing environment (default:
63 | False)
64 | --download, -d Download tutorial data without setting environment up
65 | (default: False)
66 | --prefix PREFIX, -p PREFIX
67 | Miniconda3 install location)
68 | ```
69 |
70 | Default values for ``--prefix`` argument are:
71 |
72 | - Personal laptop / Hobart: `$HOME/miniconda3`
73 | - Cheyenne or Casper: `/glade/work/$USER/miniconda3`
74 |
75 | **NOTE**:
76 | In case the default `prefix` is not appropriate for you (due to limited storage), feel free to specify a different miniconda install location. For instance, this install location may be a `project` workspace on a shared filesystem like GLADE or Hobart's filesystem.
77 |
78 | The `configure` script does the following:
79 |
80 | - Install `conda` package manager if it is unable to find an existing installation. Otherwise, it will update the `base` environment
81 | - Create or Update `python-tutorial` conda environment.
82 | - Download data if not on Cheyenne or Casper or Hobart. If on Cheyenne or Casper or Hobart, create soft-links to an existing/local data repository.
83 |
84 | ### Step 3: Close and re-open your current shell
85 |
86 | For changes to take effect, close and re-open your current shell.
87 |
88 | ### Step 4: Run the Setup Verification Script
89 |
90 | - Check that *conda info* runs successfully:
91 |
92 | ```bash
93 | conda info
94 | ```
95 |
96 | - From the `ncar-python-tutorial` directory, activate `python-tutorial` conda environment:
97 |
98 | ```bash
99 | conda activate python-tutorial
100 | ```
101 |
102 | - Run the setup verification script to confirm that everything is working as expected:
103 |
104 | ```bash
105 | cd ncar-python-tutorial
106 | ./setup/check_setup
107 | ```
108 |
109 | This step should print **"Everything looks good!"**.
110 |
111 | ----
112 |
113 | ## Launch Jupyter Lab
114 |
115 | ### 1. Cheyenne or DAV via JupyterHub (Recommended)
116 |
117 | - JupyterHub link: https://jupyterhub.ucar.edu/
118 |
119 | To use the Cheyenne or DAV compute nodes, we recommend using JupyterLab via NCAR's JupyterHub deployment.
120 |
121 | Open your preferred browser (Chrome, Firefox, Safari, etc...) on your ``local machine``, and head over to https://jupyterhub.ucar.edu/.
122 |
123 | **You will need to authenticate with either your _yubikey_ or your _DUO_ mobile app**
124 |
125 | ### 2. Cheyenne or DAV via SSH Tunneling
126 |
127 | In case you are having issues with jupyterhub.ucar.edu, we've provided utility scripts for launching JupyterLab on both Cheyenne and Casper via SSH Tunneling:
128 |
129 | ```bash
130 | conda activate base
131 | ./setup/jlab/jlab-ch # on Cheyenne
132 | ./setup/jlab/jlab-dav # on Casper
133 | ```
134 |
135 | ### 3. Hobart via SSH Tunneling
136 |
137 | For those interested in running JupyterLab on CGD's Hobart, you will need to use SSH tunneling script provided in [``setup/jlab/jlab-hobart``](./setup/jlab/jlab-hobart)
138 |
139 | ```bash
140 | conda activate base
141 | ./setup/jlab/jlab-hobart
142 | ```
143 |
144 | ```bash
145 | $ ./setup/jlab/jlab-hobart --help
146 | Usage: launch dask
147 | Possible options are:
148 | -w,--walltime: walltime [default: 08:00:00]
149 | -q,--queue: queue [default: medium]
150 | -d,--directory: notebook directory
151 | -p,--port: [default: 8888]
152 | ```
153 |
154 | ### 4. Personal Laptop
155 |
156 | For those interested in running JupyterLab on their local machine, you can simply run the following command, and follow the printed instructions on the console:
157 |
158 | ```bash
159 | conda activate base
160 | jupyter lab
161 | ```
162 |
--------------------------------------------------------------------------------
/archive/assets/images/agg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/agg.jpg
--------------------------------------------------------------------------------
/archive/assets/images/broadcasting.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/broadcasting.jpg
--------------------------------------------------------------------------------
/archive/assets/images/column-major.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/column-major.jpg
--------------------------------------------------------------------------------
/archive/assets/images/row-major.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/row-major.jpg
--------------------------------------------------------------------------------
/archive/assets/images/row-major.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/row-major.png
--------------------------------------------------------------------------------
/archive/assets/images/xarray-data-structures.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/assets/images/xarray-data-structures.png
--------------------------------------------------------------------------------
/archive/experimental/.intake_esm/config.yaml:
--------------------------------------------------------------------------------
1 | collections:
2 | cesm:
3 | collection_columns:
4 | - resource
5 | - resource_type
6 | - direct_access
7 | - experiment
8 | - case
9 | - component
10 | - stream
11 | - variable
12 | - date_range
13 | - ensemble
14 | - file_fullpath
15 | - file_basename
16 | - file_dirname
17 | - ctrl_branch_year
18 | - year_offset
19 | - sequence_order
20 | - has_ocean_bgc
21 | - grid
22 | order_by_columns:
23 | - sequence_order
24 | - file_fullpath
25 | required_columns:
26 | - sequence_order
27 | - file_fullpath
28 | component_streams:
29 | atm:
30 | - cam.h0
31 | - cam.h1
32 | - cam.h2
33 | - cam.h3
34 | - cam.h4
35 | - cam.h5
36 | - cam.h6
37 | - cam.h7
38 | - cam.h8
39 | glc:
40 | - cism.h
41 | - cism.h0
42 | - cism.h1
43 | - cism.h2
44 | - cism.h3
45 | - cism.h4
46 | - cism.h5
47 | - cism.h6
48 | - cism.h7
49 | - cism.h8
50 | ice:
51 | - cice.h2_06h
52 | - cice.h1
53 | - cice.h
54 | lnd:
55 | - clm2.h0
56 | - clm2.h1
57 | - clm2.h2
58 | - clm2.h3
59 | - clm2.h4
60 | - clm2.h5
61 | - clm2.h6
62 | - clm2.h7
63 | - clm2.h8
64 | ocn:
65 | - pop.h.nday1
66 | - pop.h.nyear1
67 | - pop.h.ecosys.nday1
68 | - pop.h.ecosys.nyear1
69 | - pop.h
70 | - pop.h.sigma
71 | rof:
72 | - rtm.h0
73 | - rtm.h1
74 | - rtm.h2
75 | - rtm.h3
76 | - rtm.h4
77 | - rtm.h5
78 | - rtm.h6
79 | - rtm.h7
80 | - rtm.h8
81 | - mosart.h0
82 | - mosart.h1
83 | - mosart.h2
84 | - mosart.h3
85 | - mosart.h4
86 | - mosart.h5
87 | - mosart.h6
88 | - mosart.h7
89 | - mosart.h8
90 | replacements:
91 | freq:
92 | daily: day_1
93 | monthly: month_1
94 | yearly: year_1
95 | cmip:
96 | collection_columns:
97 | - ensemble
98 | - experiment
99 | - file_basename
100 | - file_fullpath
101 | - frequency
102 | - institution
103 | - model
104 | - realm
105 | - files_dirname
106 | - variable
107 | - version
108 | required_columns:
109 | - realm
110 | - frequency
111 | - ensemble
112 | - experiment
113 | - file_fullpath
114 | frequencies:
115 | - 3hr
116 | - 6hr
117 | - day
118 | - fx
119 | - mon
120 | - monclim
121 | - subhr
122 | - yr
123 | realms:
124 | - aerosol
125 | - atmos
126 | - atmoschem
127 | - land
128 | - landice
129 | - ocean
130 | - ocnbgchem
131 | - seaIce
132 | sources:
133 | cesm: intake_esm.cesm.CESMSource
134 | cmip: intake_esm.cmip.CMIPSource
135 | default_chunk_size: 128MiB
136 | data_cache_directory: /glade/scratch/${USER}/intake-esm-data
137 | database_directory: /glade/work/${USER}/intake-esm-collections
138 |
--------------------------------------------------------------------------------
/archive/xarray/data/air_temperature.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/xarray/data/air_temperature.nc
--------------------------------------------------------------------------------
/archive/xarray/data/rasm.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/xarray/data/rasm.nc
--------------------------------------------------------------------------------
/archive/xarray/data/rasm_test.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/archive/xarray/data/rasm_test.nc
--------------------------------------------------------------------------------
/assets/dask-components.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/assets/dask-components.jpeg
--------------------------------------------------------------------------------
/assets/dask-dag.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/assets/dask-dag.gif
--------------------------------------------------------------------------------
/assets/dask_horizontal.svg:
--------------------------------------------------------------------------------
1 | dask
2 |
--------------------------------------------------------------------------------
/assets/noaa-nino-sst.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/assets/noaa-nino-sst.gif
--------------------------------------------------------------------------------
/assets/notebook-user-interface.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/assets/notebook-user-interface.png
--------------------------------------------------------------------------------
/curricula/beginner.md:
--------------------------------------------------------------------------------
1 | Curriculum
2 | ==========
3 |
4 | Beginner
5 | --------
6 |
7 | From the registration form:
8 |
9 | > The Beginner Track tutorial is intended to introduce participants to the basics,
10 | > such as Git, GitHub, Jupyter Notebooks & Lab, the Python Language, itself, as well
11 | > as common Python packages such as Numpy, Pandas, Matplotlib and Cartopy.
12 |
13 | **Curriculum:**
14 |
15 | Beginner tutorial will be done entirely on users' laptops.
16 |
17 | 1. Setup [2 hours]
18 | 1. Instructions for installing software on their laptop
19 | - Conda basics
20 | - ???
21 | 2. Need to write down instructions for reference later
22 | 2. Python, Git & GitHub [?? hours] - DAY TUTORIAL
23 | 1. (See https://docs.python.org/3/tutorial/)
24 | 2. Teach Python, Git & GitHub by having participants build a package
25 | 3. ESMLab-like utility to do climatologies, etc.
26 | 4. Demonstrates Python modules, packages, VCS, etc.
27 | 3. Introduction to Jupyter Lab [30 minutes]
28 | 1. Launch Jupyter Lab from terminal
29 | 2. Walk people through the UI
30 | - Create a Text File
31 | - Launch a Terminal
32 | - Conda activate
33 | 4. Numpy [2 hours]
34 | 5. Pandas [2 hours]
35 | 6. Matplotlib & Cartopy [3 hours]
36 |
--------------------------------------------------------------------------------
/curricula/self-guided.txt:
--------------------------------------------------------------------------------
1 | Self-guided NCAR Python Tutorial
2 |
3 |
4 | This document is intended to serve as a guide for those who want to go through the information in this repo outside of a scheduled NCAR tutorial.
5 |
6 |
7 | First, follow the setup instructions found in the README [here](https://github.com/NCAR/ncar-python-tutorial).
8 |
9 |
10 | -If set up on Cheyenne/Casper, navigate to https://jupyterhub.ucar.edu/, enter your account key, and voilà — you can use Jupyter notebooks on Cheyenne/Casper.
11 |
12 |
13 | -If set up on your personal computer, either launch Jupyter from the terminal or from your Anaconda Navigator.
14 |
15 |
16 | Open `notebooks/welcome.ipynb` to begin.
17 |
18 |
19 | Then go through the Curriculum:
20 |
21 |
22 | Getting started
23 | 1. Intro to Jupyter Lab --- `notebooks/welcome.ipynb`
24 | 2. Python basics lectures --- `notebooks/bytopic/python-basics/`
25 | 3. Numpy --- `notebooks/bytopic/numpy/`
26 | 4. Data Visualization with Matplotlib and Cartopy --- `notebooks/bytopic/data-visualization/`
27 |
28 |
29 | Introduction to Xarray
30 | 1. Xarray: Python’s Approach to Multi-Dimensional Datasets --- `notebooks/bytopic/xarray/`
31 | 2. Motivating Example Workflow -- Calculating Ocean Heat Content --- `notebooks/workflows/ocean_heat_content/`
32 |
33 |
34 | Introduction to Dask
35 | 1. Dask: Automatic Parallel Processing in Python --- `notebooks/bytopic/dask/`
36 |
37 |
38 | More Python language
39 | 1. Object Oriented Programming --- `notebooks/bytopic/ObjectOrientedProgramming/`
40 | 2. Jupyter Again --- awaiting material
41 |
42 |
43 | Package Management and Environments
44 | 1. Conda --- `notebooks/bytopic/conda/`
45 |
46 |
47 | Effective collaborative development
48 | 1. Github and Git Workflows --- `notebooks/bytopic/git-workflows/`
49 | 2. Testing and Unit Testing Frameworks --- `notebooks/bytopic/TestingInPython`
50 | 3. Test Driven Development --- `notebooks/bytopic/test-driven-development`
51 |
52 |
53 | Example workflows
54 | 1. CESM trends --- `notebooks/workflows/cesm/oxygen-trend-computation`
55 | 2. NOAA ERSST Variability --- `notebooks/workflows/noaa_ersst_variability`
56 | 3. Analysis of Gridded Ensemble Precip and Temp Estimates --- `notebooks/workflows/gmet`
--------------------------------------------------------------------------------
/curricula/self_paced/outline.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# NCAR Python Virtual Tutorial\n",
8 | "------------------------"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "## Getting Started\n",
16 | "1. [Requirements and Installation](requirements.ipynb)"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "## Zero to Thirty Beginner \n",
24 | "\n",
25 | "### 1. First Python Script -- part_1\n",
26 | "\n",
27 | "#### 1a. [Reading in a .txt File](part_1/z230_p1a.ipynb)\n",
28 | "- Workspace Setup:\n",
29 | " - Creating a directory\n",
30 | " - Creating a conda environment\n",
31 | " - Creating a git repository\n",
32 | " - Downloading a file\n",
33 | "\n",
34 | "- Python Concepts:\n",
35 | " - the `str` datatype\n",
36 | " - `print()`\n",
37 | " - `open()`\n",
38 | " - `readline()`\n",
39 | " - `read()`\n",
40 | " - `close()`\n",
41 | " - the `with` context manager\n",
42 | "\n",
43 | "- Git Fundamentals\n",
44 | " - `git status`\n",
45 | " - `git add`\n",
46 | " - `git commit`\n",
47 | " - `git logs`\n",
48 | "\n",
49 | "#### 1b. [Creating a Data Dictionary](part_1/z230_p1b.ipynb)\n",
50 | "- Python Data Structures\n",
51 | " - `list` \n",
52 | " - `list.append()`\n",
53 | " - `list` indexing\n",
54 | " - nested `list`s\n",
55 | " - `range`\n",
56 | " - `dict` \n",
57 | " - key/value pairs\n",
58 | " - `dict.get()`\n",
59 | "- Other Python Concepts\n",
60 | " - initializing data variables\n",
61 | " - `str.split()`\n",
62 | " - the `float` datatype.\n",
63 | " \n",
64 | "#### 1c. [Writing a Function to Compute Wind Chill Index](part_1/z230_p1c.ipynb)\n",
65 | "- Python Concepts:\n",
66 | " - functions\n",
67 | " - math operators\n",
68 | " - `zip()`\n",
69 | " - the `tuple` data structure\n",
70 | " - f-string formatting\n",
71 | "- Git:\n",
72 | " - pushing a local repository to GitHub"
73 | ]
74 | }
75 | ],
76 | "metadata": {
77 | "kernelspec": {
78 | "display_name": "Python 3",
79 | "language": "python",
80 | "name": "python3"
81 | },
82 | "language_info": {
83 | "codemirror_mode": {
84 | "name": "ipython",
85 | "version": 3
86 | },
87 | "file_extension": ".py",
88 | "mimetype": "text/x-python",
89 | "name": "python",
90 | "nbconvert_exporter": "python",
91 | "pygments_lexer": "ipython3",
92 | "version": "3.7.1"
93 | }
94 | },
95 | "nbformat": 4,
96 | "nbformat_minor": 2
97 | }
98 |
--------------------------------------------------------------------------------
/curricula/self_paced/requirements.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Requirements & Installation\n",
8 | "--------------------------"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "If you don't have conda installed at all, please install it. \n",
16 | "\n",
17 | "Follow [these instructions](https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html)."
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "1. [bash] First, check that you have conda or miniconda installed on your OS. \n",
25 | "\n",
26 | " Check your conda version:\n",
27 | "\n",
28 | " ```bash\n",
29 | " $ conda --version\n",
30 | " ```\n",
31 | " At the time of writing this, the latest version of conda is 4.8. If you have an old version of conda installed, update it."
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "2. [bash] If necessary, update:\n",
39 | "\n",
40 | " ```bash\n",
41 |     "    $ conda update conda\n",
42 | " ```\n",
43 | "\n",
44 |     "    **NOTE:** If you have a REALLY old version of conda, it might be easier to delete it and then reinstall it. But before doing this, check your environment list to see if there are any environments you created and want to save."
45 | ]
46 | },
47 | {
48 | "cell_type": "markdown",
49 | "metadata": {},
50 | "source": [
51 | "3. [bash] Check your conda version again.\n",
52 | "\n",
53 | " ```bash\n",
54 | " $ conda --version\n",
55 | " ```"
56 | ]
57 | },
58 | {
59 | "cell_type": "markdown",
60 | "metadata": {},
61 | "source": [
62 | "4. Install git\n",
63 | "\n",
64 | " Git is a program that tracks changes made to files. This makes it easy to maintain access to multiple versions of your code as you improve it, and revert your code back to a previous version if you've made any mistakes.\n",
65 | "\n",
66 | " Follow instructions [here](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)."
67 | ]
68 | }
69 | ],
70 | "metadata": {
71 | "kernelspec": {
72 | "display_name": "Python 3",
73 | "language": "python",
74 | "name": "python3"
75 | },
76 | "language_info": {
77 | "codemirror_mode": {
78 | "name": "ipython",
79 | "version": 3
80 | },
81 | "file_extension": ".py",
82 | "mimetype": "text/x-python",
83 | "name": "python",
84 | "nbconvert_exporter": "python",
85 | "pygments_lexer": "ipython3",
86 | "version": "3.7.1"
87 | }
88 | },
89 | "nbformat": 4,
90 | "nbformat_minor": 2
91 | }
92 |
--------------------------------------------------------------------------------
/data/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/data/README.md
--------------------------------------------------------------------------------
/notebooks/bytopic/TestingInPython/CodeCoverage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/TestingInPython/CodeCoverage.png
--------------------------------------------------------------------------------
/notebooks/bytopic/TestingInPython/CodeCoverage.xml:
--------------------------------------------------------------------------------
1 | 7Ztbc6IwFMc/jY/tEMLN11r3MrM725nOTruPEY5CF4mTxls//QYJAg3tUKqmdqMPkn8SDOfkR04OOsCj+eYrI4v4J40gHdhWtBng64FtIxR44iNXtoXiu6gQZiyJZKNKuE2eQIqWVJdJBI+NhpzSlCeLphjSLIOQNzTCGF03m01p2vzWBZmBItyGJFXVuyTicaEGrlXp3yCZxeU3I0vWzEnZWAqPMYnouibh8QCPGKW8OJpvRpDmxivtUvT78kLtfmAMMt6lw92vaMxWT+HDRRiQCca/+fenCyzdsyLpUl6xHC3fliZgdJlFkJ8FDfDVOk443C5ImNeuhdOFFvN5KqunSZqOaErZri+euvlb6I+c0b9Qq/F2r7wHzXhNL15ClwMDxmHz4iWjvSHFDAQ6B862oonsUJpezj1HFteVI31PanHNiXbpQyInz2x/5sq+4kCa+A3mRoFiXYjEfJNFynhMZzQj6bhSr8IlW+3NXznDEqWqww9KF7LJA3C+lSSRJadNB8Em4fe14z/5qS5dWbreyDPvCtuykImLv68Xar3yYtVtVyr7FRebX+HrDhQGoUsWwmuWk+wTNgP+SjunfUIwSAlPVs1xHN69Ckw3SHH4e3CKCATTsBWnMIDJ9DDY4CY2yFa5acPGORo1Q0NNP2qcjtQMdVKzXyCNe4/kXvQC7yfyr3oPNP49rH8tnf51lFVvFEP4V0gjmkUJT2imTAARDC/ywxg2RLhSmG0BLBHjAVapN6Vkvz3unIIXti6UkT+cWJWf3rVQIufZSolcNcJsWSndY62U2MSXPVEbdkQN+zpRG6oBpn1JDhtiuhBEThs5gT3Bux3bAcjZB5BlWkBziIlNiNl3Y2adBTnlMJvoTD4BOtq3Z46J3/uy0zWroZmdlryGfRl+BnZ0rzvIVk3rHtawJ0kZ+eiD3ZRs39yUet6UcMebUrHZ0ZZVMFudoztY66pjm5D86A72dDoYm7jx6A7W+zgMK8HN/5IZdIKPlhk0Sfi+sHXOwttaYVPT8DfOWeYGPavJjqOic9rcoLohM+h0Q8c9D3TcNnTOMTf4HJ2hbnSwQacnOt55oKP+flCgE30CdJB2dhzDTk92/PNgx29j5xzT6go7ukM2x+x2erKDO7ODdbKDW9hRY42P/9wE+yd7biKK1Z8NdnW1v2zg8T8=
--------------------------------------------------------------------------------
/notebooks/bytopic/TestingInPython/PyCharmIDECode.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/TestingInPython/PyCharmIDECode.png
--------------------------------------------------------------------------------
/notebooks/bytopic/TestingInPython/PyCharmTestResults.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/TestingInPython/PyCharmTestResults.png
--------------------------------------------------------------------------------
/notebooks/bytopic/dask/03_distributed.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Distributed \n"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "toc": true
14 | },
15 | "source": [
16 | "
Table of Contents \n",
17 | ""
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "## Distributed Cluster\n",
25 | "\n",
26 | "As we have seen so far, Dask allows you to simply construct graphs of tasks with dependencies, as well as have graphs created automatically for you using functional, Numpy syntax on data collections. None of this would be very useful, if there weren't also a way to execute these graphs, in a parallel and memory-aware way. So far we have been calling `thing.compute()` or `dask.compute(thing)` without worrying what this entails. Now we will discuss the options available for that execution, and in particular, the distributed scheduler, which comes with additional functionality."
27 | ]
28 | },
29 | {
30 | "cell_type": "markdown",
31 | "metadata": {},
32 | "source": [
33 | "## Create and Connect to Dask Distributed Cluster"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "Let's begin by importing `Client` and `LocalCluster` objects/classes"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": null,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "from dask.distributed import Client, LocalCluster"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": null,
55 | "metadata": {},
56 | "outputs": [],
57 | "source": [
58 | "# Setup a local cluster.\n",
59 | "# By default this sets up 1 worker per core\n",
60 | "cluster = LocalCluster() \n",
61 | "cluster"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "☝️ Don't forget to click the link above to view the scheduler dashboard! (you may wish to have both the notebook and dashboard side-by-side)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "client = Client(cluster) # Connect to a Dask cluster in order to submit computation\n",
78 | "client"
79 | ]
80 | },
81 | {
82 | "cell_type": "markdown",
83 | "metadata": {},
84 | "source": [
85 | "## Perform computation on a dask array"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "import dask.array as da\n",
95 | "import numpy as np\n",
96 | "from matplotlib import pyplot as plt\n",
97 | "%matplotlib inline"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": null,
103 | "metadata": {},
104 | "outputs": [],
105 | "source": [
106 | "bigshape = (500, 2400, 3600)\n",
107 | "chunk_shape = (10, 1200, 1800)\n",
108 | "big_ones = da.ones(bigshape, chunks=chunk_shape)\n",
109 | "big_ones"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
118 | "big_calc = (big_ones * big_ones[::-1, ::-1]).mean()\n",
119 | "big_calc"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": null,
125 | "metadata": {},
126 | "outputs": [],
127 | "source": [
128 | "%time big_calc.compute()"
129 | ]
130 | },
131 | {
132 | "cell_type": "markdown",
133 | "metadata": {},
134 | "source": [
135 | "**Create a histogram**"
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": null,
141 | "metadata": {},
142 | "outputs": [],
143 | "source": [
144 | "random_values = da.random.normal(size=(1e8,), chunks=(20e6,))\n",
145 | "hist, bins = da.histogram(random_values, bins=10, range=[-5, 5]) \n",
146 | "random_values"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "metadata": {},
153 | "outputs": [],
154 | "source": [
155 | "hist"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": [
164 | "hist.visualize()"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": null,
170 | "metadata": {},
171 | "outputs": [],
172 | "source": [
173 | "%%time \n",
174 | "x = 0.5 * (bins[1:] + bins[:-1])\n",
175 | "width = np.diff(bins)\n",
176 | "plt.bar(x, hist, width);"
177 | ]
178 | },
179 | {
180 | "cell_type": "markdown",
181 | "metadata": {},
182 | "source": [
183 | "## Going Further"
184 | ]
185 | },
186 | {
187 | "cell_type": "markdown",
188 | "metadata": {},
189 | "source": [
190 | "- [Dask Tutorial on Distributed](https://github.com/dask/dask-tutorial/blob/master/05_distributed.ipynb)\n",
191 | "- [Dask Tutorial on Advanced Distributed](https://github.com/dask/dask-tutorial/blob/master/06_distributed_advanced.ipynb)"
192 | ]
193 | },
194 | {
195 | "cell_type": "markdown",
196 | "metadata": {},
197 | "source": [
198 | ""
202 | ]
203 | }
204 | ],
205 | "metadata": {
206 | "kernelspec": {
207 | "display_name": "Python [conda env:python-tutorial]",
208 | "language": "python",
209 | "name": "conda-env-python-tutorial-py"
210 | },
211 | "language_info": {
212 | "codemirror_mode": {
213 | "name": "ipython",
214 | "version": 3
215 | },
216 | "file_extension": ".py",
217 | "mimetype": "text/x-python",
218 | "name": "python",
219 | "nbconvert_exporter": "python",
220 | "pygments_lexer": "ipython3",
221 | "version": "3.7.3"
222 | },
223 | "toc": {
224 | "base_numbering": 1,
225 | "nav_menu": {},
226 | "number_sections": true,
227 | "sideBar": true,
228 | "skip_h1_title": false,
229 | "title_cell": "Table of Contents",
230 | "title_sidebar": "Contents",
231 | "toc_cell": true,
232 | "toc_position": {},
233 | "toc_section_display": true,
234 | "toc_window_display": true
235 | }
236 | },
237 | "nbformat": 4,
238 | "nbformat_minor": 2
239 | }
240 |
--------------------------------------------------------------------------------
/notebooks/bytopic/dask/solutions/02_dask_arrays_mean.py:
--------------------------------------------------------------------------------
1 | ones.mean(axis=[1, 2]).compute()
2 |
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/00_intro.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Welcome to the Visualization Lecture\n",
8 | "\n",
9 | "Learning Objectives:\n",
10 | "\n",
11 | " - Customizing plots with matplotlib\n",
12 | " - Plotting on maps with cartopy"
13 | ]
14 | },
15 | {
16 | "cell_type": "markdown",
17 | "metadata": {},
18 | "source": [
19 | ""
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": null,
27 | "metadata": {},
28 | "outputs": [],
29 | "source": []
30 | }
31 | ],
32 | "metadata": {
33 | "kernelspec": {
34 | "display_name": "Python 3",
35 | "language": "python",
36 | "name": "python3"
37 | },
38 | "language_info": {
39 | "codemirror_mode": {
40 | "name": "ipython",
41 | "version": 3
42 | },
43 | "file_extension": ".py",
44 | "mimetype": "text/x-python",
45 | "name": "python",
46 | "nbconvert_exporter": "python",
47 | "pygments_lexer": "ipython3",
48 | "version": "3.7.1"
49 | }
50 | },
51 | "nbformat": 4,
52 | "nbformat_minor": 2
53 | }
54 |
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/img/anatomy-of-a-figure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/data-visualization/img/anatomy-of-a-figure.png
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/img/hv-gv-bk-hvplot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/data-visualization/img/hv-gv-bk-hvplot.png
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/img/landscape_hv_nx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/data-visualization/img/landscape_hv_nx.png
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/img/landscape_hv_nx_pyviz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/data-visualization/img/landscape_hv_nx_pyviz.png
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/img/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/data-visualization/img/workflow.png
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/solutions/color_scatter.py:
--------------------------------------------------------------------------------
1 | fig = plt.figure(figsize=(10, 6))
2 | ax = fig.add_subplot(1, 1, 1)
3 |
4 | ax.plot([285, 320], [285, 320], color='black', linestyle='--')
5 | s = ax.scatter(temps, temps_1000, c= temps - temps_1000, cmap='bwr', vmin=-5, vmax=5)
6 | fig.colorbar(s)
7 |
8 | ax.set_xlabel('Temperature (surface)')
9 | ax.set_ylabel('Temperature (1000 hPa)')
10 | ax.set_title('Temperature Cross Plot')
11 | ax.grid(True)
12 |
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/solutions/contourf_contour.py:
--------------------------------------------------------------------------------
1 | fig, ax = plt.subplots()
2 | cf = ax.contourf(X, Y, Z, levels=np.arange(-2, 2, 0.5), cmap='PiYG', extend='both')
3 | cl = ax.contour(X, Y, Z, levels=np.arange(-2, 2, 0.5), colors='black')
4 | ax.clabel(cl)
5 | plt.colorbar(cf)
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/solutions/map.py:
--------------------------------------------------------------------------------
1 | field, lon = add_cyclic_point(ds.sst[0, :, :], coord=ds.lon)
2 | lat = ds.lat
3 |
4 | # kludge for cyclic issue
5 | lon[-1] = lon[-1] + 1e-4
6 |
7 | levels = np.arange(-2, 31., 1)
8 |
9 | fig = plt.figure(figsize=(12, 8))
10 | ax = fig.add_subplot(1, 1, 1, projection=ccrs.Robinson(central_longitude=305.0))
11 |
12 | # filled contours
13 | cf = ax.contourf(lon, lat, field, levels=levels,
14 | cmap='plasma', transform=ccrs.PlateCarree());
15 |
16 | # contour lines
17 | cs = ax.contour(lon, lat, field, colors='k', levels=levels, linewidths=0.5,
18 | transform=ccrs.PlateCarree())
19 |
20 | # add contour labels
21 | lb = plt.clabel(cs, fontsize=6, inline=True, fmt='%r');
22 |
23 | # land
24 | land = ax.add_feature(
25 | cartopy.feature.NaturalEarthFeature('physical','land','110m', facecolor='black'))
26 |
27 | # colorbar and labels
28 | cb = plt.colorbar(cf, shrink=0.5)
29 | cb.ax.set_title('°C')
30 | ax.set_title('SST');
--------------------------------------------------------------------------------
/notebooks/bytopic/data-visualization/solutions/subplots.py:
--------------------------------------------------------------------------------
1 | fig = plt.figure(figsize=(10, 6))
2 | ax = fig.add_subplot(1, 2, 1)
3 |
4 | # Specify how our lines should look
5 | ax.plot(times, temps, color='tab:red', label='Temperature (surface)')
6 | ax.plot(times, temps_1000, color='tab:red', linestyle=':',
7 | label='Temperature (isobaric level)')
8 |
9 | # Same as above
10 | ax.set_xlabel('Time')
11 | ax.set_ylabel('Temperature')
12 | ax.set_title('Temperature Forecast')
13 | ax.grid(True)
14 | ax.legend(loc='upper left')
15 |
16 | ax2 = fig.add_subplot(1, 2, 2, sharex=ax, sharey=ax)
17 | ax2.plot(times, dewpoint, color='tab:green', label='Dewpoint (surface)')
18 | ax2.plot(times, dewpoint_1000, color='tab:green', linestyle=':', marker='o',
19 | label='Dewpoint (isobaric level)')
20 |
21 | ax2.set_xlabel('Time')
22 | ax2.set_ylabel('Dewpoint')
23 | ax2.set_title('Dewpoint Forecast')
24 | ax2.grid(True)
25 | ax2.legend(loc='upper left')
26 | ax2.set_ylim(257, 312)
27 | ax2.set_xlim(95, 162)
28 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/git/01_what_is_git.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# What is Git?"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {
14 | "toc": true
15 | },
16 | "source": [
17 | "Table of Contents \n",
18 | ""
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "First and foremost, Git is a Version Control System (VCS). That is, it is something that\n",
26 | "tracks _changes_ in files (or directory structures) incrementally. The files (and directories)\n",
27 | "that get tracked are contained in a _repository_, and each change that you _commit_ to the \n",
28 | "repository gets saved, allowing you to go back to a previous state of the files in your\n",
29 | "repository. This is the basic definition of a VCS."
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "Git, specifically, is a _distributed_ VCS, as opposed to a _centralized_ VCS. What's the\n",
37 | "difference? A centralized VCS is typified by a _single_, _central_ _server_ that hosts your\n",
38 | "repository. With a centralized VCS, changes made by one person on their personal computer\n",
39 | "(for example) must be _pushed_ to the central server in order to be saved in the repository.\n",
40 | "Hence, there is always one, and only one, _true_ repository, which is sitting on the server."
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "Distributed VCSes, on the other hand, don't have a central server. That means, as in the\n",
48 | "case of Git, that changes made by one person on their personal computer get saved to _their\n",
49 | "own copy_ of the \"true\" repository. ...But then which repository is the \"true\" repository?\n",
50 | "If there are multiple copies of the same repository floating around the interwebs, which one\n",
51 | "is the one that can be _trusted_? Well... They _all_ can be trusted, because each copy\n",
52 | "of the repository is a self-consistent, stand-alone repository in its own right. And you can\n",
53 | "always bring multiple copies of the same repository together by _merging_ them."
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "We'll talk a little about what is Git and how to use it in the following examples. I encourage\n",
61 | "you to walk through these steps on your own."
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "**Note:** If you want to do the following on your laptop, you may want to check to see if you have Git installed on your laptop. From the command line, run:\n",
69 | "\n",
70 | "```bash\n",
71 | "git --version\n",
72 | "```\n",
73 | "\n",
74 | "\n",
75 | "In case you get `command not found` error, you probably need to install Git. You can do that [here](https://git-scm.com/downloads).\n",
76 | "\n",
77 | "\n",
78 | "If you want to do the following on Cheyenne, you will need to load the `git` module:\n",
79 | "\n",
80 | "```bash\n",
81 | "module load git\n",
82 | "```"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "metadata": {},
88 | "source": [
89 | ""
92 | ]
93 | }
94 | ],
95 | "metadata": {
96 | "kernelspec": {
97 | "display_name": "Bash",
98 | "language": "bash",
99 | "name": "bash"
100 | },
101 | "language_info": {
102 | "codemirror_mode": "shell",
103 | "file_extension": ".sh",
104 | "mimetype": "text/x-sh",
105 | "name": "bash"
106 | },
107 | "toc": {
108 | "base_numbering": 1,
109 | "nav_menu": {},
110 | "number_sections": true,
111 | "sideBar": true,
112 | "skip_h1_title": false,
113 | "title_cell": "Table of Contents",
114 | "title_sidebar": "Contents",
115 | "toc_cell": true,
116 | "toc_position": {},
117 | "toc_section_display": true,
118 | "toc_window_display": false
119 | }
120 | },
121 | "nbformat": 4,
122 | "nbformat_minor": 2
123 | }
124 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/github/01_what_is_github.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# What is GitHub?"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {
14 | "toc": true
15 | },
16 | "source": [
17 | "Table of Contents \n",
18 | ""
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "While Git is a _distributed_ Version Control System (VCS), there is nothing to prevent you from having a _central_ server to host your \"blessed\" Git repository. That is essentially what GitHub is: a hosting platform for your Git repositories.\n",
26 | "\n",
27 | "Repositories on GitHub are _bare_ repositories (see last section) where branches and commits cannot be checked out, but they can be cloned _ad infinitum_. Also, users with the correct privileges can _push_ their changes directly to GitHub repositories.\n",
28 | "\n",
29 | "But GitHub is a lot more than that!\n",
30 | "\n",
31 | "GitHub can\n",
32 | "\n",
33 | "- help you track issues,\n",
34 | "- provide a framework for \"Pull Requests\",\n",
35 | "- help you conduct code reviews,\n",
36 | "- help organize your project work,\n",
37 | "- host a website for you (github.io),\n",
38 | "- and much more.\n",
39 | "\n",
40 | "We can't go into all of this, but we'll take a look at the basics to get you started."
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "You can read up on more of what GitHub can do [here](https://guides.github.com/)."
48 | ]
49 | },
50 | {
51 | "cell_type": "markdown",
52 | "metadata": {},
53 | "source": [
54 | ""
58 | ]
59 | }
60 | ],
61 | "metadata": {
62 | "kernelspec": {
63 | "display_name": "Python 3",
64 | "language": "python",
65 | "name": "python3"
66 | },
67 | "language_info": {
68 | "codemirror_mode": {
69 | "name": "ipython",
70 | "version": 3
71 | },
72 | "file_extension": ".py",
73 | "mimetype": "text/x-python",
74 | "name": "python",
75 | "nbconvert_exporter": "python",
76 | "pygments_lexer": "ipython3",
77 | "version": "3.7.3"
78 | },
79 | "toc": {
80 | "base_numbering": 1,
81 | "nav_menu": {},
82 | "number_sections": true,
83 | "sideBar": true,
84 | "skip_h1_title": false,
85 | "title_cell": "Table of Contents",
86 | "title_sidebar": "Contents",
87 | "toc_cell": true,
88 | "toc_position": {},
89 | "toc_section_display": true,
90 | "toc_window_display": true
91 | }
92 | },
93 | "nbformat": 4,
94 | "nbformat_minor": 2
95 | }
96 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/github/02_getting_started_with_github.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# Getting Started with GitHub"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {
14 | "toc": true
15 | },
16 | "source": [
17 | "Table of Contents \n",
18 | ""
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "The first thing you will need is a GitHub account. If you don't already have one, go to [github.com](https://github.com)."
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "Next, you need to make sure that the email address you have associated with your Git installation (i.e., `git config`) is the same as the email address associated with your GitHub account. _This is how GitHub knows it is you!_"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "git config --global --get user.email"
42 | ]
43 | },
44 | {
45 | "cell_type": "markdown",
46 | "metadata": {},
47 | "source": [
48 | "If the previous cell doesn't return an email address that matches the email address you used for your GitHub account, then you should change it with the following line:\n",
49 | "\n",
50 | "```bash\n",
51 | "git config --global user.email you@domain.example.com\n",
52 | "```"
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": null,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": []
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "And that's about all you need to do to get started with GitHub. We'll be going back and forth between the [GitHub website](https://github.com) and wherever you have Git installed for the remainder of this section, so you might want to bring up GitHub in another tab of your browser."
67 | ]
68 | },
69 | {
70 | "cell_type": "markdown",
71 | "metadata": {},
72 | "source": [
73 | "\n",
74 | ""
78 | ]
79 | }
80 | ],
81 | "metadata": {
82 | "kernelspec": {
83 | "display_name": "Bash",
84 | "language": "bash",
85 | "name": "bash"
86 | },
87 | "language_info": {
88 | "codemirror_mode": "shell",
89 | "file_extension": ".sh",
90 | "mimetype": "text/x-sh",
91 | "name": "bash"
92 | },
93 | "toc": {
94 | "base_numbering": 1,
95 | "nav_menu": {},
96 | "number_sections": true,
97 | "sideBar": true,
98 | "skip_h1_title": false,
99 | "title_cell": "Table of Contents",
100 | "title_sidebar": "Contents",
101 | "toc_cell": true,
102 | "toc_position": {},
103 | "toc_section_display": true,
104 | "toc_window_display": true
105 | }
106 | },
107 | "nbformat": 4,
108 | "nbformat_minor": 2
109 | }
110 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/github/04_clones.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "toc": true
7 | },
8 | "source": [
9 | "Table of Contents \n",
10 | ""
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "\n",
18 | "# Clones"
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "## Learning Objectives"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "- Learn how to clone a remote GitHub repository"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "## Cloning a GitHub Repository"
40 | ]
41 | },
42 | {
43 | "cell_type": "markdown",
44 | "metadata": {},
45 | "source": [
46 | "Now, let's go back to our `example` repository's home page.\n",
47 | "\n",
48 | "**Note:** You can also just as easily put into your browser the URL:\n",
49 | "\n",
50 | " https://github.com/[username]/example\n",
51 | "\n",
52 | "Now, let's click on the \"Clone or download\" button and then copy the URL for this repository onto your clipboard.\n",
53 | "\n",
54 | "Next, let's create a clone of this repository where we currently are located (in this notebook)."
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": null,
60 | "metadata": {},
61 | "outputs": [],
62 | "source": [
63 | "cd"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "if [ -d example ]; then\n",
73 | " rm -rf example\n",
74 | "fi"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {},
81 | "outputs": [],
82 | "source": [
83 | "git clone https://github.com/kmpaul/example.git"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": null,
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "cd example"
93 | ]
94 | },
95 | {
96 | "cell_type": "markdown",
97 | "metadata": {},
98 | "source": [
99 | "Let's add a new line to our `README.md` file."
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": null,
105 | "metadata": {},
106 | "outputs": [],
107 | "source": [
108 | "echo \"Really, this is just an example.\" >> README.md"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "Now we can commit this change to our clone."
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "metadata": {},
122 | "outputs": [],
123 | "source": [
124 | "git add README.md\n",
125 | "git commit -m \"Emphatically stating that this is just an example.\""
126 | ]
127 | },
128 | {
129 | "cell_type": "markdown",
130 | "metadata": {},
131 | "source": [
132 | "And finally, we can `push` our changes (without any complications!) to GitHub.\n",
133 | "\n",
134 | "**Note:** If this is your first time pushing to GitHub, then it will try to authenticate with your username and password. This doesn't work with Jupyter Notebook, so you may have to do this step in a separate terminal."
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": null,
140 | "metadata": {},
141 | "outputs": [],
142 | "source": [
143 | "git push"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {},
149 | "source": [
150 | "And we're done! If you go to your GitHub repository's home page, you can click the \"commits\" link to see your new commit, and you should see the change added to the `README.md` file."
151 | ]
152 | },
153 | {
154 | "cell_type": "markdown",
155 | "metadata": {},
156 | "source": [
157 | ""
161 | ]
162 | }
163 | ],
164 | "metadata": {
165 | "kernelspec": {
166 | "display_name": "Bash",
167 | "language": "bash",
168 | "name": "bash"
169 | },
170 | "language_info": {
171 | "codemirror_mode": "shell",
172 | "file_extension": ".sh",
173 | "mimetype": "text/x-sh",
174 | "name": "bash"
175 | },
176 | "toc": {
177 | "base_numbering": 1,
178 | "nav_menu": {},
179 | "number_sections": true,
180 | "sideBar": true,
181 | "skip_h1_title": false,
182 | "title_cell": "Table of Contents",
183 | "title_sidebar": "Contents",
184 | "toc_cell": true,
185 | "toc_position": {},
186 | "toc_section_display": true,
187 | "toc_window_display": true
188 | }
189 | },
190 | "nbformat": 4,
191 | "nbformat_minor": 2
192 | }
193 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/github/05_forks.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# Forks & Pull Requests"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {
14 | "toc": true
15 | },
16 | "source": [
17 | "Table of Contents \n",
18 | ""
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "## Learning Objectives"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "- Learn how to fork a GitHub repository\n",
33 | "- Learn how to create a pull request"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "## Forks"
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "\"Forks\" are just clones hosted on GitHub, essentially.\n",
48 | "\n",
49 | "We have described the \"branch-merge\" technique already (in the Git section) for keeping multiple contributors from stomping on each other's work. That technique works very well when everyone who is contributing has permissions to contribute to the repository. However, for any number of reasons, constantly adding users as \"contributors\" (i.e., giving them permissions to write to the repository) may not be the most desirable thing to do.\n",
50 | "\n",
51 | "The main reason for this is that all users who have _write_ permissions to the repository can directly push their changes to the repository. And, as a good practice, all new changes to a repository should be _vetted_ (i.e., reviewed) by a trusted contributor. You can do this with branches and merges using GitHub's \"Pull Request\" capability, but even then a full-fledged contributor can just merge their changes without review. (Though you can set up your repository settings to require at least 1 reviewer before code is merged. We won't go into that detail here.)\n",
52 | "\n",
53 | "Another reason for this is because maintaining an up-to-date list of contributors to your code can be annoying.\n",
54 | "\n",
55 | "And a third reason is because _you don't have to_!\n",
56 | "\n",
57 | "Why? Because of the \"Fork\" / \"Pull Request\" mechanism in GitHub."
58 | ]
59 | },
60 | {
61 | "cell_type": "markdown",
62 | "metadata": {},
63 | "source": [
64 | "### Step 1\n",
65 | "\n",
66 | "Let's create our first fork by heading to this repository:\n",
67 | "\n",
68 | " [https://github.com/ncar/forkme](https://github.com/ncar/forkme)\n",
69 | "\n",
70 | "And click the \"Fork\" button at the top-right of the page. Then, select where you want to place your new Fork (your personal space, for example). Now, you should (after a short while thinking) be on the home page of your new forked repository."
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "### Step 2\n",
78 | "\n",
79 | "Now, let's use the \"Create new file\" button on the fork repository's home page to create a new file. Name the file the same as your Cheyenne (or GitHub) username. Then, write your full name in the newly created file and commit this new file to your fork."
80 | ]
81 | },
82 | {
83 | "cell_type": "markdown",
84 | "metadata": {},
85 | "source": [
86 | "## Pull Requests\n",
87 | "\n",
88 | "Now that you have added a change to your fork, you can _request_ that the owners of the original repository accept your new changes by clicking on the \"New pull request\" button.\n",
89 | "\n",
90 | "You should see a page like this:\n",
91 | "\n",
92 | "---\n",
93 | "\n",
94 | "\n",
95 | "---\n",
96 | "\n",
97 | "Below the \"Comparing changes\" title, you can see that we are going to request that the `master` branch of our fork (`username/forkme`) be merged into the `master` branch of the main repository (`ncar/forkme`). Click the \"Create pull request\" button to actually put in the request. (You will need to generate a title for this pull request, and possibly a description of what you are proposing, and then click \"Create pull request\" again.)\n",
98 | "\n",
99 | "Notice that after doing this, we are now at the main repository's \"Pull Requests\" page for the pull request that we just created. There are a lot of things you can do here, such as click the \"Files changed\" tab to see what changes are actually going to be merged in (if this PR is accepted). Or, in the \"Conversation\" tab, you can have a discussion about the changes that are being proposed. Anyone with _write_ permissions to the main repository can merge your changes into the repository. Though, they may ask you to make additional changes for any number of reasons.\n",
100 | "\n",
101 | "To do this, all you need to do is make new commits to _your fork_. As long as the PR exists (and hasn't been merged, yet), any changes you make to your fork will be added (automatically) to your PR.\n",
102 | "\n",
103 | "Once you have satisfied all of the requirements of the owner of the main repository, it will (_should_) be merged in. And then you can delete your fork (just like you delete your merged branch)."
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "metadata": {},
109 | "source": [
110 | "## Going Further:\n",
111 | "\n",
112 | "\n",
113 | "- Creating a Pull Request from a Fork: https://help.github.com/en/articles/creating-a-pull-request-from-a-fork"
114 | ]
115 | },
116 | {
117 | "cell_type": "markdown",
118 | "metadata": {},
119 | "source": [
120 | ""
124 | ]
125 | }
126 | ],
127 | "metadata": {
128 | "kernelspec": {
129 | "display_name": "Bash",
130 | "language": "bash",
131 | "name": "bash"
132 | },
133 | "language_info": {
134 | "codemirror_mode": "shell",
135 | "file_extension": ".sh",
136 | "mimetype": "text/x-sh",
137 | "name": "bash"
138 | },
139 | "toc": {
140 | "base_numbering": 1,
141 | "nav_menu": {},
142 | "number_sections": true,
143 | "sideBar": true,
144 | "skip_h1_title": false,
145 | "title_cell": "Table of Contents",
146 | "title_sidebar": "Contents",
147 | "toc_cell": true,
148 | "toc_position": {},
149 | "toc_section_display": true,
150 | "toc_window_display": true
151 | }
152 | },
153 | "nbformat": 4,
154 | "nbformat_minor": 2
155 | }
156 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/github/06_integrations.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# Integrations"
9 | ]
10 | },
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {
14 | "toc": true
15 | },
16 | "source": [
17 | "Table of Contents \n",
18 | ""
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "## GitHub Integrations"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "A wonderful feature of GitHub is its ability to integrate with a wide variety of automated processes, such as Continuous Integration and Code Coverage!\n",
33 | "\n",
34 | "### Continuous Integration (CI)\n",
35 | "\n",
36 | "Continuous Integration (CI) allows you to use a free hosting service to run tests for your project code _automatically, every time a new PR is attempted_. That means, your test suite will be run \"somewhere on the cloud\", depending on the CI service you use (CircleCI, TravisCI, AppVeyor, etc.). Each of these CI services has advantages and disadvantages. For example, AppVeyor will test your code on a Windows or Linux machine (for free). TravisCI will let you test on Linux, MacOS, and Android (for free). Obviously, you can pay for additional options.\n",
37 | "\n",
38 | "### Code Coverage\n",
39 | "\n",
40 | "Code Coverage is a capability that allows you to test what _lines_ of your code are actually tested by your test suite. It is not perfect, but code coverage allows you to get a sense for whether there are any branches in your code that aren't being tested. Code coverage can identify the actual \"missed\" lines of code, and you can write tests to test that code.\n",
41 | "\n",
42 | "### Code Quality Checks\n",
43 | "\n",
44 | "There are a number of automated tools that let you check the _quality_ of your code, according to set standards. This includes things like code formatting standards and other such things.\n",
45 | "\n",
46 | "## And so much more...\n",
47 | "\n",
48 | "You can look at the [GitHub Marketplace](https://github.com/marketplace) to find integrations that you might be interested in using. Some integrations may require payment for certain services, while many others provide a _free_ level of service, usually to Open Source (i.e., _public_) GitHub repositories. (It's good to be open source!)"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "## Going Further\n",
56 | "\n",
57 | "- Continuous Integration: https://en.wikipedia.org/wiki/Continuous_integration"
58 | ]
59 | },
60 | {
61 | "cell_type": "markdown",
62 | "metadata": {},
63 | "source": [
64 | "\n",
65 | "
Previous: Forks
\n",
66 | "
"
67 | ]
68 | }
69 | ],
70 | "metadata": {
71 | "kernelspec": {
72 | "display_name": "Bash",
73 | "language": "bash",
74 | "name": "bash"
75 | },
76 | "language_info": {
77 | "codemirror_mode": "shell",
78 | "file_extension": ".sh",
79 | "mimetype": "text/x-sh",
80 | "name": "bash"
81 | },
82 | "toc": {
83 | "base_numbering": 1,
84 | "nav_menu": {},
85 | "number_sections": true,
86 | "sideBar": true,
87 | "skip_h1_title": false,
88 | "title_cell": "Table of Contents",
89 | "title_sidebar": "Contents",
90 | "toc_cell": true,
91 | "toc_position": {},
92 | "toc_section_display": true,
93 | "toc_window_display": true
94 | }
95 | },
96 | "nbformat": 4,
97 | "nbformat_minor": 2
98 | }
99 |
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/create_a_repository.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/create_a_repository.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/example_repo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/example_repo.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/git-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/git-logo.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/github-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/github-logo.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/github_log.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/github_log.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/github_plus_sign.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/github_plus_sign.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/github_prs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/github_prs.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/images/two_commits.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/git-workflows/images/two_commits.png
--------------------------------------------------------------------------------
/notebooks/bytopic/git-workflows/intro.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Git & GitHub\n",
8 | "\n",
9 | "Using a Version Control System (VCS) is one of the most powerful things that you can do\n",
10 | "as a developer of any level, whether you are a casual developer \"for fun\" or you have to\n",
11 | "develop for your job or you are a hard-core software engineer. Version Control Systems\n",
12 | "can _save your bum_ more than you like, certainly, but they can also be incredible\n",
13 | "educational tools that help you see how software develops and changes over time. There\n",
14 | "is only one reason you should need to _always_ use a VCS for everything you do,\n",
15 | "whether you are writing a paper for publication, writing small scripts for seemingly\n",
16 | "\"one-off\" tasks, or you are a full-fledged software engineer: a VCS will let you backtrack\n",
17 | "your work when you realize you've made a mistake. That is, as long as your work is\n",
18 | "_text-based_ content, VCSes provide you with an \"undo\" button for reverting those mistakes\n",
19 | "and getting back to a previous version. _Git_ is one of those VCSes, and if you want\n",
20 | "to use _GitHub_, it's the VCS you should use.\n",
21 | "\n",
22 | "But why GitHub? The answer is _open source software_ (OSS).\n",
23 | "\n",
24 | "GitHub is a _hosting_ platform for your Git _repositories_ (_i.e._, the things that store\n",
25 | "your version-controlled work). GitHub is free, as long as your _repositories_ are open\n",
26 | "and public. GitHub provides powerful search capabilities so that you can find other\n",
27 | "people's work on GitHub, and they can find yours. And that encourages people to _share_\n",
28 | "their work and ideas, which means that people don't constantly have to re-invent the\n",
29 | "wheel. It also encourages collaboration and communication with people working on the\n",
30 | "same things. In other words, GitHub enables open source software. Without something\n",
31 | "like GitHub, OSS would be severely limited.\n",
32 | "\n",
33 | "GitHub goes further than this, though: it also makes working with your Git repositories\n",
34 | "easier and faster. It deals with complications in Git that you (might) find annoying, and\n",
35 | "makes them \"push button.\"\n",
36 | "\n",
37 | "Together, Git and GitHub create a powerful platform that allows you take a tiny idea and\n",
38 | "turn it into a community effort.\n"
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | ""
48 | ]
49 | }
50 | ],
51 | "metadata": {
52 | "kernelspec": {
53 | "display_name": "Bash",
54 | "language": "bash",
55 | "name": "bash"
56 | },
57 | "language_info": {
58 | "codemirror_mode": "shell",
59 | "file_extension": ".sh",
60 | "mimetype": "text/x-sh",
61 | "name": "bash"
62 | },
63 | "toc": {
64 | "base_numbering": 1,
65 | "nav_menu": {},
66 | "number_sections": true,
67 | "sideBar": true,
68 | "skip_h1_title": false,
69 | "title_cell": "Table of Contents",
70 | "title_sidebar": "Contents",
71 | "toc_cell": false,
72 | "toc_position": {},
73 | "toc_section_display": true,
74 | "toc_window_display": false
75 | }
76 | },
77 | "nbformat": 4,
78 | "nbformat_minor": 2
79 | }
80 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/MetPy_breakdown.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/metpy/MetPy_breakdown.png
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/QG_data.py:
--------------------------------------------------------------------------------
1 | # Remaining variables needed to compute QG Omega forcing terms
2 | # NOTE(review): assumes `ds` (an xarray Dataset), `units`, and `vtime` are
3 | # already defined by earlier notebook cells -- confirm before running standalone.
4 | # 500-hPa geopotential height at the valid time
5 | hght_500 = ds.Geopotential_height_isobaric.metpy.sel(vertical=500 * units.hPa,
6 |                                                      time=vtime)
7 | # 500-hPa wind components
8 | uwnd_500 = ds['u-component_of_wind_isobaric'].metpy.sel(vertical=500 * units.hPa,
9 |                                                         time=vtime)
10 | vwnd_500 = ds['v-component_of_wind_isobaric'].metpy.sel(vertical=500 * units.hPa,
11 |                                                         time=vtime)
12 | # 900-hPa wind components
13 | uwnd_900 = ds['u-component_of_wind_isobaric'].metpy.sel(vertical=900 * units.hPa,
14 |                                                         time=vtime)
15 | vwnd_900 = ds['v-component_of_wind_isobaric'].metpy.sel(vertical=900 * units.hPa,
16 |                                                         time=vtime)
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/distance.py:
--------------------------------------------------------------------------------
1 | # Unit-aware arithmetic: multiplying quantities carries the units through
2 | speed = 25 * units.knots
3 | time = 1 * units.fortnight
4 | distance = speed * time
5 | print(distance.to('furlongs'))  # convert the result to furlongs for display
5 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/hodograph_preprocessing.py:
--------------------------------------------------------------------------------
1 | # Calculate the height above ground level (AGL) by subtracting the
2 | # first (surface) height from every level
3 | sounding['height_agl'] = sounding['height'] - sounding['height'][0]
4 |
5 | # Make an array of segment boundaries - don't forget units!
6 | boundaries = [0, 1, 3, 5, 8] * units.km
7 |
8 | # Make a list of colors for the segments (one color per interval between boundaries)
9 | colors = ['tab:red', 'tab:green', 'tab:blue', 'tab:olive']
9 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/hodograph_segmented.py:
--------------------------------------------------------------------------------
1 | # Create figure/axis
2 | fig, ax = plt.subplots(1, 1, figsize=(6, 6))
3 |
4 | # Create a hodograph object/fiducial lines
5 | h = Hodograph(ax, component_range=60.)
6 | h.add_grid(increment=20)
7 |
8 | # Plot the data
9 | l = h.plot_colormapped(sounding['u_wind'],
10 | sounding['v_wind'],
11 | sounding['height_agl'],
12 | bounds=boundaries, colors=colors)
13 |
14 | # BONUS - add a colorbar
15 | plt.colorbar(l)
16 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/qg_omega_total_fig.py:
--------------------------------------------------------------------------------
1 | fig=plt.figure(1, figsize=(15.,12.))
2 |
3 | # Upper-Left Panel
4 | ax=plt.subplot(111,projection=plotproj)
5 | ax.set_extent([-125.,-73,25.,50.],ccrs.PlateCarree())
6 | ax.add_feature(cfeature.COASTLINE, linewidth=0.5)
7 | ax.add_feature(cfeature.STATES,linewidth=0.5)
8 |
9 | # Contour #1
10 | cs = ax.contour(lons, lats, hght_700s, clev_hght_700,colors='k',
11 | linewidths=1.5, linestyles='solid', transform=dataproj)
12 | plt.clabel(cs, fontsize=10, inline=1, inline_spacing=3, fmt='%i',
13 | rightside_up=True, use_clabeltext=True)
14 |
15 | # Contour #2
16 | cs2 = ax.contour(lons, lats, tmpc_700s, clev_tmpc_700, colors='grey',
17 | linewidths=1.0, linestyles='dotted', transform=dataproj)
18 | plt.clabel(cs2, fontsize=10, inline=1, inline_spacing=3, fmt='%d',
19 | rightside_up=True, use_clabeltext=True)
20 |
21 | # Colorfill
22 | cf = ax.contourf(lons, lats, (term_A+term_B)*10**12, clev_omega,
23 | cmap=plt.cm.RdYlBu_r, extend='both', transform=dataproj)
24 | plt.colorbar(cf, orientation='horizontal', pad=0.0, aspect=50, extendrect=True)
25 |
26 | # Vector
27 | ax.barbs(lons.m, lats.m, uwnd_700s.to('kts').m, vwnd_700s.to('kts').m,
28 | regrid_shape=15, transform=dataproj)
29 |
30 | # Titles
31 | plt.title('700-hPa Geopotential Heights, Temperature (C),\n'
32 | 'Winds (kt), and QG Omega Forcings ($*10^{12}$ kg m$^{-3}$ s$^{-3}$)',loc='left')
33 | plt.title('VALID: ' + vtime_str, loc='right')
34 |
35 | plt.show()
36 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/skewt_cape_cin.py:
--------------------------------------------------------------------------------
1 | # Calculate surface-based CAPE/CIN from the observed sounding
2 | surface_cape, surface_cin = mpcalc.surface_based_cape_cin(sounding['pressure'],
3 |                                                           sounding['temperature'],
4 |                                                           sounding['dewpoint'])
5 |
6 | # Print CAPE and CIN
7 | print('CAPE: {}\tCIN: {}'.format(surface_cape, surface_cin))
8 |
9 | # Shade CAPE (uses the parcel profile computed earlier into sounding['profile'])
10 | skew.shade_cape(sounding['pressure'],
11 |                 sounding['temperature'],
12 |                 sounding['profile'])
13 |
14 | # Shade CIN
15 | skew.shade_cin(sounding['pressure'],
16 |                sounding['temperature'],
17 |                sounding['profile'])
18 |
19 | # Redisplay the figure (bare expression re-renders it in the notebook output)
20 | fig
21 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/skewt_get_data.py:
--------------------------------------------------------------------------------
1 | # Import the Wyoming simple web service upper air object
2 | from siphon.simplewebservice.wyoming import WyomingUpperAir
3 |
4 | # Create the datetime and station variables you'll need
5 | request_time = datetime(2011, 4, 14, 18)
6 | station = 'OUN'
7 |
8 | # Make the request for the data (network call to the Wyoming upper-air archive)
9 | df = WyomingUpperAir.request_data(request_time, station)
10 |
11 | # Attach units to the data (DataFrame -> dict of unit-tagged arrays)
12 | sounding = pandas_dataframe_to_unit_arrays(df)
13 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/skewt_make_figure.py:
--------------------------------------------------------------------------------
1 | # Make a figure
2 | fig = plt.figure(figsize=(10, 10))
3 |
4 | # Make a SkewT object attached to the figure
5 | skew = SkewT(fig)
6 |
7 | # Plot the temperature (red) and dewpoint (green) traces against pressure
8 | skew.plot(sounding['pressure'], sounding['temperature'], linewidth=2, color='tab:red')
9 | skew.plot(sounding['pressure'], sounding['dewpoint'], linewidth=2, color='tab:green')
10 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/skewt_thermo.py:
--------------------------------------------------------------------------------
1 | # Get data for the sounding
2 | # NOTE(review): `df` is fetched here but never used below -- everything else
3 | # reads the pre-existing `sounding` dict. Presumably another cell does
4 | # `sounding = pandas_dataframe_to_unit_arrays(df)`; confirm against the notebook.
5 | df = WyomingUpperAir.request_data(datetime(1999, 5, 3, 12), 'OUN')
6 |
7 | # Calculate the ideal surface parcel path
8 | sounding['profile'] = mpcalc.parcel_profile(sounding['pressure'],
9 |                                             sounding['temperature'][0],
10 |                                             sounding['dewpoint'][0]).to('degC')
11 |
12 | # Calculate the LCL (lifted condensation level)
13 | lcl_pressure, lcl_temperature = mpcalc.lcl(sounding['pressure'][0],
14 |                                            sounding['temperature'][0],
15 |                                            sounding['dewpoint'][0])
16 |
17 | # Calculate the LFC (level of free convection)
18 | lfc_pressure, lfc_temperature = mpcalc.lfc(sounding['pressure'],
19 |                                            sounding['temperature'],
20 |                                            sounding['dewpoint'])
21 |
22 | # Calculate the EL (equilibrium level)
23 | el_pressure, el_temperature = mpcalc.el(sounding['pressure'],
24 |                                         sounding['temperature'],
25 |                                         sounding['dewpoint'])
26 |
27 | # Create a new figure and SkewT object
28 | fig = plt.figure(figsize=(10, 10))
29 | skew = SkewT(fig)
30 |
31 | # Plot the profile and data
32 | skew.plot(sounding['pressure'], sounding['profile'], color='black')
33 | skew.plot(sounding['pressure'], sounding['temperature'], color='tab:red')
34 | skew.plot(sounding['pressure'], sounding['dewpoint'], color='tab:blue')
35 |
36 | # Plot the LCL, LFC, and EL as horizontal markers
37 | # NOTE(review): these rely on the truthiness of the returned quantities to
38 | # skip levels that were not found -- verify behavior when the value is NaN.
39 | if lcl_pressure:
40 |     skew.ax.plot(lcl_temperature, lcl_pressure, marker="_", color='orange', markersize=30, markeredgewidth=3)
41 |
42 | if lfc_pressure:
43 |     skew.ax.plot(lfc_temperature, lfc_pressure, marker="_", color='brown', markersize=30, markeredgewidth=3)
44 |
45 | if el_pressure:
46 |     skew.ax.plot(el_temperature, el_pressure, marker="_", color='blue', markersize=30, markeredgewidth=3)
47 |
48 | # Set axis limits (pressure decreases upward: 1000 hPa at bottom, 100 at top)
49 | skew.ax.set_xlim(-60, 30)
50 | skew.ax.set_ylim(1000, 100)
51 |
52 | # Add fiducial lines
53 | skew.plot_dry_adiabats()
54 | skew.plot_moist_adiabats()
55 | skew.plot_mixing_lines()
51 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/skewt_wind_fiducials.py:
--------------------------------------------------------------------------------
1 | # Plot wind barbs from the u/v components at each pressure level
2 | skew.plot_barbs(sounding['pressure'], sounding['u_wind'], sounding['v_wind'])
3 |
4 | # Add dry adiabats
5 | skew.plot_dry_adiabats()
6 |
7 | # Add moist adiabats
8 | skew.plot_moist_adiabats()
9 |
10 | # Add mixing ratio lines
11 | skew.plot_mixing_lines()
12 |
13 | # Redisplay figure (bare expression re-renders it in the notebook output)
14 | fig
15 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/temperature_change.py:
--------------------------------------------------------------------------------
1 | # delta_degF expresses a temperature *difference* (distinct from absolute degF)
2 | temperature_change_rate = -2.3 * units.delta_degF / (10 * units.minutes)
3 | temperature = 25 * units.degC
4 | dt = 1.5 * units.hours
5 | # Projected temperature after dt at the constant rate of change
6 | print(temperature + temperature_change_rate * dt)
5 |
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/term_B_calc.py:
--------------------------------------------------------------------------------
1 | # 700-hPa Temperature Advection
2 | tadv_700 = mpcalc.advection(tmpk_700s, (uwnd_700s, vwnd_700s), (dx, dy)).to_base_units()
3 | # Laplacian of Temperature Advection
4 | # NOTE(review): deltas are passed as (dy, dx) here but (dx, dy) above -- the
5 | # two mpcalc functions take spacing arguments in different orders; confirm.
6 | lap_tadv_700 = mpcalc.laplacian(tadv_700, deltas=(dy, dx))
7 |
8 | # Final term B calculation with constants
9 | term_B = (-Rd / (sigma * (700 * units.hPa)) * lap_tadv_700).to_base_units()
10 | print(term_B.units)
--------------------------------------------------------------------------------
/notebooks/bytopic/metpy/solutions/wind_speed.py:
--------------------------------------------------------------------------------
1 | # Wind speed magnitude from the u/v components
2 | speed = mpcalc.wind_speed(u, v)
3 | print(speed)
4 | print(speed.to('mph'))  # same quantity converted to miles per hour
4 |
--------------------------------------------------------------------------------
/notebooks/bytopic/numpy/images/agg.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/numpy/images/agg.jpeg
--------------------------------------------------------------------------------
/notebooks/bytopic/numpy/images/broadcasting.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/numpy/images/broadcasting.jpeg
--------------------------------------------------------------------------------
/notebooks/bytopic/numpy/images/column-major.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/numpy/images/column-major.jpeg
--------------------------------------------------------------------------------
/notebooks/bytopic/numpy/images/row-major.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/numpy/images/row-major.jpeg
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | hello-cesm-package
3 | ==================
4 |
5 | Example of a pure Python package used to illustrate how to create a Python package
3 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Jane Doe
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/MANIFEST.in:
--------------------------------------------------------------------------------
1 | # any additional files to include should be added here
2 |
3 | # For the license and changes files
4 | include LICENSE
5 | include CHANGELOG.rst
6 |
7 | # The tests
8 | recursive-include tests *.py
9 |
10 | # The docs
11 | recursive-include docs *.rst
12 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/README.md:
--------------------------------------------------------------------------------
1 | # hello-cesm-package
2 | Example of a pure Python package used to illustrate how to create a Python package
3 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/cesm_package/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.1"
2 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/cesm_package/climatologies.py:
--------------------------------------------------------------------------------
1 | """ Module for climatology functions """
2 |
3 |
4 | def compute_seasonal_climatology(dset):
5 | """Function to compute seasonal climatologies
6 | Parameters
7 | ----------
8 | dset : xr.Dataset, xr.DataArray
9 | xarray dataset, or xarray dataarray
10 |
11 | Returns
12 | -------
13 | Computed seasonal climatology
14 | """
15 |
16 | clim = dset.groupby("time.season").mean(dim="time")
17 | return clim
18 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/cesm_package/statistics.py:
--------------------------------------------------------------------------------
1 | """Module for statistic functions"""
2 |
3 |
4 | def compute_mean(dset, dims=None):
5 | """Compute Mean along specified dim
6 | Parameters
7 | ----------
8 | dset : xr.Dataset, xr.DataArray
9 | xarray dataset or xarray dataarray
10 |
11 | dims : list, default (None)
12 |
13 | list of dimensions to apply mean along
14 |
15 | Returns
16 | -------
17 | Dataset/DataArray with mean applied to specified dimensions
18 | """
19 |
20 | return dset.mean(dim=dims)
21 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/docs/overview.rst:
--------------------------------------------------------------------------------
1 | =========================
2 | There should be docs here
3 | =========================
4 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/setup.py:
--------------------------------------------------------------------------------
1 | """The setup script."""
2 |
3 |
4 | from setuptools import setup
5 |
6 | install_requires = [
7 | "xarray",
8 | "numpy",
9 | "matplotlib",
10 | ] # Whatever third-party libraries are required to use our package.
11 |
12 |
13 | long_description = """
14 | CESM data analysis package as an example of a
15 | python package from pre-existing code
16 | """
17 |
18 | setup(
19 | author="Alice Doe",
20 | author_email="alice@example.com",
21 | description="My CESM analysis package",
22 | install_requires=install_requires,
23 | license="MIT",
24 | long_description=long_description,
25 | keywords="ocean modeling",
26 | name="cesm-package",
27 | packages=["cesm_package"],
28 | url="https://github.com/github-user-name/project-name",
29 | version="0.1",
30 | zip_safe=False,
31 | )
32 |
--------------------------------------------------------------------------------
/notebooks/bytopic/packaging/hello-cesm-package/tests/test_statistics.py:
--------------------------------------------------------------------------------
1 | def test_sample():
2 | assert 2 == 2
3 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/00_introduction.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# The Basics of Python\n",
8 | "\n",
9 | "This tutorial is meant for people who already know a little something about programming and have used an interpreted language already, something like IDL, Matlab, NCL or R.\n",
10 | "\n",
11 | "We'll start by talking about \"objects\" in Python. We won't talk about how to create your own _custom_ object (which means being able to write your own `class`) here. We'll leave that for a different tutorial. Then, we'll go further into built-in data types and functions."
12 | ]
13 | },
14 | {
15 | "cell_type": "markdown",
16 | "metadata": {},
17 | "source": [
18 | "\n",
19 | "
Next: Objects
\n",
20 | "
"
21 | ]
22 | }
23 | ],
24 | "metadata": {
25 | "kernelspec": {
26 | "display_name": "Python [conda env:python-tutorial]",
27 | "language": "python",
28 | "name": "conda-env-python-tutorial-py"
29 | },
30 | "language_info": {
31 | "codemirror_mode": {
32 | "name": "ipython",
33 | "version": 3
34 | },
35 | "file_extension": ".py",
36 | "mimetype": "text/x-python",
37 | "name": "python",
38 | "nbconvert_exporter": "python",
39 | "pygments_lexer": "ipython3",
40 | "version": "3.7.3"
41 | }
42 | },
43 | "nbformat": 4,
44 | "nbformat_minor": 4
45 | }
46 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/02_operators.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# \"Arithmetic\" Operators\n",
8 | "\n",
9 | "Python uses common symbols (like any language) to represent certain operations. They are as follows:"
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "| Symbol | Name | Description |\n",
17 | "|--------|----------------|-------------------------------------------------|\n",
18 | "| `+` | Addition | addition, concatenation |\n",
19 | "| `-` | Subtraction | subtraction, differencing |\n",
20 | "| `*` | Multiplication | multiplication, duplication |\n",
21 | "| `/` | Division | division |\n",
22 | "| `%` | Modulus | modulus |\n",
23 | "| `**` | Exponent | \"power to\" |\n",
24 | "| `//` | Floor Division | integer division |"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | "# Comparison Operators\n",
32 | "\n",
33 | "These are operators to compare two objects. They return `True` or `False`."
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "| Symbol | Name | Description |\n",
41 | "|--------|---------|-------------|\n",
42 | "| `==` | Equality | If two things are equal |\n",
43 | "| `>` | Greater Than | If left is greater than right |\n",
44 | "| `<` | Less Than | If left is less than right |\n",
45 | "| `>=` | Greater Than or Equal | If left is greater than or equal to right |\n",
46 | "| `<=` | Less Than or Equal | If left is less than or equal to right |\n",
47 | "| `!=` | Not Equal | If left is not equal to right (Python 2 also allowed `<>`, which was removed in Python 3) |"
48 | ]
49 | },
50 | {
51 | "cell_type": "markdown",
52 | "metadata": {},
53 | "source": [
54 | "# Logical Operators"
55 | ]
56 | },
57 | {
58 | "cell_type": "markdown",
59 | "metadata": {},
60 | "source": [
61 | "## `and`\n",
62 | "\n",
63 | "There is the `and` operator to check if two conditions are both true."
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "True and False"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": null,
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "False and False"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "True and True"
91 | ]
92 | },
93 | {
94 | "cell_type": "markdown",
95 | "metadata": {},
96 | "source": [
97 | "## `or`\n",
98 | "\n",
99 | "And there is an equivalent `or` operator:"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": null,
105 | "metadata": {},
106 | "outputs": [],
107 | "source": [
108 | "True or False"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "## `not`\n",
116 | "\n",
117 | "There is also the `not` operation, which flips a bool to its opposite value:"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": null,
123 | "metadata": {},
124 | "outputs": [],
125 | "source": [
126 | "not True"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "metadata": {},
133 | "outputs": [],
134 | "source": [
135 | "not False"
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 | "# Assignment Operators\n",
143 | "\n",
144 | "Python also has a number of operators that can conveniently assign values to variables."
145 | ]
146 | },
147 | {
148 | "cell_type": "markdown",
149 | "metadata": {},
150 | "source": [
151 | "| Symbol | Name | Description |\n",
152 | "|--------|------------|-------------|\n",
153 | "| `=` | Assignment | Simple assignment |\n",
154 | "| `+=` | Add & Assign | `x += 2` is the same as `x = x + 2` |\n",
155 | "| `-=` | Subtract & Assign | `x -= 2` is the same as `x = x - 2` |\n",
156 | "| `*=` | Multiply & Assign | `x *= 2` is the same as `x = x * 2` |\n",
157 | "| `/=` | Divide & Assign | `x /= 2` is the same as `x = x / 2` |\n",
158 | "| `%=` | Modulus & Assign | `x %= 2` is the same as `x = x % 2` |\n",
159 | "| `**=` | Exponent & Assign | `x **= 2` is the same as `x = x ** 2` |\n",
160 | "| `//=` | Floor Divide & Assign | `x //= 2` is the same as `x = x // 2` |"
161 | ]
162 | },
163 | {
164 | "cell_type": "markdown",
165 | "metadata": {},
166 | "source": [
167 | ""
171 | ]
172 | }
173 | ],
174 | "metadata": {
175 | "kernelspec": {
176 | "display_name": "Python [conda env:python-tutorial]",
177 | "language": "python",
178 | "name": "conda-env-python-tutorial-py"
179 | },
180 | "language_info": {
181 | "codemirror_mode": {
182 | "name": "ipython",
183 | "version": 3
184 | },
185 | "file_extension": ".py",
186 | "mimetype": "text/x-python",
187 | "name": "python",
188 | "nbconvert_exporter": "python",
189 | "pygments_lexer": "ipython3",
190 | "version": "3.7.3"
191 | }
192 | },
193 | "nbformat": 4,
194 | "nbformat_minor": 4
195 | }
196 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/04_flow_control.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Flow Control\n",
8 | "\n",
9 | "Python has all of the flow control options that you are familiar with, such as `if` blocks, `for` loops, and `while` loops."
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "## `if` blocks\n",
17 | "\n",
18 | "To control the flow of code depending on a runtime condition, use an `if` block."
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": null,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "x = 5\n",
28 | "if x == 4:\n",
29 | " print('It is 4')\n",
30 | "elif x > 4:\n",
31 | " print(\"It's larger than 4.\")\n",
32 | "else:\n",
33 | " print(\"I wasn't expecting that.\")"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {},
39 | "source": [
40 | "## `while` loops\n",
41 | "\n",
42 | "The `while` loop is great for checking the value of a variable that is changing."
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "i = 9\n",
52 | "while i > 0:\n",
53 | " print(i)\n",
54 | " i -= 1"
55 | ]
56 | },
57 | {
58 | "cell_type": "markdown",
59 | "metadata": {},
60 | "source": [
61 | "## `for` loops\n",
62 | "\n",
63 | "And `for` loops are great for looping over the contents of a container."
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "d = {'a': 1, 'b': 2, 'c': 3, 'd': 4}\n",
73 | "\n",
74 | "for k in d:\n",
75 | " print(f'key = {k} and d[{k}] = {d[k]}')"
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": null,
81 | "metadata": {},
82 | "outputs": [],
83 | "source": [
84 | "for c in 'abcdefg':\n",
85 | " print(c)"
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "## Nested Loops\n",
93 | "\n",
94 | "You can nest loops inside of each other with further indentation."
95 | ]
96 | },
97 | {
98 | "cell_type": "code",
99 | "execution_count": null,
100 | "metadata": {},
101 | "outputs": [],
102 | "source": [
103 | "for row in [[1,2], [3,4]]:\n",
104 | " for i in row:\n",
105 | " print(i)"
106 | ]
107 | },
108 | {
109 | "cell_type": "markdown",
110 | "metadata": {},
111 | "source": [
112 | "And you can nest different kinds of loops inside of each other, too."
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | ""
123 | ]
124 | }
125 | ],
126 | "metadata": {
127 | "kernelspec": {
128 | "display_name": "Python [conda env:python-tutorial]",
129 | "language": "python",
130 | "name": "conda-env-python-tutorial-py"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 3
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython3",
142 | "version": "3.7.3"
143 | }
144 | },
145 | "nbformat": 4,
146 | "nbformat_minor": 4
147 | }
148 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/05_builtin_functions.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Builtin Functions\n",
8 | "\n",
9 | "Python has a number of useful builtin functions, too, in addition to the `print` and `help` functions."
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "## `range`\n",
17 | "\n",
18 | "This can be used to create lists or tuples or to loop over."
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": null,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "for i in range(4):\n",
28 | " print(i)"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "list(range(3))"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "tuple(range(2,5))"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "## `map`\n",
54 | "\n",
55 | "The `map` function can map a function to the elements of a container."
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "def f(i):\n",
65 | " return i + 2"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "l = [1,2,3,4]"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {},
81 | "outputs": [],
82 | "source": [
83 | "for i in map(f,l):\n",
84 | " print(i)"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": null,
90 | "metadata": {},
91 | "outputs": [],
92 | "source": [
93 | "list(map(f,l))"
94 | ]
95 | },
96 | {
97 | "cell_type": "markdown",
98 | "metadata": {},
99 | "source": [
100 | "## `filter`\n",
101 | "\n",
102 | "The `filter` function can efficiently filter the contents of a container based on a condition."
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "def g(i):\n",
112 | " return i > 2"
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": null,
118 | "metadata": {},
119 | "outputs": [],
120 | "source": [
121 | "l = [1,2,3,4]"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {},
128 | "outputs": [],
129 | "source": [
130 | "list(filter(g,l))"
131 | ]
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "metadata": {},
136 | "source": [
137 | "## `len`\n",
138 | "\n",
139 | "Length of a container"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "len(l)"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "## `max` and `min` and `sum`\n",
156 | "\n",
157 | "You can find the maximum and minimum values in a container, as well as the sum."
158 | ]
159 | },
160 | {
161 | "cell_type": "code",
162 | "execution_count": null,
163 | "metadata": {},
164 | "outputs": [],
165 | "source": [
166 | "max(l)"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "metadata": {},
173 | "outputs": [],
174 | "source": [
175 | "min(l)"
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "sum(l)"
185 | ]
186 | },
187 | {
188 | "cell_type": "markdown",
189 | "metadata": {},
190 | "source": [
191 | "Note that these functions often work with other types, too, as long as those types have comparison or addition operations defined on them!"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": null,
197 | "metadata": {},
198 | "outputs": [],
199 | "source": [
200 | "s = '123456789'\n",
201 | "s"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": null,
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
210 | "max(s)"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": null,
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "min(s)"
220 | ]
221 | },
222 | {
223 | "cell_type": "markdown",
224 | "metadata": {},
225 | "source": [
226 | "## `zip`\n",
227 | "\n",
228 | "The `zip` function can \"zip\" two lists together into a list of pairs, for example."
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "metadata": {},
235 | "outputs": [],
236 | "source": [
237 | "l = [1,2,3,4]\n",
238 | "s = ['a', 'b', 'c', 'd']"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": null,
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "for ij in zip(s,l):\n",
248 | " print(ij, type(ij))"
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": null,
254 | "metadata": {},
255 | "outputs": [],
256 | "source": [
257 | "dict(zip(s,l))"
258 | ]
259 | },
260 | {
261 | "cell_type": "markdown",
262 | "metadata": {},
263 | "source": [
264 | "There are a lot more builtin functions that you can learn about [here](https://docs.python.org/3/library/functions.html)."
265 | ]
266 | },
267 | {
268 | "cell_type": "markdown",
269 | "metadata": {},
270 | "source": [
271 | ""
275 | ]
276 | }
277 | ],
278 | "metadata": {
279 | "kernelspec": {
280 | "display_name": "Python [conda env:python-tutorial]",
281 | "language": "python",
282 | "name": "conda-env-python-tutorial-py"
283 | },
284 | "language_info": {
285 | "codemirror_mode": {
286 | "name": "ipython",
287 | "version": 3
288 | },
289 | "file_extension": ".py",
290 | "mimetype": "text/x-python",
291 | "name": "python",
292 | "nbconvert_exporter": "python",
293 | "pygments_lexer": "ipython3",
294 | "version": "3.7.3"
295 | }
296 | },
297 | "nbformat": 4,
298 | "nbformat_minor": 4
299 | }
300 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/06_one_liners.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# One Liners\n",
8 | "\n",
9 | "Python has a lot of shortcuts that make writing a lot of code fast and compact. These shortcuts can also make code easier to read, which is a big deal when you start sharing your code with others!"
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "## Comprehensions\n",
17 | "\n",
18 | "Many containers can be constructed quickly in one line using comprehensions."
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": null,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "l = [i+3 for i in range(5) if i < 4]\n",
28 | "l"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "d = {str(i):i*2 for i in range(5)}\n",
38 | "d"
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "## Conditional Assignment\n",
46 | "\n",
47 | "You can assign values based on a condition."
48 | ]
49 | },
50 | {
51 | "cell_type": "code",
52 | "execution_count": null,
53 | "metadata": {},
54 | "outputs": [],
55 | "source": [
56 | "t = True"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": null,
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "x = 1 if t else 2\n",
66 | "x"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "metadata": {},
73 | "outputs": [],
74 | "source": [
75 | "y = 1 if not t else 5\n",
76 | "y"
77 | ]
78 | },
79 | {
80 | "cell_type": "markdown",
81 | "metadata": {},
82 | "source": [
83 | "## Lambda Functions\n",
84 | "\n",
85 | "If you need a function (e.g., for a `map` function), but that is the only time you actually intend to use the function, then there is no need to `def` a function just for that purpose. You can use `lambda` functions, instead."
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "for i in map(lambda x: x*2, [1,2,3,4]):\n",
95 | " print(i)"
96 | ]
97 | },
98 | {
99 | "cell_type": "markdown",
100 | "metadata": {},
101 | "source": [
102 | ""
106 | ]
107 | }
108 | ],
109 | "metadata": {
110 | "kernelspec": {
111 | "display_name": "Python [conda env:python-tutorial]",
112 | "language": "python",
113 | "name": "conda-env-python-tutorial-py"
114 | },
115 | "language_info": {
116 | "codemirror_mode": {
117 | "name": "ipython",
118 | "version": 3
119 | },
120 | "file_extension": ".py",
121 | "mimetype": "text/x-python",
122 | "name": "python",
123 | "nbconvert_exporter": "python",
124 | "pygments_lexer": "ipython3",
125 | "version": "3.7.3"
126 | }
127 | },
128 | "nbformat": 4,
129 | "nbformat_minor": 4
130 | }
131 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/07_functions.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Functions\n",
8 | "\n",
9 | "We've already talked about functions, but there are many features of functions that are worth mentioning."
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "## Required Arguments\n",
17 | "\n",
18 | "Function arguments that do not have default values to them are _required_."
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": null,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "def f(a,b):\n",
28 | " print(a,b)"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "f(1)"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "f(2,3)"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "## Optional Arguments\n",
54 | "\n",
55 | "You can declare an optional argument by giving it a default value in the function definition."
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "def f(a, b, c=3, d=7):\n",
65 | " print(a,b,c,d)"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "f(1,2)"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {},
81 | "outputs": [],
82 | "source": [
83 | "f(1,4,8,9)"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": null,
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "f(1, 3, d=10)"
93 | ]
94 | },
95 | {
96 | "cell_type": "markdown",
97 | "metadata": {},
98 | "source": [
99 | "## Variable Length Arguments\n",
100 | "\n",
101 | "You can design a function that takes any number of arguments supplied."
102 | ]
103 | },
104 | {
105 | "cell_type": "code",
106 | "execution_count": null,
107 | "metadata": {},
108 | "outputs": [],
109 | "source": [
110 | "def f(a, b=2, *args):\n",
111 | " print(args)"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": null,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "f(1,3,4,5,6,7)"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": null,
126 | "metadata": {},
127 | "outputs": [],
128 | "source": [
129 | "l = [1,2,3,4,5]\n",
130 | "f(*l)"
131 | ]
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "metadata": {},
136 | "source": [
137 | "## Optional Keyword Arguments"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": null,
143 | "metadata": {},
144 | "outputs": [],
145 | "source": [
146 | "def f(a, b=2, *args, **kwargs):\n",
147 | " print(kwargs)"
148 | ]
149 | },
150 | {
151 | "cell_type": "code",
152 | "execution_count": null,
153 | "metadata": {},
154 | "outputs": [],
155 | "source": [
156 | "f(1, 2, c=4, e='a')"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": null,
162 | "metadata": {},
163 | "outputs": [],
164 | "source": [
165 | "d = {'abc': 5, 'lmn': [2,3,4]}\n",
166 | "f(1,2, **d)"
167 | ]
168 | },
169 | {
170 | "cell_type": "markdown",
171 | "metadata": {},
172 | "source": [
173 | "## Docstrings\n",
174 | "\n",
175 | "You can embed documentation into your function so that other people can understand how to use it."
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "def f(a,b=2):\n",
185 | " \"\"\"\n",
186 | " Print the 'a' and 'b' arguments passed to the function\n",
187 | " \n",
188 | " Arguments:\n",
189 | " a: Something\n",
190 | " b: Something else [defaults to 2]\n",
191 | " \"\"\"\n",
192 | " print(a,b)"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": null,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "help(f)"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": null,
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
210 | "f?"
211 | ]
212 | },
213 | {
214 | "cell_type": "markdown",
215 | "metadata": {},
216 | "source": [
217 | ""
221 | ]
222 | }
223 | ],
224 | "metadata": {
225 | "kernelspec": {
226 | "display_name": "Python [conda env:python-tutorial]",
227 | "language": "python",
228 | "name": "conda-env-python-tutorial-py"
229 | },
230 | "language_info": {
231 | "codemirror_mode": {
232 | "name": "ipython",
233 | "version": 3
234 | },
235 | "file_extension": ".py",
236 | "mimetype": "text/x-python",
237 | "name": "python",
238 | "nbconvert_exporter": "python",
239 | "pygments_lexer": "ipython3",
240 | "version": "3.7.3"
241 | }
242 | },
243 | "nbformat": 4,
244 | "nbformat_minor": 4
245 | }
246 |
--------------------------------------------------------------------------------
/notebooks/bytopic/python-basics/08_modules_and_scripts.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Modules & Scripts\n",
8 | "\n",
9 | "Python source files are called _modules_, and these can be imported just like other Python packages."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "%%writefile test_module.py\n",
19 | "\n",
20 | "def f(a):\n",
21 | " print(a)\n",
22 | "\n",
23 | "def g(b):\n",
24 | " return b\n",
25 | "\n",
26 | "if __name__ == '__main__':\n",
27 | " import sys\n",
28 | " f(sys.argv[1])\n",
29 | " print(g(2))"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "This should have written a Python file called `test_module.py`. You can now import this into your code (if you are in the same directory as the _module_ file)."
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": null,
42 | "metadata": {},
43 | "outputs": [],
44 | "source": [
45 | "import test_module"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": null,
51 | "metadata": {},
52 | "outputs": [],
53 | "source": [
54 | "test_module.f('a')"
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": null,
60 | "metadata": {},
61 | "outputs": [],
62 | "source": [
63 | "x = test_module.g(2)\n",
64 | "x"
65 | ]
66 | },
67 | {
68 | "cell_type": "markdown",
69 | "metadata": {},
70 | "source": [
71 | "Notice that this _module_ file has a few lines at the end of the file that look like:\n",
72 | "\n",
73 | "```python\n",
74 | "if __name__ == '__main__':\n",
75 | " import sys\n",
76 | " f(sys.argv[1])\n",
77 | " print(g(2))\n",
78 | "```\n",
79 | "\n",
80 | "This allows this _module_ to also be used as a script.\n",
81 | "\n",
82 | "Let's execute this module with the `python` executable and see what it does."
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "!python test_module.py hello"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {},
97 | "source": [
98 | "Notice that when we imported the module, these two lines were _not_ executed. But when we ran the file as a script, these lines:\n",
99 | "\n",
100 | "```python\n",
101 | "if __name__ == '__main__':\n",
102 | " import sys\n",
103 | " f(sys.argv[1])\n",
104 | " print(g(2))\n",
105 | "```\n",
106 | "\n",
107 | "_were_ executed! That's the difference between a module and a script."
108 | ]
109 | },
110 | {
111 | "cell_type": "markdown",
112 | "metadata": {},
113 | "source": [
114 | "Note that we used the `sys` module from Python to get the command line arguments that were supplied when we ran the script from the command line. In this case, the command line argument was `hello`.\n",
115 | "\n",
116 | "There are much better ways of defining and parsing command line arguments using the following packages:\n",
117 | "\n",
118 | "- [argparse](https://docs.python.org/3/library/argparse.html) (Builtin)\n",
119 | "- [click](https://click.palletsprojects.com/en/7.x/)"
120 | ]
121 | },
122 | {
123 | "cell_type": "markdown",
124 | "metadata": {},
125 | "source": [
126 | ""
130 | ]
131 | }
132 | ],
133 | "metadata": {
134 | "kernelspec": {
135 | "display_name": "Python [conda env:python-tutorial]",
136 | "language": "python",
137 | "name": "conda-env-python-tutorial-py"
138 | },
139 | "language_info": {
140 | "codemirror_mode": {
141 | "name": "ipython",
142 | "version": 3
143 | },
144 | "file_extension": ".py",
145 | "mimetype": "text/x-python",
146 | "name": "python",
147 | "nbconvert_exporter": "python",
148 | "pygments_lexer": "ipython3",
149 | "version": "3.7.3"
150 | }
151 | },
152 | "nbformat": 4,
153 | "nbformat_minor": 4
154 | }
155 |
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/img/TDD.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/test-driven-development/img/TDD.png
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/img/ci-job.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/test-driven-development/img/ci-job.png
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/img/ci-workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/test-driven-development/img/ci-workflow.png
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/img/coverage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/test-driven-development/img/coverage.png
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/my_cesm_package/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/test-driven-development/my_cesm_package/__init__.py
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/my_cesm_package/statistics.py:
--------------------------------------------------------------------------------
def mean(num_list):
    """Return the arithmetic mean of a sequence of numbers.

    Parameters
    ----------
    num_list : sequence of numbers
        The values to average. Must support ``sum()`` and ``len()``
        (lists, tuples, and ranges all work).

    Returns
    -------
    The sum of the values divided by their count (true division, so the
    result is a float for integer input).

    Raises
    ------
    ZeroDivisionError
        If ``num_list`` is empty; re-raised with a friendlier message
        appended to the builtin one.
    """
    try:
        return sum(num_list) / len(num_list)
    except ZeroDivisionError as detail:
        # len(()) == 0 makes the division fail; augment the terse builtin
        # message before re-raising so users know what went wrong.
        msg = "The algebraic mean of an empty list is undefined. Please provide a list of numbers."
        raise ZeroDivisionError(f"{detail}\n{msg}")
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/tests/__init__.py:
--------------------------------------------------------------------------------
from my_cesm_package.statistics import mean


# NOTE(review): this file duplicates tests/test_statistics.py line-for-line.
# Test code in __init__.py runs on package import with some tools — consider
# keeping the tests only in test_statistics.py.


def test_ints():
    # The mean of the consecutive integers 1..5 is the middle value.
    num_list = [1, 2, 3, 4, 5]
    obs = mean(num_list)
    exp = 3
    assert obs == exp


def test_zero():
    # A zero entry in the input is averaged like any other value.
    num_list = [0, 2, 4, 6]
    obs = mean(num_list)
    exp = 3
    assert obs == exp


def test_double():
    # This one will fail in Python 2
    # (integer division would truncate 10/4 to 2; Python 3 gives 2.5).
    num_list = [1, 2, 3, 4]
    obs = mean(num_list)
    exp = 2.5
    assert obs == exp


def test_long():
    # mean(1 .. big-1) == big/2 by the arithmetic-series formula.
    big = 100000000
    obs = mean(range(1, big))
    exp = big / 2.0
    assert obs == exp


def test_complex():
    # given that complex numbers are an unordered field
    # the arithmetic mean of complex numbers is meaningless
    # NOTE(review): mean() as written returns sum/len (a complex number),
    # not NotImplemented, so this test fails as-is — presumably a deliberate
    # TDD "red" step; confirm intent before changing.
    num_list = [2 + 3j, 3 + 4j, -32 - 2j]
    obs = mean(num_list)
    exp = NotImplemented
    assert obs == exp
40 |
--------------------------------------------------------------------------------
/notebooks/bytopic/test-driven-development/tests/test_statistics.py:
--------------------------------------------------------------------------------
from my_cesm_package.statistics import mean


def test_ints():
    # The mean of the consecutive integers 1..5 is the middle element.
    values = [1, 2, 3, 4, 5]
    assert mean(values) == 3


def test_zero():
    # A zero entry is averaged like any other number.
    values = [0, 2, 4, 6]
    assert mean(values) == 3


def test_double():
    # True division must yield a float (this would truncate in Python 2).
    values = [1, 2, 3, 4]
    assert mean(values) == 2.5


def test_long():
    # mean(1 .. n-1) equals n / 2 for an arithmetic series.
    n = 100000000
    assert mean(range(1, n)) == n / 2.0


def test_complex():
    # Complex numbers form an unordered field, so an arithmetic mean of
    # complex values is treated as meaningless here.
    values = [2 + 3j, 3 + 4j, -32 - 2j]
    assert mean(values) == NotImplemented
39 |
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/02_io.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# I/O"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "toc": true
14 | },
15 | "source": [
16 | "Table of Contents \n",
17 | ""
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "## Learning Objectives\n",
25 | "\n",
26 | "- Write xarray objects to netCDF files\n",
27 | "- Load xarray datasets from netCDF files\n",
28 | "- Provide a brief overview on Zarr"
29 | ]
30 | },
31 | {
32 | "cell_type": "markdown",
33 | "metadata": {},
34 | "source": [
35 | "## Reading and Writing Files\n",
36 | "\n",
37 | "\n",
38 | "Xarray supports direct serialization and I/O to several file formats including pickle, netCDF, OPeNDAP (read-only), GRIB1/2 (read-only), Zarr, and HDF by integrating with third-party libraries. Additional serialization formats for 1-dimensional data are available through pandas.\n",
39 | "\n",
40 | "File types\n",
41 | "- Pickle\n",
42 | "- NetCDF 3/4\n",
43 | "- RasterIO\n",
44 | "- Zarr\n",
45 | "- PyNio\n",
46 | "\n",
47 | "Interoperability\n",
48 | "- Pandas\n",
49 | "- Iris\n",
50 | "- CDMS\n",
51 | "- dask DataFrame\n"
52 | ]
53 | },
54 | {
55 | "cell_type": "markdown",
56 | "metadata": {},
57 | "source": [
58 | "## Opening xarray datasets\n",
59 | "\n",
60 | "Xarray's `open_dataset` and `open_mfdataset` are the primary functions for opening local or remote datasets such as netCDF, GRIB, OpenDap, and HDF. These operations are all supported by third party libraries (engines) for which xarray provides a common interface. "
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": null,
66 | "metadata": {},
67 | "outputs": [],
68 | "source": [
69 | "!ncdump -h ../../../data/rasm.nc"
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": null,
75 | "metadata": {},
76 | "outputs": [],
77 | "source": [
78 | "import xarray as xr\n",
79 | "from glob import glob"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": null,
85 | "metadata": {},
86 | "outputs": [],
87 | "source": [
88 | "ds = xr.open_dataset('../../../data/rasm.nc')\n",
89 | "ds"
90 | ]
91 | },
92 | {
93 | "cell_type": "markdown",
94 | "metadata": {},
95 | "source": [
96 | "## Saving xarray datasets as netcdf files\n",
97 | "\n",
98 | "Xarray provides a high-level method for writing netCDF files directly from Xarray Datasets/DataArrays."
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": null,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "ds.to_netcdf('../../../data/rasm_test.nc')"
108 | ]
109 | },
110 | {
111 | "cell_type": "markdown",
112 | "metadata": {},
113 | "source": [
114 | "## Multifile datasets\n",
115 | "\n",
116 | "Xarray can read/write multifile datasets using the `open_mfdataset` and `save_mfdataset` functions. "
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": null,
122 | "metadata": {},
123 | "outputs": [],
124 | "source": [
125 | "paths = glob('./data/19*.nc')\n",
126 | "paths"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "metadata": {},
133 | "outputs": [],
134 | "source": [
135 | "ds2 = xr.open_mfdataset(paths, combine=\"by_coords\")\n",
136 | "ds2"
137 | ]
138 | },
139 | {
140 | "cell_type": "markdown",
141 | "metadata": {},
142 | "source": [
143 | "## Zarr\n",
144 | "\n",
145 | "Zarr is a Python package providing an implementation of chunked, compressed, N-dimensional arrays. Zarr has the ability to store arrays in a range of ways, including in memory, in files, and in cloud-based object storage such as Amazon S3 and Google Cloud Storage. Xarray’s Zarr backend allows xarray to leverage these capabilities."
146 | ]
147 | },
148 | {
149 | "cell_type": "code",
150 | "execution_count": null,
151 | "metadata": {},
152 | "outputs": [],
153 | "source": [
154 | "# save to a Zarr dataset\n",
155 | "ds.to_zarr('./data/rasm.zarr', mode='w')"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": [
164 | "!ls ./data/rasm.zarr"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": null,
170 | "metadata": {},
171 | "outputs": [],
172 | "source": [
173 | "!du -h ./data/rasm.zarr"
174 | ]
175 | },
176 | {
177 | "cell_type": "markdown",
178 | "metadata": {},
179 | "source": [
180 | "## Going Further\n",
181 | " \n",
182 | "- Xarray I/O Documentation: http://xarray.pydata.org/en/latest/io.html\n",
183 | "\n",
184 | "- Zarr Documentation: https://zarr.readthedocs.io/en/stable/\n",
185 | "\n"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | ""
196 | ]
197 | }
198 | ],
199 | "metadata": {
200 | "kernelspec": {
201 | "display_name": "Python [conda env:python-tutorial]",
202 | "language": "python",
203 | "name": "conda-env-python-tutorial-py"
204 | },
205 | "language_info": {
206 | "codemirror_mode": {
207 | "name": "ipython",
208 | "version": 3
209 | },
210 | "file_extension": ".py",
211 | "mimetype": "text/x-python",
212 | "name": "python",
213 | "nbconvert_exporter": "python",
214 | "pygments_lexer": "ipython3",
215 | "version": "3.7.3"
216 | },
217 | "toc": {
218 | "base_numbering": 1,
219 | "nav_menu": {},
220 | "number_sections": true,
221 | "sideBar": true,
222 | "skip_h1_title": false,
223 | "title_cell": "Table of Contents",
224 | "title_sidebar": "Contents",
225 | "toc_cell": true,
226 | "toc_position": {},
227 | "toc_section_display": true,
228 | "toc_window_display": true
229 | }
230 | },
231 | "nbformat": 4,
232 | "nbformat_minor": 2
233 | }
234 |
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/04_agg.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Aggregation\n"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "toc": true
14 | },
15 | "source": [
16 | "Table of Contents \n",
17 | ""
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "## Learning Objectives\n",
25 | "\n",
26 | "- Perform aggregation (reduction) along one or multiple dimensions of a DataArray or Dataset\n"
27 | ]
28 | },
29 | {
30 | "cell_type": "markdown",
31 | "metadata": {},
32 | "source": [
33 | "## Aggregation Methods\n",
34 | "\n",
35 | "Xarray supports many of the aggregation methods that numpy has. A partial list includes: all, any, argmax, argmin, max, mean, median, min, prod, sum, std, var.\n",
36 | "\n",
37 | "Whereas the numpy syntax would require scalar axes, xarray can use dimension names:"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "import xarray as xr"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "metadata": {},
53 | "outputs": [],
54 | "source": [
55 | "ds = xr.open_dataset(\"../../../data/air_temperature.nc\")"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "da = ds['air']\n",
65 | "da"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "da.mean()"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "metadata": {},
81 | "outputs": [],
82 | "source": [
83 | "da.mean(dim=['lat', 'lon'])"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": null,
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "da.median(dim='time')"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": null,
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "da.std(dim='time')"
102 | ]
103 | },
104 | {
105 | "cell_type": "markdown",
106 | "metadata": {},
107 | "source": [
108 | "## Going Further"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "- [Xarray Docs - Aggregation](https://xarray.pydata.org/en/stable/computation.html#aggregation)"
116 | ]
117 | },
118 | {
119 | "cell_type": "markdown",
120 | "metadata": {},
121 | "source": [
122 | ""
126 | ]
127 | }
128 | ],
129 | "metadata": {
130 | "kernelspec": {
131 | "display_name": "Python [conda env:python-tutorial]",
132 | "language": "python",
133 | "name": "conda-env-python-tutorial-py"
134 | },
135 | "language_info": {
136 | "codemirror_mode": {
137 | "name": "ipython",
138 | "version": 3
139 | },
140 | "file_extension": ".py",
141 | "mimetype": "text/x-python",
142 | "name": "python",
143 | "nbconvert_exporter": "python",
144 | "pygments_lexer": "ipython3",
145 | "version": "3.7.3"
146 | },
147 | "toc": {
148 | "base_numbering": 1,
149 | "nav_menu": {},
150 | "number_sections": true,
151 | "sideBar": true,
152 | "skip_h1_title": false,
153 | "title_cell": "Table of Contents",
154 | "title_sidebar": "Contents",
155 | "toc_cell": true,
156 | "toc_position": {},
157 | "toc_section_display": true,
158 | "toc_window_display": true
159 | }
160 | },
161 | "nbformat": 4,
162 | "nbformat_minor": 2
163 | }
164 |
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/05_arithmetic.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Computation"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "toc": true
14 | },
15 | "source": [
16 | "Table of Contents \n",
17 | ""
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "## Learning Objectives\n",
25 | "\n",
26 | "\n",
27 | "- Do basic arithmetic with DataArrays and Datasets"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | "\n",
35 | "## Arithmetic Operations\n",
36 | "\n",
37 | "Arithmetic operations with a single DataArray automatically vectorize (like numpy) over all array values:\n"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "import xarray as xr"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "metadata": {},
53 | "outputs": [],
54 | "source": [
55 | "da = xr.open_dataarray(\"../../../data/air_temperature.nc\")"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "da"
65 | ]
66 | },
67 | {
68 | "cell_type": "code",
69 | "execution_count": null,
70 | "metadata": {},
71 | "outputs": [],
72 | "source": [
73 | "da - 273.15"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "da_mean = da.mean(dim='time')\n",
83 | "da_mean"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": null,
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "da - da_mean"
93 | ]
94 | },
95 | {
96 | "cell_type": "markdown",
97 | "metadata": {},
98 | "source": [
99 | "\n",
100 | "\n",
101 | "Notice that this required broadcasting along the time dimension. NumPy broadcasting is covered in great detail in the NumPy Guide.\n",
102 | "\n",
103 | "\n"
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "metadata": {},
109 | "source": [
110 | "## Going Further\n",
111 | "\n",
112 | "- [Xarray Docs - Basic Array Math](https://xarray.pydata.org/en/stable/computation.html#basic-array-math)"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | ""
123 | ]
124 | }
125 | ],
126 | "metadata": {
127 | "kernelspec": {
128 | "display_name": "Python [conda env:python-tutorial]",
129 | "language": "python",
130 | "name": "conda-env-python-tutorial-py"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 3
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython3",
142 | "version": "3.7.3"
143 | },
144 | "toc": {
145 | "base_numbering": 1,
146 | "nav_menu": {},
147 | "number_sections": true,
148 | "sideBar": true,
149 | "skip_h1_title": false,
150 | "title_cell": "Table of Contents",
151 | "title_sidebar": "Contents",
152 | "toc_cell": true,
153 | "toc_position": {},
154 | "toc_section_display": true,
155 | "toc_window_display": true
156 | }
157 | },
158 | "nbformat": 4,
159 | "nbformat_minor": 2
160 | }
161 |
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/06_alignment.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Alignment"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "toc": true
14 | },
15 | "source": [
16 | "Table of Contents \n",
17 | ""
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "## Learning Objectives\n",
25 | "\n",
26 | "- Understand how alignment works in xarray"
27 | ]
28 | },
29 | {
30 | "cell_type": "markdown",
31 | "metadata": {},
32 | "source": [
33 | "\n",
34 | "## Automatic Alignment\n",
35 | "\n",
36 | "xarray enforces alignment between index Coordinates (that is, coordinates with the same name as a dimension, marked by `*`) on objects used in binary operations."
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": null,
42 | "metadata": {},
43 | "outputs": [],
44 | "source": [
45 | "import xarray as xr"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": null,
51 | "metadata": {},
52 | "outputs": [],
53 | "source": [
54 | "da = xr.open_dataarray(\"../../../data/air_temperature.nc\")\n",
55 | "da"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "arr = da.isel(time=0, lat=slice(5, 10), lon=slice(7, 11))\n",
65 | "arr"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "part = arr[:-1]\n",
75 | "part"
76 | ]
77 | },
78 | {
79 | "cell_type": "markdown",
80 | "metadata": {},
81 | "source": [
82 | "- **Default behavior is an `inner join`**"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "(arr + part) / 2"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {},
97 | "source": [
98 | "- **We can also use an `outer join`**"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": null,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "with xr.set_options(arithmetic_join=\"outer\"):\n",
108 | " print((arr + part) / 2)"
109 | ]
110 | },
111 | {
112 | "cell_type": "markdown",
113 | "metadata": {},
114 | "source": [
115 | "\n",
116 | "Notice that missing values (nan) were inserted where appropriate. \n",
117 | ""
118 | ]
119 | },
120 | {
121 | "cell_type": "markdown",
122 | "metadata": {},
123 | "source": [
124 | "## Going Further\n",
125 | "\n",
126 | "- [Xarray Docs - Automatic Alignment](https://xarray.pydata.org/en/stable/computation.html#automatic-alignment)"
127 | ]
128 | },
129 | {
130 | "cell_type": "markdown",
131 | "metadata": {},
132 | "source": [
133 | ""
137 | ]
138 | }
139 | ],
140 | "metadata": {
141 | "kernelspec": {
142 | "display_name": "Python [conda env:python-tutorial]",
143 | "language": "python",
144 | "name": "conda-env-python-tutorial-py"
145 | },
146 | "language_info": {
147 | "codemirror_mode": {
148 | "name": "ipython",
149 | "version": 3
150 | },
151 | "file_extension": ".py",
152 | "mimetype": "text/x-python",
153 | "name": "python",
154 | "nbconvert_exporter": "python",
155 | "pygments_lexer": "ipython3",
156 | "version": "3.7.3"
157 | },
158 | "toc": {
159 | "base_numbering": 1,
160 | "nav_menu": {},
161 | "number_sections": true,
162 | "sideBar": true,
163 | "skip_h1_title": false,
164 | "title_cell": "Table of Contents",
165 | "title_sidebar": "Contents",
166 | "toc_cell": true,
167 | "toc_position": {},
168 | "toc_section_display": true,
169 | "toc_window_display": true
170 | }
171 | },
172 | "nbformat": 4,
173 | "nbformat_minor": 2
174 | }
175 |
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/1980.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/1980.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/1981.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/1981.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/1982.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/1982.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/1983.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/1983.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/air_temperature.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/air_temperature.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/data/rasm.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/data/rasm.nc
--------------------------------------------------------------------------------
/notebooks/bytopic/xarray/images/xarray-data-structures.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/bytopic/xarray/images/xarray-data-structures.png
--------------------------------------------------------------------------------
/notebooks/workflows/cesm/oxygen-trend-computation/all-trends-O2-200m-NPac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/workflows/cesm/oxygen-trend-computation/all-trends-O2-200m-NPac.png
--------------------------------------------------------------------------------
/notebooks/workflows/cesm/oxygen-trend-computation/all-trends-internal-O2-200m-NPac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/workflows/cesm/oxygen-trend-computation/all-trends-internal-O2-200m-NPac.png
--------------------------------------------------------------------------------
/notebooks/workflows/cesm/oxygen-trend-computation/trend-decomp-O2-200m-NPac.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/notebooks/workflows/cesm/oxygen-trend-computation/trend-decomp-O2-200m-NPac.png
--------------------------------------------------------------------------------
/notebooks/workflows/cesm/oxygen-trend-computation/util.py:
--------------------------------------------------------------------------------
1 | import esmlab
2 | import numpy as np
3 | import xarray as xr
4 |
def clean_ds(ds, keep_vars):
    """Reduce a dataset to ``keep_vars`` at the level nearest z_t = 200e2.

    Clears global attributes, stamps the time axis with explicit units and a
    'noleap' calendar, demotes non-dimension coordinates so they can be
    dropped, removes every variable not listed in ``keep_vars``, and finally
    selects the z_t level closest to 200e2 by nearest-neighbour lookup.
    """
    ds.attrs = {}
    ds.time.attrs['units'] = 'days since 0000-01-01 00:00:00'
    ds.time.attrs['calendar'] = 'noleap'

    # Coordinates that are not also dimensions must be demoted to plain
    # variables before they can be dropped below.
    extra_coords = set(ds.coords).difference(ds.dims)
    if extra_coords:
        ds = ds.reset_coords(extra_coords)

    unwanted = [name for name in ds.variables if name not in keep_vars]
    ds = ds.drop(unwanted)
    return ds.sel(z_t=200e2, method='nearest')
17 |
18 |
def sel_time(ds, indexer_val, time_coord_name=None, year_offset=None):
    # Subset `ds` along its time coordinate using esmlab's dataset accessor:
    # compute_time_var() materializes a decoded time variable (optionally
    # shifted by `year_offset`) so label-based .sel() works, and
    # uncompute_time_var() restores the original time encoding afterwards.
    # NOTE(review): exact semantics depend on the installed esmlab version —
    # confirm against its accessor API.
    esmlabacc = ds.esmlab.set_time(time_coord_name=time_coord_name)
    time_coord_name = esmlabacc.time_coord_name
    dso = esmlabacc.compute_time_var(year_offset=year_offset).sel(**{time_coord_name: indexer_val})
    esmlabacc = dso.esmlab.set_time(time_coord_name=time_coord_name)
    return esmlabacc.uncompute_time_var()
25 |
26 |
def pop_add_cyclic(ds):
    """Append a cyclic (wraparound) longitude column to a POP-grid dataset.

    Re-centres the curvilinear TLAT/TLONG grid and duplicates the first
    longitude column at the end, so plotting (e.g. with cartopy) does not
    leave a seam. Every data variable dimensioned (..., 'nlat', 'nlon') gets
    the same treatment; other variables and coordinates pass through.
    """

    # nj is currently unused; ni is the longitude extent of the grid.
    nj = ds.TLAT.shape[0]
    ni = ds.TLONG.shape[1]

    # Window of width ni starting near ni/2 — used to re-centre the grid.
    xL = int(ni/2 - 1)
    xR = int(xL + ni)

    tlon = ds.TLONG.data
    tlat = ds.TLAT.data

    # Shift longitudes at/above the first column's minimum down by 360, tile
    # two copies side by side, then cut out the re-centred window.
    tlon = np.where(np.greater_equal(tlon, min(tlon[:,0])), tlon-360., tlon)
    lon = np.concatenate((tlon, tlon + 360.), 1)
    lon = lon[:, xL:xR]

    # Special-case fix-ups when ni == 320 (rows 367 and up) — presumably the
    # standard POP gx1v* grid; TODO confirm these row indices for other grids.
    if ni == 320:
        lon[367:-3, 0] = lon[367:-3, 0] + 360.
    lon = lon - 360.

    # Append the cyclic column: first column shifted by +360.
    lon = np.hstack((lon, lon[:, 0:1] + 360.))
    if ni == 320:
        lon[367:, -1] = lon[367:, -1] - 360.

    #-- trick cartopy into doing the right thing:
    # it gets confused when the cyclic coords are identical
    lon[:, 0] = lon[:, 0] - 1e-8

    #-- periodicity
    # Latitudes get the same tile / window / append-first-column treatment
    # (no 360 shift needed since latitude is periodic-invariant here).
    lat = np.concatenate((tlat, tlat), 1)
    lat = lat[:, xL:xR]
    lat = np.hstack((lat, lat[:,0:1]))

    TLAT = xr.DataArray(lat, dims=('nlat', 'nlon'))
    TLONG = xr.DataArray(lon, dims=('nlat', 'nlon'))

    dso = xr.Dataset({'TLAT': TLAT, 'TLONG': TLONG})

    # copy vars
    varlist = [v for v in ds.data_vars if v not in ['TLAT', 'TLONG']]
    for v in varlist:
        v_dims = ds[v].dims
        if not ('nlat' in v_dims and 'nlon' in v_dims):
            # Not on the horizontal grid: pass through unchanged.
            dso[v] = ds[v]
        else:
            # determine and sort other dimensions
            other_dims = set(v_dims) - {'nlat', 'nlon'}
            other_dims = tuple([d for d in v_dims if d in other_dims])
            lon_dim = ds[v].dims.index('nlon')
            field = ds[v].data
            # Same tile / window / append-cyclic-column steps as for lon.
            field = np.concatenate((field, field), lon_dim)
            field = field[..., :, xL:xR]
            field = np.concatenate((field, field[..., :, 0:1]), lon_dim)
            dso[v] = xr.DataArray(field, dims=other_dims+('nlat', 'nlon'),
                                  attrs=ds[v].attrs)


    # copy coords
    # (only those not on the horizontal grid; TLAT/TLONG were rebuilt above)
    for v, da in ds.coords.items():
        if not ('nlat' in da.dims and 'nlon' in da.dims):
            dso = dso.assign_coords(**{v: da})


    return dso
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/00_intro.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "toc": true
7 | },
8 | "source": [
9 | "Table of Contents \n",
10 | ""
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "# Computing Ocean Heat Content (OHC)\n",
18 | "\n",
19 | "Now that you're familiar with the Jupyter Notebook workspace, let's use some Python in a way that mirrors a potential usecase and integrates the teaching of Python geoscience tools when you would need them. We've prepared a series of 4 notebooks that demonstrate Python Tools through the calculation of Ocean Heat Content (OHC). Throughout these notebooks, we will introduce the following concepts:\n",
20 | "\n",
21 | "- Python modules\n",
22 | "- Xarray Library\n",
23 | "\n",
24 | "\n",
25 | "The content in each notebook builds on the previous one. Therefore, we recommend following these notebooks in order (1-4) until you are familiar with all the concepts presented. "
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | ""
35 | ]
36 | }
37 | ],
38 | "metadata": {
39 | "kernelspec": {
40 | "display_name": "Python 3",
41 | "language": "python",
42 | "name": "python3"
43 | },
44 | "language_info": {
45 | "codemirror_mode": {
46 | "name": "ipython",
47 | "version": 3
48 | },
49 | "file_extension": ".py",
50 | "mimetype": "text/x-python",
51 | "name": "python",
52 | "nbconvert_exporter": "python",
53 | "pygments_lexer": "ipython3",
54 | "version": "3.7.1"
55 | },
56 | "toc": {
57 | "base_numbering": 1,
58 | "nav_menu": {},
59 | "number_sections": true,
60 | "sideBar": true,
61 | "skip_h1_title": false,
62 | "title_cell": "Table of Contents",
63 | "title_sidebar": "Contents",
64 | "toc_cell": true,
65 | "toc_position": {},
66 | "toc_section_display": true,
67 | "toc_window_display": true
68 | }
69 | },
70 | "nbformat": 4,
71 | "nbformat_minor": 2
72 | }
73 |
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_1_1.py:
--------------------------------------------------------------------------------
1 | ds['lev_bnds']
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_1_2.py:
--------------------------------------------------------------------------------
# Exercise solution: extract the 'thetao' DataArray from the notebook's
# dataset `ds` and promote it to a standalone Dataset.
da_thetao = ds['thetao']
ds_thetao = da_thetao.to_dataset()
ds_thetao
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_1_3.py:
--------------------------------------------------------------------------------
# Exercise solution: inspect the dimensions, coordinates, and global
# attributes of the notebook's dataset `ds`.
print("*** Dimensions")
print(ds.dims)
print("\n\n*** Coordinates")
print(ds.coords)
print("\n\n*** Attributes")
print(ds.attrs)
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_1.py:
--------------------------------------------------------------------------------
# Exercise solution: positional indexing — the last entry of the 'lev'
# coordinate of the notebook's dataset `ds`.
level_point = ds['lev'][-1]
level_point
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_2.py:
--------------------------------------------------------------------------------
# Exercise solution: integer-position selection (isel) of latitude index 5.
thetao_lat5 = ds['thetao'].isel(lat=5)
thetao_lat5
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_3.py:
--------------------------------------------------------------------------------
# Exercise solution: label-based nearest-neighbour selection around
# lon=30, lat=30, rejecting matches farther than 1 away.
thetao_30lon_30lat = ds['thetao'].sel(lon=30, lat=30, method='nearest', tolerance=1)
thetao_30lon_30lat
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_4.py:
--------------------------------------------------------------------------------
# Exercise solution: keep only level bounds shallower than 100, dropping
# entries that do not satisfy the condition.
level_bounds_limited = ds['lev_bnds'].where(ds['lev_bnds'] < 100, drop = True)
level_bounds_limited.values
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_5.py:
--------------------------------------------------------------------------------
# Exercise solution: drop levels whose thickness (`delta_level`, computed
# earlier in the notebook) is zero from the temperature field.
temperature_limited = ds["thetao"].where(delta_level != 0, drop=True)
temperature_limited
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_2_6.py:
--------------------------------------------------------------------------------
def limit_depth_of_variables(level_bounds, temperature, depth_limit):
    """Clip level bounds at `depth_limit` and restrict both fields to the
    layers that retain a non-zero thickness.

    Returns a (layer thickness, limited temperature) pair.
    """
    clipped_bounds = level_bounds.where(level_bounds < depth_limit, depth_limit)
    thickness = abs(clipped_bounds[:, 1] - clipped_bounds[:, 0])

    # Layers clipped to zero thickness carry no heat -- drop them everywhere.
    has_thickness = thickness != 0
    delta_level_limited = thickness.where(has_thickness, drop=True)
    temperature_limited = temperature.where(has_thickness, drop=True)

    return delta_level_limited, temperature_limited
9 |
10 |
# Restrict the analysis to the upper 50 m of the water column.
delta_level_limited, temperature_limited = limit_depth_of_variables(
    ds["lev_bnds"], ds["thetao"], 50
)
print(delta_level_limited, "\n\n")
print(temperature_limited)
16 |
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_3_1.py:
--------------------------------------------------------------------------------
# Pull a single temperature value: first time step and level, point (30, 30).
thetao_point = ds["thetao"].isel(time=0, lev=0, lat=30, lon=30)

# Build cf_units handles from the variable's recorded units, then convert
# the value to Kelvin.
source_units = cf.Unit(thetao_point.attrs["units"])
kelvin = cf.Unit("degK")
source_units.convert(thetao_point, kelvin)
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_3_2.py:
--------------------------------------------------------------------------------
def change_units(ds, variable_str, variable_bounds_str, target_unit_str):
    """Convert ``ds[variable_bounds_str]`` into ``target_unit_str``.

    The source units are read from the attrs of ``ds[variable_str]``
    (bounds variables typically carry no units of their own); the
    conversion is applied element-wise via ``xr.apply_ufunc``.
    """
    source = cf.Unit(ds[variable_str].attrs["units"])
    target = cf.Unit(target_unit_str)
    return xr.apply_ufunc(
        source.convert,
        ds[variable_bounds_str],
        target,
        output_dtypes=[ds[variable_bounds_str].dtype],
    )
12 |
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_3_3.py:
--------------------------------------------------------------------------------
# Convert level bounds to metres and temperature to Kelvin using the
# change_units helper defined in the previous exercise.
level_bounds_in_m = change_units(ds, "lev", "lev_bnds", "m")
temperature_in_K = change_units(ds, "thetao", "thetao", "degK")
print(level_bounds_in_m, temperature_in_K)
4 |
--------------------------------------------------------------------------------
/notebooks/workflows/ocean_heat_content/solutions/solution_4_1.py:
--------------------------------------------------------------------------------
def calc_ocean_heat(delta_level, temperature):
    """Compute ocean heat content by integrating temperature over depth.

    Each layer's temperature is weighted by its thickness, summed over the
    level dimension, and scaled by seawater density and specific heat.
    """
    density = 1026  # kg/m^3
    heat_capacity = 3990  # J/(kg K)
    layer_heat = delta_level * temperature
    return layer_heat.sum(dim="lev") * density * heat_capacity
9 |
# Combine the depth-limited thickness and temperature fields into heat content.
heat = calc_ocean_heat(delta_level_limited, temperature_limited)
print(heat)
12 |
--------------------------------------------------------------------------------
/setup/check_setup:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import importlib
3 | import subprocess
4 | from pathlib import Path
5 | import os
6 |
7 | here = os.path.abspath(os.path.dirname(__file__))
8 |
9 |
def check_conda():
    """Return True if a ``conda`` executable is available on $PATH."""
    # shutil.which replaces distutils.spawn.find_executable, which is
    # deprecated and removed from the stdlib in Python 3.12.
    import shutil

    return shutil.which("conda") is not None
14 |
15 |
def check_data():
    """Verify the tutorial data directory: each canary file must open as a
    valid xarray Dataset, and every file under the directory must exist.

    Raises AssertionError (or an xarray/IO error) if anything is missing.
    """
    import xarray as xr

    data_dir = os.path.abspath(os.path.join(os.path.dirname(here), "data"))
    canary_files = [
        "NOAA_NCDC_ERSST_v3b_SST.nc",
        "thetao_Omon_historical_GISS-E2-1-G_r1i1p1f1_gn_185001-185512.nc",
        "woa2013v2-O2-thermocline-ann.nc",
        "rasm.nc",
        "air_temperature.nc",
        "moc.nc",
    ]
    canary_filepaths = [os.path.join(data_dir, f) for f in canary_files]

    for f in canary_filepaths:
        # Use a context manager so each netCDF file handle is closed
        # promptly instead of being leaked.
        with xr.open_dataset(f) as dset:
            assert isinstance(dset, xr.Dataset)

    # rglob only yields existing entries, but this guards against files
    # disappearing between listing and checking (e.g. broken symlinks).
    for f in Path(data_dir).rglob("*"):
        assert f.exists()
37 |
38 |
def verify_extensions(extensions):
    """Return the subset of JupyterLab extensions whose
    ``jupyter labextension check`` exits non-zero (i.e. not installed/enabled).
    """
    failing_ext = []
    for ext in extensions:
        cmd = ["jupyter", "labextension", "check", ext]
        # subprocess.run replaces the Popen/communicate boilerplate and
        # still swallows stdout/stderr.
        result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if result.returncode != 0:
            failing_ext.append(ext)

    return failing_ext
49 |
50 |
def main():
    """Run all environment checks and report anything that is missing.

    Raises AssertionError if conda or the Cartopy assets are absent;
    prints remediation commands for missing modules/extensions.
    """
    required_modules = [
        "numpy",
        "dask",
        "xarray",
        "matplotlib",
        "cartopy",
        "jupyter",
        "zarr",
        "ipywidgets",
    ]
    missing_modules = []
    for mod in required_modules:
        try:
            importlib.import_module(mod)
        except ImportError:
            missing_modules.append(mod)

    if not check_conda():
        raise AssertionError("Conda binary is missing from $PATH")

    # check_data() imports xarray internally; only run it once we know
    # xarray is importable, so the user gets the friendly module report
    # below instead of a raw ImportError traceback.
    if "xarray" not in missing_modules:
        check_data()

    extensions = [
        "@jupyter-widgets/jupyterlab-manager",
        "@pyviz/jupyterlab_pyviz",
        "nbdime-jupyterlab",
        "jupyter-leaflet",
    ]
    failing_extensions = verify_extensions(extensions)

    cartopy_assets = Path("~/.local/share/cartopy").expanduser()
    if not cartopy_assets.exists():
        raise AssertionError("Cartopy plotting assets are missing")

    if missing_modules:
        print("The following modules are required but not installed:")
        print("  {}".format(", ".join(missing_modules)))
        print("\nYou can install them using conda by running:")
        print("\n  conda install {}".format(" ".join(missing_modules)))
        print("\nOr you can install them using pip by running:")
        print("\n  pip install {}".format(" ".join(missing_modules)))

    if failing_extensions:
        print("The following JupyterLab extensions are not installed/enabled:")
        print("  {}".format(", ".join(failing_extensions)))
        print("\nYou can install and enable them by running:")
        print("\n  jupyter labextension install {}".format(" ".join(failing_extensions)))

    # Bug fix: the original's `else` was attached only to the extensions
    # check, so "Everything looks good!" printed even when modules were
    # missing. Report success only when nothing at all is missing.
    if not missing_modules and not failing_extensions:
        print("Everything looks good!")
108 |
109 |
# Allow running this file directly as a setup sanity check.
if __name__ == "__main__":
    main()
112 |
--------------------------------------------------------------------------------
/setup/conda/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Creating Additional Environments
3 |
4 | If you are interested in using Matlab in JupyterLab, consider creating the following environment using [env-py-matlab.yml](./environments/env-py-matlab.yml).
5 |
6 | ```bash
7 | conda env create -f ./setup/environments/env-py-matlab.yml
8 | ```
9 |
10 | (Using Matlab requires building the Matlab Python API; see [`setup/conda/build-matlab-api`](./build-matlab-api). Scripts are set up to use API's built in ``~/matlab-python`` or ``~mclong/matlab-python``.)
11 |
12 | To use an environment, we need to activate it using the command ``conda activate ENV_NAME``, and to deactivate an environment, we use ``conda deactivate``.
13 |
--------------------------------------------------------------------------------
/setup/conda/build-matlab-api:
--------------------------------------------------------------------------------
#!/bin/bash
# Build the Matlab Python engine API for a given Matlab version and
# install it under ~/matlab-python/<version>.
set -e

# Matlab version may be passed as the first argument; default to R2018a.
MATLAB_VERSION="${1:-R2018a}"

module load "matlab/${MATLAB_VERSION}"
# Directory containing the matlab binary (quoted against spaces in paths).
matlabroot=$(dirname "$(which matlab)")

cd "${matlabroot}/../extern/engines/python"

# The engine must be built inside the py-matlab conda environment.
source activate py-matlab

build_base="${HOME}/matlab-python"
install_dir="${build_base}/${MATLAB_VERSION}"

python setup.py build --build-base="${build_base}" install --prefix="${install_dir}"
23 |
--------------------------------------------------------------------------------
/setup/conda/check_env_conflict:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Print the list of conda environments as JSON; used during setup to
# detect conflicting/duplicate environments.
set -xeo pipefail

# Make sure the conda installation under $INSTALL_DIR is found first.
export PATH="$INSTALL_DIR/bin:$PATH"

conda env list --json
8 |
--------------------------------------------------------------------------------
/setup/conda/install_conda:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Download and install Miniconda from $INSTALLER into $INSTALL_DIR,
# using whichever of wget/curl is available.

# Probe for downloaders BEFORE enabling strict mode, so a missing binary
# does not abort the probe itself. `command -v` is the portable form.
WGET=$(command -v wget)
CURL=$(command -v curl)

set -xeo pipefail

# Neither downloader available: bail out.
if [ -z "$WGET" ] && [ -z "$CURL" ]; then
    exit 255
fi

# Quote $INSTALLER so URLs with special characters survive word splitting.
if [ -z "$WGET" ]; then
    curl -o miniconda.sh -s "$INSTALLER"
else
    wget "$INSTALLER" -O miniconda.sh -q
fi

chmod +x miniconda.sh
mkdir -p ~/.conda
./miniconda.sh -b -p "$INSTALL_DIR"
rm miniconda.sh
22 |
--------------------------------------------------------------------------------
/setup/conda/post_build_base:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Post-install step for the base environment: install and refresh the
# JupyterLab extensions it needs.
set -xeo pipefail

# Make sure the freshly installed conda is found first.
export PATH="$INSTALL_DIR/bin:$PATH"

# Install jupyterlab extensions
source activate base
jupyter labextension install @jupyter-widgets/jupyterlab-manager \
                             @jupyterlab/toc \
                             dask-labextension

jupyter labextension update --all
14 |
--------------------------------------------------------------------------------
/setup/conda/post_build_tutorial:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Post-install step for the python-tutorial environment: JupyterLab
# extensions, Cartopy plotting assets, and conda cache cleanup.

set -xeo pipefail

export PATH="$INSTALL_DIR/bin:$PATH"

# Install jupyterlab extensions

source activate python-tutorial
jupyter labextension install @jupyter-widgets/jupyterlab-manager \
                             @pyviz/jupyterlab_pyviz \
                             nbdime-jupyterlab \
                             jupyter-leaflet


jupyter labextension update --all


# Check the directory exists before listing it: `ls -A` on a missing
# path prints an error to stderr on every fresh install.
if [ -d ~/.local/share/cartopy ] && [ "$(ls -A ~/.local/share/cartopy)" ]; then
    echo "Cartopy plotting assets exist already"

else
    # Download Cartopy plotting assets
    mkdir -p ~/.local/share/cartopy
    python $CARTOPY_ASSET_SCRIPT --output ~/.local/share/cartopy cultural-extra cultural gshhs physical
fi

# Remove tarballs, index cache, and unused packages.
# (The old `-tipsy` spelling included -s/--source-cache, which has been
# removed from conda and makes this command fail on recent versions.)
conda clean -tipy
30 |
--------------------------------------------------------------------------------
/setup/conda/update_base_env:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Configure conda channels and update the base environment from
# $BASE_ENV_YML; optionally initialize the user's shell.
set -xeo pipefail

export PATH="$INSTALL_DIR/bin:$PATH"

# Add conda-forge and activate strict
conda config --add channels conda-forge
# conda config --set channel_priority strict
conda config --set show_channel_urls True
conda config --set pip_interop_enabled True

conda update -q -y conda
conda env update -q -f $BASE_ENV_YML

# Dump configuration for the build log.
conda info -a

if [ -z "$INIT_SHELL" ]; then
    echo "INIT_SHELL is unset. Skipping conda init INIT_SHELL";
else
    conda init $INIT_SHELL;
fi
23 |
--------------------------------------------------------------------------------
/setup/conda/update_tutorial_env:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Create/update the tutorial conda environment from $TUTORIAL_ENV_YML.
set -xeo pipefail

# Use the conda installation under $INSTALL_DIR.
export PATH="$INSTALL_DIR/bin:$PATH"

conda env update -q -f $TUTORIAL_ENV_YML
8 |
--------------------------------------------------------------------------------
/setup/download_cartopy_assets.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # (C) British Crown Copyright 2011 - 2016, Met Office
3 | #
4 | # This file is part of cartopy.
5 | #
6 | # cartopy is free software: you can redistribute it and/or modify it under
7 | # the terms of the GNU Lesser General Public License as published by the
8 | # Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # cartopy is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Lesser General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Lesser General Public License
17 | # along with cartopy. If not, see <https://www.gnu.org/licenses/>.
18 |
19 | """
20 | This module provides a command-line tool for triggering the download of
21 | the data used by various Feature instances.
22 |
23 | For detail on how to use this tool, execute it with the `-h` option:
24 |
25 |     python download_cartopy_assets.py -h
26 |
27 | """
28 | from __future__ import absolute_import, division, print_function
29 |
30 | import argparse
31 |
32 | from cartopy import config
33 | from cartopy.feature import Feature, GSHHSFeature, NaturalEarthFeature
34 | from cartopy.io import Downloader
35 |
36 |
# Natural Earth datasets are published at these three resolutions.
ALL_SCALES = ("110m", "50m", "10m")


# Mapping of command-line group name -> feature definitions to download.
# A value is either a ready-made Feature instance, or a tuple of
# (category, name, scales) triples used to build NaturalEarthFeature objects.
FEATURE_DEFN_GROUPS = {
    # Only need one GSHHS resolution because they *all* get downloaded
    # from one file.
    "gshhs": GSHHSFeature(scale="f"),
    "physical": (
        ("physical", "coastline", ALL_SCALES),
        ("physical", "land", ALL_SCALES),
        ("physical", "ocean", ALL_SCALES),
        ("physical", "rivers_lake_centerlines", ALL_SCALES),
        ("physical", "lakes", ALL_SCALES),
        ("physical", "geography_regions_polys", ALL_SCALES),
        ("physical", "geography_regions_points", ALL_SCALES),
        ("physical", "geography_marine_polys", ALL_SCALES),
        ("physical", "glaciated_areas", ALL_SCALES),
    ),
    "cultural": (
        ("cultural", "admin_0_countries", ALL_SCALES),
        ("cultural", "admin_0_countries_lakes", ALL_SCALES),
        ("cultural", "admin_0_sovereignty", ALL_SCALES),
        ("cultural", "admin_0_boundary_lines_land", ALL_SCALES),
        # Some cultural datasets are not published at every resolution.
        ("cultural", "urban_areas", ("50m", "10m")),
        ("cultural", "roads", "10m"),
        ("cultural", "roads_north_america", "10m"),
        ("cultural", "railroads", "10m"),
        ("cultural", "railroads_north_america", "10m"),
    ),
    "cultural-extra": (
        ("cultural", "admin_0_map_units", "110m"),
        ("cultural", "admin_0_scale_rank", "110m"),
        ("cultural", "admin_0_tiny_countries", "110m"),
        ("cultural", "admin_0_pacific_groupings", "110m"),
        ("cultural", "admin_1_states_provinces_shp", "110m"),
        ("cultural", "admin_1_states_provinces_lines", "110m"),
    ),
}
75 |
76 |
def download_features(group_names, dry_run=True):
    """Download every dataset belonging to the given feature groups.

    When ``dry_run`` is True, only print the URLs that would be fetched;
    otherwise trigger the download and report each feature's geometry count.
    """
    for group_name in group_names:
        feature_defns = FEATURE_DEFN_GROUPS[group_name]
        if isinstance(feature_defns, Feature):
            # A ready-made Feature instance (the GSHHS case).
            # NOTE(review): relies on cartopy's private _levels/_scale
            # attributes -- may break across cartopy versions; confirm.
            feature = feature_defns
            level = list(feature._levels)[0]
            downloader = Downloader.from_config(("shapefiles", "gshhs", feature._scale, level))
            format_dict = {"config": config, "scale": feature._scale, "level": level}
            if dry_run:
                print("URL: {}".format(downloader.url(format_dict)))
            else:
                # Downloader.path() fetches the file if it is not cached.
                downloader.path(format_dict)
                geoms = list(feature.geometries())
                print("Feature {} length: {}".format(feature, len(geoms)))
        else:
            # A tuple of (category, name, scales) Natural Earth triples.
            for category, name, scales in feature_defns:
                if not isinstance(scales, tuple):
                    scales = (scales,)
                for scale in scales:
                    downloader = Downloader.from_config(
                        ("shapefiles", "natural_earth", scale, category, name)
                    )
                    feature = NaturalEarthFeature(category, name, scale)
                    format_dict = {
                        "config": config,
                        "category": category,
                        "name": name,
                        "resolution": scale,
                    }
                    if dry_run:
                        print("URL: {}".format(downloader.url(format_dict)))
                    else:
                        downloader.path(format_dict)
                        geoms = list(feature.geometries())
                        print(
                            "Feature {}, {}, {} length: {}"
                            "".format(category, name, scale, len(geoms))
                        )
115 |
116 |
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Download feature datasets.")
    parser.add_argument(
        "group_names",
        nargs="+",
        choices=FEATURE_DEFN_GROUPS,
        metavar="GROUP_NAME",
        help="Feature group name: %(choices)s",
    )
    parser.add_argument(
        "--output",
        "-o",
        help="save datasets in the specified directory " "(default: user cache directory)",
    )
    parser.add_argument("--dry-run", help="just print the URLs to download", action="store_true")
    parser.add_argument(
        "--ignore-repo-data", action="store_true", help="ignore existing repo data when downloading"
    )
    args = parser.parse_args()

    # Redirect cartopy's data directories so assets land in --output.
    if args.output:
        config["pre_existing_data_dir"] = args.output
        config["data_dir"] = args.output
    # Point the repo-data dir at the download dir so bundled copies are skipped.
    if args.ignore_repo_data:
        config["repo_data_dir"] = config["data_dir"]
    download_features(args.group_names, dry_run=args.dry_run)
143 |
--------------------------------------------------------------------------------
/setup/download_data.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import absolute_import, division, print_function
3 | from ftplib import FTP
4 | import os
5 | import sys
6 |
7 | # Ref: https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
8 | # https://gist.github.com/aubricus/f91fb55dc6ba5557fbab06119420dd6a
def printProgressBar(iteration, total, prefix="", suffix="", decimals=1, bar_length=100):
    """
    Call in a loop to create terminal progress bar
    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : positive number of decimals in percent complete (Int)
        bar_length  - Optional  : character length of bar (Int)
    """
    # Guard against an empty work list: the original raised
    # ZeroDivisionError when total == 0.
    if total <= 0:
        return
    str_format = "{0:." + str(decimals) + "f}"
    percents = str_format.format(100 * (iteration / float(total)))
    filled_length = int(round(bar_length * iteration / float(total)))
    bar = "█" * filled_length + "-" * (bar_length - filled_length)

    # (Stray Python-2-era trailing comma removed; it made this a tuple no-op.)
    sys.stdout.write("\r%s |%s| %s%s %s" % (prefix, bar, percents, "%", suffix))

    if iteration == total:
        sys.stdout.write("\n")
    sys.stdout.flush()
31 |
32 |
def ftp_download(
    host="ftp.cgd.ucar.edu",
    directory="archive/aletheia-data/tutorial-data",
    filelist=None,
    output_dir=None,
):
    """Download files via anonymous FTP into ``output_dir``.

    @params:
        host        - Optional  : FTP host name (Str)
        directory   - Optional  : remote directory to download from (Str)
        filelist    - Optional  : file names to fetch; if falsy, fetch the
                                  whole remote directory listing (List)
        output_dir  - Optional  : local destination; default is the cwd (Str)

    Existing local files are skipped; progress is shown with
    printProgressBar.
    """
    # Default of None instead of a mutable [] (shared-default pitfall);
    # `if not filelist` keeps the original semantics for both cases.
    ftp = FTP()
    ftp.connect(host)
    ftp.login()
    try:
        ftp.cwd(directory)
        if not filelist:
            filenames = ftp.nlst()
        else:
            filenames = filelist

        if not output_dir:
            output_dir = os.getcwd()

        # exist_ok replaces the old blanket `except Exception: pass`, which
        # also hid real failures such as permission errors.
        os.makedirs(output_dir, exist_ok=True)

        # Initial call to print 0% progress
        total = len(filenames)
        print("Currently downloading tutorial data")
        printProgressBar(0, total, prefix="Progress:", suffix="", bar_length=50)
        for i, filename in enumerate(filenames):
            local_filename = os.path.join(output_dir, filename)
            if os.path.exists(local_filename):
                # Do nothing if file or symlink exists
                print("{} exists already".format(local_filename))
            else:
                with open(local_filename, "wb") as f:
                    cmd = "RETR {}".format(filename)
                    ftp.retrbinary(cmd, f.write)

                print(local_filename, " ")
            # Update Progress Bar
            printProgressBar(i + 1, total, prefix="Progress:", suffix="", bar_length=50)
    finally:
        # Close the control connection even if a download fails; the
        # original leaked it on any exception.
        ftp.quit()
77 |
78 |
# Smoke test: fetch one tiny file from the archive root when run directly.
if __name__ == "__main__":
    ftp_download(directory="archive/aletheia-data/", filelist=["test.sh"])
81 |
--------------------------------------------------------------------------------
/setup/environments/env-conda-base.yml:
--------------------------------------------------------------------------------
1 | name: base
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - dask
6 | - distributed==2.3.2
7 | - dask-labextension
8 | - ipywidgets
9 | - jupyter_contrib_nbextensions
10 | - jupyter_dashboards
11 | - jupyter-server-proxy
12 | - jupyterlab
13 | - nb_conda_kernels
14 | - nodejs
15 | - pip
16 | - python=3.7
17 | - widgetsnbextension
18 | - bash_kernel
19 |
--------------------------------------------------------------------------------
/setup/environments/env-py-matlab.yml:
--------------------------------------------------------------------------------
1 | name: py-matlab
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - python=3.5
7 | - pip
8 | - ipykernel
9 | - pip:
10 | - matlab_kernel
11 |
--------------------------------------------------------------------------------
/setup/environments/env-tutorial.yml:
--------------------------------------------------------------------------------
1 | name: python-tutorial
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - basemap
7 | - bokeh
8 | - bottleneck
9 | - cartopy
10 | - cf_units
11 | - cfgrib
12 | - cftime
13 | - cmocean
14 | - cython
15 | - dask
16 | - distributed==2.3.2
17 | - dask-jobqueue
18 | - dask-mpi
19 | - datashader
20 | - eofs
21 | - esmlab
22 | - esmpy
23 | - fastparquet
24 | - folium
25 | - geopandas
26 | - geoviews
27 | - globus-cli
28 | - graphviz
29 | - gsw # https://teos-10.github.io/GSW-Python/install.html
30 | - h5netcdf
31 | - h5py
32 | - holoviews
33 | - hvplot
34 | - intake
35 | - intake-esm
36 | - intake-xarray
37 | - ipyleaflet
38 | - ipywidgets
39 | - jupyter_contrib_nbextensions
40 | - jupyterlab
41 | - line_profiler
42 | - lz4
43 | - matplotlib
44 | - memory_profiler
45 | - metpy
46 | - mpas-analysis
47 | - mpi4py
48 | - nbdime
49 | - nc-time-axis
50 | - netcdf4
51 | - nodejs
52 | - numba
53 | - numcodecs
54 | - numpy
55 | - pandas
56 | - pandoc
57 | - pandocfilters
58 | - panel
59 | - pint
60 | - pip
61 | - psutil
62 | - psyplot
63 | - pyarrow
64 | - pynio
65 | - pyresample
66 | - pytest
67 | - python-blosc
68 | - python-graphviz
69 | - python=3.7
70 | - rasterio
71 | - satpy
72 | - scikit-image
73 | - scikit-learn
74 | - scipy
75 | - seaborn
76 | - seawater
77 | - siphon
78 | - snakeviz
79 | - sparse
80 | - statsmodels
81 | - tabulate
82 | - toolz
83 | - tqdm
84 | - watermark
85 | - wrf-python
86 | - xarray>=0.12
87 | - xesmf
88 | - xgcm
89 | - xhistogram
90 | - xrft
91 | - zarr
92 | - pip:
93 | - git+https://github.com/NCAR/pop-tools.git
94 | - ncar-jobqueue
95 | - salem
96 |
--------------------------------------------------------------------------------
/setup/jlab/jlab-ch:
--------------------------------------------------------------------------------
#!/bin/bash
# Launch a JupyterLab server inside a PBS batch job on Cheyenne, then
# print the ssh tunnel command needed to reach it from a local machine.
set -e

#-- set defaults
# expected environment variables
if [ -z "${JOB_ACCOUNT}" ]; then
  source /glade/u/apps/ch/opt/usr/bin/getacct.sh
fi
job_account=${JOB_ACCOUNT}

walltime=06:00:00
queue=share
memory=8GB
notebook_dir=${HOME}
jlab_port=8888
#dash_port=8787

NODES=1
CORES=1
LOAD_MATLAB=
MATLAB_VERSION=R2018a

#-- define usage
usage () {
  cat << EOF
Usage: launch dask
Possible options are:
 -a,--account: account
 -w,--walltime: walltime [default: ${walltime}]
 -q,--queue: queue [default: ${queue}]
 -m,--memory: memory request [default: ${memory}]
 -d,--directory: notebook directory
 -p,--port: [default: ${jlab_port}]
 --matlab: run matlab kernel
 --matlab-version: matlab version [default: ${MATLAB_VERSION}]
EOF
  exit
}

#-- parse input args
while [[ $# -gt 0 ]]; do
  key="${1}"
  case ${key} in
    -h|--help)
      usage ;;
    -a|--account)
      job_account="${2}"
      shift 2
      ;;
    -w|--walltime)
      walltime="${2}"
      shift 2
      ;;
    -m|--memory)
      memory="${2}"
      shift 2
      ;;
    -q|--queue)
      queue="${2}"
      shift 2
      ;;
    -d|--directory)
      notebook_dir="${2}"
      shift 2
      ;;
    --add-workers)
      add_workers=true
      shift
      ;;
    -p|--port)
      jlab_port="${2}"
      shift 2
      ;;
    --matlab)
      LOAD_MATLAB=1
      shift
      ;;
    --matlab-version)
      MATLAB_VERSION="${2}"
      shift 2
      ;;
    *)
      echo "ERROR: unknown argument: ${key}"
      usage
      ;;
  esac
done

#-- check inputs
if [ -z "${walltime}" ]; then
  echo "ERROR: walltime not set."
  exit 1
fi
if [ -z "${queue}" ]; then
  echo "ERROR: queue not set."
  exit 1
fi
if [[ -z ${WORKDIR} ]]; then
  WORKDIR=/glade/scratch/${USER}/tmp
fi

#-- make sure the working directory exists
if [[ ! -d ${WORKDIR} ]]; then
  mkdir -vp ${WORKDIR}
fi
LOGFILE=${WORKDIR}/jlab-ch.jlab-log.$(date +%Y%m%dT%H%M%S)


# Locate the Matlab Python API, preferring the user's own build and
# falling back to the shared one under ~mclong.
if [ ! -z "${LOAD_MATLAB}" ]; then
  source activate py-matlab
  python_version=$(python --version | awk '{print $2}' | awk -F. '{print $1 "." $2}')
  MATLAB_API_PATH=~/matlab-python/${MATLAB_VERSION}/lib/python${python_version}/site-packages
  if [ ! -d ${MATLAB_API_PATH} ]; then
    MATLAB_API_PATH=~mclong/matlab-python/${MATLAB_VERSION}/lib/python${python_version}/site-packages
    if [ ! -d ${MATLAB_API_PATH} ]; then
      echo "ERROR:"
      echo "  Matlab API for ${MATLAB_VERSION} not found;"
      echo "  build API using build-matlab-api script:"
      echo "  >>> ./build-matlab-api ${MATLAB_VERSION}"
      exit 1
    fi
  fi
fi


echo "Launching notebook server"
if [ ! -z "${LOAD_MATLAB}" ]; then
  echo " *Loading Matlab version: ${MATLAB_VERSION}*"
fi

echo " queue = ${queue}"
echo " account = ${job_account}"
echo " nodes = ${NODES}"
echo " ncpus = ${CORES}"
echo " memory = ${memory}"
echo " walltime = ${walltime}"
echo " port = ${jlab_port}"
echo

# Submit the job script via a heredoc; \$(hostname) is escaped so it
# expands on the compute node, not here.
s=$(qsub << EOF
#!/bin/bash
#PBS -N jlab-ch
#PBS -q ${queue}
#PBS -A ${job_account}
#PBS -l select=${NODES}:ncpus=${CORES}:mpiprocs=${CORES}:mem=${memory}
#PBS -l walltime=${walltime}
#PBS -o ${WORKDIR}/
#PBS -e ${WORKDIR}/
#PBS -j oe

# setup environment
module purge
unset LD_LIBRARY_PATH

export JUPYTER_RUNTIME_DIR=${WORKDIR}

# load matlab
if [ ! -z "${LOAD_MATLAB}" ]; then
  export PYTHONPATH=${MATLAB_API_PATH}
  module load matlab/${MATLAB_VERSION}
fi

# run lab server
cd ${notebook_dir}
jupyter lab --no-browser --notebook-dir ${notebook_dir} --ip=\$(hostname) >& ${LOGFILE}
EOF
)
# qsub prints "<id>.<server>"; keep just the numeric job id.
sjob=${s%%.*}
echo "submitted job: ${sjob} to queue ${queue}"

# Clean up the batch job if the user interrupts this script.
trap ctrl_c INT
function ctrl_c() {
  echo "killing ${sjob}"
  qdel ${sjob}
  exit 0
}


#-- wait for job to start
echo -n "waiting for job to run"
while [ 1 ]; do
  #-- get job status
  job_state=$(qstat -x -f ${sjob} | grep job_state | awk -F= '{print $2}')
  exec_host=$(qstat -x -f ${sjob} | grep exec_host | awk -F= '{print $2}')

  if [[ -n "${exec_host}" && ${job_state} =~ R ]]; then
    exec_host=$(echo -e "${exec_host%/*}" | tr -d '[:space:]')
    echo .
    echo "Job ID: ${sjob}"
    echo "Job host: ${exec_host}"
    break
  elif [[ ${job_state} =~ F ]]; then
    echo "Job failed."
    qstat -x -f ${sjob}
    exit 1
  else
    echo -n ..
  fi
  sleep 1
done

#-- wait until log file is there
echo "waiting for job log to appear: ${LOGFILE}"
while [ ! -f "${LOGFILE}" ]; do
  sleep 1
done

#-- wait until address shows up in log (give up after ~120 s)
ADDRESS=
ELAPSED=0
while [ -z "${ADDRESS}" ]; do
  ADDRESS=$(grep -e '^\[.*\]\s*http://.*:' ${LOGFILE} | head -n 1)
  PORT=$(echo ${ADDRESS#*http://} | awk -F':' '{print $2}' | awk -F'/' '{print $1}')
  sleep 1
  ((ELAPSED+=1))
  if [[ ${ELAPSED} -gt 120 ]]; then
    echo -e "something went wrong\n---"
    cat ${LOGFILE}
    echo "---"
    ctrl_c
  fi
done

echo
echo "----------------------------------------------------------------------"
echo "Execute on local machine:"
echo "ssh -N -L ${jlab_port}:${exec_host}:${PORT} ${USER}@$(hostname).ucar.edu" # -L ${dash_port}:${exec_host}:8787
echo
echo "Open a browser on your local machine and type in the address bar:"
echo "http://localhost:${jlab_port}"
echo "----------------------------------------------------------------------"
echo

echo "Job log: ${LOGFILE}"
tail -f ${LOGFILE}
236 |
--------------------------------------------------------------------------------
/setup/jlab/jlab-hobart:
--------------------------------------------------------------------------------
#!/bin/bash
# Launch a JupyterLab server inside a PBS batch job on Hobart, then
# print the ssh tunnel command needed to reach it from a local machine.
set -e

#-- set defaults
# expected environment variables
walltime=08:00:00
queue=medium
notebook_dir=${HOME}
jlab_port=8888
#dash_port=8787

NODES=1
PPN=48

#-- define usage
usage () {
  cat << EOF
Usage: launch dask
Possible options are:
 -w,--walltime: walltime [default: ${walltime}]
 -q,--queue: queue [default: ${queue}]
 -d,--directory: notebook directory
 -p,--port: [default: ${jlab_port}]
EOF
  exit
}

#-- parse input args
while [[ $# -gt 0 ]]; do
  key="${1}"
  case ${key} in
    -h|--help)
      usage ;;
    -w|--walltime)
      walltime="${2}"
      shift 2
      ;;
    -q|--queue)
      queue="${2}"
      shift 2
      ;;
    -d|--directory)
      notebook_dir="${2}"
      shift 2
      ;;
    -p|--port)
      jlab_port="${2}"
      shift 2
      ;;
    *)
      echo "ERROR: unknown argument: ${key}"
      usage
      ;;
  esac
done

#-- check inputs
if [ -z "${walltime}" ]; then
  echo "ERROR: walltime not set."
  exit 1
fi
if [ -z "${queue}" ]; then
  echo "ERROR: queue not set."
  exit 1
fi
if [[ -z ${WORKDIR} ]]; then
  WORKDIR=/scratch/cluster/${USER}/tmp
fi

#-- make sure the working directory exists
if [[ ! -d ${WORKDIR} ]]; then
  mkdir -vp ${WORKDIR}
fi
LOGFILE=${WORKDIR}/jlab-hobart.jlab-log.$(date +%Y%m%dT%H%M%S)



echo "Launching notebook server"

echo " queue = ${queue}"
echo " nodes = ${NODES}"
echo " walltime = ${walltime}"
echo " port = ${jlab_port}"
echo

# Submit the job script via a heredoc; \$(hostname) is escaped so it
# expands on the compute node, not here.
s=$(qsub << EOF
#!/bin/bash
#PBS -N jlab-hobart
#PBS -r n
#PBS -q ${queue}
#PBS -l nodes=${NODES}:ppn=${PPN}
#PBS -l walltime=${walltime}
#PBS -o ${WORKDIR}/
#PBS -e ${WORKDIR}/
#PBS -j oe

# setup environment

# run lab server
cd ${notebook_dir}
jupyter lab --no-browser --ip=\$(hostname) --notebook-dir ${notebook_dir} >& ${LOGFILE}
EOF
)
# qsub prints "<id>.<server>"; keep just the numeric job id.
sjob=${s%%.*}
echo "submitted job: ${sjob} to queue ${queue}"

# Clean up the batch job if the user interrupts this script.
trap ctrl_c INT
function ctrl_c() {
  echo "killing ${sjob}"
  qdel ${sjob}
  exit 0
}


#-- wait for job to start
echo -n "waiting for job to run"
while [ 1 ]; do
  #-- get job status
  job_state=$(qstat -f ${sjob} | grep job_state | awk -F= '{print $2}')
  exec_host=$(qstat -f ${sjob} | grep exec_host | awk -F= '{print $2}')

  if [[ -n "${exec_host}" && ${job_state} =~ R ]]; then
    exec_host=$(echo -e "${exec_host%/*}" | tr -d '[:space:]')
    exec_host=$(echo -e "${exec_host%%.*}")
    echo .
    echo "Job ID: ${sjob}"
    echo "Job host: ${exec_host}"
    break
  elif [[ ${job_state} =~ F ]]; then
    echo "Job failed."
    qstat -x -f ${sjob}
    exit 1
  else
    echo -n ..
  fi
  sleep 1
done

#-- wait until log file is there
echo "waiting for job log to appear: ${LOGFILE}"
while [ ! -f "${LOGFILE}" ]; do
  sleep 1
done

#-- wait until address shows up in log (give up after ~120 s)
ADDRESS=
ELAPSED=0
while [ -z "${ADDRESS}" ]; do
  ADDRESS=$(grep -e '^\[.*\]\s*http://.*:' ${LOGFILE} | head -n 1)
  PORT=$(echo ${ADDRESS#*http://} | awk -F':' '{print $2}' | awk -F'/' '{print $1}')
  sleep 1
  ((ELAPSED+=1))
  if [[ ${ELAPSED} -gt 120 ]]; then
    echo -e "something went wrong\n---"
    cat ${LOGFILE}
    echo "---"
    ctrl_c
  fi
done

echo
echo "----------------------------------------------------------------------"
echo "Execute on local machine:"
echo "ssh -N -L ${jlab_port}:${exec_host}:${PORT} ${USER}@$(hostname)" # -L ${dash_port}:${exec_host}:8787
echo
echo "Open a browser on your local machine and type in the address bar:"
echo "http://localhost:${jlab_port}"
echo "----------------------------------------------------------------------"
echo

echo "Job log: ${LOGFILE}"
tail -f ${LOGFILE}
173 |
--------------------------------------------------------------------------------
/site/pages/fall2019/agenda.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | [Fall 2019 NCAR Python Tutorial Homepage](/pages/fall2019/)
13 |
14 | ## Location
15 |
16 | The bulk of this tutorial will be held in the NCAR Mesa Laboratory's Main Seminar Room.
17 | Space for hands-on work and collaboration has been made available in the Mesa
18 | Laboratory's Damon Room and Library.
19 |
20 | ## Agenda
21 |
22 | #### DAY 1: Sept 18, 2019
23 |
24 | | TIME | TITLE |
25 | |-------|---------------------------------------|
26 | | 8:00 | _Questions & Setup_ |
27 | | 8:30 | Welcome! |
28 | | 9:00 | Getting Started with Jupyter & Python |
29 | | 10:00 | _Coffee Break_ |
30 | | 10:30 | Real World Example: OHC Part 1 |
31 | | 12:00 | _Lunch_ |
32 | | 13:00 | Real World Example: OHC Part 2 |
33 | | 14:00 | Real World Example: ENSO index |
34 | | 14:30 | Real World Example: MetPy |
35 | | 15:00 | _Break_ |
36 | | 15:30 | Sprint Pitches |
37 | | 16:00 | _Discussion, Questions & Planning_ |
38 | | 16:30 | _Happy Hour at Under the Sun_ |
39 |
40 |
41 |
42 | #### DAY 2: Sept 19, 2019
43 |
44 | Participants are encouraged to work on their Sprint projects all day, but are welcome
45 | to attend any of the below optional lectures. If you want to do more learning by
46 | doing yourself, you are encouraged to work on Sprint projects. If you want to see
47 | more details and go further in a more formal presentation, you are encouraged to
48 | attend lectures.
49 |
50 | | TIME | TITLE |
51 | |-------|------------------------------------|
52 | | 8:30 | Introduction to Python (Continued) |
53 | | 9:30 | Git & GitHub |
54 | | 10:30 | _Coffee Break_ |
55 | | 11:00 | Visualization |
56 | | 12:30 | _Lunch_ |
57 | | 13:30 | Object Oriented Programming |
58 | | 14:30 | Unit Testing |
59 | | 15:30 | _Break_ |
60 | | 16:00 | Python Package Structure |
61 |
62 |
63 |
64 | #### DAY 3: Sept 20, 2019
65 |
66 | Participants are encouraged to work on their Sprint projects in the morning but are
67 | welcome to attend any of the below optional lectures. If you want to do more learning
68 | by doing yourself, you are encouraged to work on Sprint projects. If you want to see
69 | more details and go further in a more formal presentation, you are encouraged to
70 | attend lectures.
71 |
72 | | TIME | TITLE |
73 | |-------|------------------------------------------------|
74 | | 9:00 | MetPy |
75 | | 10:00 | _Coffee Break_ |
76 | | 11:00 | More with Dask |
77 | | 12:30 | _Lunch_ |
78 | | 13:30 | Conda & Conda Forge |
79 | | 14:00 | Update on GeoCAT (new NCL) |
80 | | 14:30 | Sprint Project Presentations (Lightning Talks) |
81 | | 15:00 | _Break_ |
82 | | 15:30 | Discussion & Closing Comments |
83 |
--------------------------------------------------------------------------------
/site/pages/fall2019/index.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | ## Dates: 18-20 September 2019
13 | ## Location: NCAR Mesa Laboratory (Main Seminar Room)
14 |
15 | The agenda and additional details can be found in the following link.
16 |
17 | - [Agenda and Location](/pages/fall2019/agenda)
18 |
19 | We will be conducting Sprints as part of this tutorial. For
20 | more information on these, what they are and how you can participate,
21 | go to the following link.
22 |
23 | - [Sprints and Optional Lectures](/pages/fall2019/sprints)
24 |
25 | Prior to the tutorial, please follow the setup instructions
26 | in the following link.
27 |
28 | - [Setup Instructions](/pages/fall2019/instructions)
29 |
--------------------------------------------------------------------------------
/site/pages/fall2019/instructions.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | [Fall 2019 NCAR Python Tutorial Homepage](/pages/fall2019/)
13 |
14 | ## Setup
15 |
16 | This tutorial covers the installation and setup of a Python environment on:
17 |
18 | - Cheyenne
19 | - Casper
20 | - CGD's Hobart
21 | - Personal laptop/desktop with a UNIX-variant Operating System
22 |
23 | **NOTE:** For windows users, setup scripts provided in this repository don't
24 | work on Windows machines for the time being.
25 |
26 | ### Step 1: Clone NCAR Python Tutorial Repository
27 |
28 | Run the following command to clone this repo to your system (e.g. cheyenne,
29 | casper, your laptop, etc...):
30 |
31 | ```bash
32 | git clone https://github.com/NCAR/ncar-python-tutorial.git
33 | ```
34 |
35 | ### Step 2: Install Miniconda and Create Environments
36 |
37 | Change directory to the cloned repository:
38 |
39 | ```bash
40 | cd ncar-python-tutorial
41 | ```
42 |
43 | and run the `setup/configure` script:
44 |
45 | **NOTE**: Be prepared for the script to take up to 15 minutes to complete.
46 |
47 | ```bash
48 | ./setup/configure
49 | ```
50 |
51 | If you want more information on how to use the configure script, use the `--help` (or `-h`)
52 | option:
53 |
54 | ```bash
55 | $ ./setup/configure --help
56 | usage: configure [-h] [--clobber] [--download] [--prefix PREFIX]
57 |
58 | Set up tutorial environment.
59 |
60 | optional arguments:
61 | -h, --help show this help message and exit
62 | --clobber, -c Whether to clobber existing environment (default:
63 | False)
64 | --download, -d Download tutorial data without setting environment up
65 | (default: False)
66 | --prefix PREFIX, -p PREFIX
67 | Miniconda3 install location)
68 | ```
69 |
70 | Default values for ``--prefix`` argument are:
71 |
72 | - Personal laptop / Hobart: `$HOME/miniconda3`
73 | - Cheyenne or Casper: `/glade/work/$USER/miniconda3`
74 |
75 | **NOTE**: In case the default `prefix` is not appropriate for you (due to limited storage),
76 | feel free to specify a different miniconda install location. For instance, this install
77 | location may be a `project` workspace on a shared filesystem like GLADE or Hobart's filesystem.
78 |
79 | The `configure` script does the following:
80 |
81 | - Install `conda` package manager if it is unable to find an existing installation. Otherwise,
82 | it will update the `base` environment
83 | - Create or Update `python-tutorial` conda environment.
84 | - Download data if not on Cheyenne or Casper or Hobart. If on Cheyenne or Casper or Hobart,
85 | create soft-links to an existing/local data repository.
86 |
87 | ### Step 3: Close and re-open your current shell
88 |
89 | For changes to take effect, close and re-open your current shell.
90 |
91 | ### Step 4: Run the Setup Verification Script
92 |
93 | Check that *conda info* runs successfully:
94 |
95 | ```bash
96 | conda info
97 | ```
98 |
99 | Then, from the `ncar-python-tutorial` directory, activate `python-tutorial` conda environment:
100 |
101 | ```bash
102 | conda activate python-tutorial
103 | ```
104 |
105 | and run the setup verification script to confirm that everything is working as expected:
106 |
107 | ```bash
108 | cd ncar-python-tutorial
109 | ./setup/check_setup
110 | ```
111 |
112 | This step should print **"Everything looks good!"**.
113 |
114 | ----
115 |
116 | ## Launch Jupyter Lab
117 |
118 | ### 1. Cheyenne or DAV via JupyterHub (Recommended)
119 |
120 | **JupyterHub link:** [https://jupyterhub.ucar.edu/](https://jupyterhub.ucar.edu/)
121 |
122 | To use the Cheyenne or DAV compute nodes, we recommend using JupyterLab via NCAR's
123 | JupyterHub deployment.
124 |
125 | Open your preferred browser (Chrome, Firefox, Safari, etc...) on your ``local machine``,
126 | and head over to [https://jupyterhub.ucar.edu/](https://jupyterhub.ucar.edu/).
127 |
128 | **You will need to authenticate with either your _yubikey_ or your _DUO_ mobile app**
129 |
130 | ### 2. Cheyenne or DAV via SSH Tunneling
131 |
132 | In case you are having issues with jupyterhub.ucar.edu, we've provided utility scripts for
133 | launching JupyterLab on both Cheyenne and Casper via SSH Tunneling:
134 |
135 | ```bash
136 | conda activate base
137 | ./setup/jlab/jlab-ch # on Cheyenne
138 | ./setup/jlab/jlab-dav # on Casper
139 | ```
140 |
141 | ### 3. Hobart via SSH Tunneling
142 |
143 | For those interested in running JupyterLab on CGD's Hobart, you will need to use SSH
144 | tunneling script provided in `setup/jlab/jlab-hobart`:
145 |
146 | ```bash
147 | conda activate base
148 | ./setup/jlab/jlab-hobart
149 | ```
150 |
151 | For additional help, use the `--help` (or `-h`) option:
152 |
153 | ```bash
154 | $ ./setup/jlab/jlab-hobart --help
155 | Usage: launch dask
156 | Possible options are:
157 | -w,--walltime: walltime [default: 08:00:00]
158 | -q,--queue: queue [default: medium]
159 | -d,--directory: notebook directory
160 | -p,--port: [default: 8888]
161 | ```
162 |
163 | ### 4. Personal Laptop
164 |
165 | For those interested in running JupyterLab on their local machine, you can simply run the
166 | following command, and follow the printed instructions on the console:
167 |
168 | ```bash
169 | conda activate base
170 | jupyter lab
171 | ```
172 |
--------------------------------------------------------------------------------
/site/pages/fall2019/sprints.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | [Fall 2019 NCAR Python Tutorial Homepage](/pages/fall2019/)
13 |
14 | ## Sprints
15 |
16 | We understand that there is no way that we can thoroughly teach Python in just three days. It is for that
17 | reason that we are focusing the entire first day on _real world examples_. Even then, there is no way to
18 | teach Python in such a short time without having you (the student) actually learn by doing.
19 |
20 | In hackathons, participants propose Sprint ideas to the rest of the group. Sprints are short, well defined
21 | projects that multiple people can collaborate and make progress on during the hackathon. At the end of the
22 | hackathon, participants briefly describe what they've accomplished during the hackathon on their Sprints.
23 | Basically, Sprints are a great way of taking advantage of the expertise _in the room_ to actually get something
24 | done.
25 |
26 | We encourage all of you to come up with a Sprint idea for this tutorial, even if it is as simple as just
27 | converting an existing script that you have to Python. You do not have to pitch your Sprint ideas to the
28 | rest of the room, but we encourage you to do so. If you do want to pitch your Sprint idea, perhaps to get
29 | more participation from others in the room, please **add a slide with your Sprint proposal** to this Google
30 | Slides presentation:
31 |
32 | [https://docs.google.com/presentation/d/15jDEb7wvVlPE2b57C1fchDL25iM44fkJACBafzAZd_s/edit?usp=sharing](https://docs.google.com/presentation/d/15jDEb7wvVlPE2b57C1fchDL25iM44fkJACBafzAZd_s/edit?usp=sharing)
33 |
34 | ## Optional Lectures
35 |
36 | After the first day of the tutorial, when we walk through real world examples, we recognize that many of
37 | you may not feel ready to actually _do_ anything with Python. For that reason, there will be optional lectures
38 | in the Mesa Laboratory's Main Seminar Room that will give you the chance to go a little deeper into various
39 | topics. You do not have to attend any of these lectures, but feel free to come if you are interested.
40 |
41 | All of the presentations on Days 2 and 3 are optional.
42 |
--------------------------------------------------------------------------------
/site/posts/fall-2019-tutorial.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | Announcing the Fall 2019 NCAR Python Tutorial (September 18-20)!
13 |
14 |
15 |
16 | First, this tutorial will be workflow focused. That is, we will be teaching
17 | Python and some of the various Python tools through hands-on examples of real
18 | world workflows, such as the calculation of ocean heat content and ENSO index
19 | calculation. Our hope is to give you, on Day 1, examples of how to use these
20 | tools for real science, rather than delving into the tools themselves and
21 | hoping you can synthesize the information.
22 |
23 | Second, Days 2 and 3 (Sept 19-20) will give you the opportunity to collaborate
24 | with each other on short projects (Sprints) or to attend optional lectures.
25 | We hope that you will be willing to contribute a Sprint idea. Don’t be afraid
26 | to contribute your ideas! They could be anything from a desire to convert
27 | your favorite IDL (or Matlab or NCL) script to Python, to adding a new feature
28 | to your favorite Python package, to developing a new Python package with the
29 | help of other people in the room, to fixing that bug that’s been bothering you
30 | for months! If you don’t have an idea of what you’d like to Sprint on, you
31 | can collaborate with other people on their Sprint ideas or sit in on optional
32 | lectures to learn more about Python and existing Python tools.
33 |
34 | What we need from you:
35 |
36 | We need all of you to do a little preparation before you attend the tutorial.
37 | There are multiple ways of participating in this tutorial, including running
38 | the example notebooks on Cheyenne or Casper or on your personal laptop.
39 | However you plan on participating in the tutorial, you need to follow the
40 | instructions described in the README here:
41 |
42 | https://github.com/NCAR/ncar-python-tutorial.git
43 |
44 | Office Hours:
45 |
46 | For those needing help setting your environment up for the tutorial, we will
47 | be holding “Office Hours” from 9:00am to 12:00pm on Tuesday, Sept 17, in
48 | ML-490C (Tower B). Feel free to stop by if you have questions before the
49 | tutorial.
50 |
51 | More information can be found at the [Fall 2019 NCAR Python Tutorial Homepage](/pages/fall2019/)
52 |
--------------------------------------------------------------------------------
/site/themes/ncar/assets/css/custom.css:
--------------------------------------------------------------------------------
1 | body { /* site-wide base styling for the tutorial theme */
2 | font-family: "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;
3 | background: #c3d7ee; /* solid color shown behind/around the image set below */
4 | background-image: url(../img/background.png)!important; /* !important presumably overrides the parent bootstrap4 theme's background — verify */
5 | background-repeat: no-repeat;
6 | background-size: contain;
7 | background-position: center top;
8 | }
9 |
10 | a.u-url { /* links carrying the microformats u-url class */
11 | color: #212529;
12 | }
13 |
14 | .bg-dark { /* recolor Bootstrap's .bg-dark areas to NCAR teal */
15 | background-color: #00797c!important;
16 | }
17 |
--------------------------------------------------------------------------------
/site/themes/ncar/assets/img/background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NCAR/ncar-python-tutorial/54d536d40cfaf6f8990c58edb438286c19d32a67/site/themes/ncar/assets/img/background.png
--------------------------------------------------------------------------------
/site/themes/ncar/ncar.theme:
--------------------------------------------------------------------------------
1 | [Theme]
2 | engine = mako
3 | parent = bootstrap4
4 |
5 |
--------------------------------------------------------------------------------