├── CESM-Lab
│   ├── 2.1
│   │   ├── Dockerfile
│   │   └── Files
│   │       ├── cesm_aliases.ipy
│   │       ├── condarc.yml
│   │       ├── config_pes.xml
│   │       ├── configs
│   │       │   ├── cam
│   │       │   │   └── config_pes.xml
│   │       │   ├── cice
│   │       │   │   └── config_pes.xml
│   │       │   ├── cism
│   │       │   │   └── config_pes.xml
│   │       │   ├── clm
│   │       │   │   └── config_pes.xml
│   │       │   └── pop
│   │       │       └── config_pes.xml
│   │       ├── dask_config.yml
│   │       ├── env_batch.py
│   │       ├── environment.yml
│   │       ├── start
│   │       └── tutorials
│   │           └── CESM
│   │               └── Quickstart.ipynb
│   └── 2.2
│       ├── Dockerfile
│       └── Files
│           ├── cartopy
│           │   └── shapefiles
│           │       └── natural_earth
│           │           └── physical
│           │               ├── ne_110m_coastline.dbf
│           │               ├── ne_110m_coastline.shp
│           │               └── ne_110m_coastline.shx
│           ├── cesm.py
│           ├── cesm_aliases.ipy
│           ├── condarc.yml
│           ├── dask_config.yml
│           ├── environment.yml
│           ├── start
│           └── tutorials
│               ├── CESM
│               │   └── QuickStart.ipynb
│               ├── images
│               │   └── logo.png
│               └── index.ipynb
├── CESM-Postprocessing
│   ├── Dockerfile
│   └── Files
│       ├── activate_this.py
│       └── environment.yml
├── CESM
│   ├── 2.1
│   │   ├── Dockerfile
│   │   └── Files
│   │       ├── case_setup.py
│   │       ├── config_compilers.xml
│   │       ├── config_inputdata.xml
│   │       ├── config_machines.xml
│   │       └── ea002e626aee6bc6643e8ab5f998e5e4
│   └── 2.2
│       ├── Dockerfile
│       └── Files
│           ├── case_setup.py
│           ├── config_compilers.xml
│           ├── config_compsets.xml
│           ├── config_inputdata.xml
│           ├── config_machines.xml
│           ├── config_pes.xml
│           ├── configs
│           │   ├── cam
│           │   │   └── config_pes.xml
│           │   ├── cice
│           │   │   └── config_pes.xml
│           │   ├── cism
│           │   │   └── config_pes.xml
│           │   ├── clm
│           │   │   └── config_pes.xml
│           │   └── pop
│           │       └── config_pes.xml
│           ├── create_scam6_iop
│           ├── ea002e626aee6bc6643e8ab5f998e5e4
│           ├── machines.py
│           ├── micro_mg3_0.F90
│           └── scam_shell_commands
├── ESMF
│   ├── 8.0
│   │   └── Dockerfile
│   ├── API_changes
│   │   └── Dockerfile
│   ├── copyright_update
│   │   └── Dockerfile
│   ├── dev
│   │   └── Dockerfile
│   ├── doc
│   │   ├── esmf-doc-base
│   │   │   └── Dockerfile
│   │   ├── esmf-doc
│   │   │   └── Dockerfile
│   │   └── esmpy-doc
│   │       └── Dockerfile
│   ├── nuopc-app-prototypes
│   │   ├── Dockerfile
│   │   ├── docker-entrypoint.sh
│   │   └── meta_test.py
│   └── test_coverage
│       └── Dockerfile
├── README.md
└── base
    └── centos8
        └── Dockerfile
/CESM-Lab/2.1/Dockerfile:
--------------------------------------------------------------------------------
1 | ##################################
2 | # ESCOMP/CESM-Lab/2.1 Dockerfile #
3 | ##################################
4 |
5 | # This builds on top of the CESM 2.1 container, and adds the Jupyter Lab environment
6 | FROM escomp/cesm-2.1
7 |
8 | # Set up the Conda version - using the pangeo/base-image as a foundation here:
9 | ENV CONDA_VERSION=4.8.3-4 \
10 | CONDA_ENV=notebook \
11 | NB_USER=user \
12 | NB_GROUP=ncar \
13 | NB_UID=1000 \
14 | SHELL=/bin/bash \
15 | LANG=C.UTF-8 \
16 | LC_ALL=C.UTF-8 \
17 | CONDA_DIR=/srv/conda
18 |
19 | # Additional environment setup that depends on the above:
20 | ENV NB_PYTHON_PREFIX=${CONDA_DIR}/envs/${CONDA_ENV} \
21 | DASK_ROOT_CONFIG=${CONDA_DIR}/etc \
22 | HOME=/home/${NB_USER} \
23 | PATH=${CONDA_DIR}/bin:${PATH}
24 |
25 | # Add the Conda init and set permissions on the directory:
26 | # (Could clean this up, and push changes back to Pangeo -- eg, /srv is hardcoded)
27 | RUN sudo /bin/bash -c "echo '. ${CONDA_DIR}/etc/profile.d/conda.sh ; conda activate ${CONDA_ENV}' > /etc/profile.d/init_conda.sh" && \
28 | sudo chown -R ${NB_USER}:${NB_GROUP} /srv
29 |
30 | # Install miniforge:
31 | RUN URL="https://github.com/conda-forge/miniforge/releases/download/${CONDA_VERSION}/Miniforge3-${CONDA_VERSION}-Linux-x86_64.sh" && \
32 | wget --quiet ${URL} -O miniconda.sh && \
33 | /bin/bash miniconda.sh -u -b -p ${CONDA_DIR} && \
34 | rm miniconda.sh && \
35 | conda clean -afy && \
36 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
37 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete
38 |
39 | COPY --chown=${NB_USER}:${NB_GROUP} Files/*yml /srv/
40 |
41 | RUN mv /srv/condarc.yml ${CONDA_DIR}/.condarc && \
42 | mv /srv/dask_config.yml ${CONDA_DIR}/etc/dask.yml
43 |
44 | RUN sudo yum install -y graphviz
45 |
46 | RUN conda env create --name ${CONDA_ENV} -f /srv/environment.yml && \
47 | conda clean -yaf && \
48 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
49 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
50 | find ${CONDA_DIR} -follow -type f -name '*.js.map' -delete && \
51 | find ${NB_PYTHON_PREFIX}/lib/python*/site-packages/bokeh/server/static -follow -type f -name '*.js' ! -name '*.min.js' -delete
52 |
53 | RUN pip --no-cache-dir install rechunker
54 |
55 |
56 | RUN export PATH=${NB_PYTHON_PREFIX}/bin:${PATH} && \
57 | jupyter labextension install --clean \
58 | @jupyter-widgets/jupyterlab-manager \
59 | @jupyterlab/geojson-extension \
60 | dask-labextension \
61 | @pyviz/jupyterlab_pyviz \
62 | jupyter-leaflet && \
63 | sudo rm -rf /tmp/* && \
64 | rm -rf ${HOME}/.cache ${HOME}/.npm ${HOME}/.yarn && \
65 | rm -rf ${NB_PYTHON_PREFIX}/share/jupyter/lab/staging && \
66 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
67 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
68 | find ${CONDA_DIR} -follow -type f -name '*.js.map' -delete
69 |
70 | RUN sudo yum install -y xmlstarlet
71 |
72 | COPY Files/start /srv
73 | RUN sudo chmod +x /srv/start
74 |
75 | COPY Files/cesm_aliases.ipy /etc/ipython/
76 | RUN sudo /bin/bash -c 'echo "c.InteractiveShellApp.exec_files = [ \"/etc/ipython/cesm_aliases.ipy\" ] " >> /etc/ipython/ipython_config.py'
77 |
78 | COPY Files/tutorials /srv/tutorials
79 |
80 | COPY Files/config_pes.xml /opt/ncar/cesm2/cime_config/
81 | COPY Files/configs/cam/config_pes.xml /opt/ncar/cesm2/components/cam/cime_config/
82 | COPY Files/configs/cice/config_pes.xml /opt/ncar/cesm2/components/cice/cime_config/
83 | COPY Files/configs/cism/config_pes.xml /opt/ncar/cesm2/components/cism/cime_config/
84 | COPY Files/configs/pop/config_pes.xml /opt/ncar/cesm2/components/pop/cime_config/
85 | COPY Files/configs/clm/config_pes.xml /opt/ncar/cesm2/components/clm/cime_config/
86 |
87 | COPY Files/env_batch.py /opt/ncar/cesm2/cime/scripts/lib/CIME/XML/env_batch.py
88 |
89 | ENV CESMROOT=/opt/ncar/cesm2
90 |
91 |
92 | EXPOSE 8888
93 | USER user
94 | WORKDIR /home/user
95 | ENV SHELL /bin/bash
96 | ENTRYPOINT ["/srv/start"]
97 | #ENTRYPOINT ["/opt/conda/bin/jupyter", "lab", "--ip=0.0.0.0", "--no-browser", "--NotebookApp.token=''", "--NotebookApp.password=''"]
98 |
--------------------------------------------------------------------------------
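A quick way to confirm the pieces this Dockerfile wires together (the CESM source tree, the 'notebook' conda environment, and the CIME scripts) is to run a few checks from Python inside the built container. This is only a verification sketch, not a file shipped in the image:

import os
import shutil

# Paths come from the Dockerfile above: ENV CESMROOT, CONDA_DIR=/srv/conda, CONDA_ENV=notebook
assert os.environ.get("CESMROOT") == "/opt/ncar/cesm2"
assert os.path.isdir("/srv/conda/envs/notebook")

# create_newcase is aliased to ${CESMROOT}/cime/scripts/create_newcase in cesm_aliases.ipy
cime_scripts = os.path.join(os.environ["CESMROOT"], "cime", "scripts")
assert shutil.which("create_newcase", path=cime_scripts) is not None
print("CESM-Lab 2.1 environment looks intact")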
/CESM-Lab/2.1/Files/cesm_aliases.ipy:
--------------------------------------------------------------------------------
1 | %alias xmlchange ./xmlchange %l
2 | %alias xmlquery ./xmlquery %l
3 | %alias create_newcase ${CESMROOT}/cime/scripts/create_newcase %l
4 | %alias query_config ${CESMROOT}/cime/scripts/query_config %l
5 | %alias case.setup ./case.setup %l
6 | %alias case.build ./case.build %l
7 | %alias case.submit ./case.submit %l
8 | %alias preview_run ./preview_run %l
9 | %alias pelayout ./pelayout %l
10 |
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/condarc.yml:
--------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | - defaults
4 | channel_priority: strict
5 | show_channel_urls: true
6 | # NOTE: defaults only work for `conda create -f`, not `conda env create -f`!
7 | # https://github.com/conda/conda/issues/9580
8 | #create_default_packages:
9 | # - pangeo-notebook=0.0.2
10 | # - ipykernel
11 | #pinned_packages:
12 | # - pangeo-notebook=0.0.2
13 |
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/configs/cice/config_pes.xml:
--------------------------------------------------------------------------------
[Markup lost in extraction: the original config_pes.xml is an XML file of per-grid PE-layout blocks (ntasks / nthrds / rootpe values for each model component), but only the bare element values survived here, so the block is not reproduced. See CESM-Lab/2.1/Files/configs/cice/config_pes.xml in the repository for the intact file.]
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/configs/cism/config_pes.xml:
--------------------------------------------------------------------------------
[Markup lost in extraction: the original config_pes.xml is an XML file of per-grid PE-layout blocks (ntasks / nthrds / rootpe values for each model component), but only the bare element values survived here, so the block is not reproduced. See CESM-Lab/2.1/Files/configs/cism/config_pes.xml in the repository for the intact file.]
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/configs/clm/config_pes.xml:
--------------------------------------------------------------------------------
[Markup lost in extraction: the original config_pes.xml is an XML file of per-grid PE-layout blocks (ntasks / nthrds / rootpe values for each model component), but only the bare element values survived here, so the block is not reproduced. See CESM-Lab/2.1/Files/configs/clm/config_pes.xml in the repository for the intact file.]
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/dask_config.yml:
--------------------------------------------------------------------------------
1 | distributed:
2 | version: 2
3 |
4 | dashboard:
5 | link: /user/{JUPYTERHUB_USER}/proxy/{port}/status
6 |
7 | scheduler:
8 | idle-timeout: 3600s
9 |
10 | # uncomment to force new worker pods after 2 hrs
11 | # worker:
12 | # lifetime:
13 | # duration: "2 hours"
14 | # stagger: "10 s"
15 | # restart: true
16 |
17 | admin:
18 | tick:
19 | limit: 5s
20 |
21 | logging:
22 | distributed: warning
23 | bokeh: critical
24 | tornado: critical
25 | tornado.application: error
26 |
27 | kubernetes:
28 | name: dask-{JUPYTERHUB_USER}-{uuid}
29 | worker-template:
30 | spec:
31 | serviceAccount: daskkubernetes
32 | restartPolicy: Never
33 | containers:
34 | - name: dask-worker
35 | image: ${JUPYTER_IMAGE_SPEC}
36 | args:
37 | - dask-worker
38 | - --nthreads
39 | - '2'
40 | - --no-dashboard
41 | - --memory-limit
42 | - 7GB
43 | - --death-timeout
44 | - '60'
45 | resources:
46 | limits:
47 | cpu: "1.75"
48 | memory: 7G
49 | requests:
50 | cpu: 1
51 | memory: 7G
52 |
53 | # Specific to each hub
54 | #gateway:
55 | # address: https://staging.hub.pangeo.io/services/dask-gateway/
56 | # proxy-address: tls://35.225.202.35:8786
57 |
58 | labextension:
59 | factory:
60 | module: dask_gateway
61 | class: GatewayCluster
62 | args: []
63 | kwargs: {}
64 |
--------------------------------------------------------------------------------
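The Dockerfile above installs this file as ${CONDA_DIR}/etc/dask.yml and points DASK_ROOT_CONFIG at that directory, which Dask scans for YAML configuration files. A minimal sketch (not part of the image) to confirm the settings are picked up from a notebook:

import dask
import dask.distributed  # importing registers the 'distributed' configuration namespace

# Both keys are defined in the dask.yml above
print(dask.config.get("distributed.scheduler.idle-timeout"))  # expected: '3600s'
print(dask.config.get("distributed.dashboard.link"))          # the JupyterHub proxy link template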
/CESM-Lab/2.1/Files/environment.yml:
--------------------------------------------------------------------------------
1 | name: pangeo
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.7*
6 | - pangeo-notebook=2020.08.02
7 | - pip=20
8 | - awscli
9 | - bottleneck
10 | - boto3
11 | - cartopy
12 | - ciso
13 | - dask-ml
14 | - datashader>=0.11
15 | - descartes
16 | - eofs
17 | - fastjmd95
18 | - fsspec
19 | - gcsfs
20 | - geopandas
21 | - geoviews-core
22 | - gsw
23 | - h5netcdf
24 | - h5py
25 | - holoviews
26 | - hvplot
27 | - intake
28 | - intake-esm
29 | - intake-geopandas
30 | - intake-stac
31 | - intake-xarray
32 | - ipyleaflet
33 | - ipywidgets
34 | - jupyter-panel-proxy
35 | - lz4
36 | - matplotlib-base
37 | - metpy
38 | - nb_conda_kernels
39 | - nbstripout
40 | - nc-time-axis
41 | - netcdf4
42 | - nomkl
43 | - numcodecs
44 | - numpy
45 | - pandas
46 | - panel
47 | - prefect
48 | - pyarrow
49 | - pydap
50 | - pystac
51 | - python-blosc
52 | - python-gist
53 | - rasterio
54 | - rio-cogeo
55 | - rioxarray
56 | - s3fs
57 | - sat-search
58 | - sat-stac
59 | - satpy
60 | - scikit-image
61 | - scikit-learn
62 | - scipy
63 | - sparse
64 | - tiledb-py
65 | - xarray
66 | - xarray-spatial
67 | - xcape
68 | - xesmf
69 | - xgcm
70 | - xhistogram
71 | - xlayers
72 | - xmitgcm
73 | - xpublish
74 | - xrft
75 | - zarr
76 |
--------------------------------------------------------------------------------
/CESM-Lab/2.1/Files/start:
--------------------------------------------------------------------------------
1 | #!/bin/bash -l
2 |
3 | # ==== ONLY EDIT WITHIN THIS BLOCK =====
4 |
5 | export PANGEO_ENV="notebook"
6 | if ! [[ -z "${PANGEO_SCRATCH_PREFIX}" ]] && ! [[ -z "${JUPYTERHUB_USER}" ]]; then
7 | export PANGEO_SCRATCH="${PANGEO_SCRATCH_PREFIX}/${JUPYTERHUB_USER}/"
8 | fi
9 |
10 | if [[ ! -f /home/user/.tutorial_initialized ]]; then
11 | cp -rp /srv/tutorials /home/user
12 | touch /home/user/.tutorial_initialized
13 | fi
14 |
15 | # Change the CPU counts:
16 | CPUS=$(grep -c processor /proc/cpuinfo)
17 | sudo xmlstarlet ed --inplace -u "config_machines/machine[@MACH='container']/MAX_TASKS_PER_NODE" -v ${CPUS} ${CESMROOT}/cime/config/cesm/machines/config_machines.xml
18 | sudo xmlstarlet ed --inplace -u "config_machines/machine[@MACH='container']/MAX_MPITASKS_PER_NODE" -v ${CPUS} ${CESMROOT}/cime/config/cesm/machines/config_machines.xml
19 |
20 | # ==== ONLY EDIT WITHIN THIS BLOCK =====
21 |
22 | exec "$@"
23 |
--------------------------------------------------------------------------------
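The two xmlstarlet calls above rewrite MAX_TASKS_PER_NODE and MAX_MPITASKS_PER_NODE for the 'container' machine each time the container starts, so CIME sees however many CPUs the host actually provides. For reference, a rough Python equivalent of the same edit (a sketch only; the shipped script uses xmlstarlet, and needs sudo because the file is root-owned):

import os
import xml.etree.ElementTree as ET

cfg = os.path.join(os.environ["CESMROOT"], "cime", "config", "cesm", "machines", "config_machines.xml")
cpus = str(os.cpu_count())

tree = ET.parse(cfg)
machine = tree.getroot().find("machine[@MACH='container']")  # same XPath the script uses
for tag in ("MAX_TASKS_PER_NODE", "MAX_MPITASKS_PER_NODE"):
    machine.find(tag).text = cpus
tree.write(cfg)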
/CESM-Lab/2.1/Files/tutorials/CESM/Quickstart.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "
CESM2 Quick Start Guide
\n",
8 | "\n",
9 | "This is a very quick introduction to running CESM. It will guide you through the basics of creating a case, modifying simple aspects of that case, building it, running it, and visualizing the outputs. Each step will also provide links to the full CESM documentation and additional examples."
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | " 1. Creating a CESM case
\n",
17 | "\n",
18 | "CESM experiments start with creating a 'case' - a configuration of the model with a specific set of component models (eg, CAM for the atmosphere model, POP for the ocean model, etc), at a particular resolution, and with a set of options defined in XML files. Let's start by using the 'create_newcase' command to set up an experiment with the following details:
\n",
19 | "\n",
20 | " - compset: QPC4
\n",
21 | " - resolution: f45_f45_mg37
\n",
22 | "
\n",
23 | "\n",
24 | "This is a 'simple model' - an aquaplanet configuration at 4.5-degree resolution, using CAM4 physics. This is much less computationally intensive than a fully coupled run, so it works well for this tutorial, only requiring ~0.6GB of RAM. It will also need to download some input files, totaling approximately 127MB of size.\n",
25 | "\n",
26 | "The command to create this configuration is given below, using 'tutorial_aquaplanet' as the casename. Note that given the size and nature of this run, it's not a scientifically supported configuration, but it's still useful for learning how to run the model.\n",
27 | "\n",
28 | "Run the cell to issue the command:"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "create_newcase --case ~/aquaplanet_test_f45 --compset QPC4 --res f45_f45_mg37 --run-unsupported"
38 | ]
39 | },
40 | {
41 | "cell_type": "markdown",
42 | "metadata": {},
43 | "source": [
44 | "
\n",
45 | "
\n",
46 | "Great! You've created the default version of your first case. Before we move to the next section, if you want additional information on the create_newcase command or the compsets and resolutions, you can click on each section below for details and links.
\n",
47 | "
\n",
48 | "\n",
49 | "***\n",
50 | "\n",
51 | "Additional Information:\n",
52 | "\n",
53 | " create_newcase
\n",
54 | " \n",
55 | "
\n",
56 | "The 'create_newcase' command only requires three options - the case, compset and resolution. However, it supports a large number of options for more advanced users. To see a summary, you can run the command by itself:\n",
57 | "
\n",
58 | "\n",
59 | "
\n",
60 | "create_newcase\n",
61 | "usage: create_newcase [-h] [-d] [-v] [-s] --case CASENAME --compset COMPSET\n",
62 | " --res GRID [--machine MACHINE] [--compiler COMPILER]\n",
63 | " [--multi-driver] [--ninst NINST] [--mpilib MPILIB]\n",
64 | " [--project PROJECT] [--pecount PECOUNT]\n",
65 | " [--user-mods-dir USER_MODS_DIR] [--pesfile PESFILE]\n",
66 | " [--gridfile GRIDFILE] [--workflow WORKFLOW]\n",
67 | " [--srcroot SRCROOT] [--output-root OUTPUT_ROOT]\n",
68 | " [--run-unsupported] [--walltime WALLTIME] [-q QUEUE]\n",
69 | " [--handle-preexisting-dirs {a,r,u}] [-i INPUT_DIR]\n",
70 | "
\n",
71 | "
\n",
72 | "Additionally, you can get detailed help via 'create_newcase --help', and you can click here for the CESM documentation on the command\n",
73 | "
\n",
74 | "
\n",
75 | " \n",
76 | "\n",
77 | " CESM Component Sets ('compsets')
\n",
78 | " Not yet filled out; maybe this is too much information here?\n",
79 | " \n",
80 | "\n",
81 | " CESM Resolutions / Grids
\n",
82 | " Also not filled out yet...\n",
83 | " \n",
84 | "\n",
85 | "***\n",
86 | "\n",
87 | "
"
88 | ]
89 | },
90 | {
91 | "cell_type": "markdown",
92 | "metadata": {},
93 | "source": [
94 | " 2. Modifying a CESM case
\n",
95 | "\n",
96 | "Now let's make a simple modification to the case via two easy steps:\n",
97 | "\n",
98 | "1. Change to our new case directory\n",
99 | "2. In that directory, change the duration of the run (in model-time)\n",
100 | "\n",
101 | "\n",
102 | "These changes can be made via modifying the XML files that control CESM's behavior. We could do this by hand, but the preferred way of doing it is to use the 'xmlchange' command in the case directory. By default, newly configured cases are set up to run for 5 model days - since we're just experimenting, we'll change it to 3 model days instead, for even faster runs. Run the following cells to execute the two commands:\n",
103 | "
\n",
104 | "
"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "metadata": {},
111 | "outputs": [],
112 | "source": [
113 | "%cd ~/aquaplanet_test_f45 "
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": null,
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "xmlchange STOP_OPTION=ndays,STOP_N=3"
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {},
128 | "source": [
129 | "Since the default STOP_OPTION is already 'ndays', it wasn't strictly necessary to include that in the command, but by being explicit we avoid any confusion later if we had changed it previously to 'nhours' or 'nyears'. \n",
130 | "\n",
131 | "***\n",
132 | "\n",
133 | "Additional information:\n",
134 | "\n",
135 | "(I haven't put anything here yet; this is just for demo to give people a sense of things. I want to keep the basic instructions light, I think!)\n",
136 | "\n",
137 | "***"
138 | ]
139 | },
140 | {
141 | "cell_type": "markdown",
142 | "metadata": {},
143 | "source": [
144 | "We don't see any output by 'xmlchange', but we can use another tool, 'xmlquery', to double-check that we have the new values:"
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": null,
150 | "metadata": {},
151 | "outputs": [],
152 | "source": [
153 | "xmlquery STOP_OPTION,STOP_N"
154 | ]
155 | },
156 | {
157 | "cell_type": "markdown",
158 | "metadata": {},
159 | "source": [
160 | "\n",
161 | "***\n",
162 | "\n",
163 | "Just putting the other commands here for now, no write-up yet:\n",
164 | "\n"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": null,
170 | "metadata": {},
171 | "outputs": [],
172 | "source": [
173 | "case.setup"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "metadata": {},
180 | "outputs": [],
181 | "source": [
182 | "case.build"
183 | ]
184 | },
185 | {
186 | "cell_type": "code",
187 | "execution_count": null,
188 | "metadata": {},
189 | "outputs": [],
190 | "source": [
191 | "case.submit"
192 | ]
193 | },
194 | {
195 | "cell_type": "markdown",
196 | "metadata": {},
197 | "source": [
198 | "\n",
199 | "***\n",
200 | "\n",
201 | "\n",
202 | "TO DO:
\n",
203 | "\n",
204 | "1. Add something about visualizing the results or doing statistics on the output files\n",
205 | "2. Add something about performance\n",
206 | "3. Maybe simplify / 'interactive-ize' the output from commands, especially the running?\n",
207 | "4. Investigate 'nhfil' warning above, or turn off DOUT_S?\n",
208 | "5. Emacs / Vim within Jupyter (eg, without going to bash shell in another window) - possible?\n",
209 | "6. CIME_OUTPUT_ROOT - if we put things in a subdirectory, that still points to home. Can we put bld/run in case directories easily?\n",
210 | "7. Fix env_batch.py for non-batch jobs like this (gives a 'ERROR: No result from jobs' message, currently disabled)\n"
211 | ]
212 | }
213 | ],
214 | "metadata": {
215 | "kernelspec": {
216 | "display_name": "Python [conda env:notebook] *",
217 | "language": "python",
218 | "name": "conda-env-notebook-py"
219 | },
220 | "language_info": {
221 | "codemirror_mode": {
222 | "name": "ipython",
223 | "version": 3
224 | },
225 | "file_extension": ".py",
226 | "mimetype": "text/x-python",
227 | "name": "python",
228 | "nbconvert_exporter": "python",
229 | "pygments_lexer": "ipython3",
230 | "version": "3.7.8"
231 | }
232 | },
233 | "nbformat": 4,
234 | "nbformat_minor": 4
235 | }
236 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Dockerfile:
--------------------------------------------------------------------------------
1 | ##################################
2 | # ESCOMP/CESM-Lab/2.2 Dockerfile #
3 | ##################################
4 |
5 | # This builds on top of the CESM 2.2 container, and adds the Jupyter Lab environment
6 | FROM escomp/cesm-2.2
7 |
8 | # Install software needed for Pangeo
9 | RUN sudo yum install -y graphviz libnsl libspatialite libspatialite-devel xmlstarlet
10 |
11 | # Set up the Conda version - using the pangeo/base-image as a foundation here:
12 | ENV CONDA_VERSION=4.8.5-1 \
13 | CONDA_ENV=default \
14 | NB_USER=user \
15 | NB_GROUP=escomp \
16 | NB_UID=1000 \
17 | SHELL=/bin/bash \
18 | CONDA_DIR=/srv/conda
19 |
20 | # Additional environment setup that depends on the above:
21 | ENV NB_PYTHON_PREFIX=${CONDA_DIR}/envs/${CONDA_ENV} \
22 | DASK_ROOT_CONFIG=${CONDA_DIR}/etc \
23 | HOME=/home/${NB_USER} \
24 | PATH=${CONDA_DIR}/bin:${PATH}
25 |
26 | #RUN sudo yum -y upgrade
27 |
28 | # Add the Conda init and set permissions on the directory:
29 | # (Could clean this up, and push changes back to Pangeo -- eg, /srv is hardcoded)
30 | RUN sudo /bin/bash -c "echo '. ${CONDA_DIR}/etc/profile.d/conda.sh ; conda activate ${CONDA_ENV}' > /etc/profile.d/init_conda.sh" && \
31 | sudo chown -R ${NB_USER}:${NB_GROUP} /srv
32 |
33 | # Install miniforge:
34 | RUN URL="https://github.com/conda-forge/miniforge/releases/download/${CONDA_VERSION}/Miniforge3-${CONDA_VERSION}-Linux-x86_64.sh" && \
35 | wget --quiet ${URL} -O miniconda.sh && \
36 | /bin/bash miniconda.sh -u -b -p ${CONDA_DIR} && \
37 | rm miniconda.sh && \
38 | conda clean -afy && \
39 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
40 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete
41 |
42 | COPY --chown=${NB_USER}:${NB_GROUP} Files/*yml /srv/
43 |
44 | RUN mv /srv/condarc.yml ${CONDA_DIR}/.condarc && \
45 | mv /srv/dask_config.yml ${CONDA_DIR}/etc/dask.yml
46 |
47 |
48 | RUN conda env create --name ${CONDA_ENV} -f /srv/environment.yml && \
49 | conda clean -yaf && \
50 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
51 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
52 | find ${CONDA_DIR} -follow -type f -name '*.js.map' -delete && \
53 | find ${NB_PYTHON_PREFIX}/lib/python*/site-packages/bokeh/server/static -follow -type f -name '*.js' ! -name '*.min.js' -delete
54 |
55 | RUN export PATH=${NB_PYTHON_PREFIX}/bin:${PATH} && \
56 | jupyter labextension install --clean \
57 | @jupyter-widgets/jupyterlab-manager \
58 | @jupyterlab/geojson-extension \
59 | dask-labextension \
60 | @pyviz/jupyterlab_pyviz \
61 | jupyter-leaflet && \
62 | sudo rm -rf /tmp/* && \
63 | rm -rf ${HOME}/.cache ${HOME}/.npm ${HOME}/.yarn && \
64 | rm -rf ${NB_PYTHON_PREFIX}/share/jupyter/lab/staging && \
65 | find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
66 | find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
67 | find ${CONDA_DIR} -follow -type f -name '*.js.map' -delete
68 |
69 |
70 | COPY Files/start /srv
71 | RUN sudo chmod +x /srv/start
72 |
73 | COPY Files/cesm_aliases.ipy /etc/ipython/
74 | RUN sudo /bin/bash -c 'echo "c.InteractiveShellApp.exec_files = [ \"/etc/ipython/cesm_aliases.ipy\" ] " >> /etc/ipython/ipython_config.py'
75 |
76 | ADD https://api.github.com/repos/NCAR/CESM-Lab-Tutorial/git/refs/heads/master version.json
77 | #RUN git clone https://github.com/NCAR/CESM-Lab-Tutorial.git /srv/tutorials
78 | COPY Files/tutorials /srv/tutorials
79 | COPY Files/cartopy/shapefiles /srv/conda/envs/default/lib/python3.7/site-packages/cartopy/data/shapefiles/
80 | COPY Files/cesm.py /srv/conda/envs/default/lib/python3.7/site-packages/
81 |
82 |
83 | EXPOSE 8888
84 | USER user
85 | WORKDIR /home/user
86 | ENV SHELL /bin/bash
87 | ENTRYPOINT ["/srv/start"]
88 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ESCOMP/ESCOMP-Containers/c48f872d5b4bbdc90ca9e1159d6c276d87407e09/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.dbf
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ESCOMP/ESCOMP-Containers/c48f872d5b4bbdc90ca9e1159d6c276d87407e09/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.shp
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ESCOMP/ESCOMP-Containers/c48f872d5b4bbdc90ca9e1159d6c276d87407e09/CESM-Lab/2.2/Files/cartopy/shapefiles/natural_earth/physical/ne_110m_coastline.shx
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/cesm.py:
--------------------------------------------------------------------------------
1 | # CESM module for python - includes toolkit (later) and tutorial stuff
2 | import xarray as xr
3 | import numpy as np
4 | import matplotlib.pyplot as plt
5 | import cartopy.crs as ccrs
6 | from cartopy.util import add_cyclic_point
7 |
8 | import cartopy
9 | cartopy.config['pre_existing_data_dir'] = '/srv/conda/envs/default/lib/python3.7/site-packages/cartopy/data'
10 |
11 | import os
12 | import glob
13 |
14 | def QuickViewATM(case, variable):
15 | # Currently supported variables:
16 | supported_3D = ( 'TS', )
17 | supported_4D = ( 'T', 'U', 'V', )
18 |
19 | # Check if we're using a supported variable:
20 | supported = supported_3D + supported_4D
21 | if variable not in supported:
22 | print('Variable is not supported - must be one of: ', supported)
23 | return
24 |
25 | # Get the list of all hist files for the atmosphere:
26 | files = glob.glob('/home/user/archive/' + case + '/atm/hist/*.nc')
27 | latest = max(files , key = os.path.getctime)
28 | dataset = xr.open_dataset(latest)
29 |
30 | if variable in supported_3D:
31 | vardata = dataset[variable][0,:,:] # Fix the time later
32 | else:
33 | vardata = dataset[variable][0,25,:,:] # Fix the level later
34 |
35 | # Set figure size
36 | fig = plt.figure(figsize=(11,8.5))
37 | ax = plt.axes(projection=ccrs.Robinson())
38 |
39 | # Add cyclic point to data (to get rid of gap at dateline)
40 | data, lons = add_cyclic_point(vardata, coord=vardata['lon'])
41 |
42 | # Define contour levels
43 | clevs=np.arange(np.min(data),np.max(data),1)
44 |
45 | # Make a filled contour plot
46 | cs=ax.contourf(lons, vardata['lat'], data,clevs, transform = ccrs.PlateCarree(), cmap='coolwarm',extend='both')
47 |
48 | # Add coastlines
49 | ax.coastlines()
50 |
51 | # Add gridlines
52 | ax.gridlines()
53 |
54 | # Add colorbar
55 | cbar = plt.colorbar(cs,shrink=0.7,orientation='horizontal')
56 |
57 | # Add title
58 | plt.title(case + ' : ' + variable)
59 |
60 | def QuickView(case, variable, model='atmosphere'):
61 | type = model.lower()
62 | if type in { 'atmosphere', 'atm', 'cam' }:
63 | QuickViewATM(case, variable)
64 |
65 |
--------------------------------------------------------------------------------
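A usage sketch for the QuickView helper above, as it might be called from a notebook in this image (the casename matches the one created in the QuickStart tutorial; the function plots the newest atmosphere history file under ~/archive/<case>/atm/hist):

from cesm import QuickView

QuickView("quickstart_case", "TS")              # 2-D surface temperature field
QuickView("quickstart_case", "T", model="cam")  # 4-D field, plotted at a fixed model level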
/CESM-Lab/2.2/Files/cesm_aliases.ipy:
--------------------------------------------------------------------------------
1 | %alias xmlchange ./xmlchange %l
2 | %alias xmlquery ./xmlquery %l
3 | %alias create_newcase ${CESMROOT}/cime/scripts/create_newcase %l
4 | %alias query_config ${CESMROOT}/cime/scripts/query_config %l
5 | %alias case.setup ./case.setup %l
6 | %alias case.build ./case.build %l
7 | %alias case.submit ./case.submit %l
8 | %alias preview_run ./preview_run %l
9 | %alias pelayout ./pelayout %l
10 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/condarc.yml:
--------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | - defaults
4 | channel_priority: strict
5 | show_channel_urls: true
6 | # NOTE: defaults only work for `conda create -f`, not `conda env create -f`!
7 | # https://github.com/conda/conda/issues/9580
8 | #create_default_packages:
9 | # - pangeo-notebook=0.0.2
10 | # - ipykernel
11 | #pinned_packages:
12 | # - pangeo-notebook=0.0.2
13 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/dask_config.yml:
--------------------------------------------------------------------------------
1 | distributed:
2 | version: 2
3 |
4 | dashboard:
5 | link: /user/{JUPYTERHUB_USER}/proxy/{port}/status
6 |
7 | scheduler:
8 | idle-timeout: 3600s
9 |
10 | # uncomment to force new worker pods after 2 hrs
11 | # worker:
12 | # lifetime:
13 | # duration: "2 hours"
14 | # stagger: "10 s"
15 | # restart: true
16 |
17 | admin:
18 | tick:
19 | limit: 5s
20 |
21 | logging:
22 | distributed: warning
23 | bokeh: critical
24 | tornado: critical
25 | tornado.application: error
26 |
27 | kubernetes:
28 | name: dask-{JUPYTERHUB_USER}-{uuid}
29 | worker-template:
30 | spec:
31 | serviceAccount: daskkubernetes
32 | restartPolicy: Never
33 | containers:
34 | - name: dask-worker
35 | image: ${JUPYTER_IMAGE_SPEC}
36 | args:
37 | - dask-worker
38 | - --nthreads
39 | - '2'
40 | - --no-dashboard
41 | - --memory-limit
42 | - 7GB
43 | - --death-timeout
44 | - '60'
45 | resources:
46 | limits:
47 | cpu: "1.75"
48 | memory: 7G
49 | requests:
50 | cpu: 1
51 | memory: 7G
52 |
53 | # Specific to each hub
54 | #gateway:
55 | # address: https://staging.hub.pangeo.io/services/dask-gateway/
56 | # proxy-address: tls://35.225.202.35:8786
57 |
58 | labextension:
59 | factory:
60 | module: dask_gateway
61 | class: GatewayCluster
62 | args: []
63 | kwargs: {}
64 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/environment.yml:
--------------------------------------------------------------------------------
1 | name: default
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - _libgcc_mutex=0.1=conda_forge
7 | - _openmp_mutex=4.5=1_gnu
8 | - abseil-cpp=20200225.2=he1b5a44_2
9 | - adal=1.2.4=pyh9f0ad1d_0
10 | - affine=2.3.0=py_0
11 | - aiofiles=0.5.0=py_0
12 | - aiohttp=3.7.1
13 | - alembic=1.4.3=pyh9f0ad1d_0
14 | - appdirs=1.4.4=pyh9f0ad1d_0
15 | - argon2-cffi=20.1.0=py37h8f50634_2
16 | - arrow-cpp=2.0.0=py37h433b4ec_0_cpu
17 | - asciitree=0.3.3=py_2
18 | - asn1crypto=1.4.0=pyh9f0ad1d_0
19 | - async-timeout=3.0.1=py_1000
20 | - async_generator=1.10=py_0
21 | - attrs=20.2.0=pyh9f0ad1d_0
22 | - aws-c-common=0.4.59=he1b5a44_0
23 | - aws-c-event-stream=0.1.6=h84e28f3_5
24 | - aws-checksums=0.1.9=he252421_2
25 | - aws-sdk-cpp=1.8.63=h9b98462_0
26 | - awscli=1.18.160=py37hc8dfbb8_0
27 | - backcall=0.2.0=pyh9f0ad1d_0
28 | - backports=1.0=py_2
29 | - backports.functools_lru_cache=1.6.1=py_0
30 | - beautifulsoup4=4.9.3=pyhb0f4dca_0
31 | - bleach=3.2.1=pyh9f0ad1d_0
32 | - blinker=1.4=py_1
33 | - blosc=1.20.1=he1b5a44_0
34 | - bokeh=2.2.2=py37hc8dfbb8_0
35 | - boost-cpp=1.74.0=h9359b55_0
36 | - boto3=1.16.0=pyh9f0ad1d_0
37 | - botocore=1.19.0=pyh9f0ad1d_0
38 | - bottleneck=1.3.2=py37h161383b_2
39 | - branca=0.3.1=py_0
40 | - brotli=1.0.9=he1b5a44_2
41 | - brotlipy=0.7.0=py37hb5d75c8_1001
42 | - brunsli=0.1=he1b5a44_0
43 | - bzip2=1.0.8=h516909a_3
44 | - c-ares=1.16.1=h516909a_3
45 | - ca-certificates=2020.6.20=hecda079_0
46 | - cachetools=4.1.1=py_0
47 | - cachey=0.2.1=pyh9f0ad1d_0
48 | - cairo=1.16.0=h488836b_1006
49 | - cartopy=0.18.0=py37h0f91b66_2
50 | - certifi=2020.6.20=py37he5f6b98_2
51 | - certipy=0.1.3=py_0
52 | - cffi=1.14.3=py37h00ebd2e_1
53 | - cfitsio=3.470=hce51eda_7
54 | - cftime=1.2.1=py37h161383b_1
55 | - chardet=3.0.4=py37he5f6b98_1008
56 | - charls=2.1.0=he1b5a44_2
57 | - ciso=0.1.0=py37h161383b_2
58 | - click=7.1.2=pyh9f0ad1d_0
59 | - click-plugins=1.1.1=py_0
60 | - cligj=0.6.0=pyh9f0ad1d_0
61 | - cloudpickle=1.6.0=py_0
62 | - colorama=0.4.3=py_0
63 | - colorcet=2.0.1=py_0
64 | - configobj=5.0.6=py_0
65 | - croniter=0.3.35=pyh9f0ad1d_0
66 | - cryptography=3.1.1=py37hff6837a_1
67 | - curl=7.71.1=he644dc0_8
68 | - cycler=0.10.0=py_2
69 | - cytoolz=0.11.0=py37h8f50634_1
70 | - dask=2.22.0=py_0
71 | - dask-core=2.22.0=py_0
72 | - dask-gateway=0.8.0=py37hc8dfbb8_0
73 | - dask-glm=0.2.0=py_1
74 | - dask-kubernetes=0.10.1=py_0
75 | - dask-labextension=2.0.2=py_0
76 | - dask-ml=1.7.0=py_0
77 | - datashader=0.11.1=pyh9f0ad1d_0
78 | - datashape=0.5.4=py_1
79 | - decorator=4.4.2=py_0
80 | - defusedxml=0.6.0=py_0
81 | - descartes=1.1.0=py_4
82 | - distributed=2.22.0=py37hc8dfbb8_0
83 | - docker-py=4.3.1=py37hc8dfbb8_1
84 | - docker-pycreds=0.4.0=py_0
85 | - docopt=0.6.2=py_1
86 | - docrep=0.2.7=py_0
87 | - docutils=0.15.2=py37_0
88 | - entrypoints=0.3=py37hc8dfbb8_1002
89 | - eofs=1.4.0=py_0
90 | - esmf=8.0.1=nompi_hbeb3ca6_1
91 | - esmpy=8.0.1=nompi_py37h59b2dc9_2
92 | - expat=2.2.9=he1b5a44_2
93 | - fastapi=0.61.1=py_0
94 | - fasteners=0.14.1=py_3
95 | - fastjmd95=0.1=pyh9f0ad1d_0
96 | - fastprogress=1.0.0=py_0
97 | - fiona=1.8.17=py37ha3d844c_0
98 | - fontconfig=2.13.1=h1056068_1002
99 | - freetype=2.10.4=he06d7ca_0
100 | - freexl=1.0.5=h516909a_1002
101 | - fsspec=0.8.4=py_0
102 | - future=0.18.2=py37hc8dfbb8_2
103 | - gcsfs=0.7.1=py_0
104 | - gdal=3.1.2=py37h518339e_3
105 | - geopandas=0.8.1=py_0
106 | - geos=3.8.1=he1b5a44_0
107 | - geotiff=1.6.0=ha04d9d0_1
108 | - geoviews-core=1.8.2=py_0
109 | - gettext=0.19.8.1=hf34092f_1004
110 | - gflags=2.2.2=he1b5a44_1004
111 | - giflib=5.2.1=h516909a_2
112 | - glib=2.66.1=he1b5a44_1
113 | - glog=0.4.0=h49b9bf7_3
114 | - google-auth=1.22.0=py_0
115 | - google-auth-oauthlib=0.4.1=py_2
116 | - graphene=1.4=py_0
117 | - graphql-core=3.1.2=py_0
118 | - graphql-relay=3.0.0=py_0
119 | - grpc-cpp=1.32.0=h7997a97_1
120 | - gsw=3.4.0=py37h161383b_1
121 | - h11=0.11.0=pyh9f0ad1d_0
122 | - h5netcdf=0.8.1=py_0
123 | - h5py=2.10.0=nompi_py37hf7afa78_105
124 | - hdf4=4.2.13=hf30be14_1003
125 | - hdf5=1.10.6=nompi_h54c07f9_1110
126 | - heapdict=1.0.1=py_0
127 | - holoviews=1.13.4=pyh9f0ad1d_0
128 | - httptools=0.1.1=py37h8f50634_1
129 | - hvplot=0.6.0=pyh9f0ad1d_0
130 | - icu=67.1=he1b5a44_0
131 | - idna=2.10=pyh9f0ad1d_0
132 | - imagecodecs=2020.5.30=py37h5f9c477_4
133 | - imageio=2.9.0=py_0
134 | - importlib-metadata=2.0.0=py_1
135 | - importlib_metadata=2.0.0=1
136 | - importlib_resources=3.0.0=py37hc8dfbb8_1
137 | - iniconfig=1.1.0=pyh9f0ad1d_0
138 | - intake=0.6.0=py_0
139 | - intake-esm=2020.8.15=py_0
140 | - intake-geopandas=0.2.3=py_0
141 | - intake-stac=0.3.0=py_0
142 | - intake-xarray=0.4.0=py_0
143 | - ipykernel=5.3.4=py37hc6149b9_1
144 | - ipyleaflet=0.13.3=pyh9f0ad1d_0
145 | - ipython=7.18.1=py37hc6149b9_1
146 | - ipython_genutils=0.2.0=py_1
147 | - ipywidgets=7.5.1=pyh9f0ad1d_1
148 | - itsdangerous=1.1.0=py_0
149 | - jedi=0.17.2=py37hc8dfbb8_1
150 | - jinja2=2.11.2=pyh9f0ad1d_0
151 | - jmespath=0.10.0=pyh9f0ad1d_0
152 | - joblib=0.17.0=py_0
153 | - jpeg=9d=h516909a_0
154 | - json-c=0.13.1=hbfbb72e_1002
155 | - json5=0.9.5=pyh9f0ad1d_0
156 | - jsonschema=3.2.0=py_2
157 | - jupyter-panel-proxy=0.1.0=py_0
158 | - jupyter-server-proxy=1.5.0=py_0
159 | - jupyter_client=6.1.7=py_0
160 | - jupyter_core=4.6.3=py37hc8dfbb8_2
161 | - jupyter_telemetry=0.0.5=py_0
162 | - jupyterhub-base=1.1.0=py37hc8dfbb8_5
163 | - jupyterhub-singleuser=1.1.0=py37hc8dfbb8_5
164 | - jupyterlab=2.2.8=py_0
165 | - jupyterlab_pygments=0.1.2=pyh9f0ad1d_0
166 | - jupyterlab_server=1.2.0=py_0
167 | - jxrlib=1.1=h516909a_2
168 | - kealib=1.4.13=h33137a7_1
169 | - kiwisolver=1.2.0=py37h99015e2_1
170 | - krb5=1.17.1=hfafb76e_3
171 | - kubernetes_asyncio=12.0.0=pyh9f0ad1d_0
172 | - lcms2=2.11=hbd6801e_0
173 | - ld_impl_linux-64=2.35=h769bd43_9
174 | - lerc=2.2=he1b5a44_0
175 | - libaec=1.0.4=he1b5a44_1
176 | - libblas=3.8.0=17_openblas
177 | - libcblas=3.8.0=17_openblas
178 | - libcurl=7.71.1=hcdd3856_8
179 | - libdap4=3.20.6=h1d1bd15_1
180 | - libedit=3.1.20191231=he28a2e2_2
181 | - libev=4.33=h516909a_1
182 | - libevent=2.1.10=hcdb4288_3
183 | - libffi=3.2.1=he1b5a44_1007
184 | - libgcc-ng=9.3.0=h5dbcf3e_17
185 | - libgdal=3.1.2=hb2a6f5f_3
186 | - libgfortran-ng=7.5.0=hae1eefd_17
187 | - libgfortran4=7.5.0=hae1eefd_17
188 | - libglib=2.66.1=h0dae87d_1
189 | - libgomp=9.3.0=h5dbcf3e_17
190 | - libiconv=1.16=h516909a_0
191 | - libkml=1.3.0=h74f7ee3_1012
192 | - liblapack=3.8.0=17_openblas
193 | - libllvm10=10.0.1=he513fc3_3
194 | - libnetcdf=4.7.4=nompi_h84807e1_105
195 | - libnghttp2=1.41.0=h8cfc5f6_2
196 | - libopenblas=0.3.10=pthreads_hb3c22a3_5
197 | - libpng=1.6.37=hed695b0_2
198 | - libpq=12.3=h5513abc_2
199 | - libprotobuf=3.13.0.1=h8b12597_0
200 | - libsodium=1.0.18=h516909a_1
201 | - libspatialindex=1.9.3=he1b5a44_3
202 | - libspatialite=4.3.0a=h57f1b35_1039
203 | - libssh2=1.9.0=hab1572f_5
204 | - libstdcxx-ng=9.3.0=h2ae2ef3_17
205 | - libthrift=0.13.0=h5aa387f_6
206 | - libtiff=4.1.0=hc7e4089_6
207 | - libutf8proc=2.5.0=h516909a_2
208 | - libuuid=2.32.1=h14c3975_1000
209 | - libuv=1.40.0=hd18ef5c_0
210 | - libwebp-base=1.1.0=h516909a_3
211 | - libxcb=1.13=h14c3975_1002
212 | - libxml2=2.9.10=h68273f3_2
213 | - libzopfli=1.0.3=he1b5a44_0
214 | - llvmlite=0.34.0=py37h5202443_2
215 | - locket=0.2.0=py_2
216 | - lz4=3.1.0=py37h5a7ed16_1
217 | - lz4-c=1.9.2=he1b5a44_3
218 | - mako=1.1.3=pyh9f0ad1d_0
219 | - markdown=3.3.2=pyh9f0ad1d_0
220 | - markupsafe=1.1.1=py37hb5d75c8_2
221 | - marshmallow=3.8.0=py_0
222 | - marshmallow-oneofschema=2.0.1=py_0
223 | - matplotlib-base=3.3.2=py37hc9afd2a_1
224 | - mercantile=1.1.6=pyh9f0ad1d_0
225 | - metpy=0.12.2=py_0
226 | - mistune=0.8.4=py37h8f50634_1002
227 | - monotonic=1.5=py_0
228 | - more-itertools=8.5.0=py_0
229 | - msgpack-python=1.0.0=py37h99015e2_2
230 | - multidict=4.7.5=py37h8f50634_2
231 | - multipledispatch=0.6.0=py_0
232 | - munch=2.5.0=py_0
233 | - mypy_extensions=0.4.3=py37hc8dfbb8_2
234 | - natsort=7.0.1=py_0
235 | - nb_conda_kernels=2.3.0=py37hc8dfbb8_2
236 | - nbclient=0.5.1=py_0
237 | - nbconvert=6.0.7=py37hc8dfbb8_1
238 | - nbformat=5.0.8=py_0
239 | - nbgitpuller=0.9.0=py_0
240 | - nbstripout=0.3.9=pyh9f0ad1d_0
241 | - nc-time-axis=1.2.0=py_1
242 | - ncurses=6.2=he1b5a44_2
243 | - nest-asyncio=1.4.1=py_0
244 | - netcdf-fortran=4.5.3=nompi_hfef6a68_101
245 | - netcdf4=1.5.4=nompi_py37hcbfd489_103
246 | - networkx=2.5=py_0
247 | - nodejs=14.14.0=h568c755_0
248 | - nomkl=1.0=h5ca1d4c_0
249 | - notebook=6.1.4=py37hc8dfbb8_1
250 | - numba=0.51.2=py37h9fdb41a_0
251 | - numcodecs=0.7.2=py37h3340039_1
252 | - numpy=1.19.2=py37h7008fea_1
253 | - oauthlib=3.0.1=py_0
254 | - olefile=0.46=pyh9f0ad1d_1
255 | - openjpeg=2.3.1=h981e76c_3
256 | - openssl=1.1.1h=h516909a_0
257 | - orc=1.6.5=hd3605a7_0
258 | - owslib=0.20.0=py_0
259 | - packaging=20.4=pyh9f0ad1d_0
260 | - pamela=1.0.0=py_0
261 | - pandas=1.1.3=py37h9fdb41a_2
262 | - pandoc=2.11.0.2=hd18ef5c_0
263 | - pandocfilters=1.4.2=py_1
264 | - panel=0.9.7=py_0
265 | - pangeo-dask=2020.08.01=0
266 | - pangeo-notebook=2020.08.02=0
267 | - param=1.9.3=py_0
268 | - parquet-cpp=1.5.1=2
269 | - parso=0.7.1=pyh9f0ad1d_0
270 | - partd=1.1.0=py_0
271 | - pcre=8.44=he1b5a44_0
272 | - pendulum=2.1.2=py37hc8dfbb8_0
273 | - pexpect=4.8.0=pyh9f0ad1d_2
274 | - pickleshare=0.7.5=py_1003
275 | - pillow=8.0.0=py37h718be6c_0
276 | - pint=0.16.1=py_0
277 | - pip=20.2.4=py_0
278 | - pixman=0.38.0=h516909a_1003
279 | - pluggy=0.13.1=py37hc8dfbb8_3
280 | - pooch=1.2.0=py_0
281 | - poppler=0.89.0=h4190859_1
282 | - poppler-data=0.4.9=1
283 | - postgresql=12.3=h8573dbc_2
284 | - prefect=0.13.11=py_0
285 | - proj=7.1.0=h966b41f_1
286 | - prometheus_client=0.8.0=pyh9f0ad1d_0
287 | - promise=2.3=py37hc8dfbb8_2
288 | - prompt-toolkit=3.0.8=py_0
289 | - psutil=5.7.2=py37hb5d75c8_1
290 | - pthread-stubs=0.4=h14c3975_1001
291 | - ptyprocess=0.6.0=py_1001
292 | - py=1.9.0=pyh9f0ad1d_0
293 | - pyarrow=2.0.0=py37h9303983_0_cpu
294 | - pyasn1=0.4.8=py_0
295 | - pyasn1-modules=0.2.7=py_0
296 | - pybind11=2.5.0=py37h99015e2_1
297 | - pycparser=2.20=pyh9f0ad1d_2
298 | - pyct=0.4.6=py_0
299 | - pyct-core=0.4.6=py_0
300 | - pycurl=7.43.0.5=py37h21fb010_3
301 | - pydantic=1.6.1=py37h8f50634_1
302 | - pydap=3.2.2=pyh9f0ad1d_1001
303 | - pyepsg=0.4.0=py_0
304 | - pygments=2.7.1=py_0
305 | - pyjwt=1.7.1=py_0
306 | - pykdtree=1.3.1=py37h161383b_1004
307 | - pyopenssl=19.1.0=py_1
308 | - pyorbital=1.6.0=pyh9f0ad1d_0
309 | - pyparsing=2.4.7=pyh9f0ad1d_0
310 | - pyproj=2.6.1.post1=py37hb5dadc3_1
311 | - pyresample=1.16.0=py37h9fdb41a_1
312 | - pyrsistent=0.17.3=py37h8f50634_1
313 | - pyshp=2.1.2=pyh9f0ad1d_0
314 | - pysocks=1.7.1=py37he5f6b98_2
315 | - pyspectral=0.10.1=py_0
316 | - pystac=0.5.2=pyh9f0ad1d_0
317 | - pytest=6.1.1=py37hc8dfbb8_1
318 | - python=3.7.8=h6f2ec95_1_cpython
319 | - python-blosc=1.9.2=py37h9fdb41a_2
320 | - python-box=5.1.1=py_0
321 | - python-dateutil=2.7.5=py_0
322 | - python-dotenv=0.14.0=pyh9f0ad1d_0
323 | - python-editor=1.0.4=py_0
324 | - python-geotiepoints=1.2.0=py37h03ebfcd_0
325 | - python-gist=0.9.2=pyh9f0ad1d_1
326 | - python-gnupg=0.4.6=pyh9f0ad1d_0
327 | - python-json-logger=2.0.1=pyh9f0ad1d_0
328 | - python-kubernetes=12.0.0=pyh9f0ad1d_0
329 | - python-multipart=0.0.5=py_0
330 | - python-slugify=4.0.1=pyh9f0ad1d_0
331 | - python_abi=3.7=1_cp37m
332 | - pytz=2020.1=pyh9f0ad1d_0
333 | - pytzdata=2020.1=pyh9f0ad1d_0
334 | - pyviz_comms=0.7.6=pyh9f0ad1d_0
335 | - pywavelets=1.1.1=py37h161383b_3
336 | - pyyaml=5.3.1=py37hb5d75c8_1
337 | - pyzmq=19.0.2=py37hac76be4_2
338 | - rasterio=1.1.7=py37ha3d844c_0
339 | - re2=2020.10.01=he1b5a44_0
340 | - readline=8.0=he28a2e2_2
341 | - requests=2.24.0=pyh9f0ad1d_0
342 | - requests-oauthlib=1.3.0=pyh9f0ad1d_0
343 | - rio-cogeo=2.0.1=py_0
344 | - rioxarray=0.1.0=py_0
345 | - rsa=4.4.1=pyh9f0ad1d_0
346 | - rtree=0.9.4=py37h8526d28_1
347 | - ruamel.yaml=0.16.12=py37h8f50634_1
348 | - ruamel.yaml.clib=0.2.2=py37h8f50634_1
349 | - s3fs=0.4.2=py_0
350 | - s3transfer=0.3.3=py_3
351 | - sat-search=0.3.0=py_0
352 | - sat-stac=0.4.0=pyh9f0ad1d_0
353 | - satpy=0.23.0=py_0
354 | - scikit-image=0.17.2=py37h9fdb41a_3
355 | - scikit-learn=0.23.2=py37hbb8adca_1
356 | - scipy=1.5.2=py37hb14ef9d_2
357 | - send2trash=1.5.0=py_0
358 | - setuptools=49.6.0=py37he5f6b98_2
359 | - shapely=1.7.1=py37hedb1597_1
360 | - simpervisor=0.3=py_1
361 | - simplejson=3.17.2=py37h8f50634_1
362 | - six=1.15.0=pyh9f0ad1d_0
363 | - snappy=1.1.8=he1b5a44_3
364 | - snuggs=1.4.7=py_0
365 | - sortedcontainers=2.2.2=pyh9f0ad1d_0
366 | - soupsieve=2.0.1=py_1
367 | - sparse=0.11.2=py_0
368 | - sqlalchemy=1.3.20=py37h8f50634_0
369 | - sqlite=3.33.0=h4cf870e_1
370 | - starlette=0.13.6=py_0
371 | - starlette-base=0.13.6=py_0
372 | - supermercado=0.2.0=pyh9f0ad1d_0
373 | - tabulate=0.8.7=pyh9f0ad1d_0
374 | - tbb=2020.2=hc9558a2_0
375 | - tblib=1.6.0=py_0
376 | - terminado=0.9.1=py37hc8dfbb8_1
377 | - testpath=0.4.4=py_0
378 | - text-unidecode=1.3=py_0
379 | - threadpoolctl=2.1.0=pyh5ca1d4c_0
380 | - tifffile=2020.10.1=py_0
381 | - tiledb=2.0.8=h3effe38_1
382 | - tiledb-py=0.6.6=py37h6b53312_2
383 | - tk=8.6.10=hed695b0_1
384 | - toml=0.10.1=pyh9f0ad1d_0
385 | - toolz=0.11.1=py_0
386 | - tornado=6.0.4=py37h8f50634_2
387 | - tqdm=4.50.2=pyh9f0ad1d_0
388 | - traitlets=5.0.5=py_0
389 | - traittypes=0.2.1=pyh9f0ad1d_2
390 | - trollimage=1.14.0=py_0
391 | - trollsift=0.3.4=py_1
392 | - typing-extensions=3.7.4.3=0
393 | - typing_extensions=3.7.4.3=py_0
394 | - tzcode=2020a=h516909a_0
395 | - ujson=4.0.1=py37h3340039_1
396 | - unidecode=1.1.1=py_0
397 | - urllib3=1.25.11=py_0
398 | - uvicorn=0.12.2=py37hc8dfbb8_0
399 | - uvloop=0.14.0=py37h8f50634_2
400 | - watchgod=0.6=py_0
401 | - wcwidth=0.2.5=pyh9f0ad1d_2
402 | - webencodings=0.5.1=py_1
403 | - webob=1.8.6=py_0
404 | - websocket-client=0.57.0=py37hc8dfbb8_3
405 | - websockets=8.1=py37h8f50634_2
406 | - wheel=0.35.1=pyh9f0ad1d_0
407 | - widgetsnbextension=3.5.1=py37hc8dfbb8_4
408 | - xarray=0.16.1=py_0
409 | - xarray-spatial=0.1.0=pyh9f0ad1d_0
410 | - xcape=0.1.2=py37he2cc1d9_1
411 | - xerces-c=3.2.3=hfe33f54_1
412 | - xesmf=0.3.0=py_0
413 | - xgcm=0.5.1=py_0
414 | - xhistogram=0.1.1=py_0
415 | - xlayers=0.2.2=py37hf630d0d_1
416 | - xmitgcm=0.4.1=py_0
417 | - xorg-kbproto=1.0.7=h14c3975_1002
418 | - xorg-libice=1.0.10=h516909a_0
419 | - xorg-libsm=1.2.3=h84519dc_1000
420 | - xorg-libx11=1.6.12=h516909a_0
421 | - xorg-libxau=1.0.9=h14c3975_0
422 | - xorg-libxdmcp=1.1.3=h516909a_0
423 | - xorg-libxext=1.3.4=h516909a_0
424 | - xorg-libxrender=0.9.10=h516909a_1002
425 | - xorg-renderproto=0.11.1=h14c3975_1002
426 | - xorg-xextproto=7.3.0=h14c3975_1002
427 | - xorg-xproto=7.0.31=h14c3975_1007
428 | - xpublish=0.1.0=py_0
429 | - xrft=0.2.2=pyh9f0ad1d_0
430 | - xz=5.2.5=h516909a_1
431 | - yaml=0.2.5=h516909a_0
432 | - yarl=1.6.2=py37h8f50634_0
433 | - zarr=2.4.0=py_0
434 | - zeromq=4.3.3=he1b5a44_2
435 | - zfp=0.5.5=he1b5a44_4
436 | - zict=2.0.0=py_0
437 | - zipp=3.3.1=py_0
438 | - zlib=1.2.11=h516909a_1010
439 | - zstd=1.4.5=h6597ccf_2
440 | prefix: /srv/conda/envs/default
441 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/start:
--------------------------------------------------------------------------------
1 | #!/bin/bash -l
2 |
3 | # ==== ONLY EDIT WITHIN THIS BLOCK =====
4 |
5 | export PANGEO_ENV="default"
6 | if ! [[ -z "${PANGEO_SCRATCH_PREFIX}" ]] && ! [[ -z "${JUPYTERHUB_USER}" ]]; then
7 | export PANGEO_SCRATCH="${PANGEO_SCRATCH_PREFIX}/${JUPYTERHUB_USER}/"
8 | fi
9 |
10 | if [[ ! -f /home/user/.tutorial_initialized ]]; then
11 | cp -rp /srv/tutorials /home/user
12 | touch /home/user/.tutorial_initialized
13 | fi
14 |
15 | # Change the CPU counts:
16 | CPUS=$(grep -c processor /proc/cpuinfo)
17 | sudo xmlstarlet ed --inplace -u "config_machines/machine[@MACH='container']/MAX_TASKS_PER_NODE" -v ${CPUS} ${CESMROOT}/cime/config/cesm/machines/config_machines.xml
18 | sudo xmlstarlet ed --inplace -u "config_machines/machine[@MACH='container']/MAX_MPITASKS_PER_NODE" -v ${CPUS} ${CESMROOT}/cime/config/cesm/machines/config_machines.xml
19 |
20 | # ==== ONLY EDIT WITHIN THIS BLOCK =====
21 |
22 |
23 | exec jupyter lab --ip=0.0.0.0 --NotebookApp.token='' --NotebookApp.password=''
24 |
25 | #exec "$@"
26 |
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/tutorials/CESM/QuickStart.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# CESM2 - Quick Start\n",
8 | "\n",
9 | "This is a very quick introduction to running CESM. It will guide you through the basics of creating a case, modifying simple aspects of that case, building it, running it, and use a function to provide a quick visualization of the output. Each step will also provide links to the full CESM documentation and additional examples.\n",
10 | "\n",
11 | "**NOTE: Each step must be followed in order, otherwise you may encounter errors**\n",
12 | "\n",
13 | "\n",
14 | "***"
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 | "## 1. Creating a CESM case\n",
22 | "\n",
23 | "CESM experiments start with creating a 'case' - a configuration of the model with a specific set of component models (e.g., CAM for the atmosphere model, POP for the ocean model, etc.), at a particular resolution, and with a set of options defined in XML files. Let's start by using the 'create_newcase' command to set up an experiment with the following details:\n",
24 | "\n",
25 | " - compset: QPC4\n",
26 | " - resolution: f45_f45_mg37\n",
27 | "\n",
28 | "\n",
29 | "This is a 'simple model' - an aquaplanet configuration at 4.5-degree resolution, using CAM4 physics. This is much less computationally intensive than a fully coupled run, so it works well for this tutorial, only requiring ~0.6GB of RAM. It will also need to download some input files, totaling approximately 127MB.\n",
30 | "\n",
31 | "The command to create this configuration is given below, using 'quickstart_case' as the casename. Note that given the size and nature of this run, it's not a scientifically supported configuration, but it's still useful for learning how to run the model.\n",
32 | "\n",
33 | "Run the cell to issue the command:"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": null,
39 | "metadata": {},
40 | "outputs": [],
41 | "source": [
42 | "create_newcase --case ~/quickstart_case --compset QPC4 --res f45_f45_mg37 --run-unsupported"
43 | ]
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "metadata": {},
48 | "source": [
49 | "The last few lines above should look like this:\n",
50 | "\n",
51 | "> Creating Case directory /home/user/quickstart_case\n",
52 | "This component includes user_mods /opt/ncar/cesm2/components/cam//cime_config/usermods_dirs/aquap\n",
53 | "Adding user mods directory /opt/ncar/cesm2/components/cam/cime_config/usermods_dirs/aquap\n",
54 | "\n",
55 | "\n",
56 | "If so, great! You've created the default version of your first case. We'll cover this in more detail in an upcoming tutorial, and additional information can also be found in CESM's online [documentation](https://escomp.github.io/CESM/release-cesm2/quickstart.html#create-a-case).\n",
57 | "\n",
58 | "\n",
59 | "***"
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## 2. Modifying a CESM case\n",
67 | "\n",
68 | "Now let's make a simple modification to the case via two easy steps:\n",
69 | "\n",
70 | "1. Change to our new case directory\n",
71 | "2. In that directory, change the duration of the run (in model-time)\n",
72 | "\n",
73 | "\n",
74 | "These changes can be made by modifying the XML files that control CESM's behavior. We could do this by hand, but the preferred way is to use the 'xmlchange' command in the case directory. By default, newly configured cases are set up to run for 5 model days - since we're just experimenting, we'll change it to 3 model days instead, for even faster runs. Run the following cells to execute the two commands:\n",
75 | "\n",
76 | ""
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": null,
82 | "metadata": {},
83 | "outputs": [],
84 | "source": [
85 | "cd ~/quickstart_case"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "xmlchange STOP_OPTION=ndays,STOP_N=3"
95 | ]
96 | },
97 | {
98 | "cell_type": "markdown",
99 | "metadata": {},
100 | "source": [
101 | "Since the default STOP_OPTION is already 'ndays', it wasn't strictly necessary to include it in the command, but being explicit avoids any confusion later if it had previously been changed to 'nhours' or 'nyears'.\n",
102 | "\n",
103 | "We don't see any output from 'xmlchange', but we can use another tool, 'xmlquery', to double-check that we have the new values:"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": null,
109 | "metadata": {},
110 | "outputs": [],
111 | "source": [
112 | "xmlquery STOP_OPTION,STOP_N"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | "Great! You've now used the 'xmlchange' utility to modify the model's behavior, setting it up to run for 3 model days. You can do a lot more with 'xmlchange' - we'll cover some of it in a later tutorial, and additional information can be found in CESM's online [documentation](http://esmci.github.io/cime/versions/master/html/Tools_user/xmlchange.html).\n",
120 | "\n",
121 | "\n",
122 | "***"
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {},
128 | "source": [
129 | "## 3. Setting up the case\n",
130 | "\n",
131 | "The next step in running CESM is to 'set up' the case - this is done by running case.setup in the case directory. This command sets up some user-customizable namelists that control the model's behavior, creates the directory where the run happens, and configures additional settings used in building the model.\n",
132 | "\n",
133 | "Run the cell to issue the command:"
134 | ]
135 | },
136 | {
137 | "cell_type": "code",
138 | "execution_count": null,
139 | "metadata": {},
140 | "outputs": [],
141 | "source": [
142 | "case.setup"
143 | ]
144 | },
145 | {
146 | "cell_type": "markdown",
147 | "metadata": {},
148 | "source": [
149 | "Great! You've now used the 'case.setup' command to set up your case. As the output says, you can now use the 'preview_run' command to get additional information if needed, but it's not necessary. You can also read CESM's online documentation on [case.setup](http://esmci.github.io/cime/versions/master/html/Tools_user/case.setup.html) for more information if you like.\n",
150 | "\n",
151 | "***"
152 | ]
153 | },
154 | {
155 | "cell_type": "markdown",
156 | "metadata": {},
157 | "source": [
158 | "## 4. Building the case\n",
159 | "\n",
160 | "CESM supports a wide variety of configurations - different components, physics options and source modifications - and the executable depends on all of these choices, so each case must be *compiled*. The container includes all the necessary libraries and compilers, and the 'create_newcase' and 'case.setup' commands have configured everything we need, so on a supported machine this is a simple process - we just run the 'case.build' command in the cell below.\n",
161 | "\n",
162 | "Be aware that this can take a few minutes - how long depends on the type of system you have, but expect roughly 3-6 minutes."
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": null,
168 | "metadata": {},
169 | "outputs": [],
170 | "source": [
171 | "case.build"
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {},
177 | "source": [
178 | "\n",
179 | "Again, this will take a few minutes. You'll know this is complete when you see the line:\n",
180 | "\n",
181 | "> MODEL BUILD HAS FINISHED SUCCESSFULLY\n",
182 | "\n",
183 | "If you see that, fantastic! You've built your first CESM case! Now we can move on to the final step of performing a simulation.\n",
184 | "\n",
185 | "***"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | "## 5. Running your case\n",
193 | "\n",
194 | "Running a case is also simple - we just issue the 'case.submit' command. It starts by checking that we have all of the necessary input data for our run and downloading whatever is missing, and then it performs the actual simulation - which we've configured to run for 3 model days.\n",
195 | "\n",
196 | "How long the run takes, like the build, depends heavily on the type of processor you have, and how many cores it has. Expect this to take from 1-3 minutes after the data download is finished."
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "metadata": {},
203 | "outputs": [],
204 | "source": [
205 | "case.submit"
206 | ]
207 | },
208 | {
209 | "cell_type": "markdown",
210 | "metadata": {},
211 | "source": [
212 | "When this step finishes, the run will have completed and the 'st_archive' script will have executed. You will see a few warning messages about 'No such variable' - these are normal and can be ignored. The final lines should look like:\n",
213 | "\n",
214 | ">Submitted job case.run with id None\n",
215 | "Submitted job case.st_archive with id None\n",
216 | "\n",
217 | "\n",
218 | "If you see those, congratulations! You've run your first case. There's a *lot* of text shown above - we'll cover it in more detail in another tutorial, and more information can be found in the [documentation](https://escomp.github.io/CESM/versions/cesm2.2/html/quickstart.html#run-the-case).\n",
219 | "\n",
220 | "***"
221 | ]
222 | },
223 | {
224 | "cell_type": "markdown",
225 | "metadata": {},
226 | "source": [
227 | "## 6. Continuing a run\n",
228 | "\n",
229 | "We've run our case for three model days as an initial test, but now let's go back and *continue* that run for 28 more days so we have a full month of output. This is better for estimating performance, *and* will give us a monthly history output file that we can use to visualize results.\n",
230 | "\n",
231 | "To do this, we'll use the 'xmlchange' utility to tell CESM to continue the run, and to set the number of days for the new run to 28. Then we'll call 'case.submit' again. Since we're running 9.3x longer (28 days vs. 3), we should expect this to take roughly 9.3x longer. Enter the commands below, then grab a cup of coffee as the model runs!"
232 | ]
233 | },
234 | {
235 | "cell_type": "code",
236 | "execution_count": null,
237 | "metadata": {},
238 | "outputs": [],
239 | "source": [
240 | "xmlchange CONTINUE_RUN=true,STOP_N=28"
241 | ]
242 | },
243 | {
244 | "cell_type": "code",
245 | "execution_count": null,
246 | "metadata": {},
247 | "outputs": [],
248 | "source": [
249 | "case.submit"
250 | ]
251 | },
252 | {
253 | "cell_type": "markdown",
254 | "metadata": {},
255 | "source": [
256 | "Excellent - you'll notice that this time the output was a little shorter, since it didn't need to download any new input data; we're just continuing a run that we already have all the input data for.\n",
257 | "\n",
258 | "***"
259 | ]
260 | },
261 | {
262 | "cell_type": "markdown",
263 | "metadata": {},
264 | "source": [
265 | "## 7. Visualizing the output\n",
266 | "\n",
267 | "As a final step, let's take a quick look at visualizing our output. Visualization in Jupyter Notebooks is a huge topic, and there are countless ways of analyzing and processing CESM's output. Since this is a 'quickstart' tutorial, we're going to use an existing function provided by the 'cesm' python package. This function, 'QuickView', takes the name of a case and one of several variables ('T', temperature, in this case) and plots the variable with a global overlay for the lowest vertical level of the model. We'll look at more advanced visualizations in other tutorial lessons."
268 | ]
269 | },
270 | {
271 | "cell_type": "code",
272 | "execution_count": null,
273 | "metadata": {},
274 | "outputs": [],
275 | "source": [
276 | "from cesm import QuickView\n",
277 | "QuickView('quickstart_case', 'T')"
278 | ]
279 | },
280 | {
281 | "cell_type": "markdown",
282 | "metadata": {},
283 | "source": [
284 | "And we can do a plot of 'U' just as easily:"
285 | ]
286 | },
287 | {
288 | "cell_type": "code",
289 | "execution_count": null,
290 | "metadata": {},
291 | "outputs": [],
292 | "source": [
293 | "QuickView('quickstart_case', 'U')"
294 | ]
295 | },
296 | {
297 | "cell_type": "markdown",
298 | "metadata": {},
299 | "source": [
300 | "Congratulations - you've walked through the basic steps of using CESM: creating a case, setting it up, building it, running it, and visualizing the output. You're all set for some of the more advanced tutorials now."
301 | ]
302 | }
303 | ],
304 | "metadata": {
305 | "kernelspec": {
306 | "display_name": "Python [conda env:default] *",
307 | "language": "python",
308 | "name": "conda-env-default-py"
309 | },
310 | "language_info": {
311 | "codemirror_mode": {
312 | "name": "ipython",
313 | "version": 3
314 | },
315 | "file_extension": ".py",
316 | "mimetype": "text/x-python",
317 | "name": "python",
318 | "nbconvert_exporter": "python",
319 | "pygments_lexer": "ipython3",
320 | "version": "3.7.8"
321 | }
322 | },
323 | "nbformat": 4,
324 | "nbformat_minor": 4
325 | }
326 |
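A workflow sketch (an annotation, not a cell of the notebook above; it assumes the notebook cells
invoke these as shell commands via the aliases in Files/cesm_aliases.ipy): the same end-to-end
quickstart, run from a plain terminal inside the container, would look roughly like this, using
the case name and settings from the notebook:

    create_newcase --case ~/quickstart_case --compset QPC4 --res f45_f45_mg37 --run-unsupported
    cd ~/quickstart_case
    ./xmlchange STOP_OPTION=ndays,STOP_N=3
    ./case.setup
    ./case.build
    ./case.submit                             # initial 3-day run
    ./xmlchange CONTINUE_RUN=true,STOP_N=28
    ./case.submit                             # continue the run for 28 more days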
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/tutorials/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ESCOMP/ESCOMP-Containers/c48f872d5b4bbdc90ca9e1159d6c276d87407e09/CESM-Lab/2.2/Files/tutorials/images/logo.png
--------------------------------------------------------------------------------
/CESM-Lab/2.2/Files/tutorials/index.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "\n",
8 | "# CESM-Lab Tutorials \n",
9 | "\n",
10 | "\n",
11 | "Welcome to CESM! This tutorial is intended to provide a quick introduction to using the Community Earth System Model by providing self-contained examples in Jupyter Notebooks. A list of lessons is below; you can click on any to open up a Notebook and get started.\n",
12 | "\n",
13 | "We'd love to hear from you on the [CESM Forums](https://bb.cgd.ucar.edu/cesm/), and welcome any suggestions on missing topics, new ideas, user contributions and other ways of improving this resource!\n",
14 | "\n",
15 | "\n",
16 | "\n",
17 | "1. [Quick Start](CESM/QuickStart.ipynb) (~20-40 minutes, depending on system performance)\n",
18 | "\n",
19 | "*This lesson is a quick walkthrough of all the key steps in running CESM out of the box. You'll create a new experiment, set it up, build it, run it, and, using an existing visualization function, visualize the global temperature profile.*\n",
20 | "\n",
21 | "---\n",
22 | "\n",
23 | "2. CESM Tutorials \n",
24 | "\n",
25 | " Creating cases (_coming soon_)\n",
26 | "\n",
27 | " Case settings & 'xmlchange' (_coming soon_)\n",
28 | " \n",
29 | " Modifying CESM namelists (_coming soon_)\n",
30 | " \n",
31 | "---\n",
32 | " \n",
33 | "3. Component-specific Tutorials\n",
34 | "\n",
35 | " CAM (atmosphere) (_coming soon_)\n",
36 | "\n",
37 | " CLM (land) (_coming soon_)\n",
38 | "\n",
39 | "---\n",
40 | "\n",
41 | "4. Visualization\n",
42 | "\n",
43 | " Getting started with Matplotlib (_coming soon_)\n",
44 | "\n",
45 | "---\n",
46 | "\n",
47 | "5. Analysis\n",
48 | "\n",
49 | " CESM-LENS data on AWS (_coming soon_)\n",
50 | "\n",
51 | "\n",
52 | "\n",
53 | "\n"
54 | ]
55 | }
56 | ],
57 | "metadata": {
58 | "kernelspec": {
59 | "display_name": "Python [conda env:default] *",
60 | "language": "python",
61 | "name": "conda-env-default-py"
62 | },
63 | "language_info": {
64 | "codemirror_mode": {
65 | "name": "ipython",
66 | "version": 3
67 | },
68 | "file_extension": ".py",
69 | "mimetype": "text/x-python",
70 | "name": "python",
71 | "nbconvert_exporter": "python",
72 | "pygments_lexer": "ipython3",
73 | "version": "3.7.8"
74 | }
75 | },
76 | "nbformat": 4,
77 | "nbformat_minor": 4
78 | }
79 |
--------------------------------------------------------------------------------
/CESM-Postprocessing/Dockerfile:
--------------------------------------------------------------------------------
1 | #########################################
2 | # ESCOMP/CESM-Postprocessing Dockerfile #
3 | #########################################
4 |
5 | # We're using a CentOS8 base for now:
6 | FROM centos:centos8.3.2011
7 |
8 |
9 | # Set up some variables for versions of packages & environment needs:
10 | ARG SOURCE_MPI=https://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz
11 | ARG SOURCE_MINICONDA=https://repo.anaconda.com/miniconda/Miniconda3-py37_4.9.2-Linux-x86_64.sh
12 | ARG SOURCE_POSTPROCESSING="-b cheyenne_update_py3 https://github.com/briandobbins/CESM_postprocessing.git"
13 |
14 | # Set up the environment variable for the postprocessing software
15 | ENV POSTPROCESS_PATH=/opt/ncar/cesm_postprocessing
16 |
17 |
18 | # Set the shell to bash - this helps with some of the conda installation quirks in Dockerfiles
19 | SHELL [ "/bin/bash", "--login", "-c" ]
20 |
21 |
22 | # We're going to install the dnf plugins, the EPEL repo and the English language pack, then
23 | # enable the PowerTools repo (for libsz.so, needed by netcdf-devel), upgrade existing
24 | # packages, install a list of prerequisites, and clean up.
25 | # Note: Some packages, like netcdf-devel, are obvious, others like tbb less so, and a few
26 | # like 'which' are just for convenience/debugging
27 | RUN dnf install -y dnf-plugins-core epel-release glibc-langpack-en &&\
28 | dnf config-manager --set-enabled powertools &&\
29 | dnf upgrade -y &&\
30 | dnf install -y calc \
31 | diffutils \
32 | gcc \
33 | gcc-c++ \
34 | gcc-gfortran \
35 | git \
36 | file \
37 | libnsl \
38 | make \
39 | ncl-devel \
40 | netcdf-fortran-devel \
41 | tbb \
42 | wget \
43 | which &&\
44 | dnf clean all
45 |
46 |
47 | # Now let's download & install MPI, then clean it up:
48 | ENV PACKAGE_MPI=${SOURCE_MPI}
49 | RUN mkdir -p /tmp/mpi &&\
50 | cd /tmp/mpi &&\
51 | curl -sSL ${PACKAGE_MPI} --output mpi.tar.gz &&\
52 | tar zxvf mpi.tar.gz -C . --strip-components=1 &&\
53 | ./configure --prefix=/usr/local --disable-fortran --disable-cxx &&\
54 | make -j 2 install &&\
55 | rm -rf /tmp/mpi
56 |
57 |
58 | # Now let's download, install and init Miniconda, then clean up:
59 | ENV PACKAGE_MINICONDA=${SOURCE_MINICONDA}
60 | RUN mkdir -p /tmp/conda &&\
61 | cd /tmp/conda &&\
62 | curl -sSL ${PACKAGE_MINICONDA} --output Miniconda.sh &&\
63 | sh Miniconda.sh -b -p /opt/ncar/conda &&\
64 | /opt/ncar/conda/bin/conda init bash &&\
65 | rm -rf /tmp/conda
66 |
67 |
68 | # Create the cesm-env2 conda environment -- this name is hardcoded in the code,
69 | # it seems, but may be worth looking into changing later, since everything here
70 | # could be put in a default python environment, and we'd avoid the complexity of
71 | # activating non-default conda environments in containers.
72 | # Also note that I've made some effort to install things from source, which
73 | # results in a much smaller container, but needs more debugging due to odd issues.
74 | # Finally, using the environment.yml (or specific source versions) makes it easier
75 | # to ensure compatibility at different build times.
76 | COPY Files/environment.yml /
77 | RUN conda update -n base -c defaults conda &&\
78 | conda env create -f /environment.yml &&\
79 | echo "conda activate cesm-env2" >> /root/.bashrc &&\
80 | rm /environment.yml &&\
81 | conda clean --all
82 |
83 |
84 | # Install CESM Postprocessing tools:
85 | ENV POSTPROCESSING_REPO=${SOURCE_POSTPROCESSING}
86 | RUN mkdir -p /opt/ncar && \
87 | cd /opt/ncar && \
88 | git clone ${POSTPROCESSING_REPO} cesm_postprocessing && \
89 | cd cesm_postprocessing && \
90 | ln -s /opt/ncar/conda/envs/cesm-env2 . && \
91 | ./manage_externals/checkout_externals &&\
92 | ./create_python_env -machine container
93 |
94 |
95 | # And add the 'activate_this.py' script (used by the CESM Postprocessing scripts)
96 | # Is this deprecated? Are we doing something unique? My Python is pretty bad; ask Jim about this
97 | COPY Files/activate_this.py /opt/ncar/cesm_postprocessing/cesm-env2/bin
98 |
99 |
100 | CMD ["/bin/bash", "-l"]
101 |
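# A build-and-run sketch (annotation; the image tag is illustrative, not defined by this file):
#   docker build -t cesm-postprocessing .
#   docker run -it --rm cesm-postprocessing
# The login-shell CMD above lands you in bash with the cesm-env2 conda environment activated
# via the line appended to /root/.bashrc earlier in this Dockerfile.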
--------------------------------------------------------------------------------
/CESM-Postprocessing/Files/activate_this.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Activate virtualenv for current interpreter:
3 |
4 | Use exec(open(this_file).read(), {'__file__': this_file}).
5 |
6 | This can be used when you must use an existing Python interpreter, not the virtualenv bin/python.
7 | """
8 | import os
9 | import site
10 | import sys
11 |
12 | try:
13 | abs_file = os.path.abspath(__file__)
14 | except NameError:
15 | raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))")
16 |
17 | bin_dir = os.path.dirname(abs_file)
18 | base = bin_dir[: -len("__BIN_NAME__") - 1] # strip away the bin part from the __file__, plus the path separator
19 |
20 | # prepend bin to PATH (this file is inside the bin directory)
21 | os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep))
22 | os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory
23 |
24 | # add the virtual environments libraries to the host python import mechanism
25 | prev_length = len(sys.path)
26 | for lib in "__LIB_FOLDERS__".split(os.pathsep):
27 | path = os.path.realpath(os.path.join(bin_dir, lib))
28 | site.addsitedir(path)
29 | sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length]
30 |
31 | sys.real_prefix = sys.prefix
32 | sys.prefix = base
33 |
--------------------------------------------------------------------------------
/CESM-Postprocessing/Files/environment.yml:
--------------------------------------------------------------------------------
1 | name: cesm-env2
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - _libgcc_mutex=0.1=conda_forge
7 | - _openmp_mutex=4.5=1_gnu
8 | - boost-cpp=1.72.0=h8e57a91_0
9 | - bzip2=1.0.8=h7f98852_4
10 | - c-ares=1.17.1=h7f98852_1
11 | - ca-certificates=2020.12.5=ha878542_0
12 | - cairo=1.16.0=hcf35c78_1003
13 | - certifi=2020.12.5=py38h578d9bd_1
14 | - cfitsio=3.470=h2e3daa1_7
15 | - curl=7.76.1=h979ede3_1
16 | - esmf=8.0.0=nompi_hb0fcdcb_6
17 | - expat=2.3.0=h9c3ff4c_0
18 | - fontconfig=2.13.1=hba837de_1005
19 | - freetype=2.10.4=h0708190_1
20 | - freexl=1.0.6=h7f98852_0
21 | - g2clib=1.6.0=hf3f1b0b_9
22 | - geos=3.8.1=he1b5a44_0
23 | - geotiff=1.5.1=h05acad5_10
24 | - gettext=0.19.8.1=h0b5b191_1005
25 | - giflib=5.2.1=h36c2ea0_2
26 | - glib=2.68.1=h9c3ff4c_0
27 | - glib-tools=2.68.1=h9c3ff4c_0
28 | - gsl=2.6=he838d99_2
29 | - hdf4=4.2.13=h10796ff_1005
30 | - hdf5=1.10.5=nompi_h7c3c948_1111
31 | - hdfeos2=2.20=h64bfcee_1000
32 | - hdfeos5=5.1.16=h8b6279f_6
33 | - icu=64.2=he1b5a44_1
34 | - jasper=1.900.1=h07fcdf6_1006
35 | - jpeg=9d=h36c2ea0_0
36 | - json-c=0.13.1=hbfbb72e_1002
37 | - kealib=1.4.13=hec59c27_0
38 | - krb5=1.17.2=h926e7f8_0
39 | - ld_impl_linux-64=2.35.1=hea4e1c9_2
40 | - libblas=3.9.0=8_openblas
41 | - libcblas=3.9.0=8_openblas
42 | - libcurl=7.76.1=hc4aaa36_1
43 | - libdap4=3.20.6=hd7c4107_2
44 | - libedit=3.1.20191231=he28a2e2_2
45 | - libev=4.33=h516909a_1
46 | - libffi=3.3=h58526e2_2
47 | - libgcc-ng=9.3.0=h2828fa1_19
48 | - libgdal=3.0.4=h3dfc09a_6
49 | - libgfortran-ng=7.5.0=h14aa051_19
50 | - libgfortran4=7.5.0=h14aa051_19
51 | - libglib=2.68.1=h3e27bee_0
52 | - libgomp=9.3.0=h2828fa1_19
53 | - libiconv=1.16=h516909a_0
54 | - libkml=1.3.0=hd79254b_1012
55 | - liblapack=3.9.0=8_openblas
56 | - libnetcdf=4.7.4=nompi_h9f9fd6a_101
57 | - libnghttp2=1.43.0=h812cca2_0
58 | - libopenblas=0.3.12=pthreads_hb3c22a3_1
59 | - libpng=1.6.37=h21135ba_2
60 | - libpq=12.3=h255efa7_3
61 | - libspatialite=4.3.0a=h2482549_1038
62 | - libssh2=1.9.0=ha56f1ee_6
63 | - libstdcxx-ng=9.3.0=h6de172a_19
64 | - libtiff=4.2.0=hdc55705_0
65 | - libuuid=2.32.1=h7f98852_1000
66 | - libwebp-base=1.2.0=h7f98852_2
67 | - libxcb=1.13=h7f98852_1003
68 | - libxml2=2.9.10=hee79883_0
69 | - lz4-c=1.9.2=he1b5a44_3
70 | - ncl=6.6.2=hfe5c2fd_21
71 | - ncurses=6.2=h58526e2_4
72 | - netcdf-fortran=4.5.2=nompi_h45d7149_104
73 | - numpy=1.20.2=py38h9894fe3_0
74 | - openjpeg=2.3.1=hf7af979_3
75 | - openssl=1.1.1k=h7f98852_0
76 | - pcre=8.44=he1b5a44_0
77 | - pip=21.1=pyhd8ed1ab_0
78 | - pixman=0.38.0=h516909a_1003
79 | - poppler=0.67.0=h14e79db_8
80 | - poppler-data=0.4.10=0
81 | - postgresql=12.3=hc2f5b80_3
82 | - proj=7.0.0=h966b41f_5
83 | - pthread-stubs=0.4=h36c2ea0_1001
84 | - pyngl=1.6.1=py38h174697a_2
85 | - pynio=1.5.5=py38h031d99c_12
86 | - python=3.8.8=hffdb5ce_0_cpython
87 | - python_abi=3.8=1_cp38
88 | - readline=8.1=h46c0cb4_0
89 | - setuptools=49.6.0=py38h578d9bd_3
90 | - sqlite=3.35.5=h74cdb3f_0
91 | - tiledb=1.7.7=h8efa9f0_2
92 | - tk=8.6.10=h21135ba_1
93 | - tzcode=2021a=h7f98852_1
94 | - udunits2=2.2.27.27=h975c496_1
95 | - wheel=0.36.2=pyhd3deb0d_0
96 | - xerces-c=3.2.2=h8412b87_1004
97 | - xorg-imake=1.0.7=0
98 | - xorg-kbproto=1.0.7=h7f98852_1002
99 | - xorg-libice=1.0.10=h7f98852_0
100 | - xorg-libsm=1.2.3=hd9c2040_1000
101 | - xorg-libx11=1.6.12=h516909a_0
102 | - xorg-libxau=1.0.9=h7f98852_0
103 | - xorg-libxaw=1.0.14=h7f98852_0
104 | - xorg-libxdmcp=1.1.3=h7f98852_0
105 | - xorg-libxext=1.3.4=h516909a_0
106 | - xorg-libxmu=1.1.3=h516909a_0
107 | - xorg-libxpm=3.5.13=h516909a_0
108 | - xorg-libxrender=0.9.10=h516909a_1002
109 | - xorg-libxt=1.1.5=h516909a_1003
110 | - xorg-makedepend=1.0.6=he1b5a44_1
111 | - xorg-renderproto=0.11.1=h7f98852_1002
112 | - xorg-xextproto=7.3.0=h7f98852_1002
113 | - xorg-xproto=7.0.31=h7f98852_1007
114 | - xz=5.2.5=h516909a_1
115 | - zlib=1.2.11=h516909a_1010
116 | - zstd=1.4.8=hdf46e1d_0
117 | - pip:
118 | - antlr4-python3-runtime==4.7.2
119 | - cartopy==0.18.0
120 | - cf-units==2.1.4
121 | - cftime==1.4.1
122 | - cycler==0.10.0
123 | - dreqpy==1.0.33
124 | - ilamb==2.5
125 | - jinja2==3.0.0rc2
126 | - kiwisolver==1.3.1
127 | - markupsafe==2.0.0rc2
128 | - matplotlib==3.4.1
129 | - mpi4py==3.0.3
130 | - mpmath==1.2.1
131 | - netcdf4==1.5.6
132 | - pillow==8.2.0
133 | - ply==3.11
134 | - pyparsing==3.0.0b2
135 | - pyshp==2.1.3
136 | - python-dateutil==2.8.1
137 | - scipy==1.6.3
138 | - shapely==1.8a1
139 | - six==1.15.0
140 | - sympy==1.8
141 | prefix: /opt/ncar/conda/envs/cesm-env2
142 |
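# Annotation: this is the pinned environment that the Dockerfile recreates with
# 'conda env create -f /environment.yml'. The 'prefix:' line above simply records where the
# environment was originally exported from and is not required when the env is recreated by name.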
--------------------------------------------------------------------------------
/CESM/2.1/Dockerfile:
--------------------------------------------------------------------------------
1 | ##############################
2 | # ESCOMP/CESM/2.1 Dockerfile #
3 | ##############################
4 |
5 | # Use the ESCOMP centos8 base - this is a base install of CentOS, plus NetCDF/HDF5/PNetCDF/MPICH
6 | FROM escomp/base-centos8
7 |
8 | # Install CESM:
9 | COPY Files/ea002e626aee6bc6643e8ab5f998e5e4 /root/.subversion/auth/svn.ssl.server/
10 | RUN mkdir -p /opt/ncar && \
11 | cd /opt/ncar && \
12 | git clone -b release-cesm2.1.3 https://github.com/ESCOMP/cesm.git cesm2 && \
13 | cd cesm2 && \
14 | ./manage_externals/checkout_externals
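# (Annotation: 'checkout_externals' reads the Externals.cfg files shipped with this CESM tag and
#  clones the individual component repositories - CAM, POP, CICE, CLM, CIME, etc. - into the tree.)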
15 |
16 | # Set up the environment - create the group and user, the shell variables, the input data directory and sudo access:
17 | RUN groupadd ncar && \
18 | useradd -c 'CESM User' -d /home/user -g ncar -m -s /bin/bash user && \
19 | yum install -y ftp && \
20 | echo 'export CESMDATAROOT=${HOME}' >> /etc/profile.d/cesm.sh && \
21 | echo 'export CIME_MACHINE=container' >> /etc/profile.d/cesm.sh && \
22 | echo 'export USER=$(whoami)' >> /etc/profile.d/cesm.sh && \
23 | echo 'export PS1="[\u@cesm2.1.3 \W]\$ "' >> /etc/profile.d/cesm.sh && \
24 | echo 'export PATH=${PATH}:/opt/ncar/cesm2/cime/scripts' >> /etc/profile.d/cesm.sh && \
25 | echo 'user ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers.d/ncar
26 |
27 |
28 | # Add the container versions of the config_machines & config_compilers settings - later, integrate these into CIME
29 | COPY Files/config_compilers.xml /opt/ncar/cesm2/cime/config/cesm/machines/
30 | COPY Files/config_machines.xml /opt/ncar/cesm2/cime/config/cesm/machines/
31 | COPY Files/config_inputdata.xml /opt/ncar/cesm2/cime/config/cesm/
32 | COPY Files/case_setup.py /opt/ncar/cesm2/cime/scripts/lib/CIME/case/case_setup.py
33 |
34 |
35 | USER user
36 | WORKDIR /home/user
37 | ENTRYPOINT ["/bin/bash", "-l"]
38 |
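# A usage sketch (annotation; tag and host path are illustrative): because CESMDATAROOT is set to
# $HOME above, mounting a host directory over /home/user persists downloaded input data and cases:
#   docker build -t cesm-2.1 .
#   docker run -it --rm -v $HOME/cesm-work:/home/user cesm-2.1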
--------------------------------------------------------------------------------
/CESM/2.1/Files/case_setup.py:
--------------------------------------------------------------------------------
1 | """
2 | Library for case.setup.
3 | case_setup is a member of class Case from file case.py
4 | """
5 |
6 | from CIME.XML.standard_module_setup import *
7 |
8 | from CIME.XML.machines import Machines
9 | from CIME.BuildTools.configure import configure
10 | from CIME.utils import get_cime_root, run_and_log_case_status, get_model, get_batch_script_for_job, safe_copy
11 | from CIME.test_status import *
12 | from CIME.locked_files import unlock_file, lock_file
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 | ###############################################################################
17 | def _build_usernl_files(case, model, comp):
18 | ###############################################################################
19 | """
20 | Create user_nl_xxx files, expects cwd is caseroot
21 | """
22 | model = model.upper()
23 | if model == "DRV":
24 | model_file = case.get_value("CONFIG_CPL_FILE")
25 | else:
26 | model_file = case.get_value("CONFIG_{}_FILE".format(model))
27 | expect(model_file is not None,
28 | "Could not locate CONFIG_{}_FILE in config_files.xml".format(model))
29 | model_dir = os.path.dirname(model_file)
30 |
31 | expect(os.path.isdir(model_dir),
32 | "cannot find cime_config directory {} for component {}".format(model_dir, comp))
33 | ninst = 1
34 | multi_driver = case.get_value("MULTI_DRIVER")
35 | if multi_driver:
36 | ninst_max = case.get_value("NINST_MAX")
37 | if model not in ("DRV","CPL","ESP"):
38 | ninst_model = case.get_value("NINST_{}".format(model))
39 | expect(ninst_model==ninst_max,"MULTI_DRIVER mode, all components must have same NINST value. NINST_{} != {}".format(model,ninst_max))
40 | if comp == "cpl":
41 | if not os.path.exists("user_nl_cpl"):
42 | safe_copy(os.path.join(model_dir, "user_nl_cpl"), ".")
43 | else:
44 | if ninst == 1:
45 | ninst = case.get_value("NINST_{}".format(model))
46 | nlfile = "user_nl_{}".format(comp)
47 | model_nl = os.path.join(model_dir, nlfile)
48 | if ninst > 1:
49 | for inst_counter in range(1, ninst+1):
50 | inst_nlfile = "{}_{:04d}".format(nlfile, inst_counter)
51 | if not os.path.exists(inst_nlfile):
52 | # If there is a user_nl_foo in the case directory, copy it
53 | # to user_nl_foo_INST; otherwise, copy the original
54 | # user_nl_foo from model_dir
55 | if os.path.exists(nlfile):
56 | safe_copy(nlfile, inst_nlfile)
57 | elif os.path.exists(model_nl):
58 | safe_copy(model_nl, inst_nlfile)
59 | else:
60 | # ninst = 1
61 | if not os.path.exists(nlfile):
62 | if os.path.exists(model_nl):
63 | safe_copy(model_nl, nlfile)
64 |
65 | ###############################################################################
66 | def _case_setup_impl(case, caseroot, clean=False, test_mode=False, reset=False):
67 | ###############################################################################
68 | os.chdir(caseroot)
69 |
70 | # Check that $DIN_LOC_ROOT exists - abort if it doesn't (unless this is a namelist-compare test), or create it if the machine is a container:
71 | din_loc_root = case.get_value("DIN_LOC_ROOT")
72 | if case.get_value("MACH") == "container":
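        # Container-specific behavior: create the inputdata directory on demand. In the container
        # image CESMDATAROOT is set to $HOME, so DIN_LOC_ROOT typically resolves to a path under
        # the user's home directory and can simply be created here if missing.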
73 | if not os.path.isdir(din_loc_root):
74 | logger.info("Making inputdata directory: {}".format(din_loc_root))
75 | os.mkdir(din_loc_root)
76 |
77 | testcase = case.get_value("TESTCASE")
78 | expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
79 | "inputdata root is not a directory or is not readable: {}".format(din_loc_root))
80 |
81 | # Remove batch scripts
82 | if reset or clean:
83 | # clean batch script
84 | batch_script = get_batch_script_for_job(case.get_primary_job())
85 | if os.path.exists(batch_script):
86 | os.remove(batch_script)
87 | logger.info("Successfully cleaned batch script {}".format(batch_script))
88 |
89 | if not test_mode:
90 | # rebuild the models (even on restart)
91 | case.set_value("BUILD_COMPLETE", False)
92 |
93 | if not clean:
94 | case.load_env()
95 |
96 | models = case.get_values("COMP_CLASSES")
97 | mach = case.get_value("MACH")
98 | compiler = case.get_value("COMPILER")
99 | debug = case.get_value("DEBUG")
100 | mpilib = case.get_value("MPILIB")
101 | sysos = case.get_value("OS")
102 | expect(mach is not None, "xml variable MACH is not set")
103 |
104 | # creates the Macros.make, Depends.compiler, Depends.machine, Depends.machine.compiler
105 | # and env_mach_specific.xml if they don't already exist.
106 | if not os.path.isfile("Macros.make") or not os.path.isfile("env_mach_specific.xml"):
107 | configure(Machines(machine=mach), caseroot, ["Makefile"], compiler, mpilib, debug, sysos)
108 |
109 | # Set tasks to 1 if mpi-serial library
110 | if mpilib == "mpi-serial":
111 | case.set_value("NTASKS", 1)
112 |
113 | # Check ninst.
114 | # In CIME there can be multiple instances of each component model (an ensemble) NINST is the instance of that component.
115 | multi_driver = case.get_value("MULTI_DRIVER")
116 | for comp in models:
117 | ntasks = case.get_value("NTASKS_{}".format(comp))
118 | if comp == "CPL":
119 | continue
120 | ninst = case.get_value("NINST_{}".format(comp))
121 | if multi_driver:
122 | expect(case.get_value("NINST_LAYOUT_{}".format(comp)) == "concurrent",
123 | "If multi_driver is TRUE, NINST_LAYOUT_{} must be concurrent".format(comp))
124 | case.set_value("NTASKS_PER_INST_{}".format(comp), ntasks)
125 | else:
126 | if ninst > ntasks:
127 | if ntasks == 1:
128 | case.set_value("NTASKS_{}".format(comp), ninst)
129 | ntasks = ninst
130 | else:
131 | expect(False, "NINST_{comp} value {ninst} greater than NTASKS_{comp} {ntasks}".format(comp=comp, ninst=ninst, ntasks=ntasks))
132 |
133 | case.set_value("NTASKS_PER_INST_{}".format(comp), max(1,int(ntasks / ninst)))
134 |
135 | if os.path.exists(get_batch_script_for_job(case.get_primary_job())):
136 | logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
137 |
138 | case.initialize_derived_attributes()
139 |
140 | case.set_value("SMP_PRESENT", case.get_build_threaded())
141 |
142 | else:
143 | case.check_pelayouts_require_rebuild(models)
144 |
145 | unlock_file("env_build.xml")
146 | unlock_file("env_batch.xml")
147 |
148 | case.flush()
149 | case.check_lockedfiles()
150 |
151 | case.initialize_derived_attributes()
152 |
153 | cost_per_node = case.get_value("COSTPES_PER_NODE")
154 | case.set_value("COST_PES", case.num_nodes * cost_per_node)
155 | threaded = case.get_build_threaded()
156 | case.set_value("SMP_PRESENT", threaded)
157 | if threaded and case.total_tasks * case.thread_count > cost_per_node:
158 | smt_factor = max(1.0,int(case.get_value("MAX_TASKS_PER_NODE") / cost_per_node))
159 | case.set_value("TOTALPES", int(case.total_tasks * max(1.0,float(case.thread_count) / smt_factor)))
160 | else:
161 | case.set_value("TOTALPES", case.total_tasks*case.thread_count)
162 |
163 |
164 | # May need to select new batch settings if pelayout changed (e.g. problem is now too big for prev-selected queue)
165 | env_batch = case.get_env("batch")
166 | env_batch.set_job_defaults([(case.get_primary_job(), {})], case)
167 |
168 | # create batch files
169 | env_batch.make_all_batch_files(case)
170 | if get_model() == "e3sm" and not case.get_value("TEST"):
171 | input_batch_script = os.path.join(case.get_value("MACHDIR"), "template.case.run.sh")
172 | env_batch.make_batch_script(input_batch_script, "case.run", case, outfile=get_batch_script_for_job("case.run.sh"))
173 |
174 | # Make a copy of env_mach_pes.xml in order to be able
175 | # to check that it does not change once case.setup is invoked
176 | case.flush()
177 | logger.debug("at copy TOTALPES = {}".format(case.get_value("TOTALPES")))
178 | lock_file("env_mach_pes.xml")
179 | lock_file("env_batch.xml")
180 |
181 | # Create user_nl files for the required number of instances
182 | if not os.path.exists("user_nl_cpl"):
183 | logger.info("Creating user_nl_xxx files for components and cpl")
184 |
185 | # loop over models
186 | for model in models:
187 | comp = case.get_value("COMP_{}".format(model))
188 | logger.debug("Building {} usernl files".format(model))
189 | _build_usernl_files(case, model, comp)
190 | if comp == "cism":
191 | glcroot = case.get_value("COMP_ROOT_DIR_GLC")
192 | run_cmd_no_fail("{}/cime_config/cism.template {}".format(glcroot, caseroot))
193 |
194 | _build_usernl_files(case, "drv", "cpl")
195 |
196 | # Create needed directories for case
197 | case.create_dirs()
198 |
199 | logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")
200 |
201 | # Some tests need namelists created here (ERP) - so do this if we are in test mode
202 | if test_mode or get_model() == "e3sm":
203 | logger.info("Generating component namelists as part of setup")
204 | case.create_namelists()
205 |
206 | # Record env information
207 | env_module = case.get_env("mach_specific")
208 | env_module.make_env_mach_specific_file("sh", case)
209 | env_module.make_env_mach_specific_file("csh", case)
210 | env_module.save_all_env_info("software_environment.txt")
211 |
212 | logger.info("You can now run './preview_run' to get more info on how your case will be run")
213 |
214 | ###############################################################################
215 | def case_setup(self, clean=False, test_mode=False, reset=False):
216 | ###############################################################################
217 | caseroot, casebaseid = self.get_value("CASEROOT"), self.get_value("CASEBASEID")
218 | phase = "setup.clean" if clean else "case.setup"
219 | functor = lambda: _case_setup_impl(self, caseroot, clean, test_mode, reset)
220 |
221 | if self.get_value("TEST") and not test_mode:
222 | test_name = casebaseid if casebaseid is not None else self.get_value("CASE")
223 | with TestStatus(test_dir=caseroot, test_name=test_name) as ts:
224 | try:
225 | run_and_log_case_status(functor, phase, caseroot=caseroot)
226 | except:
227 | ts.set_status(SETUP_PHASE, TEST_FAIL_STATUS)
228 | raise
229 | else:
230 | if clean:
231 | ts.set_status(SETUP_PHASE, TEST_PEND_STATUS)
232 | else:
233 | ts.set_status(SETUP_PHASE, TEST_PASS_STATUS)
234 | else:
235 | run_and_log_case_status(functor, phase, caseroot=caseroot)
236 |
--------------------------------------------------------------------------------
/CESM/2.1/Files/config_inputdata.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
17 |
18 |
19 | wget
20 | ftp://ftp.cgd.ucar.edu/cesm/inputdata/
21 | anonymous
22 | user@example.edu
23 | ../inputdata_checksum.dat
24 |
25 |
26 |
27 | ftp requires the python package ftplib
28 | ftp
29 | ftp.cgd.ucar.edu/cesm/inputdata
30 | anonymous
31 | user@example.edu
32 | ../inputdata_checksum.dat
33 |
34 |
35 |
36 | svn
37 | https://svn-ccsm-inputdata.cgd.ucar.edu/trunk/inputdata
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/CESM/2.1/Files/ea002e626aee6bc6643e8ab5f998e5e4:
--------------------------------------------------------------------------------
1 | K 10
2 | ascii_cert
3 | V 2416
4 | MIIHDzCCBfegAwIBAgIQXeDJZdzJWpXlTjW08UPtATANBgkqhkiG9w0BAQsFADB2MQswCQYDVQQGEwJVUzELMAkGA1UECBMCTUkxEjAQBgNVBAcTCUFubiBBcmJvcjESMBAGA1UEChMJSW50ZXJuZXQyMREwDwYDVQQLEwhJbkNvbW1vbjEfMB0GA1UEAxMWSW5Db21tb24gUlNBIFNlcnZlciBDQTAeFw0xOTExMTgwMDAwMDBaFw0yMTExMTcyMzU5NTlaMIHJMQswCQYDVQQGEwJVUzEOMAwGA1UEERMFODAzMDExETAPBgNVBAgTCENvbG9yYWRvMRAwDgYDVQQHEwdCb3VsZGVyMSAwHgYDVQQJExczMDkwIENlbnRlciBHcmVlbiBEcml2ZTE8MDoGA1UEChMzVGhlIFVuaXZlcnNpdHkgQ29ycG9yYXRpb24gZm9yIEF0bW9zcGhlcmljIFJlc2VhcmNoMQwwCgYDVQQLEwNDR0QxFzAVBgNVBAMMDiouY2dkLnVjYXIuZWR1MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy7czRVw8TeDY2sEOFs30MWaTs0iQ1Mf4HteImVSiQntHtHDtKXtLIyItRTBO+79VsrrEUaJyh9O1M0NxTycrzHdmd3YBj0wOYVSuyl06ZU10w1RyWgsw1G7Usk3LJT4I8/YycY4GWOnEvbWTpBzh51zhyNgWsHlKnqTM1TzCf0fHMYTVfGQi4NWvsRoaS9FJDEv5Cn9B6Wq8q8tq5u/1P6Sk3G4K1hyMJCjvMtdHKQButWfIr4OTKyTf8GCc9sOT6INluM/zIsymVAIBPC+0+vFvhIKn+IAIWhveBrSkY00OzMczOpZizYBvpRlAbqPN6t49A6aA2FFaL5fIezvgWQIDAQABo4IDQzCCAz8wHwYDVR0jBBgwFoAUHgWjd49sluJbh0umtIascQAM5zgwHQYDVR0OBBYEFP3xHp/FaGPiExSSRqGerr8MwmZ2MA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8EAjAAMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBnBgNVHSAEYDBeMFIGDCsGAQQBriMBBAMBATBCMEAGCCsGAQUFBwIBFjRodHRwczovL3d3dy5pbmNvbW1vbi5vcmcvY2VydC9yZXBvc2l0b3J5L2Nwc19zc2wucGRmMAgGBmeBDAECAjBEBgNVHR8EPTA7MDmgN6A1hjNodHRwOi8vY3JsLmluY29tbW9uLXJzYS5vcmcvSW5Db21tb25SU0FTZXJ2ZXJDQS5jcmwwdQYIKwYBBQUHAQEEaTBnMD4GCCsGAQUFBzAChjJodHRwOi8vY3J0LnVzZXJ0cnVzdC5jb20vSW5Db21tb25SU0FTZXJ2ZXJDQV8yLmNydDAlBggrBgEFBQcwAYYZaHR0cDovL29jc3AudXNlcnRydXN0LmNvbTAZBgNVHREEEjAQgg4qLmNnZC51Y2FyLmVkdTCCAX0GCisGAQQB1nkCBAIEggFtBIIBaQFnAHYAfT7y+I//iFVoJMLAyp5SiXkrxQ54CX8uapdomX4i8NcAAAFuf9b69AAABAMARzBFAiBcvjTuX0l5bYAXoALnLLV25ujjcq97+83zhDyGHY/HzQIhAIloW5UIuHYbKf4NiKhYPqulzGAF5Hym8nWzep6EaLBOAHUARJRlLrDuzq/EQAfYqP4owNrmgr7YyzG1P9MzlrW2gagAAAFuf9b63AAABAMARjBEAiAT60WFNIfuhyCTb2ryabwbU0xSNJZ8FLAfhfWInVylegIgSOGqOR4cIceuhpAvtzyOL7yTXnbO8Zij3HlXEiQgK+sAdgBvU3asMfAxGdiZAKRRFf93FRwR2QLBACkGjbIImjfZEwAAAW5/1vrkAAAEAwBHMEUCIAJ5CSdXBfuqfb3uKYOcuRthByAbRU1qRHG5fBmPuU3AAiEAgYNzNY2rgfFvVEgXNFrjIjYNWjbI89xPNL6CylgA7JIwDQYJKoZIhvcNAQELBQADggEBABUlLjm8ZIJYgvzPRQJawdmYJZuVoSA3jqA6oQKz/UOLami9te7KavlN1WqAxizPTqKOXbpQDWDudJ0PoEWafd+hr9OE94F8z3SLICSh1dPyZU/mdxiNCvn7ZsWbyoWn/Q8GO6haUMgZ6m57VP9XajFqTx4mzN5w8oZeR+FxHJcf0KVSmLX97Dj0cCwgk/0NSRVQrfaDeDqqqgPdg/CBBxFJfLnUGluCAaHW0x59MP2RDPD0B1zkNXqNT9bOBqd9jYinApL//zrJISWf/ERFq6AlhkvWBEC+k5KHbyak8xDvGnKugo8iOpBpjHDxOC+fnPLiiO/eP26SRP/EmeI4duI=
5 | K 8
6 | failures
7 | V 1
8 | 8
9 | K 15
10 | svn:realmstring
11 | V 40
12 | https://svn-ccsm-models.cgd.ucar.edu:443
13 | END
14 |
--------------------------------------------------------------------------------
/CESM/2.2/Dockerfile:
--------------------------------------------------------------------------------
1 | ##############################
2 | # ESCOMP/CESM/2.2 Dockerfile #
3 | ##############################
4 |
5 | # Use the ESCOMP centos8 base - this is a base install of CentOS, plus NetCDF/HDF5/PNetCDF/MPICH
6 | FROM escomp/base:centos8
7 |
8 | # Install CESM:
9 | COPY Files/ea002e626aee6bc6643e8ab5f998e5e4 /root/.subversion/auth/svn.ssl.server/
10 | RUN sudo mkdir -p /opt/ncar && \
11 | cd /opt/ncar && \
12 | sudo git clone -b release-cesm2.2.0 https://github.com/ESCOMP/cesm.git cesm2 && \
13 | cd cesm2 && \
14 | sudo ./manage_externals/checkout_externals
15 |
16 | # Set up the environment - create the group and user, the shell variables, the input data directory and sudo access:
17 | RUN sudo echo 'export CESMDATAROOT=${HOME}' | sudo tee /etc/profile.d/escomp.sh && \
18 | sudo echo 'export CIME_MACHINE=container' | sudo tee -a /etc/profile.d/escomp.sh && \
19 | sudo echo 'export USER=$(whoami)' | sudo tee -a /etc/profile.d/escomp.sh && \
20 | sudo echo 'export PS1="[\u@cesm2.2 \W]\$ "' | sudo tee -a /etc/profile.d/escomp.sh && \
21 | sudo echo 'ulimit -s unlimited' | sudo tee -a /etc/profile.d/escomp.sh && \
22 | sudo echo 'export PATH=${PATH}:/opt/ncar/cesm2/cime/scripts' | sudo tee -a /etc/profile.d/escomp.sh
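# (Annotation: 'echo ... | sudo tee' is used instead of a plain '>' redirection because the
#  redirection would be performed by the unprivileged build shell, not by sudo; 'tee -a' appends
#  to /etc/profile.d/escomp.sh with elevated permissions. The leading 'sudo' on echo is harmless
#  but not strictly required.)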
23 |
24 |
25 | # Add the container versions of the config_machines & config_compilers settings - later, integrate these into CIME
26 | COPY Files/config_compilers.xml /opt/ncar/cesm2/cime/config/cesm/machines/
27 | COPY Files/config_machines.xml /opt/ncar/cesm2/cime/config/cesm/machines/
28 | COPY Files/config_inputdata.xml /opt/ncar/cesm2/cime/config/cesm/
29 | COPY Files/case_setup.py /opt/ncar/cesm2/cime/scripts/lib/CIME/case/case_setup.py
30 |
31 | # Add the container changes to the XML files (to be included in stock CIME soon):
32 | COPY Files/config_compsets.xml /opt/ncar/cesm2/cime_config/
33 | COPY Files/config_pes.xml /opt/ncar/cesm2/cime_config/
34 | COPY Files/configs/cam/config_pes.xml /opt/ncar/cesm2/components/cam/cime_config/
35 | COPY Files/configs/cice/config_pes.xml /opt/ncar/cesm2/components/cice/cime_config/
36 | COPY Files/configs/cism/config_pes.xml /opt/ncar/cesm2/components/cism/cime_config/
37 | COPY Files/configs/pop/config_pes.xml /opt/ncar/cesm2/components/pop/cime_config/
38 | COPY Files/configs/clm/config_pes.xml /opt/ncar/cesm2/components/clm/cime_config/
39 |
40 | # Fix for SCAM with GNU in DEBUG mode (ESCOMP/CAM issue #257)
41 | COPY Files/micro_mg3_0.F90 /opt/ncar/cesm2/components/cam/src/physics/pumas/micro_mg3_0.F90
42 |
43 | # Fix for issue with mpi-serial:
44 | COPY Files/scam_shell_commands /opt/ncar/cesm2/components/cam/cime_config/usermods_dirs/scam_mandatory/shell_commands
45 |
46 | # And add the fixed 'create_scam6_iop' script for SCAM:
47 | COPY Files/create_scam6_iop /opt/ncar/cesm2/components/cam/bld/scripts
48 |
49 | ENV CESMROOT=/opt/ncar/cesm2
50 |
51 | CMD ["/bin/bash", "-l"]
52 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/case_setup.py:
--------------------------------------------------------------------------------
1 | """
2 | Library for case.setup.
3 | case_setup is a member of class Case from file case.py
4 | """
5 |
6 | from CIME.XML.standard_module_setup import *
7 |
8 | from CIME.XML.machines import Machines
9 | from CIME.BuildTools.configure import configure
10 | from CIME.utils import get_cime_root, run_and_log_case_status, get_model, get_batch_script_for_job, safe_copy
11 | from CIME.test_status import *
12 | from CIME.locked_files import unlock_file, lock_file
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 | ###############################################################################
17 | def _build_usernl_files(case, model, comp):
18 | ###############################################################################
19 | """
20 | Create user_nl_xxx files, expects cwd is caseroot
21 | """
22 | model = model.upper()
23 | if model == "DRV":
24 | model_file = case.get_value("CONFIG_CPL_FILE")
25 | else:
26 | model_file = case.get_value("CONFIG_{}_FILE".format(model))
27 | expect(model_file is not None,
28 | "Could not locate CONFIG_{}_FILE in config_files.xml".format(model))
29 | model_dir = os.path.dirname(model_file)
30 |
31 | expect(os.path.isdir(model_dir),
32 | "cannot find cime_config directory {} for component {}".format(model_dir, comp))
33 | comp_interface = case.get_value("COMP_INTERFACE")
34 | multi_driver = case.get_value("MULTI_DRIVER")
35 | ninst = 1
36 |
37 | if multi_driver:
38 | ninst_max = case.get_value("NINST_MAX")
39 | if comp_interface != "nuopc" and model not in ("DRV","CPL","ESP"):
40 | ninst_model = case.get_value("NINST_{}".format(model))
41 | expect(ninst_model==ninst_max,"MULTI_DRIVER mode, all components must have same NINST value. NINST_{} != {}".format(model,ninst_max))
42 | if comp == "cpl":
43 | if not os.path.exists("user_nl_cpl"):
44 | safe_copy(os.path.join(model_dir, "user_nl_cpl"), ".")
45 | else:
46 | if comp_interface == "nuopc":
47 | ninst = case.get_value("NINST")
48 | elif ninst == 1:
49 | ninst = case.get_value("NINST_{}".format(model))
50 | nlfile = "user_nl_{}".format(comp)
51 | model_nl = os.path.join(model_dir, nlfile)
52 | if ninst > 1:
53 | for inst_counter in range(1, ninst+1):
54 | inst_nlfile = "{}_{:04d}".format(nlfile, inst_counter)
55 | if not os.path.exists(inst_nlfile):
56 | # If there is a user_nl_foo in the case directory, copy it
57 | # to user_nl_foo_INST; otherwise, copy the original
58 | # user_nl_foo from model_dir
59 | if os.path.exists(nlfile):
60 | safe_copy(nlfile, inst_nlfile)
61 | elif os.path.exists(model_nl):
62 | safe_copy(model_nl, inst_nlfile)
63 | else:
64 | # ninst = 1
65 | if not os.path.exists(nlfile):
66 | if os.path.exists(model_nl):
67 | safe_copy(model_nl, nlfile)
68 |
69 | ###############################################################################
70 | def _case_setup_impl(case, caseroot, clean=False, test_mode=False, reset=False, keep=None):
71 | ###############################################################################
72 | os.chdir(caseroot)
73 |
74 | non_local = case.get_value("NONLOCAL")
75 |
76 | models = case.get_values("COMP_CLASSES")
77 | mach = case.get_value("MACH")
78 | compiler = case.get_value("COMPILER")
79 | debug = case.get_value("DEBUG")
80 | mpilib = case.get_value("MPILIB")
81 | sysos = case.get_value("OS")
82 | comp_interface = case.get_value("COMP_INTERFACE")
83 | extra_machines_dir = case.get_value("EXTRA_MACHDIR")
84 | expect(mach is not None, "xml variable MACH is not set")
85 |
86 | # Check that $DIN_LOC_ROOT exists - and abort if not a namelist compare tests
87 | if not non_local:
88 | din_loc_root = case.get_value("DIN_LOC_ROOT")
89 | if case.get_value("MACH") == "container":
90 | if not os.path.isdir(din_loc_root):
91 | logger.info("Making inputdata directory: {}".format(din_loc_root))
92 | os.mkdir(din_loc_root)
93 | testcase = case.get_value("TESTCASE")
94 | expect(not (not os.path.isdir(din_loc_root) and testcase != "SBN"),
95 | "inputdata root is not a directory or is not readable: {}".format(din_loc_root))
96 |
97 | # Remove batch scripts
98 | if reset or clean:
99 | # clean setup-generated files
100 | batch_script = get_batch_script_for_job(case.get_primary_job())
101 | files_to_clean = [batch_script, "env_mach_specific.xml", "Macros.make", "Macros.cmake"]
102 | for file_to_clean in files_to_clean:
103 | if os.path.exists(file_to_clean) and not (keep and file_to_clean in keep):
104 | os.remove(file_to_clean)
105 | logger.info("Successfully cleaned {}".format(file_to_clean))
106 |
107 | if not test_mode:
108 | # rebuild the models (even on restart)
109 | case.set_value("BUILD_COMPLETE", False)
110 |
111 | # Cannot leave case in bad state (missing env_mach_specific.xml)
112 | if clean and not os.path.isfile("env_mach_specific.xml"):
113 | case.flush()
114 | configure(Machines(machine=mach, extra_machines_dir=extra_machines_dir),
115 | caseroot, ["Makefile"], compiler, mpilib, debug, comp_interface, sysos, noenv=True,
116 | extra_machines_dir=extra_machines_dir)
117 | case.read_xml()
118 |
119 | if not clean:
120 | if not non_local:
121 | case.load_env()
122 |
123 | # creates the Macros.make, Depends.compiler, Depends.machine, Depends.machine.compiler
124 | # and env_mach_specific.xml if they don't already exist.
125 | if not os.path.isfile("Macros.make") or not os.path.isfile("env_mach_specific.xml"):
126 | reread = not os.path.isfile("env_mach_specific.xml")
127 | if reread:
128 | case.flush()
129 | configure(Machines(machine=mach, extra_machines_dir=extra_machines_dir),
130 | caseroot, ["Makefile"], compiler, mpilib, debug, comp_interface, sysos, noenv=True,
131 | extra_machines_dir=extra_machines_dir)
132 | if reread:
133 | case.read_xml()
134 |
135 | # Also write out Cmake macro file
136 | if not os.path.isfile("Macros.cmake"):
137 | configure(Machines(machine=mach, extra_machines_dir=extra_machines_dir),
138 | caseroot, ["CMake"], compiler, mpilib, debug, comp_interface, sysos, noenv=True,
139 | extra_machines_dir=extra_machines_dir)
140 |
141 | # Set tasks to 1 if mpi-serial library
142 | if mpilib == "mpi-serial":
143 | case.set_value("NTASKS", 1)
144 |
145 | # Check ninst.
146 | # In CIME there can be multiple instances of each component model (an ensemble) NINST is the instance of that component.
147 | comp_interface = case.get_value("COMP_INTERFACE")
148 | if comp_interface == "nuopc":
149 | ninst = case.get_value("NINST")
150 |
151 | multi_driver = case.get_value("MULTI_DRIVER")
152 |
153 | for comp in models:
154 | ntasks = case.get_value("NTASKS_{}".format(comp))
155 | if comp == "CPL":
156 | continue
157 | if comp_interface != "nuopc":
158 | ninst = case.get_value("NINST_{}".format(comp))
159 | if multi_driver:
160 | if comp_interface != "nuopc":
161 | expect(case.get_value("NINST_LAYOUT_{}".format(comp)) == "concurrent",
162 | "If multi_driver is TRUE, NINST_LAYOUT_{} must be concurrent".format(comp))
163 | case.set_value("NTASKS_PER_INST_{}".format(comp), ntasks)
164 | else:
165 | if ninst > ntasks:
166 | if ntasks == 1:
167 | case.set_value("NTASKS_{}".format(comp), ninst)
168 | ntasks = ninst
169 | else:
170 | expect(False, "NINST_{comp} value {ninst} greater than NTASKS_{comp} {ntasks}".format(comp=comp, ninst=ninst, ntasks=ntasks))
171 |
172 | case.set_value("NTASKS_PER_INST_{}".format(comp), max(1,int(ntasks / ninst)))
173 |
174 | if os.path.exists(get_batch_script_for_job(case.get_primary_job())):
175 | logger.info("Machine/Decomp/Pes configuration has already been done ...skipping")
176 |
177 | case.initialize_derived_attributes()
178 |
179 | case.set_value("SMP_PRESENT", case.get_build_threaded())
180 |
181 | else:
182 | case.check_pelayouts_require_rebuild(models)
183 |
184 | unlock_file("env_build.xml")
185 | unlock_file("env_batch.xml")
186 |
187 | case.flush()
188 | case.check_lockedfiles()
189 |
190 | case.initialize_derived_attributes()
191 |
192 | cost_per_node = case.get_value("COSTPES_PER_NODE")
193 | case.set_value("COST_PES", case.num_nodes * cost_per_node)
194 | threaded = case.get_build_threaded()
195 | case.set_value("SMP_PRESENT", threaded)
196 | if threaded and case.total_tasks * case.thread_count > cost_per_node:
197 | smt_factor = max(1.0,int(case.get_value("MAX_TASKS_PER_NODE") / cost_per_node))
198 | case.set_value("TOTALPES", int(case.total_tasks * max(1.0,float(case.thread_count) / smt_factor)))
199 | else:
200 | case.set_value("TOTALPES", case.total_tasks*case.thread_count)
201 |
202 | # May need to select new batch settings if pelayout changed (e.g. problem is now too big for prev-selected queue)
203 | env_batch = case.get_env("batch")
204 | env_batch.set_job_defaults([(case.get_primary_job(), {})], case)
205 |
206 | # create batch files
207 | env_batch.make_all_batch_files(case)
208 | if get_model() == "e3sm" and not case.get_value("TEST"):
209 | input_batch_script = os.path.join(case.get_value("MACHDIR"), "template.case.run.sh")
210 | env_batch.make_batch_script(input_batch_script, "case.run", case, outfile=get_batch_script_for_job("case.run.sh"))
211 |
212 | # Make a copy of env_mach_pes.xml in order to be able
213 | # to check that it does not change once case.setup is invoked
214 | case.flush()
215 | logger.debug("at copy TOTALPES = {}".format(case.get_value("TOTALPES")))
216 | lock_file("env_mach_pes.xml")
217 | lock_file("env_batch.xml")
218 |
219 | # Create user_nl files for the required number of instances
220 | if not os.path.exists("user_nl_cpl"):
221 | logger.info("Creating user_nl_xxx files for components and cpl")
222 |
223 | # loop over models
224 | for model in models:
225 | comp = case.get_value("COMP_{}".format(model))
226 | logger.debug("Building {} usernl files".format(model))
227 | _build_usernl_files(case, model, comp)
228 | if comp == "cism":
229 | glcroot = case.get_value("COMP_ROOT_DIR_GLC")
230 | run_cmd_no_fail("{}/cime_config/cism.template {}".format(glcroot, caseroot))
231 |
232 | _build_usernl_files(case, "drv", "cpl")
233 |
234 | # Create needed directories for case
235 | case.create_dirs()
236 |
237 | logger.info("If an old case build already exists, might want to run \'case.build --clean\' before building")
238 |
239 | # Some tests need namelists created here (ERP) - so do this if we are in test mode
240 | if (test_mode or get_model() == "e3sm") and not non_local:
241 | logger.info("Generating component namelists as part of setup")
242 | case.create_namelists()
243 |
244 | # Record env information
245 | env_module = case.get_env("mach_specific")
246 | env_module.make_env_mach_specific_file("sh", case)
247 | env_module.make_env_mach_specific_file("csh", case)
248 | if not non_local:
249 | env_module.save_all_env_info("software_environment.txt")
250 |
251 | logger.info("You can now run './preview_run' to get more info on how your case will be run")
252 |
253 | ###############################################################################
254 | def case_setup(self, clean=False, test_mode=False, reset=False, keep=None):
255 | ###############################################################################
256 | caseroot, casebaseid = self.get_value("CASEROOT"), self.get_value("CASEBASEID")
257 | phase = "setup.clean" if clean else "case.setup"
258 | functor = lambda: _case_setup_impl(self, caseroot, clean=clean, test_mode=test_mode, reset=reset, keep=keep)
259 |
260 | if self.get_value("TEST") and not test_mode:
261 | test_name = casebaseid if casebaseid is not None else self.get_value("CASE")
262 | with TestStatus(test_dir=caseroot, test_name=test_name) as ts:
263 | try:
264 | run_and_log_case_status(functor, phase, caseroot=caseroot)
265 | except BaseException: # Want to catch KeyboardInterrupt too
266 | ts.set_status(SETUP_PHASE, TEST_FAIL_STATUS)
267 | raise
268 | else:
269 | if clean:
270 | ts.set_status(SETUP_PHASE, TEST_PEND_STATUS)
271 | else:
272 | ts.set_status(SETUP_PHASE, TEST_PASS_STATUS)
273 | else:
274 | run_and_log_case_status(functor, phase, caseroot=caseroot)
275 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/config_compsets.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | =========================================
7 | compset naming convention
8 | =========================================
9 | The compset longname below has the specified order
10 | atm, lnd, ice, ocn, river, glc, wave, cesm-options
11 |
12 | The notation for the compset longname is
13 | TIME_ATM[%phys]_LND[%phys]_ICE[%phys]_OCN[%phys]_ROF[%phys]_GLC[%phys]_WAV[%phys][_ESP%phys][_BGC%phys]
14 | Where for the specific compsets below the following is supported
15 | TIME = Time period (e.g. 2000, HIST, RCP8...)
16 | ATM = [CAM40, CAM50, CAM60]
17 | LND = [CLM45, CLM50, SLND]
18 | ICE = [CICE, DICE, SICE]
19 | OCN = [POP2, MOM6, DOCN, AQUAP, SOCN]
20 | ROF = [RTM, MOSART, SROF]
21 | GLC = [CISM1, CISM2, SGLC]
22 | WAV = [WW3, DWAV, XWAV, SWAV]
23 | ESP = [SESP]
24 | BGC = optional BGC scenario
25 |
26 | The OPTIONAL %phys attributes specify submodes of the given system
27 | For example DOCN%DOM is the data ocean model for DOCN
28 | ALL the possible %phys choices for each component are listed.
29 | ALL data models must have a %phys option that corresponds to the data model mode
30 |
31 | Each compset node is associated with the following elements
32 | - lname
33 | - alias
34 | - support (optional description of the support level for this compset)
35 | Each compset node can also have the following attributes
36 | - grid (optional regular expression match for grid to work with the compset)
37 |
38 |
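    (Worked example, as a reading aid: matching the B1850 longname listed below against the
     template above gives TIME=1850, ATM=CAM60, LND=CLM50%BGC-CROP, ICE=CICE, OCN=POP2%ECO,
     ROF=MOSART, GLC=CISM2%NOEVOLVE, WAV=WW3 and BGC=BGC%BDRD. Assuming the standard CIME
     layout, each compset node below simply pairs such an lname with its alias, e.g.
     <alias>B1850</alias> alongside the corresponding <lname> element.)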
39 |
40 |
41 |
42 | B1850
43 | 1850_CAM60_CLM50%BGC-CROP_CICE_POP2%ECO_MOSART_CISM2%NOEVOLVE_WW3_BGC%BDRD
44 |
45 |
46 |
47 |
48 | B1850C4_Tutorial
49 | B1850_CAM40_CLM50%SP_CICE_POP2%ECO_MOSART_CISM2%NOEVOLVE_SWAV_BGC%BDRD
50 |
51 |
52 |
53 |
54 | BW1850
55 | 1850_CAM60%WCTS_CLM50%BGC-CROP_CICE_POP2%ECO%NDEP_MOSART_CISM2%NOEVOLVE_WW3
56 |
57 |
58 | BWma1850
59 | 1850_CAM60%WCCM_CLM50%BGC-CROP_CICE_POP2%ECO%NDEP_MOSART_CISM2%NOEVOLVE_WW3
60 |
61 |
62 |
63 | BHIST
64 | HIST_CAM60_CLM50%BGC-CROP_CICE_POP2%ECO_MOSART_CISM2%NOEVOLVE_WW3_BGC%BDRD
65 |
66 |
67 |
68 |
69 |
70 | B1850G
71 | 1850_CAM60_CLM50%BGC-CROP_CICE_POP2%ECO_MOSART_CISM2%EVOLVE_WW3_BGC%BDRD
72 |
73 |
74 |
77 |
78 | B1850G1
79 | 1850_CAM60_CLM50%BGC-CROP_CICE_POP2%ECO_MOSART_CISM1%EVOLVE_WW3_BGC%BDRD
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 | ETEST
95 | 2000_CAM60_CLM50%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV_TEST
96 |
97 |
98 |
99 | E1850TEST
100 | 1850_CAM60_CLM50%SP_CICE_DOCN%SOM_MOSART_SGLC_SWAV_TEST
101 |
102 |
103 |
104 |
105 |
107 | B1850MOM
108 | 1850_CAM60_CLM50%BGC-CROP_CICE_MOM6_MOSART_CISM2%NOEVOLVE_SWAV_BGC%BDRD
109 |
110 |
111 |
113 |
114 |
115 | J1850G
116 | 1850_DATM%CRUv7_CLM50%BGC-CROP_CICE_POP2_MOSART_CISM2%EVOLVE_SWAV
117 |
118 |
119 |
120 |
121 |
122 | 0001-01-01
123 | 0001-01-01
124 | 1850-01-01
125 | 1955-01-01
126 | 2005-01-01
127 | 2013-01-01
128 |
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/config_inputdata.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
17 |
18 |
19 | wget
20 | ftp://cesm-inputdata-lowres1.cgd.ucar.edu/cesm/low-res/
21 | anonymous
22 | user@example.edu
23 | ../inputdata_checksum.dat
24 |
25 |
26 |
27 | wget
28 | ftp://ftp.cgd.ucar.edu/cesm/inputdata/
29 | anonymous
30 | user@example.edu
31 | ../inputdata_checksum.dat
32 |
33 |
34 |
35 | ftp requires the python package ftplib
36 | ftp
37 | ftp.cgd.ucar.edu/cesm/inputdata
38 | anonymous
39 | user@example.edu
40 | ../inputdata_checksum.dat
41 |
42 |
43 |
44 | svn
45 | https://svn-ccsm-inputdata.cgd.ucar.edu/trunk/inputdata
46 |
47 |
48 |
49 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/configs/cam/config_pes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | -1
9 | -1
10 | -1
11 | -1
12 | -1
13 | -1
14 | -1
15 | -1
16 |
17 |
18 | 1
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 |
27 |
28 | 0
29 | 0
30 | 0
31 | 0
32 | 0
33 | 0
34 | 0
35 | 0
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | -2
46 | -2
47 | -2
48 | -40
49 | -40
50 | -2
51 | -2
52 | -2
53 |
54 |
55 | 2
56 | 2
57 | 2
58 | 2
59 | 2
60 | 2
61 | 2
62 | 2
63 |
64 |
65 | 0
66 | -2
67 | -4
68 | 0
69 | 0
70 | -6
71 | -8
72 | -10
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 | -2
83 | -2
84 | -2
85 | -2
86 | -2
87 | -2
88 | -2
89 | -2
90 |
91 |
92 | 1
93 | 1
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 |
101 |
102 | 0
103 | 0
104 | 0
105 | 0
106 | 0
107 | 0
108 | 0
109 | 0
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 | -4
119 | -4
120 | -4
121 | -4
122 | -4
123 | -4
124 | -4
125 | -4
126 |
127 |
128 | 2
129 | 2
130 | 2
131 | 2
132 | 2
133 | 2
134 | 2
135 | 2
136 |
137 |
138 | 0
139 | 0
140 | 0
141 | 0
142 | 0
143 | 0
144 | 0
145 | 0
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 | -4
155 | -4
156 | -4
157 | -4
158 | -4
159 | -4
160 | -4
161 | -4
162 |
163 |
164 | 2
165 | 2
166 | 2
167 | 2
168 | 2
169 | 2
170 | 2
171 | 2
172 |
173 |
174 | 0
175 | 0
176 | 0
177 | 0
178 | 0
179 | 0
180 | 0
181 | 0
182 |
183 |
184 |
185 |
186 |
187 |
188 | -4
189 | -4
190 | -4
191 | -4
192 | -4
193 | -4
194 | -4
195 | -4
196 |
197 |
198 | 1
199 | 1
200 | 1
201 | 1
202 | 1
203 | 1
204 | 1
205 | 1
206 |
207 |
208 | 0
209 | 0
210 | 0
211 | 0
212 | 0
213 | 0
214 | 0
215 | 0
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 | 30
226 | 30
227 | 30
228 | 30
229 | 30
230 | 30
231 | 30
232 | 30
233 |
234 |
235 | 2
236 | 2
237 | 2
238 | 2
239 | 2
240 | 2
241 | 2
242 | 2
243 |
244 |
245 | 0
246 | 0
247 | 0
248 | 0
249 | 0
250 | 0
251 | 0
252 | 0
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 | 60
262 | 60
263 | 60
264 | 60
265 | 60
266 | 60
267 | 60
268 | 60
269 |
270 |
271 | 2
272 | 2
273 | 2
274 | 2
275 | 2
276 | 2
277 | 2
278 | 2
279 |
280 |
281 | 0
282 | 0
283 | 0
284 | 0
285 | 0
286 | 0
287 | 0
288 | 0
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 | 60
298 | 60
299 | 60
300 | 60
301 | 60
302 | 60
303 | 60
304 | 60
305 |
306 |
307 | 2
308 | 2
309 | 2
310 | 2
311 | 2
312 | 2
313 | 2
314 | 2
315 |
316 |
317 | 0
318 | 0
319 | 0
320 | 0
321 | 0
322 | 0
323 | 0
324 | 0
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/configs/cice/config_pes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | -1
9 | -1
10 | -1
11 | -1
12 | -1
13 | -1
14 | -1
15 | -1
16 |
17 |
18 | 1
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 |
27 |
28 | 0
29 | 0
30 | 0
31 | 0
32 | 0
33 | 0
34 | 0
35 | 0
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | -2
46 | -2
47 | -2
48 | -40
49 | -40
50 | -2
51 | -2
52 | -2
53 |
54 |
55 | 2
56 | 2
57 | 2
58 | 2
59 | 2
60 | 2
61 | 2
62 | 2
63 |
64 |
65 | 0
66 | -2
67 | -4
68 | 0
69 | 0
70 | -6
71 | -8
72 | -10
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 | -2
83 | -2
84 | -2
85 | -2
86 | -2
87 | -2
88 | -2
89 | -2
90 |
91 |
92 | 1
93 | 1
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 |
101 |
102 | 0
103 | 0
104 | 0
105 | 0
106 | 0
107 | 0
108 | 0
109 | 0
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 | -4
119 | -4
120 | -4
121 | -4
122 | -4
123 | -4
124 | -4
125 | -4
126 |
127 |
128 | 2
129 | 2
130 | 2
131 | 2
132 | 2
133 | 2
134 | 2
135 | 2
136 |
137 |
138 | 0
139 | 0
140 | 0
141 | 0
142 | 0
143 | 0
144 | 0
145 | 0
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 | -4
155 | -4
156 | -4
157 | -4
158 | -4
159 | -4
160 | -4
161 | -4
162 |
163 |
164 | 2
165 | 2
166 | 2
167 | 2
168 | 2
169 | 2
170 | 2
171 | 2
172 |
173 |
174 | 0
175 | 0
176 | 0
177 | 0
178 | 0
179 | 0
180 | 0
181 | 0
182 |
183 |
184 |
185 |
186 |
187 |
188 | -4
189 | -4
190 | -4
191 | -4
192 | -4
193 | -4
194 | -4
195 | -4
196 |
197 |
198 | 1
199 | 1
200 | 1
201 | 1
202 | 1
203 | 1
204 | 1
205 | 1
206 |
207 |
208 | 0
209 | 0
210 | 0
211 | 0
212 | 0
213 | 0
214 | 0
215 | 0
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 | 30
226 | 30
227 | 30
228 | 30
229 | 30
230 | 30
231 | 30
232 | 30
233 |
234 |
235 | 2
236 | 2
237 | 2
238 | 2
239 | 2
240 | 2
241 | 2
242 | 2
243 |
244 |
245 | 0
246 | 0
247 | 0
248 | 0
249 | 0
250 | 0
251 | 0
252 | 0
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 | 60
262 | 60
263 | 60
264 | 60
265 | 60
266 | 60
267 | 60
268 | 60
269 |
270 |
271 | 2
272 | 2
273 | 2
274 | 2
275 | 2
276 | 2
277 | 2
278 | 2
279 |
280 |
281 | 0
282 | 0
283 | 0
284 | 0
285 | 0
286 | 0
287 | 0
288 | 0
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 | 60
298 | 60
299 | 60
300 | 60
301 | 60
302 | 60
303 | 60
304 | 60
305 |
306 |
307 | 2
308 | 2
309 | 2
310 | 2
311 | 2
312 | 2
313 | 2
314 | 2
315 |
316 |
317 | 0
318 | 0
319 | 0
320 | 0
321 | 0
322 | 0
323 | 0
324 | 0
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/configs/cism/config_pes.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | none
8 |
9 | -1
10 | -1
11 | -1
12 | -1
13 | -1
14 | -1
15 | -1
16 | -1
17 |
18 |
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 1
27 |
28 |
29 | 0
30 | 0
31 | 0
32 | 0
33 | 0
34 | 0
35 | 0
36 | 0
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 | none
46 |
47 | 1
48 | 1
49 | 1
50 | 1
51 | 1
52 | 1
53 | 1
54 | 1
55 |
56 |
57 | 1
58 | 1
59 | 1
60 | 1
61 | 1
62 | 1
63 | 1
64 | 1
65 |
66 |
67 | 0
68 | 0
69 | 0
70 | 0
71 | 0
72 | 0
73 | 0
74 | 0
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 | none
83 |
84 | -1
85 | -1
86 | -1
87 | -1
88 | -1
89 | -1
90 | -1
91 | -1
92 |
93 |
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 | 1
101 | 1
102 |
103 |
104 | 0
105 | 0
106 | 0
107 | 0
108 | 0
109 | 0
110 | 0
111 | 0
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 | none
120 |
121 | -8
122 | -8
123 | -8
124 | -8
125 | -8
126 | -8
127 | -8
128 | -8
129 |
130 |
131 | 1
132 | 1
133 | 1
134 | 1
135 | 1
136 | 1
137 | 1
138 | 1
139 |
140 |
141 | 0
142 | 0
143 | 0
144 | 0
145 | 0
146 | 0
147 | 0
148 | 0
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 | none
157 |
158 | -2
159 | -2
160 | -2
161 | -2
162 | -2
163 | -2
164 | -2
165 | -2
166 |
167 |
168 | 1
169 | 1
170 | 1
171 | 1
172 | 1
173 | 1
174 | 1
175 | 1
176 |
177 |
178 | 0
179 | 0
180 | 0
181 | 0
182 | 0
183 | 0
184 | 0
185 | 0
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/create_scam6_iop:
--------------------------------------------------------------------------------
1 | #!/bin/csh -fv
2 |
3 | #**********************************************************************
4 | # Run SCAM with a single IOP
5 | # This script will build and run one IOP
6 | # If a user wishes to run more than one IOP, use create_scam6_iop_multi
7 | #
8 | # Usage:
9 | # ./create_scam6_iop <IOP>           # where IOP name is from list below
10 | # - or -
11 | # ./create_scam6_iop # IOP is specified in the script below
12 | #**********************************************************************
13 |
14 | #------------------
15 | # User sets options in this section
16 | #------------------
17 |
18 | ### Full path of cesm source code and case (output) directories (see examples)
19 |
20 | # set CESMDIR=/project/amp/$USER/collections/cam5_4_175
21 | # set CASEDIR=/project/amp/$USER/cases
22 | set CESMDIR=/opt/ncar/cesm2
23 | set CASEDIR=${HOME}
24 |
25 | ### Case Name
26 |
27 | #set CASETITLE=scam_test
28 | set CASETITLE=scam_case
29 |
30 | ### Set location of user source mods (if any)
31 | setenv this_dir `pwd`
32 | setenv usrsrc ${this_dir}/mods/$CASETITLE
33 |
34 | ### Standard Run Settings
35 | set RES=T42_T42
36 | set COMPSET=FSCAM
37 | set COMPILER=gnu
38 |
39 | ### Set Desired IOP
40 | ### $1 means read from command line. Or put one of the names in:
41 | ### arm95 arm97 atex bomex cgilsS11 cgilsS12 cgilsS6 dycomsRF01 dycomsRF02 gateIII mpace rico sparticus togaII twp06
42 |
43 | if ($#argv == 0) then
44 | set IOP = arm97
45 | else
46 | set IOP = $1
47 | endif
48 |
49 | #------------------
50 | # User should not need to set any options in this section
51 | #------------------
52 |
53 | cd $CASEDIR
54 |
55 | set IOPNAME = scam_$IOP
56 |
57 | ## location of IOP data in CESM Tag
58 | set MODSDIR = $CESMDIR/components/cam/cime_config/usermods_dirs
59 |
60 | #Create full casename
61 | set CASENAME=${CASETITLE}.${COMPSET}.${IOP}
62 |
63 | #------------------
64 | # create case
65 | #------------------
66 |
67 | $CESMDIR/cime/scripts/create_newcase --compset $COMPSET --res $RES --compiler $COMPILER --case $CASEDIR/$CASENAME --user-mods-dir ${MODSDIR}/${IOPNAME} --run-unsupported
68 |
69 | cd $CASEDIR/$CASENAME
70 |
71 | ### Set build and run directories to be under case directory.
72 |
73 | set RUNDIR=${CASEDIR}/${CASENAME}/run
74 | ./xmlchange RUNDIR=$RUNDIR
75 |
76 | ./xmlchange EXEROOT=${CASEDIR}/${CASENAME}/bld
77 |
78 | #------------------
79 | # XMLCHANGE OPTIONS HERE
80 | #------------------
81 |
82 | ### Append to CAM configure options
83 | # ./xmlchange --append CAM_CONFIG_OPTS=' '
84 |
85 | ### DEBUG
86 | #./xmlchange DEBUG='TRUE'
87 |
88 | #------------------
89 | # Setup Case
90 | #------------------
91 |
92 | ./case.setup
93 | # ./case.setup -d -v #-d -v for verbose and debug file
94 |
95 | #------------------
96 | # source mods: copy them into case directory
97 | #------------------
98 |
99 | /bin/cp ${usrsrc}/* SourceMods/src.cam/
100 |
101 | #------------------
102 | # Build
103 | #------------------
104 |
105 | ./case.build
106 | # ./case.build -d -v #-d -v for verbose and debug file
107 |
108 | ### make timing dir kludge [REMOVE WHEN FIXED]
109 | mkdir -p $RUNDIR/timing/checkpoints
110 |
111 | #------------------
112 | # Add all user specific cam namelist changes here
113 | #
114 | # Users should add all user specific namelist changes below in the form of
115 | # namelist_var = new_namelist_value
116 | # Namelist settings which appear in usermods_dir and here will use the values
117 | # specified below
118 | # Other namelist settings from usermods_dirs will be unchanged
119 | # Output can also be specified here (e.g. fincl1)
120 | #------------------
121 |
122 | cat >> user_nl_cam << EOF
123 | use_topo_file = .true.
124 | mfilt = 2500
125 | nhtfrq = 1
126 | fincl1= 'CDNUMC', 'AQSNOW','ANSNOW','FREQSL','LS_FLXPRC'
127 | EOF
128 |
129 | #------------------
130 | # Choose type of job submission (batch or interactive)
131 | #------------------
132 |
133 | ### Submit to Queue (If you have one)
134 | ./case.submit
135 |
136 | ### OR you can run interactively instead of going through the queue
137 | #cd $RUNDIR
138 | #../bld/cesm.exe
139 |
140 |
141 |
142 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/ea002e626aee6bc6643e8ab5f998e5e4:
--------------------------------------------------------------------------------
1 | K 10
2 | ascii_cert
3 | V 2416
4 | MIIHDzCCBfegAwIBAgIQXeDJZdzJWpXlTjW08UPtATANBgkqhkiG9w0BAQsFADB2MQswCQYDVQQGEwJVUzELMAkGA1UECBMCTUkxEjAQBgNVBAcTCUFubiBBcmJvcjESMBAGA1UEChMJSW50ZXJuZXQyMREwDwYDVQQLEwhJbkNvbW1vbjEfMB0GA1UEAxMWSW5Db21tb24gUlNBIFNlcnZlciBDQTAeFw0xOTExMTgwMDAwMDBaFw0yMTExMTcyMzU5NTlaMIHJMQswCQYDVQQGEwJVUzEOMAwGA1UEERMFODAzMDExETAPBgNVBAgTCENvbG9yYWRvMRAwDgYDVQQHEwdCb3VsZGVyMSAwHgYDVQQJExczMDkwIENlbnRlciBHcmVlbiBEcml2ZTE8MDoGA1UEChMzVGhlIFVuaXZlcnNpdHkgQ29ycG9yYXRpb24gZm9yIEF0bW9zcGhlcmljIFJlc2VhcmNoMQwwCgYDVQQLEwNDR0QxFzAVBgNVBAMMDiouY2dkLnVjYXIuZWR1MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy7czRVw8TeDY2sEOFs30MWaTs0iQ1Mf4HteImVSiQntHtHDtKXtLIyItRTBO+79VsrrEUaJyh9O1M0NxTycrzHdmd3YBj0wOYVSuyl06ZU10w1RyWgsw1G7Usk3LJT4I8/YycY4GWOnEvbWTpBzh51zhyNgWsHlKnqTM1TzCf0fHMYTVfGQi4NWvsRoaS9FJDEv5Cn9B6Wq8q8tq5u/1P6Sk3G4K1hyMJCjvMtdHKQButWfIr4OTKyTf8GCc9sOT6INluM/zIsymVAIBPC+0+vFvhIKn+IAIWhveBrSkY00OzMczOpZizYBvpRlAbqPN6t49A6aA2FFaL5fIezvgWQIDAQABo4IDQzCCAz8wHwYDVR0jBBgwFoAUHgWjd49sluJbh0umtIascQAM5zgwHQYDVR0OBBYEFP3xHp/FaGPiExSSRqGerr8MwmZ2MA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8EAjAAMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBnBgNVHSAEYDBeMFIGDCsGAQQBriMBBAMBATBCMEAGCCsGAQUFBwIBFjRodHRwczovL3d3dy5pbmNvbW1vbi5vcmcvY2VydC9yZXBvc2l0b3J5L2Nwc19zc2wucGRmMAgGBmeBDAECAjBEBgNVHR8EPTA7MDmgN6A1hjNodHRwOi8vY3JsLmluY29tbW9uLXJzYS5vcmcvSW5Db21tb25SU0FTZXJ2ZXJDQS5jcmwwdQYIKwYBBQUHAQEEaTBnMD4GCCsGAQUFBzAChjJodHRwOi8vY3J0LnVzZXJ0cnVzdC5jb20vSW5Db21tb25SU0FTZXJ2ZXJDQV8yLmNydDAlBggrBgEFBQcwAYYZaHR0cDovL29jc3AudXNlcnRydXN0LmNvbTAZBgNVHREEEjAQgg4qLmNnZC51Y2FyLmVkdTCCAX0GCisGAQQB1nkCBAIEggFtBIIBaQFnAHYAfT7y+I//iFVoJMLAyp5SiXkrxQ54CX8uapdomX4i8NcAAAFuf9b69AAABAMARzBFAiBcvjTuX0l5bYAXoALnLLV25ujjcq97+83zhDyGHY/HzQIhAIloW5UIuHYbKf4NiKhYPqulzGAF5Hym8nWzep6EaLBOAHUARJRlLrDuzq/EQAfYqP4owNrmgr7YyzG1P9MzlrW2gagAAAFuf9b63AAABAMARjBEAiAT60WFNIfuhyCTb2ryabwbU0xSNJZ8FLAfhfWInVylegIgSOGqOR4cIceuhpAvtzyOL7yTXnbO8Zij3HlXEiQgK+sAdgBvU3asMfAxGdiZAKRRFf93FRwR2QLBACkGjbIImjfZEwAAAW5/1vrkAAAEAwBHMEUCIAJ5CSdXBfuqfb3uKYOcuRthByAbRU1qRHG5fBmPuU3AAiEAgYNzNY2rgfFvVEgXNFrjIjYNWjbI89xPNL6CylgA7JIwDQYJKoZIhvcNAQELBQADggEBABUlLjm8ZIJYgvzPRQJawdmYJZuVoSA3jqA6oQKz/UOLami9te7KavlN1WqAxizPTqKOXbpQDWDudJ0PoEWafd+hr9OE94F8z3SLICSh1dPyZU/mdxiNCvn7ZsWbyoWn/Q8GO6haUMgZ6m57VP9XajFqTx4mzN5w8oZeR+FxHJcf0KVSmLX97Dj0cCwgk/0NSRVQrfaDeDqqqgPdg/CBBxFJfLnUGluCAaHW0x59MP2RDPD0B1zkNXqNT9bOBqd9jYinApL//zrJISWf/ERFq6AlhkvWBEC+k5KHbyak8xDvGnKugo8iOpBpjHDxOC+fnPLiiO/eP26SRP/EmeI4duI=
5 | K 8
6 | failures
7 | V 1
8 | 8
9 | K 15
10 | svn:realmstring
11 | V 40
12 | https://svn-ccsm-models.cgd.ucar.edu:443
13 | END
14 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/machines.py:
--------------------------------------------------------------------------------
1 | """
2 | Interface to the config_machines.xml file. This class inherits from GenericXML.py
3 | """
4 | from CIME.XML.standard_module_setup import *
5 | from CIME.XML.generic_xml import GenericXML
6 | from CIME.XML.files import Files
7 | from CIME.utils import convert_to_unknown_type, get_cime_config, get_all_cime_models
8 |
9 | import socket
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 | class Machines(GenericXML):
14 |
15 | def __init__(self, infile=None, files=None, machine=None, extra_machines_dir=None):
16 | """
17 | initialize an object
18 | if a filename is provided it will be used,
19 | otherwise if a files object is provided it will be used
20 | otherwise create a files object from default values
21 |
22 | If extra_machines_dir is provided, it should be a string giving a path to an
23 | additional directory that will be searched for a config_machines.xml file; if
24 | found, the contents of this file will be appended to the standard
25 | config_machines.xml. An empty string is treated the same as None.
26 | """
27 |
28 | self.machine_node = None
29 | self.machine = None
30 | self.machines_dir = None
31 | self.custom_settings = {}
32 | schema = None
33 | supported_models = []
34 | if files is None:
35 | files = Files()
36 | if infile is None:
37 | infile = files.get_value("MACHINES_SPEC_FILE")
38 | schema = files.get_schema("MACHINES_SPEC_FILE")
39 | logger.debug("Verifying using schema {}".format(schema))
40 |
41 | self.machines_dir = os.path.dirname(infile)
42 |
43 | GenericXML.__init__(self, infile, schema)
44 |
45 | # Append the contents of $HOME/.cime/config_machines.xml if it exists.
46 | #
47 | # Also append the contents of a config_machines.xml file in the directory given by
48 | # extra_machines_dir, if present.
49 | #
50 | # This could cause problems if node matches are repeated when only one is expected.
51 | local_infile = os.path.join(os.environ.get("HOME"),".cime","config_machines.xml")
52 | logger.debug("Infile: {}".format(local_infile))
53 | if os.path.exists(local_infile):
54 | GenericXML.read(self, local_infile, schema)
55 | if extra_machines_dir:
56 | local_infile = os.path.join(extra_machines_dir, "config_machines.xml")
57 | logger.debug("Infile: {}".format(local_infile))
58 | if os.path.exists(local_infile):
59 | GenericXML.read(self, local_infile, schema)
60 |
61 | if machine is None:
62 | if "CIME_MACHINE" in os.environ:
63 | machine = os.environ["CIME_MACHINE"]
64 | else:
65 | cime_config = get_cime_config()
66 | if cime_config.has_option("main", "machine"):
67 | machine = cime_config.get("main", "machine")
68 | if machine is None:
69 | machine = self.probe_machine_name()
70 | if machine is None:
71 | for potential_model in get_all_cime_models():
72 | local_infile = os.path.join(get_cime_root(), "config",potential_model,"machines","config_machines.xml")
73 | if local_infile != infile:
74 | GenericXML.read(self, local_infile, schema)
75 | if self.probe_machine_name() is not None:
76 | supported_models.append(potential_model)
77 | GenericXML.change_file(self, infile, schema)
78 |
79 | expect(machine is not None, "Could not initialize machine object from {} or {}. This machine is not available for the target CIME_MODEL. The supported CIME_MODELS that can be used are: {}".format(infile, local_infile, supported_models))
80 | self.set_machine(machine)
81 |
82 | def get_child(self, name=None, attributes=None, root=None, err_msg=None):
83 | if root is None:
84 | root = self.machine_node
85 | return super(Machines, self).get_child(name, attributes, root, err_msg)
86 |
87 | def get_machines_dir(self):
88 | """
89 | Return the directory of the machines file
90 | """
91 | return self.machines_dir
92 |
93 | def get_machine_name(self):
94 | """
95 | Return the name of the machine
96 | """
97 | return self.machine
98 |
99 | def get_node_names(self):
100 | """
101 | Return the names of all the child nodes for the target machine
102 | """
103 | nodes = self.get_children(root=self.machine_node)
104 | node_names = []
105 | for node in nodes:
106 | node_names.append(self.name(node))
107 | return node_names
108 |
109 | def get_first_child_nodes(self, nodename):
110 | """
111 | Return all the child nodes with the given name for the target machine
112 | """
113 | nodes = self.get_children(nodename, root=self.machine_node)
114 | return nodes
115 |
116 | def list_available_machines(self):
117 | """
118 | Return a list of machines defined for a given CIME_MODEL
119 | """
120 | machines = []
121 | nodes = self.get_children("machine")
122 | for node in nodes:
123 | mach = self.get(node, "MACH")
124 | machines.append(mach)
125 | return machines
126 |
127 | def probe_machine_name(self, warn=True):
128 | """
129 | Find a matching regular expression for hostname
130 | in the NODENAME_REGEX field in the file. First match wins.
131 | """
132 |
133 | names_not_found = []
134 |
135 | nametomatch = socket.getfqdn()
136 | machine = self._probe_machine_name_one_guess(nametomatch)
137 |
138 | if machine is None:
139 | names_not_found.append(nametomatch)
140 |
141 | nametomatch = socket.gethostname()
142 | machine = self._probe_machine_name_one_guess(nametomatch)
143 |
144 | if "CIME_MACHINE" in os.environ:
145 | machine = os.environ["CIME_MACHINE"]
146 |
147 | if machine is None:
148 | names_not_found.append(nametomatch)
149 |
150 | names_not_found_quoted = ["'" + name + "'" for name in names_not_found]
151 | names_not_found_str = ' or '.join(names_not_found_quoted)
152 | if warn:
153 | logger.warning("Could not find machine match for {}".format(names_not_found_str))
154 |
155 | return machine
156 |
157 | def _probe_machine_name_one_guess(self, nametomatch):
158 | """
159 | Find a matching regular expression for nametomatch in the NODENAME_REGEX
160 | field in the file. First match wins. Returns None if no match is found.
161 | """
162 |
163 | machine = None
164 | nodes = self.get_children("machine")
165 |
166 | for node in nodes:
167 | machtocheck = self.get(node, "MACH")
168 | logger.debug("machine is " + machtocheck)
169 | regex_str_node = self.get_optional_child("NODENAME_REGEX", root=node)
170 | regex_str = machtocheck if regex_str_node is None else self.text(regex_str_node)
171 |
172 | if regex_str is not None:
173 | logger.debug("machine regex string is " + regex_str)
174 | regex = re.compile(regex_str)
175 | if regex.match(nametomatch):
176 | logger.debug("Found machine: {} matches {}".format(machtocheck, nametomatch))
177 | machine = machtocheck
178 | break
179 |
180 | return machine
181 |
182 | def set_machine(self, machine):
183 | """
184 | Sets the machine block in the Machines object
185 |
186 | >>> machobj = Machines(machine="melvin")
187 | >>> machobj.get_machine_name()
188 | 'melvin'
189 | >>> machobj.set_machine("trump") # doctest: +IGNORE_EXCEPTION_DETAIL
190 | Traceback (most recent call last):
191 | ...
192 | CIMEError: ERROR: No machine trump found
193 | """
194 | if machine == "Query":
195 | self.machine = machine
196 | elif self.machine != machine or self.machine_node is None:
197 | self.machine_node = super(Machines,self).get_child("machine", {"MACH" : machine}, err_msg="No machine {} found".format(machine))
198 | self.machine = machine
199 |
200 | return machine
201 |
202 | #pylint: disable=arguments-differ
203 | def get_value(self, name, attributes=None, resolved=True, subgroup=None):
204 | """
205 | Get Value of fields in the config_machines.xml file
206 | """
207 | expect(self.machine_node is not None, "Machine object has no machine defined")
208 | expect(subgroup is None, "This class does not support subgroups")
209 | value = None
210 |
211 | if name in self.custom_settings:
212 | return self.custom_settings[name]
213 |
214 | # COMPILER and MPILIB are special, if called without arguments they get the default value from the
215 | # COMPILERS and MPILIBS lists in the file.
216 | if name == "COMPILER":
217 | value = self.get_default_compiler()
218 | elif name == "MPILIB":
219 | value = self.get_default_MPIlib(attributes)
220 | else:
221 | node = self.get_optional_child(name, root=self.machine_node, attributes=attributes)
222 | if node is not None:
223 | value = self.text(node)
224 |
225 | if resolved:
226 | if value is not None:
227 | value = self.get_resolved_value(value)
228 | elif name in os.environ:
229 | value = os.environ[name]
230 |
231 | value = convert_to_unknown_type(value)
232 |
233 | return value
234 |
235 | def get_field_from_list(self, listname, reqval=None, attributes=None):
236 | """
237 | Some of the fields have lists of valid values in the xml, parse these
238 | lists and return the first value if reqval is not provided and reqval
239 | if it is a valid setting for the machine
240 | """
241 | expect(self.machine_node is not None, "Machine object has no machine defined")
242 | supported_values = self.get_value(listname, attributes=attributes)
243 | # if no match with attributes, try without
244 | if supported_values is None:
245 | supported_values = self.get_value(listname, attributes=None)
246 |
247 | expect(supported_values is not None,
248 | "No list found for " + listname + " on machine " + self.machine)
249 | supported_values = supported_values.split(",") #pylint: disable=no-member
250 |
251 | if reqval is None or reqval == "UNSET":
252 | return supported_values[0]
253 |
254 | for val in supported_values:
255 | if val == reqval:
256 | return reqval
257 | return None
258 |
259 | def get_default_compiler(self):
260 | """
261 | Get the compiler to use from the list of COMPILERS
262 | """
263 | cime_config = get_cime_config()
264 | if cime_config.has_option('main','COMPILER'):
265 | value = cime_config.get('main', 'COMPILER')
266 | expect(self.is_valid_compiler(value), "User-selected compiler {} is not supported on machine {}".format(value, self.machine))
267 | else:
268 | value = self.get_field_from_list("COMPILERS")
269 | return value
270 |
271 | def get_default_MPIlib(self, attributes=None):
272 | """
273 | Get the MPILIB to use from the list of MPILIBS
274 | """
275 | return self.get_field_from_list("MPILIBS", attributes=attributes)
276 |
277 | def is_valid_compiler(self,compiler):
278 | """
279 | Check the compiler is valid for the current machine
280 |
281 | >>> machobj = Machines(machine="cori-knl")
282 | >>> machobj.get_default_compiler()
283 | 'intel'
284 | >>> machobj.is_valid_compiler("gnu")
285 | True
286 | >>> machobj.is_valid_compiler("nag")
287 | False
288 | """
289 | return self.get_field_from_list("COMPILERS", reqval=compiler) is not None
290 |
291 | def is_valid_MPIlib(self, mpilib, attributes=None):
292 | """
293 | Check the MPILIB is valid for the current machine
294 |
295 | >>> machobj = Machines(machine="cori-knl")
296 | >>> machobj.is_valid_MPIlib("mpi-serial")
297 | True
298 | >>> machobj.is_valid_MPIlib("fake-mpi")
299 | False
300 | """
301 | return mpilib == "mpi-serial" or \
302 | self.get_field_from_list("MPILIBS", reqval=mpilib, attributes=attributes) is not None
303 |
304 | def has_batch_system(self):
305 | """
306 | Return if this machine has a batch system
307 |
308 | >>> machobj = Machines(machine="cori-knl")
309 | >>> machobj.has_batch_system()
310 | True
311 | >>> machobj.set_machine("melvin")
312 | 'melvin'
313 | >>> machobj.has_batch_system()
314 | False
315 | """
316 | result = False
317 | batch_system = self.get_optional_child("BATCH_SYSTEM", root=self.machine_node)
318 | if batch_system is not None:
319 | result = (self.text(batch_system) is not None and self.text(batch_system) != "none")
320 | logger.debug("Machine {} has batch: {}".format(self.machine, result))
321 | return result
322 |
323 | def get_suffix(self, suffix_type):
324 | node = self.get_optional_child("default_run_suffix")
325 | if node is not None:
326 | suffix_node = self.get_optional_child(suffix_type, root=node)
327 | if suffix_node is not None:
328 | return self.text(suffix_node)
329 |
330 | return None
331 |
332 | def set_value(self, vid, value, subgroup=None, ignore_type=True):
333 | # A temporary cache only
334 | self.custom_settings[vid] = value
335 |
336 | def print_values(self):
337 | # write out machines
338 | machines = self.get_children("machine")
339 | logger.info("Machines")
340 | for machine in machines:
341 | name = self.get(machine, "MACH")
342 | desc = self.get_child("DESC", root=machine)
343 | os_ = self.get_child("OS", root=machine)
344 | compilers = self.get_child("COMPILERS", root=machine)
345 | max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
346 | max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
347 |
348 | print( " {} : {} ".format(name , self.text(desc)))
349 | print( " os ", self.text(os_))
350 | print( " compilers ",self.text(compilers))
351 | if max_mpitasks_per_node is not None:
352 | print(" pes/node ",self.text(max_mpitasks_per_node))
353 | if max_tasks_per_node is not None:
354 | print(" max_tasks/node ",self.text(max_tasks_per_node))
355 |
356 | def return_values(self):
357 | """ return a dictionary of machine info
358 | This routine is used by external tools in https://github.com/NCAR/CESM_xml2html
359 | """
360 | machines = self.get_children("machine")
361 | mach_dict = dict()
362 | logger.debug("Machines return values")
363 | for machine in machines:
364 | name = self.get(machine, "MACH")
365 | desc = self.get_child("DESC", root=machine)
366 | mach_dict[(name,"description")] = self.text(desc)
367 | os_ = self.get_child("OS", root=machine)
368 | mach_dict[(name,"os")] = self.text(os_)
369 | compilers = self.get_child("COMPILERS", root=machine)
370 | mach_dict[(name,"compilers")] = self.text(compilers)
371 | max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
372 | mach_dict[(name,"max_tasks_per_node")] = self.text(max_tasks_per_node)
373 | max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
374 | mach_dict[(name,"max_mpitasks_per_node")] = self.text(max_mpitasks_per_node)
375 |
376 | return mach_dict
377 |
--------------------------------------------------------------------------------
/CESM/2.2/Files/scam_shell_commands:
--------------------------------------------------------------------------------
1 | # Do not change options below
2 | # these are necessary for scam runs.
3 | #========================================
4 | #
5 | # SCAM works in SPMD mode with a single task, but the default is to run serially.
6 | # Changed for container
7 | #./xmlchange MPILIB=mpi-serial
8 | ./xmlchange NTASKS=1
9 |
10 | # SCAM doesn't have restart functionality yet.
11 | ./xmlchange REST_OPTION=never
12 |
13 | # Note that clm cannot use initial conditions with SCAM -so will only use specified phenology
14 | # Only change if CLM_FORCE_COLDSTART exists.
15 | if [ `./xmlquery --value CLM_FORCE_COLDSTART |& grep -c 'ERROR'` -eq 0 ]; then
16 | ./xmlchange CLM_FORCE_COLDSTART='on'
17 | fi
18 |
--------------------------------------------------------------------------------
/ESMF/8.0/Dockerfile:
--------------------------------------------------------------------------------
1 | ##############################
2 | # ESMF/8 Dockerfile #
3 | ##############################
4 |
5 | # Use the ESCOMP centos8 base - this is a base install of CentOS, plus NetCDF/HDF5/PNetCDF/MPICH
6 | FROM escomp/base:centos8
7 |
8 | ENV ESMF_SLUG="ESMF_8_0_1"
9 |
10 | RUN mkdir -p /tmp/sources && \
11 | cd /tmp/sources && \
12 | wget -q https://github.com/esmf-org/esmf/archive/${ESMF_SLUG}.tar.gz && \
13 | tar zxf ${ESMF_SLUG}.tar.gz && \
14 | cd esmf-${ESMF_SLUG} && \
15 | export ESMF_DIR=/tmp/sources/esmf-${ESMF_SLUG} && \
16 | export ESMF_COMM=mpich3 && \
17 | export ESMF_BOPT="g" && \
18 | export ESMF_NETCDF="nc-config" && \
19 | export ESMF_INSTALL_PREFIX=/usr/local && \
20 | export ESMF_INSTALL_BINDIR=${ESMF_INSTALL_PREFIX}/bin && \
21 | export ESMF_INSTALL_DOCDIR=${ESMF_INSTALL_PREFIX}/doc && \
22 | export ESMF_INSTALL_HEADERDIR=${ESMF_INSTALL_PREFIX}/include && \
23 | export ESMF_INSTALL_LIBDIR=${ESMF_INSTALL_PREFIX}/lib && \
24 | export ESMF_INSTALL_MODDIR=${ESMF_INSTALL_PREFIX}/mod && \
25 | export ESMF_TESTEXHAUSTIVE="OFF" && \
26 | make info && \
27 | make -j $(nproc) && \
28 | # make check && \
29 | make install
30 |
31 | # Build ESMPy =================================================================
32 |
33 | RUN export ESMF_DIR=/tmp/sources/esmf-${ESMF_SLUG} && \
34 | export ESMFMKFILE=/usr/local/lib/esmf.mk && \
35 | cd ${ESMF_DIR}/src/addon/ESMPy && \
36 | pip3 install numpy nose && \
37 | python setup.py build --ESMFMKFILE=${ESMFMKFILE} && \
38 | # python setup.py test && \
39 | python setup.py install && \
40 | cd && \
41 | python -c "import ESMF"
42 |
43 | RUN rm -rf /tmp/sources
44 |
--------------------------------------------------------------------------------
/ESMF/API_changes/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DOCKER_NAMESPACE=""
2 |
3 | FROM ${DOCKER_NAMESPACE}/esmf-doc-base
4 |
5 | ARG TAG1=""
6 | RUN echo "TAG1=$TAG1"
7 |
8 | ENV TAG1=$TAG1
9 |
10 | ARG TAG2=""
11 | RUN echo "TAG2=$TAG2"
12 |
13 | ENV TAG2=$TAG2
14 |
15 | ENV ESMF_ARTIFACTS=/artifacts
16 | RUN mkdir -p ${ESMF_ARTIFACTS}
17 |
18 | WORKDIR ${HOME}/sandbox/esmf
19 | RUN git clone https://github.com/esmf-org/esmf.git src-git
20 |
21 | ENV ESMF_DIR=${HOME}/sandbox/esmf/src-git
22 | RUN echo "ESMF_DIR=$ESMF_DIR"
23 |
24 | RUN git clone https://github.com/esmf-org/esmf-test-scripts.git scripts-git
25 | RUN scripts-git/API_change_script/harvestAPIs
26 |
27 | RUN cp -rv APIs*.out ${ESMF_ARTIFACTS}/
28 | RUN cp -rv diff*.out ${ESMF_ARTIFACTS}/
29 |
30 | RUN zip -r ${ESMF_ARTIFACTS}/api_change-artifacts.zip ${ESMF_ARTIFACTS}
31 |
--------------------------------------------------------------------------------
/ESMF/copyright_update/Dockerfile:
--------------------------------------------------------------------------------
1 | #############################################
2 | # Documentation build dependencies for ESMF #
3 | #############################################
4 |
5 | FROM ubuntu:18.04
6 |
7 | ENV _DEBIAN_FRONTEND=$DEBIAN_FRONTEND
8 | # Avoid having to interact with terminal when installing time-related packages
9 | ENV DEBIAN_FRONTEND=noninteractive
10 | RUN ln -fs /usr/share/zoneinfo/America/Denver /etc/localtime
11 | RUN apt-get -y update && apt-get -y install texlive-full latex2html perl csh \
12 | git build-essential zip gfortran
13 | ENV DEBIAN_FRONTEND=$_DEBIAN_FRONTEND
14 | ENV _DEBIAN_FRONTEND=""
15 |
16 | ########################################
17 | # clone ESMF and run copyright update #
18 | ########################################
19 | ARG DOCKER_NAMESPACE=""
20 |
21 | ARG ESMF_BRANCH=""
22 | RUN echo "ESMF_BRANCH=$ESMF_BRANCH"
23 |
24 | # Clone ESMF
25 | WORKDIR ${HOME}/sandbox/esmf
26 | RUN git clone --branch ${ESMF_BRANCH} --depth 1 https://github.com/esmf-org/esmf.git esmf
27 | # 'cd' does not persist across separate RUN layers, so chain it with the checkout:
28 | RUN cd esmf && git checkout copyright_update
29 |
30 |
31 |
32 | RUN git clone https://github.com/esmf-org/esmf-test-scripts.git
33 | RUN cp -r esmf-test-scripts/copyright_update/replace_string .
34 | RUN chmod +x replace_string
35 |
36 | RUN find esmf > outfiles
37 | RUN grep -v "eps" outfiles > no_eps_files
38 | RUN grep -v "vsd" no_eps_files > no_vsd_files
39 | RUN grep -v "cdd" no_vsd_files > no_cdd_files
40 | RUN grep -v "vtk" no_cdd_files > final_files
41 |
42 | RUN ./replace_string -nb -nq -s s/2002\-2020/2002\-2021/g -F final_files
43 |
44 | # 'cd' does not persist across separate RUN layers, so chain the commit steps:
45 | RUN cd esmf && git add . && \
46 |     git commit -a -m "Copyright update pushed at `date`" && \
47 |     git push -u origin copyright_update
48 |
--------------------------------------------------------------------------------
/ESMF/dev/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DOCKER_NAMESPACE=""
2 |
3 | FROM ${DOCKER_NAMESPACE}/centos8
4 |
5 | ARG ESMF_BRANCH=""
6 |
7 | # Build ESMF ==================================================================
8 |
9 | ENV ESMF_DIR=/tmp/sources/esmf
10 | ENV ESMPY_DIR=${ESMF_DIR}/src/addon/ESMPy
11 | ENV ESMFMKFILE=/usr/local/lib/esmf.mk
12 |
13 | ENV ESMF_COMM=mpich3
14 | ENV ESMF_BOPT="g"
15 | ENV ESMF_NETCDF="nc-config"
16 | ENV ESMF_INSTALL_PREFIX=/usr/local
17 | ENV ESMF_INSTALL_BINDIR=${ESMF_INSTALL_PREFIX}/bin
18 | ENV ESMF_INSTALL_DOCDIR=${ESMF_INSTALL_PREFIX}/doc
19 | ENV ESMF_INSTALL_HEADERDIR=${ESMF_INSTALL_PREFIX}/include
20 | ENV ESMF_INSTALL_LIBDIR=${ESMF_INSTALL_PREFIX}/lib
21 | ENV ESMF_INSTALL_MODDIR=${ESMF_INSTALL_PREFIX}/mod
22 | ENV ESMF_TESTEXHAUSTIVE="ON"
23 |
24 | RUN mkdir -p /tmp/sources && \
25 | mkdir -p /tmp/artifacts && \
26 | cd /tmp/sources && \
27 | git clone --branch ${ESMF_BRANCH} --depth 1 https://github.com/esmf-org/esmf.git && \
28 | cd esmf && \
29 | make info 2>&1 | tee /tmp/artifacts/esmf-make-info.out && \
30 | make -j $(nproc) 2>&1 | tee /tmp/artifacts/esmf-make.out && \
31 | make install 2>&1 | tee /tmp/artifacts/esmf-make-install.out
32 |
33 | # Build ESMPy =================================================================
34 |
35 | RUN cd ${ESMPY_DIR} && \
36 | pip3 install numpy nose && \
37 | python setup.py build --ESMFMKFILE=${ESMFMKFILE} && \
38 | python setup.py install && \
39 | cd && \
40 | python -c "import ESMF"
41 |
42 | RUN cd /tmp && zip -rv artifacts.zip artifacts
43 |
44 | WORKDIR ${ESMF_DIR}
45 |
--------------------------------------------------------------------------------
/ESMF/doc/esmf-doc-base/Dockerfile:
--------------------------------------------------------------------------------
1 | #############################################
2 | # Documentation build dependencies for ESMF #
3 | #############################################
4 |
5 | FROM ubuntu:18.04
6 |
7 | ENV _DEBIAN_FRONTEND=$DEBIAN_FRONTEND
8 | # Avoid having to interact with terminal when installing time-related packages
9 | ENV DEBIAN_FRONTEND=noninteractive
10 | RUN ln -fs /usr/share/zoneinfo/America/Denver /etc/localtime
11 | RUN apt-get -y update && apt-get -y install texlive-full latex2html perl csh \
12 | git build-essential zip gfortran
13 | ENV DEBIAN_FRONTEND=$_DEBIAN_FRONTEND
14 | ENV _DEBIAN_FRONTEND=""
15 |
--------------------------------------------------------------------------------
/ESMF/doc/esmf-doc/Dockerfile:
--------------------------------------------------------------------------------
1 | ########################################
2 | # Build and collect ESMF documentation #
3 | ########################################
4 | ARG DOCKER_NAMESPACE=""
5 |
6 | FROM ${DOCKER_NAMESPACE}/esmf-doc-base
7 |
8 | ARG ESMF_BRANCH=""
9 | RUN echo "ESMF_BRANCH=$ESMF_BRANCH"
10 |
11 | # Where documentation artifacts will be stored in the container
12 | ENV ESMF_ARTIFACTS=/artifacts
13 | RUN mkdir -p ${ESMF_ARTIFACTS}
14 |
15 | # Clone ESMF
16 | WORKDIR ${HOME}/sandbox/esmf
17 | RUN git clone --branch ${ESMF_BRANCH} --depth 1 https://github.com/esmf-org/esmf.git src-git
18 |
19 | ENV ESMF_DIR=${HOME}/sandbox/esmf/src-git
20 |
21 | # Make the NUOPC documentation
22 | WORKDIR ${ESMF_DIR}/src/addon/NUOPC/doc
23 | RUN make localdoc 2>&1 | tee ${ESMF_ARTIFACTS}/nuopc-make-doc.out
24 |
25 | # Make the ESMF documentation
26 | WORKDIR ${ESMF_DIR}
27 | RUN make doc 2>&1 | tee ${ESMF_ARTIFACTS}/esmf-make-doc.out
28 |
29 | # Make the ESMF dev guide documentation
30 | WORKDIR ${ESMF_DIR}/src/doc/dev_guide
31 | RUN make 2>&1 | tee ${ESMF_ARTIFACTS}/dev_guide-make-doc.out
32 |
33 | # Collect and compress the documentation artifacts
34 | WORKDIR ${ESMF_DIR}
35 | RUN cp -rv doc ${ESMF_ARTIFACTS}/doc-esmf
36 | RUN mkdir ${ESMF_ARTIFACTS}/doc-nuopc
37 | RUN cp -rv src/addon/NUOPC/doc/*.pdf src/addon/NUOPC/doc/NUOPC_refdoc src/addon/NUOPC/doc/NUOPC_howtodoc ${ESMF_ARTIFACTS}/doc-nuopc
38 | RUN mkdir ${ESMF_ARTIFACTS}/doc-dev_guide
39 | RUN cp -rv src/doc/dev_guide ${ESMF_ARTIFACTS}/doc-dev_guide
40 |
41 | RUN zip -r ${ESMF_ARTIFACTS}/doc-artifacts.zip ${ESMF_ARTIFACTS}
42 |
--------------------------------------------------------------------------------
/ESMF/doc/esmpy-doc/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:focal
2 |
3 | ENV _DEBIAN_FRONTEND=$DEBIAN_FRONTEND
4 | # Avoid having to interact with terminal when installing time-related packages
5 | ENV DEBIAN_FRONTEND=noninteractive
6 | RUN apt-get --yes update && apt-get --yes install latexmk texlive-fonts-recommended \
7 | texlive-latex-recommended texlive-latex-extra build-essential gfortran git \
8 | python3-pip
9 | ENV DEBIAN_FRONTEND=$_DEBIAN_FRONTEND
10 | ENV _DEBIAN_FRONTEND=""
11 |
12 | ARG ESMF_BRANCH=""
13 | RUN echo "ESMF_BRANCH=$ESMF_BRANCH"
14 |
15 | ENV ARTIFACTS=/artifacts/doc-esmpy
16 | RUN mkdir -p ${ARTIFACTS}
17 |
18 | WORKDIR /opt/
19 | RUN git clone --branch ${ESMF_BRANCH} --depth 1 https://github.com/esmf-org/esmf.git
20 |
21 | ENV ESMF_DIR=/opt/esmf
22 | ENV ESMF_INSTALL_PREFIX=/usr/local
23 | ENV ESMF_INSTALL_BINDIR=${ESMF_INSTALL_PREFIX}/bin
24 | ENV ESMF_INSTALL_DOCDIR=${ESMF_INSTALL_PREFIX}/doc
25 | ENV ESMF_INSTALL_HEADERDIR=${ESMF_INSTALL_PREFIX}/include
26 | ENV ESMF_INSTALL_LIBDIR=${ESMF_INSTALL_PREFIX}/lib
27 | ENV ESMF_INSTALL_MODDIR=${ESMF_INSTALL_PREFIX}/mod
28 |
29 | WORKDIR ${ESMF_DIR}
30 | RUN make 2>&1 | tee esmf-make.out
31 | RUN make install 2>&1 | tee esmf-make-install.out
32 |
33 | ENV ESMFMKFILE=${ESMF_INSTALL_LIBDIR}/esmf.mk
34 | RUN pip3 install sphinx sphinxcontrib-packages sphinxcontrib-bibtex==1.0.0 numpy nose
35 |
36 | WORKDIR ${ESMF_DIR}/src/addon/ESMPy
37 | RUN pip3 install .
38 | WORKDIR doc
39 | RUN make html latexpdf 2>&1 | tee ${ARTIFACTS}/esmpy-make-doc.out
40 |
41 | RUN cp -r esmpy_doc ${ARTIFACTS} && \
42 | cd /artifacts && \
43 | zip -r doc-esmpy.zip doc-esmpy
44 |
--------------------------------------------------------------------------------
/ESMF/nuopc-app-prototypes/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DOCKER_NAMESPACE="esmf"
2 | ARG ESMF_BRANCH="develop"
3 |
4 | FROM ${DOCKER_NAMESPACE}/esmf:${ESMF_BRANCH}
5 |
6 | RUN git clone --depth 1 https://github.com/esmf-org/nuopc-app-prototypes.git
7 | WORKDIR nuopc-app-prototypes
8 | RUN git checkout ${ESMF_BRANCH} || { echo "WARNING: ESMF_BRANCH does not exist. Checking out develop"; git checkout develop; }
9 | RUN ln -s /usr/bin/make /usr/bin/gmake || echo "INFO: gmake symlink already exists"
10 |
11 | ENV ESMF_ARTIFACTS=/opt/artifacts
12 | RUN mkdir -p ${ESMF_ARTIFACTS}
13 |
14 | RUN pip3 install --user unittest-xml-reporting
15 | ADD ./meta_test.py /opt/meta_test.py
16 | ADD ./docker-entrypoint.sh /opt/docker-entrypoint.sh
17 |
18 | RUN mkdir -p /outgoing
19 |
--------------------------------------------------------------------------------
/ESMF/nuopc-app-prototypes/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -Eeuxo pipefail
3 |
4 | case "${1}" in
5 |
6 | testProtos)
7 | # The trailing '|| true' lets the entrypoint keep going even if some prototypes fail;
8 | # it's a hack, but the results are checked afterwards by meta_test.
9 | ./testProtos.sh 2>&1 | tee "${ESMF_ARTIFACTS}"/testProtos.out || true
10 | ;;
11 |
12 | meta_test)
13 | python /opt/meta_test.py -v 2>&1 | tee "${ESMF_ARTIFACTS}"/meta_test.out
14 | ;;
15 |
16 | prep_artifacts)
17 | cd "${ESMF_ARTIFACTS}"/.. && zip -rv artifacts.zip artifacts
18 | ;;
19 |
20 | *)
21 | echo "ERROR: Command not found"; exit 1
22 | ;;
23 |
24 | esac
25 |
--------------------------------------------------------------------------------
/ESMF/nuopc-app-prototypes/meta_test.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import unittest
4 | from collections import OrderedDict
5 |
6 | import xmlrunner
7 |
8 |
9 | class TestsContainer(unittest.TestCase):
10 | longMessage = True
11 | expected = 48
12 | ran = 0
13 | logs = os.getcwd()
14 |
15 | def setUp(self):
16 | print(self._testMethodName)
17 | print('\n')
18 |
19 | def test_all_tests_ran(self):
20 | self.assertEqual(self.expected, self.ran)
21 |
22 | def test_logs_have_no_errors(self):
23 | def the_pred(de):
24 | if de.name.endswith('.ESMF_LogFile'):
25 | return True
26 | else:
27 | return False
28 |
29 | to_find = ['ERROR']
30 | problems = []
31 | for de in walk_files(self.logs, the_pred):
32 | with open(de.path, 'r') as f:
33 | lines = f.readlines()
34 | for line_number, line in enumerate(lines, start=1):
35 | line = line.strip()
36 | for tf in to_find:
37 | if tf in line:
38 | problems.append((de, line, line_number))
39 |
40 | msg = ['\n']
41 | for tup in problems:
42 | de, line, line_number = tup
43 | msg.append(de.name + ':' + str(line_number) + ': ' + line)
44 | msg = '\n'.join(msg)
45 |
46 | self.assertEqual(len(problems), 0, msg=msg)
47 |
48 |
49 | def make_test_function(description, pass_fail):
50 | def test(self):
51 | self.assertEqual(pass_fail, "PASS", description)
52 | return test
53 |
54 |
55 | def walk_files(path, pred):
56 | for de in os.scandir(path):
57 | if de.is_dir():
58 | for de2 in walk_files(de.path, pred):
59 | yield de2
60 | else:
61 | if pred(de):
62 | yield de
63 |
64 |
65 | def make_testsmap_nuopc_protos(outfile):
66 | testsmap = OrderedDict()
67 | ran = 0
68 | with open(outfile, 'r') as f:
69 | parse = False
70 | for line in f:
71 | line = line.strip()
72 | if '== TEST SUMMARY STOP ==' in line:
73 | break
74 | elif '== TEST SUMMARY START ==' in line:
75 | parse = True
76 | elif parse:
77 | result = re.search('(?P<name>.*): (?P<pass_fail>.*)$', line)
78 | if result['name'] in testsmap:
79 | result_name = result['name'] + '_' + str(ran)
80 | else:
81 | result_name = result['name']
82 | testsmap[result_name] = result['pass_fail']
83 | ran += 1
84 | testsmap['ran'] = ran
85 | return testsmap
86 |
87 |
88 | if __name__ == '__main__':
89 | xmlout = os.path.join(os.environ['ESMF_ARTIFACTS'], 'meta_test')
90 | outfile = os.path.join(os.environ['ESMF_ARTIFACTS'], 'testProtos.out')
91 |
92 | testsmap = make_testsmap_nuopc_protos(outfile)
93 |
94 | ran = testsmap.pop('ran')
95 | TestsContainer.ran = ran
96 |
97 | for name, pass_fail in testsmap.items():
98 | test_func = make_test_function(name, pass_fail)
99 | setattr(TestsContainer, 'test_{0}'.format(name), test_func)
100 |
101 | unittest.main(testRunner=xmlrunner.XMLTestRunner(output=xmlout))
102 |
--------------------------------------------------------------------------------
/ESMF/test_coverage/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG DOCKER_NAMESPACE=""
2 |
3 | FROM ${DOCKER_NAMESPACE}/esmf-doc-base
4 |
5 | ARG ESMF_BRANCH=""
6 | RUN echo "ESMF_BRANCH=$ESMF_BRANCH"
7 |
8 | ENV ESMF_ARTIFACTS=/artifacts
9 | RUN mkdir -p ${ESMF_ARTIFACTS}
10 |
11 | WORKDIR ${HOME}/sandbox/esmf
12 | RUN git clone --branch ${ESMF_BRANCH} --depth 1 https://github.com/esmf-org/esmf.git src-git
13 |
14 | ENV ESMF_DIR=${HOME}/sandbox/esmf/src-git
15 |
16 | RUN mkdir store
17 |
18 | ENV LOGDIR=${HOME}/sandbox/esmf/store
19 |
20 | RUN export ESMF_DIR=${HOME}/sandbox/esmf/src-git
21 | RUN export LOGDIR=${HOME}/sandbox/esmf/store
22 |
23 | RUN echo "ESMF_DIR=$ESMF_DIR"
24 | RUN echo "LOGDIR=$LOGDIR"
25 |
26 | RUN git clone https://github.com/esmf-org/esmf-test-scripts.git scripts-git
27 | RUN chmod +x scripts-git/test_coverage_script/test_coverage
28 |
29 | WORKDIR ${ESMF_DIR}
30 | RUN bash ../scripts-git/test_coverage_script/test_coverage
31 |
32 | WORKDIR ${HOME}/sandbox/esmf
33 | RUN cp -rv store/* ${ESMF_ARTIFACTS}/
34 |
35 | RUN zip -r ${ESMF_ARTIFACTS}/test_coverage-artifacts.zip ${ESMF_ARTIFACTS}
36 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | Containerized versions of ESCOMP software (eg, CESM)
3 |
4 | (This is currently in development; documentation and versions are likely to change rapidly.)
5 |
6 |
7 |
8 | # Quick Start for CESM:
9 |
10 | You need to mount a directory into the container to save any files / inputdata - this is done via the '-v' option to 'docker'. For example, to run an interactive bash shell with a local 'cesm' directory (/Users/me/cesm) mounted into home, do:
11 |
12 | docker run -it -v /Users/me/cesm:/home/user escomp/cesm-2.1
13 |
14 |
15 | NOTE: For systems with many cores, you might also need to use an option like --shm-size=512M, since each MPI process will require some space in /dev/shm
16 |
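For example, a sketch of the same interactive command with the shared-memory size raised (the 512M value and the /Users/me/cesm path are placeholders to adapt):

    docker run -it --shm-size=512M -v /Users/me/cesm:/home/user escomp/cesm-2.1
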
17 | (Unlike a CESM configuration on an HPC system, with dedicated directories for inputdata and scratch, this containerized version stores everything in the /home/user directory, so mounting a local directory ensures you won't have to download input data multiple times or lose run data.)
18 |
19 | This will give you a bash prompt like this:
20 |
21 | [user@cesm2.1.3 ~]$
22 |
23 | From here, you can follow the standard CESM documentation on creating / building / submitting cases - inside the container, the submission runs in the foreground. An example set of commands to build a 2-degree F2000climo case (scientifically unsupported, just used as an example), with a case name / directory of 'test1', follows:
24 |
25 | create_newcase --case test1 --compset F2000climo --res f19_g17 --run-unsupported
26 | cd test1
27 | ./xmlchange NTASKS=4
28 | ./case.setup
29 | ./case.build
30 | ./case.submit
31 |
32 |
33 | This will require ~7-10GB of RAM for 1-4 tasks - if you haven't configured your Docker environment to allow that, you'll need to change that under Docker's settings.
34 |
35 | (These will change soon; this is just an is-it-working example.)
36 |
--------------------------------------------------------------------------------
/base/centos8/Dockerfile:
--------------------------------------------------------------------------------
1 | ################################################################################################################
2 | # escomp/base-centos8 Dockerfile #
3 | #--------------------------------------------------------------------------------------------------------------#
4 | # A base CentOS8 install + MPI, HDF5, NetCDF and PNetCDF, as well as other core packages for escomp containers #
5 | ################################################################################################################
6 |
7 | # Use latest CentOS8:
8 | FROM centos:8
9 |
10 | # First, we update the default packages and install some other necessary ones - while this may give
11 | # some people updated versions of packages vs. others, these differences should not be numerically
12 | # important or affect run-time behavior (eg, a newer Bash version, or perl-XML-LibXML version).
13 |
14 | RUN yum -y update && \
15 | yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm && \
16 | yum -y install vim emacs-nox git subversion which sudo csh make m4 cmake wget file byacc curl-devel zlib-devel && \
17 | yum -y install perl-XML-LibXML gcc-gfortran gcc-c++ dnf-plugins-core python3 perl-core && \
18 | yum -y install ftp xmlstarlet diffutils && \
19 | yum -y install git-lfs latexmk texlive-amscls texlive-anyfontsize texlive-cmap texlive-fancyhdr texlive-fncychap \
20 | texlive-dvisvgm texlive-metafont texlive-ec texlive-titlesec texlive-babel-english texlive-tabulary \
21 | texlive-framed texlive-wrapfig texlive-parskip texlive-upquote texlive-capt-of texlive-needspace \
22 | texlive-times texlive-makeindex texlive-helvetic texlive-courier texlive-gsftopk texlive-dvips texlive-mfware texlive-dvisvgm && \
23 | pip3 install rst2pdf sphinx sphinxcontrib-programoutput && \
24 | pip3 install git+https://github.com/esmci/sphinx_rtd_theme.git@version-dropdown-with-fixes && \
25 | dnf --enablerepo=powertools install -y blas-devel lapack-devel && \
26 | ln -s /usr/bin/python3 /usr/bin/python && \
27 | echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf && \
28 | ldconfig && \
29 | yum clean all
30 |
31 |
32 | # Second, let's install MPI - we're doing this by hand because the default packages install into non-standard locations, and
33 | # we want our image as simple as possible. We're also going to use MPICH, though any of the MPICH ABI-compatible libraries
34 | # will work. This is for future compatibility with offloading to cloud.
35 |
36 | RUN mkdir /tmp/sources && \
37 | cd /tmp/sources && \
38 | wget -q http://www.mpich.org/static/downloads/3.3.2/mpich-3.3.2.tar.gz && \
39 | tar zxf mpich-3.3.2.tar.gz && \
40 | cd mpich-3.3.2 && \
41 | ./configure --prefix=/usr/local && \
42 | make -j 2 install && \
43 | rm -rf /tmp/sources
44 |
45 |
46 | # Next, let's install HDF5, NetCDF and PNetCDF - we'll do this by hand, since the packaged versions have
47 | # lots of extra dependencies (at least, as of CentOS 7) and this also lets us control their location (eg, put in /usr/local).
48 | # NOTE: We do want to change where we store the versions / download links, so it's easier to change, but that'll happen later.
49 | RUN mkdir /tmp/sources && \
50 | cd /tmp/sources && \
51 | wget -q https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.0/src/hdf5-1.12.0.tar.gz && \
52 | tar zxf hdf5-1.12.0.tar.gz && \
53 | cd hdf5-1.12.0 && \
54 | ./configure --prefix=/usr/local && \
55 | make -j 2 install && \
56 | cd /tmp/sources && \
57 | wget -q ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-c-4.7.4.tar.gz && \
58 | tar zxf netcdf-c-4.7.4.tar.gz && \
59 | cd netcdf-c-4.7.4 && \
60 | ./configure --prefix=/usr/local && \
61 | make -j 2 install && \
62 | ldconfig && \
63 | cd /tmp/sources && \
64 | wget -q ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-fortran-4.5.3.tar.gz && \
65 | tar zxf netcdf-fortran-4.5.3.tar.gz && \
66 | cd netcdf-fortran-4.5.3 && \
67 | ./configure --prefix=/usr/local && \
68 | make -j 2 install && \
69 | ldconfig && \
70 | cd /tmp/sources && \
71 | wget -q https://parallel-netcdf.github.io/Release/pnetcdf-1.12.1.tar.gz && \
72 | tar zxf pnetcdf-1.12.1.tar.gz && \
73 | cd pnetcdf-1.12.1 && \
74 | ./configure --prefix=/usr/local && \
75 | make -j 2 install && \
76 | ldconfig && \
77 | rm -rf /tmp/sources
78 |
79 | RUN groupadd escomp && \
80 | useradd -c 'ESCOMP User' -d /home/user -g escomp -m -s /bin/bash user && \
81 | echo 'export USER=$(whoami)' >> /etc/profile.d/escomp.sh && \
82 | echo 'export PS1="[\u@escomp \W]\$ "' >> /etc/profile.d/escomp.sh && \
83 | echo 'user ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers.d/escomp
84 |
85 | ENV SHELL=/bin/bash \
86 | LANG=C.UTF-8 \
87 | LC_ALL=C.UTF-8
88 |
89 | USER user
90 | WORKDIR /home/user
91 | CMD ["/bin/bash", "-l"]
92 | #ENTRYPOINT ["/bin/bash", "-l"]
93 |
--------------------------------------------------------------------------------