├── docs ├── index.rst ├── logo.png ├── environment.yml ├── conf.py ├── doxygen-bash.sed ├── Doxyfile └── custom.css ├── .gitignore ├── .readthedocs.yml ├── examples ├── docker │ └── docker_setup.sh ├── quick │ └── miniconda_setup.sh └── pipeline │ ├── hst.sh │ ├── jwst.sh │ ├── data_analysis.sh │ └── cumulative.sh ├── README.md ├── tests ├── test_framework_io.sh ├── run_tests.sh ├── test_config.sh ├── test_framework_miniconda.sh ├── test_framework_docker.sh ├── test_framework_astroconda.sh └── test_framework_system.sh ├── .github └── workflows │ └── main.yml ├── framework ├── io.inc.sh ├── docker.inc.sh ├── miniconda.inc.sh ├── astroconda.inc.sh └── system.inc.sh ├── LICENSE.txt ├── Makefile └── bin └── ec2pinit.inc.sh /docs/index.rst: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tests/shunit 2 | docs/html 3 | docs/latex 4 | docs/man 5 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | conda: 4 | environment: docs/environment.yml 5 | -------------------------------------------------------------------------------- /docs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spacetelescope/ec2_post_init/main/docs/logo.png -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: RTD 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python=3.9 7 | - doxygen 8 | -------------------------------------------------------------------------------- /examples/docker/docker_setup.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | docker_setup "$USER" 5 | docker_pull_many "centos:7" "centos:8" 6 | 7 | for version in {7..8}; do 8 | docker run --rm -it centos:${version} \ 9 | /bin/sh -c 'echo hello world from $(cat /etc/redhat-release)' 10 | done 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ec2 post init 2 | 3 | [![CI](https://github.com/spacetelescope/ec2_post_init/actions/workflows/main.yml/badge.svg)](https://github.com/spacetelescope/ec2_post_init/actions/workflows/main.yml) [![Documentation Status](https://readthedocs.org/projects/ec2-post-init/badge/?version=latest)](https://ec2-post-init.readthedocs.io/en/latest/?badge=latest) 4 | 5 | The goal is simple. `ec2_post_init` can be used to populate STScI specific EC2 instances with common software. 6 | -------------------------------------------------------------------------------- /examples/quick/miniconda_setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | # Download and install the "latest" release of miniconda3 5 | mc_install "latest" "$HOME/miniconda3" 6 | 7 | # Initialize miniconda3 (automatic conda init, conda config, etc) 8 | mc_initialize "$HOME/miniconda3" 9 | 10 | # Create a few basic environments 11 | # Note: -y/--yes isn't required. "always_yes" is set to true by mc_initialize 12 | conda create -n py39 python=3.9 13 | conda create -n py310 python=3.10 14 | 15 | # Save space. 
Clean up conda's caches 16 | mc_clean 17 | -------------------------------------------------------------------------------- /examples/pipeline/hst.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | # Update system packages 5 | sys_pkg_update_all 6 | 7 | # Install additional packages 8 | sys_pkg_install curl \ 9 | gcc \ 10 | git \ 11 | sudo 12 | 13 | # "become" the target user 14 | sys_user_push ec2-user 15 | 16 | miniconda_root=$HOME/miniconda3 17 | miniconda_version="py39_4.12.0" 18 | 19 | # Install miniconda 20 | mc_install "$miniconda_version" "$miniconda_root" 21 | 22 | # Configure miniconda for user 23 | mc_initialize "$miniconda_root" 24 | 25 | # Install HST pipeline release 26 | export CFLAGS="-std=gnu99" 27 | ac_releases_install_hst "stable" 28 | 29 | # Return to root user 30 | sys_user_pop 31 | 32 | # Reset target user's home directory permissions 33 | sys_reset_home_ownership ec2-user 34 | -------------------------------------------------------------------------------- /examples/pipeline/jwst.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | # Update system packages 5 | sys_pkg_update_all 6 | 7 | # Install additional packages 8 | sys_pkg_install curl \ 9 | gcc \ 10 | git \ 11 | sudo 12 | 13 | # "become" the target user 14 | sys_user_push ec2-user 15 | 16 | miniconda_root=$HOME/miniconda3 17 | miniconda_version="py39_4.12.0" 18 | 19 | # Install miniconda 20 | mc_install "$miniconda_version" "$miniconda_root" 21 | 22 | # Configure miniconda for user 23 | mc_initialize "$miniconda_root" 24 | 25 | # Install JWST pipeline release 26 | export CFLAGS="-std=gnu99" 27 | ac_releases_install_jwst "1.5.2" 28 | 29 | # Return to root user 30 | sys_user_pop 31 | 32 | # Reset target user's home directory permissions 33 | sys_reset_home_ownership ec2-user 34 | 
-------------------------------------------------------------------------------- /tests/test_framework_io.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | setUp() { 5 | output="DEBUG LEVEL: " 6 | re_stamp='([0-9]+)-([0-9]+)-([0-9]+)\ ([0-9]+):([0-9]+):([0-9]+)\ -\' 7 | } 8 | 9 | tearDown() { 10 | ec2pinit_debug=0 11 | } 12 | 13 | test_io_info() { 14 | ec2pinit_debug=$(( DEBUG_INFO )) 15 | io_info "$output $ec2pinit_debug" 2>&1 | grep -E "$re_stamp INFO: $output .*" 16 | } 17 | 18 | test_io_warn() { 19 | ec2pinit_debug=$(( DEBUG_WARN )) 20 | io_warn "$output $ec2pinit_debug" 2>&1 | grep -E "$re_stamp WARN: $output .*" 21 | } 22 | 23 | test_io_error() { 24 | ec2pinit_debug=$(( DEBUG_ERROR )) 25 | io_error "$output $ec2pinit_debug" 2>&1 | grep -E "$re_stamp ERROR: $output .*" 26 | } 27 | 28 | test_io_DEBUG_ALL() { 29 | ec2pinit_debug=$(( DEBUG_ALL )) 30 | (io_info "YES"; io_warn "YES"; io_error "YES") 2>&1 | grep -E "$re_stamp (INFO|WARN|ERROR): YES" 31 | } 32 | 33 | . "$shunit_path"/shunit2 34 | -------------------------------------------------------------------------------- /tests/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if ! type -p git; then 4 | echo "Missing package: git" 5 | exit 1 6 | fi 7 | 8 | # Always test the source tree 9 | export PATH="$(realpath ../bin):$PATH" 10 | export shunit_path="$(pwd)/shunit" 11 | [ ! 
-d "$shunit_path" ] && git clone https://github.com/kward/shunit2 "$shunit_path" 12 | 13 | argv=($@) 14 | argc=$# 15 | scripts=() 16 | failures=0 17 | 18 | if (( $argc > 0 )); then 19 | for (( i=0; i < $argc; i++ )); do 20 | if [ -f "${argv[i]}" ] && [[ "${argv[i]}" =~ ^test_.*\.sh ]]; then 21 | scripts+=("${argv[i]}") 22 | fi 23 | done 24 | else 25 | for f in test_*.sh; do 26 | scripts+=("$f") 27 | done 28 | fi 29 | 30 | for f in "${scripts[@]}"; do 31 | echo "Running tests for: ${f}" 32 | if ! bash "$f"; then 33 | (( failures++ )) 34 | fi 35 | done 36 | 37 | if (( $failures )); then 38 | echo "Test suite(s) failed: $failures" >&2 39 | exit 1 40 | fi 41 | -------------------------------------------------------------------------------- /examples/pipeline/data_analysis.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | # Update system packages 5 | sys_pkg_update_all 6 | 7 | # Install additional packages 8 | sys_pkg_install curl \ 9 | gcc \ 10 | git \ 11 | sudo 12 | 13 | # "become" the target user 14 | sys_user_push ec2-user 15 | 16 | miniconda_root=$HOME/miniconda3 17 | miniconda_version="py39_4.12.0" 18 | export CFLAGS="-std=gnu99" 19 | 20 | # Install miniconda 21 | mc_install "$miniconda_version" "$miniconda_root" 22 | 23 | # Configure miniconda for user 24 | mc_initialize "$miniconda_root" 25 | 26 | 27 | # Fix recently introduced packaging bug 05/2022 28 | ac_releases_clone 29 | sed --follow-symlinks -i 's/hsluv.*/hsluv==5.0.3/' $ac_releases_path/de/f/*.yml 30 | 31 | # Install Data Analysis pipeline release 32 | ac_releases_install_data_analysis "f" 33 | 34 | # Return to root user 35 | sys_user_pop 36 | 37 | # Reset target user's home directory permissions 38 | sys_reset_home_ownership ec2-user 39 | -------------------------------------------------------------------------------- /tests/test_config.sh: 
-------------------------------------------------------------------------------- 1 | oneTimeSetUp() { 2 | source ec2pinit.inc.sh 3 | } 4 | 5 | test_config() { 6 | assertNotNull "$ec2pinit_root" 7 | assertNotNull "$ec2pinit_framework" 8 | assertNotNull "$ec2pinit_tempdir" 9 | assertNotNull "$ac_releases_repo" 10 | assertNotNull "$ac_releases_path" 11 | assertNotNull "$mc_url" 12 | assertNotNull "$mc_installer" 13 | } 14 | 15 | test_config_directories() { 16 | assertTrue "root directory not found" '[ -d $ec2pinit_root ]' 17 | assertTrue "framework directory not found" '[ -d $ec2pinit_framework ]' 18 | assertTrue "$ec2pinit_tempdir is missing" '[ -d $ec2pinit_tempdir ]' 19 | assertTrue "$ec2pinit_tempdir is not writable" '[ -w $ec2pinit_tempdir ]' 20 | assertTrue "$ac_releases_path should not exist yet" '[ ! -d $ac_releases_path ]' 21 | assertTrue "$mc_installer should not exist yet" '[ ! -f $mc_installer ]' 22 | } 23 | 24 | test_config_upstream_connectivity() { 25 | assertTrue "$ac_releases_repo is broken" 'curl -f -S -L $ac_releases_repo' 26 | assertTrue "$mc_url is broken" 'curl -f -S -L $mc_url' 27 | } 28 | 29 | . 
"$shunit_path"/shunit2 30 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | workflow_dispatch: 10 | 11 | jobs: 12 | test: 13 | runs-on: ubuntu-latest 14 | container: 15 | image: centos:7 16 | env: 17 | GITHUB_WORKSPACE: ${{ github.workspace }} 18 | volumes: 19 | - ${{ github.workspace }}:${{ github.workspace }} 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | 24 | - name: Install deps 25 | run: | 26 | yum install -y git 27 | 28 | - name: Run tests 29 | run: | 30 | cd tests 31 | export ec2pinit_debug=2 32 | bash run_tests.sh 33 | 34 | docs: 35 | runs-on: ubuntu-latest 36 | container: 37 | image: centos:7 38 | env: 39 | GITHUB_WORKSPACE: ${{ github.workspace }} 40 | volumes: 41 | - ${{ github.workspace }}:${{ github.workspace }} 42 | 43 | steps: 44 | - uses: actions/checkout@v3 45 | 46 | - name: Install deps 47 | run: | 48 | yum install -y git make doxygen graphviz 49 | 50 | - name: Build HTML documentation 51 | run: | 52 | cd docs 53 | doxygen 54 | -------------------------------------------------------------------------------- /tests/test_framework_miniconda.sh: -------------------------------------------------------------------------------- 1 | oneTimeSetUp() { 2 | source ec2pinit.inc.sh 3 | mkdir -p "$ec2pinit_tempdir"/home 4 | export USER=$(id -n -u) 5 | export HOME_ORIG="$HOME" 6 | export HOME="$ec2pinit_tempdir"/home/tester 7 | cp -a /etc/skel $HOME || mkdir -p $HOME 8 | dest="$HOME"/miniconda3 9 | version="latest" 10 | } 11 | 12 | test_mc_get() { 13 | mc_get "$version" 1>/dev/null 14 | retval=$? 
15 | installer="$ec2pinit_tempdir/$mc_installer" 16 | assertTrue "download failed" '[ $retval -eq 0 ]' 17 | assertTrue "$installer was not created" '[ -f "$installer" ]' 18 | } 19 | 20 | test_mc_install() { 21 | mc_install "$version" "$dest" 1>/dev/null 22 | retval=$? 23 | assertTrue "installation failed" '[ $retval -eq 0 ]' 24 | assertTrue "$dest was not created" '[ -d "$dest"/bin ]' 25 | } 26 | 27 | test_mc_initialize() { 28 | mc_initialize "$dest" 1>/dev/null 29 | assertTrue "unexpected path to conda: $CONDA_EXE" '[[ "$dest"/bin/conda == "$CONDA_EXE" ]]' 30 | } 31 | 32 | test_mc_configure_defaults() { 33 | mc_configure_defaults 1>/dev/null 34 | keys=( 35 | auto_update_conda 36 | always_yes 37 | report_errors 38 | ) 39 | for key in "${keys[@]}"; do 40 | assertNotNull 'conda configuration key not set: $key' '$(conda config --get $key)' 41 | done 42 | } 43 | 44 | test_mc_clean() { 45 | mc_clean 1>/dev/null 46 | retval=$? 47 | assertTrue "clean operation failed" '[ $retval -eq 0 ]' 48 | } 49 | 50 | . "$shunit_path"/shunit2 51 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | import os 17 | import shutil 18 | import subprocess 19 | 20 | print("current directory: {}".format(os.path.abspath(os.curdir))) 21 | if os.path.exists("html"): 22 | shutil.rmtree("html") 23 | 24 | # Update doxygen config 25 | subprocess.run("doxygen -u", shell=True) 26 | 27 | # Run doxygen 28 | subprocess.run("doxygen", shell=True) 29 | 30 | # -- Project information ----------------------------------------------------- 31 | 32 | project = 'ec2_post_init' 33 | copyright = '2022, Space Telescope Science Institute' 34 | author = 'Joseph Hunkeler' 35 | 36 | html_extra_path = ["html"] 37 | 38 | # -- General configuration --------------------------------------------------- 39 | 40 | # List of patterns, relative to source directory, that match files and 41 | # directories to ignore when looking for source files. 42 | # This pattern also affects html_static_path and html_extra_path. 43 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 44 | -------------------------------------------------------------------------------- /framework/io.inc.sh: -------------------------------------------------------------------------------- 1 | ## @file 2 | ## @brief Input output functions 3 | 4 | (( $EC2PINIT_IO_INCLUDED )) && return 5 | EC2PINIT_IO_INCLUDED=1 6 | source ec2pinit.inc.sh 7 | 8 | ## Date format for IO functions 9 | io_datefmt="%Y-%m-%d %H:%M:%S" 10 | export io_datefmt 11 | 12 | ## @fn io_timestamp() 13 | ## @brief Return current date and time 14 | ## @retval date as string 15 | io_timestamp() { 16 | date +"$io_datefmt" 17 | } 18 | 19 | ## @fn io_info() 20 | ## @brief Print a message 21 | ## @param ... 
message arguments 22 | ## 23 | ## @code{.sh} 24 | ## var=hello 25 | ## ec2pinit_debug=$DEBUG_INFO 26 | ## io_info "$var" 27 | ## # 2022-06-22 18:46:57 - INFO: hello 28 | ## @endcode 29 | io_info() { 30 | (( ec2pinit_debug & DEBUG_INFO )) || return 0 31 | printf "$(io_timestamp) - INFO: %s\n" "$@" >&2 32 | } 33 | 34 | ## @fn io_warn() 35 | ## @brief Print a warning message 36 | ## @param ... message arguments 37 | ## 38 | ## @code{.sh} 39 | ## var=hello 40 | ## ec2pinit_debug=$DEBUG_WARN 41 | ## io_warn "uh oh... $var" 42 | ## # 2022-06-22 18:46:57 - WARN: uh oh... hello 43 | ## @endcode 44 | io_warn() { 45 | (( ec2pinit_debug & DEBUG_WARN )) || return 0 46 | printf "$(io_timestamp) - WARN: %s\n" "$@" >&2 47 | } 48 | 49 | ## @fn io_error() 50 | ## @brief Print an error message 51 | ## @param ... message arguments 52 | ## 53 | ## @code{.sh} 54 | ## var=hello 55 | ## ec2pinit_debug=$DEBUG_ERROR 56 | ## io_error "oh no... $var" 57 | ## # 2022-06-22 18:46:57 - ERROR: oh no... hello 58 | ## @endcode 59 | io_error() { 60 | (( ec2pinit_debug & DEBUG_ERROR )) || return 0 61 | printf "$(io_timestamp) - ERROR: %s\n" "$@" >&2 62 | } 63 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, Space Telescope Science Institute, AURA 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 
15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | -------------------------------------------------------------------------------- /tests/test_framework_docker.sh: -------------------------------------------------------------------------------- 1 | if (( $EUID > 0 )) || ! 
(( $DANGEROUS_TESTS )); then 2 | exit 0 3 | fi 4 | 5 | # I'm marking these as dangerous because they modify the host system in 6 | # generally undesirable ways: 7 | # - Adding a profile script 8 | # - Adding users to groups 9 | # - Removing groups from users 10 | # - Exposing the docker API port to the system 11 | # 12 | # I recommend using vagrant or some one-off virtual machine for this suite 13 | 14 | oneTimeSetUp() { 15 | source ec2pinit.inc.sh 16 | mkdir -p "$ec2pinit_tempdir"/home/tester 17 | export HOME_ORIG="$HOME" 18 | export HOME="$ec2pinit_tempdir"/home/tester 19 | export USER="root" 20 | } 21 | 22 | test_docker_setup_account_only() { 23 | docker_setup "$USER" 24 | pid=$(pgrep docker) 25 | assertTrue "Docker is not running" '[[ -n $pid ]] && [[ $pid =~ [0-9]+ ]]' 26 | docker ps &>/dev/null 27 | retval=$? 28 | assertTrue "$USER cannot use docker" '[ $retval -eq 0 ]' 29 | } 30 | 31 | test_docker_setup_bind_port() { 32 | if ! groups "$USER" | grep docker; then 33 | usermod -G "$(groups ${USER} | awk -F':' '{ print $2 }' | sed 's/^ //;s/docker//')" "$USER" 34 | fi 35 | docker_setup "" 2376 36 | pid=$(pgrep docker) 37 | assertTrue "Docker is not running" '[[ -n $pid ]] && [[ $pid =~ [0-9]+ ]]' 38 | docker ps &>/dev/null 39 | retval=$? 40 | assertTrue "$USER cannot use docker" '[ $retval -eq 0 ]' 41 | } 42 | 43 | test_docker_user_add() { 44 | docker_user_add "$USER" 1>/dev/null 45 | assertTrue "$USER was not added to docker group" 'groups $USER | grep docker' 46 | } 47 | 48 | test_docker_pull_many() { 49 | images=(centos:7 centos:8) 50 | docker_pull_many "${images[@]}" 51 | retval=$? 52 | assertTrue "Failed to pull images" '[ $retval -eq 0 ]' 53 | } 54 | 55 | 56 | . 
"$shunit_path"/shunit2 57 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PROG=ec2_post_init 2 | DESTDIR= 3 | PREFIX=/usr/local 4 | BINDIR=$(PREFIX)/bin 5 | DATADIR=$(PREFIX)/share 6 | DOCDIR=$(DATADIR)/doc 7 | 8 | F_OBJ=$(wildcard framework/*.inc.sh) 9 | B_OBJ=$(wildcard bin/*.inc.sh) 10 | HTML_OBJ=docs/html/index.html 11 | PDF_OBJ=docs/latex/refman.pdf 12 | 13 | .PHONY: clean 14 | 15 | all: 16 | @echo "targets:" 17 | @echo 18 | @echo "docs => build $(PROG) documentation" 19 | @echo "html => build html documentation" 20 | @echo "pdf => build pdf documentation" 21 | @echo "install => install $(PROG)" 22 | @echo "install-doc => install documentation" 23 | @echo "install-doc-html => install HTML documentation" 24 | @echo "install-doc-pdf => install PDF documentation" 25 | @echo 26 | @echo To install $(PROG) in a different location use PREFIX= 27 | @echo make install PREFIX=/abc/123 28 | @echo 29 | 30 | docs/html/index.html: 31 | @echo generate html 32 | (cd docs && doxygen 1>/dev/null) 33 | 34 | docs/latex/refman.pdf: 35 | @echo generate pdf 36 | (cd docs && make -C latex 1>/dev/null) 37 | 38 | html: $(HTML_OBJ) 39 | 40 | pdf: $(PDF_OBJ) 41 | 42 | docs: html pdf 43 | 44 | install: install-bin install-framework 45 | 46 | install-doc: install-doc-html install-doc-pdf 47 | 48 | install-bin: $(B_OBJ) 49 | mkdir -p $(DESTDIR)$(BINDIR) 50 | for x in $(B_OBJ); do \ 51 | echo installing $$x; \ 52 | install -m644 $$x $(DESTDIR)$(BINDIR); \ 53 | done 54 | 55 | install-framework: $(F_OBJ) 56 | mkdir -p $(DESTDIR)$(DATADIR)/$(PROG)/framework 57 | for x in $(F_OBJ); do \ 58 | echo installing $$x; \ 59 | install -m644 $$x $(DESTDIR)$(DATADIR)/$(PROG)/framework; \ 60 | done 61 | install -m644 LICENSE.txt $(DESTDIR)$(DATADIR)/$(PROG) 62 | cp -a examples $(DESTDIR)$(DATADIR)/$(PROG) 63 | 64 | install-doc-html: html 65 | mkdir -p 
$(DESTDIR)$(DOCDIR)/$(PROG) 66 | @echo installing html docs 67 | cp -a docs/html $(DESTDIR)$(DOCDIR)/$(PROG) 68 | 69 | install-doc-pdf: pdf 70 | mkdir -p $(DESTDIR)$(DOCDIR)/$(PROG) 71 | @echo installing pdf docs 72 | cp -a docs/latex/refman.pdf $(DESTDIR)$(DOCDIR)/$(PROG)/$(PROG).pdf 73 | 74 | clean: 75 | rm -rf docs/html docs/latex 76 | -------------------------------------------------------------------------------- /examples/pipeline/cumulative.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | source ec2pinit.inc.sh 3 | 4 | # Update system packages 5 | sys_pkg_update_all 6 | 7 | # Install additional packages 8 | if (( $HAVE_DNF )) || (( $HAVE_YUM )); then 9 | sys_pkg_install \ 10 | gcc \ 11 | bzip2-devel \ 12 | curl \ 13 | gcc \ 14 | gcc-c++ \ 15 | gcc-gfortran \ 16 | git \ 17 | glibc-devel \ 18 | kernel-devel \ 19 | libX11-devel \ 20 | mesa-libGL \ 21 | mesa-libGLU \ 22 | ncurses-devel \ 23 | openssh-server \ 24 | subversion \ 25 | sudo \ 26 | wget \ 27 | zlib-devel \ 28 | xauth \ 29 | xterm 30 | elif (( $HAVE_APT )); then 31 | sys_pkg_install \ 32 | build-essential \ 33 | gcc \ 34 | bzip2 \ 35 | curl \ 36 | gcc \ 37 | g++ \ 38 | gfortran \ 39 | git \ 40 | libc6-dev \ 41 | libx11-dev \ 42 | libgl1-mesa-dri \ 43 | libgl1-mesa-glx \ 44 | libglu1-mesa \ 45 | libncurses5-dev \ 46 | openssh-server \ 47 | subversion \ 48 | sudo \ 49 | wget \ 50 | zlib1g-dev \ 51 | xauth \ 52 | xterm 53 | fi 54 | 55 | # "become" the target user 56 | sys_user_push ec2-user 57 | 58 | miniconda_root=$HOME/miniconda3 59 | miniconda_version="py39_4.11.0" 60 | export CFLAGS="-std=gnu99" 61 | 62 | # Install miniconda 63 | mc_install "$miniconda_version" "$miniconda_root" || true 64 | mc_initialize "$miniconda_root" 65 | 66 | # Install HST pipeline 67 | ac_releases_install_hst "stable" 68 | 69 | # Install JWST pipeline 70 | ac_releases_install_jwst "1.5.2" 71 | 72 | # Handle recently introduced packaging bug 05/2022 (old upstream 
tag deleted) 73 | sed --follow-symlinks -i 's/hsluv.*/hsluv==5.0.3/' $ac_releases_path/de/f/*.yml 74 | 75 | # Install 76 | ac_releases_install_data_analysis "f" 77 | 78 | # Clean up conda packages and caches 79 | mc_clean 80 | 81 | # return to root user 82 | sys_user_pop 83 | 84 | # Reset target user's home directory permissions 85 | sys_reset_home_ownership ec2-user 86 | 87 | # Clean up package manager 88 | sys_pkg_clean 89 | -------------------------------------------------------------------------------- /tests/test_framework_astroconda.sh: -------------------------------------------------------------------------------- 1 | oneTimeSetUp() { 2 | source ec2pinit.inc.sh 3 | mkdir -p "$ec2pinit_tempdir"/home 4 | export HOME_ORIG="$HOME" 5 | export HOME="$ec2pinit_tempdir"/home/tester 6 | cp -a /etc/skel $HOME || mkdir -p $HOME 7 | dest="$HOME"/miniconda3 8 | version="latest" 9 | } 10 | 11 | test_ac_platform() { 12 | assertTrue '[ -n $(ac_platform) ]' 13 | } 14 | 15 | test_ac_releases_clone() { 16 | ac_releases_clone 1>/dev/null 17 | assertTrue '[ -d $ac_releases_path ]' 18 | } 19 | 20 | test_ac_releases_pipeline_exists() { 21 | path=$(ac_releases_pipeline_exists caldp) 22 | assertTrue '[ -d $path ]' 23 | } 24 | 25 | test_ac_releases_pipeline_release_exists() { 26 | path=$(ac_releases_pipeline_release_exists caldp stable) 27 | assertTrue '[ -d $path ]' 28 | } 29 | 30 | test_ac_releases_data_analysis() { 31 | path=$(ac_releases_data_analysis f) 32 | assertTrue '[ -f $path ]' 33 | 34 | path=$(ac_releases_data_analysis) 35 | assertTrue '[ -f $path ]' 36 | } 37 | 38 | test_ac_releases_data_analysis_environ() { 39 | path=$(ac_releases_data_analysis f) 40 | assertTrue '[ -n $(ac_releases_data_analysis_environ) ]' 41 | } 42 | 43 | test_ac_releases_jwst() { 44 | path=($(ac_releases_jwst 1.5.2)) 45 | assertTrue '[ -f "${path[0]}" ]' 46 | assertTrue '[ -f "${path[1]}" ]' 47 | 48 | path=($(ac_releases_jwst)) 49 | assertTrue '[ -f "${path[0]}" ]' 50 | assertTrue '[ -f 
"${path[1]}" ]' 51 | } 52 | 53 | test_ac_releases_jwst_environ() { 54 | path=$(ac_releases_jwst 1.5.2) 55 | assertTrue '[ -n $(ac_releases_jwst_environ $path) ]' 56 | } 57 | 58 | test_ac_releases_hst() { 59 | path=$(ac_releases_hst stable) 60 | assertTrue '[ -f $path ]' 61 | 62 | path=$(ac_releases_hst) 63 | assertTrue '[ -f $path ]' 64 | } 65 | 66 | test_ac_releases_hst_environ() { 67 | path=$(ac_releases_hst stable) 68 | assertTrue '[ -n $(ac_releases_hst_environ $path) ]' 69 | } 70 | 71 | # Full pipeline installations might fail due to external factors 72 | # I need to create some mock configurations before I can continue writing these 73 | test_ac_releases_install_hst() { 74 | true 75 | } 76 | 77 | test_ac_releases_install_jwst() { 78 | true 79 | } 80 | 81 | test_ac_releases_install_data_analysis() { 82 | true 83 | } 84 | 85 | . "$shunit_path"/shunit2 86 | -------------------------------------------------------------------------------- /tests/test_framework_system.sh: -------------------------------------------------------------------------------- 1 | if (( $EUID > 0 )); then 2 | not_root=1 3 | fi 4 | 5 | oneTimeSetUp() { 6 | source ec2pinit.inc.sh 7 | mkdir -p "$ec2pinit_tempdir"/home 8 | export HOME_ORIG="$HOME" 9 | export HOME="$ec2pinit_tempdir"/home/tester 10 | cp -a /etc/skel $HOME || mkdir -p $HOME 11 | export USER="root" 12 | } 13 | 14 | # this test is a no-op 15 | test_sys_user_push() { 16 | (( $not_root )) && return 17 | sys_user_push "$USER" 1>/dev/null 18 | assertTrue "$HOME != $HOME_ORIG" '[[ $HOME != "$HOME_ORIG" ]]' 19 | } 20 | 21 | # this test is a no-op 22 | test_sys_user_pop() { 23 | (( $not_root )) && return 24 | sys_user_pop 1>/dev/null 25 | assertTrue "$HOME != $HOME_ORIG" '[[ $HOME != "$HOME_ORIG" ]]' 26 | } 27 | 28 | test_sys_reset_home_ownership() { 29 | (( $not_root )) && return 30 | sys_reset_home_ownership $USER 1>/dev/null 31 | retval=$? 
32 | assertTrue "Failed to reset ownership" '[ $retval -eq 0 ]' 33 | } 34 | 35 | test_sys_pkg_install() { 36 | (( $not_root )) && return 37 | pkg=nano 38 | sys_pkg_install $pkg 1>/dev/null 39 | retval=$? 40 | assertTrue "'$pkg' could not be installed" '[ $retval -eq 0 ]' 41 | 42 | sys_pkg_installed $pkg 43 | retval=$? 44 | assertTrue "'$pkg' not installed" '[ $retval -eq 0 ]' 45 | } 46 | 47 | test_sys_pkg_clean() { 48 | (( $not_root )) && return 49 | sys_pkg_clean 1>/dev/null 50 | retval=$? 51 | assertTrue "Failed to clean up system packages" '[ $retval -eq 0 ]' 52 | } 53 | 54 | test_sys_pkg_update_all() { 55 | (( $not_root )) && return 56 | sys_pkg_update_all 1>/dev/null 57 | retval=$? 58 | assertTrue "failed to update system packages" '[ $retval -eq 0 ]' 59 | } 60 | 61 | test_sys_pkg_get_manager() { 62 | assertTrue "" '[[ -n $(sys_pkg_get_manager) ]]' 63 | } 64 | 65 | test_sys_user_home() { 66 | assertTrue "" '[[ -d $(sys_user_home root) ]]' 67 | } 68 | 69 | test_sys_arch() { 70 | assertTrue "" '[[ -n $(sys_arch) ]]' 71 | } 72 | 73 | test_sys_platform() { 74 | assertTrue "" '[[ -n $(sys_platform) ]]' 75 | } 76 | 77 | test_sys_pkg_installed() { 78 | sys_pkg_installed coreutils 79 | retval=$? 80 | assertTrue "coreutils not installed (unlikely!)" '[ $retval -eq 0 ]' 81 | 82 | sys_pkg_installed samba 83 | retval=$? 84 | assertTrue "samba installed when it shouldn't be" '[ $retval -ne 0 ]' 85 | 86 | # No arguments 87 | assertFalse sys_pkg_installed 88 | } 89 | 90 | . 
"$shunit_path"/shunit2 91 | -------------------------------------------------------------------------------- /docs/doxygen-bash.sed: -------------------------------------------------------------------------------- 1 | #!/bin/sed -nf 2 | ## 3 | ## DO WHAT THE **** YOU WANT TO PUBLIC LICENSE 4 | ## Version 2, December 2004 5 | ## 6 | ## Copyright (C) 2004 Sam Hocevar 7 | ## 8 | ## Everyone is permitted to copy and distribute verbatim or modified 9 | ## copies of this license document, and changing it is allowed as long 10 | ## as the name is changed. 11 | ## 12 | ## DO WHAT THE *** YOU WANT TO PUBLIC LICENSE 13 | ## TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 14 | ## 15 | ## 0. You just DO WHAT THE *** YOU WANT TO. 16 | ## 17 | ## 18 | ## Project Home Page: http://github.com/Anvil/bash-doxygen/ 19 | ## Project Author: Damien Nadé 20 | ## 21 | 22 | /^## \+@fn/{ 23 | :step 24 | /@param [^ ]\+ .*$/{ 25 | # Groups are 26 | # \1: @fn 27 | # \2: already identified params 28 | # \3: previous doc string 29 | # \4: @param 30 | # \5: newly identified param name plus optional dot-dot-dot string 31 | # \6: optional dot-dot-dot string 32 | # \7: everything after \5 to end of line 33 | # Here, we-reinsert param names into the () 34 | s/\(@fn [^(\n]\+\)(\([^(]*\))\(.*\)\(@param \)\([^ \n]\+\(\.\.\.\)\?\)\([^\n]*\)$/\1(\2, \5)\3\4\5\7/ 35 | } 36 | / *\(function \+\)\?[a-z:.A-Z0-9_]\+ *() *{ *$/!{ 37 | N 38 | b step 39 | } 40 | # Remove optional 'function' keyword (and some extra spaces). 41 | s/ *\(function \+\)\?\([a-z:.A-Z0-9_]\+ *() *{\) *$/\2/ 42 | # Here, we should have @fn (, param1, param2, param3), we remove 43 | # the first extra ", ". 44 | s/\(@fn[^(]\+\)(, /\1(/ 45 | # Remove the function body to avoid interference, and re-introduce 46 | # list of parameters in the funcname(). 47 | s/\(@fn \([^(]\+\)(\)\([^)]*\)\().*\)\n\2() *{/\1\3\4\n\2(\3) { }/ 48 | # Replace all '## ' by '//! ' at beginning-of-line. 
49 | s/\(^\|\n\)##\n/\1\/\/!\n/g 50 | s/\(^\|\n\)## /\1\/\/! /g 51 | p 52 | b end 53 | } 54 | 55 | /^declare /{ 56 | # The principle is quite easy. For every declare option, we add a 57 | # keyword into the sed exchange buffer. Once everything is parsed, 58 | # we add the variable identifier and maybe the variable default 59 | # value, add that to the exchange buffer and print the result. 60 | 61 | # Reset exchange buffer 62 | x 63 | s/.*// 64 | x 65 | # Remove declare keyword, we wont need it anymore 66 | s/^declare \+// 67 | # Simple declaration case. 68 | /^[^-]/{ 69 | x 70 | s/.*/&String / 71 | x 72 | b declareprint 73 | } 74 | # Concat options. Some of them are ignored, such as -f. 75 | :declare 76 | s/^-\([aAilrtux]\+\) \+-\([aAilrtux]\+\) \+/-\1\2 / 77 | t declare 78 | 79 | # Prepend Exported and ReadOnly attributes 80 | /^-[aAiltur]*x/{ 81 | x 82 | s/.*/&Exported / 83 | x 84 | } 85 | /^-[aAiltux]*r/{ 86 | x 87 | s/.*/&ReadOnly / 88 | x 89 | } 90 | 91 | # Integer type, exclusive with default 'String' type. 92 | /^-[aAlturx]*i/{ 93 | x 94 | s/.*/&Integer / 95 | x 96 | b array 97 | } 98 | 99 | # String type. handling. 100 | /^-[aAtrx]*l/{ 101 | x 102 | s/.*/&LowerCase / 103 | x 104 | } 105 | /^-[aAtrx]*u/{ 106 | x 107 | s/.*/&UpperCase / 108 | x 109 | } 110 | x 111 | s/.*/&String / 112 | x 113 | 114 | : array 115 | # For arrays, we remove the initialisation since I dont know yet 116 | # how to print it for doxygen to understand. 117 | /^-[Ailturx]*a/{ 118 | x 119 | s/.*/&Array / 120 | x 121 | b deletevalue 122 | } 123 | /^-[ailturx]*A/{ 124 | x 125 | s/.*/&AssociativeArray / 126 | x 127 | b deletevalue 128 | } 129 | 130 | :declareprint 131 | # Remove the declare option, x, then G will concat the exchange 132 | # buffer (the 'type' string) and the regular buffer (the var 133 | # possibly followed by an init value). The rest is quite easy to 134 | # understand. 
135 | s/-[^ ]\+ \+//
136 | x
137 | G
138 | s/\n//
139 | s/=/ = /
140 | s/$/;/
141 | p
142 | x
143 | b end
144 | }
145 | 
146 | /^ *export \+[_a-zA-Z]/{
147 | s/=/ = /
148 | s/\([^;]\) *$/\1;/
149 | s/^ *export \+/Exported String /
150 | p
151 | b end
152 | }
153 | 
154 | 
155 | # Delete non doxygen-related lines content, but not the line
156 | # themselves.
157 | /^\(\s*\)##\( \|$\)/!{
158 | s/^.*$//p
159 | }
160 | b end
161 | 
162 | # For arrays, to avoid duplication.
163 | : deletevalue
164 | s/\(-[^ ]\+ \+[^=]\+\)=.*/\1/
165 | b declareprint
166 | 
167 | :end
168 | # Make all ## lines doxygen-able.
169 | s/^\s*##\( \|$\)/\/\/!\1/p
-------------------------------------------------------------------------------- /framework/docker.inc.sh: --------------------------------------------------------------------------------
1 | ## @file
2 | ## @brief Docker functions
3 | ## @details
4 | ## @section docker_example Example
5 | ## @include docker_setup.sh
6 | 
7 | (( $EC2PINIT_DOCKER_INCLUDED )) && return
8 | EC2PINIT_DOCKER_INCLUDED=1
9 | source ec2pinit.inc.sh
10 | 
11 | ## @fn docker_setup()
12 | ## @brief Install docker on the server
13 | ## @param user account to add to docker group
14 | ## @param bind_port binds the docker daemon to a TCP port. When this option is
15 | ## enabled the ``user`` account argument is ignored in favor of setting
16 | ## ``DOCKER_HOST=tcp://127.0.0.1:${bind_port}`` at login
17 | docker_setup() {
18 |     local user="${1:-$USER}"
19 |     local bind_port="${2:-}"  # fixed: honor the documented 'bind_port' argument; it was never assigned from $2, leaving the bind-port branch below unreachable
20 |     if (( ! HAVE_SUPPORT )); then
21 |         io_error "docker_setup: unsupported operating system"  # NOTE(review): no 'return 1' here, so installation proceeds on unsupported systems -- confirm intended
22 |     fi
23 | 
24 |     # TODO: Generalize the following init blocks
25 |     io_info "docker_setup: Installing docker"
26 |     if (( HAVE_DEBIAN )); then
27 |         # see: https://docs.docker.com/engine/install/debian/
28 |         sys_pkg_install apt-transport-https ca-certificates curl gnupg lsb-release sudo
29 |         if [ !
-f "/etc/apt/keyrings/docker.gpg" ]; then 30 | sudo mkdir -p /etc/apt/keyrings 31 | curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg 32 | fi 33 | if [ ! -f "/etc/apt/sources.list.d/docker.list" ]; then 34 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" \ 35 | | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null 36 | fi 37 | sys_pkg_install docker-ce docker-ce-cli containerd.io docker-compose 38 | elif (( HAVE_UBUNTU )); then 39 | # see: https://docs.docker.com/engine/install/ubuntu/ 40 | sys_pkg_install apt-transport-https ca-certificates curl gnupg lsb-release sudo 41 | if [ ! -f "/etc/apt/keyrings/docker.gpg" ]; then 42 | sudo mkdir -p /etc/apt/keyrings 43 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg 44 | fi 45 | if [ ! -f "/etc/apt/sources.list.d/docker.list" ]; then 46 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \ 47 | | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null 48 | fi 49 | sys_pkg_install docker-ce docker-ce-cli containerd.io docker-compose 50 | elif (( HAVE_REDHAT )); then 51 | # see: https://docs.docker.com/engine/install/centos/ 52 | if [ ! -f /etc/yum.repos.d/docker-ce.repo ]; then 53 | yum-config-manager --add-repo \ 54 | https://download.docker.com/linux/centos/docker-ce.repo 55 | fi 56 | sys_pkg_install docker-ce docker-ce-cli containerd.io docker-compose-plugin 57 | else 58 | io_warn "docker_setup: Operating system was not recognized. Blindly attempting to install docker." 
>&2
59 |         sys_pkg_install docker docker-compose
60 |     fi
61 | 
62 |     # Enable the system service
63 |     io_info "docker_setup: Enabling docker"
64 |     systemctl enable docker
65 | 
66 |     if [ -n "$bind_port" ] && [[ $bind_port =~ [0-9]+ ]]; then
67 |         # Allow any local account to use the docker API port
68 |         io_info "docker_setup: Binding docker to port 127.0.0.1:${bind_port}"
69 |         mkdir -p /etc/systemd/system/docker.service.d
70 |         cat << CONFIG > /etc/systemd/system/docker.service.d/override.conf
71 | [Service]
72 | ExecStart=
73 | ExecStart=/usr/bin/dockerd -H 127.0.0.1:${bind_port} --containerd=/run/containerd/containerd.sock
74 | CONFIG
75 |         echo "DOCKER_HOST=tcp://127.0.0.1:${bind_port}" > /etc/profile.d/docker_host.sh
76 |         source /etc/profile.d/docker_host.sh
77 |     else
78 |         # Only the named can use docker
79 |         io_info "docker_setup: adding $user to docker group"
80 |         docker_user_add "$user"
81 |     fi
82 | 
83 |     io_info "docker_setup: Reloading systemd"
84 |     systemctl daemon-reload
85 |     io_info "docker_setup: Starting docker"
86 |     systemctl start docker
87 | }
88 | 
89 | ## @fn docker_user_add()
90 | ## @brief Add a user account to the ``docker`` group
91 | ## @param user an account to modify (must exist)
92 | docker_user_add() {
93 |     local user="${1:-$USER}"
94 |     if ! groups "$user" | grep -q docker; then  # fixed: inverted test added the user only when ALREADY in the group; -q suppresses grep output
95 |         usermod -a -G docker "$user"
96 |     fi
97 | }
98 | 
99 | ## @fn docker_pull_many()
100 | ## @brief Wrapper for ``docker pull``
101 | ## @details Pull multiple docker images with a single command
102 | ## @param image... image to pull
103 | ##
104 | ## ~~~{.sh}
105 | ## images=(centos:7 centos:8)
106 | ## docker_pull_many "${images[@]}"
107 | ## # or
108 | ## docker_pull_many "centos:7" "centos:8"
109 | ## ~~~
110 | docker_pull_many() {
111 |     local image=("$@")  # fixed: quote "$@" so image names survive word splitting/globbing
112 |     local image_count="${#image[@]}"
113 |     local error_count=0
114 | 
115 |     if (( image_count == 0 )); then  # fixed: ${#image[@]} is never the empty string, so '[ -z ... ]' could never trigger; guard against zero arguments instead
116 |         return 1
117 |     fi
118 | 
119 |     io_info "Pulling $image_count image(s)..."
120 | for ((i = 0; i < image_count; i++)); do 121 | io_info "Image #$((i+1)): ${image[i]}" 122 | if ! docker pull "${image[$i]}"; then 123 | (( error_count++ )) 124 | fi 125 | done 126 | (( error_count )) && return 1 127 | } 128 | -------------------------------------------------------------------------------- /framework/miniconda.inc.sh: -------------------------------------------------------------------------------- 1 | ## @file 2 | ## @brief Miniconda control functions 3 | ## @details 4 | ## @section miniconda_example Example 5 | ## @include miniconda_setup.sh 6 | 7 | (( $EC2PINIT_MINICONDA_INCLUDED )) && return 8 | EC2PINIT_MINICONDA_INCLUDED=1 9 | source ec2pinit.inc.sh 10 | 11 | # URL to a site providing miniconda installers 12 | mc_url="https://repo.anaconda.com/miniconda" 13 | 14 | # Name of miniconda installation script 15 | mc_installer="miniconda3_install.sh" 16 | 17 | 18 | ## @fn _get_rc() 19 | ## @private 20 | ## @brief Get the default bash rc script for the user account 21 | ## @code{.sh} 22 | ## # Red Hat... 23 | ## rc=$(_get_rc) 24 | ## # rc=/home/example/.bash_profile 25 | ## 26 | ## Debian... 27 | ## rc=$(_get_rc) 28 | ## # rc=/home/example/.bashrc 29 | ## @endcode 30 | ## @retval 1 if ``home`` does not exist 31 | _get_rc() { 32 | local scripts=(.bashrc .bashrc_profile .profile) 33 | local home="$(sys_user_home ${1:-$USER})" 34 | if [ -z "$home" ] || [ ! -d "$home" ]; then 35 | return 1 36 | fi 37 | 38 | for x in "${scripts[@]}"; do 39 | local filename="$home/$x" 40 | [ ! -f "$filename" ] && continue 41 | echo $filename 42 | break 43 | done 44 | } 45 | 46 | 47 | ## @fn mc_get() 48 | ## @brief Download Miniconda3 49 | ## @details Installation script destination is set by global $mc_installer 50 | ## @param version Miniconda3 release version... 
51 | ## (i.e., py39_4.11.0) 52 | ## @param version "latest" if empty 53 | ## @see config.sh 54 | mc_get() { 55 | local version="${1:-latest}" 56 | local dest="$ec2pinit_tempdir" 57 | local platform="$(sys_platform)" 58 | local arch="$(sys_arch)" 59 | local name="Miniconda3-$version-$platform-$arch.sh" 60 | 61 | if [ -f "$dest/$mc_installer" ]; then 62 | io_warn "mc_get: $dest/$mc_installer exists" 63 | return 1 64 | fi 65 | io_info "mc_get: Downloading $mc_url/$name" 66 | io_info "mc_get: Destination: $dest/$mc_installer" 67 | 68 | curl -L -o "$dest/$mc_installer" "$mc_url/$name" 69 | # Is this a bug or what? 70 | # curl exits zero on success but causes "if mc_get" to fail 71 | # as if it exited non-zero. I'm forcing the conditions I need 72 | # below. 73 | (( $? != 0 )) && return 1 74 | return 0 75 | } 76 | 77 | 78 | ## @fn mc_configure_defaults() 79 | ## @brief Sets global defaults for conda and pip 80 | mc_configure_defaults() { 81 | if [ -z "$CONDA_EXE" ]; then 82 | # Not initialized correctly 83 | io_error "mc_configure_defaults: conda is not initialized" 84 | return 1 85 | fi 86 | io_info "mc_configure_defaults: Configuring conda options" 87 | conda config --system --set auto_update_conda false 88 | conda config --system --set always_yes true 89 | conda config --system --set report_errors false 90 | 91 | # Some skeletons default to .bashrc instead of .bash_profile. 92 | local rc="$(_get_rc)" 93 | io_info "mc_configure_defaults: Enabling verbose output from pip" 94 | if ! 
grep -E '[^#](export)?[\t\ ]+PIP_VERBOSE=' "$rc" &>/dev/null; then
95 |         echo export PIP_VERBOSE=1 >> "$rc"
96 |         io_info "mc_configure_defaults: $rc modified"
97 |     else
98 |         io_info "mc_configure_defaults: $rc not modified"
99 |     fi
100 | }
101 | 
102 | 
103 | ## @fn mc_initialize()
104 | ## @brief Configures user account to load conda at login
105 | ## @param dest path to miniconda installation root
106 | mc_initialize() {
107 |     local dest="$1"
108 |     # Temporarily silence shell tracing; the RETURN trap restores it on exit
109 |     if [[ $- =~ v ]]; then
110 |         set +v
111 |         trap 'set -v' RETURN
112 |     fi
113 |     if [[ $- =~ x ]]; then
114 |         set +x  # fixed: was 'set +v' (copy/paste slip) -- this branch handles the xtrace (-x) flag
115 |         trap 'set -x' RETURN
116 |     fi
117 | 
118 |     io_info "mc_initialize: Using conda: $dest"
119 | 
120 |     # Configure the user's shell
121 |     if (( ! $EUID )); then
122 |         if ! grep -E '#.*>>>.*conda initialize.*>>>$' $(_get_rc) &>/dev/null; then
123 |             sudo -u $USER dest=$dest -i bash -c 'source "$dest"/etc/profile.d/conda.sh ; conda init'
124 |         fi
125 |     fi
126 | 
127 |     # Give current context access to the miniconda installation
128 |     source "$dest"/etc/profile.d/conda.sh
129 |     mc_configure_defaults
130 | }
131 | 
132 | 
133 | ## @fn mc_install()
134 | ## @brief Installs Miniconda3
135 | ## @param version of the Miniconda3 installer (i.e., py39_4.11.0)
136 | ## @param dest path to install Miniconda3 (~/miniconda3)
137 | ## @retval 1 if any argument is invalid
138 | ## @retval 1 if destination exists
139 | ## @retval 1 if download fails
140 | ## @retval 1 if installation fails (implicit)
141 | mc_install() {
142 |     local version="$1"
143 |     local dest="$2"
144 |     local cmd="bash $ec2pinit_tempdir/$mc_installer -b -p $dest"  # fixed: mismatched inner quotes; NOTE unquoted $cmd expansion still splits on whitespace in paths
145 | 
146 |     if [ -z "$version" ]; then
147 |         io_error "mc_install: miniconda version required" >&2
148 |         return 1
149 |     fi
150 | 
151 |     if [ -z "$dest" ]; then
152 |         io_error "mc_install: miniconda destination directory required" >&2
153 |         return 1
154 |     elif [ -d "$dest" ]; then
155 |         io_error "mc_install: miniconda destination directory exists" >&2
156 |         return 1
157 |     fi
158 | 
159 |     # fixed: inverted condition -- mc_get returns 0 on a successful download, so the old 'if mc_get' reported "unable to obtain" precisely when the download worked
160 |     if ! mc_get
"$version"; then 159 | io_error "mc_install: unable to obtain miniconda from server" >&2 160 | return 1 161 | fi 162 | 163 | io_info "mc_install: Installing conda: $dest" 164 | $cmd 165 | } 166 | 167 | 168 | ## @fn mc_clean() 169 | ## @brief Remove unused tarballs, caches, indexes, etc 170 | ## @retval 1 if miniconda is not initialized 171 | mc_clean() { 172 | if [ -z "$CONDA_EXE" ]; then 173 | # Not initialized correctly 174 | io_error "mc_clean: conda is not initialized" 175 | return 1 176 | fi 177 | 178 | conda clean --all 179 | } 180 | 181 | -------------------------------------------------------------------------------- /bin/ec2pinit.inc.sh: -------------------------------------------------------------------------------- 1 | ## @file 2 | ## @brief Framework entrypoint 3 | ## 4 | ## Include this file in your script to use ec2pinit's functions 5 | ## 6 | ## @mainpage 7 | ## @section intro_sec Introduction 8 | ## 9 | ## This shell library is useful if you are not a systems administrator but want to spin up an EC2 image to do data analysis or research. 
ec2_post_init provides a simple easy to use API that can: 10 | ## 11 | ## - Install system software (see: ``system.inc.sh :: sys_pkg_install()``) 12 | ## - Install Docker (see: ``docker.inc.sh :: docker_setup()``) 13 | ## - Install Miniconda3 (see: ``miniconda.inc.sh :: mc_install()``) 14 | ## - Install STScI pipelines (see: ``astroconda.inc.sh``) 15 | ## - ``ac_releases_install_hst()`` 16 | ## - ``ac_releases_install_jwst()`` 17 | ## - ``ac_releases_install_data_analysis()`` 18 | ## 19 | ## @section require_sec Supported Operating Systems 20 | ## 21 | ## - Red Hat 22 | ## - CentOS 7+ 23 | ## - Fedora 19+ 24 | ## - Debian 25 | ## - Stretch+ 26 | ## - Ubuntu 27 | ## - Bionic+ 28 | ## 29 | ## @section install_sec Installing 30 | ## 31 | ## @subsection install_system_subsec System installation 32 | ## 33 | ## @code{.sh} 34 | ## git clone https://github.com/spacetelescope/ec2_post_init 35 | ## cd ec2_post_init 36 | ## sudo make install PREFIX=/usr/local 37 | ## @endcode 38 | ## 39 | ## @subsection install_portable_subsec Portable installation 40 | ## 41 | ## If you don't want to install ec2_post_init permanently, you don't have to. This is especially useful for systems that provide ``curl`` and ``tar`` by default but lack ``git`` and ``make``. Here is how to use ec2_post_init from its source directory: 42 | ## 43 | ## @code{.sh} 44 | ## curl https://github.com/spacetelescope/ec2_post_init/archive/refs/heads/main.tar.gz | tar -x 45 | ## cd ec2_post_init 46 | ## export PATH=$(pwd)/bin:$PATH 47 | ## @endcode 48 | ## 49 | ## @section usage_sec Using ec2_post_init 50 | ## 51 | ## Now you can include the library in your own script by sourcing ``ec2pinit.inc.sh``... 52 | ## 53 | ## @code{.sh} 54 | ## #!/usr/bin/env bash 55 | ## 56 | ## # Load ec2_post_init 57 | ## source ec2pinit.inc.sh 58 | ## 59 | ## # ... 
60 | ## @endcode 61 | ## 62 | ## To see how one can use ec2_post_init to populate a system with Miniconda3 and the three major STScI pipeline releases, please refer to the @ref full_example_page page. The API reference for each library module can be found here. 63 | ## 64 | ## @section install_develop_sec Developing 65 | ## 66 | ## To write code for ec2_post_init you should have access to an EC2 instance, or a host with ``docker`` or ``vagrant`` installed. 67 | ## 68 | ## @code{.sh} 69 | ## git clone https://github.com/spacetelescope/ec2_post_init 70 | ## cd ec2_post_init 71 | ## export PATH=$(pwd)/bin:$PATH 72 | ## @endcode 73 | ## 74 | ## To test ec2_post_init using docker: 75 | ## 76 | ## @code{.sh} 77 | ## docker run --rm -it -v $(pwd):/data -w /data centos:7 /bin/bash 78 | ## [root@abc123 data]# export PATH=$PATH:/data/bin 79 | ## [root@abc123 data]# cd tests 80 | ## [root@abc123 tests]# ./run_tests.sh 81 | ## @endcode 82 | ## 83 | ## To test ec2_post_init using vagrant (VirtualBox): 84 | ## 85 | ## @code{.sh} 86 | ## mkdir -p ~/vagrant/centos/7 87 | ## cd ~/vagrant/centos/7 88 | ## @endcode 89 | ## 90 | ## Create a new ``Vagrantfile``. 
Be sure to change any paths to match your local system 91 | ## 92 | ## @code 93 | ## Vagrant.configure("2") do |config| 94 | ## config.vm.box = "generic/centos7" 95 | ## 96 | ## # Mount the ec2_post_init source directory at /data inside of the VM 97 | ## config.vm.synced_folder "/home/example/my_code/ec2_post_init", "/data" 98 | ## 99 | ## # Change VM resources 100 | ## config.vm.provider "virtualbox" do |v| 101 | ## v.memory = 2048 102 | ## v.cpus = 2 103 | ## end 104 | ## end 105 | ## @endcode 106 | ## 107 | ## Provision the VM, log in, and execute the test suite: 108 | ## 109 | ## @code{.sh} 110 | ## vagrant up 111 | ## vagrant ssh sudo -i 112 | ## [root@vagrant123 ~]# export PATH=$PATH:/data/bin 113 | ## [root@vagrant123 data]# cd /data/tests 114 | ## [root@vagrant123 tests]# ./run_tests.sh 115 | ## @endcode 116 | ## 117 | ## @page full_example_page Full example 118 | ## @include cumulative.sh 119 | ## 120 | ## @page license_page License 121 | ## @include LICENSE.txt 122 | 123 | (( $EC2PINIT_INCLUDED )) && return 124 | EC2PINIT_INCLUDED=1 125 | 126 | ## @property ec2pinit_root 127 | ## @brief Path to ec2pinit directory 128 | ## 129 | ## Do not change this value 130 | ec2pinit_root="$(readlink -f $(dirname ${BASH_SOURCE[0]})/..)" 131 | export ec2pinit_root 132 | 133 | ## @property ec2pinit_framework 134 | ## @brief Path to framework directory 135 | ## 136 | ## Do not change this value 137 | ec2pinit_framework="$ec2pinit_root"/framework 138 | 139 | # Adjust the framework path when we're installed as a system package 140 | if [ ! 
-d "$ec2pinit_framework" ]; then 141 | ec2pinit_framework="$ec2pinit_root/share/ec2_post_init"/framework 142 | fi 143 | export ec2pinit_framework 144 | 145 | ## @property ec2pinit_tempdir 146 | ## @brief Where ec2pinit will store temporary data 147 | ## 148 | ## Do not change this value 149 | ec2pinit_tempdir=/tmp/ec2_post_init 150 | export ec2pinit_tempdir 151 | 152 | ## FLAG - Print info messages 153 | DEBUG_INFO=$(( 1 << 1 )) 154 | export DEBUG_INFO 155 | 156 | ## FLAG - Print warning messages 157 | DEBUG_WARN=$(( 1 << 2 )) 158 | export DEBUG_WARN 159 | 160 | ## FLAG - Print error messages 161 | DEBUG_ERROR=$(( 1 << 3 )) 162 | export DEBUG_ERROR 163 | 164 | ## FLAG - Print only warnings and errors 165 | DEBUG_DEFAULT=$(( DEBUG_WARN | DEBUG_ERROR )) 166 | export DEBUG_DEFAULT 167 | 168 | ## FLAG - Print all messages 169 | DEBUG_ALL=$(( DEBUG_INFO | DEBUG_WARN | DEBUG_ERROR )) 170 | export DEBUG_ALL 171 | 172 | ## @property ec2pinit_debug 173 | ## @brief Debug output control 174 | ## 175 | ## Set print statement behavior with: ``DEBUG_INFO``, ``DEBUG_WARN``, and ``DEBUG_ERROR`` 176 | ## @code{.sh} 177 | ## ec2pinit_debug=$(( DEBUG_WARN | DEBUG_ERROR )) 178 | ## @endcode 179 | ec2pinit_debug=${ec2pinit_debug:-$DEBUG_DEFAULT} 180 | export ec2pinit_debug 181 | 182 | # If the user modifies debug flags through the environment 183 | # verify an integer was received. If not then use the defaults 184 | if ! [[ "$ec2pinit_debug" =~ [0-9]+ ]]; then 185 | # pre-IO function availability 186 | echo "WARN: ec2pinit_debug: Must be a positive integer!" >&2 187 | echo "WARN: Using DEBUG_DEFAULT ($DEBUG_DEFAULT)." 
>&2 188 | ec2pinit_debug=$DEBUG_DEFAULT 189 | fi 190 | 191 | bug_report() { 192 | io_error "$*" 193 | io_error "Please open an issue at: https://github.com/spacetelescope/ec2_post_init" 194 | echo 195 | echo TYPE 196 | echo ==== 197 | ([ -f /.dockerenv ] && echo Docker) || echo 'Physical / Virtualized' 198 | echo 199 | echo KERNEL 200 | echo ====== 201 | uname -a 202 | echo 203 | echo MEMORY 204 | echo ====== 205 | command free -m 206 | echo 207 | echo CPU 208 | echo === 209 | lscpu 210 | echo 211 | echo EC2_POST_INIT INFO 212 | echo ================== 213 | set | grep -E '^(ec2pinit|EC2PINIT|ec2_post_init|HAVE_|HOME|USER|PWD|sys_manager_)' | sort 214 | echo 215 | echo 216 | } 217 | 218 | mkdir -p "$ec2pinit_tempdir" 219 | source $ec2pinit_framework/io.inc.sh 220 | source $ec2pinit_framework/system.inc.sh 221 | 222 | # OS detection gate 223 | if (( ! HAVE_SUPPORT )); then 224 | bug_report "OPERATING SYSTEM IS NOT SUPPORTED" 225 | return 1 226 | else 227 | if ! sys_initialize; then 228 | bug_report "UNABLE TO INITIALIZE BASE OPERATING SYSTEM PACKAGES" 229 | return 1 230 | fi 231 | fi 232 | 233 | source $ec2pinit_framework/miniconda.inc.sh 234 | source $ec2pinit_framework/astroconda.inc.sh 235 | source $ec2pinit_framework/docker.inc.sh 236 | 237 | # Ensure any external success checks succeed 238 | true 239 | return 240 | -------------------------------------------------------------------------------- /framework/astroconda.inc.sh: -------------------------------------------------------------------------------- 1 | ## @file 2 | ## @brief Astroconda control functions 3 | ## @details 4 | ## @section astroconda_hst_example HST Example 5 | ## @include pipeline/hst.sh 6 | ## 7 | ## @section astroconda_jwst_example JWST Example 8 | ## @include pipeline/jwst.sh 9 | ## 10 | ## @section astroconda_data_analysis_example Data Analysis Example 11 | ## @include pipeline/data_analysis.sh 12 | 13 | (( $EC2PINIT_ASTROCONDA_INCLUDED )) && return 14 | EC2PINIT_ASTROCONDA_INCLUDED=1 15 | 
source ec2pinit.inc.sh 16 | 17 | ## URL to astroconda releases Git repository (or local file system) 18 | ac_releases_repo="https://github.com/astroconda/astroconda-releases" 19 | 20 | ## Path where ec2pinit will store the astroconda releases repository 21 | ac_releases_path="$ec2pinit_tempdir/$(basename $ac_releases_repo)" 22 | 23 | 24 | ## @fn ac_platform() 25 | ## @brief Get astroconda platform string 26 | ## @details The value returned is the platform suffix of a pipeline release 27 | ## file name. 28 | ## @retval platform if supported platform is detected 29 | ## @retval "unknown" if platform is not supported 30 | ac_platform() { 31 | case $(sys_platform) in 32 | Linux) 33 | echo linux ;; 34 | Win*) 35 | echo windows ;; 36 | Darwin) 37 | echo macos ;; 38 | *) 39 | echo unknown ;; 40 | esac 41 | } 42 | 43 | 44 | ## @fn ac_releases_clone() 45 | ## @brief Clone the astroconda-releases repository 46 | ## @details The destination is $ec2pinit_tempdir 47 | ## @see config.sh 48 | ac_releases_clone() { 49 | if [ ! 
-d "$ac_releases_path" ]; then 50 | io_info "ac_releases_clone: Cloning astroconda releases repository" 51 | git clone $ac_releases_repo $ac_releases_path >&2 52 | fi 53 | } 54 | 55 | 56 | ## @fn ac_releases_pipeline_exists() 57 | ## @brief Check if a named pipeline exists in astroconda-releases 58 | ## @retval path if pipeline exists 59 | ## @retval "" if pipeline does not exist 60 | ac_releases_pipeline_exists() { 61 | local pattern="$1" 62 | 63 | ac_releases_clone 64 | find $ac_releases_path -maxdepth 1 -type d -name ''$pattern'' 65 | } 66 | 67 | 68 | ## @fn ac_releases_pipeline_release_exists() 69 | ## @brief Check if a named release exists in a astroconda-releases pipeline 70 | ## @param pipeline_name Pipeline name 71 | ## @param pipeline_release Pipeline release name 72 | ## @retval path if release exists 73 | ## @retval "" if release does not exist 74 | ac_releases_pipeline_release_exists() { 75 | local pipeline_name="$1" 76 | local pipeline_release="$2" 77 | result=$(find "$(ac_releases_pipeline_exists $pipeline_name)" -maxdepth 1 -name ''$pipeline_release'') 78 | if [ -n "$result" ]; then 79 | readlink -f $result 80 | fi 81 | } 82 | 83 | 84 | ## @fn ac_releases_data_analysis() 85 | ## @brief Get path to data analysis release file 86 | ## @param series Pipeline release name 87 | ## @retval latest_path if series is undefined 88 | ## @retval path if series is found 89 | ac_releases_data_analysis() { 90 | local series="$1" 91 | local pipeline="de" 92 | if [ -z "$series" ]; then 93 | # get implicit latest in the release series 94 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 95 | -name ''latest-$(ac_platform)*.yml'' \ 96 | | sort -V | tail -n 1) 97 | else 98 | # get the latest release for the requested series 99 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 100 | -wholename ''\*$series/latest-$(ac_platform).yml'' \ 101 | | sort -V | tail -n 1) 102 | fi 103 | if [ -n "$release" ]; then 104 | readlink -f "$release" 105 | fi 106 | } 
107 | 108 | 109 | ## @fn ac_releases_data_analysis_environ() 110 | ## @brief Generate conda environment name 111 | ## @param series Pipeline release name 112 | ## @retval environment_name if series exists 113 | ## @retval 1 if release cannot be found 114 | ac_releases_data_analysis_environ() { 115 | local series="$1" 116 | local filename=$(ac_releases_data_analysis "$series") 117 | if [ -z "$filename" ]; then 118 | return 1 119 | fi 120 | sed "s/-/_/g;s/_$(ac_platform).*//g" <<< $(basename $filename) 121 | } 122 | 123 | 124 | ## @fn ac_releases_jwst() 125 | ## @brief Get path to JWST pipeline release file(s) 126 | ## @details JWST splits its installation into two files. This function returns 127 | ## two strings separated by new lines. 128 | ## @param series Pipeline release name 129 | ## @retval latest_path if series is undefined 130 | ## @retval paths if series is found 131 | ac_releases_jwst() { 132 | local series="$1" 133 | local pipeline="jwstdp" 134 | if [ -z "$series" ]; then 135 | # get implicit latest in the release series 136 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 137 | -name ''*.txt'' -and \( -not -name ''*macos*'' \) \ 138 | | sort -V | tail -n 2) 139 | else 140 | # get the latest release for the requested series 141 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 142 | -wholename ''\*$series/*.txt'' -and \( -not -wholename ''\*$series/\*macos\*.txt'' \) \ 143 | | sort -V | tail -n 2) 144 | fi 145 | echo "$release" 146 | } 147 | 148 | 149 | ## @fn ac_releases_jwst_environ() 150 | ## @brief Generate conda environment name 151 | ## @param series Pipeline release name 152 | ## @retval environment_name if series exists 153 | ## @retval 1 if release cannot be found 154 | ac_releases_jwst_environ() { 155 | local series="$1" 156 | local pipeline="jwstdp" 157 | if [ -z "$series" ]; then 158 | # get implicit latest in the release series 159 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 160 | -maxdepth 1 \ 161 
| -type d \ 162 | -not -wholename ''*/utils*'' \ 163 | | sort -V | tail -n 1) 164 | else 165 | # get the latest release for the requested series 166 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 167 | -type d \ 168 | -wholename ''\*/$series'' \ 169 | | sort -V | tail -n 1) 170 | fi 171 | printf "JWSTDP_%s" $(basename $release) 172 | } 173 | 174 | 175 | ## @fn ac_releases_hst() 176 | ## @brief Get path to HST pipeline release file 177 | ## @details HST provides a platform dependent YAML configuration 178 | ## @param series Pipeline release name 179 | ## @retval latest_path if series is undefined 180 | ## @retval path if series is found 181 | ac_releases_hst() { 182 | local series="$1" 183 | local pipeline="caldp" # no one will ever use hstdp 184 | if [ -z "$series" ]; then 185 | # get implicit latest in the release series 186 | release=$(find "$(ac_releases_pipeline_exists $pipeline)" \ 187 | -name ''latest-$(ac_platform)*.yml'' \ 188 | | sort -V | tail -n 1) 189 | else 190 | # get the latest release for the requested series 191 | release=$(find -L "$(ac_releases_pipeline_exists $pipeline)" \ 192 | -wholename ''\*$series/latest-$(ac_platform).yml'' \ 193 | | sort -V | tail -n 1) 194 | fi 195 | if [ -n "$release" ]; then 196 | readlink -f "$release" 197 | fi 198 | } 199 | 200 | 201 | ## @fn ac_releases_hst_environ() 202 | ## @brief Generate conda environment name 203 | ## @param series Pipeline release name 204 | ## @retval environment_name if series exists 205 | ## @retval 1 if release cannot be found 206 | ac_releases_hst_environ() { 207 | local series="$1" 208 | local filename=$(ac_releases_hst "$series") 209 | if [ -z "$filename" ]; then 210 | return 1 211 | fi 212 | sed "s/_$(ac_platform).*//" <<< $(basename $filename) 213 | } 214 | 215 | 216 | ## @fn ac_releases_install_hst() 217 | ## @brief Install the HST pipeline 218 | ## @param version pipeline release version 219 | ac_releases_install_hst() { 220 | local version="$1" 221 | if [ -z 
"$version" ]; then 222 | io_error "ac_releases_install_hst: release version required" 223 | return 1 224 | fi 225 | local release_file=$(ac_releases_hst $version) 226 | local release_name=$(ac_releases_hst_environ $version) 227 | io_info "ac_releases_install_hst: Creating $release_name environment" 228 | conda env create -n $release_name --file $release_file 229 | } 230 | 231 | 232 | ## @fn ac_releases_install_jwst() 233 | ## @brief Install the JWST pipeline 234 | ## @param version pipeline release version 235 | ac_releases_install_jwst() { 236 | local version="$1" 237 | if [ -z "$version" ]; then 238 | io_error "ac_releases_install_jwst: release version required" >&2 239 | return 1 240 | fi 241 | release_file=($(ac_releases_jwst $version_jwst)) 242 | release_name=$(ac_releases_jwst_environ $version_jwst) 243 | io_info "ac_releases_install_jwst: Creating $release_name environment" 244 | conda create -n $release_name --file ${release_file[0]} 245 | io_info "ac_releases_install_jwst: Activating $release_name environment" 246 | conda activate $release_name 247 | io_info "ac_releases_install_jwst: Installing dependencies: ${release_file[1]}" 248 | python -m pip install -r ${release_file[1]} 249 | conda deactivate 250 | } 251 | 252 | 253 | ## @fn ac_releases_install_data_analysis() 254 | ## @brief Install the data analysis pipeline 255 | ## @param version pipeline release version 256 | ac_releases_install_data_analysis() { 257 | local version="$1" 258 | if [ -z "$version" ]; then 259 | io_error "ac_releases_install_data_analysis: release version required" 260 | return 1 261 | fi 262 | local release_file=$(ac_releases_data_analysis $version) 263 | local release_name=$(ac_releases_data_analysis_environ $version) 264 | io_info "ac_releases_install_data_analysis: Creating $release_name environment" 265 | conda env create -n $release_name --file $release_file 266 | } 267 | -------------------------------------------------------------------------------- 
/framework/system.inc.sh: -------------------------------------------------------------------------------- 1 | ## @file 2 | ## @brief System functions 3 | 4 | (( $EC2PINIT_SYSTEM_INCLUDED )) && return 5 | EC2PINIT_SYSTEM_INCLUDED=1 6 | source ec2pinit.inc.sh 7 | 8 | _sys_user_old='' 9 | _sys_user_home_old='' 10 | 11 | ## System uses DNF package manager 12 | export HAVE_DNF=0 13 | 14 | ## System uses YUM package manager 15 | export HAVE_YUM=0 16 | 17 | ## System uses APT package manager 18 | export HAVE_APT=0 19 | 20 | ## System is based on Red Hat 21 | export HAVE_REDHAT=0 22 | 23 | ## System is based on Debian 24 | export HAVE_DEBIAN=0 25 | 26 | ## System is based on Ubuntu 27 | export HAVE_UBUNTU=0 28 | 29 | ## System is based on Arch 30 | export HAVE_ARCH=0 31 | 32 | ## System is supported 33 | export HAVE_SUPPORT=1 34 | 35 | ## @fn sys_check_admin() 36 | ## @brief Determine if the current user is root 37 | ## @retval true if root 38 | ## @retval 1 if not root 39 | sys_check_admin() { 40 | if (( $EUID > 0 )); then 41 | return 1 42 | fi 43 | return 0 44 | } 45 | 46 | ## @fn sys_user_push() 47 | ## @brief Lazily "become" another user 48 | ## @details This sidesteps sudo's environment limitations allowing 49 | ## one to execute scripts on behalf of the named user. Anything modified 50 | ## while ``sys_user_push()`` is active will need to have its ownership and/or 51 | ## octal permissions normalized. If ``name`` does not exist it will be created. 
52 | ## @param name the user to become 53 | sys_user_push() { 54 | local name="$1" 55 | local current="$(id -n -u)" 56 | _sys_user_home_old=$(sys_user_home $current) 57 | _sys_user_old=$current 58 | HOME=$(sys_user_home $name) 59 | if [ -z "$HOME" ]; then 60 | useradd -m -s /bin/bash "$name" 61 | HOME=/home/"$name" 62 | fi 63 | export USER=$name 64 | pushd "$HOME" 65 | } 66 | 67 | ## @fn sys_user_pop() 68 | ## @brief Restore caller environment after ``sys_user_push()`` 69 | sys_user_pop() { 70 | HOME="$_sys_user_home_old" 71 | export USER="$_sys_user_old" 72 | export _sys_user_home_old='' 73 | export _sys_user_old='' 74 | popd 75 | } 76 | 77 | ## @fn sys_platform() 78 | ## @brief Get system platform (``Linux``, ``Darwin``, etc) 79 | ## @retval platform string 80 | sys_platform() { 81 | local result=$(uname -s) 82 | case "$result" in 83 | # placeholder - convert platform name to miniconda platform string 84 | *) 85 | ;; 86 | esac 87 | echo "$result" 88 | } 89 | 90 | ## @fn sys_arch() 91 | ## @brief Get system architecture (``i386``, ``x86_64``, etc) 92 | ## @retval architecture string 93 | sys_arch() { 94 | local result=$(uname -m) 95 | case "$result" in 96 | # placeholder - convert cpu architecture name to miniconda architecture string 97 | *) ;; 98 | esac 99 | echo "$result" 100 | } 101 | 102 | ## @fn sys_user_home() 103 | ## @brief Get account home directory 104 | ## @details This function returns the home directory defined in /etc/passwd unless 105 | ## ``name`` is the caller's account; in which case it will use the value of ``$HOME``. 
106 | ## @param user account to inspect 107 | ## @retval home directory path 108 | sys_user_home() { 109 | local user="${1:-$USER}" 110 | 111 | if [ -z "$user" ]; then 112 | user=$(id -n -u) 113 | fi 114 | 115 | # short circuit - if the user is the one we're logged in as, return its home variable 116 | if [[ $(id -n -u) == "$user" ]]; then 117 | echo "$HOME" 118 | return 119 | fi 120 | getent passwd $user | awk -F: '{ print $6 }' 121 | } 122 | 123 | ## @fn sys_reset_home_ownership() 124 | ## @brief Resets ownership of a user (after ``sys_user_push()``/``sys_user_pop()``) 125 | ## @param user account to modify 126 | sys_reset_home_ownership() { 127 | local home 128 | local user="${1:-$USER}" 129 | 130 | if [ -z "$user" ]; then 131 | io_error "sys_reset_home_ownership: user name required" 132 | return 1 133 | fi 134 | 135 | home="$(getent passwd $user | awk -F: '{ print $6 }')" 136 | if [ -z "$home" ] || (( $(wc -l <<< "$home") > 1 )) ; then 137 | io_error "sys_reset_home_ownership: reset failed" 138 | return 1 139 | fi 140 | 141 | io_info "sys_reset_home_ownership: ${home} will be owned by ${user}" 142 | chown -R "${user}": "${home}" 143 | } 144 | 145 | ## @fn sys_pkg_get_manager() 146 | ## @brief Get the system package manager 147 | ## @retval result the path to the package manager 148 | sys_pkg_get_manager() { 149 | local managers=( 150 | "dnf" 151 | "yum" 152 | "apt" 153 | "" 154 | ) 155 | local result=""; 156 | for manager in "${managers[@]}"; do 157 | local tmp=$(type -p $manager) 158 | if [ -x "$tmp" ]; then 159 | result="$tmp" 160 | break; 161 | fi 162 | done 163 | 164 | echo "$result" 165 | } 166 | 167 | # Configure package manager globals 168 | sys_manager_cmd=$(sys_pkg_get_manager) 169 | case "$sys_manager_cmd" in 170 | */dnf) 171 | HAVE_DNF=1 172 | HAVE_REDHAT=1 173 | sys_manager_cmd_install="dnf -y install" 174 | sys_manager_cmd_update="dnf -y update" 175 | sys_manager_cmd_clean="dnf clean all" 176 | sys_manager_cmd_list="rpm -qa" 177 | ;; 178 | */yum) 
179 | HAVE_YUM=1 180 | HAVE_REDHAT=1 181 | sys_manager_cmd_install="yum -y install" 182 | sys_manager_cmd_update="yum -y update" 183 | sys_manager_cmd_clean="yum clean all" 184 | sys_manager_cmd_list="rpm -qa" 185 | ;; 186 | */apt) 187 | HAVE_APT=1 188 | DEBIAN_FRONTEND=noninteractive 189 | sys_manager_cmd_install="apt update && apt -y install" 190 | sys_manager_cmd_update="apt update && apt -y upgrade" 191 | sys_manager_cmd_clean="apt -y autoremove && apt -y clean" 192 | sys_manager_cmd_list="apt -qq list" 193 | ;; 194 | *) 195 | HAVE_SUPPORT=0 196 | ;; 197 | esac 198 | 199 | # Distro detection 200 | if (( HAVE_YUM )) || (( HAVE_DNF )); then 201 | if [ -f "/etc/redhat-release" ]; then 202 | HAVE_REDHAT=1 203 | fi 204 | elif (( HAVE_APT )); then 205 | if [ -L "/etc/dpkg/origins/default" ]; then 206 | if [ "$(basename $(readlink -f /etc/dpkg/origins/default))" == "debian" ]; then 207 | HAVE_DEBIAN=1 208 | elif [ "$(basename $(readlink -f /etc/dpkg/origins/default))" == "ubuntu" ]; then 209 | HAVE_UBUNTU=1 210 | fi 211 | fi 212 | else 213 | HAVE_SUPPORT=0 214 | fi 215 | 216 | if (( HAVE_SUPPORT )); then 217 | io_info "system: Detected package manager: $sys_manager_cmd" 218 | io_info "system: is based on Red Hat? $(( HAVE_REDHAT ))" 219 | io_info "system: is based on Debian? $(( HAVE_DEBIAN ))" 220 | io_info "system: is based on Ubuntu? $(( HAVE_UBUNTU ))" 221 | fi 222 | 223 | ## @fn sys_pkg_install() 224 | ## @brief Install a system package 225 | ## @param ... 
a variable length list of packages to install 226 | ## @retval 1 if not supported 227 | ## @retval exit_code of system package manager 228 | ## 229 | ## @code{.sh} 230 | ## # Install vim and nano 231 | ## sys_pkg_install nano 232 | ## if (( $HAVE_REDHAT )); then 233 | ## sys_pkg_install vim 234 | ## elif (( $HAVE_DEBIAN )); then 235 | ## sys_pkg_install vim-common 236 | ## fi 237 | ## 238 | ## # Alternative method using an array to dynamically set dependencies 239 | ## deps=(nano) 240 | ## (( $HAVE_REDHAT )) && deps+=(vim) 241 | ## (( $HAVE_DEBIAN )) && deps+=(vim-common) 242 | ## sys_pkg_install "${deps[@]}" 243 | ## @endcode 244 | sys_pkg_install() { 245 | if (( ! HAVE_SUPPORT )); then 246 | io_error "sys_pkg_install: unsupported package manager" 247 | return 1 248 | fi 249 | if (( "$#" < 1 )); then 250 | io_error "sys_pkg_install: at least one package name is required" 251 | return 1 252 | fi 253 | io_info "sys_pkg_install: Installing $*" 254 | bash -c "$sys_manager_cmd_install $*" 255 | } 256 | 257 | ## @fn sys_pkg_update_all() 258 | ## @brief Update all system packages 259 | ## @retval 1 if not supported 260 | ## @retval exit_code of system package manager 261 | sys_pkg_update_all() { 262 | if (( ! HAVE_SUPPORT )); then 263 | io_error "sys_pkg_update_all: unsupported package manager" 264 | return 1 265 | fi 266 | io_info "sys_pkg_update_all: Updating system packages" 267 | bash -c "$sys_manager_cmd_update" 268 | } 269 | 270 | ## @fn sys_pkg_installed() 271 | ## @brief Test if a system package is installed 272 | ## @param name of a system package 273 | ## @retval 1 if not supported 274 | ## @retval 1 if package is NOT installed 275 | ## @retval 0 if package is installed 276 | sys_pkg_installed() { 277 | local output='' 278 | local name="$1" 279 | if (( ! 
HAVE_SUPPORT )); then 280 | io_error "sys_pkg_installed: unsupported package manager" 281 | return 1 282 | fi 283 | 284 | if (( "$#" < 1 )); then 285 | io_error "sys_pkg_installed: package name is required" 286 | return 1 287 | fi 288 | 289 | output="$($sys_manager_cmd_list $name | tail -n 1)" 290 | if (( $HAVE_YUM )) || (( $HAVE_DNF )); then 291 | if grep -E ''^$1\..*'' <<< "$output" &>/dev/null; then 292 | return 0 293 | fi 294 | elif (( $HAVE_APT )); then 295 | if grep -E ''^$1/.*\\[installed\\]$'' <<< "$output" &>/dev/null; then 296 | return 0 297 | fi 298 | fi 299 | 300 | return 1 301 | } 302 | 303 | ## @fn sys_pkg_clean() 304 | ## @brief Clean the system package manager's cache(s) 305 | sys_pkg_clean() { 306 | if (( ! HAVE_SUPPORT )); then 307 | io_error "sys_pkg_clean: unsupported package manager" 308 | return 309 | fi 310 | io_info "sys_pkg_clean: Clearing caches" 311 | bash -c "$sys_manager_cmd_clean" 312 | } 313 | 314 | ## @fn sys_initialize() 315 | ## @brief Install dependencies required by ``ec2_post_init`` 316 | ## @retval exit_code value of ``sys_pkg_install()`` 317 | ## 318 | ## @code{.sh} 319 | ## if ! sys_pkg_initialize; then 320 | ## io_error "Initialization failed!" 321 | ## else 322 | ## io_info "Initialization succeeded!" 323 | ## fi 324 | ## @endcode 325 | sys_initialize() { 326 | local want=( 327 | bash 328 | git 329 | curl 330 | gcc 331 | make 332 | sudo 333 | ) 334 | local need=() 335 | 336 | # Handle barren Debian images 337 | (( HAVE_DEBIAN )) && want+=(procps build-essential) 338 | 339 | # NOTE: Aside from the sys_* functions most operations in ec2_post_init 340 | # don't strictly require root access. 341 | io_info "sys_initialize: Administrator check..." 342 | if ! sys_check_admin; then 343 | io_warn "sys_initialize: SYSTEM FUNCTIONS REQUIRE ROOT ACCESS!" 344 | io_warn "sys_initialize: Beyond this point errors thrown by sys_* functions are NOT bugs!" 345 | return 346 | else 347 | io_info "sys_initialize: User is root!" 
348 | fi 349 | 350 | io_info "sys_initialize: Checking system requirements..." 351 | for x in "${want[@]}"; do 352 | if ! sys_pkg_installed "$x"; then 353 | io_info "sys_initialize: $x marked for installation" 354 | need+=($x) 355 | fi 356 | done 357 | if (( ! ${#need[@]} )); then 358 | io_info "sys_initialize: No additional packages required" 359 | return 0 360 | fi 361 | 362 | io_info "sys_initialize: Installing packages required by ec2_post_init..." 363 | sys_pkg_install ${need[@]} 364 | return $? 365 | } 366 | -------------------------------------------------------------------------------- /docs/Doxyfile: -------------------------------------------------------------------------------- 1 | # Doxyfile 1.8.5 2 | 3 | #--------------------------------------------------------------------------- 4 | # Project related configuration options 5 | #--------------------------------------------------------------------------- 6 | DOXYFILE_ENCODING = UTF-8 7 | PROJECT_NAME = "ec2_post_init" 8 | PROJECT_NUMBER = 9 | PROJECT_BRIEF = "Populate STScI EC2 instances with ease" 10 | PROJECT_LOGO = logo.png 11 | OUTPUT_DIRECTORY = 12 | CREATE_SUBDIRS = NO 13 | OUTPUT_LANGUAGE = English 14 | BRIEF_MEMBER_DESC = YES 15 | REPEAT_BRIEF = YES 16 | ABBREVIATE_BRIEF = 17 | ALWAYS_DETAILED_SEC = NO 18 | INLINE_INHERITED_MEMB = NO 19 | FULL_PATH_NAMES = YES 20 | STRIP_FROM_PATH = ../ 21 | STRIP_FROM_INC_PATH = ../ 22 | SHORT_NAMES = YES 23 | JAVADOC_AUTOBRIEF = NO 24 | QT_AUTOBRIEF = NO 25 | MULTILINE_CPP_IS_BRIEF = NO 26 | INHERIT_DOCS = YES 27 | SEPARATE_MEMBER_PAGES = NO 28 | TAB_SIZE = 4 29 | ALIASES = 30 | TCL_SUBST = 31 | OPTIMIZE_OUTPUT_FOR_C = NO 32 | OPTIMIZE_OUTPUT_JAVA = NO 33 | OPTIMIZE_FOR_FORTRAN = NO 34 | OPTIMIZE_OUTPUT_VHDL = NO 35 | EXTENSION_MAPPING = sh=C 36 | MARKDOWN_SUPPORT = YES 37 | AUTOLINK_SUPPORT = YES 38 | BUILTIN_STL_SUPPORT = NO 39 | CPP_CLI_SUPPORT = NO 40 | SIP_SUPPORT = NO 41 | IDL_PROPERTY_SUPPORT = YES 42 | DISTRIBUTE_GROUP_DOC = NO 43 | SUBGROUPING = YES 44 
| INLINE_GROUPED_CLASSES = NO 45 | INLINE_SIMPLE_STRUCTS = NO 46 | TYPEDEF_HIDES_STRUCT = NO 47 | LOOKUP_CACHE_SIZE = 0 48 | #--------------------------------------------------------------------------- 49 | # Build related configuration options 50 | #--------------------------------------------------------------------------- 51 | EXTRACT_ALL = NO 52 | EXTRACT_PRIVATE = NO 53 | EXTRACT_PACKAGE = NO 54 | EXTRACT_STATIC = NO 55 | EXTRACT_LOCAL_CLASSES = YES 56 | EXTRACT_LOCAL_METHODS = NO 57 | EXTRACT_ANON_NSPACES = NO 58 | HIDE_UNDOC_MEMBERS = NO 59 | HIDE_UNDOC_CLASSES = NO 60 | HIDE_FRIEND_COMPOUNDS = NO 61 | HIDE_IN_BODY_DOCS = NO 62 | INTERNAL_DOCS = NO 63 | CASE_SENSE_NAMES = YES 64 | HIDE_SCOPE_NAMES = NO 65 | SHOW_INCLUDE_FILES = YES 66 | FORCE_LOCAL_INCLUDES = NO 67 | INLINE_INFO = YES 68 | SORT_MEMBER_DOCS = YES 69 | SORT_BRIEF_DOCS = NO 70 | SORT_MEMBERS_CTORS_1ST = NO 71 | SORT_GROUP_NAMES = NO 72 | SORT_BY_SCOPE_NAME = NO 73 | STRICT_PROTO_MATCHING = NO 74 | GENERATE_TODOLIST = YES 75 | GENERATE_TESTLIST = YES 76 | GENERATE_BUGLIST = YES 77 | GENERATE_DEPRECATEDLIST= YES 78 | ENABLED_SECTIONS = 79 | MAX_INITIALIZER_LINES = 30 80 | SHOW_USED_FILES = YES 81 | SHOW_FILES = YES 82 | SHOW_NAMESPACES = YES 83 | FILE_VERSION_FILTER = 84 | LAYOUT_FILE = 85 | CITE_BIB_FILES = 86 | #--------------------------------------------------------------------------- 87 | # Configuration options related to warning and progress messages 88 | #--------------------------------------------------------------------------- 89 | QUIET = NO 90 | WARNINGS = YES 91 | WARN_IF_UNDOCUMENTED = YES 92 | WARN_IF_DOC_ERROR = YES 93 | WARN_NO_PARAMDOC = NO 94 | WARN_FORMAT = "$file:$line: $text" 95 | WARN_LOGFILE = 96 | #--------------------------------------------------------------------------- 97 | # Configuration options related to the input files 98 | #--------------------------------------------------------------------------- 99 | INPUT = .. 
100 | INPUT_ENCODING = UTF-8 101 | FILE_PATTERNS = *.sh 102 | RECURSIVE = YES 103 | EXCLUDE = ../docs/ ../tests/ 104 | EXCLUDE_SYMLINKS = NO 105 | EXCLUDE_PATTERNS = test_*.sh 106 | EXCLUDE_SYMBOLS = 107 | EXAMPLE_PATH = ../examples/ .. 108 | EXAMPLE_PATTERNS = *.sh *.txt 109 | EXAMPLE_RECURSIVE = NO 110 | IMAGE_PATH = 111 | INPUT_FILTER = "sed -n -f doxygen-bash.sed -- " 112 | FILTER_PATTERNS = 113 | FILTER_SOURCE_FILES = NO 114 | FILTER_SOURCE_PATTERNS = 115 | USE_MDFILE_AS_MAINPAGE = 116 | #--------------------------------------------------------------------------- 117 | # Configuration options related to source browsing 118 | #--------------------------------------------------------------------------- 119 | SOURCE_BROWSER = YES 120 | INLINE_SOURCES = NO 121 | STRIP_CODE_COMMENTS = YES 122 | REFERENCED_BY_RELATION = NO 123 | REFERENCES_RELATION = NO 124 | REFERENCES_LINK_SOURCE = YES 125 | SOURCE_TOOLTIPS = YES 126 | USE_HTAGS = NO 127 | VERBATIM_HEADERS = YES 128 | #--------------------------------------------------------------------------- 129 | # Configuration options related to the alphabetical class index 130 | #--------------------------------------------------------------------------- 131 | ALPHABETICAL_INDEX = YES 132 | COLS_IN_ALPHA_INDEX = 5 133 | IGNORE_PREFIX = 134 | #--------------------------------------------------------------------------- 135 | # Configuration options related to the HTML output 136 | #--------------------------------------------------------------------------- 137 | GENERATE_HTML = YES 138 | HTML_OUTPUT = html 139 | HTML_FILE_EXTENSION = .html 140 | HTML_HEADER = 141 | HTML_FOOTER = 142 | HTML_STYLESHEET = 143 | HTML_EXTRA_STYLESHEET = custom.css 144 | HTML_EXTRA_FILES = 145 | HTML_COLORSTYLE_HUE = 220 146 | HTML_COLORSTYLE_SAT = 100 147 | HTML_COLORSTYLE_GAMMA = 80 148 | HTML_TIMESTAMP = NO 149 | HTML_DYNAMIC_SECTIONS = NO 150 | HTML_INDEX_NUM_ENTRIES = 100 151 | GENERATE_DOCSET = NO 152 | DOCSET_FEEDNAME = "Doxygen generated 
docs" 153 | DOCSET_BUNDLE_ID = org.doxygen.Project 154 | DOCSET_PUBLISHER_ID = org.doxygen.Publisher 155 | DOCSET_PUBLISHER_NAME = Publisher 156 | GENERATE_HTMLHELP = NO 157 | CHM_FILE = 158 | HHC_LOCATION = 159 | GENERATE_CHI = NO 160 | CHM_INDEX_ENCODING = 161 | BINARY_TOC = NO 162 | TOC_EXPAND = NO 163 | GENERATE_QHP = NO 164 | QCH_FILE = 165 | QHP_NAMESPACE = org.doxygen.Project 166 | QHP_VIRTUAL_FOLDER = doc 167 | QHP_CUST_FILTER_NAME = 168 | QHP_CUST_FILTER_ATTRS = 169 | QHP_SECT_FILTER_ATTRS = 170 | QHG_LOCATION = 171 | GENERATE_ECLIPSEHELP = NO 172 | ECLIPSE_DOC_ID = org.doxygen.Project 173 | DISABLE_INDEX = NO 174 | GENERATE_TREEVIEW = YES 175 | ENUM_VALUES_PER_LINE = 4 176 | TREEVIEW_WIDTH = 250 177 | EXT_LINKS_IN_WINDOW = NO 178 | FORMULA_FONTSIZE = 10 179 | FORMULA_TRANSPARENT = YES 180 | USE_MATHJAX = NO 181 | MATHJAX_FORMAT = HTML-CSS 182 | MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest 183 | MATHJAX_EXTENSIONS = 184 | MATHJAX_CODEFILE = 185 | SEARCHENGINE = YES 186 | SERVER_BASED_SEARCH = NO 187 | EXTERNAL_SEARCH = NO 188 | SEARCHENGINE_URL = 189 | SEARCHDATA_FILE = searchdata.xml 190 | EXTERNAL_SEARCH_ID = 191 | EXTRA_SEARCH_MAPPINGS = 192 | #--------------------------------------------------------------------------- 193 | # Configuration options related to the LaTeX output 194 | #--------------------------------------------------------------------------- 195 | GENERATE_LATEX = YES 196 | LATEX_OUTPUT = latex 197 | LATEX_CMD_NAME = latex 198 | MAKEINDEX_CMD_NAME = makeindex 199 | COMPACT_LATEX = NO 200 | PAPER_TYPE = a4 201 | EXTRA_PACKAGES = 202 | LATEX_HEADER = 203 | LATEX_FOOTER = 204 | LATEX_EXTRA_FILES = 205 | PDF_HYPERLINKS = YES 206 | USE_PDFLATEX = YES 207 | LATEX_BATCHMODE = NO 208 | LATEX_HIDE_INDICES = NO 209 | LATEX_SOURCE_CODE = NO 210 | LATEX_BIB_STYLE = plain 211 | #--------------------------------------------------------------------------- 212 | # Configuration options related to the RTF output 213 | 
#--------------------------------------------------------------------------- 214 | GENERATE_RTF = NO 215 | RTF_OUTPUT = rtf 216 | COMPACT_RTF = NO 217 | RTF_HYPERLINKS = NO 218 | RTF_STYLESHEET_FILE = 219 | RTF_EXTENSIONS_FILE = 220 | #--------------------------------------------------------------------------- 221 | # Configuration options related to the man page output 222 | #--------------------------------------------------------------------------- 223 | GENERATE_MAN = NO 224 | MAN_OUTPUT = man 225 | MAN_EXTENSION = .3 226 | MAN_LINKS = NO 227 | #--------------------------------------------------------------------------- 228 | # Configuration options related to the XML output 229 | #--------------------------------------------------------------------------- 230 | GENERATE_XML = NO 231 | XML_OUTPUT = xml 232 | XML_SCHEMA = 233 | XML_DTD = 234 | XML_PROGRAMLISTING = YES 235 | #--------------------------------------------------------------------------- 236 | # Configuration options related to the DOCBOOK output 237 | #--------------------------------------------------------------------------- 238 | GENERATE_DOCBOOK = NO 239 | DOCBOOK_OUTPUT = docbook 240 | #--------------------------------------------------------------------------- 241 | # Configuration options for the AutoGen Definitions output 242 | #--------------------------------------------------------------------------- 243 | GENERATE_AUTOGEN_DEF = NO 244 | #--------------------------------------------------------------------------- 245 | # Configuration options related to the Perl module output 246 | #--------------------------------------------------------------------------- 247 | GENERATE_PERLMOD = NO 248 | PERLMOD_LATEX = NO 249 | PERLMOD_PRETTY = YES 250 | PERLMOD_MAKEVAR_PREFIX = 251 | #--------------------------------------------------------------------------- 252 | # Configuration options related to the preprocessor 253 | #--------------------------------------------------------------------------- 
254 | ENABLE_PREPROCESSING = YES 255 | MACRO_EXPANSION = NO 256 | EXPAND_ONLY_PREDEF = NO 257 | SEARCH_INCLUDES = YES 258 | INCLUDE_PATH = 259 | INCLUDE_FILE_PATTERNS = 260 | PREDEFINED = 261 | EXPAND_AS_DEFINED = 262 | SKIP_FUNCTION_MACROS = YES 263 | #--------------------------------------------------------------------------- 264 | # Configuration options related to external references 265 | #--------------------------------------------------------------------------- 266 | TAGFILES = 267 | GENERATE_TAGFILE = 268 | ALLEXTERNALS = NO 269 | EXTERNAL_GROUPS = YES 270 | EXTERNAL_PAGES = YES 271 | PERL_PATH = /usr/bin/perl 272 | #--------------------------------------------------------------------------- 273 | # Configuration options related to the dot tool 274 | #--------------------------------------------------------------------------- 275 | CLASS_DIAGRAMS = YES 276 | MSCGEN_PATH = 277 | HIDE_UNDOC_RELATIONS = YES 278 | HAVE_DOT = YES 279 | DOT_NUM_THREADS = 0 280 | DOT_FONTNAME = Helvetica 281 | DOT_FONTSIZE = 10 282 | DOT_FONTPATH = 283 | CLASS_GRAPH = YES 284 | COLLABORATION_GRAPH = YES 285 | GROUP_GRAPHS = YES 286 | UML_LOOK = NO 287 | UML_LIMIT_NUM_FIELDS = 10 288 | TEMPLATE_RELATIONS = NO 289 | INCLUDE_GRAPH = YES 290 | INCLUDED_BY_GRAPH = YES 291 | CALL_GRAPH = YES 292 | CALLER_GRAPH = NO 293 | GRAPHICAL_HIERARCHY = YES 294 | DIRECTORY_GRAPH = YES 295 | DOT_IMAGE_FORMAT = png 296 | INTERACTIVE_SVG = NO 297 | DOT_PATH = 298 | DOTFILE_DIRS = 299 | MSCFILE_DIRS = 300 | DOT_GRAPH_MAX_NODES = 50 301 | MAX_DOT_GRAPH_DEPTH = 0 302 | DOT_TRANSPARENT = NO 303 | DOT_MULTI_TARGETS = NO 304 | GENERATE_LEGEND = YES 305 | DOT_CLEANUP = YES 306 | -------------------------------------------------------------------------------- /docs/custom.css: -------------------------------------------------------------------------------- 1 | /* The standard CSS for doxygen 1.8.5 */ 2 | 3 | body, table, div, p, dl { 4 | font: 400 14px/22px Roboto,sans-serif; 5 | } 6 | 7 | /* @group Heading 
Levels */ 8 | 9 | h1.groupheader { 10 | font-size: 150%; 11 | } 12 | 13 | .title { 14 | font: 400 14px/28px Roboto,sans-serif; 15 | font-size: 150%; 16 | font-weight: bold; 17 | margin: 10px 2px; 18 | } 19 | 20 | h2.groupheader { 21 | border-bottom: 1px solid #879ECB; 22 | color: #354C7B; 23 | font-size: 150%; 24 | font-weight: normal; 25 | margin-top: 1.75em; 26 | padding-top: 8px; 27 | padding-bottom: 4px; 28 | width: 100%; 29 | } 30 | 31 | h3.groupheader { 32 | font-size: 100%; 33 | } 34 | 35 | h1, h2, h3, h4, h5, h6 { 36 | -webkit-transition: text-shadow 0.5s linear; 37 | -moz-transition: text-shadow 0.5s linear; 38 | -ms-transition: text-shadow 0.5s linear; 39 | -o-transition: text-shadow 0.5s linear; 40 | transition: text-shadow 0.5s linear; 41 | margin-right: 15px; 42 | } 43 | 44 | h1.glow, h2.glow, h3.glow, h4.glow, h5.glow, h6.glow { 45 | text-shadow: 0 0 15px cyan; 46 | } 47 | 48 | dt { 49 | font-weight: bold; 50 | } 51 | 52 | div.multicol { 53 | -moz-column-gap: 1em; 54 | -webkit-column-gap: 1em; 55 | -moz-column-count: 3; 56 | -webkit-column-count: 3; 57 | } 58 | 59 | p.startli, p.startdd, p.starttd { 60 | margin-top: 2px; 61 | } 62 | 63 | p.endli { 64 | margin-bottom: 0px; 65 | } 66 | 67 | p.enddd { 68 | margin-bottom: 4px; 69 | } 70 | 71 | p.endtd { 72 | margin-bottom: 2px; 73 | } 74 | 75 | /* @end */ 76 | 77 | caption { 78 | font-weight: bold; 79 | } 80 | 81 | span.legend { 82 | font-size: 70%; 83 | text-align: center; 84 | } 85 | 86 | h3.version { 87 | font-size: 90%; 88 | text-align: center; 89 | } 90 | 91 | div.qindex, div.navtab{ 92 | background-color: #EBEFF6; 93 | border: 1px solid #A3B4D7; 94 | text-align: center; 95 | } 96 | 97 | div.qindex, div.navpath { 98 | width: 100%; 99 | line-height: 140%; 100 | } 101 | 102 | div.navtab { 103 | margin-right: 15px; 104 | } 105 | 106 | /* @group Link Styling */ 107 | 108 | a { 109 | color: #3D578C; 110 | font-weight: normal; 111 | text-decoration: none; 112 | } 113 | 114 | .contents a:visited { 115 | 
color: #4665A2; 116 | } 117 | 118 | a:hover { 119 | text-decoration: underline; 120 | } 121 | 122 | a.qindex { 123 | font-weight: bold; 124 | } 125 | 126 | a.qindexHL { 127 | font-weight: bold; 128 | background-color: #9CAFD4; 129 | color: #ffffff; 130 | border: 1px double #869DCA; 131 | } 132 | 133 | .contents a.qindexHL:visited { 134 | color: #ffffff; 135 | } 136 | 137 | a.el { 138 | font-weight: bold; 139 | } 140 | 141 | a.elRef { 142 | } 143 | 144 | a.code, a.code:visited, a.line, a.line:visited { 145 | color: #4665A2; 146 | } 147 | 148 | a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited { 149 | color: #4665A2; 150 | } 151 | 152 | /* @end */ 153 | 154 | dl.el { 155 | margin-left: -1cm; 156 | } 157 | 158 | pre.fragment { 159 | border: 1px solid #C4CFE5; 160 | background-color: #FBFCFD; 161 | padding: 4px 6px; 162 | margin: 4px 8px 4px 2px; 163 | overflow: auto; 164 | word-wrap: break-word; 165 | font-size: 9pt; 166 | line-height: 125%; 167 | font-family: monospace, fixed; 168 | font-size: 105%; 169 | } 170 | 171 | div.fragment { 172 | padding: 0px; 173 | margin: 0px; 174 | background-color: #FBFCFD; 175 | border: 1px solid #C4CFE5; 176 | } 177 | 178 | div.line { 179 | font-family: monospace, fixed; 180 | font-size: 13px; 181 | min-height: 13px; 182 | line-height: 1.0; 183 | text-wrap: unrestricted; 184 | white-space: -moz-pre-wrap; /* Moz */ 185 | white-space: -pre-wrap; /* Opera 4-6 */ 186 | white-space: -o-pre-wrap; /* Opera 7 */ 187 | white-space: pre-wrap; /* CSS3 */ 188 | word-wrap: break-word; /* IE 5.5+ */ 189 | text-indent: -53px; 190 | padding-left: 53px; 191 | padding-bottom: 0px; 192 | margin: 6px; 193 | -webkit-transition-property: background-color, box-shadow; 194 | -webkit-transition-duration: 0.5s; 195 | -moz-transition-property: background-color, box-shadow; 196 | -moz-transition-duration: 0.5s; 197 | -ms-transition-property: background-color, box-shadow; 198 | -ms-transition-duration: 0.5s; 199 | -o-transition-property: 
background-color, box-shadow; 200 | -o-transition-duration: 0.5s; 201 | transition-property: background-color, box-shadow; 202 | transition-duration: 0.5s; 203 | } 204 | 205 | div.line.glow { 206 | background-color: cyan; 207 | box-shadow: 0 0 10px cyan; 208 | } 209 | 210 | 211 | span.lineno { 212 | padding-right: 4px; 213 | text-align: right; 214 | border-right: 2px solid #0F0; 215 | background-color: #E8E8E8; 216 | white-space: pre; 217 | } 218 | span.lineno a { 219 | background-color: #D8D8D8; 220 | } 221 | 222 | span.lineno a:hover { 223 | background-color: #C8C8C8; 224 | } 225 | 226 | div.ah { 227 | background-color: black; 228 | font-weight: bold; 229 | color: #ffffff; 230 | margin-bottom: 3px; 231 | margin-top: 3px; 232 | padding: 0.2em; 233 | border: solid thin #333; 234 | border-radius: 0.5em; 235 | -webkit-border-radius: .5em; 236 | -moz-border-radius: .5em; 237 | box-shadow: 2px 2px 3px #999; 238 | -webkit-box-shadow: 2px 2px 3px #999; 239 | -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; 240 | background-image: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#000),color-stop(0.3, #444)); 241 | background-image: -moz-linear-gradient(center top, #eee 0%, #444 40%, #000); 242 | } 243 | 244 | div.groupHeader { 245 | margin-left: 16px; 246 | margin-top: 12px; 247 | font-weight: bold; 248 | } 249 | 250 | div.groupText { 251 | margin-left: 16px; 252 | font-style: italic; 253 | } 254 | 255 | body { 256 | background-color: white; 257 | color: black; 258 | margin: 0; 259 | } 260 | 261 | div.contents { 262 | margin-top: 10px; 263 | margin-left: 12px; 264 | margin-right: 8px; 265 | } 266 | 267 | td.indexkey { 268 | background-color: #EBEFF6; 269 | font-weight: bold; 270 | border: 1px solid #C4CFE5; 271 | margin: 2px 0px 2px 0; 272 | padding: 2px 10px; 273 | white-space: nowrap; 274 | vertical-align: top; 275 | } 276 | 277 | td.indexvalue { 278 | background-color: #EBEFF6; 279 | border: 1px solid #C4CFE5; 280 | padding: 2px 10px; 281 | margin: 2px 
0px; 282 | } 283 | 284 | tr.memlist { 285 | background-color: #EEF1F7; 286 | } 287 | 288 | p.formulaDsp { 289 | text-align: center; 290 | } 291 | 292 | img.formulaDsp { 293 | 294 | } 295 | 296 | img.formulaInl { 297 | vertical-align: middle; 298 | } 299 | 300 | div.center { 301 | text-align: center; 302 | margin-top: 0px; 303 | margin-bottom: 0px; 304 | padding: 0px; 305 | } 306 | 307 | div.center img { 308 | border: 0px; 309 | } 310 | 311 | address.footer { 312 | text-align: right; 313 | padding-right: 12px; 314 | } 315 | 316 | img.footer { 317 | border: 0px; 318 | vertical-align: middle; 319 | } 320 | 321 | /* @group Code Colorization */ 322 | 323 | span.keyword { 324 | color: #008000 325 | } 326 | 327 | span.keywordtype { 328 | color: #604020 329 | } 330 | 331 | span.keywordflow { 332 | color: #e08000 333 | } 334 | 335 | span.comment { 336 | color: #800000 337 | } 338 | 339 | span.preprocessor { 340 | color: #806020 341 | } 342 | 343 | span.stringliteral { 344 | color: #002080 345 | } 346 | 347 | span.charliteral { 348 | color: #008080 349 | } 350 | 351 | span.vhdldigit { 352 | color: #ff00ff 353 | } 354 | 355 | span.vhdlchar { 356 | color: #000000 357 | } 358 | 359 | span.vhdlkeyword { 360 | color: #700070 361 | } 362 | 363 | span.vhdllogic { 364 | color: #ff0000 365 | } 366 | 367 | blockquote { 368 | background-color: #F7F8FB; 369 | border-left: 2px solid #9CAFD4; 370 | margin: 0 24px 0 4px; 371 | padding: 0 12px 0 16px; 372 | } 373 | 374 | /* @end */ 375 | 376 | /* 377 | .search { 378 | color: #003399; 379 | font-weight: bold; 380 | } 381 | 382 | form.search { 383 | margin-bottom: 0px; 384 | margin-top: 0px; 385 | } 386 | 387 | input.search { 388 | font-size: 75%; 389 | color: #000080; 390 | font-weight: normal; 391 | background-color: #e8eef2; 392 | } 393 | */ 394 | 395 | td.tiny { 396 | font-size: 75%; 397 | } 398 | 399 | .dirtab { 400 | padding: 4px; 401 | border-collapse: collapse; 402 | border: 1px solid #A3B4D7; 403 | } 404 | 405 | th.dirtab { 406 | 
background: #EBEFF6; 407 | font-weight: bold; 408 | } 409 | 410 | hr { 411 | height: 0px; 412 | border: none; 413 | border-top: 1px solid #4A6AAA; 414 | } 415 | 416 | hr.footer { 417 | height: 1px; 418 | } 419 | 420 | /* @group Member Descriptions */ 421 | 422 | table.memberdecls { 423 | border-spacing: 0px; 424 | padding: 0px; 425 | } 426 | 427 | .memberdecls td, .fieldtable tr { 428 | -webkit-transition-property: background-color, box-shadow; 429 | -webkit-transition-duration: 0.5s; 430 | -moz-transition-property: background-color, box-shadow; 431 | -moz-transition-duration: 0.5s; 432 | -ms-transition-property: background-color, box-shadow; 433 | -ms-transition-duration: 0.5s; 434 | -o-transition-property: background-color, box-shadow; 435 | -o-transition-duration: 0.5s; 436 | transition-property: background-color, box-shadow; 437 | transition-duration: 0.5s; 438 | } 439 | 440 | .memberdecls td.glow, .fieldtable tr.glow { 441 | background-color: cyan; 442 | box-shadow: 0 0 15px cyan; 443 | } 444 | 445 | .mdescLeft, .mdescRight, 446 | .memItemLeft, .memItemRight, 447 | .memTemplItemLeft, .memTemplItemRight, .memTemplParams { 448 | background-color: #F9FAFC; 449 | border: none; 450 | margin: 4px; 451 | padding: 1px 0 0 8px; 452 | } 453 | 454 | .mdescLeft, .mdescRight { 455 | padding: 0px 8px 4px 8px; 456 | color: #555; 457 | } 458 | 459 | .memSeparator { 460 | border-bottom: 1px solid #DEE4F0; 461 | line-height: 1px; 462 | margin: 0px; 463 | padding: 0px; 464 | } 465 | 466 | .memItemLeft, .memTemplItemLeft { 467 | white-space: nowrap; 468 | } 469 | 470 | .memItemRight { 471 | width: 100%; 472 | } 473 | 474 | .memTemplParams { 475 | color: #4665A2; 476 | white-space: nowrap; 477 | font-size: 80%; 478 | } 479 | 480 | /* @end */ 481 | 482 | /* @group Member Details */ 483 | 484 | /* Styles for detailed member documentation */ 485 | 486 | .memtemplate { 487 | font-size: 80%; 488 | color: #4665A2; 489 | font-weight: normal; 490 | margin-left: 9px; 491 | } 492 | 493 | 
/* ---- Member documentation blocks ---- */

.memnav {
    background-color: #EBEFF6;
    border: 1px solid #A3B4D7;
    text-align: center;
    margin: 2px;
    margin-right: 15px;
    padding: 2px;
}

.mempage {
    width: 100%;
}

.memitem {
    padding: 0;
    margin-bottom: 10px;
    margin-right: 5px;
    -webkit-transition: box-shadow 0.5s linear;
    -moz-transition: box-shadow 0.5s linear;
    -ms-transition: box-shadow 0.5s linear;
    -o-transition: box-shadow 0.5s linear;
    transition: box-shadow 0.5s linear;
    display: table !important;
    width: 100%;
}

/* highlight a member item when it is the target of a link */
.memitem.glow {
    box-shadow: 0 0 15px cyan;
}

.memname {
    font-weight: bold;
    margin-left: 6px;
}

.memname td {
    vertical-align: bottom;
}

.memproto, dl.reflist dt {
    border-top: 1px solid #A8B8D9;
    border-left: 1px solid #A8B8D9;
    border-right: 1px solid #A8B8D9;
    padding: 6px 0px 6px 0px;
    color: #253555;
    font-weight: bold;
    text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9);
    background-image: url('nav_f.png');
    background-repeat: repeat-x;
    background-color: #E2E8F2;
    /* standard properties */
    box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
    border-top-right-radius: 4px;
    border-top-left-radius: 4px;
    /* firefox specific markup */
    -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px;
    -moz-border-radius-topright: 4px;
    -moz-border-radius-topleft: 4px;
    /* webkit specific markup */
    -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
    -webkit-border-top-right-radius: 4px;
    -webkit-border-top-left-radius: 4px;
}

.memdoc, dl.reflist dd {
    border-bottom: 1px solid #A8B8D9;
    border-left: 1px solid #A8B8D9;
    border-right: 1px solid #A8B8D9;
    padding: 6px 10px 2px 10px;
    border-top-width: 0;
    background-image: url('nav_g.png');
    background-repeat: repeat-x;
    /* NOTE(review): a dead "background-color: #FBFCFD;" preceding this one
       was removed — the later declaration of the same property always won */
    background-color: #FFFFFF;
    /* standard properties */
    border-bottom-left-radius: 4px;
    border-bottom-right-radius: 4px;
    box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
    /* firefox specific markup */
    -moz-border-radius-bottomleft: 4px;
    -moz-border-radius-bottomright: 4px;
    -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px;
    /* webkit specific markup */
    -webkit-border-bottom-left-radius: 4px;
    -webkit-border-bottom-right-radius: 4px;
    -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
}

dl.reflist dt {
    padding: 5px;
}

dl.reflist dd {
    margin: 0px 0px 10px 0px;
    padding: 5px;
}

/* ---- Parameter tables ---- */

.paramkey {
    text-align: right;
}

.paramtype {
    white-space: nowrap;
}

.paramname {
    color: #602020;
    white-space: nowrap;
}

.paramname em {
    font-style: normal;
}

.paramname code {
    line-height: 14px;
}

.params, .retval, .exception, .tparams {
    margin-left: 0px;
    padding-left: 0px;
}

.params .paramname, .retval .paramname {
    font-weight: bold;
    vertical-align: top;
}

.params .paramtype {
    font-style: italic;
    vertical-align: top;
}

.params .paramdir {
    font-family: "courier new",courier,monospace;
    vertical-align: top;
}

/* ---- Member labels (static, inline, virtual, ...) ---- */

table.mlabels {
    border-spacing: 0px;
}

td.mlabels-left {
    width: 100%;
    padding: 0px;
}

td.mlabels-right {
    vertical-align: bottom;
    padding: 0px;
    white-space: nowrap;
}

span.mlabels {
    margin-left: 8px;
}

span.mlabel {
    background-color: #728DC1;
    border-top: 1px solid #5373B4;
    border-left: 1px solid #5373B4;
    border-right: 1px solid #C4CFE5;
    border-bottom: 1px solid #C4CFE5;
    text-shadow: none;
    color: white;
    margin-right: 4px;
    padding: 2px 3px;
    border-radius: 3px;
    font-size: 7pt;
    white-space: nowrap;
    vertical-align: middle;
}

/* @end */

/* these are for tree view when not used as main index */

div.directory {
    margin: 10px 0px;
    border-top: 1px solid #A8B8D9;
    border-bottom: 1px solid #A8B8D9;
    width: 100%;
}

.directory table {
    border-collapse: collapse;
}

.directory td {
    margin: 0px;
    padding: 0px;
    vertical-align: top;
}

.directory td.entry {
    white-space: nowrap;
    padding-right: 6px;
    padding-top: 3px;
}

.directory td.entry a {
    outline: none;
}

.directory td.entry a img {
    border: none;
}

.directory td.desc {
    width: 100%;
    padding-left: 6px;
    padding-right: 6px;
    padding-top: 3px;
    border-left: 1px solid rgba(0,0,0,0.05);
}

.directory tr.even {
    padding-left: 6px;
    background-color: #F7F8FB;
}

.directory img {
    vertical-align: -30%;
}

.directory .levels {
    white-space: nowrap;
    width: 100%;
    text-align: right;
    font-size: 9pt;
}

.directory .levels span {
    cursor: pointer;
    padding-left: 2px;
    padding-right: 2px;
    color: #3D578C;
}

div.dynheader {
    margin-top: 8px;
    -webkit-touch-callout: none;
    -webkit-user-select: none;
    -khtml-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
}

address {
    font-style: normal;
    color: #2A3D61;
}

/* ---- Doxygen markdown/table output ---- */

table.doxtable {
    border-collapse: collapse;
    margin-top: 4px;
    margin-bottom: 4px;
}

table.doxtable td, table.doxtable th {
    border: 1px solid #2D4068;
    padding: 3px 7px 2px;
}

table.doxtable th {
    background-color: #374F7F;
    color: #FFFFFF;
    font-size: 110%;
    padding-bottom: 4px;
    padding-top: 5px;
}

table.fieldtable {
    /*width: 100%;*/
    margin-bottom: 10px;
    border: 1px solid #A8B8D9;
    border-spacing: 0px;
    -moz-border-radius: 4px;
    -webkit-border-radius: 4px;
    border-radius: 4px;
    -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px;
    -webkit-box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15);
    box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15);
}

.fieldtable td, .fieldtable th {
    padding: 3px 7px 2px;
}

.fieldtable td.fieldtype, .fieldtable td.fieldname {
    white-space: nowrap;
    border-right: 1px solid #A8B8D9;
    border-bottom: 1px solid #A8B8D9;
    vertical-align: top;
}

.fieldtable td.fieldname {
    padding-top: 3px;
}

.fieldtable td.fielddoc {
    border-bottom: 1px solid #A8B8D9;
    /*width: 100%;*/
}

.fieldtable td.fielddoc p:first-child {
    margin-top: 0px;
}

.fieldtable td.fielddoc p:last-child {
    margin-bottom: 2px;
}

.fieldtable tr:last-child td {
    border-bottom: none;
}

.fieldtable th {
    background-image: url('nav_f.png');
    background-repeat: repeat-x;
    background-color: #E2E8F2;
    font-size: 90%;
    color: #253555;
    padding-bottom: 4px;
    padding-top: 5px;
    text-align: left;
    -moz-border-radius-topleft: 4px;
    -moz-border-radius-topright: 4px;
    -webkit-border-top-left-radius: 4px;
    -webkit-border-top-right-radius: 4px;
    border-top-left-radius: 4px;
    border-top-right-radius: 4px;
    border-bottom: 1px solid #A8B8D9;
}

/* ---- Search tab and breadcrumb navigation path ---- */

.tabsearch {
    top: 0px;
    left: 10px;
    height: 36px;
    background-image: url('tab_b.png');
    z-index: 101;
    overflow: hidden;
    font-size: 13px;
}

.navpath ul {
    font-size: 11px;
    background-image: url('tab_b.png');
    background-repeat: repeat-x;
    background-position: 0 -5px;
    height: 30px;
    line-height: 30px;
    color: #8AA0CC;
    border: solid 1px #C2CDE4;
    overflow: hidden;
    margin: 0px;
    padding: 0px;
}

.navpath li {
    list-style-type: none;
    float: left;
    padding-left: 10px;
    padding-right: 15px;
    background-image: url('bc_s.png');
    background-repeat: no-repeat;
    background-position: right;
    color: #364D7C;
}

.navpath li.navelem a {
    height: 32px;
    display: block;
    /* duplicate "text-decoration: none;" removed (was declared twice) */
    text-decoration: none;
    outline: none;
    color: #283A5D;
    font-family: 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
    text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9);
}

.navpath li.navelem a:hover {
    color: #6884BD;
}

.navpath li.footer {
    list-style-type: none;
    float: right;
    padding-left: 10px;
    padding-right: 15px;
    background-image: none;
    background-repeat: no-repeat;
    background-position: right;
    color: #364D7C;
    font-size: 8pt;
}

/* ---- Page header area ---- */

div.summary {
    float: right;
    font-size: 8pt;
    padding-right: 5px;
    width: 50%;
    text-align: right;
}

div.summary a {
    white-space: nowrap;
}

div.ingroups {
    font-size: 8pt;
    width: 50%;
    text-align: left;
}

div.ingroups a {
    white-space: nowrap;
}

div.header {
    background-image: url('nav_h.png');
    background-repeat: repeat-x;
    background-color: #F9FAFC;
    margin: 0px;
    border-bottom: 1px solid #C4CFE5;
}

div.headertitle {
    padding: 5px 5px 5px 10px;
}

dl {
    padding: 0 0 0 10px;
}

/* dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug */
dl.section {
    margin-left: 0px;
    padding-left: 0px;
}

dl.note {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #D0C000;
}

dl.warning, dl.attention {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #FF0000;
}

dl.pre, dl.post, dl.invariant {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #00D000;
}

dl.deprecated {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #505050;
}

dl.todo {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #00C0E0;
}

dl.test {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #3030E0;
}

dl.bug {
    margin-left: -7px;
    padding-left: 3px;
    border-left: 4px solid;
    border-color: #C08050;
}

dl.section dd {
    margin-bottom: 6px;
}

/* ---- Project title area ---- */

#projectlogo {
    text-align: center;
    vertical-align: bottom;
    border-collapse: separate;
}

#projectlogo img {
    border: 0px none;
    height: 50px;
}

#projectname {
    font: 300% Tahoma, Arial,sans-serif;
    margin: 0px;
    padding: 2px 0px;
}

#projectbrief {
    font: 120% Tahoma, Arial,sans-serif;
    margin: 0px;
    padding: 0px;
}

#projectnumber {
    font: 50% Tahoma, Arial,sans-serif;
    margin: 0px;
    padding: 0px;
}

#titlearea {
    padding: 0px;
    margin: 0px;
    width: 100%;
    border-bottom: 1px solid #5373B4;
}

/* ---- Images, graphs, and citations ---- */

.image {
    text-align: center;
}

.dotgraph {
    text-align: center;
}

.mscgraph {
    text-align: center;
}

.caption {
    font-weight: bold;
}

div.zoom {
    border: 1px solid #90A5CE;
}

dl.citelist {
    margin-bottom: 50px;
}

dl.citelist dt {
    color: #334975;
    float: left;
    font-weight: bold;
    margin-right: 10px;
    padding: 5px;
}

dl.citelist dd {
    margin: 2px 0;
    padding: 5px 0;
}

/* ---- Table-of-contents box ---- */

div.toc {
    padding: 14px 25px;
    background-color: #F4F6FA;
    border: 1px solid #D8DFEE;
    border-radius: 7px 7px 7px 7px;
    float: right;
    height: auto;
    margin: 0 20px 10px 10px;
    width: 200px;
}

div.toc li {
    background: url("bdwn.png") no-repeat scroll 0 5px transparent;
    font: 10px/1.2 Verdana,DejaVu Sans,Geneva,sans-serif;
    margin-top: 5px;
    padding-left: 10px;
    padding-top: 2px;
}

div.toc h3 {
    font: bold 12px/1.2 Arial,FreeSans,sans-serif;
    color: #4665A2;
    border-bottom: 0 none;
    margin: 0;
}

div.toc ul {
    list-style: none outside none;
    border: medium none;
    padding: 0px;
}

div.toc li.level1 {
    margin-left: 0px;
}

div.toc li.level2 {
    margin-left: 15px;
}

div.toc li.level3 {
    margin-left: 30px;
}

div.toc li.level4 {
    margin-left: 45px;
}
/* ---- Inherited-member sections ---- */

.inherit_header {
    font-weight: bold;
    color: gray;
    cursor: pointer;
    -webkit-touch-callout: none;
    -webkit-user-select: none;
    -khtml-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
}

.inherit_header td {
    padding: 6px 0px 2px 5px;
}

.inherit {
    display: none;
}

tr.heading h2 {
    margin-top: 12px;
    margin-bottom: 4px;
}

/* tooltip related style info */

.ttc {
    position: absolute;
    display: none;
}

#powerTip {
    cursor: default;
    white-space: nowrap;
    background-color: white;
    border: 1px solid gray;
    border-radius: 4px 4px 4px 4px;
    box-shadow: 1px 1px 7px gray;
    display: none;
    font-size: smaller;
    max-width: 80%;
    opacity: 0.9;
    padding: 1ex 1em 1em;
    position: absolute;
    z-index: 2147483647;
}

#powerTip div.ttdoc {
    color: grey;
    font-style: italic;
}

#powerTip div.ttname a {
    font-weight: bold;
}

#powerTip div.ttname {
    font-weight: bold;
}

#powerTip div.ttdeci {
    color: #006318;
}

#powerTip div {
    margin: 0px;
    padding: 0px;
    font: 12px/16px Roboto,sans-serif;
}

#powerTip:before, #powerTip:after {
    content: "";
    position: absolute;
    margin: 0px;
}

/* arrow pseudo-elements for each tooltip placement (n/s/e/w and corners) */
#powerTip.n:after, #powerTip.n:before,
#powerTip.s:after, #powerTip.s:before,
#powerTip.w:after, #powerTip.w:before,
#powerTip.e:after, #powerTip.e:before,
#powerTip.ne:after, #powerTip.ne:before,
#powerTip.se:after, #powerTip.se:before,
#powerTip.nw:after, #powerTip.nw:before,
#powerTip.sw:after, #powerTip.sw:before {
    border: solid transparent;
    content: " ";
    height: 0;
    width: 0;
    position: absolute;
}

#powerTip.n:after, #powerTip.s:after,
#powerTip.w:after, #powerTip.e:after,
#powerTip.nw:after, #powerTip.ne:after,
#powerTip.sw:after, #powerTip.se:after {
    border-color: rgba(255, 255, 255, 0);
}

#powerTip.n:before, #powerTip.s:before,
#powerTip.w:before, #powerTip.e:before,
#powerTip.nw:before, #powerTip.ne:before,
#powerTip.sw:before, #powerTip.se:before {
    border-color: rgba(128, 128, 128, 0);
}

/* north placements: arrow hangs below the tip */
#powerTip.n:after, #powerTip.n:before,
#powerTip.ne:after, #powerTip.ne:before,
#powerTip.nw:after, #powerTip.nw:before {
    top: 100%;
}

#powerTip.n:after, #powerTip.ne:after, #powerTip.nw:after {
    border-top-color: #ffffff;
    border-width: 10px;
    margin: 0px -10px;
}

#powerTip.n:before {
    border-top-color: #808080;
    border-width: 11px;
    margin: 0px -11px;
}

#powerTip.n:after, #powerTip.n:before {
    left: 50%;
}

#powerTip.nw:after, #powerTip.nw:before {
    right: 14px;
}

#powerTip.ne:after, #powerTip.ne:before {
    left: 14px;
}

/* south placements: arrow sits above the tip */
#powerTip.s:after, #powerTip.s:before,
#powerTip.se:after, #powerTip.se:before,
#powerTip.sw:after, #powerTip.sw:before {
    bottom: 100%;
}

#powerTip.s:after, #powerTip.se:after, #powerTip.sw:after {
    border-bottom-color: #ffffff;
    border-width: 10px;
    margin: 0px -10px;
}

#powerTip.s:before, #powerTip.se:before, #powerTip.sw:before {
    border-bottom-color: #808080;
    border-width: 11px;
    margin: 0px -11px;
}

#powerTip.s:after, #powerTip.s:before {
    left: 50%;
}

#powerTip.sw:after, #powerTip.sw:before {
    right: 14px;
}

#powerTip.se:after, #powerTip.se:before {
    left: 14px;
}

/* east placement: arrow on the right edge */
#powerTip.e:after, #powerTip.e:before {
    left: 100%;
}

#powerTip.e:after {
    border-left-color: #ffffff;
    border-width: 10px;
    top: 50%;
    margin-top: -10px;
}

#powerTip.e:before {
    border-left-color: #808080;
    border-width: 11px;
    top: 50%;
    margin-top: -11px;
}

/* west placement: arrow on the left edge */
#powerTip.w:after, #powerTip.w:before {
    right: 100%;
}

#powerTip.w:after {
    border-right-color: #ffffff;
    border-width: 10px;
    top: 50%;
    margin-top: -10px;
}

#powerTip.w:before {
    border-right-color: #808080;
    border-width: 11px;
    top: 50%;
    margin-top: -11px;
}

/* hide navigation chrome when printing */
@media print
{
    #top { display: none; }
    #side-nav { display: none; }
    #nav-path { display: none; }
    body { overflow: visible; }
    h1, h2, h3, h4, h5, h6 { page-break-after: avoid; }
    .summary { display: none; }
    .memitem { page-break-inside: avoid; }
    #doc-content
    {
        margin-left: 0 !important;
        height: auto !important;
        width: auto !important;
        overflow: inherit;
        display: inline;
    }
}