├── .gitignore ├── tests ├── test_alwaysfail.py ├── test_alwayspass.py ├── setup.cfg ├── access_env_var.sh ├── test_25pass.py └── test_75pass.py ├── doc └── test_summary_issue.png ├── pull_request_template.md ├── install_hooks.sh ├── access_env_var.sh ├── hooks └── pre-commit ├── vars ├── pytestVars.groovy └── utils.groovy ├── invalid_report.xml ├── src ├── DataConfig.groovy ├── JobConfig.groovy └── BuildConfig.groovy ├── Jenkinsfile ├── JenkinsfileRT └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | -------------------------------------------------------------------------------- /tests/test_alwaysfail.py: -------------------------------------------------------------------------------- 1 | def test_alwaysfail(): 2 | assert 1==2 3 | -------------------------------------------------------------------------------- /tests/test_alwayspass.py: -------------------------------------------------------------------------------- 1 | def test_alwayspass(): 2 | assert 1==1 3 | 4 | -------------------------------------------------------------------------------- /tests/setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | junit_family=xunit2 3 | results_root = datb-generic 4 | -------------------------------------------------------------------------------- /doc/test_summary_issue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spacetelescope/jenkins_shared_ci_utils/master/doc/test_summary_issue.png -------------------------------------------------------------------------------- /tests/access_env_var.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo "This is the value of SECRET_VALUE" 4 | echo "${SECRET_VALUE}" 5 | -------------------------------------------------------------------------------- 
/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Reminder: Be sure to remove any @Library directive present at the top of Jenkinsfiles before merging into master. 2 | -------------------------------------------------------------------------------- /install_hooks.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo "Installing pre-commit git hook script to facilitate CI testing..." 4 | ln -s ../../hooks/pre-commit .git/hooks/pre-commit 5 | -------------------------------------------------------------------------------- /access_env_var.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo "This is the value of SECRET_VALUE" 4 | echo "${SECRET_VALUE}" 5 | 6 | echo "This is the value of CUSTOM_CREDENTIAL_VAR" 7 | echo "${CUSTOM_CREDENTIAL_VAR}" 8 | -------------------------------------------------------------------------------- /tests/test_25pass.py: -------------------------------------------------------------------------------- 1 | def test_25_1of4(): 2 | assert 1==1 3 | 4 | def test_25_2of4(): 5 | assert 1==2 6 | 7 | def test_25_3of4(): 8 | assert 1==3 9 | 10 | def test_25_4of4(): 11 | assert 1==4 12 | -------------------------------------------------------------------------------- /tests/test_75pass.py: -------------------------------------------------------------------------------- 1 | def test_75_1of4(): 2 | assert 1==1 3 | 4 | def test_75_2of4(): 5 | assert 1==1 6 | 7 | def test_75_3of4(): 8 | assert 1==1 9 | 10 | def test_75_4of4(): 11 | assert 1==4 12 | -------------------------------------------------------------------------------- /hooks/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | printf "[pre-commit hook] Updating branch reference in Jenkinsfile.test to allow self-testing... 
" 4 | branch=$(git rev-parse --abbrev-ref HEAD) 5 | sed -i "s/utils@.*'/utils@${branch}'/" Jenkinsfile 6 | git update-index --add Jenkinsfile 7 | printf "done.\n" 8 | -------------------------------------------------------------------------------- /vars/pytestVars.groovy: -------------------------------------------------------------------------------- 1 | class pytestVars implements Serializable { 2 | // Pytest exit codes 3 | // https://docs.pytest.org/en/stable/reference.html#pytest.ExitCode 4 | final int EXIT_OK = 0 5 | final int EXIT_TESTS_FAILED = 1 6 | final int EXIT_INTRRUPTED = 2 7 | final int EXIT_INTERNAL_ERROR = 3 8 | final int EXIT_USAGE_ERROR = 4 9 | final int EXIT_NO_TESTS = 5 10 | 11 | // Minimum version of pytest capable of emitting reliable exit codes 12 | final String EXIT_CAPABLE = "5.0" 13 | } 14 | 15 | -------------------------------------------------------------------------------- /invalid_report.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/DataConfig.groovy: -------------------------------------------------------------------------------- 1 | import groovy.json.JsonOutput 2 | import org.apache.commons.io.FileUtils 3 | 4 | class DataConfig implements Serializable { 5 | String root = '.' 
import groovy.json.JsonOutput
import org.apache.commons.io.FileUtils

// Holds Artifactory data-ingest settings plus named JSON payloads that
// describe what to upload and how long to retain it.
class DataConfig implements Serializable {
    String root = '.'            // directory to scan for result files
    String server_id = ''        // Artifactory server id
    String match_prefix = '(.*)' // regex prefix used to match result files
    Boolean keep_data = false
    int keep_builds = 20
    int keep_days = 10
    def data = [:]               // name -> JSON string payloads

    DataConfig() {}

    // Store an already-serialized JSON string under `name`.
    def insert(String name, String block) {
        this.data[name] = block
    }

    // Serialize a Groovy map to JSON and store it under `name`.
    def insert(String name, block = [:]) {
        this.data[name] = JsonOutput.toJson(block)
    }
}
// Simple value holder for aggregated test-result information.
// NOTE(review): fields mirror the (subject, message) arguments of
// postGithubIssue in vars/utils.groovy — presumably used to compose
// summary issue posts; confirm against callers.
class testInfo implements Serializable {
    def problems = false  // set true when failures/errors were detected
    def subject = ""      // issue title text
    def message = ""      // issue body text
}
// Job-level configuration; applies to every build config passed to run().
jobconfig = new JobConfig()
//jobconfig.post_test_summary = true
//jobconfig.credentials = ['SECRET_VALUE']
//jobconfig.enable_env_publication = true
//jobconfig.publish_env_on_success_only = false


// Pytest wrapper
def PYTEST_BASETEMP = "test_outputs"
def PYTEST = "pytest \
    -r s \
    --basetemp=${PYTEST_BASETEMP} \
    --junit-xml=results.xml"

// Configure artifactory ingest
data_config = new DataConfig()
data_config.server_id = 'bytesalad'
// NOTE(review): single quotes mean no Groovy interpolation here — the
// literal text '${PYTEST_BASETEMP}' is stored. Confirm the library
// expands it later (late expansion); otherwise this should be a
// double-quoted GString.
data_config.root = '${PYTEST_BASETEMP}'
data_config.match_prefix = '(.*)_result' // .json is appended automatically


bc0 = new BuildConfig()
bc0.nodetype = 'linux'
bc0.name = 'First buildconfig'
bc0.env_vars = ['VAR_ONE=1',
                'VAR_TWO=2']
bc0.conda_ver = 'py39_23.1.0-1'
bc0.conda_packages = ['python=3.9',
                      'pytest']
bc0.build_cmds = ["env",
                  "ls -al ..", // Workspace root.
                  "ls -al",    // Project clone dir.
                  "conda config --show",
                  "./access_env_var.sh",
                  "which python",
                  "conda install ipython"]
bc0.test_cmds = ["${PYTEST} tests/test_75pass.py"]
bc0.test_configs = [data_config]


bc1 = utils.copy(bc0)
bc1.name = 'Second'
bc1.env_vars = ['VAR_THREE=3',
                'VAR_FOUR=4']
// BUG FIX: test_cmds inherited from bc0 holds a single command at index 0.
// Assigning to index 1 grew the list, so this config ran BOTH
// test_75pass and test_25pass. Replace the inherited command instead.
bc1.test_cmds[0] = "${PYTEST} tests/test_25pass.py"


bc2 = utils.copy(bc0)
bc2.name = 'Third build config'
bc2.conda_packages = ['python=3.9']
bc2.build_cmds = ["env",
                  "which python"]
bc2.test_cmds = ["ls -al ..", // Workspace root.
                 "ls -al"]    // Project clone dir.
63 | bc2.test_configs = [] 64 | 65 | 66 | utils.run([bc0, bc1, bc2, jobconfig]) 67 | -------------------------------------------------------------------------------- /JenkinsfileRT: -------------------------------------------------------------------------------- 1 | //@Library('utils@credentials') _ 2 | 3 | // [skip ci] and [ci skip] have no effect here. 4 | if (utils.scm_checkout(['skip_disable':true])) return 5 | 6 | // Allow modification of the job configuration, affects all relevant build configs. 7 | // Pass this object in the argument list to the`run()` function below to apply these settings to the job's execution. 8 | jobconfig = new JobConfig() 9 | //jobconfig.post_test_summary = true 10 | //jobconfig.enable_env_publication = true 11 | //jobconfig.publish_env_on_success_only = false 12 | 13 | jobconfig.credentials = [['SECRET_VALUE', 'CUSTOM_CREDENTIAL_VAR'], 14 | 'SECRET_VALUE'] 15 | 16 | 17 | // Pytest wrapper 18 | def PYTEST_BASETEMP = "test_outputs" 19 | def PYTEST = "pytest \ 20 | -r s \ 21 | --basetemp=${PYTEST_BASETEMP} \ 22 | --junit-xml=results.xml" 23 | 24 | // Configure artifactory ingest 25 | data_config = new DataConfig() 26 | data_config.server_id = 'bytesalad' 27 | data_config.root = '${PYTEST_BASETEMP}' 28 | data_config.match_prefix = '(.*)_result' // .json is appended automatically 29 | 30 | 31 | bc0 = new BuildConfig() 32 | //bc0.nodetype = 'RHEL-6' 33 | bc0.nodetype = 'linux' 34 | bc0.name = 'First buildconfig' 35 | bc0.env_vars = ['VAR_ONE=1', 36 | 'VAR_TWO=2'] 37 | bc0.conda_ver = '4.6.4' 38 | bc0.conda_packages = ['python=3.9', 39 | 'pytest'] 40 | bc0.build_cmds = ["env", 41 | "./access_env_var.sh", 42 | "ls -al ..", // Workspace root. 43 | "ls -al", // Project clone dir. 
44 | "conda config --show", 45 | "which python", 46 | "conda install ipython"] 47 | bc0.test_cmds = ["${PYTEST} tests/test_75pass.py"] 48 | bc0.test_configs = [data_config] 49 | 50 | 51 | bc1 = utils.copy(bc0) 52 | bc1.name = 'Second' 53 | bc1.env_vars = ['VAR_THREE=3', 54 | 'VAR_FOUR=4'] 55 | bc1.test_cmds[1] = "${PYTEST} tests/test_25pass.py" 56 | 57 | 58 | bc2 = utils.copy(bc0) 59 | bc2.name = 'Third build config' 60 | bc2.conda_packages = ['python=3.9'] 61 | bc2.build_cmds = ["env", 62 | "which python"] 63 | bc2.test_cmds = ["ls -al ..", // Workspace root. 64 | "ls -al"] // Project clone dir. 65 | bc2.test_configs = [] 66 | 67 | 68 | utils.run([bc0, bc1, bc2, jobconfig]) 69 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This Jenkins shared library provides common utility classes and functions used to 2 | support continuous integration (CI) build and test jobs for projects within the spacetelescope organization. 3 | 4 | Note: For jobs spawned as a result of the creation of a pull request (PR), Jenkins behaves differently than the Travis CI service (https://travis-ci.org/) in that the build will use the HEAD commit of the PR branch only, while Travis typically will run the build/test job on the _merge commit_ between the PR branch and the master branch. 5 | 6 | ## A Simplified Job Definition Syntax 7 | 8 | Functionality provided that extends the native Groovy syntax approach 9 | 1. Terminate job execution immediately (with a success status) when the string `[skip ci]` or `[ci skip]` is found in the commit message. 10 | 2. Selection of either parallel (default) or sequential execution of the specified build matrix. 11 | 3. Automatic creation of a conda environment with user-specified dependencies to host the build. 
    ['SECRET_VALUE_CREDENTIAL_ID', 'CUSTOM_ENV_VAR_NAME'] // Mapping of credential ID to custom env var.
// Iterate over configurations that define the (distributed) build matrix.
containing a summary of test results produced by all build configurations hosted in the job if any tests returned a `failure` or `error` status.
If instead a custom environment variable name is desired onto which the value of a stored credential secret is to be mapped, it may be supplied as the second element of a list. I.e. `['SECRET_VALUE_CREDENTIAL_ID', 'CUSTOM_ENV_VAR_NAME']` as shown in the example Jenkinsfile above.
111 | 112 | An example issue: 113 | ![issue_image](doc/test_summary_issue.png "Example issue") 114 | 115 | If tests continue to fail or error in the periodically scheduled job, a (possibly redundant) issue will be posted each time the job runs. 116 | 117 | #### BuildConfig Class 118 | The utils library also provides the definition of a class called BuildConfig that may be used to create build configuration objects used to define build tasks to be run on various hosts. 119 | 120 | It has the following properties: 121 | 122 | | Member | Type | Required | Purpose | 123 | | --- | --- | --- | --- | 124 | | `nodetype` | string | yes | The Jenkins node label to which the build is assigned | 125 | | `name` | string | yes | A (short) arbitrary name/description of the build configuration. Builds are named `/` in the build status GUI. I.e. "linux/stable" or "linux/debug" | 126 | | `run_on_days` | list of strings | no | (When absent, default behavior is to always run the BuildConfig.) Primarily for periodic regression test (RT) job use. A list of day-of-week names on which to execute the associated BuildConfig. Example: `bc0.run_on_days = ['sat', 'sun']` to only run the BuildConfig on those two days. Valid day names are `sun`, `mon`, `tue`, `wed`, `thu`, `fri`, `sat`.| 127 | | `conda_packages` | list of strings | no | If this list is defined, the associated build job will create a temporary conda environment to host the job which contains the packages specified. Package specifications are of the form
  • ``
  • `=`
Example: `bc0.conda_packages = ["pytest", "requests", "numpy=1.14.3"]` | 128 | | `conda_override_channels` | boolean | no | Instructs the conda environment creation process to not implicitly prepend the anaconda defaults channel to the list of channels used. This allows the priority of channels to be used for environment creation to be specified exactly in the order of channels provided in the `conda_channels` list, described below. If `conda_packages` is not defined in the Jenkinsfile this property is ignored. | 129 | | `conda_channels` | list of strings | no | The list of channels, in order of search priority, to use when retrieving packages for installation. If `conda_override_channels` is not defined, this list will have the conda `defaults` channel implicitly prepended to it at installation time. If `conda_packages` is not defined in the Jenkinsfile this property is ignored. Example: `bc0.conda_channels = ["http://ssb.stsci.edu/astroconda"]` | 130 | | `conda_ver` | string | no | The version of conda to use when creating environments to host the build. If not supplied, a recent version of conda will be obtained. | 131 | | `pip_reqs_files` | list of strings | no | Name(s) of pip requirements files to use to install python packages into the build environment. Each file will be processed with a `pip install -r [file]` in the order in which they appear in the list. | 132 | | `env_vars` | list of strings | no | Allow configuration of the shell environment in which build and test commands are run. Of note:
  • Only single-quoted `'` strings are supported in order to accommodate both early and late variable expansion (see later bullet points). An error will be thrown and the job status set to "FAILED" if double-quoted strings are used in this list.
  • Relative path characters such as `.` and '..' are honored with respect to the isolated build WORKSPACE directory into which the source repository is cloned and the build job takes place.
  • (Early expansion) - Variables can be expanded into the `env_vars` list items before they are passed to the shell. This is useful for programmatic composition of values that takes place within the Jenkinsfile. Example:
    • MATRIX_SUFFIX is a variable local to the Jenkinsfile script.
    • 'BUILD_MATRIX_SUFFIX=' + MATRIX_SUFFIX is how to compose a list item that will be expanded using the value of MATRIX_SUFFIX _prior_ to being passed to the shell.
  • (Late expansion) - Variable names prefixed with `$` are dereferenced to their value by the bash shell responsible for hosting the job's activities. The variable name to dereference must exist at the time the entry in the `env_vars` list is processed on each parallel node. I.e. variables can appear in the definition of other variables later in the list (the list is processed in order.)
| 133 | | `build_cmds` | list of strings | yes | These commands are run in their order of appearance in this list with the default shell environment and any modifications to that environment provided by the `env_vars` list described above.
  • Variables defined in the Jenkinsfile script itself may appear in these commands via `${varname}` notation and are interpolated at script execution time.
  • These commands are executed BEFORE any optional `test_cmds`.
| 134 | | `test_cmds` | list of strings | no | These commands are run in their order of appearance in this list with the default shell environment plus any modifications to that environment provided by the `env_vars` list described above.
  • If this list is not set for a build configuration, no test commands are run and no test report is generated.
  • If present, these commands are executed AFTER the build_cmds. NOTE: The return code from each of the commands in this list is ignored. This is to prevent issues with tools such as `pytest` which return a nonzero exit code when tests fail, thus causing Jenkins to abort the entire job before the Post-Build stage runs where additional processing of results may occur.
| 135 | | `failedFailureNewThresh` | integer | no | (Default is no threshold set.) The threshold for the number of newly appearing test failures that will cause the build to be flagged as "FAILED". | 136 | | `failedFailureThresh` | integer | no | (Default is no threshold set.) The threshold for the number of test failures that will cause the build to be flagged as "FAILED". | 137 | | `failedUnstableNewThresh` | integer | no | (Default is no threshold set.) The threshold for the number of newly appearing test failures that will cause the build to be flagged as "UNSTABLE". | 138 | | `failedUnstableThresh` | integer | no | (Default is no threshold set.) The threshold for the number of test failures that will cause the build to be flagged as "UNSTABLE". | 139 | | `skippedFailureNewThresh` | integer | no | (Default is no threshold set.) The threshold for the number of newly appearing skipped tests that will cause the build to be flagged as "FAILED". | 140 | | `skippedFailureThresh` | integer | no | (Default is no threshold set.) The threshold for the number of skipped tests that will cause the build to be flagged as "FAILED". | 141 | | `skippedUnstableNewThresh` | integer | no | (Default is no threshold set.) The threshold for the number of newly appearing skipped tests that will cause the build to be flagged as "UNSTABLE". | 142 | | `skippedUnstableThresh` | integer | no | (Default is no threshold set.) The threshold for the number of skipped tests that will cause the build to be flagged as "UNSTABLE". | 143 | 144 | 145 | ### Test Results Customization 146 | 147 | The following documentation for the xUnit plugin which is used to provide the test report functionality in the CI system may be useful when customizing test thresholds. The heading "Accept a Baseline". https://jenkins.io/blog/2016/10/31/xunit-reporting/ describes the scenario and how to set the appropriate thresholds. 
148 | 149 | The return code of all commands specified in the `test_cmds` list are explicitly ignored and do not affect the overall job status (Success/Unstable/Failure) in Jenkins. 150 | 151 | ### Build Sequence 152 | 153 | This is a brief description of the job execution sequence to aid in understanding CI system behavior when constructing build configuration scripts. 154 | 155 | 1. A repository in https://github.com/spacetelescope has a Jenkinsfile added to one or more branches or PRs. The Jenkinsfile describes the build and test activities to take place upon a git push event. 156 | 2. A git push event takes place on a branch containing a Jenkinsfile. 157 | 3. Jenkins initiates a clone of the repository where the push event occurred INTO A SUBDIRECTORY of the job workspace called 'clone'. If you wish to modify the PATH variable, for instance to refer to some path in the source tree or a directory generated from the configuration or build process, bear this in mind. The CI job tree looks like: 158 | 159 | ``` 160 | 161 | +- clone (project source tree) 162 | +- 163 | +- miniforge (if conda was requested) 164 | ``` 165 | 166 | 4. Source check out 167 | a. When the `if (utils.scm_checkout()) return` construct is used, the commit message is examined and the build is immediately terminated with a SUCCESS status if the string `[skip ci]` or `[ci skip]` appears in the latest commit message. If no such string is found, job execution continues. 168 | b. Jenkins creates a "stash" of all the files that were retrieved by the git clone and distributes them internally ("unstashes" them) to each build host that is spawned later in this sequence. This is done to minimize network calls to external resources. 169 | 5. The Jenkinsfile is executed as a Jenkins "pipeline script" 170 | 1. For every build configuration passed in to the utils.run() function a docker container will be created to host the build and test activities for that configuration. 171 | 2. 
// Determine if a program is available on $PATH
//
// @param name String program name
// @return int Non-zero on failure, zero on success
int programExists(String name) {
    // Sanitize input: keep only the leading token, stripping shell metacharacters.
    name = name.split('\\ |\\;|\\||\\&\\&?|\\/|\\\\')[0]
    // Find program, return status
    return sh(script: "which ${name}", label: "Check program exists: ${name}", returnStatus: true)
}

// Return true when `version` satisfies the minimum version `minver`.
//
// BUG FIX: the previous implementation compared only the MAJOR version
// components, and compared them as strings, so e.g.
// versionMin("5.0", "10.2") returned false ("10" < "5" lexicographically)
// and versionMin("5.1", "5.0") returned true (minor part ignored).
// Compare each dotted component numerically instead.
//
// @param minver String minimum acceptable version, e.g. "5.0"
// @param version String version to test, e.g. "6.2.1"
// @return Boolean true if version >= minver
Boolean versionMin(String minver, String version) {
    def have = version.tokenize('.')
    def want = minver.tokenize('.')
    int parts = Math.max(have.size(), want.size())
    for (int i = 0; i < parts; i++) {
        // Missing or non-numeric components compare as 0.
        int h = (i < have.size() && have[i].isInteger()) ? have[i].toInteger() : 0
        int w = (i < want.size() && want[i].isInteger()) ? want[i].toInteger() : 0
        if (h != w) {
            return h > w
        }
    }
    // All components equal: the minimum is satisfied.
    return true
}

// True when the installed pytest is new enough to emit reliable exit codes.
Boolean pytestSupportsExitCodes() {
    return pytestVersionMin(pytestVars.EXIT_CAPABLE)
}
println("pytest is not installed") 41 | return false 42 | } 43 | 44 | // Extract version from pytest output: 45 | // "pytest x.y.?\n" 46 | version = sh(script: "pytest --version 2>&1", returnStdout: true, label: "Get pytest version").trim().split(' ')[1] 47 | 48 | return versionMin(target_version, version) 49 | } 50 | 51 | @NonCPS 52 | // Post an issue to a particular Github repository. 53 | // 54 | // @param reponame - str 55 | // @param username - str username to use when authenticating to Github 56 | // @param password - str password for the associated username 57 | // @param subject - str Subject/title text for the issue 58 | // @param message - str Body text for the issue 59 | def postGithubIssue(reponame, username, password, subject, message) { 60 | def github = GitHub.connectUsingPassword("${username}", "${password}") 61 | def repo = github.getRepository(reponame) 62 | // Determine if the 'testing' label exists in the repo. If it does, 63 | // apply it to the new issue. 64 | def labels = repo.listLabels() 65 | def labelnames = [] 66 | for (label in labels) { 67 | labelnames.add(label.getName()) 68 | } 69 | def labelname = 'testing' 70 | def ibuilder = repo.createIssue(subject) 71 | ibuilder.body(message) 72 | if (labelname in labelnames) { 73 | ibuilder.label(labelname) 74 | } 75 | ibuilder.create() 76 | } 77 | 78 | 79 | // Clone the source repository and examine the most recent commit message. 80 | // If a '[ci skip]' or '[skip ci]' directive is present, immediately 81 | // terminate the job with a success code. 82 | // If no skip directive is found, or skip_disable is true, stash all the 83 | // source files for efficient retrieval by subsequent nodes. 84 | // 85 | // @param args Map containing entries for control of Setup stage. 86 | // 87 | // @return skip_job int Status of clone step, to be tested to determine 88 | // need to abort from Jenkinsfile. 
def scm_checkout(args = ['skip_disable':false]) {
    skip_job = 0
    node('master') {
        stage("Setup") {
            deleteDir()
            // Perform repo checkout, which for some reason clobbers everything
            // in the workspace. Then, create a project subdir, and move all
            // files into it. Then continue as usual.
            checkout(scm)
            sh "mkdir clone"
            stat = sh(script: "shopt -s dotglob; mv * clone", returnStatus: true)
            println("args['skip_disable'] = ${args['skip_disable']}")
            dir('clone') {
                if (args['skip_disable'] == false) {
                    // Obtain the last commit message and examine it for skip directives.
                    logoutput = sh(script: "git log -1 --pretty=%B", returnStdout: true).trim()
                    if (logoutput.contains("[ci skip]") || logoutput.contains("[skip ci]")) {
                        skip_job = 1
                        currentBuild.result = 'SUCCESS'
                        println("\nBuild skipped due to commit message directive.\n")
                        // NOTE(review): this returns from the dir() closure only,
                        // not from scm_checkout itself; the stash below still
                        // runs and the function's final `return skip_job`
                        // delivers the skip status to the caller.
                        return skip_job
                    }
                }
            } // end dir('clone')
            stash includes: '**/*', name: 'source_tree', useDefaultExcludes: false
        }
    }
    return skip_job
}


// Returns true if the conda exe is somewhere in the $PATH, false otherwise.
// @return boolean
def condaPresent() {
    // 'conda --version' exits 0 only when a runnable conda is on $PATH.
    return sh(script: "conda --version", returnStatus: true) == 0
}


// Install a particular version of conda by downloading and running the miniforge
// installer and then installing conda at the specified version.
//
// @param version string holding version of conda to install
//                A version argument of 'null' will result in the latest
//                available conda version being installed.
// @param install_dir directory relative to the current working directory
//                    where conda should be installed.
//
// @return boolean true if conda could be downloaded and installed, false
//         otherwise
def installConda(version, install_dir) {

    // Pinned miniforge installer release and fallback defaults.
    installer_ver = '24.3.0-0'
    default_conda_version = '24.5.0'
    default_dir = 'miniforge3'

    // NOTE(review): despite the doc comment above, a null version installs
    // the pinned default_conda_version, not the latest available — confirm
    // which behavior is intended.
    if (version == null) {
        version = default_conda_version
    }
    if (install_dir == null) {
        install_dir = default_dir
    }

    def conda_base_url = "https://ssb.stsci.edu/miniforge"

    def OSname = null
    def uname = sh(script: "uname", returnStdout: true).trim()
    if (uname == "Darwin") {
        OSname = "MacOSX"
        println("OSname=${OSname}")
        env.PATH = "/sw/bin:$PATH"
    }
    if (uname == "Linux") {
        OSname = uname
        println("OSname=${OSname}")
    }
    // FIX: was `assert uname != null`, which can never fail because sh()
    // always returns a string. Assert on OSname instead so an unsupported
    // platform aborts here rather than producing a bogus installer file
    // name ("Miniforge3-...-null-x86_64.sh") below.
    assert OSname != null

    // Check for the availability of a download tool and then use it
    // to get the conda installer.
    def dl_cmds = ["curl -LOSs",
                   "wget --no-verbose --server-response --no-check-certificate"]
    def dl_cmd = null
    def stat1 = 999
    for (cmd in dl_cmds) {
        stat1 = sh(script: "which ${cmd.tokenize()[0]}", returnStatus: true)
        if (stat1 == 0) {
            dl_cmd = cmd
            break
        }
    }
    if (stat1 != 0) {
        println("Could not find a download tool for obtaining conda. Unable to proceed.")
        return false
    }

    def cwd = pwd()
    def conda_exe = "${install_dir}/bin/conda"
    def conda_installer = "Miniforge3-${installer_ver}-${OSname}-x86_64.sh"
    dl_cmd = dl_cmd + " ${conda_base_url}/${conda_installer}"
    // Skip the download if a previous run already fetched the installer.
    if (!fileExists("./${conda_installer}")) {
        sh dl_cmd
    }

    // Install miniforge
    sh "bash ./${conda_installer} -b -p ${install_dir}"

    // Override conda version if specified and different from default.
    def curr_ver = sh(script:"${conda_exe} --version", returnStdout: true)
    curr_ver = curr_ver.tokenize()[1].trim()
    if (curr_ver != version) {
        sh "${conda_exe} install -q conda=${version}"
    }

    return true
}

// Retrieve the current git branch
//
// @return string
def gitCurrentBranch() {
    def branch = scm.branches[0].toString().tokenize('/')[-1]
    return branch
}


// Retrieve the URL associated with "origin"
//
// @return string
def gitCurrentOrigin() {
    return sh(script: "git remote get-url origin", returnStdout: true).trim()
}

// Part of post-build stage. Runs on 'master' node.
// NOTE(review): the source for the remainder of this region appears
// truncated/garbled in the provided dump — parseTestReports breaks off
// mid-statement and resumes inside a later function (publishCondaEnv).
// It is reproduced below as-is rather than guessed at; recover the
// missing lines from version control before relying on this region.
def parseTestReports(buildconfigs) {
    // Unstash all test reports produced by all possible agents.
    // Iterate over all unique files to compose the testing summary.
    def confname = ''
    def report_hdr = ''
    def short_hdr = ''
    def raw_totals = ''
    def totals = [:]
    def tinfo = new testInfo()
    tinfo.subject = "[AUTO] Regression testing summary"
    tinfo.message = "Regression Testing (RT) Summary:\n\n"
    for (config in buildconfigs) {
        println("Unstashing test report for: ${config.name}")
        try {
            unstash "${config.name}.results"
            results_hdr = sh(script:"grep 'testsuite errors' 'results.${config.name}.xml'",
                             returnStdout: true)
            short_hdr = results_hdr.findAll(/(?<=testsuite ).*/)[0]
            short_hdr = short_hdr.split('>${prop.getProperty('results_root')}")
                pub_repo = prop.getProperty('results_root')
                println("Variable 'pub_repo' populated by information from file 'setup.cfg'")
            }
            else if (fileExists('pyproject.toml')) {
                // Get pub_repo from value stored in pyproject.toml file
                def fileContents = readFile('pyproject.toml')
                def lines = fileContents.split("\n") // split file into individual lines by parsing newline chars
for (line in lines) { 396 | line = line.replaceAll("\\s","") // Remove all whitespaces 397 | if (line.startsWith("results_root")) { 398 | println("PROP->${line.split("=")[1].replaceAll(quotes,"")}") 399 | pub_repo = line.split("=")[1].replaceAll(quotes,"") 400 | println("Variable 'pub_repo' populated by information from file 'pyproject.toml'") 401 | } 402 | } 403 | if (pub_repo == "") { 404 | // throw error if value for 'pub_repo' cannot be found. 405 | throw new Exception("Error: Value for 'pub_repo' not found in existing file 'pyproject.toml'") 406 | } 407 | } 408 | else if (env.TEST_RESULTS_ROOT) { 409 | // Populate pub_repo from environment variable 'TEST_RESULTS_ROOT' 410 | println("PROP->${env.TEST_RESULTS_ROOT.replaceAll("'","")}") 411 | pub_repo = env.TEST_RESULTS_ROOT.replaceAll(quotes,"") 412 | println("Variable 'pub_repo' populated by information from environment variable 'TEST_RESULTS_ROOT") 413 | } 414 | else { 415 | // throw exception if value for 'pub_repo' could not be found. 416 | throw new Exception("Error: Value for 'pub_repo' not found in files 'setup.cfg' of 'pyproject.toml' or in environment variable 'TEST_RESULTS_ROOT'") 417 | } 418 | if (jobconfig.publish_env_on_success_only) { 419 | if (!test_info.problems) { 420 | pushToArtifactory("conda_python_*", pub_repo) 421 | pushToArtifactory("reqs_*", pub_repo) 422 | } 423 | } else { 424 | pushToArtifactory("conda_python_*", pub_repo) 425 | pushToArtifactory("reqs_*", pub_repo) 426 | } 427 | 428 | } // end dir(... 429 | } 430 | } 431 | 432 | 433 | // If a non-JUnit format .xml file exists in the 434 | // root of the workspace, the xunit report 435 | // ingestion will fail. 
//
// @param config BuildConfig object
def processTestReport(config) {
    def config_name = config.name
    // Non-zero when no .xml files are present in the workspace root.
    report_exists = sh(script: "find *.xml", returnStatus: true)
    def threshold_summary = "failedUnstableThresh: ${config.failedUnstableThresh}\n" +
                            "failedFailureThresh: ${config.failedFailureThresh}\n" +
                            "skippedUnstableThresh: ${config.skippedUnstableThresh}\n" +
                            "skippedFailureThresh: ${config.skippedFailureThresh}"
    println(threshold_summary)

    // Process the XML results files to include the build config name as a prefix
    // on each test name to make it more obvious from where each result originates.
    if (report_exists == 0) {
        // get all .xml files in root
        // FIX: place the global -maxdepth option before the -name test;
        // GNU find warns (and some finds error) when it follows a test.
        repfiles = sh(script:"find \$(pwd) -maxdepth 1 -name '*.xml'", returnStdout: true).split("\n")
        for (String repfile : repfiles) {
            // Work on copies so the original reports remain untouched.
            command = "cp '${repfile}' '${repfile}.modified'"
            sh(script:command)
        }
        // Prefix every test name with "[<config name>] " so report rows are
        // attributable to their originating build configuration.
        sh(script: "sed -i 's/ name=\"/ name=\"[${config.name}] /g' *.xml.modified")
        xunit(
            thresholds: [
                skipped(unstableThreshold: "${config.skippedUnstableThresh}"),
                skipped(failureThreshold: "${config.skippedFailureThresh}"),
                failed(unstableThreshold: "${config.failedUnstableThresh}"),
                failed(failureThreshold: "${config.failedFailureThresh}")],
            tools: [JUnit(pattern: '*.xml.modified')])
    } else {
        println("No .xml files found in workspace. Test report ingestion skipped.")
    }
    // TODO: Define results file name centrally and reference here.
    if (fileExists('results.xml')) {
        // Copy test report to a name unique to this build configuration.
        sh("cp 'results.xml' 'results.${config.name}.xml'")
        def stashname = "${config.name}.results"
        stash includes: "results.${config.name}.xml", name: stashname, useDefaultExcludes: false
    }
}


// Define actions executed in the 'Artifactory' stage.
// Collect artifacts and push them to the artifactory server.
//
// @param config BuildConfig object
def stageArtifactory(config) {
    stage("Artifactory (${config.name})") {
        def buildInfo = Artifactory.newBuildInfo()

        buildInfo.env.capture = true
        buildInfo.env.collect()
        def server

        for (artifact in config.test_configs) {
            server = Artifactory.server artifact.server_id

            // Construct absolute path to data
            def path = FilenameUtils.getFullPath(
                "${env.WORKSPACE}/${artifact.root}"
            )

            // Record listing of all files starting at ${path}
            // (Native Java and Groovy approaches will not
            // work here)
            sh(script: "find ${path} -type f",
               returnStdout: true).trim().tokenize('\n').each {

                // Semi-wildcard matching of JSON input files
                // ex:
                //   it = "test_1234_result.json"
                //   artifact.match_prefix = "(.*)_result"
                //
                //   pattern becomes: (.*)_result(.*)\\.json
                if (it.matches(
                        artifact.match_prefix + '(.*)\\.json')) {
                    def basename = FilenameUtils.getBaseName(it)
                    def data = readFile(it)

                    // Store JSON in a logical map
                    // i.e. ["basename": [data]]
                    artifact.insert(basename, data)
                }
            } // end find.each

            // Submit each request to the Artifactory server
            artifact.data.each { blob ->
                def bi_temp = server.upload spec: blob.value

                // Define retention scheme
                // Defaults: see DataConfig.groovy
                bi_temp.retention \
                    maxBuilds: artifact.keep_builds, \
                    maxDays: artifact.keep_days, \
                    deleteBuildArtifacts: !artifact.keep_data

                buildInfo.append bi_temp
            }

        } // end for-loop

        // NOTE(review): `server` remains null when config.test_configs is
        // empty; the only caller (buildAndTest) guards on
        // test_configs.size() > 0 — keep that invariant.
        server.publishBuildInfo buildInfo

    } // end stage Artifactory
}


// Like the Setup stage, this runs on the master node and allows for
// aggregation and analysis of results produced in the build configurations
// processed prior.
//
// @param jobconfig JobConfig object holding parameters that influence the
//                  behavior of the entire Jenkins job.
def stagePostBuild(jobconfig, buildconfigs) {
    node('master') {
        stage("Post-build") {
            for (config in buildconfigs) {
                // Stashes are best-effort: a config may not have produced
                // a conda dump or a pip requirements snapshot.
                try {
                    unstash "conda_python_${config.name}"
                } catch(Exception ex) {
                    println("No conda env dump stash available for ${config.name}")
                }
                try {
                    unstash "reqs_${config.name}"
                } catch(Exception ex) {
                    println("No pip requirements stash available for ${config.name}")
                }
            }
            def test_info = parseTestReports(buildconfigs)
            if (jobconfig.post_test_summary) {
                testSummaryNotify(jobconfig, buildconfigs, test_info)
            }
            publishCondaEnv(jobconfig, test_info)
            println("Post-build stage completed.")
        } //end stage
    } //end node
}


// Unstash the source tree stashed in the pre-build stage.
// In a shell environment defined by the variables held in the
// config.runtime list, run the build_cmds items in sequence.
// Then do the same for any test_cmds items present.
// If any test_configs were defined, run the Artifactory
// interaction steps for those.
// Then, handle test report ingestion and stashing.
//
// @param config BuildConfig object
def buildAndTest(config) {
    def retval = 0
    withEnv(config.runtime) {
        unstash "source_tree"
        dir('clone') {
            processReqsFiles(config)
            stage("Build (${config.name})") {
                for (cmd in config.build_cmds) {
                    sh(script: cmd)
                }
            }
            stage("Test (${config.name})") {
                if (config.test_cmds.size() > 0) {
                    try {
                        stage("Test (${config.name})") {
                            for (cmd in config.test_cmds) {
                                // Ignore status code from all commands in
                                // test_cmds so Jenkins will always make it
                                // to the post-build stage.
                                // This accommodates tools like pytest returning
                                // !0 codes when a test fails which would
                                // abort the job too early.
                                retval = sh(script: "${cmd}", returnStatus: true)
                                if (cmd.startsWith("pytest") && pytestSupportsExitCodes() && retval >= pytestVars.EXIT_INTERNAL_ERROR) {
                                    // pytest signalled an internal/usage error,
                                    // not merely failing tests: fail the build.
                                    currentBuild.result = 'FAILURE'
                                } else if (retval != 0) {
                                    currentBuild.result = 'UNSTABLE'
                                }
                            }
                        }
                    }
                    finally {
                        // Perform Artifactory upload if required
                        if (config.test_configs.size() > 0) {

                            stageArtifactory(config)

                        } // end test_configs check

                        processTestReport(config)

                    } // end test_cmds finally clause
                } // end if(config.test_cmds...)
            } // end stage("Test

            // If conda is present, dump the conda environment definition to a file.
            def conda_exe = ''
            def local_conda = "${env.WORKSPACE}/miniforge/bin/conda"

            system_conda_present = sh(script:"which conda", returnStatus:true)
            if (system_conda_present == 0) {
                conda_exe = sh(script:"which conda", returnStdout:true).trim()
            } else if (fileExists(local_conda)) {
                conda_exe = local_conda
            }

            pip_exe = sh(script:"which pip", returnStdout:true).trim()
            if (pip_exe != '') {
                // - Extract all git dependency spec lines from all requirements files, save them in a list.
                // - Generate pip freeze list.
                // - Replace all VCS dependencies in pip freeze list with the full git+http dependency
                //   specs collected earlier.
                //
                // TODO:
                // - Generate conda export file.
                // - Replace all VCS dependencies in export file with the full git+http dependency
                //   specs collected earlier.
                def vcs_specs = []
                for (rfile in config.pip_reqs_files) {
                    rflines = readFile(rfile).trim().tokenize('\n')
                    for (line in rflines) {
                        if (line.replace(' ', '').contains('@git+')) {
                            vcs_specs.add(line)
                            println("vcs spec line: ${line}")
                        }
                    }
                }

                def output_reqs = "reqs_${config.name}.txt"
                freezelist = sh(script: "${pip_exe} freeze", returnStdout:true).trim().tokenize('\n')
                def freeze_data = ''
                def modline = ''
                for (line in freezelist) {
                    if (line.contains('==')) {
                        def fpkg = line.tokenize('==')[0].trim()
                        for (vcs_spec in vcs_specs) {
                            def vcspkg = vcs_spec.tokenize('@')[0].trim()
                            modline = ''
                            if (fpkg == vcspkg) {
                                modline = vcs_spec
                                break
                            }
                        }
                        if (modline != '') {
                            freeze_data = "${freeze_data}${modline}\n"
                        } else {
                            freeze_data = "${freeze_data}${line}\n"
                        }
                    } else if (line.contains('-e git+')) { // Editable install separate from the VCS installs.
                        // Convert "-e git+<url>#egg=<name>" into "<name> @ git+<url>".
                        def nm = line.tokenize('=')[1]
                        def convert = line.replace('-e ', "${nm} @ ")
                        // FIX: only strip the "#egg=..." suffix when a '#' is
                        // actually present; indexOf() == -1 would otherwise
                        // silently drop the last character via convert[0..-2].
                        def hashpos = convert.indexOf('#')
                        if (hashpos != -1) {
                            convert = convert[0..hashpos-1]
                        }
                        freeze_data = "${freeze_data}${convert}\n"
                    } else { // Pass through all other lines unmodified.
                        freeze_data = "${freeze_data}${line}\n"
                    }
                }
                writeFile(file: output_reqs, text: freeze_data)
                // Stash requirements file for use on master node.
                stash includes: '**/reqs_*.txt',
                      name: "reqs_${config.name}",
                      useDefaultExcludes: false

            } else {
                println('"pip" not found. Unable to generate "freeze" environment snapshot.')
            }

            if (conda_exe != '') {
                // 'def' _required_ here to prevent use of values from one build
                // config leaking into others.
                def dump_name = "conda_python_${config.name}.txt"
                println("About to dump baseline python environment: ${dump_name}")
                sh(script: "${conda_exe} list --explicit > '${dump_name}'")

                // Insert the version of conda used into explicit spec file.
                def condaver = sh(script: "${conda_exe} --version", returnStdout: true).trim()
                condaver = condaver.tokenize()[1]
                sh("sed -i 's/@EXPLICIT/# conda version: ${condaver}\\n@EXPLICIT/' ${dump_name}")

                // Stash spec file for use on master node.
                stash includes: '**/conda_python*',
                      name: "conda_python_${config.name}",
                      useDefaultExcludes: false
            }

        } // end dir('clone')   (FIX: comments on these closers were swapped)
    } // end withEnv
}


// If conda packages were specified, create an environment containing
// them and then 'activate' it by setting key environment variables that
// influence conda's behavior. If a specific python version is
// desired, it must be specified as a package, i.e. 'python=3.9'
// in the list config.conda_packages.
//
// @param config BuildConfig object
// @param index int - unique index of BuildConfig passed in as config.
//
// @return Modified config
def processCondaPkgs(config, index) {
    def conda_exe = null
    def conda_inst_dir = null
    println("processCondaPkgs")
    // Guard clause: nothing to do when no conda packages were requested.
    if (config.conda_packages.size() <= 0) {
        return config
    }
    // Test for presence of conda. If not available, install it in
    // a prefix unique to this build configuration.
    if (!condaPresent()) {
        println('Conda not found. Installing.')
        conda_inst_dir = "${env.WORKSPACE}/miniforge"
        println("conda_inst_dir = ${conda_inst_dir}")
        installConda(config.conda_ver, conda_inst_dir)
        conda_exe = "${conda_inst_dir}/bin/conda"
        println("conda_exe = ${conda_exe}")
    } else {
        conda_exe = sh(script: "which conda", returnStdout: true).trim()
        println("Found conda exe at ${conda_exe}.")
    }
    def conda_root = conda_exe.replace("/bin/conda", "").trim()
    def env_name = "tmp_env${index}"
    def conda_prefix = "${conda_root}/envs/${env_name}".trim()
    // Quote each requested package spec for the shell.
    def packages = ""
    config.conda_packages.each { pkg ->
        packages = "${packages} '${pkg}'"
    }
    // Override removes the implicit 'defaults' channel from the channels
    // to be used. The conda_channels list is then used verbatim (in
    // priority order) by conda.
    def override = (config.conda_override_channels.toString() == 'true') ? "--override-channels" : ""
    def chans = ""
    config.conda_channels.each { chan ->
        chans = "${chans} -c ${chan}"
    }
    sh(script: "${conda_exe} create -q -y -n ${env_name} ${override} ${chans} ${packages}")
    // Configure job to use this conda environment by exporting the same
    // variables that 'conda activate' would set.
    config.env_vars.add(0, "CONDA_SHLVL=1")
    config.env_vars.add(0, "CONDA_PROMPT_MODIFIER=${env_name}")
    config.env_vars.add(0, "CONDA_EXE=${conda_exe}")
    config.env_vars.add(0, "CONDA_PREFIX=${conda_prefix}")
    config.env_vars.add(0, "CONDA_PYTHON_EXE=${conda_prefix}/bin/python")
    config.env_vars.add(0, "CONDA_DEFAULT_ENV=${env_name}")
    // Prepend the PATH var adjustment to the list that gets processed below.
    def conda_path = "PATH=${conda_prefix}/bin:${conda_root}/bin:$PATH"
    config.env_vars.add(0, conda_path)
    return config
}


// If one or more pip requirements files were specified, process them to
// add the packages to the available python environment.
//
// @param config BuildConfig object
//
def processReqsFiles(config) {
    // Install each requirements file in order; --src keeps any editable
    // checkouts outside the clone directory.
    config.pip_reqs_files.each { reqf ->
        sh(script: "pip install -r ${reqf} --src=../src")
    }
}


// Expand each environment variable in the config object's env_vars list
// using a shell invocation to perform the substitutions.
//
// @param config BuildConfig object
//
// @return Modified config
def expandEnvVars(config) {
    // Expand environment variable specifications by using the shell
    // to dereference any var references and then render the entire
    // value as a canonical path.

    // Override the HOME dir to be the job workspace.
    config.env_vars.add("HOME=${env.WORKSPACE}")

    for (var in config.env_vars) {
        // Process each var in an environment defined by all the prior vars.
        withEnv(config.runtime) {
            if (!var || !var.contains("=")) {
                throw new Exception("Invalid environment variable declaration (missing delimiter '='): '${var}'")
            }
            def parts = var.split("=", 2)
            def varName = parts[0].trim()
            def varValue = parts[1].trim()
            // If the value contains var references, let the shell expand them.
            def expansion = varValue
            if (varValue.contains("\$")) {
                expansion = sh(script: "echo \"${varValue}\"", returnStdout: true)
            }

            // Change values of '.' and './' to the node's WORKSPACE.
            // Replace a leading './' with the node's WORKSPACE.
            if (expansion == '.' || expansion == './') {
                expansion = env.WORKSPACE
            } else if (expansion.size() > 2 && expansion[0..1] == './') {
                expansion = "${env.WORKSPACE}/${expansion[2..-1]}"
            }

            // Replace all ':.' combinations with the node's WORKSPACE.
            expansion = expansion.replaceAll(':\\.', ":${env.WORKSPACE}")

            // Convert var value to canonical based on a WORKSPACE base directory.
            // NOTE(review): new File(...) runs on the controller JVM, not the
            // agent — presumably only lexical canonicalization is wanted here;
            // verify.
            if (expansion.contains('..')) {
                expansion = new File(expansion).getCanonicalPath()
            }
            expansion = expansion.trim()
            config.runtime.add("${varName}=${expansion}")
        } // end withEnv
    }
    return config
}


// Test for GStrings (double quoted). These perform string interpolation
// immediately and may not do what the user intends to do when defining
// environment variables to use in the build. Disallow them here.
//
// @param config BuildConfig object
def abortOnGstrings(config) {
    config.env_vars.each { evar ->
        if (evar.getClass() == org.codehaus.groovy.runtime.GStringImpl) {
            msg = "Immediate interpolation of variables in the 'env_vars'" +
                  " list is not supported and will probably not do what" +
                  " you expect. 
Please change the double quotes (\") to " +
                  "single quotes (') in each value of the 'env_vars' list."
            println(msg)
            error('Abort the build.')
        }
    }
}


// Run tasks defined for the build nodes in sequential fashion.
//
// @param tasks Map containing groovy code to execute on build nodes.
def sequentialTasks(tasks) {
    // Run tasks sequentially. Any failure halts the sequence.
    def iter = 0
    for (task in tasks) {
        // Wrap each single task in a one-entry map so the same parallel()
        // plumbing (and stage rendering) is reused for serial execution.
        def localtask = [:]
        localtask[task.key] = task.value
        stage("Serial-${iter}") {
            parallel(localtask)
        }
        iter++
    }
}


// Execute build/test task(s) based on passed-in configuration(s).
// Each task is defined by a BuildConfig object.
// A list of such objects is iterated over to process all configurations.
//
// Optionally accept a jobConfig object as part of the incoming list.
// Test for type of list object and parse attributes accordingly.
// @param configs list of BuildConfig (and JobConfig) objects
// @param concurrent boolean
//        whether or not to run all build
//        configurations in parallel. The default is
//        true when no value is provided.
def run(configs, concurrent = true) {

    // Map to hold code block definitions provided in the loop below for
    // passing to the build nodes.
    def tasks = [:]

    // Create JobConfig with default values.
    def jobconfig = new JobConfig()

    def buildconfigs = []

    // Separate jobconfig from buildconfig(s).
    // FIX: was `configs.eachWithIndex { config -> ... }` — eachWithIndex
    // supplies (element, index) to the closure and the index was never
    // used, so iterate with each.
    configs.each { config ->

        // FIX: declare these with def so they are local, not leaked into
        // the script binding shared by parallel executions.
        def dowMap = ["sun":1, "mon":2, "tue":3, "wed":4, "thu":5, "fri":6, "sat":7]
        def date = new Date()
        Calendar c = Calendar.getInstance()
        c.setTime(date)
        int dayOfWeek = c.get(Calendar.DAY_OF_WEEK)


        // Extract a JobConfig object if one is found
        if (config.getClass() == JobConfig) {
            jobconfig = config  // TODO: Try clone here to make a new instance
            return  // effectively a 'continue' from within a closure.
        }

        def days = []
        for (day in config.run_on_days) {
            days.add(dowMap[day.toLowerCase()])
        }

        // Skip any BuildConfig with a day-of-week request that does not
        // match today.  (FIX: comment previously said "JobConfig".)
        if (!(dayOfWeek in days)) {
            println("Skipping build of [${config.name}] due to 'run_on_days' stipulation.")
            return
        } else {
            buildconfigs.add(config)
        }
    }

    // Loop over config objects passed in handling each accordingly.
    buildconfigs.eachWithIndex { config, index ->

        // Make any requested credentials available to all build configs
        // in this job via environment variables.
        if (jobconfig.credentials != null) {
            jobconfig.credentials.each { cred ->
                // A bare string is both the credential ID and the env var name.
                if (cred.getClass() == java.lang.String) {
                    withCredentials([string(credentialsId: cred, variable: 'cred_val')]) {
                        config.env_vars.add("${cred}=${cred_val}".toString())
                    }
                }
                // A [credentialsId, varName] pair maps a credential onto a
                // custom environment variable name.
                if (cred.getClass() == java.util.ArrayList) {
                    withCredentials([string(credentialsId: cred[0], variable: 'cred_val')]) {
                        config.env_vars.add("${cred[1]}=${cred_val}".toString())
                    }
                }

            } //end .each
        }


        def BuildConfig myconfig = new BuildConfig() // MUST be inside eachWith loop.
        myconfig = SerializationUtils.clone(config)

        // Test for problematic string interpolations requested in
        // environment variable definitions.
        abortOnGstrings(config)

        // For containerized CI builds, code defined within 'tasks' is eventually executed
        // on a separate node. Parallel builds on the RT system each get assigned a new
        // workspace directory by Jenkins. i.e. workspace, workspace@2, etc.
        // 'tasks' is a java.util.LinkedHashMap, which preserves insertion order.
        tasks["${myconfig.nodetype}/${myconfig.name}"] = {
            node(myconfig.nodetype) {
                deleteDir()
                myconfig = processCondaPkgs(myconfig, index)
                myconfig = expandEnvVars(myconfig)
                for (var in myconfig.env_vars_raw) {
                    myconfig.runtime.add(var)
                }
                buildAndTest(myconfig)
            } // end node
        }

    } //end closure buildconfigs.eachWithIndex

    if (concurrent == true) {
        stage("Matrix") {
            parallel(tasks)
        }
    } else {
        sequentialTasks(tasks)
    }

    stagePostBuild(jobconfig, buildconfigs)
}


// Condense version triplet and replace version specifier(s) with human-readable text
//
// @param String s string containing version specifiers
// @return String string with converted version specifiers
String convert_specifiers(String s) {
    String result = s
    // Order matters: '~=' must be handled before the bare '=' replacement.
    result = result.replaceAll("\\.", "")  // No period
                   .replaceAll(",", "")    // No comma
                   .replaceAll("<", "L")   // Less than
                   .replaceAll(">", "G")   // Greater than
                   .replaceAll("~=", "C")  // Compatible (GE x.y && L x.*)
                   .replaceAll("=", "E")   // Equal to (=, E | ==, EE)
                   .replaceAll("\\!", "N") // Not equal to

    return result
}


// Convenience function that performs a deep copy on the supplied object.
//
// @param obj Java/Groovy object to copy
//
// @return Deep copy of obj.
def copy(obj) {
    // Delegate to commons-lang serialization for a full deep copy.
    return SerializationUtils.clone(obj)
}
--------------------------------------------------------------------------------