├── vars ├── awx.txt ├── faas.txt ├── goss.txt ├── helm.txt ├── packer.txt ├── puppet.txt ├── terraform.txt ├── awx.groovy ├── goss.groovy ├── faas.groovy ├── puppet.groovy ├── packer.groovy ├── helm.groovy └── terraform.groovy ├── tests ├── terraform │ ├── main.tf │ └── Jenkinsfile ├── packer │ ├── docker.pkr.hcl │ ├── docker.pkr.json │ └── Jenkinsfile ├── goss │ ├── goss.yaml │ └── Jenkinsfile ├── faas │ └── Jenkinsfile ├── lib │ └── Jenkinsfile ├── puppet │ └── Jenkinsfile ├── awx │ └── Jenkinsfile └── helm │ └── Jenkinsfile ├── .groovylintrc.json ├── src └── devops │ └── common │ ├── utilsTest.groovy │ ├── hcl.groovy │ ├── restTest.groovy │ ├── rest.groovy │ ├── helpers.groovy │ └── utils.groovy ├── LICENSE.md ├── .github └── workflows │ └── ci.yaml ├── .circleci └── config.yml ├── docs ├── Puppet.md ├── Goss.md ├── AWX.md ├── Packer.md ├── FaaS.md ├── Helm.md └── Terraform.md ├── README.md └── CHANGELOG.md /vars/awx.txt: -------------------------------------------------------------------------------- 1 | ../docs/AWX.md -------------------------------------------------------------------------------- /vars/faas.txt: -------------------------------------------------------------------------------- 1 | ../docs/FaaS.md -------------------------------------------------------------------------------- /vars/goss.txt: -------------------------------------------------------------------------------- 1 | ../docs/Goss.md -------------------------------------------------------------------------------- /vars/helm.txt: -------------------------------------------------------------------------------- 1 | ../docs/Helm.md -------------------------------------------------------------------------------- /vars/packer.txt: -------------------------------------------------------------------------------- 1 | ../docs/Packer.md -------------------------------------------------------------------------------- /vars/puppet.txt: -------------------------------------------------------------------------------- 1 | ../docs/Puppet.md -------------------------------------------------------------------------------- /vars/terraform.txt: -------------------------------------------------------------------------------- 1 | ../docs/Terraform.md -------------------------------------------------------------------------------- /tests/terraform/main.tf: -------------------------------------------------------------------------------- 1 | resource "local_file" "file" { 2 | content = "hello world" 3 | filename = "${path.root}/foo" 4 | } 5 | -------------------------------------------------------------------------------- /tests/packer/docker.pkr.hcl: -------------------------------------------------------------------------------- 1 | source "docker" "example" { 2 | image = "centos:7" 3 | } 4 | 5 | build { 6 | sources = ["source.docker.example"] 7 | } 8 | -------------------------------------------------------------------------------- /tests/goss/goss.yaml: -------------------------------------------------------------------------------- 1 | file: 2 | /etc: 3 | exists: true 4 | mode: '0755' 5 | size: 4096 6 | owner: root 7 | group: root 8 | filetype: directory 9 | -------------------------------------------------------------------------------- /.groovylintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "recommended", 3 | "rules": { 4 | "formatting.Indentation": { 5 | "spacesPerIndentLevel": 2 6 | }, 7 | "formatting.LineLength": { 8 | "length": 240 9 | } 10 | } 11 | } 
-------------------------------------------------------------------------------- /tests/packer/docker.pkr.json: -------------------------------------------------------------------------------- 1 | { 2 | "builders": [{ 3 | "type": "docker", 4 | "image": "centos:7", 5 | "commit": "true" 6 | }], 7 | "provisioners": [ 8 | { 9 | "type": "shell", 10 | "inline": [ 11 | "echo hello" 12 | ] 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /src/devops/common/utilsTest.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/utilsTest.groovy 2 | package devops.common 3 | 4 | import org.junit.Test 5 | 6 | class utilsTest { 7 | 8 | @Test 9 | void testDefaultInput() { 10 | final String override = new utils().defaultInput('foo', 'bar') 11 | 12 | assert override == 'foo' 13 | 14 | final String backup = new utils().defaultInput(null, 'bar') 15 | 16 | assert backup == 'bar' 17 | } 18 | 19 | @Test 20 | void testMapToJSON() { 21 | final String json = new utils().mapToJSON(['foo':'bar', 'bar':'baz']) // mapToJSON is an instance method of utils and expects a Map 22 | 23 | assert json 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /tests/faas/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'openfaas/faas-cli:0.12.14' } } 12 | 13 | stages { 14 | stage('Validate Template') { 15 | steps { 16 | sh 'curl -L https://raw.githubusercontent.com/openfaas/faas-cli/master/samples.yml -o /tmp/faas.yaml' 17 | 18 | script { 19 | faas.validateTemplate('/tmp/faas.yaml') 20 | } 21 | } 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/devops/common/hcl.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/hcl.groovy 2 | package devops.common; 3 | 4 | // imports 5 | import jenkins.model.Jenkins 6 | import com.cloudbees.groovy.cps.NonCPS 7 | @Grab('com.bertramlabs.plugins:hcl4j:0.9.1') 8 | import com.bertramlabs.plugins.hcl4j.HCLParser 9 | 10 | // wrapper method for returning a map from a hcl file 11 | @NonCPS 12 | Map hclToMap(String filePath) { 13 | // load the file from the jenkins master or the build agent/node 14 | File file = env['NODE_NAME'] == 'master' ?
new File(filePath) : new FilePath(Jenkins.getInstance().getComputer(env['NODE_NAME']).getChannel(), filePath); 15 | 16 | // verify file exists 17 | if (!(file.exists())) { 18 | print "File does not exist at ${filePath}" 19 | throw new FileNotFoundException("HCL file does not exist") 20 | } 21 | 22 | // return map from parsed hcl-formatted string 23 | return new HCLParser().parse(file, 'UTF-8'); 24 | } 25 | -------------------------------------------------------------------------------- /src/devops/common/restTest.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/restTest.groovy 2 | package devops.common 3 | 4 | import static groovy.test.GroovyAssert.shouldFail 5 | import org.junit.Test 6 | 7 | class restTest { 8 | 9 | @Test 10 | void testGet() { 11 | final Map response = new rest().request('https://www.google.com') 12 | 13 | assert response 14 | } 15 | 16 | @Test 17 | void testPost() { 18 | final Map response = new rest().request('https://www.google.com', HTTPMethod.POST) // HTTPMethod enum is declared alongside request() in rest.groovy 19 | 20 | assert response 21 | } 22 | 23 | @Test 24 | void testPut() { 25 | final Map response = new rest().request('https://www.google.com', HTTPMethod.PUT) 26 | 27 | assert response 28 | } 29 | 30 | @Test 31 | void testError() { 32 | final Throwable exception = shouldFail { 33 | new rest().request('https://www.google.com', null) // a null method falls through to the error branch in request() 34 | } 35 | 36 | assert exception.message == "Invalid REST API interaction method 'null' specified." 37 | } 38 | 39 | } 40 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Matt Schuchard 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 | -------------------------------------------------------------------------------- /tests/lib/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent any 12 | 13 | stages { 14 | stage('Test Rest') { 15 | steps { 16 | script { 17 | testRest.get(url: 'http://www.google.com') 18 | testRest.post( 19 | url: 'url', 20 | headers: [key: value], 21 | body: [key: value] 22 | ) 23 | testRest.put( 24 | url: 'url', 25 | headers: [key: value], 26 | body: [key: value] 27 | ) 28 | testRest.error( 29 | url: 'http://localhost:8080', 30 | headers: [key: 'value'], 31 | body: [key: 'value'] 32 | ) 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/devops/common/rest.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/rest.groovy 2 | package devops.common; 3 | 4 | // imports 5 | @Grab('org.codehaus.groovy.modules.http-builder:http-builder:0.7.2') 6 | import groovyx.net.http.RESTClient 7 | 8 | // http method enum 9 | enum HTTPMethod{ 10 | GET, 11 | POST, 12 | PUT, 13 | DELETE, 14 | } 15 | 16 | // defines a method for interacting with rest apis 17 | Map request(String url, HTTPMethod method = GET, Map body = [:], Map headers = [:]) { 18 | // initialize client and expected status code 19 | def client = new RESTClient(url) 20 | int status = 200 21 | Map response 22 | 23 | // invoke helper request method depending upon interaction method 24 | switch(method) { 25 | case GET: 26 | response = client.get(headers: headers) 27 | break 28 | case POST: 29 | response = client.post(headers: headers, body: body) 30 | status = 201 31 | break 32 | case PUT: 33 | response = client.put(headers: headers, body: body) 34 | break 35 | case DELETE: 36 | response = client.delete(headers: headers) 37 | break 38 | default: 39 | throw new Exception("Invalid REST API interaction method '${method}' specified.") 40 | } 41 | 42 | // handle the response 43 | assert response.status == status : "Invalid response status code from the REST API: ${response.status}." 
44 | // return the data as a list instance with map interface 45 | return response['reader'] 46 | } 47 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: ci 3 | 4 | on: 5 | push: 6 | branches: [master] 7 | paths: 8 | - '**.groovy' 9 | pull_request: 10 | branches: [master] 11 | paths: 12 | - '**.groovy' 13 | 14 | jobs: 15 | lint: 16 | runs-on: ubuntu-latest 17 | container: nvuillam/npm-groovy-lint 18 | steps: 19 | - name: checkout 20 | uses: actions/checkout@v4 21 | - name: lint 22 | shell: bash 23 | run: npm-groovy-lint --failon error {src,vars} 24 | # test: 25 | # runs-on: ubuntu-latest 26 | # container: jenkins/jenkins:lts-slim 27 | # steps: 28 | # - name: checkout 29 | # uses: actions/checkout@v4 30 | # - name: install jenkins pipeline and pipeline utility steps plugins 31 | # run: jenkins-plugin-cli -p workflow-aggregator pipeline-utility-steps http_request 32 | # - name: start jenkins server 33 | # run: /opt/java/openjdk/bin/java -jar /usr/share/jenkins/jenkins.war & 34 | # - name: pause for jenkins startup and then install jenkins cli 35 | # run: sleep 10 && curl localhost:8080/jnlpJars/jenkins-cli.jar -o /var/jenkins_home/war/WEB-INF/lib/jenkins-cli.jar 36 | # - name: execute syntax check 37 | # # TODO: currently jenkins cli does not parse groovysh args correctly 38 | # run: /opt/java/openjdk/bin/java -jar /var/jenkins_home/war/WEB-INF/lib/jenkins-cli.jar -s http://localhost:8080 -auth admin:$(cat /var/jenkins_home/secrets/initialAdminPassword) 'groovysh -cp src:/usr/share/jenkins:/usr/share/jenkins/ref/plugins -q -e vars/*.groovy' 39 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2.1 3 | 4 | jobs: 5 | lint: 6 | working_directory: /tmp/project 7 | docker: 8 | - image: nvuillam/npm-groovy-lint 9 | resource_class: small 10 | steps: 11 | - checkout 12 | - run: 13 | name: lint 14 | command: npm-groovy-lint --failon error {src,vars} 15 | test: 16 | working_directory: /tmp/project 17 | docker: 18 | - image: jenkins/jenkins:lts-slim 19 | resource_class: small 20 | steps: 21 | - checkout 22 | - restore_cache: 23 | keys: 24 | - jenkins-devops-libs 25 | - run: 26 | name: install jenkins pipeline and pipeline utility steps plugins 27 | command: jenkins-plugin-cli -p workflow-aggregator pipeline-utility-steps http_request 28 | - save_cache: 29 | paths: 30 | - /usr/share/jenkins/ref/plugins 31 | key: jenkins-devops-libs 32 | - run: 33 | name: start jenkins server 34 | command: /opt/java/openjdk/bin/java -jar /usr/share/jenkins/jenkins.war & 35 | - run: 36 | name: pause for jenkins startup and then install jenkins cli 37 | command: sleep 10 && curl localhost:8080/jnlpJars/jenkins-cli.jar -o /var/jenkins_home/war/WEB-INF/lib/jenkins-cli.jar 38 | - run: 39 | # TODO: currently jenkins cli does not parse groovysh args correctly 40 | name: execute syntax check 41 | command: /opt/java/openjdk/bin/java -jar /var/jenkins_home/war/WEB-INF/lib/jenkins-cli.jar -s http://localhost:8080 -auth admin:$(cat /var/jenkins_home/secrets/initialAdminPassword) 'groovysh -cp src:/usr/share/jenkins:/usr/share/jenkins/ref/plugins -q -e vars/*.groovy' 42 | no_output_timeout: 45s 43 | 44 | workflows: 45 | execute_tests: 46 | jobs: 47 | - lint 48 | # - test 49 | 
-------------------------------------------------------------------------------- /src/devops/common/helpers.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/helpers.groovy 2 | package devops.common 3 | 4 | // imports 5 | import hudson.AbortException 6 | 7 | // generic tool execution 8 | void toolExec(String label, List cmd) { 9 | try { 10 | sh(label: label, script: cmd.join(' ')) 11 | } 12 | catch (AbortException error) { 13 | print "failure using ${label.toLowerCase()}" 14 | throw error 15 | } 16 | print "${label.toLowerCase()} was successful" 17 | } 18 | 19 | // hashi vars 20 | List varSubCmd(Map config) { 21 | List subCmd = [] 22 | 23 | // check for optional var inputs 24 | if (config.varFile) { 25 | assert fileExists(config.varFile) : "The var file ${config.varFile} does not exist!" 26 | 27 | subCmd.add("-var-file=${config.varFile}") 28 | } 29 | if (config.var) { 30 | assert (config.var in Map) : 'The var parameter must be a Map.' 31 | 32 | config.var.each { String var, String value -> 33 | // convert value to json if not string type 34 | if (value in List || value in Map) { 35 | value = writeJSON(json: value, returnText: true) 36 | } 37 | 38 | subCmd.addAll(['-var', "${var}=${value}"]) 39 | } 40 | } 41 | 42 | return subCmd 43 | } 44 | 45 | // yaml file validation 46 | Boolean validateYamlFile(String filePath, String description) { 47 | // ensure yaml file exists 48 | assert fileExists(filePath) : "${description} ${filePath} does not exist!" 49 | 50 | // check yaml syntax 51 | try { 52 | readYaml(file: filePath) 53 | } 54 | catch (Exception error) { 55 | print "${description} failed YAML and JSON validation." 56 | print error.getMessage() 57 | return false 58 | } 59 | 60 | print "${filePath} is valid YAML and/or JSON." 
61 | return true 62 | } 63 | -------------------------------------------------------------------------------- /tests/goss/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'aelsabbahy/goss:v0.3.16' } } 12 | 13 | stages { 14 | stage('Validate GoSSfile') { 15 | steps { 16 | sh 'curl -L https://raw.githubusercontent.com/mschuchard/jenkins-devops-libs/master/tests/goss/goss.yaml -o /tmp/goss.yaml' 17 | 18 | script { 19 | goss.validateGossfile('/tmp/goss.yaml') 20 | } 21 | } 22 | } 23 | // this may fail due to missing directive 24 | stage('Render GoSSfile') { 25 | steps { 26 | script { 27 | goss.render( 28 | debug: true, // optional print rendered golang template prior to gossfile 29 | gossfile: '/tmp/goss.yaml', // optional location of gossfile with included directive 30 | ) 31 | } 32 | } 33 | } 34 | stage('Validate Server') { 35 | steps { 36 | script { 37 | goss.validate(gossfile: '/tmp/goss.yaml') // optional location of gossfile 38 | } 39 | } 40 | } 41 | stage('Create Endpoint') { 42 | steps { 43 | script { 44 | goss.server( 45 | gossfile: '/tmp/goss.yaml', // optional location of gossfile 46 | format: 'documentation', // optional formatter to use for output 47 | ) 48 | } 49 | } 50 | } 51 | stage('Validate Docker Image') { 52 | // switch to dgoss agent here 53 | environment { 54 | GOSS_FILES_PATH = '/tmp/' 55 | GOSS_FILES_STRATEGY = 'mount' 56 | } 57 | steps { 58 | script { 59 | goss.validateDocker( 60 | flags: ['JAVA_OPTS="-Xmx1048m"'], // optional flags for container run 61 | image: 'alpine:latest' // docker image to run container from 62 | ) 63 | } 64 | } 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /tests/puppet/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // experimental library 2 | // for dynamic retrieval 3 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 4 | [$class: 'GitSCMSource', 5 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 6 | // if added in Jenkins global config shared libraries 7 | //@Library('jenkins-devops-libs')_ 8 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 9 | //library('jenkins-devops-libs')_ 10 | 11 | pipeline { 12 | agent any 13 | 14 | stages { 15 | stage('Tests') { 16 | parallel { 17 | stage('Token Generate') { 18 | steps { 19 | script { 20 | puppet.token { 21 | puppet.token( 22 | password: 'password', // password for the rbac token 23 | secure: false, // optional verify ssl connection 24 | server: 'puppet', // optional server hosting puppet server 25 | username: 'username' // username for the rbac token 26 | ) 27 | } 28 | } 29 | } 30 | } 31 | stage('Code Deploy') { 32 | steps { 33 | script { 34 | puppet.codeDeploy( 35 | environments: ['development', 'production'], // optional environments to deploy (default is to deploy all environments) 36 | servers: ['puppet'], // 
optional server hosting code manager 37 | tokenFile: '/var/lib/jenkins/.puppetlabs/token', // rbac token for deploying with code manager 38 | wait: true // optional wait for code manager to finish deployment 39 | ) 40 | } 41 | } 42 | } 43 | stage('Execute Task') { 44 | steps { 45 | script { 46 | puppet.task( 47 | params: "['action':'install', 'name':'httpd']", // optional input 48 | scope: ['localhost'], // scope for deployment 49 | task: 'package', // name of the task to execute 50 | tokenFile: '/var/lib/jenkins/.puppetlabs/token' // rbac token for executing tasks 51 | ) 52 | } 53 | } 54 | } 55 | } 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /tests/packer/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'hashicorp/packer:1.10' } } 12 | 13 | stages { 14 | stage('Init') { 15 | steps { 16 | sh 'curl -L https://raw.githubusercontent.com/mschuchard/jenkins-devops-libs/master/tests/packer/docker.pkr.json -o docker.pkr.json' 17 | sh 'curl -L https://raw.githubusercontent.com/mschuchard/jenkins-devops-libs/master/tests/packer/docker.pkr.hcl -o docker.pkr.hcl' 18 | 19 | script { 20 | packer.init(dir: '.') 21 | } 22 | } 23 | } 24 | stage('Parse') { 25 | steps { 26 | script { 27 | parsedMap = packer.parse('docker.pkr.hcl') 28 | } 29 | } 30 | } 31 | stage('Plugins') { 32 | steps { 33 | script { 34 | packer.plugins(command: 'installed') 35 | packer.plugins( 36 | command: 'required', 37 | dir: '.' 38 | ) 39 | packer.pluginsInstall(plugin: 'github.com/hashicorp/docker') 40 | packer.pluginsRemove(plugin: 'github.com/hashicorp/docker') 41 | } 42 | } 43 | } 44 | stage('Validate') { 45 | steps { 46 | script { 47 | packer.validate(template: 'docker.pkr.json') 48 | packer.validate(template: 'docker.pkr.hcl') 49 | } 50 | } 51 | } 52 | stage('Format') { 53 | steps { 54 | script { 55 | packer.fmt( 56 | check: true, 57 | diff: true, 58 | template: '.' 59 | ) 60 | } 61 | } 62 | } 63 | stage('Inspect') { 64 | steps { 65 | script { 66 | packer.inspect('docker.pkr.json') 67 | packer.inspect('docker.pkr.hcl') 68 | } 69 | } 70 | } 71 | stage('Build') { 72 | steps { 73 | script { 74 | packer.build(template: 'docker.pkr.json') 75 | packer.build(template: 'docker.pkr.hcl') 76 | } 77 | } 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /docs/Puppet.md: -------------------------------------------------------------------------------- 1 | # Puppet Enterprise 2 | 3 | Interacts with Puppet Enterprise endpoints. This library is considered experimental and users are encouraged to file issues when and where they are found. 4 | 5 | ### Dependencies 6 | 7 | - `http_request` plugin 8 | - Puppet Enterprise installation 9 | 10 | ### puppet.codeDeploy() 11 | Deploys code and data with the Puppet Enterprise Code Manager. If wait is set to `true`, then errors returned by Code Manager will be returned and cause the pipeline to fatally error. 
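Because a failed deployment then surfaces as a pipeline error, the call can be wrapped when a Code Manager failure should not abort the entire build. A minimal sketch (the stage layout and use of `catchError` are illustrative, not part of the library):

```groovy
stage('Deploy Puppet Code') {
  steps {
    script {
      // mark only this stage as failed if code manager reports errors, rather than aborting the whole pipeline
      catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
        puppet.codeDeploy(
          environments: ['production'],
          tokenFile: '/var/lib/jenkins/.puppetlabs/token',
          wait: true
        )
      }
    }
  }
}
```

The full parameter reference follows.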
12 | 13 | ```groovy 14 | puppet.codeDeploy( 15 | credentialsId: 'pe_token', // token bindings credentials id for rbac token; mutually exclusive with token 16 | environments: ['development', 'production'], // optional environments to deploy (default is to deploy all environments) 17 | port: 8170, // optional code manager api endpoint port 18 | servers: ['puppet'], // optional servers hosting code manager 19 | tokenFile: '/var/lib/jenkins/.puppetlabs/token', // rbac token file location for deploying with code manager; mutually exclusive with credentialsId 20 | wait: false // optional wait for code manager to finish deployment 21 | ) 22 | ``` 23 | 24 | ### puppet.task() 25 | Triggers the execution of a Puppet Enterprise task via the Puppet Enterprise Orchestrator. 26 | 27 | ```groovy 28 | puppet.task( 29 | credentialsId: 'pe_token', // token bindings credentials id for rbac token; mutually exclusive with token 30 | description: 'my task', // optional description of the job 31 | environment: 'production', // optional environment to execute the task on (default is production) 32 | noop: true, // optional execute task in noop (default is false) 33 | params: ['action':'install', 'name':'httpd'], // optional input parameters (default is empty) 34 | port: 8170, // optional orchestrator api endpoint port 35 | scope: ['node1.example.com', 'node2.example.com'], // scope for deployment (if string, will be passed as `node_group` or `application`; if array of strings, will be passed as `nodes` or `query`; internal logic attempts to correctly determine which) 36 | server: 'puppet', // optional server hosting puppet orchestrator 37 | task: 'package', // name of the task to execute 38 | tokenFile: '/var/lib/jenkins/.puppetlabs/token' // rbac token file location for deploying with code manager; mutually exclusive with credentialId 39 | ) 40 | ``` 41 | 42 | ### puppet.token() 43 | Generates a RBAC token for use with Puppet Enterprise endpoints, and saves it as a file in the default location (`~/.puppetlabs/token`). Recommended to use `withCredentials` bindings for `usernamePassword` in conjunction with this. 
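For example, the username and password can come from a `usernamePassword` binding instead of being hardcoded in the pipeline. A minimal sketch (the credentials id `pe-rbac-user` is hypothetical):

```groovy
withCredentials([usernamePassword(credentialsId: 'pe-rbac-user', usernameVariable: 'PE_USER', passwordVariable: 'PE_PASS')]) {
  // generate the rbac token with the bound username and password
  puppet.token(
    password: env.PE_PASS,
    server: 'puppet',
    username: env.PE_USER
  )
}
```

All supported parameters are listed below.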
44 | 45 | ```groovy 46 | puppet.token( 47 | password: 'password', // password for the rbac token 48 | path: '$HOME/.puppetlabs', // optional path to save rbac token to 49 | port: 4433, // optional puppet server api endpoint port 50 | secure: true, // optional verify ssl connection 51 | server: 'puppet', // optional server hosting puppet server 52 | username: 'username' // username for the rbac token 53 | ) 54 | ``` 55 | -------------------------------------------------------------------------------- /tests/terraform/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'hashicorp/terraform:1.10' } } 12 | 13 | stages { 14 | stage('Initialize') { 15 | steps { 16 | sh 'curl -L https://raw.githubusercontent.com/mschuchard/jenkins-devops-libs/master/tests/terraform/main.tf -o /tmp/main.tf' 17 | 18 | script { 19 | dir('/tmp') { 20 | terraform.init() 21 | } 22 | } 23 | } 24 | } 25 | stage('Parse') { 26 | steps { 27 | script { 28 | parsedMap = terraform.parse('/tmp/main.tf') 29 | } 30 | } 31 | } 32 | stage('Providers') { 33 | steps { 34 | script { 35 | terraform.providers('/tmp') 36 | } 37 | } 38 | } 39 | stage('Workspace') { 40 | steps { 41 | script { 42 | terraform.workspace( 43 | dir: '/tmp', 44 | workspace: 'default' 45 | ) 46 | } 47 | } 48 | } 49 | stage('Validate') { 50 | steps { 51 | script { 52 | terraform.validate( 53 | dir: '/tmp', 54 | json: true 55 | ) 56 | } 57 | } 58 | } 59 | stage('Fmt') { 60 | steps { 61 | script { 62 | terraform.fmt( 63 | dir: '/tmp', 64 | check: true, 65 | diff: true 66 | ) 67 | } 68 | } 69 | } 70 | stage('Plan') { 71 | steps { 72 | script { 73 | terraform.plan(dir: '/tmp') 74 | } 75 | } 76 | } 77 | stage('Graph') { 78 | steps { 79 | script { 80 | terraform.graph(plan: '/tmp/plan.tfplan') 81 | } 82 | } 83 | } 84 | stage('Apply') { 85 | steps { 86 | script { 87 | terraform.apply(configPath: '/tmp/plan.tfplan') 88 | } 89 | } 90 | } 91 | stage('Output') { 92 | steps { 93 | script { 94 | terraform.output(json: false) 95 | } 96 | } 97 | } 98 | stage('Refresh') { 99 | steps { 100 | script { 101 | terraform.refresh(dir: '/tmp') 102 | } 103 | } 104 | } 105 | stage('State') { 106 | steps { 107 | script { 108 | terraform.state( 109 | command: 'move', 110 | resources: ['local_file.file':'local_file.not_file'] 111 | ) 112 | } 113 | } 114 | } 115 | stage('Destroy') { 116 | steps { 117 | script { 118 | terraform.destroy(configPath: '/tmp') 119 | } 120 | } 121 | } 122 | stage('Test') { 123 | steps { 124 | script { 125 | print 'This will fail at runtime, but should succeed during compilation.' 
126 | terraform.test(dir: '/tmp') 127 | } 128 | } 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /tests/awx/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'quay.io/ansible/awx:23.9.0' } } 12 | 13 | environment { 14 | TOWER_OAUTH_TOKEN = '6E5SXhld7AMOhpRveZsLJQsfs9VS8U' 15 | TOWER_HOST = 'awx.svc.cluster.local' 16 | } 17 | 18 | stages { 19 | stage('Inventory Create') { 20 | steps { 21 | script { 22 | awx.inventoryCreate( 23 | description: 'an inventory', // optional description of this inventory 24 | hostFilter: 'myhosts*', // optional filter that will be applied to the hosts of this inventory 25 | name: 'my_inventory', // name of this inventory 26 | organization: 'my_org', // organization containing this inventory 27 | ) 28 | } 29 | } 30 | } 31 | stage('Inventory Modify') { 32 | steps { 33 | script { 34 | awx.inventoryModify( 35 | description: 'an inventory', // optional description of this inventory 36 | hostFilter: 'myotherhosts*', // optional filter that will be applied to the hosts of this inventory 37 | name: 'my_inventory', // name of this inventory 38 | organization: 'my_org', // organization containing this inventory 39 | ) 40 | } 41 | } 42 | } 43 | stage('Host Create') { 44 | steps { 45 | script { 46 | awx.hostCreate( 47 | description: 'my host', // optional description of the host 48 | enabled: true, // optional is host available and online for running jobs 49 | inventory: 'my_inventory', // ID of the associated inventory 50 | name: 'foo.bar.com', // name of the host 51 | variables: ['foo': 'bar', 'baz': 1], // optional host variables 52 | ) 53 | } 54 | } 55 | } 56 | stage('Host Delete') { 57 | steps { 58 | script { 59 | awx.hostDelete('foo.bar.com') 60 | } 61 | } 62 | } 63 | stage('Inventory Delete') { 64 | steps { 65 | script { 66 | awx.inventoryDelete('my_inventory') 67 | } 68 | } 69 | } 70 | stage('Project Update') { 71 | steps { 72 | script { 73 | awx.projectsUpdate(id: 3) 74 | } 75 | } 76 | } 77 | stage('Job Template Launch') { 78 | steps { 79 | script { 80 | awx.jobTemplateLaunch( 81 | extraVars: ['foo': 'bar', 'baz': 1], // optional extra variables 82 | id: 5, // job template id 83 | jobType: 'run', // optional job type (run or check) 84 | skipTags: ['skipper', 'to_skip'] // optional tags to skip 85 | ) 86 | } 87 | } 88 | } 89 | stage('Workflow Job Template Launch') { 90 | steps { 91 | script { 92 | awx.workflowJobTemplateLaunch( 93 | extraVars: ['foo': 'bar', 'baz': 1], // optional extra variables 94 | id: 2, // job template id 95 | ) 96 | } 97 | } 98 | } 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /tests/helm/Jenkinsfile: -------------------------------------------------------------------------------- 1 | // for dynamic retrieval 2 | library identifier: 'jenkins-devops-libs@master', retriever: modernSCM( 3 | [$class: 'GitSCMSource', 4 | remote: 
'https://github.com/mschuchard/jenkins-devops-libs.git']) 5 | // if added in Jenkins global config shared libraries 6 | //@Library('jenkins-devops-libs')_ 7 | // if added in Jenkins global config shared libraries and the github api plugin is bugging out and slamming github with requests 8 | //library('jenkins-devops-libs')_ 9 | 10 | pipeline { 11 | agent { docker { image 'alpine/helm:3.15.4' } } 12 | 13 | environment { KUBECONFIG = '/home/vagrant/.kube/config' } 14 | 15 | stages { 16 | stage('Plugin') { 17 | steps { 18 | script { 19 | helm.plugin( 20 | command: 'install', // plugin command; one of 'install', 'list', 'uninstall', or 'update' 21 | plugin: 'https://github.com/adamreese/helm-env' // targeted plugin (unless 'list' command) 22 | ) 23 | } 24 | } 25 | } 26 | stage('Repo') { 27 | steps { 28 | script { 29 | helm.repo( 30 | repo: 'stable', 31 | url: 'https://charts.helm.sh/stable' 32 | ) 33 | } 34 | } 35 | } 36 | stage('Show') { 37 | steps { 38 | script { 39 | helm.show( 40 | chart: 'stable/mariadb', // chart repository, local archive, directory, or url to display 41 | info: 'all', // info to display; one of 'all', 'chart', 'readme', or 'values' 42 | ) 43 | } 44 | } 45 | } 46 | stage('Lint') { 47 | steps { 48 | script { 49 | helm.lint(chart: 'stable/mariadb' // chart repository, local archive, directory, or url to install 50 | ) 51 | } 52 | } 53 | } 54 | stage('Verify') { 55 | steps { 56 | script { 57 | Boolean success = helm.verify(chartPath: '/tmp') 58 | } 59 | } 60 | } 61 | stage('Install') { 62 | steps { 63 | script { 64 | helm.install( 65 | chart: 'stable/mariadb', // chart repository, local archive, directory, or url to install 66 | name: 'happy-panda' // name for the installed release object 67 | ) 68 | } 69 | } 70 | } 71 | stage('Upgrade') { 72 | steps { 73 | script { 74 | helm.upgrade( 75 | chart: 'stable/mariadb', // chart repository, local archive, directory, or url to upgrade 76 | install: false, // optional install if release not already present 77 | name: 'happy-panda', // name of the upgraded release object 78 | ) 79 | } 80 | } 81 | } 82 | stage('Status') { 83 | steps { 84 | script { 85 | helm.status(name: 'happy-panda') 86 | } 87 | } 88 | } 89 | stage('Rollback') { 90 | steps { 91 | script { 92 | helm.rollback( 93 | name: 'happy-panda', // release object name to rollback 94 | version: '1' // version of release-object to rollback to 95 | ) 96 | } 97 | } 98 | } 99 | stage('Test') { 100 | steps { 101 | script { 102 | helm.test( 103 | chart: 'stable/mariadb', // chart repository, local archive, directory, or url to install 104 | kubectl: '/usr/bin/kubectl', // optional executable path for kubectl 105 | name: 'happy-panda' // name of a deployed release 106 | ) 107 | } 108 | } 109 | } 110 | stage('Uninstall') { 111 | steps { 112 | script { 113 | helm.uninstall(name: 'happy-panda') 114 | } 115 | } 116 | } 117 | stage('History') { 118 | steps { 119 | script { 120 | helm.history(name: 'happy-panda') 121 | } 122 | } 123 | } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jenkins DevOps Shared Libraries 2 | 3 | A collection of Jenkins Pipeline shared libraries for common "DevOps" software. Usage and dependencies for each can be found in the [documentation](docs). 4 | 5 | Unsure how to use these in your declarative syntax `Jenkinsfile`?
Check the declarative Jenkinsfile shared library [documentation](https://jenkins.io/doc/book/pipeline/shared-libraries/#using-libraries). 6 | 7 | Additionally, you can pare down the libraries available from this repo and then load those in yourself from your own git repo or otherwise. 8 | 9 | Note also that a library will not be developed to compete against a good existing plugin (e.g. Ansible), or a good existing library (e.g. Fabric8). These and any future libraries will exist to fill a gap in the existence of a good Jenkins Pipeline plugin or library for a software tool. 10 | 11 | ## Dependencies 12 | 13 | While each library enumerates its own dependencies in its related documentation, it should be noted that a low percentage of the various methods across the libraries also require the [Pipeline Utility Steps Plugin](https://www.jenkins.io/doc/pipeline/steps/pipeline-utility-steps/). Therefore, that plugin should generally be installed as a dependency of these libraries to ensure reliability. 14 | 15 | ## Retrieve and use with Disabled Sandbox 16 | 17 | Basically, if you have the GitHub Branch Source plugin installed, then you can [load a specific version](https://jenkins.io/doc/book/pipeline/shared-libraries/#library-versions) like: 18 | 19 | ```groovy 20 | @Library('github.com/mschuchard/jenkins-devops-libs@<version>')_ 21 | ``` 22 | 23 | If you do not have this plugin installed, or want more flexibility over the version used, then you can use or expand upon this example: 24 | 25 | ```groovy 26 | library( 27 | identifier: 'jenkins-devops-libs@<version>', 28 | retriever: modernSCM( 29 | [$class: 'GitSCMSource', 30 | remote: 'https://github.com/mschuchard/jenkins-devops-libs.git'] 31 | ) 32 | ) 33 | ``` 34 | 35 | Note that this latter example is also useful for circumventing GitHub API rate limit issues. 36 | 37 | ## Use with Enabled Sandbox 38 | 39 | Basically, you need to first [add the shared library](https://jenkins.io/doc/book/pipeline/shared-libraries/#global-shared-libraries) in the Jenkins global configuration. Then, you can either load the library's methods with: 40 | 41 | ```groovy 42 | @Library('jenkins-devops-libs@<version>')_ 43 | ``` 44 | 45 | or using the defaults with: 46 | 47 | ```groovy 48 | library('jenkins-devops-libs') 49 | ``` 50 | 51 | ## Supported 52 | - [AWX/Ansible Tower](docs/AWX.md) (beta) 53 | - [GoSS](docs/Goss.md) 54 | - [Helm](docs/Helm.md) 55 | - [OpenFaaS](docs/FaaS.md) 56 | - [Packer](docs/Packer.md) 57 | - [Puppet Enterprise](docs/Puppet.md) (beta) 58 | - [Terraform](docs/Terraform.md) 59 | 60 | ## 2.0.0 Breaking Changes 61 | Note the following breaking changes for new major version 2.0.0: 62 | 63 | - All publicly accessible global variable methods and their associated parameters have been converted to camelCase to adhere to standard Groovy convention. 64 | - The old DSL for Jenkins Pipeline global variable methods which accepted Closure type inputs is no longer supported. Usage will need to be updated to the new DSL with Map type inputs. 65 | - Global variable methods for software installation and configuration are removed. Users should migrate to build agents managed with software provisioning, configuration management, and/or containerization. 66 | 67 | ## Ports 68 | 69 | These libraries will eventually be ported to other pipeline software so that these pipeline platforms will also be enabled for the same tools supported here.
These include (in order of priority): 70 | 71 | - GitHub Actions 72 | - CircleCI 73 | - Concourse 74 | 75 | Note that currently Travis CI and GitLab CI do not support pipeline libraries in the same sense as Jenkins and the pipeline software listed above. 76 | 77 | ## Contributing 78 | Code should pass all acceptance tests. New features should involve new acceptance tests. 79 | 80 | Please consult the GitHub Project for the current development roadmap. 81 | -------------------------------------------------------------------------------- /docs/Goss.md: -------------------------------------------------------------------------------- 1 | # GoSS 2 | 3 | Interacts with GoSS. 4 | 5 | ### Dependencies 6 | 7 | - GoSS CLI binary executable >= 0.4.0 8 | - DGoSS CLI binary executable (`validateDocker`) 9 | 10 | ### goss.render() 11 | Renders a single valid GoSSfile from separated individual test files. This method will return the rendered GoSSfile as a String, and does not perform any further actions with the rendered content. 12 | 13 | ```groovy 14 | goss.render( 15 | bin: '/usr/bin/goss', // optional executable path for goss 16 | debug: false, // optional print rendered golang template prior to gossfile 17 | gossfile: 'goss.yaml', // optional location of gossfile with included directive 18 | package: null, // optional package type to use (apk, dpkg, pacman, rpm) 19 | vars: 'vars.yaml', // optional YAML or JSON vars file to use with gossfile 20 | varsInline: ['name':'value', 'name2':'value2'] // optional inline vars to use with gossfile (overwrites 'vars') 21 | ) 22 | ``` 23 | 24 | ### goss.server() 25 | Creates a persistent REST API endpoint with GoSS. 26 | 27 | ```groovy 28 | goss.server( 29 | bin: '/usr/bin/goss', // optional executable path for goss 30 | cache: '5s', // optional time to cache the results 31 | endpoint: '/healthz', // optional endpoint to expose 32 | format: 'rspecish', // optional formatter to use for output 33 | formatOpts: 'perfdata', // optional extra formatter options (perfdata, pretty, or verbose) 34 | gossfile: 'goss.yaml', // optional location of gossfile 35 | logLevel: 'info', // optional logging verbosity level; one of 'error', 'warn', 'info', 'debug', or 'trace' 36 | maxConcur: '50', // optional maximum number of tests to run concurrently 37 | package: null, // optional package type to use (apk, dpkg, pacman, rpm) 38 | port: '8080', // optional specified port to listen on 39 | vars: 'vars.yaml', // optional YAML or JSON vars file to use with gossfile 40 | varsInline: ['name':'value', 'name2':'value2'] // optional inline vars to use with gossfile (overwrites 'vars') 41 | ) 42 | ``` 43 | 44 | ### goss.validate() 45 | Locally executes a `gossfile` with GoSS. This method will return a `Boolean` type indicating whether the validation was successful (`true`) or not (`false`).
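The returned value can be used to control pipeline flow explicitly. A minimal sketch (the gossfile path and failure message are illustrative):

```groovy
script {
  Boolean valid = goss.validate(gossfile: '/tmp/goss.yaml')

  // fail the build with a clearer message when the server state does not match the gossfile
  if (!valid) {
    error('goss validation failed; see the log output above for the failing assertions')
  }
}
```

The full parameter reference follows.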
46 | 47 | ```groovy 48 | goss.validate( 49 | bin: '/usr/bin/goss', // optional executable path for goss 50 | format: 'rspecish', // optional formatter to use for output 51 | formatOpts: 'perfdata', // optional extra formatter options (perfdata, pretty, sort, or verbose) 52 | gossfile: 'goss.yaml', // optional location of gossfile 53 | logLevel: 'info', // optional logging verbosity level; one of 'error', 'warn', 'info', 'debug', or 'trace' 54 | maxConcur: '50', // optional maximum number of tests to run concurrently 55 | package: null, // optional package type to use (apk, dpkg, pacman, rpm) 56 | retryTimeout: '0s', // optional retry on failure so long as elapsed + `sleep` time is less than this value 57 | sleep: '1s', // optional time to sleep between retries (ignored unless `retryTimeout` also specified) 58 | vars: 'vars.yaml', // optional YAML or JSON vars file to use with gossfile 59 | varsInline: ['name':'value', 'name2':'value2'] // optional inline vars to use with gossfile (overwrites 'vars') 60 | ) 61 | ``` 62 | 63 | ### goss.validateDocker() 64 | Locally executes a `gossfile` in a Docker container with DGoSS. 65 | Note that dgoss [environment variables](https://github.com/aelsabbahy/goss/tree/master/extras/dgoss#environment-vars-and-defaults) should be set in the `environment` block of a `Jenkinsfile` and will not be provided as as part of the interface to this method. That is also the process for providing arguments to goss when running inside the dgoss wrapper, so goss arguments cannot be directly interfaced in this method. Also note that dgoss runs a container, but does not stop the running container, so you may want to wrap the code inside a `Image.withRun{}` block for safety. 66 | 67 | ```groovy 68 | goss.validateDocker( 69 | bin: '/usr/bin/dgoss', // optional executable path for dgoss 70 | flags: ['JENKINS_OPTS':'--httpPort=8080 --httpsPort=-1', 'JAVA_OPTS':'-Xmx1048m'], // optional flags for container run 71 | image: 'alpine:latest' // docker image to run container from 72 | ) 73 | ``` 74 | 75 | ### goss.validateGossfile(String gossfile) 76 | Validates `gossfile` syntax. This method will return a `Boolean` type indicating whether the validation was successful (`true`) or not (`false`). 77 | 78 | ```groovy 79 | goss.validateGossfile('gossfile.yaml') 80 | ``` 81 | -------------------------------------------------------------------------------- /docs/AWX.md: -------------------------------------------------------------------------------- 1 | # AWX/Ansible Tower 2 | 3 | Interacts with AWX/Ansible Tower endpoints via the AWX CLI. Note that you should set the environment variable `TOWER_OAUTH_TOKEN` and `TOWER_HOST` in your pipeline with `environment { TOWER_OAUTH_TOKEN = '6E5SXhld7AMOhpRveZsLJQsfs9VS8U' }` for targeting and authentication. Alternatively, you can use the Credentials Binding plugin for the token, and then wrap code within a `withCredentials` block as per normal. Also alternatively, you can use the plugin with the environment directive like `environment { TOWER_OAUTH_TOKEN = credentials('tower-oauth-token') }`. This library is considered experimental and users are encouraged to file issues when and where they are found. 4 | 5 | ### Dependencies 6 | 7 | - AWX CLI binary executable 8 | - AWX or Tower installation 9 | 10 | ### awx.hostCreate() 11 | 12 | Uses AWX to create a host in an inventory. 
13 | 14 | ```groovy 15 | awx.hostCreate( 16 | bin: '/usr/bin/awx', // optional path to awx executable 17 | description: 'my host', // optional description of the host 18 | enabled: true, // optional is host available and online for running jobs 19 | instanceId: 'foo.bar.com', // host ID in the remote inventory source 20 | inventory: 'my_inventory', // associated inventory ID 21 | name: 'webserver1', // name of the host 22 | variables: ['foo': 'bar', 'baz': 1] // optional host variables 23 | ) 24 | ``` 25 | 26 | ### awx.hostDelete() 27 | 28 | Uses AWX to delete a host in an inventory. 29 | 30 | ```groovy 31 | awx.hostDelete('foo.bar.com', '/usr/local/bin/awx') // the ID (or unique name) of the host for first argument 32 | ``` 33 | 34 | ### awx.inventoryCreate() 35 | 36 | Uses AWX to create an inventory. 37 | 38 | ```groovy 39 | awx.inventoryCreate( 40 | bin: '/usr/bin/awx', // optional path to awx executable 41 | description: 'an inventory', // optional description of this inventory 42 | hostFilter: 'myhosts*', // optional filter that will be applied to the hosts of this inventory 43 | kind: '', // optional kind of inventory ('smart' or 'constructed' are accepted; otherwise do not specify a value) 44 | name: 'my_inventory', // name of this inventory 45 | organization: 'my_org', // organization containing this inventory 46 | variables: ['foo': 'bar', 'baz': 1] // optional inventory variables 47 | ) 48 | ``` 49 | 50 | ### awx.inventoryDelete() 51 | 52 | Uses AWX to delete an inventory. 53 | 54 | ```groovy 55 | awx.inventoryDelete('my_inventory', '/usr/local/bin/awx') // the ID (or unique name) of the inventory for first argument 56 | ``` 57 | 58 | ### awx.inventoryModify() 59 | 60 | Uses AWX to modify an inventory. 61 | 62 | ```groovy 63 | awx.inventoryModify( 64 | bin: '/usr/bin/awx', // optional path to awx executable 65 | description: 'an inventory', // optional description of this inventory 66 | hostFilter: 'myhosts*', // optional filter that will be applied to the hosts of this inventory 67 | inventory: 'my_inventory', // ID or unique name of inventory 68 | kind: '', // optional kind of inventory ('smart' or 'constructed' are accepted; otherwise do not specify a value) 69 | name: 'my_inventory', // name of this inventory 70 | organization: 'my_org', // organization containing this inventory 71 | variables: ['foo': 'bar', 'baz': 1] // optional inventory variables 72 | ) 73 | ``` 74 | 75 | ### awx.jobTemplateLaunch() 76 | 77 | Uses AWX to launch a job from a job template. 78 | 79 | ```groovy 80 | awx.jobTemplateLaunch( 81 | bin: '/usr/bin/awx', // optional path to awx executable 82 | credentials: ['sshkey', 'password'], // optional list of credentials IDs or name 83 | executionEnv: 'rhel9', // optional execution environment ID 84 | extraVars: ['foo': 'bar', 'baz': 1], // optional extra variables 85 | id: 5, // job template id 86 | inventory: 'my_inventory', // optional ID or name of the associated inventory 87 | jobType: 'run', // optional job type (run or check) 88 | limit: 'hosts*.com', // optional host limit 89 | monitor: false, // optional wait until launched job finishes 90 | skipTags: ['skipper', 'to_skip'] // optional tags to skip 91 | ) 92 | ``` 93 | 94 | ### awx.projectsUpdate() 95 | 96 | Uses AWX to launch a project update job. 
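A common pattern is to sync the project from source control and wait for that update to finish before launching a job template that depends on the refreshed content. A minimal sketch (the ids are illustrative):

```groovy
script {
  // update the project from scm and block until the update job completes
  awx.projectsUpdate(
    id: 3,
    monitor: true
  )

  // then launch the job template that relies on the refreshed project
  awx.jobTemplateLaunch(
    id: 5,
    monitor: true
  )
}
```

The full parameter reference for `awx.projectsUpdate()` follows.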
97 | 98 | ```groovy 99 | awx.projectsUpdate( 100 | bin: '/usr/bin/awx', // optional path to awx executable 101 | id: 3, // project id 102 | monitor: false // optional wait until project update job finishes 103 | ) 104 | ``` 105 | 106 | ### awx.workflowJobTemplateLaunch() 107 | 108 | Uses AWX to launch a workflow job from a workflow job template. 109 | 110 | ```groovy 111 | awx.workflowJobTemplateLaunch( 112 | bin: '/usr/bin/awx', // optional path to awx executable 113 | extraVars: ['foo': 'bar', 'baz': 1], // optional extra variables 114 | id: 5, // workflow job template id 115 | inventory: 'my_inventory', // optional ID of the associated inventory 116 | limit: 'hosts*.com', // optional host limit 117 | monitor: false, // optional wait until launched job finishes 118 | skipTags: ['skipper', 'to_skip'] // optional tags to skip 119 | ) 120 | ``` 121 | -------------------------------------------------------------------------------- /docs/Packer.md: -------------------------------------------------------------------------------- 1 | # Packer 2 | 3 | Interacts with Packer. The `template` argument must generally be a `pkr.json` template, `pkr.hcl` template, or a directory containing Packer templates and configs. 4 | 5 | ### Dependencies 6 | 7 | - Packer CLI binary executable >= 1.7 8 | 9 | ### packer.build() 10 | Uses Packer to build an artifact from a template or template directory. 11 | 12 | ```groovy 13 | packer.build( 14 | bin: '/usr/bin/packer', // optional location of packer install 15 | except: ['source.*.foo', 'source.bar.*', 'baz'], // optional builder names to ignore during build (mutually exclusive with only) 16 | force: false, // optional force a build to continue if artifacts exist and deletes existing artifacts 17 | only: ['source.*.foo', 'source.bar.*', 'baz'], // optional builder names to build (mutually exclusive with except) 18 | onError: 'default', // optional 'default' cleanup, 'abort', 'ask', or 'run-cleanup-provisioner' 19 | template: '/path/to/template.pkr.json', // location of packer template file or templates directory 20 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 21 | varFile: '/path/to/variables.json' // optional location of variables file 22 | ) 23 | ``` 24 | 25 | ### packer.fmt() 26 | Uses Packer to check for properly canonically formatted code. This method will return a `Boolean` type indicating whether the format check was successful (`true`) or not (`false`). Note that if `check` is `false` then the return will always be `true`. 27 | 28 | ```groovy 29 | packer.fmt( 30 | bin: '/usr/bin/packer', // optional location of packer install 31 | check: false, // optional check template and return an error if file is not formatted correctly (cannot be used with `write`) 32 | diff: false, // optional present a diff if the template is not formatted correctly 33 | recursive: false, // optional also process files in subdirectories 34 | template: '/path/to/template_dir', // location of packer templates directory 35 | write: true // optional write changes directly to files that are not formatted directly (cannot be used with `check`) 36 | ) 37 | ``` 38 | 39 | ### packer.init() 40 | Uses Packer to install all the missing plugins required in a Packer template directory. 
41 | 42 | ```groovy 43 | packer.init( 44 | bin: '/usr/bin/packer', // optional location of packer install 45 | dir: '/path/to/template_dir', // location of packer templates directory 46 | upgrade: false // optional update installed plugins to the latest available version within the specified constraints 47 | ) 48 | ``` 49 | 50 | ### packer.inspect(String template, String bin = '/usr/bin/packer') 51 | Inspects a template and parses and outputs the components a template defines. 52 | 53 | ```groovy 54 | packer.inspect('/path/to/template.pkr.json', '/usr/local/bin/packer') 55 | ``` 56 | 57 | ### packer.parse(String template) 58 | Provides a thin wrapper around [HCL4j](https://github.com/bertramdev/hcl4j) for inputting a Packer template or config, and returning a `Map` representing the parsed HCL2. Note this requires local installation of the HCL4j dependency, and therefore the agent must have sufficient permissions to do so. 59 | 60 | ```groovy 61 | parsedMap = packer.parse('/path/to/template.pkr.hcl') 62 | ``` 63 | 64 | ### packer.pluginsInstall() 65 | Uses Packer to install the most recent compatible Packer plugin matching the version constraint. When the version parameter is omitted, then the most recent version will be installed. `packer.init()` with a config file is generally recommended instead of this method. 66 | 67 | ```groovy 68 | packer.pluginsInstall( 69 | bin: '/usr/bin/packer', // optional location of packer install 70 | plugin: 'github.com/hashicorp/happycloud', 71 | force: false, // optional force reinstallation of plugins 72 | version: 'v1.2.3', // optional version of plugin to install 73 | ) 74 | ``` 75 | 76 | ### packer.pluginsRemove() 77 | Uses Packer to remove all Packer plugins matching the version constraint for the current OS and architecture. When the version parameter is omitted all installed versions will be removed. `packer.init()` with a config file is generally recommended instead of this method. 78 | 79 | ```groovy 80 | packer.pluginsRemove( 81 | bin: '/usr/bin/packer', // optional location of packer install 82 | plugin: 'github.com/hashicorp/happycloud', 83 | version: 'v1.2.3', // optional version of plugin to install 84 | ) 85 | ``` 86 | 87 | ### packer.plugins() 88 | Uses Packer to interact with plugins and display information about them. 89 | 90 | ```groovy 91 | packer.plugins( 92 | bin: '/usr/bin/packer', // optional location of packer install 93 | command: 'installed', // one of 'installed' or 'required' 94 | dir: '/path/to/template_dir', // location of directory with packer config (required for 'required' command) 95 | ) 96 | ``` 97 | 98 | ### packer.validate() 99 | **`evalData` and `warnUndeclVar` require Packer version >= 1.8.5** 100 | 101 | Uses Packer to validate a build template or template directory. This method will return a `Boolean` type indicating whether the validation was successful (`true`) or not (`false`). 
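As with `packer.fmt()`, the returned value can gate later stages instead of immediately failing the build. A minimal sketch (the `TEMPLATE_VALID` variable and stage layout are illustrative):

```groovy
stage('Validate') {
  steps {
    script {
      // record the validation result so the build stage can be skipped when validation fails
      env.TEMPLATE_VALID = packer.validate(template: '/path/to/template.pkr.hcl').toString()
    }
  }
}
stage('Build') {
  when { environment name: 'TEMPLATE_VALID', value: 'true' }
  steps {
    script {
      packer.build(template: '/path/to/template.pkr.hcl')
    }
  }
}
```

The full set of supported parameters is shown below.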
102 | 103 | ```groovy 104 | packer.validate( 105 | bin: '/usr/bin/packer', // optional location of packer install 106 | evalData: false, // optional evaluate datasources during validation 107 | except: ['source.*.foo', 'source.bar.*', 'baz'], // optional builder names to ignore during build (mutually exclusive with only) 108 | only: ['source.*.foo', 'source.bar.*', 'baz'], // optional builder names to build (mutually exclusive with except) 109 | syntaxOnly: false, // optional only check syntax and do not verify config 110 | template: '/path/to/template.pkr.hcl', // location of packer template file or templates directory 111 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 112 | varFile: '/path/to/variables.json', // optional location of variables file 113 | warnUndeclVar: true, // optional warn on user variable files containing undeclared variables 114 | ) 115 | ``` 116 | -------------------------------------------------------------------------------- /src/devops/common/utils.groovy: -------------------------------------------------------------------------------- 1 | // src/devops/common/utils.groovy 2 | package devops.common 3 | 4 | // imports 5 | import com.cloudbees.groovy.cps.NonCPS 6 | import hudson.FilePath 7 | import jenkins.model.Jenkins 8 | 9 | // checks input value for default value use if not set 10 | def defaultInput(input, defaultValue) { 11 | return input ?: defaultValue 12 | } 13 | 14 | // removes file 15 | @NonCPS 16 | void removeFile(String file) { 17 | // delete a file on the master or build node 18 | env['NODE_NAME'] == 'master' ? new File(file).delete() : new FilePath(Jenkins.getInstance().getComputer(env['NODE_NAME']).getChannel(), file).delete() 19 | } 20 | 21 | // downloads file using httpRequest step 22 | void downloadFile(String url, String dest) { 23 | // attempt to download the file 24 | try { 25 | Map response = httpRequest( 26 | url: url, 27 | httpMode: 'GET', 28 | outputFile: dest, 29 | quiet: true, 30 | validResponseCodes: '200:399' 31 | ) 32 | 33 | print "Downloaded ${url} to ${dest} (HTTP ${response.status})" 34 | } catch (Exception error) { 35 | print "Failed to download ${url} to ${dest}" 36 | throw error 37 | } 38 | } 39 | 40 | // downloads file (java/groovy) 41 | @NonCPS 42 | void deprecatedDownloadFile(String url, String dest) { 43 | // establish the file download for the master or the build node 44 | File file = env['NODE_NAME'] == 'master' ? new File(dest) : new FilePath(Jenkins.getInstance().getComputer(env['NODE_NAME']).getChannel(), dest) 45 | 46 | // download the file and automatically close the ostream 47 | file.newOutputStream().withStream { OutputStream ostream -> ostream << new URL(url).openStream() } 48 | 49 | } 50 | 51 | // recursively creates directory with serializable steps 52 | void makeDirParents(String dir) { 53 | // normalize path 54 | String normalizedDir = dir.replaceAll('\\\\', '/') 55 | 56 | // check if directory already exists 57 | if (fileExists(normalizedDir)) { 58 | print "Directory at ${normalizedDir} already exists on node." 59 | return 60 | } 61 | 62 | // create the directory(ies) 63 | dir(normalizedDir) { 64 | print "Created directory at ${normalizedDir}" 65 | } 66 | } 67 | 68 | // functionally equivalent to unix mkdir -p (java/groovy) 69 | @NonCPS 70 | void deprecatedMakeDirParents(String dir) { 71 | // ascertain directory on jenkins master or build agent/node 72 | File file = env['NODE_NAME'] == 'master' ?
new File(dir) : new FilePath(Jenkins.getInstance().getComputer(env['NODE_NAME']).getChannel(), dir) 73 | 74 | // short circuit if directory exists 75 | if (file.exists()) { 76 | print "Directory at ${dir} already exists on node." 77 | return 78 | } 79 | 80 | // create the directory(ies) 81 | file.mkdirs() 82 | print "Created directory at ${dir}" 83 | } 84 | 85 | // converts content map to json string 86 | String mapToJSON(Map content) { 87 | return writeJSON(json: content, returnText: true) 88 | } 89 | 90 | // converts closure body to config map, or returns same config map 91 | // bridges gap between users of older DSL and newer DSL 92 | Map paramsConverter(body) { 93 | // initialize config 94 | Map config = [:] 95 | 96 | // if we received older DSL, convert it into config map 97 | if (body in Closure) { 98 | // evaluate the body block and collect configuration into the object 99 | body.resolveStrategy = Closure.DELEGATE_FIRST 100 | body.delegate = config 101 | body() 102 | } 103 | // if newer DSL, return same map as the config 104 | else if (body in Map) { 105 | config = body 106 | } 107 | // params are invalid type 108 | else { 109 | throw new Exception('The parameter inputs are an invalid type. They must either be a Closure or Map. Consult the documentation for more information.') 110 | } 111 | 112 | return config 113 | } 114 | // example usage: void globalVarMethod(body) 115 | // where body is closure or map 116 | // Map config = new utils().paramsConverter(body) 117 | 118 | // the following methods handle parameter subcommand generation 119 | // the various wrapper methods all rely upon subCommand for actual functionality, and the various definitions exist primarily for type checking 120 | 121 | String listParam(List param, String cmdArg) { 122 | return subCommand(param, cmdArg) 123 | } 124 | 125 | String mapParam(Map param, String cmdArg) { 126 | return subCommand(param, cmdArg) 127 | } 128 | 129 | String stringBoolParams(Map paramCmdArg) { 130 | // initialize aggregate sub command 131 | String aggregateSubCommand = '' 132 | 133 | // iterate through map of params and corresponding command arguments 134 | paramCmdArg.each { param, cmdArg -> 135 | // build aggregated sub command 136 | aggregateSubCommand += subCommand(param, cmdArg) 137 | } 138 | 139 | return aggregateSubCommand 140 | } 141 | 142 | private String subCommand(param, String cmdArg) { 143 | // initialize subcommand string 144 | String subCmd = '' 145 | 146 | // immediately verify param is not null 147 | if (param) { 148 | // different behavior based on param type 149 | switch (param) { 150 | case Map: 151 | // iterate through param value pairs and concatenate full arg and value pairs to subcommand 152 | param.each { paramValueName, paramValue -> 153 | subCmd += " ${cmdArg}${paramValueName}=${paramValue}" 154 | } 155 | break 156 | case List: 157 | // iterate through param values and concatenate full arg and value to subcommand 158 | param.each { paramValue -> 159 | subCmd += " ${cmdArg}${paramValue}" 160 | } 161 | break 162 | case String: 163 | // build aggregate sub command with consecutive subCommand returns 164 | subCmd += " ${cmdArg}${param}" 165 | break 166 | case Boolean: 167 | // build aggregate sub command with consecutive subCommand returns 168 | subCmd += " ${cmdArg}" 169 | break 170 | default: 171 | throw new Exception("Unexpected parameter type '${param.getClass()}' for command argument '${cmdArg}'.") 172 | } 173 | } 174 | 175 | return subCmd 176 | } 177 | 
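// The usage sketch below is illustrative only and not part of the library: it shows how the
// parameter helpers above (defaultInput, listParam, mapParam, and stringBoolParams) might be
// composed by a global variable in vars/ to assemble a CLI invocation. The tool name, flag
// names, and config values are assumptions for demonstration purposes only.
/*
Map config = [bin: null, only: ['foo', 'bar'], vars: ['key': 'value'], force: true]

// fall back to a default executable when none was provided
String bin = new utils().defaultInput(config.bin, 'sometool')

// ' -only=foo -only=bar' from a List, ' -var key=value' from a Map, ' -force' from a Boolean
String args = new utils().listParam(config.only, '-only=') +
              new utils().mapParam(config.vars, '-var ') +
              new utils().stringBoolParams([(config.force): '-force'])

// each helper prefixes its output with a space, so the pieces concatenate cleanly
sh(label: 'Some Tool', script: "${bin} subcommand${args}")
*/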
-------------------------------------------------------------------------------- /vars/awx.groovy: -------------------------------------------------------------------------------- 1 | // vars/awx.groovy 2 | import devops.common.utils 3 | import devops.common.helpers 4 | 5 | void hostCreate(Map config) { 6 | // input checking 7 | assert config.name in String : '"name" is a required parameter for awx.hostCreate.' 8 | assert config.inventory in String : '"inventory" is a required parameter for awx.hostCreate.' 9 | config.bin = config.bin ?: 'awx' 10 | 11 | // initialize the base command 12 | List cmd = [config.bin, 'hosts', 'create', '--name', config.name, '--inventory', config.inventory] 13 | 14 | // check for optional inputs 15 | if (config.description) { 16 | cmd.addAll(['--description', config.description]) 17 | } 18 | if (config.enabled == true) { 19 | cmd.add('--enabled') 20 | } 21 | if (config.instanceId) { 22 | cmd.addAll(['--instance_id', config.instanceId]) 23 | } 24 | if (config.variables) { 25 | assert (config.variables in Map) : 'The variables parameter must be a Map.' 26 | 27 | // convert variables map to json for input 28 | final String variables = writeJSON(json: config.variables, returnText: true) 29 | 30 | cmd.addAll(['--variables', variables]) 31 | } 32 | 33 | // create a host in the inventory 34 | new helpers().toolExec('AWX Host Create', cmd) 35 | } 36 | 37 | void hostDelete(String id, String bin = 'awx') { 38 | // delete a host in the inventory 39 | new helpers().toolExec('AWX Host Delete', [bin, 'hosts', 'delete', id]) 40 | } 41 | 42 | // helper method for create and modify 43 | private void inventory(Map config) { 44 | // helpful constant 45 | final String capAction = config.action.capitalize() 46 | 47 | // input checking 48 | assert config.name in String : "'name' is a required parameter for inventory${capAction}." 49 | assert config.organization in String : "'organization' is a required parameter for inventory${capAction}." 50 | config.bin = config.bin ?: 'awx' 51 | 52 | // initialize the base command 53 | List cmd = [config.bin, 'inventory', config.action, '--name', config.name, '--organization', config.organization] 54 | 55 | // check for optional inputs 56 | if (config.description) { 57 | cmd.addAll(['--description', config.description]) 58 | } 59 | if (config.kind) { 60 | assert ['smart', 'constructed'].contains(config.kind) : 'Inventory kind parameter value must be "smart" or "constructed".' 61 | 62 | cmd.addAll(['--kind', config.kind]) 63 | } 64 | if (config.hostFilter) { 65 | cmd.addAll(['--host_filter', config.hostFilter]) 66 | } 67 | if (config.variables) { 68 | assert (config.variables in Map) : 'The variables parameter must be a Map.' 
69 | 70 | // convert variables map to json for input 71 | final String variables = writeJSON(json: config.variables, returnText: true) 72 | 73 | cmd.addAll(['--variables', variables]) 74 | } 75 | if (config.action == 'modify') { 76 | assert config.inventory in String : 'inventory is a required parameter for inventoryModify' 77 | 78 | cmd.add(config.inventory) 79 | } 80 | 81 | // "something" a inventory 82 | new helpers().toolExec("AWX Inventory ${capAction}", cmd) 83 | } 84 | 85 | // invokes inventory helper method 86 | void inventoryCreate(Map config) { 87 | // invoke helper method with create 88 | config.action = 'create' 89 | inventory(config) 90 | } 91 | 92 | void inventoryDelete(String id, String bin = 'awx') { 93 | // delete an inventory 94 | new helpers().toolExec('AWX Inventory Delete', [bin, 'inventory', 'delete', id]) 95 | } 96 | 97 | // invokes inventory helper method 98 | void inventoryModify(Map config) { 99 | // invoke helper method with modify 100 | config.action = 'modify' 101 | inventory(config) 102 | } 103 | 104 | void jobTemplateLaunch(Map config) { 105 | // input checking 106 | assert config.id in Integer : '"id" is a required parameter for awx.jobTemplateLaunch.' 107 | config.bin = config.bin ?: 'awx' 108 | 109 | // initialize the base command 110 | List cmd = [config.bin, 'job_templates', 'launch'] 111 | 112 | // check for optional inputs 113 | if (config.credentials) { 114 | assert (config.credentials in List) : 'The credentials parameter must be a list of strings.' 115 | 116 | cmd.addAll(['--credentials', config.credentials.join(',')]) 117 | } 118 | if (config.executionEnv) { 119 | cmd.addAll(['--execution_environment', config.executionEnv]) 120 | } 121 | if (config.monitor == true) { 122 | cmd.add('--monitor') 123 | } 124 | if (config.limit) { 125 | cmd.addAll(['--limit', config.limit]) 126 | } 127 | if (config.inventory) { 128 | cmd.addAll(['--inventory', config.inventory]) 129 | } 130 | if (config.jobType) { 131 | assert config.jobType in ['run', 'check'] : 'jobType parameter must be one of "run" or "check"' 132 | 133 | cmd.addAll(['--job_type', config.jobType]) 134 | } 135 | if (config.skipTags) { 136 | assert (config.skipTags in List) : 'The skipTags parameter must be a List.' 137 | 138 | cmd.addAll(['--skip_tags', config.skipTags.join(',')]) 139 | } 140 | if (config.extraVars) { 141 | assert (config.extraVars in Map) : 'The extraVars parameter must be a Map.' 142 | 143 | // convert variables map to json for input 144 | final String extraVars = writeJSON(json: config.extraVars, returnText: true) 145 | 146 | cmd.addAll(['--extra_vars', extraVars]) 147 | } 148 | 149 | // launch a job template job 150 | new helpers().toolExec('AWX Job Template Launch', cmd) 151 | } 152 | 153 | void projectsUpdate(Map config) { 154 | // input checking 155 | assert config.id in int : '"id" is a required parameter for awx.projectsUpdate.' 156 | config.bin = config.bin ?: 'awx' 157 | 158 | // initialize the base command 159 | List cmd = [config.bin, 'projects', 'update'] 160 | 161 | // check for optional inputs 162 | if (config.monitor == true) { 163 | cmd.add('--monitor') 164 | } 165 | 166 | // launch a project update job 167 | new helpers().toolExec('AWX Project Update', cmd) 168 | } 169 | 170 | void workflowJobTemplateLaunch(Map config) { 171 | // input checking 172 | assert config.id in int : '"id" is a required parameter for awx.workflowJobTemplateLaunch.' 
173 | config.bin = config.bin ?: 'awx' 174 | 175 | // initialize the base command 176 | List cmd = [config.bin, 'workflow_job_templates', 'launch'] 177 | 178 | // check for optional inputs 179 | if (config.monitor == true) { 180 | cmd.add('--monitor') 181 | } 182 | if (config.limit) { 183 | cmd.addAll(['--limit', config.limit]) 184 | } 185 | if (config.inventory) { 186 | cmd.addAll(['--inventory', config.inventory]) 187 | } 188 | if (config.extraVars) { 189 | assert (config.extraVars in Map) : 'The extraVars parameter must be a Map.' 190 | 191 | // convert variables map to json for input 192 | final String extraVars = writeJSON(json: config.extraVars, returnText: true) 193 | 194 | cmd.addAll(['--extra_vars', extraVars]) 195 | } 196 | if (config.skipTags) { 197 | assert (config.skipTags in List) : 'The skipTags parameter must be a List.' 198 | 199 | cmd.addAll(['--skip_tags', config.skipTags.join(',')]) 200 | } 201 | 202 | // launch a workflow job template job 203 | new helpers().toolExec('AWX Workflow Job Template Launch', cmd) 204 | } 205 | -------------------------------------------------------------------------------- /docs/FaaS.md: -------------------------------------------------------------------------------- 1 | # FaaS 2 | 3 | Interacts with OpenFaaS CLI. 4 | 5 | ### Dependencies 6 | 7 | - OpenFaaS CLI binary executable >= 0.17 8 | 9 | ### faas.build() 10 | Builds OpenFaaS function containers. 11 | 12 | ```groovy 13 | faas.build( 14 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 15 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 16 | noCache: false, // optional do not use docker's build cache 17 | parallel: '1', // optional build in parallel to depth specified 18 | pull: false, // optional force re-pull of base images 19 | regex: 'regexp_string', // optional regex to match with function names in yaml file 20 | squash: false, // optional use docker's squash flag for smaller images 21 | tag: 'latest', // optional tag override for function image 22 | template: 'samples.yaml' // path to yaml file describing function(s) 23 | ) 24 | ``` 25 | 26 | ### faas.deploy() 27 | Deploys OpenFaaS function containers. 28 | 29 | ```groovy 30 | faas.deploy( 31 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 32 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 33 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 34 | label: ['canary':'true', 'dev':'false'], // optional labels to set 35 | namespace: 'default', // optional namespace of the function 36 | regex: 'regexp_string', // optional regex to match with function names in yaml file (default is unused) 37 | replace: true, // optional replace any existing function (mutually exclusive with update; will be deprecated in 2.3.0 for strategy) 38 | regex: 'regexp_string', // optional regex to match with function names in yaml file 39 | secret: 'dockerhuborg', // optional secure secret to give function access to 40 | strategy: 'update', // optional function strategy (valid: 'update' or 'replace') 41 | template: 'samples.yaml', // path to yaml file describing function(s) 42 | tls: true // optional TLS validation 43 | update: false // optional update existing functions (mutually exclusive with replace; will be deprecated in 2.3.0 for strategy) 44 | ) 45 | ``` 46 | 47 | ### faas.invoke() 48 | Invokes an OpenFaaS function. 
49 | 50 | ```groovy 51 | faas.invoke( 52 | async: false, // optional invoke the function asynchronously 53 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 54 | contentType: 'text/plain', // optional content-type HTTP header 55 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 56 | function: 'echo', // name of the deployed function 57 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 58 | header: ['X-Callback-Url':'http://gateway:8080/function/send2slack', 'X-Ping-Url':'http://request.bin/etc'], // optional HTTP request headers 59 | method: 'POST', // optional HTTP request method 60 | namespace: 'default', // optional namespace of the function 61 | query: ['repo':'faas-cli', 'org':'openfaas'], // optional queries for request 62 | regex: 'regexp_string', // optional regex to match with function names in yaml file 63 | stdin: 'image.png', // optional stdin for function to receive 64 | tls: true // optional TLS validation 65 | ) 66 | ``` 67 | 68 | ### faas.list() 69 | List OpenFaaS functions. The output function list and information is also returned by this method as a `String`. 70 | 71 | ```groovy 72 | faas.list( 73 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 74 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 75 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 76 | namespace: 'default', // optional namespace of the function 77 | quiet: false, // optional display only the function's id (mutually exclusive with verbose) 78 | regex: 'regexp_string', // optional regex to match with function names in yaml file 79 | sort: 'name', // optional sort category (valid: 'name' or 'invocations') 80 | tls: true // optional TLS validation 81 | verbose: false, // optional display extra function information (mutually exclusive with quiet) 82 | ) 83 | ``` 84 | 85 | ### faas.login() 86 | Log in to the specified OpenFaaS gateway. 87 | 88 | ```groovy 89 | faas.login( 90 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 91 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 92 | password: 'password', // gateway password 93 | user: 'admin', // optional gateway username 94 | tls: true // optional TLS validation 95 | ) 96 | ``` 97 | 98 | ### faas.logs() 99 | Fetch logs for a given function name. The logs are returned by this method as a `String`. 100 | 101 | ```groovy 102 | faas.logs( 103 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 104 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 105 | format: 'plain', // optional return format (plain, keyvalue, or json) 106 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 107 | instance: false, // optional print the function instance name/id 108 | name: 'sadpanda', // function name for which to retrieve logs 109 | namespace: 'default', // optional namespace of the function 110 | regex: 'regexp_string', // optional regex to match with function names in yaml file 111 | since: '5s', // optional return logs newer than relative duration 112 | tls: true // optional TLS validation 113 | ) 114 | ``` 115 | 116 | ### faas.push() 117 | Pushes the OpenFaaS function container image(s) to a remote repository. These container images must already be present in your local image cache. 
118 | 119 | ```groovy 120 | faas.push( 121 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 122 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 123 | parallel: '1', // optional build in parallel to depth specified 124 | regex: 'regexp_string', // optional regex to match with function names in yaml file 125 | tag: 'latest', // override latest tag on function Docker image 126 | template: 'samples.yaml' // path to yaml file describing function(s) 127 | ) 128 | ``` 129 | 130 | ### faas.remove() 131 | Removes/deletes deployed OpenFaaS functions. 132 | 133 | ```groovy 134 | faas.remove( 135 | bin: '/usr/bin/faas-cli', // optional executable path for faas-cli 136 | filter: 'filter_string', // optional wildcard to match with function names in yaml file 137 | gateway: 'http://127.0.0.1:8080', // optional gateway url with protocol 138 | namespace: 'default', // optional namespace of the function 139 | regex: 'regexp_string', // optional regex to match with function names in yaml file 140 | template: 'samples.yaml' // path to yaml file describing function(s) 141 | tls: true // optional TLS validation 142 | ) 143 | ``` 144 | 145 | ### faas.validateTemplate(String template) 146 | Validates template syntax. This method will return a `Boolean` type indicating whether the validation was successful (`true`) or not (`false`). 147 | 148 | ```groovy 149 | faas.validateTemplate('template.yaml') 150 | ``` 151 | -------------------------------------------------------------------------------- /vars/goss.groovy: -------------------------------------------------------------------------------- 1 | // vars/goss.groovy 2 | import devops.common.utils 3 | 4 | void install(String version, String installPath = '/usr/bin/') { 5 | new utils().makeDirParents(installPath) 6 | 7 | // check if current version already installed 8 | if (fileExists("${installPath}/goss")) { 9 | final String installedVersion = sh(label: 'Check GoSS Version', returnStdout: true, script: "${installPath}/goss --version").trim() 10 | if (installedVersion =~ version) { 11 | print "GoSS version ${version} already installed at ${installPath}." 12 | return 13 | } 14 | } 15 | // otherwise download and install specified version 16 | new utils().downloadFile("https://github.com/aelsabbahy/goss/releases/download/v${version}/goss-linux-amd64", "${installPath}/goss") 17 | sh(label: 'GoSS CLI Executable Permissions', script: "chmod ug+rx ${installPath}/goss") 18 | print "GoSS successfully installed at ${installPath}/goss." 19 | } 20 | 21 | void installDgoss(String version, String installPath = '/usr/bin/') { 22 | new utils().makeDirParents(installPath) 23 | 24 | // check if current version already installed 25 | if (fileExists("${installPath}/dgoss") && fileExists("${installPath}/goss")) { 26 | final String installedVersion = sh(label: 'Check DGoSS Version', returnStdout: true, script: "${installPath}/goss --version").trim() 27 | if (installedVersion =~ version) { 28 | print "DGoSS version ${version} already installed at ${installPath}." 29 | return 30 | } 31 | } 32 | assert (fileExists("${installPath}/goss")) : 'dgoss is installed but goss is not. dgoss execution requires goss.' 
33 | 34 | // otherwise download and install specified version 35 | new utils().downloadFile("https://raw.githubusercontent.com/aelsabbahy/goss/v${version}/extras/dgoss/dgoss", "${installPath}/dgoss") 36 | sh(label: 'DGoSS CLI Executable Permissions', script: "chmod ug+rx ${installPath}/dgoss") 37 | print "DGoSS successfully installed at ${installPath}/dgoss." 38 | } 39 | 40 | String render(Map config) { 41 | // input checking 42 | config.bin = config.bin ?: 'goss' 43 | 44 | List cmd = [config.bin] 45 | 46 | // check for optional global inputs and establish command 47 | cmd.addAll(globalArgsCmd(config)) 48 | cmd.add('render') 49 | 50 | // check for optional inputs 51 | if (config.debug == true) { 52 | cmd.add('--debug') 53 | } 54 | 55 | // render gossfile 56 | try { 57 | final String rendered = sh(label: "GoSS Render ${config?.gossfile}", script: cmd.join(' '), returnStdout: true) 58 | 59 | print 'GoSSfile rendered successfully.' 60 | 61 | return rendered 62 | } 63 | catch (hudson.AbortException error) { 64 | print 'Failure using goss render.' 65 | throw error 66 | } 67 | } 68 | 69 | void server(Map config) { 70 | // input checking 71 | if (config.logLevel) { 72 | assert ['error', 'warn', 'info', 'debug', 'trace'].contains(config.logLevel) : 'The logLevel parameter must be one of error, warn, info, debug, or trace.' 73 | } 74 | 75 | config.endpoint = config.endpoint ?: '/healthz' 76 | config.port = config.port ?: '8080' 77 | config.bin = config.bin ?: 'goss' 78 | 79 | List cmd = [config.bin] 80 | 81 | // check for optional global inputs and establish command 82 | cmd.addAll(globalArgsCmd(config)) 83 | cmd.add('serve') 84 | 85 | // check for optional inputs 86 | if (config.maxConcur) { 87 | cmd.addAll(['--max-concurrent', config.maxConcur]) 88 | } 89 | if (config.format) { 90 | assert (['documentation', 'json', 'junit', 'nagios', 'prometheus', 'rspecish', 'silent', 'structured', 'tap'].contains(config.format)) : 'The "format" parameter value must be a valid accepted format for GoSS' 91 | 92 | cmd.addAll(['-f', config.format]) 93 | } 94 | if (config.formatOpts) { 95 | assert (['perfdata', 'pretty', 'verbose'].contains(config.formatOpts)) : 'The "formatOpts" parameter value must be one of: perfdata, pretty, or verbose.' 96 | 97 | cmd.addAll(['-o', config.formatOpts]) 98 | } 99 | if (config.cache) { 100 | cmd.addAll(['-c', config.cache]) 101 | } 102 | if (config.logLevel) { 103 | cmd.addAll(['-L', config.logLevel.toUpperCase()]) 104 | } 105 | 106 | // create goss rest api endpoint 107 | try { 108 | List bgCmd = ['nohup'] + cmd + ['-e', config.endpoint, '-l', ":${config.port}"] 109 | sh(label: "GoSS Server ${config?.gossfile}", script: bgCmd.join(' ')) 110 | } 111 | catch (hudson.AbortException error) { 112 | print 'Failure using goss serve.' 113 | throw error 114 | } 115 | print 'GoSS server endpoint created successfully.' 116 | } 117 | 118 | Boolean validate(Map config) { 119 | // input checking 120 | if (config.logLevel) { 121 | assert ['error', 'warn', 'info', 'debug', 'trace'].contains(config.logLevel) : 'The logLevel parameter must be one of error, warn, info, debug, or trace.' 
122 | } 123 | config.bin = config.bin ?: 'goss' 124 | 125 | // optional inputs 126 | List cmd = [config.bin] 127 | 128 | // check for optional global inputs and establish command 129 | cmd.addAll(globalArgsCmd(config)) 130 | cmd.add('validate --no-color') 131 | 132 | // check for optional inputs 133 | if (config.maxConcur) { 134 | cmd.addAll(['--max-concurrent', config.maxConcur]) 135 | } 136 | if (config.format) { 137 | assert (['documentation', 'json', 'junit', 'nagios', 'prometheus', 'rspecish', 'silent', 'structured', 'tap'].contains(config.format)) : 'The "format" parameter value must be a valid accepted format for GoSS' 138 | 139 | cmd.addAll(['-f', config.format]) 140 | } 141 | if (config.formatOpts) { 142 | assert (['perfdata', 'pretty', 'sort', 'verbose'].contains(config.formatOpts)) : 'The "formatOpts" parameter value must be one of: perfdata, pretty, or verbose.' 143 | 144 | cmd.addAll(['-o', config.formatOpts]) 145 | } 146 | if (config.retryTimeout) { 147 | cmd.addAll(['-r', config.retryTimeout]) 148 | 149 | if (config.sleep) { 150 | cmd.addAll(['-s', config.sleep]) 151 | } 152 | } 153 | if (config.logLevel) { 154 | cmd.addAll(['-L', config.logLevel.toUpperCase()]) 155 | } 156 | 157 | // validate with goss 158 | final int returnCode = sh(label: "GoSS Validate ${config?.gossfile}", script: cmd.join(' '), returnStatus: true) 159 | 160 | // return by code 161 | if (returnCode == 0) { 162 | print 'The system successfully validated.' 163 | return true 164 | } 165 | else if (returnCode == 1) { 166 | print 'The system failed validation.' 167 | return false 168 | } 169 | 170 | print 'Failure using goss validate.' 171 | error(message: 'GoSS validate failed unexpectedly') 172 | } 173 | 174 | void validateDocker(Map config) { 175 | // input checking 176 | assert config.image in String : 'The required image parameter was not set.' 177 | config.bin = config.bin ?: 'dgoss' 178 | 179 | List cmd = [config.bin, 'run'] 180 | 181 | // check for optional inputs 182 | if (config.flags) { 183 | assert (config.flags in Map) : 'The flags parameter must be a Map.' 184 | 185 | config.flags.each { String flag, String value -> 186 | cmd.addAll(['-e', "${flag}=${value}"]) 187 | } 188 | } 189 | 190 | // run with dgoss 191 | try { 192 | cmd.add(config.image) 193 | sh(label: "DGoSS Validate Docker ${config.image}", script: cmd.join(' ')) 194 | } 195 | catch (hudson.AbortException error) { 196 | print 'Failure using dgoss run.' 197 | throw error 198 | } 199 | print 'DGoSS run command was successful.' 200 | } 201 | 202 | Boolean validateGossfile(String gossfile) { 203 | return new helpers().validateYamlFile(gossfile, 'GoSSfile') 204 | } 205 | 206 | // private method for global arguments 207 | private static List globalArgsCmd(Map config) { 208 | // initialize subcommand from global args 209 | List subCmd = [] 210 | 211 | // check for optional global args 212 | if (config.varsInline) { 213 | assert config.varsInline in Map : 'The inline vars parameter must be a Map.' 
214 | final String varsInlineJSON = writeJSON(json: config.varsInline, returnText: true) 215 | 216 | subCmd.addAll(['--vars-inline', varsInlineJSON]) 217 | } 218 | else if (config.vars) { 219 | // validate vars file 220 | assert new helpers().validateYamlFile(config.vars, 'vars file') 221 | 222 | subCmd.addAll(['--vars', config.vars]) 223 | } 224 | if (config.package) { 225 | assert (['apk', 'dpkg', 'pacman', 'rpm'].contains(config.package)) : 'The "package" parameter must be one of: apk, dpkg, pacman, or rpm' 226 | 227 | subCmd.addAll(['--package', config.package]) 228 | } 229 | if (config.gossfile) { 230 | assert validateGossfile(config.gossfile) : "GoSSfile ${config.gossfile} does not exist or is not a valid YAML file!" 231 | 232 | subCmd.addAll(['-g', config.gossfile]) 233 | } 234 | else { 235 | assert validateGossfile('goss.yaml') : 'GoSSfile \'goss.yaml\' does not exist or is not a valid YAML file!' 236 | } 237 | 238 | // return subcommand based from global arguments 239 | return subCmd 240 | } 241 | -------------------------------------------------------------------------------- /vars/faas.groovy: -------------------------------------------------------------------------------- 1 | // vars/faas.groovy 2 | import devops.common.utils 3 | import devops.common.helpers 4 | 5 | void build(Map config) { 6 | // input checking 7 | assert config.template : 'The required template parameter was not set.' 8 | config.bin = config.bin ?: 'faas-cli' 9 | assert validateTemplate(config.template) : "The template file ${config.template} does not exist or is not a valid YAML file!" 10 | 11 | List cmd = [config.bin, 'build'] 12 | 13 | // check for optional inputs 14 | if (config.noCache == true) { 15 | cmd.add('--no-cache') 16 | } 17 | if (config.parallel) { 18 | cmd.addAll(['--parallel', config.parallel]) 19 | } 20 | if (config.pull) { 21 | cmd.add('--pull') 22 | } 23 | if (config.squash == true) { 24 | cmd.add('--squash') 25 | } 26 | if (config.tag) { 27 | cmd.addAll(['--tag', config.tag]) 28 | } 29 | 30 | // create image with faas 31 | new helpers().toolExec('OpenFaaS Build', cmd) 32 | } 33 | 34 | void deploy(Map config) { 35 | // input checking 36 | assert config.template : 'The required template parameter was not set.' 37 | if (config.replace && config.update) { 38 | error(message: 'The parameters "replace" and "update" are mutually exclusive!') 39 | } 40 | assert validateTemplate(config.template) : "The template file ${config.template} does not exist or is not a valid YAML file!" 41 | 42 | config.bin = config.bin ?: 'faas-cli' 43 | List cmd = [config.bin, 'deploy'] 44 | 45 | // check for optional inputs 46 | if (config.label) { 47 | assert (config.label in Map) : 'The label parameter must be a Map.' 48 | 49 | config.label.each { String label, String value -> 50 | cmd.addAll(['--label', "${label}=${value}"]) 51 | } 52 | } 53 | if (config.replace == false) { 54 | cmd.add('--replace=false') 55 | } 56 | else if (config.update == true) { 57 | cmd.add('--update=true') 58 | } 59 | else if (config.strategy) { 60 | assert ['replace', 'update'].contains(config.strategy) : 'The strategy parameter must be either "replace" or "update".' 
61 | 62 | cmd.add("--${config.strategy}") 63 | } 64 | if (config.secret) { 65 | cmd.addAll(['--secret', config.secret]) 66 | } 67 | cmd.addAll(globalArgsCmd(config)) 68 | 69 | // deploy function with faas 70 | cmd.addAll(['-f', config.template]) 71 | new helpers().toolExec('OpenFaaS Deploy', cmd) 72 | } 73 | 74 | void install(Map config) { 75 | // input checking 76 | config.installPath = config.installPath ? config.installPath : '/usr/bin' 77 | assert (config.platform in String && config.version in String) : 'A required parameter is missing from this faas.install block. Please consult the documentation for proper usage.' 78 | new utils().makeDirParents(config.installPath) 79 | 80 | // check if current version already installed 81 | if (fileExists("${config.installPath}/faas-cli")) { 82 | final String installedVersion = sh(label: 'Check OpenFaaS CLI Version', returnStdout: true, script: "${config.installPath}/faas-cli version").trim() 83 | if (installedVersion =~ config.version) { 84 | print "FaaS CLI version ${config.version} already installed at ${config.installPath}." 85 | return 86 | } 87 | } 88 | // otherwise determine extension based on platform 89 | String extension = '' 90 | switch (config.platform) { 91 | case 'linux': extension = ''; break 92 | case 'windows': extension = '.exe'; break 93 | case 'darwin': extension = '-darwin'; break 94 | case 'linux-arm64': extension = '-arm64'; break 95 | case 'linux-armhf': extension = '-armhf'; break 96 | default: error(message: "Unsupported platform ${config.platform} specified!") 97 | } 98 | // download and install specified version 99 | new utils().downloadFile("https://github.com/openfaas/faas-cli/releases/download/${config.version}/faas-cli${extension}", "${config.installPath}/faas-cli") 100 | extension = null 101 | sh(label: 'OpenFaaS CLI Executable Permissions', script: "chmod ug+rx ${config.installPath}/faas-cli") 102 | print "FaaS CLI successfully installed at ${config.installPath}/faas-cli." 103 | } 104 | 105 | void invoke(Map config) { 106 | // input checking 107 | config.bin = config.bin ?: 'faas-cli' 108 | assert config.function in String : 'The required parameter function was not set.' 109 | 110 | List cmd = [config.bin, 'invoke'] 111 | 112 | // check for optional inputs 113 | if (config.async == true) { 114 | cmd.add('-a') 115 | } 116 | if (config.contentType) { 117 | cmd.addAll(['--content-type', config.contentType]) 118 | } 119 | if (config.header) { 120 | assert (config.header in Map) : 'The header parameter must be a Map.' 121 | 122 | config.header.each { String header, String value -> 123 | cmd.addAll(['-H', "${header}=${value}"]) 124 | } 125 | } 126 | if (config.method) { 127 | cmd.addAll(['-m', config.method]) 128 | } 129 | if (config.query) { 130 | assert (config.query in Map) : 'The query parameter must be a Map.'
131 | 132 | config.query.each { String query, String value -> 133 | cmd.addAll(['--query', "${query}=${value}"]) 134 | } 135 | } 136 | if (config.stdin) { 137 | cmd.addAll(['<', config.stdin]) 138 | } 139 | cmd.addAll(globalArgsCmd(config)) 140 | 141 | // invoke faas function 142 | cmd.add(config.function) 143 | new helpers().toolExec('OpenFaaS Invoke', cmd) 144 | } 145 | 146 | String list(Map config) { 147 | // input checking 148 | if (config.quiet && config.verbose) { 149 | error(message: 'The "quiet" and "verbose" parameters for faas.list are mutually exclusive; only one can be specified.') 150 | } 151 | config.bin = config.bin ?: 'faas-cli' 152 | 153 | List cmd = [config.bin, 'list'] 154 | 155 | // optional inputs 156 | if (config.quiet) { 157 | cmd.add('-q') 158 | } 159 | else if (config.verbose) { 160 | cmd.add('-v') 161 | } 162 | if (config.sort) { 163 | assert ['name', 'invocations'].contains(config.sort) : 'The "sort" parameter value must be either "name" or "invocations".' 164 | 165 | cmd.addAll(['--sort', config.sort]) 166 | } 167 | cmd.addAll(globalArgsCmd(config)) 168 | 169 | // list faas functions 170 | String functions 171 | try { 172 | functions = sh(label: 'OpenFaaS List', script: cmd.join(' '), returnStdout: true) 173 | } 174 | catch (hudson.AbortException error) { 175 | print 'Failure using faas-cli list.' 176 | throw error 177 | } 178 | 179 | print 'FaaS function list executed successfully.' 180 | return functions 181 | } 182 | 183 | void login(Map config) { 184 | // input checking 185 | assert config.password in String : 'The required password parameter was not set.' 186 | config.bin = config.bin ?: 'faas-cli' 187 | 188 | List cmd = [config.bin, 'login'] 189 | 190 | // check for optional inputs 191 | if (config.user) { 192 | cmd.addAll(['-u', config.user]) 193 | } 194 | cmd.addAll(globalArgsCmd(config)) 195 | 196 | // login to faas gateway 197 | cmd.addAll(['-p', config.password]) 198 | new helpers().toolExec('OpenFaaS Login', cmd) 199 | } 200 | 201 | String logs(Map config) { 202 | // input checking 203 | assert config.name in String : 'The required "name" parameter was not set.' 204 | config.bin = config.bin ?: 'faas-cli' 205 | 206 | List cmd = [config.bin, 'logs'] 207 | 208 | // optional inputs 209 | if (config.instance) { 210 | cmd.add('--instance') 211 | } 212 | if (config.format) { 213 | assert ['plain', 'keyvalue', 'json'].contains(config.format) 214 | 215 | cmd.addAll(['-o', config.format]) 216 | } 217 | if (config.since) { 218 | cmd.addAll(['--since', config.since]) 219 | } 220 | cmd.addAll(globalArgsCmd(config)) 221 | 222 | // retrieve function logs 223 | String logs 224 | try { 225 | cmd.add(config.name) 226 | logs = sh(label: "OpenFaaS Logs ${config.name}", script: cmd.join(' '), returnStdout: true) 227 | } 228 | catch (hudson.AbortException error) { 229 | print 'Failure using faas-cli logs.' 230 | throw error 231 | } 232 | 233 | print 'FaaS function log retrieval executed successfully.' 234 | return logs 235 | } 236 | 237 | void push(Map config) { 238 | // input checking 239 | assert config.template : 'The required template parameter was not set.' 240 | assert validateTemplate(config.template) : "The template file ${config.template} does not exist or is not a valid YAML file!" 
241 | config.bin = config.bin ?: 'faas-cli' 242 | 243 | List cmd = [config.bin, 'push'] 244 | 245 | // check for optional inputs 246 | if (config.parallel) { 247 | cmd.addAll(['--parallel', config.parallel]) 248 | } 249 | if (config.tag) { 250 | cmd.addAll(['--tag', config.tag]) 251 | } 252 | 253 | // push function with faas 254 | cmd.addAll(['-f', config.template]) 255 | new helpers().toolExec('OpenFaaS Push', cmd) 256 | } 257 | 258 | void remove(Map config) { 259 | // input checking 260 | assert config.template : 'The required template parameter was not set.' 261 | assert validateTemplate(config.template) : "The template file ${config.template} does not exist or is not a valid YAML file!" 262 | config.bin = config.bin ?: 'faas-cli' 263 | 264 | List cmd = [config.bin, 'rm'] 265 | 266 | // check for optional inputs 267 | cmd.addAll(globalArgsCmd(config)) 268 | 269 | // remove function with faas 270 | cmd.addAll(['-f', config.template]) 271 | new helpers().toolExec('OpenFaaS Remove', cmd) 272 | } 273 | 274 | Boolean validateTemplate(String template) { 275 | return new helpers().validateYamlFile(template, 'template') 276 | } 277 | 278 | // private method for global arguments pertaining to all methods 279 | private static List globalArgsCmd(Map config) { 280 | // initialize subcommand from global args 281 | List subCmd = [] 282 | 283 | // check for optional inputs 284 | if (config.filter) { 285 | subCmd.addAll(['--filter', "'${config.filter}'"]) 286 | } 287 | if (config.gateway) { 288 | subCmd.addAll(['-g', config.gateway]) 289 | } 290 | if (config.namespace) { 291 | subCmd.addAll(['-n', config.namespace]) 292 | } 293 | if (config.regex) { 294 | subCmd.addAll(['--regex', "'${config.regex}'"]) 295 | } 296 | if (config.tls == false) { 297 | subCmd.add(' --tls-no-verify') 298 | } 299 | 300 | // return subcommand based from global arguments 301 | return subCmd 302 | } 303 | -------------------------------------------------------------------------------- /vars/puppet.groovy: -------------------------------------------------------------------------------- 1 | // vars/puppet.groovy 2 | import devops.common.utils 3 | 4 | void codeDeploy(Map config) { 5 | // input checking 6 | if (config.tokenFile && config.credentialsId) { 7 | error(message: "The 'tokenFile' and 'credentialsId' parameters for puppet.codeDeploy are mutually exclusive; only one can be specified.") 8 | } 9 | assert config.tokenFile || (config.credentialsId in String) : 'The required token or credentialsId parameter was not set.' 10 | if (config.tokenFile) { 11 | assert readFile(config.tokenFile) in String : "The RBAC token ${config.tokenFile} does not exist or is not readable!" 12 | } 13 | 14 | if (config.servers) { 15 | assert (config.servers in List) : 'The servers parameter must be a list of strings.' 16 | } 17 | else { 18 | config.servers = ['puppet'] 19 | } 20 | 21 | config.port = config.port ?: 8170 22 | 23 | // init payload 24 | Map payload = [:] 25 | 26 | // check for environments 27 | if (config.environments) { 28 | assert (config.environments in List) : 'The environments parameter must be a list of strings.' 
29 | 30 | // preface environments payload 31 | payload['environments'] = config.environments 32 | } 33 | else { 34 | payload['deploy-all'] = true 35 | } 36 | 37 | // check for wait 38 | if (config.wait == true) { 39 | payload['wait'] = true 40 | } 41 | 42 | // check for deploy-modules 43 | if (config.deployModules != null) { 44 | payload['deploy-modules'] = config.deployModules 45 | } 46 | 47 | // check for modules 48 | if (config.modules) { 49 | assert (config.modules in List || config.modules in String) : 'The modules parameter must be a list of strings or a string.' 50 | payload['modules'] = config.modules 51 | } 52 | 53 | // check for dry-run 54 | if (config.dryRun == true) { 55 | payload['dry-run'] = true 56 | } 57 | 58 | // convert map to json string 59 | payload = writeJSON(json: payload, returnText: true) 60 | 61 | // initialize vars 62 | boolean errored = false 63 | Map jsonResponse = [:] 64 | Map response = [:] 65 | String token = '' 66 | 67 | // set token with logic from appropriate parameter 68 | if (config.credentialsId) { 69 | withCredentials([token(credentialsId: config.credentialsId, variable: 'theToken')]) { 70 | token = theToken 71 | } 72 | } 73 | else if (config.tokenFile) { 74 | // initialize token with readFile relative pathing requirement stupidness 75 | token = readFile("../../../../../../../../../../../${config.tokenFile}") 76 | } 77 | 78 | // iterate through servers 79 | config.servers.each { String server -> 80 | // trigger code manager deployment 81 | try { 82 | jsonResponse = httpRequest( 83 | acceptType: 'APPLICATION_JSON', 84 | consoleLogResponseBody: true, 85 | contentType: 'APPLICATION_JSON', 86 | customHeaders: [[name: 'X-Authentication', value: token]], 87 | httpMode: 'POST', 88 | ignoreSslErrors: true, 89 | quiet: true, 90 | requestBody: payload, 91 | url: "https://${server}:${config.port}/code-manager/v1/deploys", 92 | ) 93 | } 94 | catch (Exception error) { 95 | print "Failure executing REST API request against ${server} with token! Returned status: ${jsonResponse.status}." 96 | print error 97 | errored = true 98 | } 99 | // parse response 100 | try { 101 | response = readJSON(text: jsonResponse.content) 102 | } 103 | catch (Exception error) { 104 | print "Response from ${server} is not valid JSON! Response content: ${jsonResponse.content}." 105 | print error 106 | errored = true 107 | } 108 | // check for errors if waited 109 | if (config.wait == true) { 110 | response.each { Map hash -> 111 | if (hash.containsKey('error')) { 112 | print "Response from Code Manager for environment ${hash['environment']} was an error of kind ${hash['error']['kind']}." 113 | print hash['error']['msg'] 114 | errored = true 115 | } 116 | else { 117 | print 'Successful response from Code Manager below:' 118 | print hash.toMapString() 119 | } 120 | } 121 | } 122 | } 123 | if (errored) { 124 | throw 'One or more Code Manager deployment(s) failed with above error info.' 125 | } 126 | print 'Code manager deployment(s) was successful.' 127 | } 128 | 129 | void task(Map config) { 130 | // input checking 131 | if (config.tokenFile && config.credentialsId) { 132 | error(message: "The 'tokenFile' and 'credentialsId' parameters for puppet.task are mutually exclusive; only one can be specified.") 133 | } 134 | assert config.tokenFile || (config.credentialsId in String) : 'The required token or credentialsId parameter was not set.' 135 | if (config.tokenFile) { 136 | assert readFile(config.tokenFile) in String : "The RBAC token ${config.tokenFile} does not exist or is not readable!" 
137 | } 138 | assert config.task in String : 'The required task parameter was not set.' 139 | assert config.scope : 'The required scope parameter was not set.' 140 | 141 | config.server = config.server ?: 'puppet' 142 | config.port = config.port ?: 8143 143 | 144 | // initialize payload 145 | Map payload = [:] 146 | 147 | // environment is required, default to production 148 | payload['environment'] = config.environment ?: 'production' 149 | 150 | if (config.description) { 151 | payload['description'] = config.description 152 | } 153 | if (config.noop != null) { 154 | payload['noop'] = config.noop 155 | } 156 | 157 | // params is required, can be empty 158 | payload['params'] = config.params ?: [:] 159 | 160 | // task is required 161 | payload['task'] = config.task 162 | 163 | // scope is required 164 | payload['scope'] = [:] 165 | 166 | if (config.scope in List) { 167 | // is the last element of the list a nested list 168 | if (config.scope[-1] in List) { 169 | payload['scope']['query'] = config.scope 170 | } 171 | // otherwise it is a list of strings which is then a node list 172 | else { 173 | payload['scope']['nodes'] = config.scope 174 | } 175 | } 176 | else if (config.scope in String) { 177 | // does the string look like an app orchestrator string 178 | if (config.scope ==~ /\[.*\]$/) { 179 | payload['scope']['application'] = config.scope 180 | } 181 | // otherwise it is a node group string 182 | else { 183 | payload['scope']['node_group'] = config.scope 184 | } 185 | } 186 | else { 187 | error(message: 'The scope parameter is an invalid type!') 188 | } 189 | 190 | // check for targets (for Bolt server usage) 191 | if (config.targets) { 192 | assert (config.targets in List) : 'The targets parameter must be a list of target objects.' 193 | payload['targets'] = config.targets 194 | } 195 | 196 | // check for timeout 197 | if (config.timeout) { 198 | payload['timeout'] = config.timeout 199 | } 200 | 201 | // check for userdata 202 | if (config.userdata) { 203 | payload['userdata'] = config.userdata 204 | } 205 | 206 | // convert map to json string 207 | payload = writeJSON(json: payload, returnText: true) 208 | 209 | // initialize vars 210 | Map jsonResponse = [:] 211 | Map response = [:] 212 | String token = '' 213 | 214 | // set token with logic from appropriate parameter 215 | if (config.credentialsId) { 216 | withCredentials([token(credentialsId: config.credentialsId, variable: 'theToken')]) { 217 | token = theToken 218 | } 219 | } 220 | else if (config.tokenFile) { 221 | // initialize token with readFile relative pathing requirement stupidness 222 | token = readFile("../../../../../../../../../../../${config.tokenFile}") 223 | } 224 | 225 | // trigger task orchestration 226 | try { 227 | jsonResponse = httpRequest( 228 | acceptType: 'APPLICATION_JSON', 229 | consoleLogResponseBody: true, 230 | contentType: 'APPLICATION_JSON', 231 | customHeaders: [[name: 'X-Authentication', value: token]], 232 | httpMode: 'POST', 233 | ignoreSslErrors: true, 234 | quiet: true, 235 | requestBody: payload, 236 | url: "https://${config.server}:${config.port}/orchestrator/v1/command/task", 237 | ) 238 | } 239 | catch (Exception error) { 240 | print "Failure executing REST API request against ${config.server} with token! Returned status: ${jsonResponse.status}." 241 | throw error 242 | } 243 | // receive and parse response 244 | try { 245 | response = readJSON(text: jsonResponse.content) 246 | } 247 | catch (Exception error) { 248 | print "Response from ${config.server} is not valid JSON! 
Response content: ${jsonResponse.content}." 249 | throw error 250 | } 251 | // handle successful response 252 | if (response.containsKey('job')) { 253 | print 'Puppet Orchestrator Task execution successfully requested.' 254 | print "Job Name: ${response['job']['name']}, Job ID: ${response['job']['id']}" 255 | } 256 | else { 257 | print 'Failure response from Orchestrator below:' 258 | print response.toMapString() 259 | } 260 | } 261 | 262 | void token(Map config) { 263 | // input checking 264 | assert config.username in String : 'The username parameter is required.' 265 | assert config.password in String : 'The password parameter is required.' 266 | 267 | config.server = config.server ?: 'puppet' 268 | config.port = config.port ?: 4433 269 | config.path = config.path ?: "${env.JENKINS_HOME}/.puppetlabs" 270 | 271 | //construct payload 272 | Map payload = [:] 273 | payload['login'] = config.username 274 | payload['password'] = config.password 275 | 276 | // optional parameters 277 | if (config.lifetime) { 278 | payload['lifetime'] = config.lifetime 279 | } 280 | if (config.label) { 281 | payload['label'] = config.label 282 | } 283 | 284 | // convert map to json string 285 | payload = writeJSON(json: payload, returnText: true) 286 | 287 | // initialize vars 288 | Map jsonResponse = [:] 289 | Map response = [:] 290 | 291 | // trigger token generation 292 | try { 293 | jsonResponse = httpRequest( 294 | acceptType: 'APPLICATION_JSON', 295 | consoleLogResponseBody: true, 296 | contentType: 'APPLICATION_JSON', 297 | httpMode: 'POST', 298 | ignoreSslErrors: !config.secure, 299 | quiet: true, 300 | requestBody: payload, 301 | url: "https://${config.server}:${config.port}/rbac-api/v1/auth/token", 302 | ) 303 | } 304 | catch (Exception error) { 305 | print "Failure executing REST API request against ${config.server} with username ${config.username}. Returned status: ${jsonResponse.status}." 306 | throw error 307 | } 308 | // receive and parse response 309 | try { 310 | response = readJSON(text: jsonResponse.content) 311 | } 312 | catch (Exception error) { 313 | print "Response from ${config.server} is not valid JSON! Response content: ${jsonResponse.content}." 314 | throw error 315 | } 316 | 317 | // check if desired token save path exists and create if not 318 | new utils().makeDirParents(config.path) 319 | 320 | // acess token value and save it to file 321 | writeFile(file: "${config.path}/token", text: response['token']) 322 | 323 | print "RBAC Token retrieved successfully and stored at ${config.path}/token." 324 | } 325 | -------------------------------------------------------------------------------- /docs/Helm.md: -------------------------------------------------------------------------------- 1 | # Helm 2 | 3 | Interacts with Helm. Note that you should set the environment variable `KUBECONFIG` in your pipeline with `environment { KUBECONFIG = '/path/to/.kube/config' }` as the `jenkins` user probably does not have one in its home directory, and Helm requires a valid kube config for all commands. Alternatively, you can use the `kubeconfigFile` or `kubeconfigContent` bindings for the Credentials Binding plugin, and then wrap code within a `withCredentials` block as per normal. Also alternatively, you can combine the two like `environment { KUBECONFIG = credentials('my-kubeconfig') }` 4 | 5 | Also please note direct Kubernetes support will never exist in this library. 
The reason for this is that the Kubernetes Java client requires exactly specifying the API version and object type altered during modifications. This limitation is not encountered in the Golang client and its associated tools (e.g. `kubectl`), and the reason is [well explained here](https://github.com/kubernetes-client/java/issues/611#issuecomment-509106822). This makes the implementation significantly more cumbersome. The workaround for this has already been excellently implemented by Fabric8 in their library, so no compelling reason exists to attempt to implement something similar in this library. 6 | 7 | ### Dependencies 8 | 9 | - Helm CLI binary executable >= 3.0 10 | - Kubernetes cluster 11 | 12 | ### helm.history() 13 | Prints historical revisions for a given release, and also returns the information as a String. 14 | 15 | ```groovy 16 | helm.history( 17 | bin: '/usr/bin/helm', // optional executable path for helm 18 | context: 'default', // optional kube-context from kube config 19 | max: 256, // optional maximum number of revisions to include in history (default 256) 20 | name: 'happy-panda', // required name for the release object 21 | namespace: 'default', // optional namespace for the installed release object 22 | outputFormat: 'table', // optional format for output (table, json, or yaml) 23 | ) 24 | ``` 25 | 26 | ### helm.install() 27 | Performs an installation with Helm onto the Kubernetes cluster. 28 | 29 | ```groovy 30 | helm.install( 31 | atomic: false, // optional deletes the installation on failure 32 | bin: '/usr/bin/helm', // optional executable path for helm 33 | chart: 'chart', // chart repository, local archive, directory, or url to install 34 | context: 'default', // optional kube-context from kube config 35 | createNS: false, // optional create the release namespace if not present 36 | devel: false, // optional also use development versions (mutually exclusive with version) 37 | dryRun: false, // optional simulate an install 38 | force: false, // optional force resource updates through replacement strategy 39 | name: 'happy-panda', // required name for the installed release object 40 | namespace: 'default', // optional namespace for the installed release object 41 | set: ['key1':'val1', 'key2':'val2'], // optional set input values 42 | values: ['config.yaml'], // optional value overrides yaml file or url 43 | verify: true, // optional verify the provenance of the chart 44 | version: 'latest', // optional chart version to install (mutually exclusive with devel) 45 | wait: false, // optional wait until everything is in a ready state 46 | ) 47 | ``` 48 | 49 | ### helm.lint() 50 | Runs a series of tests to verify that the chart is well-formed. This method will return a `Boolean` type indicating whether the chart linting was successful (`true`) or not (`false`). 51 | 52 | ```groovy 53 | helm.lint( 54 | bin: '/usr/bin/helm', // optional executable path for helm 55 | chart: 'chart', // chart repository, local archive, directory, or url to install 56 | context: 'default', // optional kube-context from kube config 57 | namespace: 'default', // optional namespace for the installed release object 58 | set: ['foo':'bar', 'bar':'baz'], // optional value override 59 | strict: false // optional fail on warnings 60 | values: ['config.yaml'], // optional value overrides yaml file or url 61 | withSubcharts: false, // optional lin dependent charts 62 | ) 63 | ``` 64 | 65 | ### helm.packages() 66 | Package a chart directory into a chart archive. 
67 | 68 | ```groovy 69 | helm.packages( 70 | bin: '/usr/bin/helm', // optional executable path for helm 71 | chart: 'path/to/chart', // absolute or relative path to chart 72 | dest: '.', // optional location to write the chart 73 | key: 'foo', // optional sign the package with this key name (mutually exclusive with keyring) 74 | keyring: '/home/dir/.gnupg/pubring.gpg', // optional sign the package with the public keyring at this location (mutually exclusive with key) 75 | updateDeps: false, // optional update dependencies from requirements prior to packaging 76 | version: '1.0.0' // optional version set for the chart 77 | ) 78 | ``` 79 | 80 | ### helm.plugin() 81 | Manage client-side Helm plugins. 82 | 83 | ```groovy 84 | helm.plugin( 85 | bin: '/usr/bin/helm', // optional executable path for helm 86 | command: 'install', // plugin command; one of 'install', 'list', 'uninstall', or 'update' 87 | plugin: 'https://github.com/adamreese/helm-env' // targeted plugin (unless 'list' command) 88 | ) 89 | ``` 90 | 91 | ### helm.push 92 | Upload a chart to a registry. 93 | 94 | ```groovy 95 | helm.push( 96 | bin: '/usr/bin/helm', // optional executable path for helm 97 | chart: 'helm-chart-1.0.0.tgz', // absolute or relative path to packaged chart 98 | insecure: false, // optional skip tls certificate checks for the chart upload 99 | remote: 'oci://my.registry.com/', // address of the remote chart registry 100 | ) 101 | ``` 102 | 103 | ### helm.registryLogin 104 | Authenticate to a remote registry. 105 | 106 | ```groovy 107 | helm.registryLogin( 108 | bin: '/usr/bin/helm', // optional executable path for helm 109 | host: 'https://helm.registry.com', // registry host address 110 | insecure: false, // optional allow connections to TLS registry without certs 111 | password: 'pass', // registry password or identity token 112 | username: 'user', // registry username 113 | ) 114 | ``` 115 | 116 | ### helm.repo() 117 | Add and update a Helm chart repository. The repository will update if it has already been added. 118 | 119 | ```groovy 120 | helm.repo( 121 | bin: '/usr/local/bin/helm', // optional executable path 122 | ca: '/path/to/crt.ca', // optional path to CA bundle to verify certificates of HTTPS servers 123 | cert: '/path/to/ca.crt', // optional path to HTTPS client SSL certificate file 124 | force: false, // optional replace/overwrite the repo if it exists 125 | insecure: false, // optional skip tls certificate checks 126 | key: '/path/to/rsa.key', // optional path to HTTPS client SSL key file 127 | password: 'mypassword', // optional chart repository password 128 | repo: 'stable', // name of the chart repository 129 | url: 'https://kubernetes-charts.storage.googleapis.com', // url of the chart repository 130 | user: 'myuser' // optional chart repository username 131 | ) 132 | ``` 133 | 134 | ### helm.rollback() 135 | Roll back the release object to a previous release with Helm. 
136 | 137 | ```groovy 138 | helm.rollback( 139 | bin: '/usr/local/bin/helm', // optional executable path for helm 140 | context: 'default', // optional kube-context from kube config 141 | force: false, // optional force resource update through delete/recreate if needed 142 | hooks: true, // optional hooks run during rollback 143 | name: 'happy-panda', // release object name to rollback 144 | namespace: 'default', // optional namespace for the rolled back release object 145 | recreatePods: false, // optional performs pods restart for the resource if applicable 146 | version: 'previous' // optional version of release-object to rollback to 147 | ) 148 | ``` 149 | 150 | ### helm.show() 151 | Show information about a chart. 152 | 153 | ```groovy 154 | helm.show( 155 | bin: '/usr/local/bin/helm', // optional executable path for helm 156 | chart: 'chart', // chart repository, local archive, directory, or url to display 157 | info: 'all', // info to display; one of 'all', 'chart', 'crds', 'readme', or 'values' 158 | ) 159 | ``` 160 | 161 | ### helm.status() 162 | Shows the status of a named release. This will also return the displayed information as a `String` type for further consumption and usage. 163 | 164 | ```groovy 165 | helm.status( 166 | bin: '/usr/bin/helm', // optional executable path for helm 167 | description: false, // optional display release description message 168 | name: 'happy-panda', // name for the release object to be queried 169 | context: 'default', // optional kube-context from kube config 170 | namespace: 'default', // optional namespace for the queried release object 171 | outputFormat: 'table' // optional format for output (table, json, or yaml) 172 | resources: false, // optional display release resources 173 | revision: 0, // optional display the status of the release revision 174 | ) 175 | ``` 176 | 177 | ### helm.test() 178 | Executes the tests for a release. 179 | 180 | ```groovy 181 | helm.test( 182 | bin: '/usr/bin/helm', // optional executable path for helm 183 | context: 'default', // optional kube-context from kube config 184 | kubectl: '/usr/bin/kubectl', // optional executable path for kubectl 185 | name: 'happy-panda', // name of a deployed release 186 | namespace: 'default' // optional namespace for the queried release object 187 | ) 188 | ``` 189 | 190 | ### helm.uninstall() 191 | Uninstall the release object from Kubernetes with Helm. 192 | 193 | ```groovy 194 | helm.uninstall( 195 | bin: '/usr/bin/helm', // optional executable path for helm 196 | name: 'happy-panda', // name for the release object to be deleted 197 | context: 'default' // optional kube-context from kube config 198 | namespace: 'default' // optional namespace for the uninstalled release object 199 | ) 200 | ``` 201 | 202 | ### helm.upgrade() 203 | Updates and/or changes the configuration of a release with Helm. 
204 | 205 | ```groovy 206 | helm.upgrade( 207 | atomic: false, // optional rolls back changes made in case of failed upgrade 208 | bin: '/usr/bin/helm', // optional executable path for helm 209 | chart: 'chart', // chart repository, local archive, directory, or url to upgrade 210 | context: 'default', // optional kube-context from kube config 211 | createNS: false, // optional create the release namespace if not present and install occurs 212 | devel: false, // optional also use development versions (mutually exclusive with version) 213 | dryRun: false, // optional simulate an upgrade 214 | force: false, // optional force resource updates through replacement strategy 215 | install: false, // optional install if release not already present 216 | name: 'happy-panda', // name of the upgraded release object 217 | namespace: 'default', // optional namespace for the upgraded release object 218 | set: ['key1':'val1', 'key2':'val2'], // optional set input values 219 | values: ['config.yaml'], // optional value overrides yaml file or url 220 | verify: true, // optional verify the provenance of the chart 221 | version: 'latest', // optional chart version to install (mutually exclusive with devel) 222 | wait: false, // optional wait until everything is in a ready state 223 | ) 224 | ``` 225 | 226 | ### helm.verify(String chartPath, String helmPath = 'helm') 227 | Verify that the given chart has a valid provenance file. This can be used to verify a local chart. This method will return a `Boolean` type indicating whether the verification was successful (`true`) or not (`false`). 228 | 229 | ```groovy 230 | helm.verify('/path/to/chart.tar.gz', '/usr/bin/helm') 231 | ``` 232 | -------------------------------------------------------------------------------- /vars/packer.groovy: -------------------------------------------------------------------------------- 1 | // vars/packer.groovy 2 | import devops.common.utils 3 | import devops.common.helpers 4 | import devops.common.hcl 5 | 6 | void build(Map config) { 7 | // input checking 8 | assert config.template in String : 'The required template parameter was not set.' 9 | if (config.except && config.only) { 10 | error(message: "The 'except' and 'only' parameters for packer.build are mutually exclusive; only one can be specified.") 11 | } 12 | assert fileExists(config.template) : "The template file or templates directory ${config.template} does not exist!" 13 | config.bin = config.bin ?: 'packer' 14 | 15 | List cmd = [config.bin, 'build', '-color=false'] 16 | 17 | // check for optional inputs 18 | cmd.addAll(new helpers().varSubCmd(config)) 19 | 20 | if (config.except) { 21 | assert (config.except in List) : 'The except parameter must be a list of strings.' 22 | 23 | cmd.add("-except=${config.except.join(',')}") 24 | } 25 | if (config.only) { 26 | assert (config.only in List) : 'The only parameter must be a list of strings.' 27 | 28 | cmd.add("-only=${config.only.join(',')}") 29 | } 30 | if (config.force == true) { 31 | cmd.add('-force') 32 | } 33 | if (config.onError) { 34 | assert (['default', 'abort', 'ask', 'run-cleanup-provisioner'].contains(config.onError)) : 'The argument must be one of: default, abort, ask, or run-cleanup-provisioner.' 
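// append the validated on-error behavior to the packer build arguments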
35 | 36 | cmd.add("-on-error=${config.onError}") 37 | } 38 | 39 | // create artifact with packer 40 | try { 41 | if (config.template ==~ /\.pkr\./) { 42 | cmd.add(config.template) 43 | sh(label: "Packer Build ${config.template}", script: cmd.join(' ')) 44 | } 45 | else { 46 | dir(config.template) { 47 | cmd.add('.') 48 | sh(label: "Packer Build ${config.template}", script: cmd.join(' ')) 49 | } 50 | } 51 | } 52 | catch (hudson.AbortException error) { 53 | print 'Failure using packer build.' 54 | throw error 55 | } 56 | print 'Packer build artifact created successfully.' 57 | } 58 | 59 | Boolean fmt(Map config) { 60 | // input checking 61 | assert config.template in String : 'The required template parameter was not set.' 62 | assert fileExists(config.template) : "The template file or templates directory ${config.template} does not exist!" 63 | if (config.write && config.check) { 64 | error(message: "The 'write' and 'check' options for packer.fmt are mutually exclusive - only one can be enabled.") 65 | } 66 | config.bin = config.bin ?: 'packer' 67 | 68 | List cmd = [config.bin, 'fmt'] 69 | 70 | // check for optional inputs 71 | if (config.diff == true) { 72 | cmd.add('-diff') 73 | } 74 | if (config.check == true) { 75 | cmd.add('-check') 76 | } 77 | // incompatible with above 78 | else if (config.write == true) { 79 | cmd.add('-write') 80 | } 81 | if (config.recursive == true) { 82 | cmd.add('-recursive') 83 | } 84 | 85 | // canonically format the code 86 | int fmtStatus 87 | if (config.template ==~ /\.pkr\./) { 88 | cmd.add(config.template) 89 | fmtStatus = sh(label: "Packer Format ${config.template}", returnStatus: true, script: cmd.join(' ')) 90 | } 91 | else { 92 | dir(config.template) { 93 | cmd.add('.') 94 | fmtStatus = sh(label: "Packer Format ${config.template}", returnStatus: true, script: cmd.join(' ')) 95 | } 96 | } 97 | 98 | // report if formatting check detected issues 99 | if (fmtStatus != 0) { 100 | // the format check failed 101 | if (config.check == true) { 102 | print 'Packer fmt has detected formatting errors.' 103 | return false 104 | } 105 | 106 | // the format command failed unexpectedly 107 | print 'Failure using packer fmt.' 108 | error(message: 'packer fmt failed unexpectedly; check logs for details') 109 | } 110 | 111 | print 'Packer fmt was successful.' 112 | return true 113 | } 114 | 115 | void init(Map config) { 116 | // input checking 117 | assert fileExists(config.dir) : "Working template directory ${config.dir} does not exist." 118 | config.bin = config.bin ?: 'packer' 119 | 120 | List cmd = [config.bin, 'init'] 121 | 122 | // check for optional inputs 123 | if (config.upgrade == true) { 124 | cmd.add('-upgrade') 125 | } 126 | 127 | // initialize the working template directory 128 | try { 129 | dir(config.dir) { 130 | cmd.add('.') 131 | sh(label: "Packer Init ${config.dir}", script: cmd.join(' ')) 132 | } 133 | } 134 | catch (hudson.AbortException error) { 135 | print 'Failure using packer init.' 136 | throw error 137 | } 138 | print 'Packer init was successful.' 139 | } 140 | 141 | void inspect(String template, String bin = '/usr/bin/packer') { 142 | // input checking 143 | assert fileExists(template) : "A file does not exist at ${template}." 144 | 145 | // inspect the packer template 146 | try { 147 | sh(label: "Packer Inspect ${template}", script: "${bin} inspect ${template}") 148 | } 149 | catch (hudson.AbortException error) { 150 | print 'Failure inspecting the template.' 
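// rethrow so the enclosing pipeline build is marked as failed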
151 | throw error 152 | } 153 | print 'Packer inspect was successful' 154 | } 155 | 156 | void install(Map config) { 157 | // input checking 158 | config.installPath = config.installPath ? config.installPath : '/usr/bin' 159 | assert (config.platform in String && config.version in String) : 'A required parameter ("platform" or "version") is missing from the packer.install method. Please consult the documentation for proper usage.' 160 | 161 | new utils().makeDirParents(config.installPath) 162 | 163 | // check if current version already installed 164 | if (fileExists("${config.installPath}/packer")) { 165 | final String installedVersion = sh(label: 'Check Packer Version', returnStdout: true, script: "${config.installPath}/packer version").trim() 166 | if (installedVersion =~ config.version) { 167 | print "Packer version ${config.version} already installed at ${config.installPath}." 168 | return 169 | } 170 | } 171 | // otherwise download and install specified version 172 | new utils().downloadFile("https://releases.hashicorp.com/packer/${config.version}/packer_${config.version}_${config.platform}.zip", 'packer.zip') 173 | unzip(zipFile: 'packer.zip', dir: config.installPath) 174 | new utils().removeFile('packer.zip') 175 | print "Packer successfully installed at ${config.installPath}/packer." 176 | } 177 | 178 | Map parse(String file) { 179 | // return map of parsed hcl 180 | return new hcl().hclToMap(file) 181 | } 182 | 183 | void pluginInstall(String url, String installLoc) { 184 | // return file path up to final slash element 185 | final String installDir = new File(installLoc).parent ?: '.' 186 | 187 | // check if plugin dir exists and create if not 188 | new utils().makeDirParents(installDir) 189 | 190 | // check if plugin already installed 191 | if (fileExists(installLoc)) { 192 | print "Packer plugin already installed at ${installLoc}." 193 | return 194 | } 195 | // otherwise download and install plugin 196 | if (url ==~ /\.zip$/) { 197 | // append zip extension to avoid filename clashes 198 | installLoc = "${installLoc}.zip" 199 | } 200 | new utils().downloadFile(url, installLoc) 201 | if (url ==~ /\.zip$/) { 202 | unzip(zipFile: installLoc) 203 | new utils().removeFile(installLoc) 204 | } 205 | else { 206 | sh(label: 'Packer Plugin Executable Permissions', script: "chmod ug+rx ${installLoc}") 207 | } 208 | print "Packer plugin successfully installed at ${installLoc}." 209 | } 210 | 211 | void pluginsInstall(Map config) { 212 | config.bin = config.bin ?: 'packer' 213 | assert config.plugin in String : 'The required "plugin" parameter was not assigned a value.' 214 | 215 | List cmd = [config.bin, 'install'] 216 | 217 | // optional inputs 218 | if (config.force == true) { 219 | cmd.add('-force') 220 | } 221 | // append plugin since optional version must be last argument 222 | cmd.add(config.plugin) 223 | 224 | if (config.version) { 225 | cmd.add(config.version) 226 | } 227 | 228 | // install plugin 229 | try { 230 | sh(label: "Packer Plugins Install ${config.plugin}", script: cmd.join(' ')) 231 | } 232 | catch (hudson.AbortException error) { 233 | print 'Failure using packer plugins install.' 234 | throw error 235 | } 236 | print 'Packer plugins install executed successfully.' 237 | } 238 | 239 | void pluginsRemove(Map config) { 240 | config.bin = config.bin ?: 'packer' 241 | assert config.plugin in String : 'The required "plugin" parameter was not assigned a value.' 
242 | 243 | List cmd = [config.bin, 'remove', config.plugin] 244 | 245 | // optional inputs 246 | if (config.version) { 247 | cmd.add(config.version) 248 | } 249 | 250 | // remove plugin 251 | try { 252 | sh(label: "Packer Plugins Remove ${config.plugin}", script: cmd.join(' ')) 253 | } 254 | catch (hudson.AbortException error) { 255 | print 'Failure using packer plugins remove.' 256 | throw error 257 | } 258 | print 'Packer plugins remove executed successfully.' 259 | } 260 | 261 | void plugins(Map config) { 262 | // input checking 263 | assert (['installed', 'required'].contains(config.command)) : 'The command parameter must be one of "installed" or "required".' 264 | config.bin = config.bin ?: 'packer' 265 | 266 | List cmd = [config.bin, 'plugins', config.command] 267 | 268 | // check for optional inputs 269 | // conditional based on command to double verify dir param input both exists and is valid 270 | // groovy 3: if (config.command === 'required') { 271 | if (config.command == 'required') { 272 | assert config.dir in String : 'The required "dir" parameter was not set.' 273 | assert fileExists(config.dir) : "The Packer config directory ${config.dir} does not exist!" 274 | } 275 | 276 | // interact with packer plugins 277 | try { 278 | // groovy 3: if (config.command === 'required') { 279 | if (config.command == 'required') { 280 | dir(config.dir) { 281 | cmd.add('.') 282 | sh(label: "Packer Plugins ${config.command.capitalize()}", script: cmd.join(' ')) 283 | } 284 | } 285 | else { 286 | sh(label: "Packer Plugins ${config.command.capitalize()}", script: cmd.join(' ')) 287 | } 288 | } 289 | catch (hudson.AbortException error) { 290 | print 'Failure using packer plugins.' 291 | throw error 292 | } 293 | print 'Packer plugins executed successfully.' 294 | } 295 | 296 | Boolean validate(Map config) { 297 | // input checking 298 | assert config.template in String : 'The required template parameter was not set.' 299 | if (config.except && config.only) { 300 | error(message: "The 'except' and 'only' parameters for packer.validate are mutually exclusive; only one can be specified.") 301 | } 302 | assert fileExists(config.template) : "The template file or templates directory ${config.template} does not exist!" 303 | config.bin = config.bin ?: 'packer' 304 | 305 | List cmd = [config.bin, 'validate'] 306 | 307 | // check for optional inputs 308 | cmd.addAll(new helpers().varSubCmd(config)) 309 | 310 | if (config.except) { 311 | assert (config.except in List) : 'The except parameter must be a list of strings.' 312 | 313 | cmd.add("-except=${config.except.join(',')}") 314 | } 315 | if (config.only) { 316 | assert (config.only in List) : 'The only parameter must be a list of strings.' 
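// restrict packer validate to the specified builds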
317 | 318 | cmd.add("-only=${config.only.join(',')}") 319 | } 320 | if (config.evalData == true) { 321 | cmd.add('-evaluate-datasources') 322 | } 323 | if (config.warnUndeclVar == false) { 324 | cmd.add('-no-warn-undeclared-var') 325 | } 326 | if (config.syntaxOnly == true) { 327 | cmd.add('-syntax-only') 328 | } 329 | 330 | // validate template with packer 331 | int returnCode 332 | if (config.template ==~ /\.pkr\./) { 333 | cmd.add(config.template) 334 | returnCode = sh(label: "Packer Validate ${config.template}", script: cmd.join(' '), returnStatus: true) 335 | } 336 | else { 337 | dir(config.template) { 338 | cmd.add('.') 339 | returnCode = sh(label: "Packer Validate ${config.template}", script: cmd.join(' '), returnStatus: true) 340 | } 341 | } 342 | 343 | // return by code 344 | if (returnCode == 0) { 345 | print 'The configs and templates successfully validated.' 346 | return true 347 | } 348 | else if (returnCode == 1) { 349 | print 'The configs and templates failed validation.' 350 | return false 351 | } 352 | print 'Failure using packer validate.' 353 | error(message: 'Packer validate failed unexpectedly') 354 | } 355 | -------------------------------------------------------------------------------- /docs/Terraform.md: -------------------------------------------------------------------------------- 1 | # Terraform 2 | 3 | Interacts with Terraform. `env.TF_IN_AUTOMATION` is set to `true` for each method. Note that OpenTofu can also be used with these by assigning a value to the `bin` parameter for each method that is the path to the OpenTofu binary executable. 4 | 5 | ### Dependencies 6 | 7 | - Terraform CLI binary executable >= 1.0 8 | 9 | ### terraform.apply() 10 | Uses Terraform to apply a config. Note that if `terraform.plan(path: configDir)` was invoked before this and the `out` parameter was not specified, then the resultant plan file is in `${configDir}/plan.tfplan` by default. If a plan file is specified as the `configPath` parameter value, then the `vars` and `target` parameters will be ignored. 11 | 12 | ```groovy 13 | terraform.apply( 14 | bin: '/usr/bin/terraform', // optional path to terraform executable 15 | compactWarn: false, // optional warnings as compact summary messages 16 | configPath: '/path/to/config_dir_or_plan_file', // path to config dir or plan file 17 | target: ['aws_instance.example', 'aws_eip.ip'], // optional resource targets 18 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 19 | varFile: '/path/to/variables.tf' // optional location of variables file 20 | ) 21 | ``` 22 | 23 | ### terraform.destroy() 24 | Uses Terraform to destroy an applied config. Note that if `terraform.plan(path: configDir)` with `destroy: true` was invoked before this, then the resultant plan file is in `${configDir}/plan.tfplan` by default. If a plan file is specified as the `configPath` parameter value, then the `vars` and `target` parameters will be ignored. 25 | 26 | ```groovy 27 | terraform.destroy( 28 | bin: '/usr/bin/terraform', // optional path to terraform executable 29 | compactWarn: false, // optional warnings as compact summary messages 30 | configPath: '/path/to/config_dir_or_plan_file', // path to config dir or plan file 31 | target: ['aws_instance.example', 'aws_eip.ip'], // optional resource targets 32 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 33 | varFile: '/path/to/variables.tf' // optional location of variables file 34 | ) 35 | ``` 36 | 37 | ### terraform.fmt() 38 | Uses Terraform to check for properly formatted code. 
Note that in Terraform 0.12.x the `recursive` option was added (Terraform < 0.12 automatically recursed through subdirectories). The `check` and `write` parameters are mutually exclusive, so only one of them may be enabled at a time. This method will return a `Boolean` type indicating whether the format check was successful (`true`) or not (`false`). Note that if `check` is `false` then the return will always be `true`. 39 | 40 | ```groovy 41 | terraform.fmt( 42 | bin: '/usr/bin/terraform', // optional path to terraform executable 43 | check: false, // optional check files within config dir and return an error if any files are not formatted correctly (cannot be used with `write`) 44 | diff: false, // optional present a diff if any files within config dir are not formatted correctly 45 | dir: env.WORKSPACE, // optional path to working config dir 46 | recursive: false, // optional check subdirectories of config dir recursively 47 | write: true // optional write changes directly to files that are not formatted correctly (cannot be used with `check`) 48 | ) 49 | ``` 50 | 51 | ### terraform.graph() 52 | Uses Terraform to produce a representation of the dependency graph between different objects in the current configuration and state. The resulting DOT graph is written as `graph.gv` (GraphViz extension) in the current working directory when this method is invoked. 53 | 54 | ```groovy 55 | terraform.graph( 56 | bin: '/usr/bin/terraform', // optional path to terraform executable 57 | dir: '/path/to/working_config_dir', // optional path to working config dir (mutually exclusive with plan) 58 | drawCycles: false, // optional highlight any cycles in the graph with colored edges 59 | plan: 'plan.tfplan', // optional path to plan file for rendering (mutually exclusive with dir) 60 | type: 'plan', // optional type of graph to output; valid arguments are plan, plan-refresh-only, plan-destroy, or apply (apply is default if plan parameter also specified) 61 | ) 62 | ``` 63 | 64 | ### terraform.imports() 65 | Imports existing infrastructure into your Terraform state. 66 | 67 | ```groovy 68 | terraform.imports( 69 | bin: '/usr/bin/terraform', // optional path to terraform executable 70 | dir: '/path/to/config', // optional path to terraform config for provider 71 | resources: ['resource.name':'resource.id', 'aws_instance.this':'i-1234567890'], // resource name and id mappings to import 72 | provider: 'template', // optional specific provider for import 73 | state: 'terraform.tfstate', // optional path to the source state file 74 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 75 | varFile: '/path/to/variables.tf' // optional location of variables file 76 | ) 77 | ``` 78 | 79 | ### terraform.init() 80 | Uses Terraform to initialize a working directory.
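For illustration, a minimal sketch of initializing the working directory at the start of a Pipeline stage; the backend config file path is an assumption, and the full parameter reference follows below.

```groovy
script {
  // initialize providers and backend for the workspace config directory
  terraform.init(
    dir: env.WORKSPACE,
    backendConfig: ['backend/prod.hcl'] // assumed backend config file
  )
}
```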
81 | 82 | ```groovy 83 | terraform.init( 84 | backend: true, // optional false to omit backend initialization 85 | backendConfig: ['/path/to/backend.hcl'], // optional paths to hcl files with backend configs 86 | backendKV: ['address':'demo.consul.io', 'scheme':'https'], // optional key-value pairs for backend settings 87 | bin: '/usr/bin/terraform', // optional path to terraform executable 88 | dir: env.WORKSPACE, // optional path to working config dir 89 | forceCopy: false, // optional suppress prompts about copying state data when initializing a new state backend 90 | migrateState: false, // optional reconfigure a backend and attempt to migrate any existing state 91 | pluginDir: '/path/to/plugin_dir', // optional path to (presumably shared) plugin/provider installation directory 92 | testDir: 'tests', // optional terraform test directory 93 | upgrade: false, // optional upgrade modules and plugins 94 | ) 95 | ``` 96 | 97 | ### terraform.output() 98 | Reads an output variable from a Terraform state and returns the value as a String. 99 | 100 | ```groovy 101 | terraform.output( 102 | bin: '/usr/bin/terraform', // optional path to terraform executable 103 | dir: env.WORKSPACE, // optional path to config dir 104 | display: false, // optional display outputs 105 | json: false, // optional String return in JSON format 106 | name: 'module.foo.server_ip_address', // optional output name 107 | raw: false, // optional output raw strings 108 | state: 'terraform.tfstate', // optional path to the source state file 109 | ) 110 | ``` 111 | 112 | ### terraform.parse(String template) 113 | Provides a thin wrapper around [HCL4j](https://github.com/bertramdev/hcl4j) for parsing a Terraform config and returning a `Map` representing the parsed HCL2. Note this requires local installation of the HCL4j dependency, and therefore the agent must have sufficient permissions to do so. 114 | 115 | ```groovy 116 | parsedMap = terraform.parse('/path/to/config.tf') 117 | ``` 118 | 119 | ### terraform.plan() 120 | Uses Terraform to generate an execution plan. The output plan file `plan.tfplan` (default) will be written to the same directory as the input config directory if the `out` parameter is not specified. Otherwise, the output plan file will be written to the filesystem at the path specified in the `out` parameter. Providing this plan file as an input to a subsequent `apply` or `destroy` in a Pipeline is recommended practice. The unencoded plan content will also be returned as a String.
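To illustrate the chaining described above, here is a minimal sketch that writes the plan to an explicit output file and then consumes it with a subsequent apply; the paths are assumptions, and the full parameter reference follows below.

```groovy
script {
  // write the execution plan to an explicit plan file
  terraform.plan(
    dir: env.WORKSPACE,
    out: "${env.WORKSPACE}/plan.tfplan"
  )

  // apply the previously generated plan file
  terraform.apply(configPath: "${env.WORKSPACE}/plan.tfplan")
}
```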
121 | 122 | ```groovy 123 | terraform.plan( 124 | bin: '/usr/bin/terraform', // optional path to terraform executable 125 | compactWarn: false, // optional warnings as compact summary messages 126 | destroy: false, // optional generate a plan to destroy resources 127 | dir: env.WORKSPACE, // optional path to config dir 128 | genConfig: 'config.tf', // optional hcl generation for import blocks (>= 1.5) 129 | out: 'plan.tfplan', // optional plan output file path (extension must be .tfplan) 130 | refreshOnly: false, // optional check if remote objects match outcome of most recent apply (>= 0.15) 131 | replace: ['aws_instance.example', 'aws_eip.ip'], // optional resources to unconditionally recreate in plan 132 | target: ['aws_instance.example', 'aws_eip.ip'], // optional resource targets 133 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 134 | varFile: '/path/to/variables.tf' // optional location of variables file 135 | ) 136 | ``` 137 | 138 | ### terraform.providers(String rootDir = env.WORKSPACE, String bin = 'terraform') 139 | Prints out a tree of modules in the referenced configuration annotated with their provider requirements. 140 | 141 | ```groovy 142 | terraform.providers('/path/to/root_module_dir') 143 | ``` 144 | 145 | ### terraform.refresh() 146 | Update the state file of your infrastructure with metadata that matches the physical resources they are tracking. 147 | 148 | ```groovy 149 | terraform.refresh( 150 | bin: '/usr/bin/terraform', // optional path to terraform executable 151 | compactWarn: false, // optional warnings as compact summary messages 152 | dir: env.WORKSPACE, // optional path to config dir 153 | target: ['aws_instance.example', 'aws_eip.ip'], // optional resource targets 154 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 155 | varFile: '/path/to/variables.tf' // optional location of variables file 156 | ) 157 | ``` 158 | 159 | ### terraform.state() 160 | Manipulate or display the Terraform state. The resources parameter should be `null` for a `push` or `list`, a list of strings for a `remove`, and a map of strings for a `move`. 161 | 162 | ```groovy 163 | terraform.state( 164 | bin: '/usr/bin/terraform', // optional path to terraform executable 165 | command: 'move', // state command; one of 'move', 'remove', 'list', 'show', 'pull', or 'push' 166 | dir: env.WORKSPACE, // optional path to config dir 167 | resources: ['resource.from':'resource.to', 'resource.other_from':'resource.other_to'], // resources to move 168 | resources: ['resource.one', 'resource.two'], // resources to remove or show 169 | state: 'terraform.tfstate' // optional path to read and save state 170 | ) 171 | ``` 172 | 173 | ### terraform.taint() 174 | Manually marks a resource as tainted. This forces a destroy and recreate on the next plan or apply. Note this is generally deprecated in favor of the `replace` parameter in the `plan` method. 175 | 176 | ```groovy 177 | terraform.taint( 178 | allowMissing: false, // optional succeed even if resource is missing 179 | bin: '/usr/bin/terraform', // optional path to terraform executable 180 | dir: env.WORKSPACE, // optional path to config dir 181 | resources: ['resource.name', 'other.name'], // names of the resources to taint 182 | state: 'terraform.tfstate' // optional path to read and save state 183 | ) 184 | ``` 185 | 186 | ### terraform.test() 187 | Uses Terraform to execute experimental automated integration testing of shared modules. The test output is also returned as a String (mostly useful with `json: true`). 
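As a usage illustration, a minimal sketch that runs the module tests and archives the captured JSON output; the file name is an assumption, and a version caveat plus the full parameter reference follow below.

```groovy
script {
  // run the module tests and capture machine-readable output
  final String testOutput = terraform.test(
    dir: env.WORKSPACE,
    json: true
  )

  // persist the captured output for later inspection (assumed file name)
  writeFile(file: 'terraform-test.json', text: testOutput)
  archiveArtifacts(artifacts: 'terraform-test.json')
}
```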
**This method usage will correlate to a recent version of Terraform as this subcommand changes greatly between versions.** 188 | 189 | ```groovy 190 | terraform.test( 191 | bin: '/usr/bin/terraform', // optional path to terraform executable 192 | cloudRun: 'app.terraform.io/:ORG/:MODULE_NAME/:PROVIDER', // optional source of a private module in a registry to execute tests remotely against via terraform cloud 193 | dir: env.WORKSPACE, // optional path to config dir 194 | filter: ['machine.tf', 'network.tf'], // optional list of test files to execute 195 | json: false, // optional produce output in a machine-readable JSON format 196 | testDir: 'tests', // optional terraform test directory 197 | var: ['foo':'bar', 'bar':'baz'], // optional variable setting 198 | varFile: '/path/to/variables.tf' // optional location of variables file 199 | verbose: false, // optional print plan or state for each test as it executes 200 | ) 201 | ``` 202 | 203 | ### terraform.validate() 204 | Uses Terraform to validate a config directory. The validation output is also returned as a String (mostly useful with `json: true`). **This subcommand's usage varies greatly between different versions of Terraform, and therefore not all parameters may be supported in your utilized version.** 205 | 206 | ```groovy 207 | terraform.validate( 208 | bin: '/usr/bin/terraform', // optional path to terraform executable 209 | dir: env.WORKSPACE, // optional path to config dir 210 | json: false, // optional produce output in a machine-readable JSON format 211 | testDir: 'tests', // optional terraform test directory (ignored if tests parameter is false) 212 | tests: true, // optional validate test files 213 | ) 214 | ``` 215 | 216 | ### terraform.workspace() 217 | **`create` requires version >= 1.4** 218 | 219 | Selects the Terraform workspace for a config directory. Ideally executed in Pipeline before other Terraform blocks. 220 | 221 | ```groovy 222 | terraform.workspace( 223 | bin: '/usr/bin', // optional location of terraform install 224 | create: false, // optionally create the workspace if it does not exist 225 | dir: env.WORKSPACE, // optional location of terraform config directory 226 | workspace: 'default' // terraform workspace to select 227 | ) 228 | ``` 229 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ### 2.2.2 (Next) 2 | **General** 3 | - Improve reliability of tool command assignments. 4 | - improve YAML and JSON file validation. 5 | - Improve serialization of utility methods. 6 | - Promote `pipeline-utility-steps` plugin to global dependency. 7 | 8 | **FaaS** 9 | - Add `stategy` parameter to `deploy` method. 10 | 11 | **GoSS** 12 | - Fix variable file validation. 13 | 14 | **Packer** 15 | - Fix return checks for `fmt` method. 16 | 17 | **Puppet** 18 | - Fix request in `token` method. 19 | - Update request bodies to match updated API. 20 | - Minor fixes to logging. 21 | 22 | ### 2.2.1 23 | **General** 24 | - Improve label descriptions for `sh` steps. 25 | - Miscellanous improvements from code linting. 26 | 27 | **AWX** 28 | - Fix `extraVars` parameter usage in template launch methods. 29 | - Fix `kind` parameter validation in inventory methods. 30 | - Fix `instance_id` parameter usage in `hostCreate` method. 31 | 32 | **FaaS** 33 | - Fix function file validation. 34 | - Fix `format` and `since` parameter usage in `logs` method. 35 | 36 | **GoSS** 37 | - Fix `gossfile` validation. 
38 | - Fix `logLevel` and `format` parameter validation. 39 | 40 | **Helm** 41 | - Fix input parameter type assertions. 42 | - Fix input parameter command assignments. 43 | - Fix yaml file validations. 44 | 45 | **Puppet** 46 | - Fix reponse and server name handling in `task` method. 47 | 48 | **Terraform** 49 | - Fix check on return from `fmt`. 50 | - Fix config dir setting for `fmt` and `workspace`. 51 | - Fix `resources` assert for `import`. 52 | - Fix error throw for `fmt`. 53 | 54 | ### 2.2.0 55 | **General** 56 | - Miscellanous improvements and fixes from code linting. 57 | 58 | **FaaS** 59 | - Increase minimum supported version to 0.17. 60 | - Return `Boolean` for `validateTemplate`. 61 | - Add `list` and `logs` methods. 62 | 63 | **GoSS** 64 | - Increase minimum supported version to 0.4. 65 | - Return `Boolean` for `validate` and `validateGossfile`. 66 | 67 | **Helm** 68 | - Always return value for `history` method. 69 | - Improve `verify` method return logic. 70 | - Improve `lint` and return `Boolean`. 71 | 72 | **Packer** 73 | - Improve `validate` and return `Boolean`. 74 | - Return `Boolean` for `fmt`. 75 | 76 | **Puppet** 77 | - Add `port` parameter to all methods. 78 | - Remove mandatory `https://` URI from API endpoint URLs. 79 | 80 | **Terraform** 81 | - Always return value for `plan`, `test`, and `validate` methods. 82 | - Return `Boolean` for `fmt`. 83 | - Add `compactWarn` parameter to `apply` and `destroy` methods. 84 | 85 | ### 2.1.3 86 | **AWX** 87 | - Add `instanceId` parameter to `hostCreate` method. 88 | - Update `smart` parameter to `kind` for inventory methods. 89 | - Fix `inventoryModify` method by adding mandatory `id` parameter. 90 | - Add `credentials` and `executionEnv` parameters to `jobTemplateLaunch` method. 91 | - Add `limit` and `skipTags` parameters to `workflowJobTemplateLaunch` method. 92 | 93 | **FaaS** 94 | - Add `pull` and `tag` parameters to `build` method. 95 | - Add `gateway` and `namespace` parameters to `deploy`, `invoke`, and `remove` methods. 96 | - `gateway` and `user` parameters no longer mandatory for `login` method. 97 | - Add `tls` parameter to `deploy` and `remove` methods. 98 | 99 | **GoSS** 100 | - Add `logLevel` parameter to `server` and `validate` methods. 101 | - Remove deprecated `format` values for `validate` method. 102 | - Add `sort` value for `formatOpts` parameter for `validate` method. 103 | 104 | **Packer** 105 | - Add `recursive` parameter to `fmt` method. 106 | 107 | **Terraform** 108 | - Add `testDir` parameter to `init` method. 109 | - Add `raw` parameter to `output` method. 110 | - Add `compactWarn` parameter to `plan` and `refresh` methods. 111 | - Add `rootDir` parameter default value for `providers` method. 112 | - Fix `rootDir` existence check in `providers` method. 113 | - Add `show` and `pull` commands to `state` method. 114 | - Add `allowMissing` parameter to `taint` method. 115 | 116 | **Puppet** 117 | - Miscellaneous minor improvements and fixes. 118 | 119 | ### 2.1.2 120 | **Helm** 121 | - Add `force` parameter to `install`, `repo`, `rollback`, and `upgrade` methods. 122 | - Add `withSubcharts` parameter to `lint` method. 123 | - Add `hooks` and `recreatePods` parameters to `rollback` method. 124 | - Add `description`, `outputFormat`, `resources`, and `revision` parameters to `status` method. 125 | - Remove `cleanup` and `parallel` parameters from `test` method. 126 | - Support `crds` for `show` method. 127 | 128 | **Packer** 129 | - Add `pluginsInstall` and `pluginsRemove` methods. 
130 | 131 | **Terraform** 132 | - Cast resource namespace as literal string for `imports` method. 133 | - Increase minimum supported version to 1.0. 134 | - Always return outputs as String for `output` method. 135 | - Fix `chdir` argument position in `graph` method. 136 | - Add `testDir` and `tests` parameters to `validate` method. 137 | 138 | ### 2.1.1 139 | **Packer** 140 | - Add `parse` method. 141 | - Add `except` parameter to `build` and `validate` methods. 142 | 143 | **Terraform** 144 | - Add `parse` method. 145 | - Update `test` method usage for 1.8. 146 | - Add `init` support for backend setting with key-value pairs. 147 | 148 | ### 2.1.0 149 | **Helm** 150 | - Rename `registryLogin` method to camelCase. 151 | - Add `history` and `push` methods. 152 | 153 | **Packer** 154 | - Increase minimum supported version to 1.7. 155 | - Support complex type variable input values. 156 | - Add `syntaxOnly` parameter to `validate` method. 157 | 158 | **Terraform** 159 | - Add `create` parameter to `workspace` method. 160 | - Add `genConfig` parameter to `plan` method. 161 | - Add `refresh` method. 162 | - Support complex type variable input values. 163 | 164 | ### 2.0.2 165 | **General** 166 | - More type checking for required parameters. 167 | 168 | **GoSS** 169 | - Fix flag and argument ordering. 170 | - Add `package` and `varsInline` parameters to appropriate methods. 171 | - Add `vars` parameter to `render` method. 172 | - Add `formatOpts` and `maxConcur` parameters to `server` and `validate` methods. 173 | - Add `cache` parameter to `server` method. 174 | - Add `sleep` and `retry-timeout` parameters to `validate` method. 175 | 176 | **Helm** 177 | - Add `atomic`, `version`, `devel`, and `createNS` parameters to appropriate methods. 178 | - Add `registry_login` method. 179 | - Convert `version` parameter to optional in `rollback` method. 180 | 181 | **Packer** 182 | - Revert code to Groovy 2.4 compatible. 183 | - Add `evalData` and `warnUndeclVar` params to `validate` method. 184 | 185 | **Terraform** 186 | - Add `graph` and `test` methods. 187 | - Default to `cwd` for config directories where possible. 188 | - Add `dir` param to methods where absent. 189 | 190 | ### 2.0.1 191 | **General** 192 | - Improve `sh` step labels for iterative invocations. 193 | - Fix syntax error in `makeDirParents` library method. 194 | - Check if directory exists on build agent in `makeDirParents`. 195 | 196 | **GoSS** 197 | - Validate YAML file parameter values. 198 | - Add `render` method. 199 | 200 | **Helm** 201 | - Validate YAML file parameter values. 202 | - Add `verify` method. 203 | - Add `wait` parameter to appropriate methods. 204 | 205 | **OpenFaaS** 206 | - Validate template file parameter values. 207 | 208 | **Packer** 209 | - Change into directory when `template` parameter is directory of templates/configs. 210 | - Add `plugins` method. 211 | 212 | **Puppet** 213 | - Further validate RBAC token file parameter values. 214 | 215 | **Terraform** 216 | - Add `migrateState` and `forceCopy` params to `init` method. 217 | - Add `providers` method. 218 | 219 | ### 2.0.0 220 | **General** 221 | - Global variable methods and associated parameters converted to camelCase. 222 | - Remove support for old DSL/closure type inputs to global variable methods. 223 | - Remove global variable methods for software installation and configuration. 224 | 225 | **Helm** 226 | - Add `dryRun` parameter to `install` and `upgrade` methods. 227 | - Add `show` method. 
228 | 229 | **Terraform** 230 | - Add `out` and `replace` parameters to `plan` method. 231 | - Fix `json` parameter for `validate` method. 232 | - Add `return` parameter to `validate` method. 233 | - Fix `resources` parameter for `imports` method. 234 | - Fix return type for `validate` method. 235 | 236 | ### 1.6.2 237 | **General** 238 | - Replace `File.mkdir()` class method with `dir` step method (valid also on agents). 239 | - Add `makeDirParents` method to attempt to fix missing user directories. 240 | - Reorganize try/catch blocks to be more precise. 241 | 242 | **AWX** 243 | - Finish initial implementation and promote to beta. 244 | 245 | **Packer** 246 | - Add `init` method. 247 | - Fix `var_file` parameter in applicable methods. 248 | 249 | **Puppet** 250 | - Fix `credentials_id` usage. 251 | - Update `token` param to `tokenFile`. 252 | - Fix syntax errors. 253 | 254 | **Terraform** 255 | - Change working directory to config directory before command execution. 256 | 257 | ### 1.6.1 258 | **General** 259 | - Deprecate software `install` methods. 260 | - Fine tune nested type specifications. 261 | 262 | **AWX** 263 | - Initialize as alpha. 264 | 265 | **Helm** 266 | - Add `namespace` parameter to `test` method. 267 | 268 | **Puppet** 269 | - Enable `withCredentials` bindings for PE token for methods. 270 | 271 | **Terraform** 272 | - List workspace information when selection fails. 273 | - Add `list` option to `command` parameter for `state` method. 274 | - Add `refreshOnly` parameter to `plan` method. 275 | - Add `display` parameter to `output` method. 276 | - Add `backendConfig` parameter to `init` method. 277 | - Fix `var_file` parameter in all relevant methods. 278 | - Ignore useless parameters for `apply` and `destroy` methods when `config_path` value is a plan file. 279 | 280 | ### 1.6.0 281 | **Helm** 282 | - Drop support for versions < 3.0. 283 | - Add `plugin` and `status` methods. 284 | - Add `namespace` parameter to `uninstall` and `rollback` methods. 285 | 286 | **Packer** 287 | - Drop support for versions < 1.5. 288 | - Add `fmt` method. 289 | - Add `force` and `on_error` parameters to `build` method. 290 | 291 | **Terraform** 292 | - Drop support for versions < 0.12. 293 | - Update validate method for >= 0.12 only. 294 | - Add `output` method. 295 | - Prevent `fmt` parameter incompatibility with `check` and `write`. 296 | - Rename `state` method `cmd` parameter to `command`. 297 | 298 | **Puppet** 299 | - Convert REST API requests to utilize `http_request` plugin. 300 | 301 | ### 1.5.0 302 | **GoSS** 303 | - Convert `flags` parameter in `validate_docker` from `list` to `map` type. 304 | 305 | **Helm** 306 | - Add `uninstall` method alias. 307 | - Add `repo` method. 308 | - Prevent initialization for versions >= 3. 309 | - Require `name` parameter for `install` method. 310 | - Convert `set` parameter in `install`, `lint`, and `upgrade` methods from `list` to `map` type. 311 | 312 | **OpenFaaS** 313 | - Convert `query` and `header` parameters in `invoke`, and `label` in `deploy` from `list` to `map` type. 314 | 315 | **Packer** 316 | - Convert `var` parameters from `list` to `map` type. 317 | 318 | **Terraform** 319 | - Add `backend` parameter to `init` method. 320 | - Convert `var` parameters from `list` to `map` type. 321 | - Convert `resources` parameter for `move` value for `command` parameter in `state` method from `list>` to `map` type. 322 | - Fix `state` method incorrect `sh` step method prefix. 
323 | 324 | ### 1.4.1 325 | **General** 326 | - Fine tune type specifications, especially in global var method returns. 327 | 328 | **GoSS** 329 | - Fix `serve` port specification. 330 | 331 | **Helm** 332 | - Add `install` parameter to `upgrade` method. 333 | 334 | **Terraform** 335 | - Fix syntax error in `fmt` method. 336 | 337 | ### 1.4.0 338 | **General** 339 | - Support new and old Pipeline DSL. 340 | - Add labels to shell methods for clarity. 341 | 342 | **Helm** 343 | - Enable native logging for `test` method if available. 344 | 345 | **Terraform** 346 | - Update `taint` method for new 0.12 usage. 347 | - Add `return` parameter to `plan` method. 348 | - Promote `fmt` method to supported. 349 | 350 | ### 1.3.1 351 | **General** 352 | - Add type specification checks. 353 | - Fixed explicit List type check on params. 354 | 355 | **GoSS** 356 | - Fix port default value in `server` method. 357 | 358 | **Helm** 359 | - Rename `package` method to `packages` to avoid reserved name collision. 360 | 361 | **Packer** 362 | - Fix bugs in methods. 363 | 364 | **Terraform** 365 | - Add `fmt` method as beta. 366 | - Add `target` and `display` parameters to `plan` method. 367 | - Rename `import` method to `imports` to avoid reserved name collision. 368 | - Fix bugs in methods. 369 | 370 | ### 1.3.0 371 | **Helm** 372 | - Allow values override to also be a URL. 373 | - Add `keyring` param to `package` method. 374 | - Add `kubectl` method. 375 | 376 | **Packer** 377 | - Add `inspect` method. 378 | - Change `only` parameter input type. 379 | 380 | **Puppet** 381 | - Add `token` method. 382 | 383 | **Terraform** 384 | - Add `taint`, `state`, and `import` methods. 385 | - Add `check_vars` parameter to `validate` method. 386 | - Add `destroy` parameter to `plan` method. 387 | - Handle 0.12 changes to `validate` method. 388 | - Checks for required parameters to methods. 389 | - Change `dir` to `config_path` param for `destroy` method, and also allow for plan file arguments. 390 | 391 | ### 1.2.1 392 | **General** 393 | - Fix `mapToJSON` common method and update per documented example. 394 | - Various fixes, cleanup, and optimization. 395 | 396 | **GoSS** 397 | - Fix flag setting and usage in `validate_docker`. 398 | 399 | **OpenFaaS** 400 | - Add necessary `function` and optional `tls` params to `invoke` method. 401 | 402 | ### 1.2.0 403 | **Helm** 404 | - Add chart provenance verification to applicable methods. 405 | - Add `lint`, `package`, and `test` methods. 406 | - `values` param is now an array of strings. 407 | 408 | **OpenFaaS** 409 | - Added `invoke`, `login`, `push`, and `remove` methods. 410 | - Fixed `deploy` and `build` invalid usage issue. 411 | 412 | **Puppet** 413 | - Fix `scope` param for `.task`. 414 | 415 | **Terraform** 416 | - Changed `init` usage to block DSL and added `plugin_dir` and `upgrade` params. 417 | - Modified `plugin_install` usage to block DSL and added `install_loc` param. 418 | - `env.TF_IN_AUTOMATION` added to methods. 419 | 420 | ### 1.1.0 421 | **Helm** 422 | - Added `context` param to applicable methods. 423 | - Enabled multiple `set` parameter values for applicable methods. 424 | - Changed `delete` usage to block DSL. 425 | 426 | **Packer** 427 | - Added `plugin_install` method. 428 | 429 | **Puppet** 430 | - Added `code_deploy` and `task` methods. 431 | 432 | **Terraform** 433 | - Changed `apply`, `destroy`, `plan`, and `validate` usage to block DSL. 434 | - Added `var_file`, `var`, and `target` params to applicable methods. 435 | - Added `plugin_install` method. 
436 | 437 | ### 1.0.0 438 | Initial release. 439 | -------------------------------------------------------------------------------- /vars/helm.groovy: -------------------------------------------------------------------------------- 1 | //vars/helm.groovy 2 | import devops.common.utils 3 | 4 | String history(Map config) { 5 | // input checking 6 | assert config.name : 'The required parameter "name" was not set.' 7 | config.bin = config.bin ?: 'helm' 8 | 9 | List cmd = [config.bin, 'history'] 10 | 11 | // check for optional inputs 12 | if (config.max) { 13 | cmd.addAll(['--max', config.max]) 14 | } 15 | if (config.outputFormat) { 16 | assert (['table', 'json', 'yaml'].contains(config.outputFormat)) : 'The outputFormat parameter must be one of table, json, or yaml' 17 | 18 | cmd.addAll(['-o', config.outputFormat]) 19 | } 20 | if (config.context) { 21 | cmd.addAll(['--kube-context', config.context]) 22 | } 23 | if (config.namespace) { 24 | cmd.addAll(['--namespace', config.namespace]) 25 | } 26 | 27 | // gather release revision history with helm 28 | try { 29 | cmd.add(config.name) 30 | final String historyOutput = sh(label: "Helm History ${config.name}", script: cmd.join(' '), returnStdout: true) 31 | 32 | print 'Helm history executed successfully.' 33 | 34 | return historyOutput 35 | } 36 | catch (hudson.AbortException error) { 37 | print 'Failure using helm history.' 38 | throw error 39 | } 40 | } 41 | 42 | void install(Map config) { 43 | // input checking 44 | assert config.name : 'The required parameter "name" was not set.' 45 | assert config.chart : 'The required parameter "chart" was not set.' 46 | if (config.version && config.devel) { 47 | error(message: "The 'version' and 'devel' parameters for helm.install are mutually exclusive; only one can be specified.") 48 | } 49 | config.bin = config.bin ?: 'helm' 50 | 51 | List cmd = [config.bin, 'install'] 52 | List lister = [config.bin, 'list'] 53 | 54 | // check for optional inputs 55 | if (config.values) { 56 | assert (config.values in List) : 'The values parameter must be a list of strings.' 57 | 58 | config.values.each { String value -> 59 | if (!(value ==~ /:\/\//)) { 60 | assert new helpers().validateYamlFile(value, 'value overrides file') 61 | } 62 | 63 | cmd.addAll(['-f', value]) 64 | } 65 | } 66 | if (config.set) { 67 | assert (config.set in Map) : 'The set parameter must be a Map.' 
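// expand each entry of the set map into a --set key=value argument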
68 | 69 | config.set.each { String var, String value -> 70 | cmd.addAll(['--set', "${var}=${value}"]) 71 | } 72 | } 73 | if (config.version) { 74 | cmd.addAll(['--version', config.version]) 75 | } 76 | else if (config.devel == true) { 77 | cmd.add('--devel') 78 | } 79 | if (config.dryRun == true) { 80 | cmd.add('--dry-run') 81 | } 82 | if (config.force == true) { 83 | cmd.add('--force') 84 | } 85 | if (config.context) { 86 | cmd.addAll(['--kube-context', config.context]) 87 | lister.addAll(['--kube-context', config.context]) 88 | } 89 | if (config.namespace) { 90 | cmd.addAll(['--namespace', config.namespace]) 91 | lister.addAll(['--namespace', config.namespace]) 92 | } 93 | if (config.createNS == true) { 94 | cmd.add('--create-namespace') 95 | } 96 | if (config.verify == true) { 97 | cmd.add('--verify') 98 | } 99 | if (config.atomic == true) { 100 | cmd.add('--atomic') 101 | } 102 | else if (config.wait == true) { 103 | cmd.add('--wait') 104 | } 105 | 106 | // check release object 107 | final String releaseObjList = sh(label: 'List Release Objects', returnStdout: true, script: lister.join(' ')).trim() 108 | if (releaseObjList =~ config.name) { 109 | error(message: "Release object ${config.name} already exists!") 110 | } 111 | 112 | // install with helm 113 | try { 114 | cmd.addAll([config.name, config.chart]) 115 | sh(label: "Helm Install ${config.name}", script: cmd.join(' ')) 116 | } 117 | catch (hudson.AbortException error) { 118 | print 'Failure using helm install.' 119 | throw error 120 | } 121 | print 'Helm install executed successfully.' 122 | } 123 | 124 | void kubectl(String version, String installPath = '/usr/bin/') { 125 | new utils().makeDirParents(installPath) 126 | 127 | // check if current version already installed 128 | if (fileExists("${installPath}/kubectl")) { 129 | final String installedVersion = sh(label: 'Check Kubectl Version', returnStdout: true, script: "${installPath}/kubectl version").trim() 130 | if (installedVersion =~ version) { 131 | print "Kubectl version ${version} already installed at ${installPath}." 132 | return 133 | } 134 | } 135 | // otherwise download specified version 136 | new utils().downloadFile("https://storage.googleapis.com/kubernetes-release/release/v${version}/bin/linux/amd64/kubectl", "${installPath}/kubectl") 137 | sh(label: 'Kubectl Executable Permissions', script: "chmod ug+rx ${installPath}/kubectl") 138 | print "Kubectl successfully installed at ${installPath}/kubectl." 139 | } 140 | 141 | Boolean lint(Map config) { 142 | // input checking 143 | config.bin = config.bin ?: 'helm' 144 | assert config.chart : 'The required parameter "chart" was not set.' 145 | 146 | List cmd = [config.bin, 'lint'] 147 | 148 | // check for optional inputs 149 | if (config.values) { 150 | assert (config.values in List) : 'The values parameter must be a list of strings.' 151 | 152 | config.values.each { String value -> 153 | if (!(value ==~ /:\/\//)) { 154 | assert new helpers().validateYamlFile(value, 'value overrides file') 155 | } 156 | 157 | cmd.addAll(['-f', value]) 158 | } 159 | } 160 | if (config.set) { 161 | assert (config.set in Map) : 'The set parameter must be a Map.' 
162 | 163 | config.set.each { String var, String value -> 164 | cmd.addAll(['--set', "${var}=${value}"]) 165 | } 166 | } 167 | if (config.context) { 168 | cmd.addAll(['--kube-context', config.context]) 169 | } 170 | if (config.namespace) { 171 | cmd.addAll(['--namespace', config.namespace]) 172 | } 173 | if (config.strict == true) { 174 | cmd.add('--strict') 175 | } 176 | 177 | // lint with helm 178 | cmd.add(config.chart) 179 | final int returnCode = sh(label: "Helm Lint ${config.chart}", script: cmd.join(' '), returnStatus: true) 180 | 181 | // return by code 182 | if (returnCode == 0) { 183 | print 'The chart successfully linted.' 184 | return true 185 | } 186 | else if (returnCode == 1) { 187 | print 'The chart failed linting.' 188 | return false 189 | } 190 | 191 | print 'Failure using helm lint.' 192 | error(message: 'Helm lint failed unexpectedly') 193 | } 194 | 195 | void packages(Map config) { 196 | // input checking 197 | config.bin = config.bin ?: 'helm' 198 | assert config.chart : 'The required parameter "chart" was not set.' 199 | assert new helpers().validateYamlFile("${config.chart}/Chart.yaml", 'chart') 200 | if (config.key && config.keyring) { 201 | error(message: "The 'key' and 'keyring' parameters for helm.packages are mutually exclusive; only one can be specified.") 202 | } 203 | 204 | List cmd = [config.bin, 'package'] 205 | 206 | // check for optional inputs 207 | if (config.dest) { 208 | new utils().makeDirParents(config.dest) 209 | 210 | cmd.addAll(['-d', config.dest]) 211 | } 212 | if (config.key) { 213 | cmd.addAll(['--sign', '--key', config.key]) 214 | } 215 | else if (config.keyring) { 216 | assert fileExists(config.keyring) : "The keyring ${config.keyring} does not exist." 217 | 218 | cmd.addAll(['--sign', '--keyring', config.keyring]) 219 | } 220 | if (config.updateDeps == true) { 221 | cmd.add('-u') 222 | } 223 | if (config.version) { 224 | cmd.addAll(['--version', config.version]) 225 | } 226 | 227 | // package with helm 228 | try { 229 | cmd.add(config.chart) 230 | sh(label: "Helm Package ${config.chart}", script: cmd.join(' ')) 231 | } 232 | catch (hudson.AbortException error) { 233 | print 'Failure using helm package.' 234 | throw error 235 | } 236 | print 'Helm package command was successful.' 237 | } 238 | 239 | void plugin(Map config) { 240 | // input checking 241 | assert (['install', 'list', 'uninstall', 'update'].contains(config.command)) : 'The argument must be one of: install, list, uninstall, or update.' 242 | assert (config.plugin) && (config.command != 'list') : 'The required parameter "plugin" was not set for a non-list command.' 243 | config.bin = config.bin ?: 'helm' 244 | 245 | List cmd = [config.bin, 'plugin', config.command] 246 | 247 | // append plugin to cmd if not list command 248 | if (config.command != 'list') { 249 | cmd.add(config.plugin) 250 | } 251 | 252 | // manage a helm plugin 253 | try { 254 | sh(label: 'Helm Plugin', script: cmd.join(' ')) 255 | } 256 | catch (hudson.AbortException error) { 257 | print "Failure using helm plugin ${config.command}." 258 | throw error 259 | } 260 | print "Helm plugin ${config.command} executed successfully." 261 | } 262 | 263 | void push(Map config) { 264 | // input checking 265 | assert config.chart : 'The required parameter "chart" was not set.' 266 | assert fileExists(config.chart) : "The chart does not exist at ${config.chart}." 267 | assert config.remote : 'The required parameter "remote" was not set.' 
268 | config.bin = config.bin ?: 'helm' 269 | 270 | List cmd = [config.bin, 'push'] 271 | 272 | // optional inputs 273 | if (config.insecure == true) { 274 | cmd.add('--insecure-skip-tls-verify') 275 | } 276 | 277 | // push helm chart to remote registry 278 | try { 279 | cmd.addAll([config.chart, config.remote]) 280 | sh(label: "Helm Push ${config.chart}", script: cmd.join(' ')) 281 | } 282 | catch (hudson.AbortException error) { 283 | print 'Failure using helm push' 284 | throw error 285 | } 286 | print 'Helm push executed successfully' 287 | } 288 | 289 | void registryLogin(Map config) { 290 | // input checking 291 | assert config.host : 'The required parameter "host" was not set.' 292 | assert config.password : 'The required parameter "password" was not set.' 293 | assert config.username : 'The required parameter "username" was not set.' 294 | config.bin = config.bin ?: 'helm' 295 | 296 | List cmd = [config.bin, 'registry', 'login', '--username', config.username, '--password', config.password] 297 | 298 | // optional inputs 299 | if (config.insecure == true) { 300 | cmd.add('--insecure') 301 | } 302 | 303 | // login to a helm registry 304 | try { 305 | cmd.add(config.host) 306 | sh(label: "Helm Registry Login ${config.host}", script: cmd.join(' ')) 307 | } 308 | catch (hudson.AbortException error) { 309 | print 'Failure using helm registry login.' 310 | throw error 311 | } 312 | print 'Helm registry login executed successfully.' 313 | } 314 | 315 | void repo(Map config) { 316 | // input checking 317 | assert config.repo : 'The required parameter "repo" was not set.' 318 | assert config.url : 'The required parameter "url" was not set.' 319 | config.bin = config.bin ?: 'helm' 320 | 321 | List cmd = [config.bin, 'repo', 'add'] 322 | 323 | // optional inputs 324 | if (config.insecure == true) { 325 | cmd.add('--insecure-skip-tls-verify') 326 | } 327 | else if ((config.ca) && (config.cert) && (config.key)) { 328 | cmd.addAll(['--ca-file', config.ca, '--cert-file', config.cert, '--key-file', config.key]) 329 | } 330 | if (config.force == true) { 331 | cmd.add('--force-update') 332 | } 333 | if ((config.user) && (config.password)) { 334 | cmd.addAll(['--username', config.user, '--password', config.password]) 335 | } 336 | 337 | // add a repo with helm 338 | try { 339 | cmd.addAll([config.repo, config.url]) 340 | sh(label: "Helm Repo Add ${config.repo}", script: cmd.join(' ')) 341 | } 342 | catch (hudson.AbortException error) { 343 | print 'Failure using helm repo add.' 344 | throw error 345 | } 346 | print 'Helm repo add executed successfully.' 347 | 348 | // update the repo 349 | try { 350 | cmd.add(config.repo) 351 | sh(label: "Helm Repo Update ${config.repo}", script: cmd.join(' ').replaceFirst('add', 'update')) 352 | } 353 | catch (hudson.AbortException error) { 354 | print 'Failure using helm repo update.' 355 | throw error 356 | } 357 | print 'Helm repo update executed successfully.' 358 | } 359 | 360 | void rollback(Map config) { 361 | // input checking 362 | assert config.name : "The required parameter 'name' was not set." 
363 | config.bin = config.bin ?: 'helm' 364 | 365 | List cmd = [config.bin, 'rollback'] 366 | List lister = [config.bin, 'list'] 367 | 368 | // optional inputs also applicable to lister 369 | if (config.context) { 370 | cmd.addAll(['--kube-context', config.context]) 371 | lister.addAll(['--kube-context', config.context]) 372 | } 373 | if (config.namespace) { 374 | cmd.addAll(['--namespace', config.namespace]) 375 | lister.addAll(['--namespace', config.namespace]) 376 | } 377 | 378 | // check release object 379 | final String releaseObjList = sh(label: 'List Release Objects', returnStdout: true, script: lister.join(' ')).trim() 380 | assert releaseObjList =~ config.name : "Release object ${config.name} does not exist!" 381 | 382 | // optional inputs 383 | if (config.force == true) { 384 | cmd.add('--force') 385 | } 386 | if (config.hooks == false) { 387 | cmd.add('--no-hooks') 388 | } 389 | if (config.recreatePods == true) { 390 | cmd.add('--recreate-pods') 391 | } 392 | 393 | // append rollback version if specified 394 | if (config.version) { 395 | cmd.addAll([config.name, config.version]) 396 | } 397 | else { 398 | cmd.add(config.name) 399 | } 400 | 401 | // rollback with helm 402 | try { 403 | sh(label: "Helm Rollback ${config.name}", script: cmd.join(' ')) 404 | } 405 | catch (hudson.AbortException error) { 406 | print 'Failure using helm rollback.' 407 | throw error 408 | } 409 | print 'Helm rollback command was successful.' 410 | } 411 | 412 | void setup(String version, String installPath = '/usr/bin/') { 413 | new utils().makeDirParents(installPath) 414 | 415 | // check if current version already installed 416 | if (fileExists("${installPath}/helm")) { 417 | final String installedVersion = sh(label: 'Check Helm Version', returnStdout: true, script: "${installPath}/helm version").trim() 418 | if (installedVersion =~ version) { 419 | print "Helm version ${version} already installed at ${installPath}." 420 | } 421 | } 422 | // otherwise download and untar specified version 423 | else { 424 | new utils().downloadFile("https://storage.googleapis.com/kubernetes-helm/helm-v${version}-linux-amd64.tar.gz", '/tmp/helm.tar.gz') 425 | sh(label: 'Untar Helm CLI', script: "tar -xzf /tmp/helm.tar.gz -C ${installPath} --strip-components 1 linux-amd64/helm") 426 | new utils().removeFile('/tmp/helm.tar.gz') 427 | print "Helm successfully installed at ${installPath}/helm." 428 | } 429 | } 430 | 431 | void show(Map config) { 432 | // input checking 433 | config.bin = config.bin ?: 'helm' 434 | assert config.chart : 'The required parameter "chart" was not set.' 435 | assert (['all', 'chart', 'crds', 'readme', 'values']).contains(config.info) : 'The info parameter must be one of all, chart, crds, readme, or values.' 436 | 437 | // show chart info 438 | try { 439 | sh(label: "Helm Show ${config.chart}", script: "${config.bin} ${config.info} ${config.chart}") 440 | } 441 | catch (hudson.AbortException error) { 442 | print 'Failure using helm show.' 443 | throw error 444 | } 445 | print 'Helm show executed successfully.' 446 | } 447 | 448 | String status(Map config) { 449 | // input checking 450 | config.bin = config.bin ?: 'helm' 451 | assert config.name : 'The required parameter "name" was not set.' 
452 | 453 | List cmd = [config.bin, 'status'] 454 | List lister = [config.bin, 'list'] 455 | 456 | // check for optional inputs 457 | if (config.context) { 458 | cmd.addAll(['--kube-context', config.context]) 459 | lister.addAll(['--kube-context', config.context]) 460 | } 461 | if (config.description) { 462 | cmd.add('--show-desc') 463 | } 464 | if (config.namespace) { 465 | cmd.addAll(['--namespace', config.namespace]) 466 | lister.addAll(['--namespace', config.namespace]) 467 | } 468 | if (config.outputFormat) { 469 | assert (['table', 'json', 'yaml'].contains(config.outputFormat)) : 'The outputFormat parameter must be one of table, json, or yaml' 470 | 471 | cmd.addAll(['-o', config.outputFormat]) 472 | } 473 | if (config.resources) { 474 | cmd.add('--show-resources') 475 | } 476 | if (config.revision) { 477 | cmd.addAll(['--revision', config.revision]) 478 | } 479 | 480 | // check release object 481 | final String releaseObjList = sh(label: 'List Release Objects', returnStdout: true, script: lister.join(' ')).trim() 482 | assert (releaseObjList =~ config.name) : "Release object ${config.name} does not exist!" 483 | 484 | // attempt to query a release object's status 485 | try { 486 | cmd.add(config.name) 487 | String status = sh(label: "Helm Status ${config.name}", script: cmd.join(' '), returnStdout: true) 488 | 489 | print 'Helm status executed successfully.' 490 | 491 | return status 492 | } 493 | catch (hudson.AbortException error) { 494 | print 'Failure using helm status.' 495 | throw error 496 | } 497 | } 498 | 499 | void test(Map config) { 500 | // input checking 501 | config.bin = config.bin ?: 'helm' 502 | assert config.name : 'The required parameter "name" was not set.' 503 | 504 | List cmd = [config.bin, 'test'] 505 | 506 | // check if helm test has logging functionality (deprecated in 3, but interesting code to retain) 507 | final String logs = sh(label: 'Check Helm Usage', returnStdout: true, script: "${config.bin} test --help") ==~ /--logs/ 508 | if (logs) { 509 | cmd.add('--logs') 510 | } 511 | 512 | // optional inputs 513 | if (config.context) { 514 | cmd.addAll(['--kube-context', config.context]) 515 | } 516 | if (config.namespace) { 517 | cmd.addAll(['--namespace', config.namespace]) 518 | } 519 | 520 | // test with helm 521 | try { 522 | cmd.add(config.name) 523 | sh(label: "Helm Test ${config.name}", script: cmd.join(' ')) 524 | } 525 | catch (hudson.AbortException error) { 526 | // no longer relevant as of version 1.6.0, but still interesting code 527 | if (!(logs)) { 528 | print 'Release failed helm test. kubectl will now access the logs of the test pods and display them for debugging (unless using cleanup param).' 529 | 530 | if (config.cleanup == true) { 531 | print 'Pods have already been cleaned up and are no longer accessible.' 
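// Abridged sketch of the release status JSON consumed by the fallback below; only the fields
// this code actually reads are shown, and the concrete values are illustrative:
//   {
//     "namespace": "default",
//     "info": {"status": {"last_test_suite_run": {"results": [{"name": "my-release-test"}]}}}
//   }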
532 | return 533 | } 534 | 535 | // collect necessary information for displaying debug logs 536 | // first grab the status of the release as a json 537 | final String jsonStatus = sh(label: 'Check Release Object Status', returnStdout: true, script: "${config.bin} status -o json ${config.name}") 538 | // parse the json to return the status map 539 | final Map status = readJSON(text: jsonStatus) 540 | // assign the namespace to a local var for kubectl logs 541 | final String namespace = status['namespace'] 542 | // iterate through results and store names of test pods 543 | List testPods = [] 544 | status['info']['status']['last_test_suite_run']['results'].each { result -> 545 | testPods.push(result['name']) 546 | } 547 | 548 | // input check default value for kubectl path 549 | config.kubectl = config.kubectl ?: 'kubectl' 550 | 551 | // iterate through test pods, display the logs for each, and then delete the test pod 552 | testPods.each { String testPod -> 553 | final String podLogs = sh(label: "List Pod Logs for ${testPod}", returnStdout: true, script: "${config.kubectl} -n ${namespace} logs ${testPod}") 554 | print "Logs for ${testPod} for release ${config.name} are:" 555 | print podLogs 556 | print "Removing test pod ${testPod}." 557 | sh(label: "Test Pod Cleanup for ${testPod}", script: "${config.kubectl} -n ${namespace} delete pod ${testPod}") 558 | } 559 | } 560 | 561 | error(message: 'Helm test failed with above logs.') 562 | } 563 | print 'Helm test executed successfully.' 564 | } 565 | 566 | void uninstall(Map config) { 567 | // input checking 568 | config.bin = config.bin ?: 'helm' 569 | assert config.name in String : 'The required parameter "name" was not set.' 570 | 571 | List cmd = [config.bin, 'uninstall'] 572 | List lister = [config.bin, 'list'] 573 | 574 | // check for optional inputs 575 | if (config.context) { 576 | cmd.addAll(['--kube-context', config.context]) 577 | lister.addAll(['--kube-context', config.context]) 578 | } 579 | if (config.namespace) { 580 | cmd.addAll(['--namespace', config.namespace]) 581 | lister.addAll(['--namespace', config.namespace]) 582 | } 583 | 584 | // check release object 585 | final String releaseObjList = sh(label: 'List Release Objects', returnStdout: true, script: lister.join(' ')).trim() 586 | assert (releaseObjList =~ config.name) : "Release object ${config.name} does not exist!" 587 | 588 | // attempt to uninstall a release object 589 | try { 590 | cmd.add(config.name) 591 | sh(label: "Helm Uninstall ${config.name}", script: cmd.join(' ')) 592 | } 593 | catch (hudson.AbortException error) { 594 | print 'Failure using helm uninstall.' 595 | throw error 596 | } 597 | print 'Helm uninstall executed successfully.' 598 | } 599 | 600 | void upgrade(Map config) { 601 | // input checking 602 | if (config.version && config.devel) { 603 | error(message: "The 'version' and 'devel' parameters for helm.upgrade are mutually exclusive; only one can be specified.") 604 | } 605 | assert config.chart in String : 'The required parameter "chart" was not set.' 606 | assert config.name in String : 'The required parameter "name" was not set.' 607 | config.bin = config.bin ?: 'helm' 608 | 609 | List cmd = [config.bin, 'upgrade'] 610 | List lister = [config.bin, 'list'] 611 | 612 | // check for optional inputs 613 | if (config.values) { 614 | assert (config.values in List) : 'The values parameter must be a list of strings.' 
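// Example upgrade invocation from a Jenkinsfile script block (the chart reference, release
// name, override file, and set values are hypothetical placeholders):
//   helm.upgrade(
//     chart:   'example/nginx',
//     name:    'web',
//     values:  ['overrides.yaml'],
//     set:     ['image.tag': 'v1.2.3'],
//     install: true
//   )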
615 | 616 | config.values.each { String value -> 617 | if (!(value ==~ /:\/\//)) { 618 | assert new helpers().validateYamlFile(value, 'value overrides file') 619 | } 620 | 621 | cmd.addAll(['-f', value]) 622 | } 623 | } 624 | if (config.set) { 625 | assert (config.set in Map) : 'The set parameter must be a Map.' 626 | 627 | config.set.each { String var, String value -> 628 | cmd.addAll(['--set', "${var}=${value}"]) 629 | } 630 | } 631 | if (config.version) { 632 | cmd.addAll(['--version', config.version]) 633 | } 634 | else if (config.devel == true) { 635 | cmd.add('--devel') 636 | } 637 | if (config.verify == true) { 638 | cmd.add('--verify') 639 | } 640 | if (config.atomic == true) { 641 | cmd.add('--atomic') 642 | } 643 | else if (config.wait == true) { 644 | cmd.add('--wait') 645 | } 646 | if (config.install == true) { 647 | cmd.add('--install') 648 | 649 | if (config.createNS == true) { 650 | cmd.add('--create-namespace') 651 | } 652 | } 653 | if (config.dryRun == true) { 654 | cmd.add('--dry-run') 655 | } 656 | if (config.context) { 657 | cmd.addAll(['--kube-context', config.context]) 658 | lister.addAll(['--kube-context', config.context]) 659 | } 660 | if (config.namespace) { 661 | cmd.addAll(['--namespace', config.namespace]) 662 | lister.addAll(['--namespace', config.namespace]) 663 | } 664 | 665 | // check release object presence if install param is not true (i.e. false or null) 666 | if (!(config.install == true)) { 667 | final String releaseObjList = sh(label: 'List Release Objects', returnStdout: true, script: lister.join(' ')).trim() 668 | assert releaseObjList =~ config.name : "Release object ${config.name} does not exist!" 669 | } 670 | 671 | // upgrade with helm 672 | try { 673 | cmd.addAll([config.name, config.chart]) 674 | sh(label: "Helm Upgrade ${config.name}", script: cmd.join(' ')) 675 | } 676 | catch (hudson.AbortException error) { 677 | print 'Failure using helm upgrade.' 678 | throw error 679 | } 680 | print 'Helm upgrade executed successfully.' 681 | } 682 | 683 | Boolean verify(String chartPath, String helmPath = 'helm') { 684 | // input checking 685 | assert fileExists(chartPath) : "The chart at ${chartPath} does not exist." 686 | 687 | // verify helm chart 688 | final int returnCode = sh(label: "Helm Verify ${chartPath}", script: "${helmPath} verify ${chartPath}", returnStatus: true) 689 | 690 | // return by code 691 | if (returnCode == 0) { 692 | print "The chart at ${chartPath} successfully verified." 693 | return true 694 | } 695 | else if (returnCode == 1) { 696 | print "The chart at ${chartPath} failed verification." 697 | return false 698 | } 699 | 700 | print 'Failure using helm verify' 701 | error(message: 'Helm verify failed unexpectedly') 702 | } 703 | -------------------------------------------------------------------------------- /vars/terraform.groovy: -------------------------------------------------------------------------------- 1 | // vars/terraform.groovy 2 | import devops.common.utils 3 | import devops.common.helpers 4 | import devops.common.hcl 5 | 6 | private void execute(Map config) { 7 | // set terraform env for automation 8 | env.TF_IN_AUTOMATION = true 9 | 10 | // input checking 11 | assert config.configPath in String : "'configPath' is a required parameter for terraform.${config.action}." 12 | assert fileExists(config.configPath) : "Terraform config/plan ${config.configPath} does not exist!" 
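// This private helper backs the public apply and destroy wrappers defined further down; a
// typical Jenkinsfile invocation looks like this (the plan path and target address are
// hypothetical placeholders):
//   terraform.apply(configPath: "${env.WORKSPACE}/plan.tfplan")
//   terraform.destroy(configPath: env.WORKSPACE, target: ['aws_instance.example'])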
13 | config.bin = config.bin ?: 'terraform' 14 | 15 | List cmd = [config.bin, 'apply', '-input=false', '-no-color', '-auto-approve'] 16 | 17 | // check if a directory was passed for the config path 18 | if (!(config.configPath ==~ /\.tfplan$/)) { 19 | cmd.addAll(new helpers().varSubCmd(config)) 20 | 21 | if (config.target) { 22 | assert (config.target in List) : 'The target parameter must be a list of strings.' 23 | 24 | config.target.each { String target -> 25 | cmd.add("-target=${target}") 26 | } 27 | } 28 | } 29 | if (config.compactWarn == true) { 30 | cmd.add('-compact-warnings') 31 | } 32 | if (config.action == 'destroy') { 33 | cmd.add('-destroy') 34 | } 35 | 36 | // apply the config 37 | try { 38 | if (config.configPath ==~ /\.tfplan$/) { 39 | cmd.add(config.configPath) 40 | sh(label: "Terraform Apply ${config.configPath}", script: cmd.join(' ')) 41 | } 42 | else { 43 | dir(config.configPath) { 44 | sh(label: "Terraform Apply ${config.configPath}", script: cmd.join(' ')) 45 | } 46 | } 47 | } 48 | catch (hudson.AbortException error) { 49 | print "Failure using terraform ${config.action}." 50 | throw error 51 | } 52 | print "Terraform ${config.action} was successful." 53 | } 54 | 55 | void apply(Map config) { 56 | // invoke helper method with apply 57 | config.action = 'apply' 58 | execute(config) 59 | } 60 | 61 | void destroy(Map config) { 62 | // invoke helper method with destroy 63 | config.action = 'destroy' 64 | execute(config) 65 | } 66 | 67 | Boolean fmt(Map config) { 68 | // set terraform env for automation 69 | env.TF_IN_AUTOMATION = true 70 | 71 | // input checking 72 | if (config.dir) { 73 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 74 | } 75 | else { 76 | config.dir = env.WORKSPACE 77 | } 78 | if (config.write && config.check) { 79 | error(message: "The 'write' and 'check' options for terraform.fmt are mutually exclusive; only one can be specified.") 80 | } 81 | config.bin = config.bin ?: 'terraform' 82 | 83 | List cmd = [config.bin, 'fmt', '-no-color'] 84 | 85 | // check for optional inputs 86 | if (config.recursive == true) { 87 | cmd.add('-recursive') 88 | } 89 | if (config.diff == true) { 90 | cmd.add('-diff') 91 | } 92 | if (config.check == true) { 93 | cmd.add('-check') 94 | } 95 | // incompatible with above 96 | else if (config.write == true) { 97 | cmd.add('-write') 98 | } 99 | 100 | // canonically format the code 101 | int fmtStatus 102 | dir(config.dir) { 103 | fmtStatus = sh(label: 'Terraform Format', returnStatus: true, script: cmd.join(' ')) 104 | } 105 | 106 | // report if formatting check detected issues 107 | if (fmtStatus != 0) { 108 | // the format check failed 109 | if (config.check == true) { 110 | print 'Terraform fmt has detected formatting errors.' 111 | return false 112 | } 113 | 114 | // the format command failed unexpectedly 115 | print 'Failure using terraform fmt.' 116 | error(message: 'terraform fmt failed unexpectedly; check logs for details') 117 | } 118 | 119 | print 'Terraform fmt was successful.' 120 | return true 121 | } 122 | 123 | void graph(Map config) { 124 | // set terraform env for automation 125 | env.TF_IN_AUTOMATION = true 126 | 127 | // input checking 128 | if (config.plan && config.dir) { 129 | error(message: "The 'plan' and 'dir' parameters for terraform.graph are mutually exclusive; only one can be specified.") 130 | } 131 | else if (config.dir) { 132 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 
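// Example graph invocation from a Jenkinsfile script block (the directory value is a
// hypothetical placeholder); the DOT output is written to graph.gv as noted below:
//   terraform.graph(dir: env.WORKSPACE, drawCycles: true)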
133 | } 134 | List cmd = [config.bin ?: 'terraform'] 135 | 136 | // check for plan versus dir target 137 | if (config.plan) { 138 | cmd.add("graph -plan=${config.plan}") 139 | } 140 | else { 141 | // cannot cleanly use dir step for this, and also because of later graph file write 142 | cmd.addAll(["-chdir=${config.dir}", 'graph']) 143 | } 144 | 145 | // check for optional inputs 146 | if (config.type) { 147 | assert (['plan', 'plan-refresh-only', 'plan-destroy', 'apply'].contains(config.type)) : 'The type parameter must be one of: plan, plan-refresh-only, plan-destroy, or apply.' 148 | 149 | cmd.add("-type=${config.type}") 150 | } 151 | if (config.drawCycles == true) { 152 | cmd.add('-draw-cycles') 153 | } 154 | 155 | String dotGraph 156 | try { 157 | dotGraph = sh(label: 'Terraform Graph', script: cmd.join(' '), returnStdout: true) 158 | } 159 | catch (hudson.AbortException error) { 160 | print 'Failure using terraform graph.' 161 | throw error 162 | } 163 | print 'Terraform graph was successful. Writing graph output to "graph.gv" in current working directory.' 164 | 165 | writeFile(file: 'graph.gv', text: dotGraph) 166 | } 167 | 168 | void imports(Map config) { 169 | // set terraform env for automation 170 | env.TF_IN_AUTOMATION = true 171 | 172 | // input checking 173 | assert config.resources : 'Parameter resources must be specified.' 174 | assert (config.resources in Map) : 'Parameter resources must be a map of strings.' 175 | config.bin = config.bin ?: 'terraform' 176 | 177 | List cmd = [config.bin, 'import', '-no-color', '-input=false'] 178 | 179 | // check for optional inputs 180 | cmd.addAll(new helpers().varSubCmd(config)) 181 | 182 | if (config.dir) { 183 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 184 | 185 | cmd.add("-config=${config.dir}") 186 | } 187 | if (config.provider) { 188 | cmd.add("-provider=${config.provider}") 189 | } 190 | if (config.state) { 191 | assert fileExists(config.state) : "The state file at ${config.state} does not exist." 192 | 193 | cmd.add("-state=${config.state}") 194 | } 195 | 196 | // import the resources 197 | try { 198 | // import each resource 199 | config.resources.each { String name, String id -> 200 | cmd.addAll(["'${name}'", id]) 201 | sh(label: "Terraform Import ${name}", script: cmd.join(' ')) 202 | } 203 | } 204 | catch (hudson.AbortException error) { 205 | print 'Failure using terraform import.' 206 | throw error 207 | } 208 | print 'Terraform imports were successful.' 209 | } 210 | 211 | void init(Map config) { 212 | // set terraform env for automation 213 | env.TF_IN_AUTOMATION = true 214 | 215 | // input checking 216 | if (config.dir) { 217 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 
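// Example init invocation (the backend config file name is a hypothetical placeholder and
// must exist in the workspace, since the code below asserts its presence):
//   terraform.init(dir: env.WORKSPACE, upgrade: true, backendConfig: ['backend.hcl'])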
218 | } 219 | else { 220 | config.dir = env.WORKSPACE 221 | } 222 | config.bin = config.bin ?: 'terraform' 223 | 224 | List cmd = [config.bin, 'init', '-input=false', '-no-color'] 225 | 226 | // check for optional inputs 227 | if (config.pluginDir) { 228 | new utils().makeDirParents(config.pluginDir) 229 | 230 | cmd.add("-plugin-dir=${config.pluginDir}") 231 | } 232 | if (config.upgrade == true) { 233 | cmd.add('-upgrade') 234 | } 235 | if (config.backend == false) { 236 | cmd.add('-backend=false') 237 | } 238 | if (config.migrateState == true) { 239 | cmd.add('-migrate-state') 240 | } 241 | if (config.forceCopy == true) { 242 | cmd.add('-force-copy') 243 | } 244 | if (config.backendConfig) { 245 | assert (config.backendConfig in List) : 'Parameter backendConfig must be a list of strings.' 246 | 247 | config.backendConfig.each { String backconf -> 248 | assert fileExists(backconf) : "Backend config file ${backconf} does not exist!" 249 | 250 | cmd.add("-backend-config=${backconf}") 251 | } 252 | } 253 | if (config.backendKV) { 254 | assert (config.backendKV in Map) : 'Parameter backendKV must be a map of strings.' 255 | 256 | config.backendKV.each { String key, String value -> 257 | cmd.add("-backend-config='${key}=${value}'") 258 | } 259 | } 260 | if (config.testDir) { 261 | assert fileExists(config.testDir) : "The test directory ${config.testDir} does not exist." 262 | 263 | cmd.add("-test-directory=${config.testDir}") 264 | } 265 | 266 | // initialize the working config directory 267 | try { 268 | dir(config.dir) { 269 | sh(label: 'Terraform Init', script: cmd.join(' ')) 270 | } 271 | } 272 | catch (hudson.AbortException error) { 273 | print 'Failure using terraform init.' 274 | throw error 275 | } 276 | print 'Terraform init was successful.' 277 | } 278 | 279 | void install(Map config) { 280 | // set terraform env for automation 281 | env.TF_IN_AUTOMATION = true 282 | 283 | // input checking 284 | config.installPath = config.installPath ? config.installPath : '/usr/bin' 285 | assert (config.platform in String && config.version in String) : 'A required parameter is missing from the terraform.install method. Please consult the documentation for proper usage.' 286 | 287 | new utils().makeDirParents(config.installPath) 288 | 289 | // check if current version already installed 290 | if (fileExists("${config.installPath}/terraform")) { 291 | final String installedVersion = sh(label: 'Check Terraform Version', returnStdout: true, script: "${config.installPath}/terraform version").trim() 292 | if (installedVersion =~ config.version) { 293 | print "Terraform version ${config.version} already installed at ${config.installPath}." 294 | return 295 | } 296 | } 297 | // otherwise download and install specified version 298 | new utils().downloadFile("https://releases.hashicorp.com/terraform/${config.version}/terraform_${config.version}_${config.platform}.zip", 'terraform.zip') 299 | unzip(zipFile: 'terraform.zip', dir: config.installPath) 300 | new utils().removeFile('terraform.zip') 301 | print "Terraform successfully installed at ${config.installPath}/terraform." 302 | } 303 | 304 | String output(Map config) { 305 | // set terraform env for automation 306 | env.TF_IN_AUTOMATION = true 307 | 308 | // input checking 309 | if (config.dir) { 310 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 
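// Example output invocation capturing a single value (the output name is a hypothetical
// placeholder):
//   String dbHost = terraform.output(dir: env.WORKSPACE, name: 'db_host', raw: true)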
311 | } 312 | else { 313 | config.dir = env.WORKSPACE 314 | } 315 | config.bin = config.bin ?: 'terraform' 316 | 317 | List cmd = [config.bin, 'output', '-no-color'] 318 | 319 | // check for optional inputs 320 | if (config.state) { 321 | assert fileExists(config.state) : "The state file at ${config.state} does not exist." 322 | 323 | cmd.add("-state=${config.state}") 324 | } 325 | if (config.json == true) { 326 | cmd.add('-json') 327 | } 328 | if (config.raw == true) { 329 | cmd.add('-raw') 330 | } 331 | // must be last param 332 | if (config.name) { 333 | cmd.add(config.name) 334 | } 335 | 336 | // display outputs from the state 337 | String outputs 338 | try { 339 | // capture output(s) 340 | dir(config.dir) { 341 | outputs = sh(label: 'Terraform Output', script: cmd.join(' '), returnStdout: true) 342 | } 343 | } 344 | catch (hudson.AbortException error) { 345 | print 'Failure using terraform output.' 346 | throw error 347 | } 348 | 349 | print 'Terraform output was successful.' 350 | // display output 351 | if (config.display == true) { 352 | print 'Terraform outputs are displayed below:' 353 | print outputs 354 | } 355 | // return output 356 | return outputs 357 | } 358 | 359 | Map parse(String file) { 360 | // return map of parsed hcl 361 | return new hcl().hclToMap(file) 362 | } 363 | 364 | String plan(Map config) { 365 | // set terraform env for automation 366 | env.TF_IN_AUTOMATION = true 367 | 368 | // input checking 369 | if (config.dir) { 370 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 371 | } 372 | else { 373 | config.dir = env.WORKSPACE 374 | } 375 | config.bin = config.bin ?: 'terraform' 376 | 377 | List cmd = [config.bin, 'plan', '-no-color', '-input=false'] 378 | 379 | // check for optional inputs 380 | cmd.addAll(new helpers().varSubCmd(config)) 381 | 382 | if (config.target) { 383 | assert (config.target in List) : 'The target parameter must be a list of strings.' 384 | 385 | config.target.each { String target -> 386 | cmd.add("-target=${target}") 387 | } 388 | } 389 | if (config.replace) { 390 | assert (config.replace in List) : 'The replace parameter must be a list of strings.' 391 | 392 | config.replace.each { String resource -> 393 | cmd.add("-replace=${resource}") 394 | } 395 | } 396 | if (config.destroy == true) { 397 | cmd.add('-destroy') 398 | } 399 | if (config.refreshOnly == true) { 400 | cmd.add('-refresh-only') 401 | } 402 | if (config.compactWarn == true) { 403 | cmd.add('-compact-warnings') 404 | } 405 | if (config.genConfig) { 406 | assert !fileExists(config.genConfig) : "The path at ${config.genConfig} is required to not exist prior to Terraform config generation, but the path does exist." 407 | 408 | cmd.add("-generate-config-out=${config.genConfig}") 409 | } 410 | final String out = config.out ?: "${config.dir}/plan.tfplan" 411 | 412 | // generate a plan from the config directory 413 | String planOutput 414 | try { 415 | // execute plan 416 | dir(config.dir) { 417 | cmd.add("-out=${out}") 418 | planOutput = sh(label: 'Terraform Plan', script: cmd.join(' '), returnStdout: true) 419 | print "Plan output artifact written to: ${out}" 420 | } 421 | } 422 | catch (hudson.AbortException error) { 423 | print 'Failure using terraform plan.' 424 | throw error 425 | } 426 | print 'Terraform plan was successful.' 
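// Example plan invocation from a Jenkinsfile script block (the out path is a hypothetical
// placeholder; it defaults to plan.tfplan inside the config directory as shown below):
//   String planLog = terraform.plan(dir: env.WORKSPACE, out: "${env.WORKSPACE}/plan.tfplan", compactWarn: true)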
427 | 428 | return planOutput 429 | } 430 | 431 | void pluginInstall(Map config) { 432 | // set terraform env for automation 433 | env.TF_IN_AUTOMATION = true 434 | 435 | // input checking 436 | assert config.url in String : "The required parameter 'url' was not set." 437 | assert config.installName in String : "The required parameter 'installName' was not set." 438 | 439 | config.installPath = config.installPath ? config.installPath : '~/.terraform/plugins' 440 | 441 | // set and assign plugin install location 442 | String installLoc = "${config.installPath}/${config.installName}" 443 | 444 | // check if plugin dir exists and create if not 445 | new utils().makeDirParents(config.installPath) 446 | 447 | // check if plugin already installed 448 | if (fileExists(installLoc)) { 449 | print "Terraform plugin already installed at ${installLoc}." 450 | return 451 | } 452 | // otherwise download and install plugin 453 | else if (config.url ==~ /\.zip$/) { 454 | // append zip extension to avoid filename clashes 455 | installLoc = "${installLoc}.zip" 456 | } 457 | 458 | new utils().downloadFile(config.url, installLoc) 459 | 460 | if (config.url ==~ /\.zip$/) { 461 | unzip(zipFile: installLoc) 462 | new utils().removeFile(installLoc) 463 | } 464 | else { 465 | sh(label: 'Terraform Plugin Executable Permissions', script: "chmod ug+rx ${installLoc}") 466 | } 467 | print "Terraform plugin successfully installed at ${installLoc}." 468 | } 469 | 470 | void providers(String rootDir = '', String bin = 'terraform') { 471 | // set terraform env for automation 472 | env.TF_IN_AUTOMATION = true 473 | 474 | // input checking 475 | if (rootDir.length() == 0) { 476 | rootDir = env.WORKSPACE 477 | } else { 478 | assert fileExists(rootDir) : "Config directory ${rootDir} does not exist!" 479 | } 480 | 481 | // output provider information 482 | try { 483 | dir(rootDir) { 484 | sh(label: 'Terraform Providers Information', script: "${bin} providers") 485 | } 486 | } 487 | catch (hudson.AbortException error) { 488 | print 'Failure using terraform providers.' 489 | throw error 490 | } 491 | print 'Terraform providers was successful.' 492 | } 493 | 494 | void refresh(Map config) { 495 | // set terraform env for automation 496 | env.TF_IN_AUTOMATION = true 497 | 498 | // input checking 499 | if (config.dir) { 500 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 501 | } 502 | else { 503 | config.dir = env.WORKSPACE 504 | } 505 | config.bin = config.bin ?: 'terraform' 506 | 507 | List cmd = [config.bin, 'refresh', '-no-color', '-input=false'] 508 | 509 | // check for optional inputs 510 | cmd.addAll(new helpers().varSubCmd(config)) 511 | 512 | if (config.target) { 513 | assert (config.target in List) : 'The target parameter must be a list of strings.' 514 | 515 | config.target.each { String target -> 516 | cmd.add("-target=${target}") 517 | } 518 | } 519 | if (config.compactWarn == true) { 520 | cmd.add('-compact-warnings') 521 | } 522 | 523 | // refresh the state 524 | try { 525 | dir(config.dir) { 526 | sh(label: 'Terraform Refresh', script: cmd.join(' ')) 527 | } 528 | } 529 | catch (hudson.AbortException error) { 530 | print 'Failure using terraform refresh.' 531 | throw error 532 | } 533 | print 'Terraform refresh was successful.' 
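// Example refresh invocation (the target resource address is a hypothetical placeholder):
//   terraform.refresh(dir: env.WORKSPACE, target: ['aws_instance.example'], compactWarn: true)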
534 | } 535 | 536 | void state(Map config) { 537 | // set terraform env for automation 538 | env.TF_IN_AUTOMATION = true 539 | 540 | // input checking 541 | assert (['move', 'remove', 'push', 'list', 'show', 'pull'].contains(config.command)) : 'The command parameter must be one of: move, remove, list, show, pull, or push.' 542 | if (config.dir) { 543 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 544 | } 545 | else { 546 | config.dir = env.WORKSPACE 547 | } 548 | config.bin = config.bin ?: 'terraform' 549 | List cmd = [config.bin, 'state'] 550 | 551 | // optional inputs 552 | if (config.state) { 553 | assert config.command != 'push' && config.command != 'pull' : 'The state parameter is incompatible with state pushing and pulling.' 554 | assert fileExists(config.state) : "The state file at ${config.state} does not exist." 555 | 556 | cmd.add("-state=${config.state}") 557 | } 558 | 559 | // perform state manipulation 560 | try { 561 | // perform different commands based upon type of state action 562 | switch (config.command) { 563 | case 'move': 564 | assert (config.resources in Map) : 'Parameter resources must be a Map of strings for move command.' 565 | 566 | dir(config.dir) { 567 | config.resources.each { String from, String to -> 568 | cmd.addAll(['mv', from, to]) 569 | sh(label: "Terraform State Move ${from} to ${to}", script: cmd.join(' ')) 570 | } 571 | } 572 | 573 | break 574 | case 'remove': 575 | assert (config.resources in List) : 'Parameter resources must be a list of strings for remove command.' 576 | 577 | dir(config.dir) { 578 | config.resources.each { String resource -> 579 | cmd.addAll(['rm', resource]) 580 | sh(label: "Terraform State Remove ${resource}", script: cmd.join(' ')) 581 | } 582 | } 583 | 584 | break 585 | case 'push': 586 | assert !config.resources : 'Resources parameter is not allowed for list command.' 587 | 588 | dir(config.dir) { 589 | cmd.add('push') 590 | sh(label: 'Terraform State Push', script: cmd.join(' ')) 591 | } 592 | 593 | break 594 | case 'list': 595 | assert !config.resources : 'Resources parameter is not allowed for push command.' 596 | 597 | String stateList 598 | dir(config.dir) { 599 | cmd.add('list') 600 | stateList = sh(label: 'Terraform State List', script: cmd.join(' '), returnStdout: true) 601 | } 602 | 603 | print 'Terraform state output is as follows:' 604 | print stateList 605 | 606 | break 607 | case 'show': 608 | assert (config.resources in List) : 'Parameter resources must be a list of strings for show command.' 609 | 610 | dir(config.dir) { 611 | config.resources.each { String resource -> 612 | cmd.addAll(['show', resource]) 613 | String stateShow = sh(label: "Terraform State Show ${resource}", script: cmd.join(' '), returnStdout: true) 614 | 615 | print 'Terraform state output is as follows:' 616 | print stateShow 617 | } 618 | } 619 | 620 | break 621 | case 'pull': 622 | assert !config.resources : 'Resources parameter is not allowed for pull command.' 623 | 624 | dir(config.dir) { 625 | cmd.add('pull') 626 | sh(label: 'Terraform State Pull', script: cmd.join(' ')) 627 | } 628 | 629 | break 630 | default: 631 | // should never reach this because of above assert 632 | error(message: "Unknown Terraform state command ${config.command} specified.") 633 | } 634 | } 635 | catch (hudson.AbortException error) { 636 | print 'Failure using terraform state manipulation.' 637 | throw error 638 | } 639 | print 'Terraform state manipulation was successful.' 
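// Example state invocations (the resource addresses are hypothetical placeholders); note that
// 'move' expects a Map of from/to pairs while 'list' takes no resources:
//   terraform.state(command: 'list', dir: env.WORKSPACE)
//   terraform.state(command: 'move', dir: env.WORKSPACE, resources: ['aws_instance.old': 'aws_instance.new'])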
640 | } 641 | 642 | void taint(Map config) { 643 | // set terraform env for automation 644 | env.TF_IN_AUTOMATION = true 645 | 646 | // input checking 647 | assert config.resources : 'Parameter resources must be specified.' 648 | assert (config.resources in List) : 'Parameter resources must be a list of strings.' 649 | if (config.dir) { 650 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 651 | } 652 | else { 653 | config.dir = env.WORKSPACE 654 | } 655 | config.bin = config.bin ?: 'terraform' 656 | 657 | List cmd = [config.bin, 'taint', '-no-color'] 658 | 659 | // optional inputs 660 | if (config.state) { 661 | assert fileExists(config.state) : "The state file at ${config.state} does not exist." 662 | 663 | cmd.add("-state=${config.state}") 664 | } 665 | if (config.allowMissing == true) { 666 | cmd.add('-allow-missing') 667 | } 668 | 669 | // taint the resources 670 | try { 671 | // taint each resource 672 | dir(config.dir) { 673 | config.resources.each { String resource -> 674 | cmd.add(resource) 675 | sh(label: "Terraform Taint ${resource}", script: cmd.join(' ')) 676 | } 677 | } 678 | } 679 | catch (hudson.AbortException error) { 680 | print 'Failure using terraform taint.' 681 | throw error 682 | } 683 | print 'Terraform taints were successful.' 684 | } 685 | 686 | String test(Map config) { 687 | // set terraform env for automation 688 | env.TF_IN_AUTOMATION = true 689 | 690 | // input checking 691 | if (config.dir) { 692 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 693 | } 694 | else { 695 | config.dir = env.WORKSPACE 696 | } 697 | config.bin = config.bin ?: 'terraform' 698 | 699 | List cmd = [config.bin, 'test', '-no-color'] 700 | 701 | // optional inputs 702 | if (config.cloudRun) { 703 | cmd.add("-cloud-run=${config.cloudRun}") 704 | } 705 | if (config.filter) { 706 | assert (config.filter in List) : 'The filter parameter must be a list of strings.' 707 | 708 | config.filter.each { String filter -> 709 | cmd.add("-filter=${filter}") 710 | } 711 | } 712 | if (config.json == true) { 713 | cmd.add('-json') 714 | } 715 | if (config.testDir) { 716 | assert fileExists(config.testDir) : "The test directory ${config.testDir} does not exist." 717 | 718 | cmd.add("-test-directory=${config.testDir}") 719 | } 720 | cmd.addAll(new helpers().varSubCmd(config)) 721 | 722 | if (config.verbose == true) { 723 | cmd.add('-verbose') 724 | } 725 | 726 | // execute tests 727 | String testOutput 728 | try { 729 | dir(config.dir) { 730 | testOutput = sh(label: 'Terraform Test', script: cmd.join(' '), returnStdout: true) 731 | } 732 | } 733 | catch (hudson.AbortException error) { 734 | print 'Failure using terraform test.' 735 | throw error 736 | } 737 | print 'Terraform test was successful.' 738 | 739 | return testOutput 740 | } 741 | 742 | String validate(Map config) { 743 | // set terraform env for automation 744 | env.TF_IN_AUTOMATION = true 745 | 746 | // input checking 747 | if (config.dir) { 748 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 
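// Example validate invocation returning machine-readable diagnostics from the config directory:
//   String diagnostics = terraform.validate(dir: env.WORKSPACE, json: true)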
749 | } 750 | else { 751 | config.dir = env.WORKSPACE 752 | } 753 | config.bin = config.bin ?: 'terraform' 754 | 755 | List cmd = [config.bin, 'validate', '-no-color'] 756 | 757 | // optional inputs 758 | if (config.json == true) { 759 | cmd.add('-json') 760 | } 761 | if (config.tests == false) { 762 | cmd.add('-no-tests') 763 | } 764 | else if (config.testDir) { 765 | assert fileExists(config.testDir) : "The test directory ${config.testDir} does not exist." 766 | 767 | cmd.add("-test-directory=${config.testDir}") 768 | } 769 | 770 | // validate the config directory 771 | String validateOutput 772 | try { 773 | dir(config.dir) { 774 | validateOutput = sh(label: 'Terraform Validate', script: cmd.join(' '), returnStdout: true) 775 | } 776 | } 777 | catch (hudson.AbortException error) { 778 | print 'Failure using terraform validate.' 779 | throw error 780 | } 781 | print 'Terraform validate was successful.' 782 | 783 | return validateOutput 784 | } 785 | 786 | void workspace(Map config) { 787 | // set terraform env for automation 788 | env.TF_IN_AUTOMATION = true 789 | 790 | // input checking 791 | if (config.dir) { 792 | assert fileExists(config.dir) : "Config directory ${config.dir} does not exist!" 793 | } 794 | else { 795 | config.dir = env.WORKSPACE 796 | } 797 | assert config.workspace in String : 'The "workspace" parameter must be specified for the "workspace" method.' 798 | config.bin = config.bin ?: 'terraform' 799 | 800 | List cmd = [config.bin, 'workspace', 'select'] 801 | 802 | // optional inputs 803 | if (config.create == true) { 804 | cmd.add('-or-create') 805 | } 806 | 807 | dir(config.dir) { 808 | // select workspace in terraform config directory 809 | try { 810 | cmd.add(config.workspace) 811 | sh(label: "Terraform Workspace Select ${config.workspace}", script: cmd.join(' ')) 812 | } 813 | catch (hudson.AbortException error) { 814 | print 'Failure using terraform workspace select. The available workspaces and your current workspace are as follows:' 815 | 816 | final String workspaces = sh(label: 'Terraform Workspace List', script: "${config.bin} workspace list", returnStdout: true) 817 | print workspaces 818 | 819 | throw error 820 | } 821 | print "Terraform workspace ${config.workspace} selected successfully." 822 | } 823 | } 824 | --------------------------------------------------------------------------------
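// Example workspace selection from a Jenkinsfile script block (the workspace name is a
// hypothetical placeholder); create: true adds -or-create so the workspace is created if absent:
//   terraform.workspace(dir: env.WORKSPACE, workspace: 'staging', create: true)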