├── docs ├── global-pipeline-jenkins-setup.png └── PatchBuildConfigGitSource.md ├── src └── bcgov │ ├── date.groovy │ ├── ChangedFiles.groovy │ ├── GitHubHelper.groovy │ └── OpenShiftHelper.groovy ├── vars ├── getTimeStamp.groovy ├── getLastCommit.groovy ├── getChangeString.groovy ├── gitHubCreateDeployment.groovy ├── gitHubCreateDeploymentStatus.groovy ├── slackNotify.groovy ├── abortAllPreviousBuildInProgress.groovy ├── hasDirectoryChanged.groovy └── basicPipeline.groovy ├── dynamicallyDefinedSharedLib.txt ├── snippets ├── sendEmail ├── ChangedFilesSinceLastSuccessfulBuild ├── inputs ├── running-sonar-scan ├── slackPost ├── DynamicNodeCreation ├── getting-the-image-hash-of-the-latest-image ├── CommitsMsgsSinceLatsSuccessfulBuild ├── VerifyOpenshiftActions └── run-bdd-test ├── .gitignore ├── Jenkins-env.txt ├── README.md └── LICENSE /docs/global-pipeline-jenkins-setup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BCDevOps/jenkins-pipeline-shared-lib/HEAD/docs/global-pipeline-jenkins-setup.png -------------------------------------------------------------------------------- /src/bcgov/date.groovy: -------------------------------------------------------------------------------- 1 | def timestamp(Date date = new Date()){ 2 | return date.format('yyyyMMddHHmmss',TimeZone.getTimeZone('GMT')) as String 3 | } 4 | return this 5 | -------------------------------------------------------------------------------- /vars/getTimeStamp.groovy: -------------------------------------------------------------------------------- 1 | def String call() { 2 | Date date = new Date() 3 | return date.format('yyyyMMddHHmmss',TimeZone.getTimeZone('PST')) as String 4 | } 5 | return this 6 | -------------------------------------------------------------------------------- /dynamicallyDefinedSharedLib.txt: -------------------------------------------------------------------------------- 1 | library identifier: 
'devops-library@master', retriever: modernSCM( 2 | [$class: 'GitSCMSource', 3 | remote: 'https://github.com/BCDevOps/jenkins-pipeline-shared-lib.git' 4 | ]) -------------------------------------------------------------------------------- /snippets/sendEmail: -------------------------------------------------------------------------------- 1 | mail (to: 'user1@gov.bc.ca,user2@gov.bc.ca', 2 | subject: "FYI: Job '${env.JOB_NAME}' (${env.BUILD_NUMBER}) deployed to test", 3 | body: "Changes:\n" + getChangeString() + "\n\nSee ${env.BUILD_URL} for details. "); 4 | -------------------------------------------------------------------------------- /vars/getLastCommit.groovy: -------------------------------------------------------------------------------- 1 | def String call() { 2 | def commit = null 3 | def changeLogSets = currentBuild.changeSets 4 | if(changeLogSets.size()>0 && changeLogSets.items.size() > 0){ 5 | commit = changeLogSets[0].items[0] 6 | } 7 | return commit 8 | } 9 | 10 | -------------------------------------------------------------------------------- /docs/PatchBuildConfigGitSource.md: -------------------------------------------------------------------------------- 1 | # Set GIT Repo URL and Branch name in buildconfig 2 | 3 | This example set the Repo URL to a forked repo and defines a pull request as the branch to build from: 4 | ``` 5 | oc -n agehlers-sandbox patch bc/gitbook -p '{"spec":{"source":{"git":{"ref": "refs/pull/2/head", "uri": "https://github.com/agehlers/tested.git"}}}}'; 6 | ``` 7 | 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.ear 17 | *.zip 18 | *.tar.gz 19 | *.rar 20 | 21 | # virtual machine crash logs, 
see http://www.java.com/en/download/help/error_hotspot.xml 22 | hs_err_pid* 23 | -------------------------------------------------------------------------------- /Jenkins-env.txt: -------------------------------------------------------------------------------- 1 | JAVA_OPTS 2 | -XX:MaxMetaspaceSize=512m -Duser.timezone=America/Vancouver -Dhudson.model.DirectoryBrowserSupport.CSP= 3 | 4 | Notes: 5 | -XX:MaxMetaspaceSize=512m - allows for larger space to accomodate OATH 6 | -Duser.timezone=America/Vancouver - set the timezone to show correct date/time in jenkins-logs 7 | -Dhudson.model.DirectoryBrowserSupport.CSP= - allows for pretty print of reports/html/css files 8 | 9 | -------------------------------------------------------------------------------- /snippets/ChangedFilesSinceLastSuccessfulBuild: -------------------------------------------------------------------------------- 1 | def changes = "Changes:\n" 2 | build = currentBuild 3 | while(build != null && build.result != 'SUCCESS') { 4 | changes += "In ${build.id}:\n" 5 | for (changeLog in build.changeSets) { 6 | for(entry in changeLog.items) { 7 | for(file in entry.affectedFiles) { 8 | changes += "* ${file.path}\n" 9 | } 10 | } 11 | } 12 | build = build.previousBuild 13 | } 14 | echo changes 15 | -------------------------------------------------------------------------------- /snippets/inputs: -------------------------------------------------------------------------------- 1 | timeout(time: 1, unit: 'DAYS') { 2 | input message: "Deploy to test?", submitter: 'user1,user2' 3 | } 4 | 5 | 6 | def userInput = input( 7 | id: 'userInput', message: 'Let\'s promote?', parameters: [ 8 | [$class: 'TextParameterDefinition', defaultValue: 'uat', description: 'Environment', name: 'env'], 9 | [$class: 'TextParameterDefinition', defaultValue: 'uat1', description: 'Target', name: 'target'] 10 | ]) 11 | echo ("Env: "+userInput['env']) 12 | echo ("Target: "+userInput['target']) 13 | 
-------------------------------------------------------------------------------- /src/bcgov/ChangedFiles.groovy: -------------------------------------------------------------------------------- 1 | def changed_files() { 2 | def changes = "Changes:\n" 3 | build = currentBuild 4 | while(build != null && build.result != 'SUCCESS') { 5 | changes += "In ${build.id}:\n" 6 | for (changeLog in build.changeSets) { 7 | for(entry in changeLog.items) { 8 | for(file in entry.affectedFiles) { 9 | changes += "* ${file.path}\n" 10 | } 11 | } 12 | } 13 | build = build.previousBuild 14 | } 15 | return changes 16 | } 17 | return this 18 | -------------------------------------------------------------------------------- /vars/getChangeString.groovy: -------------------------------------------------------------------------------- 1 | def String call() { 2 | MAX_MSG_LEN = 512 3 | def changeString = "" 4 | def changeLogSets = currentBuild.changeSets 5 | for (int i = 0; i < changeLogSets.size(); i++) { 6 | def entries = changeLogSets[i].items 7 | for (int j = 0; j < entries.length; j++) { 8 | def entry = entries[j] 9 | truncated_msg = entry.msg.take(MAX_MSG_LEN) 10 | changeString += " - ${truncated_msg} [${entry.author}]\n" 11 | } 12 | } 13 | if (!changeString) { 14 | changeString = "No changes" 15 | } 16 | return changeString 17 | } 18 | return this 19 | -------------------------------------------------------------------------------- /vars/gitHubCreateDeployment.groovy: -------------------------------------------------------------------------------- 1 | import bcgov.GitHubHelper 2 | import org.kohsuke.github.* 3 | import com.cloudbees.jenkins.GitHubRepositoryName 4 | 5 | 6 | def call(script, environment, options = null) { 7 | def commitId = sh(returnStdout: true, script: 'git rev-parse HEAD') 8 | String gitUrl = script.scm.getUserRemoteConfigs()[0].getUrl() 9 | GHRepository repository = GitHubRepositoryName.create(gitUrl).resolveOne() 10 | GHDeploymentBuilder builder = 
repository.createDeployment(commitId.trim()) 11 | builder.environment(environment) 12 | builder.autoMerge(false) 13 | builder.requiredContexts([]) 14 | 15 | return builder.create().getId() 16 | } 17 | -------------------------------------------------------------------------------- /snippets/running-sonar-scan: -------------------------------------------------------------------------------- 1 | stage('code quality check') { 2 | echo "Code Quality Check ...." 3 | SONARQUBE_PWD = sh ( 4 | script: 'oc env dc/sonarqube --list | awk -F "=" \'/SONARQUBE_ADMINPW/{print $2}\'', 5 | returnStdout: true).trim() 6 | SONARQUBE_URL = sh ( 7 | script: 'oc get routes -o wide --no-headers | awk \'/sonarqube/{ print match($0,/edge/) ? "https://"$2 : "http://"$2 }\'', 8 | returnStdout: true).trim() 9 | dir('sonar-runner') { 10 | sh returnStdout: true, script: "./gradlew sonarqube -Dsonar.host.url=${SONARQUBE_URL} -Dsonar.verbose=true --stacktrace --info -Dsonar.sources=.." 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /snippets/slackPost: -------------------------------------------------------------------------------- 1 | import groovy.json.JsonOutput 2 | def notifySlack(text, channel, url, attachments) { 3 | def slackURL = url 4 | def jenkinsIcon = 'https://wiki.jenkins-ci.org/download/attachments/2916393/logo.png' 5 | def payload = JsonOutput.toJson([text: text, 6 | channel: channel, 7 | username: "Jenkins", 8 | icon_url: jenkinsIcon, 9 | attachments: attachments 10 | ]) 11 | sh "curl -s -S -X POST --data-urlencode \'payload=${payload}\' ${slackURL}" 12 | } 13 | 14 | node('master') { 15 | stage("Post to Slack") { 16 | notifySlack("Test Message Posting", "#ci-cd-release-info", "https://hooks.slack.com/services/${SLACK_TOKEN}", []) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /snippets/DynamicNodeCreation: 
-------------------------------------------------------------------------------- 1 | //See https://github.com/jenkinsci/kubernetes-plugin 2 | podTemplate(label: 'owasp-zap', name: 'owasp-zap', serviceAccount: 'jenkins', cloud: 'openshift', containers: [ 3 | containerTemplate( 4 | name: 'jnlp', 5 | image: '172.50.0.2:5000/openshift/jenkins-slave-zap', 6 | resourceRequestCpu: '500m', 7 | resourceLimitCpu: '1000m', 8 | resourceRequestMemory: '3Gi', 9 | resourceLimitMemory: '4Gi', 10 | workingDir: '/tmp', 11 | command: '', 12 | args: '${computer.jnlpmac} ${computer.name}' 13 | ) 14 | ]) { 15 | node('owasp-zap') { 16 | stage('Scan Web Application') { 17 | sleep 30 18 | ... 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /snippets/getting-the-image-hash-of-the-latest-image: -------------------------------------------------------------------------------- 1 | IMAGE_HASH = sh ( 2 | script: 'oc get istag :latest -o template --template="{{.image.dockerImageReference}}"|awk -F ":" \'{print $3}\'', 3 | returnStdout: true).trim() 4 | echo ">> IMAGE_HASH: $IMAGE_HASH" 5 | 6 | 7 | 8 | 9 | IMAGE_HASH = sh ( 10 | script: """oc get istag ${IMAGESTREAM_NAME}:latest -o template --template=\"{{.image.dockerImageReference}}\"|awk -F \":\" \'{print \$3}\'""", 11 | returnStdout: true).trim() 12 | echo ">> IMAGE_HASH: ${IMAGE_HASH}" 13 | } 14 | 15 | stage('deploy-' + TAG_NAMES[0]) { 16 | openshiftTag destStream: IMAGESTREAM_NAME, verbose: 'true', destTag: TAG_NAMES[0], srcStream: IMAGESTREAM_NAME, srcTag: "${IMAGE_HASH}" 17 | } 18 | -------------------------------------------------------------------------------- /vars/gitHubCreateDeploymentStatus.groovy: -------------------------------------------------------------------------------- 1 | import bcgov.GitHubHelper 2 | import org.kohsuke.github.* 3 | import com.cloudbees.jenkins.GitHubRepositoryName 4 | 5 | 6 | def call(script, deploymentId, deploymentState, options = null) { 7 | def 
commitId = sh(returnStdout: true, script: 'git rev-parse HEAD') 8 | String gitUrl = script.scm.getUserRemoteConfigs()[0].getUrl() 9 | GHRepository repository = GitHubRepositoryName.create(gitUrl).resolveOne() 10 | 11 | def builder=repository.root.retrieve().to(repository.getApiTailUrl("deployments/") + deploymentId, GHDeployment.class).wrap(repository).createStatus(GHDeploymentState.valueOf(deploymentState)) 12 | if (options!=null){ 13 | if (options.description){ 14 | builder.description(options.description) 15 | } 16 | if (options.targetUrl){ 17 | builder.targetUrl(options.targetUrl) 18 | } 19 | } 20 | 21 | return builder.create().getId() 22 | } 23 | -------------------------------------------------------------------------------- /snippets/CommitsMsgsSinceLatsSuccessfulBuild: -------------------------------------------------------------------------------- 1 | node('master') { 2 | pastBuilds = [] 3 | getAllBuildsSinceLastSuccess(pastBuilds, currentBuild); 4 | def changeLog = getChangeLog(pastBuilds) 5 | echo "changeLog ${changeLog}" 6 | } 7 | 8 | def getAllBuildsSinceLastSuccess(pastBuilds, build) { 9 | if ((build != null) && (build.result != 'SUCCESS')) { 10 | pastBuilds.add(build) 11 | getAllBuildsSinceLastSuccess(pastBuilds, build.getPreviousBuild()) 12 | } 13 | } 14 | 15 | @NonCPS 16 | def getChangeLog(pastBuilds) { 17 | def log = "" 18 | for (int x = 0; x < pastBuilds.size(); x++) { 19 | for (int i = 0; i < pastBuilds[x].changeSets.size(); i++) { 20 | def entries = pastBuilds[x].changeSets[i].items 21 | for (int j = 0; j < entries.length; j++) { 22 | def entry = entries[j] 23 | log += "* ${entry.msg} by ${entry.author} \n" 24 | } 25 | } 26 | } 27 | return log; 28 | } 29 | -------------------------------------------------------------------------------- /snippets/VerifyOpenshiftActions: -------------------------------------------------------------------------------- 1 | // Note: openshiftVerifyDeploy requires policy to be added: 2 | // oc policy add-role-to-user 
view -z system:serviceaccount:-tools:jenkins -n -dev 3 | // oc policy add-role-to-user view -z system:serviceaccount:-tools:jenkins -n -test 4 | // oc policy add-role-to-user view -z system:serviceaccount:-tools:jenkins -n -prod 5 | 6 | 7 | // Documentation: https://jenkins.io/doc/pipeline/steps/openshift-pipeline/ 8 | 9 | openshiftVerifyBuild bldCfg: '', namespace: '-tools', verbose: 'false', checkForTriggeredDeployments: 'true' 10 | // sleep 5 sec to allow the build to start 11 | sleep 5 12 | openshiftVerifyDeployment depCfg: '', namespace: '-dev', replicaCount: 1, verbose: 'false', verifyReplicaCount: 'false' 13 | openshiftVerifyService svcName: '', namespace: '-dev', verbose: 'false', retryCount: '1' 14 | 15 | 16 | -------------------------------------------------------------------------------- /snippets/run-bdd-test: -------------------------------------------------------------------------------- 1 | node('bddstack') { 2 | stage('Functional Test') { 3 | //the checkout is mandatory, otherwise functional test would fail 4 | echo "checking out source" 5 | checkout scm 6 | dir('functional-tests') { 7 | // retrieving variables from buildConfig 8 | TEST_USERNAME = sh ( 9 | script: 'oc env bc/ --list | awk -F "=" \'/TEST_USERNAME/{print $2}\'', 10 | returnStdout: true).trim() 11 | TEST_PASSWORD = sh ( 12 | script: 'oc env bc/ --list | awk -F "=" \'/TEST_PASSWORD/{print $2}\'', 13 | returnStdout: true).trim() 14 | try { 15 | sh 'export TEST_USERNAME=${TEST_USERNAME}\nexport TEST_PASSWORD=${TEST_PASSWORD}\n./gradlew --debug --stacktrace chromeHeadlessTest' 16 | } finally { 17 | archiveArtifacts allowEmptyArchive: true, artifacts: 'build/reports/**/*' 18 | archiveArtifacts allowEmptyArchive: true, artifacts: 'build/test-results/**/*' 19 | junit 'build/test-results/**/*.xml' 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /vars/slackNotify.groovy: 
-------------------------------------------------------------------------------- 1 | import groovy.json.JsonOutput 2 | def call(title, text, color, hookurl, channel, actions=[]) { 3 | def jenkinsIcon = 'https://wiki.jenkins-ci.org/download/attachments/2916393/logo.png' 4 | def slackURL = hookurl 5 | def payloadJson = [ 6 | channel: channel, 7 | username: "Jenkins", 8 | icon_url: jenkinsIcon, 9 | attachments: [[ 10 | fallback: text, 11 | color: color, 12 | author_name: env.CHANGE_AUTHOR_DISPLAY_NAME, 13 | fields: [ 14 | [ 15 | title: title, 16 | value: text, 17 | short: false 18 | ], 19 | [ 20 | title: "Change Summary", 21 | value: env.CHANGE_TITLE, 22 | short: false 23 | ] 24 | ], 25 | actions:actions 26 | ]] 27 | ] 28 | def encodedReq = URLEncoder.encode(JsonOutput.toJson(payloadJson), "UTF-8") 29 | sh("curl -s -S -X POST " + 30 | "--data \'payload=${encodedReq}\' ${slackURL}") 31 | } 32 | -------------------------------------------------------------------------------- /vars/abortAllPreviousBuildInProgress.groovy: -------------------------------------------------------------------------------- 1 | 2 | @NonCPS 3 | private void abortBuild(build){ 4 | boolean aborted=false; 5 | if (build instanceof org.jenkinsci.plugins.workflow.job.WorkflowRun){ 6 | int counter=0 7 | while (counter<60 && build.isInProgress()){ 8 | for (org.jenkinsci.plugins.workflow.support.steps.input.InputAction inputAction:build.getActions(org.jenkinsci.plugins.workflow.support.steps.input.InputAction.class)){ 9 | for (org.jenkinsci.plugins.workflow.support.steps.input.InputStepExecution inputStep:inputAction.getExecutions()){ 10 | if (!inputStep.isSettled()){ 11 | inputStep.doAbort() 12 | } 13 | } 14 | } 15 | 16 | counter++ 17 | Thread.sleep(1000) //milliseconds 18 | } 19 | } 20 | 21 | if (build.isInProgress()){ 22 | build.doKill() 23 | } 24 | 25 | } 26 | 27 | def call(currentBuild) { 28 | while(currentBuild.rawBuild.getPreviousBuildInProgress() != null) { 29 | 
abortBuild(currentBuild.rawBuild.getPreviousBuildInProgress()) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /vars/hasDirectoryChanged.groovy: -------------------------------------------------------------------------------- 1 | // Checks for new changes to the context directory 2 | def boolean call(String contextDirectory) { 3 | // Determine if code has changed within the source context directory. 4 | def changeLogSets = currentBuild.changeSets 5 | def filesChangeCnt = 0 6 | for (int i = 0; i < changeLogSets.size(); i++) { 7 | def entries = changeLogSets[i].items 8 | for (int j = 0; j < entries.length; j++) { 9 | def entry = entries[j] 10 | //echo "${entry.commitId} by ${entry.author} on ${new Date(entry.timestamp)}: ${entry.msg}" 11 | def files = new ArrayList(entry.affectedFiles) 12 | for (int k = 0; k < files.size(); k++) { 13 | def file = files[k] 14 | def filePath = file.path 15 | //echo ">> ${file.path}" 16 | if (filePath.contains(contextDirectory)) { 17 | filesChangeCnt = 1 18 | k = files.size() 19 | j = entries.length 20 | } 21 | } 22 | } 23 | } 24 | if ( filesChangeCnt < 1 ) { 25 | echo('The changes do not require a build.') 26 | return false 27 | } 28 | else { 29 | echo('The changes require a build.') 30 | return true 31 | } 32 | } -------------------------------------------------------------------------------- /vars/basicPipeline.groovy: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | * References: 4 | * - https://zwischenzugs.com/2017/04/23/things-i-wish-i-knew-before-using-jenkins-pipelines/ 5 | * - https://jenkins.io/blog/2017/10/02/pipeline-templates-with-shared-libraries/ 6 | * - https://jenkins.io/doc/pipeline/examples/ 7 | */ 8 | 9 | import hudson.model.Result; 10 | import jenkins.model.CauseOfInterruption.UserInterruption; 11 | import org.kohsuke.github.* 12 | import bcgov.OpenShiftHelper 13 | import bcgov.GitHubHelper 14 | 15 | 16 | def 
call(body) { 17 | def context= [:] 18 | 19 | body.resolveStrategy = Closure.DELEGATE_FIRST 20 | body.delegate = context 21 | body() 22 | 23 | 24 | properties([ 25 | buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')), 26 | durabilityHint('MAX_SURVIVABILITY'), 27 | parameters([string(defaultValue: '', description: '', name: 'run_stages')]) 28 | ]) 29 | 30 | 31 | stage('Prepare') { 32 | abortAllPreviousBuildInProgress(currentBuild) 33 | echo "BRANCH_NAME=${env.BRANCH_NAME}\nCHANGE_ID=${env.CHANGE_ID}\nCHANGE_TARGET=${env.CHANGE_TARGET}\nBUILD_URL=${env.BUILD_URL}" 34 | //def pullRequest=GitHubHelper.getPullRequest(this) 35 | //echo "Pull-Request: ${pullRequest}" 36 | //echo "Pull-Request: head.ref: ${pullRequest.getHead().getRef()}" 37 | } 38 | 39 | stage('Build') { 40 | node('build') { 41 | checkout scm 42 | new OpenShiftHelper().build(this, context) 43 | if ("master".equalsIgnoreCase(env.CHANGE_TARGET)) { 44 | new OpenShiftHelper().prepareForCD(this, context) 45 | } 46 | } 47 | } 48 | for(String envKeyName: context.env.keySet() as String[]){ 49 | String stageDeployName=envKeyName.toUpperCase() 50 | 51 | if ("DEV".equalsIgnoreCase(stageDeployName) || "master".equalsIgnoreCase(env.CHANGE_TARGET)) { 52 | stage("Readiness - ${stageDeployName}") { 53 | node('build') { 54 | new OpenShiftHelper().waitUntilEnvironmentIsReady(this, context, envKeyName) 55 | } 56 | } 57 | } 58 | 59 | if (!"DEV".equalsIgnoreCase(stageDeployName) && "master".equalsIgnoreCase(env.CHANGE_TARGET)){ 60 | stage("Approve - ${stageDeployName}") { 61 | def inputResponse = input(id: "deploy_${stageDeployName.toLowerCase()}", message: "Deploy to ${stageDeployName}?", ok: 'Approve', submitterParameter: 'approved_by') 62 | //echo "inputResponse:${inputResponse}" 63 | GitHubHelper.getPullRequest(this).comment("User '${inputResponse}' has approved deployment to '${stageDeployName}'") 64 | } 65 | } 66 | 67 | if 
("DEV".equalsIgnoreCase(stageDeployName) || "master".equalsIgnoreCase(env.CHANGE_TARGET)){ 68 | stage("Deploy - ${stageDeployName}") { 69 | node('build') { 70 | new OpenShiftHelper().deploy(this, context, envKeyName) 71 | } 72 | } 73 | } 74 | } 75 | 76 | stage('Cleanup') { 77 | def inputResponse=input(id: 'close_pr', message: "Ready to Accept/Merge, and Close pull-request #${env.CHANGE_ID}?", ok: 'Yes', submitter: 'authenticated', submitterParameter: 'approver') 78 | echo "inputResponse:${inputResponse}" 79 | 80 | new OpenShiftHelper().cleanup(this, context) 81 | GitHubHelper.mergeAndClosePullRequest(this) 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) 2 | 3 | 4 | # jenkins-pipeline-shared-lib 5 | Extending Jenkins with a Shared Library 6 | 7 | References: 8 | * https://jenkins.io/doc/book/pipeline/shared-libraries/ 9 | * https://jenkins.io/blog/2017/10/02/pipeline-templates-with-shared-libraries/ 10 | 11 | 12 | ## Project Setup 13 | 14 | ### Jenkins 15 | 16 | Accessing shared libaries from within your Jenkinsfile. 17 | 18 | Two Optios: 19 | 20 | 1.) 21 | 22 | You can add the shared library into the configuration for Jenkins. 23 | This can be found in **Manage Jenkins->Configure System->Global Pipeline Libraries**: 24 | 25 | From there you will want to setup up the library to use git and point it at this repository: 26 | 27 | ![Global Pipeline Setup](./docs/global-pipeline-jenkins-setup.png) 28 | 29 | Then use the library in your Jenkinsfile. 30 | 31 | Example: 32 | ``` 33 | @Library('devops-library') _ 34 | 35 | stage('testing lib') { 36 | def TIMESTAMP = getTimeStamp(); 37 | echo "${TIMESTAMP}" 38 | } 39 | ``` 40 | 41 | 2.) 42 | 43 | Define the library location at the top of the Jenkisfile. 
44 | This does not require any global Jenkins configurartion changes. 45 | 46 | Example: 47 | 48 | ``` 49 | library identifier: 'devops-library@master', retriever: modernSCM( 50 | [$class: 'GitSCMSource', 51 | remote: 'https://github.com/BCDevOps/jenkins-pipeline-shared-lib.git']) 52 | 53 | stage('testing lib') { 54 | def TIMESTAMP = getTimeStamp(); 55 | echo "${TIMESTAMP}" 56 | } 57 | ``` 58 | 59 | 60 | ### Jenkinsfile 61 | 62 | Once you have your shared library setup, you can now use the utilities within your `Jenkinsfile` like so: 63 | 64 | ```groovy 65 | 66 | // @Library('devops-library') _ 67 | 68 | library identifier: 'devops-library@master', retriever: modernSCM( 69 | [$class: 'GitSCMSource', 70 | remote: 'https://github.com/BCDevOps/jenkins-pipeline-shared-lib.git']) 71 | 72 | def hasRepoChanged = false; 73 | node{ 74 | def lastCommit = getLastCommit() // <------------------- getLastCommit utility from vars/ 75 | 76 | if(lastCommit != null){ 77 | if(env.CHANGE_AUTHOR_DISPLAY_NAME == null){ 78 | env.CHANGE_AUTHOR_DISPLAY_NAME = lastCommit.author.fullName 79 | } 80 | 81 | if(env.CHANGE_TITLE == null){ 82 | env.CHANGE_TITLE = lastCommit.msg 83 | } 84 | hasRepoChanged = true; 85 | }else{ 86 | hasRepoChanged = false; 87 | } 88 | } 89 | 90 | if(hasRepoChanged){ 91 | stage('Build ' + APP_NAME) { 92 | node{ 93 | try{ 94 | echo "Building: " + ARTIFACT_BUILD 95 | openshiftBuild bldCfg: ARTIFACT_BUILD, showBuildLogs: 'true' 96 | 97 | echo "Assembling Runtime: " + RUNTIME_BUILD 98 | openshiftBuild bldCfg: RUNTIME_BUILD, showBuildLogs: 'true' 99 | }catch(error){ 100 | slackNotify( // <------------------- slackNotify utility from shared lib vars/ folder 101 | 'Build Broken 🤕', 102 | "The latest ${APP_NAME} build seems to have broken\n'${error.message}'", 103 | 'danger', 104 | env.SLACK_HOOK, 105 | SLACK_DEV_CHANNEL, 106 | [ 107 | [ 108 | type: "button", 109 | text: "View Build Logs", 110 | style:"danger", 111 | url: "${currentBuild.absoluteUrl}/console" 112 | ] 113 | ]) 
114 | throw error 115 | } 116 | } 117 | } 118 | } 119 | ``` 120 | 121 | ## Utilities Available 122 | 123 | The utility functions that are available to be called like global helpers can be found in the [vars/](./vars/) folder. We currently have: 124 | 125 | - [getChangeString](./vars/getChangeString.groovy) 126 | - Generates a string representation of a changeset 127 | - [getLastCommit](./vars/getLastCommit.groovy) 128 | - Get's the last commit from the changeset, returns null if the build is a repeat 129 | - [hasDirectoryChanged](./vars/hasDirectoryChanged.groovy) 130 | - Will return `true` or `false` whether or not a particular directory has been modified in the current changeset. Useful if multiple builds are triggered from a single repo 131 | - [slackNotify](./vars/slackNotify.groovy) 132 | - Sends a notification to slack via [Incoming Webhook](https://api.slack.com/incoming-webhooks) 133 | - >Note: The webhook URL should be placed into an environment variable and not in your repositories code base as it should be kept secret 134 | 135 | 136 | ## Directory Structure: 137 | 138 | ``` 139 | (root) 140 | +- src # Groovy source files 141 | | +- org 142 | | +- foo 143 | | +- Bar.groovy # for org.foo.Bar class 144 | +- vars 145 | | +- foo.groovy # for global 'foo' variable / custom step 146 | | +- foo.txt # help for 'foo' variable 147 | ``` 148 | 149 | The src directory looks like a standard Java source directory structure. This directory is added to the classpath when executing Pipelines. 150 | 151 | The vars directory hosts scripts that define global variables accessible from Pipeline. The basename of each *.groovy file should be a Groovy (~ Java) identifier, conventionally camelCased. The matching *.txt, if present, can contain documentation, processed through the system’s configured markup formatter (so may really be HTML, Markdown, etc., though the txt extension is required). 
152 | 153 | The Groovy source files in these directories get the same “CPS transformation” as in Scripted Pipeline. 154 | 155 | 156 | Pipeline Steps 157 | https://jenkins.io/doc/pipeline/steps/ 158 | -------------------------------------------------------------------------------- /src/bcgov/GitHubHelper.groovy: -------------------------------------------------------------------------------- 1 | package bcgov 2 | 3 | import org.kohsuke.github.* 4 | import org.jenkinsci.plugins.workflow.cps.CpsScript 5 | import com.cloudbees.jenkins.GitHubRepositoryName 6 | 7 | /* 8 | * Reference: 9 | * - http://github-api.kohsuke.org/apidocs/index.html 10 | * - https://github.com/jenkinsci/github-plugin/blob/master/src/main/java/com/cloudbees/jenkins/GitHubRepositoryName.java 11 | * */ 12 | class GitHubHelper { 13 | 14 | static String getRepositoryUrl(CpsScript script){ 15 | return script.scm.getUserRemoteConfigs()[0].getUrl() 16 | } 17 | 18 | static GHRepository getGitHubRepository(CpsScript script){ 19 | return getGitHubRepository(script.scm.getUserRemoteConfigs()[0].getUrl()) 20 | } 21 | 22 | @NonCPS 23 | private static String stackTraceAsString(Throwable t) { 24 | StringWriter sw = new StringWriter(); 25 | t.printStackTrace(new PrintWriter(sw)); 26 | return sw.toString() 27 | } 28 | 29 | @NonCPS 30 | static GHRepository getGitHubRepository(String url){ 31 | return GitHubRepositoryName.create(url).resolveOne() 32 | } 33 | 34 | static GHPullRequest getPullRequest(CpsScript script){ 35 | return getGitHubRepository(script).getPullRequest(Integer.parseInt(script.env.CHANGE_ID)) 36 | } 37 | 38 | static String getPullRequestLastCommitId(CpsScript script){ 39 | return getPullRequest(script).getHead().getSha() 40 | } 41 | 42 | @NonCPS 43 | static boolean mergeAndClosePullRequest(String repositoryUrl, int prNumber, String mergeMethod){ 44 | GHRepository repo=getGitHubRepository(repositoryUrl) 45 | GHPullRequest pullRequest = repo.getPullRequest(prNumber) 46 | Boolean mergeable = 
pullRequest.getMergeable() 47 | GHIssueState state = pullRequest.getState() 48 | boolean ret=false 49 | boolean doClose=true; 50 | 51 | if (state != GHIssueState.CLOSED) { 52 | GHCommitPointer head = pullRequest.getHead() 53 | if (!pullRequest.isMerged()) { 54 | if (mergeable != null && mergeable.booleanValue() == true) { 55 | pullRequest.merge("Merged PR-${prNumber}", head.getSha(), GHPullRequest.MergeMethod.valueOf(mergeMethod.toUpperCase())) 56 | } else { 57 | doClose = false 58 | } 59 | } 60 | 61 | if (doClose && pullRequest.getRepository().getFullName().equalsIgnoreCase(head.getRepository().getFullName())) { 62 | if (head.getRef() != null) { 63 | GHRef headRef = repo.getRef('heads/' + head.getRef()) 64 | if (headRef != null) { 65 | headRef.delete() 66 | } 67 | } 68 | } 69 | 70 | if (doClose){ 71 | pullRequest.close() 72 | ret = true 73 | } 74 | }else{ 75 | ret = true 76 | } 77 | 78 | return ret 79 | } 80 | static boolean mergeAndClosePullRequest(CpsScript script) { 81 | return mergeAndClosePullRequest(script, 'merge') 82 | } 83 | static boolean mergeAndClosePullRequest(CpsScript script, String mergeMethod) { 84 | try { 85 | return mergeAndClosePullRequest(getRepositoryUrl(script), Integer.parseInt(script.env.CHANGE_ID), mergeMethod) 86 | }catch (ex){ 87 | //This need to be done because the github API does NOT return serializable Exceptions 88 | script.echo "Original Stack Trace:\n${stackTraceAsString(ex)}" 89 | throw new IOException(ex.message) 90 | } 91 | } 92 | 93 | static void commentOnPullRequest(CpsScript script, String comment) { 94 | try { 95 | commentOnPullRequest(getRepositoryUrl(script), Integer.parseInt(script.env.CHANGE_ID), comment) 96 | }catch (ex){ 97 | //This need to be done because the github API does NOT return serializable Exceptions 98 | script.echo "Original Stack Trace:\n${stackTraceAsString(ex)}" 99 | throw new IOException(ex.message) 100 | } 101 | } 102 | 103 | @NonCPS 104 | static void commentOnPullRequest(String repositoryUrl, int 
pullRequestNumber, String comment) {
        GHRepository repo=getGitHubRepository(repositoryUrl)
        GHPullRequest pullRequest = repo.getPullRequest(pullRequestNumber)
        pullRequest.comment(comment)

    }

    /** Starts a GitHub deployment builder for 'ref' on the repository backing this build's SCM. */
    static GHDeploymentBuilder createDeployment(CpsScript script, String ref) {
        return getGitHubRepository(script).createDeployment(ref)
    }

    /** Same as above, but resolves the repository from an explicit clone URL. */
    static GHDeploymentBuilder createDeployment(String url, String ref) {
        return getGitHubRepository(url).createDeployment(ref)
    }

    /** Begins a status update for an existing deployment; caller must finish/create() the returned builder. */
    static def createDeploymentStatus(CpsScript script, long deploymentId, GHDeploymentState state) {
        return getGitHubRepository(script).getDeployment(deploymentId).createStatus(state)
    }

    /*
    * http://github-api.kohsuke.org/apidocs/org/kohsuke/github/GHDeploymentBuilder.html
    * */
    // Creates a GitHub deployment for 'ref' and returns its id.
    // deploymentConfig keys used: environment (always applied), and optionally
    // payload, description, task, requiredContexts.
    // @NonCPS: github-api objects are not serializable by Jenkins CPS.
    @NonCPS
    def createDeployment(String url, String ref, Map deploymentConfig) {
        //long deploymentId = -1
        GHRepository repository=getGitHubRepository(url)

        /*
        for (GHDeployment deployment:repository.listDeployments(null, ref, null, deploymentConfig.environment)){
        deployment.createStatus(GHDeploymentState.PENDING).create()
        return deployment.getId()
        }
        */
        GHDeploymentBuilder builder=repository.createDeployment(ref)
        builder.environment(deploymentConfig.environment)
        // Do not let GitHub auto-merge the default branch into 'ref'.
        builder.autoMerge(false)
        // Default to no required status contexts; may be overridden from deploymentConfig below.
        builder.requiredContexts([])


        //deployment=null

        if (deploymentConfig!=null) {
            //if (deploymentConfig.environment) {
            //    builder.environment(deploymentConfig.environment)
            //}

            if (deploymentConfig.payload) {
                builder.payload(deploymentConfig.payload)
            }

            if (deploymentConfig.description) {
                builder.description(deploymentConfig.description)
            }

            if (deploymentConfig.task) {
                builder.task(deploymentConfig.task)
            }

            if (deploymentConfig.requiredContexts) {
                builder.requiredContexts(deploymentConfig.requiredContexts)
            }
        }

        /*
        long deploymentId = builder.create().getId()
        builder=null;
        return deploymentId
        */

        return builder.create().getId()
    }

    /** Pipeline-facing wrapper: logs the request, then creates a deployment for the build's own repo. */
    long createDeployment(CpsScript script, String ref, Map deploymentConfig) {
        script.echo "ref:${ref} - config:${deploymentConfig}"
        return createDeployment(script.scm.getUserRemoteConfigs()[0].getUrl(), ref, deploymentConfig)
    }

    // Creates a deployment status for deploymentId and returns the new status id.
    // statusName must match a GHDeploymentState constant (e.g. 'PENDING', 'SUCCESS').
    // deploymentStatusConfig keys (optional): description, targetUrl.
    @NonCPS
    static long createDeploymentStatus(String url, long deploymentId, String statusName, Map deploymentStatusConfig) {
        def ghRepo=getGitHubRepository(url)
        def ghDeploymentState=GHDeploymentState.valueOf(statusName)

        def ghDeploymentStatus=ghRepo.getDeployment(deploymentId).createStatus(ghDeploymentState)

        if (deploymentStatusConfig.description){
            ghDeploymentStatus.description(deploymentStatusConfig.description)
        }
        if (deploymentStatusConfig.targetUrl){
            ghDeploymentStatus.targetUrl(deploymentStatusConfig.targetUrl)
        }
        return ghDeploymentStatus.create().getId()
    }
    /** Pipeline-facing wrapper: logs, then creates the deployment status against the build's own repo. */
    static long createDeploymentStatus(CpsScript script, long deploymentId, String statusName, Map config) {
        script.echo "deploymentId:${deploymentId} - status:${statusName} - config:${config}"
        return createDeploymentStatus(script.scm.getUserRemoteConfigs()[0].getUrl(), deploymentId, statusName, config)
    }
    // Sets a commit status on sha1. statusName must match a GHCommitState constant.
    @NonCPS
    static void createCommitStatus(String url, String sha1, String statusName, String targetUrl, String description, String context) {
        def ghRepo=getGitHubRepository(url)
        def ghCommitState=GHCommitState.valueOf(statusName)

        ghRepo.createCommitStatus(sha1, ghCommitState, targetUrl, description, context)
    }

    static void createCommitStatus(CpsScript script, String ref, String statusName, String
targetUrl, String description, String context) { 210 | createCommitStatus(script.scm.getUserRemoteConfigs()[0].getUrl() as String, ref, statusName, targetUrl, description, context) 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/bcgov/OpenShiftHelper.groovy: -------------------------------------------------------------------------------- 1 | package bcgov; 2 | 3 | import org.jenkinsci.plugins.workflow.cps.CpsScript; 4 | import com.openshift.jenkins.plugins.OpenShiftDSL; 5 | import org.kohsuke.github.GHRepository 6 | 7 | class OpenShiftHelper { 8 | int logLevel=0 9 | static List PROTECTED_TYPES = ['Secret', 'ConfigMap', 'PersistentVolumeClaim'] 10 | static String ANNOTATION_AS_COPY_OF ='as-copy-of' 11 | static String ANNOTATION_ROUTE_TLS_SECRET_NAME='template.openshift.io.bcgov/tls-secret-name' 12 | static String ANNOTATION_ALLOW_CREATE='template.openshift.io.bcgov/create' 13 | static String ANNOTATION_ALLOW_UPDATE='template.openshift.io.bcgov/update' 14 | 15 | @NonCPS 16 | private String getLastSha1InPath(String gitURL, String head, String path) { 17 | //if (path==null || path.length() == 0 ) return head 18 | GHRepository repository=GitHubHelper.getGitHubRepository(gitURL) 19 | return 
repository.queryCommits().pageSize(1).from(head).path(path).list().iterator().next().getSHA1();
    }

    // Fills 'metadata' with facts about the current build: commit id, repo URL,
    // branch name, and (for PR builds) the pull-request number and remote ref.
    private void loadMetadata(CpsScript script, Map metadata) {
        metadata.commitId = script.sh(returnStdout: true, script: 'git rev-parse HEAD').trim()
        metadata.isPullRequest=(script.env.CHANGE_ID != null && script.env.CHANGE_ID.trim().length()>0)
        metadata.gitRepoUrl = script.scm.getUserRemoteConfigs()[0].getUrl()

        metadata.buildBranchName = script.env.BRANCH_NAME;
        metadata.buildEnvName = 'bld'
        metadata.buildNamePrefix = "${metadata.appName}"
        metadata.isPullRequestFromFork = false

        if (metadata.isPullRequest){
            loadPullRequestMetadata(metadata, GitHubHelper.getPullRequest(script))
            metadata.pullRequestNumber=script.env.CHANGE_ID
            // Resolve the refs/pull/<n>/* ref that points at the checked-out commit.
            metadata.gitBranchRemoteRef = script.sh(returnStdout: true, script: "git ls-remote origin 'refs/pull/${script.env.CHANGE_ID}/*' | grep '${metadata.commitId}' | cut -f2").trim()
            metadata.buildEnvName="pr-${metadata.pullRequestNumber}"
        }

        metadata.buildNameSuffix = "-${metadata.buildEnvName}"
    }

    // Protected kinds (Secret/ConfigMap/PersistentVolumeClaim) may only be created
    // when explicitly annotated with ANNOTATION_ALLOW_CREATE; all other kinds are allowed.
    private boolean allowCreate(Map newModel) {
        return !PROTECTED_TYPES.contains(newModel.kind) || Boolean.parseBoolean((newModel.metadata?.annotations?:[:])[ANNOTATION_ALLOW_CREATE]?:'false')==true
    }

    // Same gate as allowCreate, but for updates (ANNOTATION_ALLOW_UPDATE).
    private boolean allowUpdate(Map newModel) {
        return !PROTECTED_TYPES.contains(newModel.kind) || Boolean.parseBoolean((newModel.metadata?.annotations?:[:])[ANNOTATION_ALLOW_UPDATE]?:'false')==true
    }

    // currentModel == null means the object does not exist yet (create path); otherwise update path.
    private boolean allowCreateOrUpdate(Map newModel, Map currentModel) {
        return (currentModel==null && allowCreate(newModel)) || (currentModel!=null && allowUpdate(newModel))
    }
    // A PR comes from a fork when the head repository differs from the base repository.
    @NonCPS
    private static void loadPullRequestMetadata(Map metadata, org.kohsuke.github.GHPullRequest pullRequest) {
        metadata.isPullRequestFromFork = !pullRequest.getRepository().getFullName().equalsIgnoreCase(pullRequest.getHead().getRepository().getFullName())
    }
    /** Serializes any object to a JSON string. */
    @NonCPS
    private static String toJsonString(Object object) {
        return groovy.json.JsonOutput.toJson(object)
    }

    // Parses the output of 'oc process -f <file> --parameters': skips the header
    // (title) line and keeps the first token — the parameter name — of each row.
    @NonCPS
    private List getTemplateParameters(String parameters) {
        List ret=[]
        boolean isTitleLine=true
        for (String line:parameters.tokenize('\n')){
            if (!isTitleLine){
                ret.add(line.tokenize()[0])
            }
            isTitleLine=false;
        }
        return ret;
    }

    // Renders a throwable's stack trace as text; used because exceptions from the
    // GitHub API are not CPS-serializable, so only their text can be logged safely.
    @NonCPS
    private static String stackTraceAsString(Throwable t) {
        StringWriter sw = new StringWriter();
        t.printStackTrace(new PrintWriter(sw));
        return sw.toString()
    }

    /** Expands a GString template against the given bindings and returns the result. */
    @NonCPS
    private def processStringTemplate(String template, Map bindings) {
        def engine = new groovy.text.GStringTemplateEngine()
        return engine.createTemplate(template).make(bindings).toString()
    }

    // Builds the ['-p', 'NAME=value', ...] argument list for 'oc process',
    // expanding each parameter value as a GString template against 'bindings'.
    @NonCPS
    private List createProcessTemplateParameters(Map params, Map bindings) {
        def engine = new groovy.text.GStringTemplateEngine()
        def ret=[]
        for (String paramName:params.keySet()) {
            ret.add('-p')
            ret.add(paramName+'='+engine.createTemplate(params[paramName]).make(bindings).toString())
        }
        return ret
    }

    // Processes the given OpenShift templates into a map of objects keyed by 'Kind/name'.
    // Well-known parameters (NAME_SUFFIX, SOURCE_REPOSITORY_URL, ENV_NAME, BUILD_ENV_NAME)
    // are defaulted according to 'purpose' ('build' or 'deployment') unless already set.
    private Map loadObjectsFromTemplate(OpenShiftDSL openshift, List templates, Map context, String purpose){
        def models = [:]
        if (templates !=null && templates.size() > 0) {
            for (Map template : templates) {
                List parameters=getTemplateParameters(openshift.raw('process', '-f', template.file, '--parameters').out)
                template.params = template.params?:[:]

                for (String paramName:parameters){
                    if ('build'.equals(purpose)) {
                        if ('NAME_SUFFIX'.equals(paramName)) {
                            template.params[paramName] = '${buildNameSuffix}'
                        } else if ('SOURCE_REPOSITORY_URL'.equals(paramName)) {
                            template.params[paramName] =
'${gitRepoUrl}'
                        } else if ('ENV_NAME'.equals(paramName)) {
                            template.params[paramName] = '${buildEnvName}'
                        }
                    }else if ('deployment'.equals(purpose)) {
                        if ('NAME_SUFFIX'.equals(paramName)) {
                            template.params[paramName] = '${deploy.dcSuffix}'
                        } else if ('SOURCE_REPOSITORY_URL'.equals(paramName)) {
                            template.params[paramName] = '${gitRepoUrl}'
                        } else if ('ENV_NAME'.equals(paramName)) {
                            template.params[paramName] = '${deploy.envName}'
                        } else if ('BUILD_ENV_NAME'.equals(paramName)) {
                            template.params[paramName] = '${buildEnvName}'
                        }
                    }
                }
                // Templates may be given by file ('-f <file>') or by name.
                List params=createProcessTemplateParameters(template.params, context)
                String firstParam=template?.template
                if (template.file){
                    firstParam='-f'
                    params.add(0, template.file)
                }
                for (Map model in openshift.process(firstParam, params)){
                    models[key(model)] = model
                }
            }
        }
        return models
    }

    // Returns all labelled objects of the listed kinds, keyed by 'Kind/name'.
    private Map loadObjectsByLabel(OpenShiftDSL openshift, Map labels){
        def models = [:]
        def selector=openshift.selector('is,bc,secret,configmap,dc,svc,route', labels)

        if (selector.count()>0) {
            for (Map model : selector.objects(exportable:false)) {
                models[key(model)] = model
            }
        }
        return models
    }

    // Snapshots the status of every labelled BuildConfig, its latest Build, and
    // (when that build succeeded) its output image — all keyed by 'Kind/name'.
    private Map loadBuildConfigStatus(OpenShiftDSL openshift, Map labels){
        Map buildOutput = [:]
        def selector=openshift.selector('bc', labels)

        if (selector.count()>0) {
            for (Map bc : selector.objects()) {
                String buildName = "Build/${bc.metadata.name}-${bc.status.lastVersion}"
                Map build=null

                // The latest build may have been pruned/deleted already.
                if (openshift.selector(buildName).exists()){
                    build = openshift.selector(buildName).object()
                }

                if (build!=null) {
                    buildOutput[buildName] = [
                        'kind' : build.kind,
                        'metadata': ['name': build.metadata.name],
                        'spec' : ['revision': build.spec.revision],
                        'output' : [
                            'to': [
                                'kind': build.spec.output.to.kind,
                                'name': build.spec.output.to.name
                            ]
                        ],
                        'status' : ['phase': build.status.phase]
                    ]

                    // Record the produced image only for successful builds.
                    if (isBuildSuccesful(build)) {
                        buildOutput["${build.spec.output.to.kind}/${build.spec.output.to.name}"] = [
                            'kind' : build.spec.output.to.kind,
                            'metadata' : ['name': build.spec.output.to.name],
                            'imageDigest' : build.status.output.to.imageDigest,
                            'outputDockerImageReference': build.status.outputDockerImageReference
                        ]
                    }
                }

                buildOutput["${key(bc)}"] = [
                    'kind': bc.kind,
                    'metadata': ['name':bc.metadata.name],
                    'status': ['lastVersion':bc.status.lastVersion, 'lastBuildName':buildName]
                ]
            }
        }
        return buildOutput
    }

    /** Canonical map key for an OpenShift object: 'Kind/name'. */
    @NonCPS
    private String key(Map model){
        return "${model.kind}/${model.metadata.name}"
    }

    // Blocks until every labelled DeploymentConfig has finished rolling out:
    // first waits for the replication controllers to complete, then for pods to be ready.
    private void waitForDeploymentsToComplete(CpsScript script, OpenShiftDSL openshift, Map labels){
        script.echo "Waiting for deployments with labels ${labels}"

        Map rcLabels=[:]
        openshift.selector('dc', labels).withEach { it ->
            def dc=it.object()
            rcLabels['openshift.io/deployment-config.name']= dc.metadata.name
        }

        boolean doCheck=true
        int failures=0

        //wait for replication controllers to finish
        while(doCheck) {
            try {
                // Restart the watch every 5 minutes; long-lived watches can hang or time out.
                script.timeout(5) {
                    openshift.selector('rc', rcLabels).watch {
                        boolean allDone = true
                        it.withEach { item ->
                            def object = item.object()
                            script.echo "${key(object)} - ${getReplicationControllerStatus(object)}"
                            if (!isReplicationControllerComplete(object)) {
                                allDone = false
                            }
                        }
                        return allDone
                    }
                }
            }catch (ex){
                failures++
                script.echo "${stackTraceAsString(ex)}"
                //after 10 failures, give up
                if (failures > 10){
                    throw ex
                }
                continue
            }

            script.sleep 5
            // Re-check outside the watch to confirm every RC really completed.
            doCheck=false
            for (Map build:openshift.selector('rc', rcLabels).objects()){
                if (!isReplicationControllerComplete(build)) {
                    doCheck=true
                    break
                }
            }
        }

        //wait for pods to startup
        doCheck=true
        failures = 0
        while(doCheck) {
            try {
                //5 minutes timeout before restarting watch
                script.timeout(5) {
                    openshift.selector('dc', labels).watch {
                        boolean allDone = true
                        it.withEach { item ->
                            def dc = item.object()
                            script.echo "${key(dc)} - desired:${dc?.status?.replicas} ready:${dc?.status?.readyReplicas} available:${dc?.status?.availableReplicas}"
                            // Done only when desired == ready == available.
                            if (!(dc?.status?.replicas == dc?.status?.readyReplicas && dc?.status?.replicas == dc?.status?.availableReplicas)) {
                                allDone = false
                            }
                        }
                        return allDone
                    }
                }
            } catch (ex){
                failures++
                script.echo "${stackTraceAsString(ex)}"
                //after 10 failures, give up
                if (failures > 10){
                    throw ex
                }
                continue
            }

            script.sleep 5
            doCheck=false
            for (Map dc : openshift.selector('dc', labels).objects()){
                if (!(dc?.status?.replicas == dc?.status?.readyReplicas && dc?.status?.replicas == dc?.status?.availableReplicas)) {
                    doCheck=true
                    break
                }
            }
        }
    }

    // Blocks until every labelled Build has completed (successfully or not).
    // Tolerates builds disappearing mid-wait (objects pruned/deleted by OpenShift).
    private void waitForBuildsToComplete(CpsScript script, OpenShiftDSL openshift, Map labels){
        //openshift.verbose(true)
        script.echo "Waiting for builds with labels ${labels}"
        boolean doCheck=true
        while(doCheck) {
            openshift.selector('builds', labels).watch {
                boolean allDone = true
                /*
                for (Map object:it.objects(exportable:true)){
                    if (!isBuildComplete(object)) {
                        script.echo "${key(object)} - ${object.status.phase}"
                        allDone = false
                    }
                }
                */

                it.withEach { item ->
                    try {
                        def object = item.object()
                        if (!isBuildComplete(object)) {
                            script.echo "${key(object)} - ${object.status.phase}"
                            allDone = false
                        }
                    }catch (ex){
                        script.echo "${stackTraceAsString(ex)}"
                        //This can happen when the script waits for so long
                        // that a build object may just have been pruned/deleted
                        return false
                    }
                }

                return allDone
            }
            script.sleep 5
            // Re-check outside the watch; keep waiting if a listing fails transiently.
            doCheck=false
            try {
                for (Map build:openshift.selector('builds', labels).objects()){
                    if (!isBuildComplete(build)) {
                        doCheck = true
                        break
                    }
                } //end for
            }catch (ex){
                script.echo "${stackTraceAsString(ex)}"
                //This can happen when the script waits for so long
                // that a build object may just have been pruned/deleted
                doCheck = true
            }
        } //end while
        //openshift.verbose(false)
    }

    // Verifies the Jenkins service account can access every project referenced by
    // context.env. When an env has no explicit project, derives candidate names from
    // the current '-tools' project and pauses (input step) until access is granted.
    private void checkProjectsAccess(CpsScript script, OpenShiftDSL openshift, Map context){
        String currentUser= openshift.raw('whoami').out.tokenize()[0]
        String currentProjectName= openshift.project()
        String currentProjectBaseName = null

        // Strip the '-tools' suffix to obtain the project base name, if present.
        if (currentProjectName.endsWith('-tools')){
            currentProjectBaseName=currentProjectName.substring(0, currentProjectName.length()-6)
        }

        script.echo "currentProjectBaseName: '${currentProjectBaseName}'"

        Map modifiedEnvProjects=[:]

        script.waitUntil {
            boolean isReady = true
            List projects=[]
            List accessibleProjects=openshift.raw('projects', '-q').out.tokenize()
            for(String envKeyName: context.env.keySet() as String[]){
                Map env=context.env[envKeyName]
                if (env.project!=null) {
                    projects.add(env.project)
                }else if (env.project == null && currentProjectBaseName!=null){
                    String deployProjectName="${currentProjectBaseName}-deploy"
                    String envProjectName="${currentProjectBaseName}-${envKeyName.toLowerCase()}"
                    boolean deployProjectAccessible =
accessibleProjects.contains(deployProjectName)
                    boolean envProjectAccessible = accessibleProjects.contains(envProjectName)

                    script.echo "deployProjectName:'${deployProjectName}' accessible:${deployProjectAccessible}"
                    script.echo "envProjectName:'${envProjectName}' accessible:${envProjectAccessible}"

                    // Prefer a shared '-deploy' project when accessible; otherwise
                    // fall back to the per-environment project name.
                    if (deployProjectAccessible){
                        modifiedEnvProjects[envKeyName]=deployProjectName
                    }else{
                        modifiedEnvProjects[envKeyName]=envProjectName
                    }

                    projects.add(modifiedEnvProjects[envKeyName])
                }else if (env.project == null){
                    // No base name available: target the current project.
                    modifiedEnvProjects[envKeyName]="${currentProjectName}"
                    projects.add(modifiedEnvProjects[envKeyName])
                }
            }

            script.echo "Accessible Projects '${accessibleProjects}'"

            for(String projectName: projects.unique()){
                if (!accessibleProjects.contains(projectName)){
                    isReady=false
                    script.echo "Cannot access project '${projectName}'. Please run:"
                    script.echo " oc policy add-role-to-user edit ${currentUser} -n ${projectName}"
                }
            }

            // Pause the pipeline so an operator can grant access, then re-check.
            if (!isReady) {
                script.input "Retry Access Check?"
            }

            return isReady
        }

        // Persist the derived project names back into the context.
        for(String envKeyName: modifiedEnvProjects.keySet() as String[]){
            script.echo "Setting target project for '${envKeyName}' to '${modifiedEnvProjects[envKeyName]}' "
            context.env[envKeyName].project=modifiedEnvProjects[envKeyName]
        }
        //script.error "stop here"
    }

    // Marks every target environment's deployment commit status as PENDING on
    // GitHub before continuous deployment starts.
    def prepareForCD(CpsScript script, Map context) {
        //Prepare status for deployments
        for(String envKeyName: context.env.keySet() as String[]){
            new GitHubHelper().createCommitStatus(script, context.commitId, 'PENDING', "${script.env.BUILD_URL}", "Deployment to ${envKeyName.toUpperCase()}", "continuous-integration/jenkins/deployment/${envKeyName.toLowerCase()}")
        }
    }

    // Entry point for the build phase: stashes the template files, loads build
    // metadata, reports a PENDING 'Build' commit status, then (continuing below)
    // applies BuildConfigs and runs the OpenShift builds.
    def build(CpsScript script, Map context) {
        OpenShiftDSL openshift=script.openshift;

        // Collect every file-based template so it can be stashed for later stages.
        def stashIncludes=[]
        for ( List templates : context.templates.values()){
            for ( Map template : templates){
                if (template.file){
                    stashIncludes.add(template.file)
                }
            }
        }

        script.echo "BRANCH_NAME=${script.env.BRANCH_NAME}\nCHANGE_ID=${script.env.CHANGE_ID}\nCHANGE_TARGET=${script.env.CHANGE_TARGET}\nBUILD_URL=${script.env.BUILD_URL}\nisPullRequestFromFork"
        script.echo "absoluteUrl=${script.currentBuild.absoluteUrl}"

        script.sh(returnStdout: false, script: "git log --pretty=oneline -20")
        script.sh(returnStdout: false, script: "git rev-list -20 HEAD")


        loadMetadata(script, context)

        script.echo "isPullRequestFromFork:${context.isPullRequestFromFork}"

        new GitHubHelper().createCommitStatus(script, context.commitId, 'PENDING', "${script.env.BUILD_URL}", 'Build', 'continuous-integration/jenkins/build')

        context['ENV_KEY_NAME'] = 'build'
        script.stash(name: 'openshift', includes:stashIncludes.join(','))
        Map labels=['app-name': context.name, 'env-name': context.buildEnvName]

openshift.withCluster() { 449 | openshift.withProject(openshift.project()) { 450 | checkProjectsAccess(script, openshift, context) 451 | 452 | script.echo "Connected to project '${openshift.project()}' as user '${openshift.raw('whoami').out.tokenize()[0]}'" 453 | def newObjects = loadObjectsFromTemplate(openshift, context.templates.build, context, 'build') 454 | def currentObjects = loadObjectsByLabel(openshift, labels) 455 | //script.echo "${currentObjects}" 456 | for (Map m : newObjects.values()){ 457 | if ('BuildConfig'.equalsIgnoreCase(m.kind)){ 458 | // apply last commit id/hash to spec.source.git.ref 459 | // this ensure that a build will get triggered only when there has been changes 460 | 461 | String commitId = context.commitId 462 | String contextDir=null 463 | 464 | if (m.spec && m.spec.source && m.spec.source.contextDir){ 465 | contextDir=m.spec.source.contextDir 466 | } 467 | 468 | if (contextDir!=null && contextDir.startsWith('/') && !contextDir.equalsIgnoreCase('/')){ 469 | contextDir=contextDir.substring(1) 470 | } 471 | 472 | if (!m.metadata.annotations) m.metadata.annotations=[:] 473 | if (!m.metadata.labels) m.metadata.labels=[:] 474 | m.metadata.annotations['source.git.commit']=commitId 475 | 476 | if (m.spec.source?.git?.uri){ 477 | if (m.spec.source.git.uri.equalsIgnoreCase(context.gitRepoUrl)){ 478 | commitId=getLastSha1InPath(m.spec.source.git.uri, context.gitBranchRemoteRef, contextDir?:'') 479 | if (m.spec.source.git.ref) m.metadata.annotations['source/spec.source.git.ref']=m.spec.source.git.ref 480 | m.metadata.annotations['source.git.ref']=context.gitBranchRemoteRef 481 | m.metadata.annotations['source.git.head']=getLastSha1InPath(m.spec.source.git.uri, context.gitBranchRemoteRef, '') 482 | m.metadata.annotations['source.git.commit']=commitId 483 | m.spec.source.git.ref=commitId 484 | if (context.isPullRequestFromFork) { 485 | m.metadata.annotations['source.git.commit']=m.metadata.annotations['source.git.head'] 486 | 
m.spec.source.git.ref = context.gitBranchRemoteRef
          }
        } else {
          // Not a PR build: pin this BuildConfig to the newest commit that touched its
          // context directory, and record ref/head/commit as annotations so a later run
          // can tell whether a rebuild is actually required.
          commitId = getLastSha1InPath(m.spec.source.git.uri, m.spec.source.git.ref, contextDir ?: '')
          m.metadata.annotations['source.git.ref'] = m.spec.source.git.ref
          m.metadata.annotations['source.git.head'] = getLastSha1InPath(m.spec.source.git.uri, m.spec.source.git.ref, '')
          m.metadata.annotations['source.git.commit'] = commitId
          m.spec.source.git.ref = commitId
        }
      }

      // Label every object with the commit it was produced from.
      m.metadata.labels['git-commit'] = m.metadata.annotations['source.git.commit']

      m.spec.runPolicy = 'SerialLatestOnly'
      // Re-point the output tag at this build environment (keep the image name, swap the tag).
      m.spec.output.to.name = m.spec.output.to.name.tokenize(':')[0] + ':' + context.buildEnvName
      if (m.spec.source?.git?.uri) {
        script.echo "${key(m)} - ${m.spec.source.git.uri}#${m.spec?.source?.git?.ref} @ ${m.metadata.annotations['source.git.head']} - /${m?.spec?.source?.contextDir?:''} @ ${m.metadata.annotations['source.git.commit']}"
      } else {
        script.echo "${key(m)} - @ ${m.metadata.annotations['source.git.commit']}"
      }

      // Carry forward lastTriggeredImageID from the live BuildConfig so applying an
      // otherwise-identical ImageChange trigger does not kick off a spurious build.
      if (m.spec?.triggers != null) {
        Map current = currentObjects[key(m)]
        if (current != null) {
          for (Map t1 : m.spec.triggers) {
            if ('ImageChange'.equalsIgnoreCase(t1.type)) {
              if (current.spec.triggers != null) {
                for (Map t2 : current.spec.triggers) {
                  if ('ImageChange'.equalsIgnoreCase(t2.type)) {
                    // Same trigger source (or both default) => same trigger; copy the marker over.
                    if (
                      (t1.imageChange?.from == null && t2.imageChange?.from == null) ||
                      (
                        (t1.imageChange?.from != null && t2.imageChange?.from != null) &&
                        t1.imageChange.from.kind.equalsIgnoreCase(t2.imageChange.from.kind) &&
                        t1.imageChange.from.name.equalsIgnoreCase(t2.imageChange.from.name)
                      )
                    ) {
                      t1.imageChange = t1.imageChange ?: [:]
                      t1.imageChange.lastTriggeredImageID = t2.imageChange.lastTriggeredImageID
                    }
                  }
                }
              }
            }
          }
        }
      } // end trigger fix-up
    } // end if
  } // end for

  // Snapshot taken for reference before applying (currently only the post-apply state is used).
  def initialBuildConfigState = loadBuildConfigStatus(openshift, labels)

  applyBuildConfig(script, openshift, context.name, context.buildEnvName, newObjects, currentObjects)
  script.echo "Waiting for builds to complete"
  waitForBuildsToComplete(script, openshift, labels)
  def startedNewBuilds = false
  def postBuildConfigState = loadBuildConfigStatus(openshift, labels)

  // Decide, per BuildConfig, whether a fresh build must be started.
  for (Map item : postBuildConfigState.values()) {
    if ('BuildConfig'.equalsIgnoreCase(item.kind)) {
      String lastBuildName = "Build/${item.metadata.name}-${item.status.lastVersion}"
      Map lastBuild = postBuildConfigState[lastBuildName]
      script.echo "Analyzing if ${key(item)} needs a new build (last build is '${lastBuildName}')"
      if (lastBuild != null) {
        script.echo " Based on ${key(lastBuild)} with status ${lastBuild.status.phase}"
      } else {
        script.echo " Based on last build not found"
      }
      def newBuild = null
      if (lastBuild == null) {
        script.echo " Starting a new build because none was found"
        newBuild = openshift.selector(key(item)).startBuild()
      } else if (!isBuildSuccesful(lastBuild)) {
        script.echo " Starting a new build because the last one (${key(lastBuild)}) was not successful (${lastBuild.status.phase})"
        newBuild = openshift.selector(key(item)).startBuild()
      } else {
        Map m = newObjects[key(item)]
        if (m != null) {
          if (lastBuild.spec?.revision?.git?.commit != null) {
            if (m.metadata?.labels['git-commit'] != null && !m.metadata.labels['git-commit'].equalsIgnoreCase(lastBuild.spec?.revision?.git?.commit)) {
              script.echo " Starting a new build because the last commit (${lastBuild.spec?.revision?.git?.commit}) does not match latest one (${m.metadata.labels['git-commit']})"
              newBuild = openshift.selector(key(item)).startBuild()
            } else if (m.spec.source?.git?.uri) {
              // NOTE(review): the annotations written above use the key 'source.git.ref',
              // not 'spec.source.git.ref' — unless that key is set in code outside this
              // chunk, this lookup is always null and the rev-list comparison below never
              // runs. Confirm against the full file before changing.
              String newestCommit = m.metadata.annotations['spec.source.git.ref']
              String oldestCommit = lastBuild.spec?.revision?.git?.commit

              if (newestCommit != null && !newestCommit.equalsIgnoreCase(oldestCommit)) {
                if (context.isPullRequestFromFork) {
                  // git rev-list [newer] ^[older] --count
                  int distance = Integer.parseInt(script.sh(returnStdout: true, script: "git rev-list ${newestCommit} ^${oldestCommit} --count").trim())
                  script.echo "${distance} commits between ${oldestCommit} (oldest) and ${newestCommit} (newest)"
                  if (distance > 0) {
                    script.echo " Starting a new build because the last one (${key(lastBuild)}) was outdated"
                    newBuild = openshift.selector(key(item)).startBuild()
                    startedNewBuilds = true
                  }
                }
              }
            } else {
              script.echo " Not starting a build (relying on ConfigChange/ImageChange triggers)"
            }
          } else {
            script.echo " This build is not based on a GIT repository (relying on ConfigChange/ImageChange triggers)"
          }
        } else {
          script.echo " ${key(item)} was not found among objects managed by the template(s). I am guessing it was removed, eh?"
        }
      }

      if (newBuild != null) {
        startedNewBuilds = true
        script.echo "New build started - ${newBuild.name()}"
      }
    }
  }

  if (startedNewBuilds) {
    waitForBuildsToComplete(script, openshift, labels)
  }

  // Fail the pipeline unless every BuildConfig's latest build completed successfully.
  def buildOutput = loadBuildConfigStatus(openshift, labels)
  boolean allBuildSuccessful = true
  for (Map item : buildOutput.values()) {
    if ('BuildConfig'.equalsIgnoreCase(item.kind)) {
      Map build = buildOutput["Build/${item.metadata.name}-${item.status.lastVersion}"]
      if (!isBuildSuccesful(build)) {
        allBuildSuccessful = false
        break
      }
    }
  }
  if (!allBuildSuccessful) {
    script.error('Sorry, not all builds have been successful! :`(')
  }

  // Snapshot the resulting ImageStreams, keeping only the newest item per tag.
  openshift.selector('is', labels).withEach {
    def iso = it.object()
    def tags = [:]
    for (Map tag : iso.status.tags) {
      tags[tag.tag] = [
        'items': [tag.items[0]]
      ]
    }

    buildOutput["${key(iso)}"] = [
      'kind'    : iso.kind,
      'metadata': ['name': iso.metadata.name, 'namespace': iso.metadata.namespace],
      'labels'  : iso.metadata.labels,
      'status'  : [
        'tags': tags
      ]
    ]
    // Index the stream under its base name too, so deploy can resolve renamed copies.
    String baseName = getImageStreamBaseName(iso)
    buildOutput["BaseImageStream/${baseName}"] = [
      'ImageStream': key(iso)
    ]
  }

  context['build'] = ['status': buildOutput, 'projectName': "${openshift.project()}"]

  } // end withProject
} // end withCluster
new GitHubHelper().createCommitStatus(script, context.commitId, 'SUCCESS', "${script.env.BUILD_URL}", 'Build', 'continuous-integration/jenkins/build')
context.deployments = context.deployments ?: [:]
// The build project itself is tracked as a transient deployment so cleanup() can purge it.
context.deployments['build'] = ['projectName': context.build.projectName, 'labels': labels, 'transient': true]
}

/**
 * Creates or updates the build-time objects rendered from the templates.
 * New objects are created; existing ones are patched (or, for ImageStreams, applied).
 *
 * @param script        the pipeline CpsScript (for echo/logging)
 * @param openshift     the OpenShift DSL entry point
 * @param appName       application name, used for labelling
 * @param envName       build environment name, used for labelling
 * @param models        desired objects, keyed by kind/name
 * @param currentModels objects currently present in the cluster, keyed by kind/name
 */
private def applyBuildConfig(CpsScript script, OpenShiftDSL openshift, String appName, String envName, Map models, Map currentModels) {
  def bcSelector = ['app-name': appName, 'env-name': envName]

  if (logLevel >= 4) script.echo "openShiftApplyBuildConfig:openshift1:${openshift.dump()}"

  script.echo "Processing ${models.size()} objects for '${appName}' for '${envName}'"
  def creations = []
  def updates = []
  def patches = []

  for (Object o : models.values()) {
    if (logLevel >= 4) script.echo "Processing '${o.kind}/${o.metadata.name}' (before apply)"
    // Stamp the standard ownership labels on every object.
    if (o.metadata.labels == null) o.metadata.labels = [:]
    o.metadata.labels["app"] = "${appName}-${envName}"
    o.metadata.labels["app-name"] = "${appName}"
o.metadata.labels["env-name"] = "${envName}"

    // Route each object into the right bucket: create if absent; otherwise apply
    // ImageStreams as full updates and everything else as patches.
    def sel = openshift.selector("${o.kind}/${o.metadata.name}")
    if (sel.count() == 0) {
      creations.add(o)
    } else {
      if (!'ImageStream'.equalsIgnoreCase("${o.kind}")) {
        patches.add(o)
      } else {
        updates.add(o)
      }
    }
  }

  if (creations.size() > 0) {
    script.echo "Creating ${creations.size()} objects"
    openshift.apply(creations, '')
  }

  if (patches.size() > 0) {
    script.echo "Updating ${patches.size()} objects"
    openshift.apply(patches)
  }

  if (updates.size() > 0) {
    script.echo "Updating ${updates.size()} objects"
    openshift.apply(updates)
  }
}

/** Returns the deployment phase annotation recorded on a ReplicationController. */
private String getReplicationControllerStatus(rc) {
  return rc.metadata.annotations['openshift.io/deployment.phase']
}

/** True once a deployment (ReplicationController) has reached a terminal phase, good or bad. */
private def isReplicationControllerComplete(rc) {
  String phase = getReplicationControllerStatus(rc)
  return ("Complete".equalsIgnoreCase(phase) || "Cancelled".equalsIgnoreCase(phase) || "Failed".equalsIgnoreCase(phase) || "Error".equalsIgnoreCase(phase))
}

/**
 * True once a build has reached a terminal phase, good or bad.
 * Null-safe: a null build (none has run yet) is reported as not complete.
 */
private def isBuildComplete(build) {
  String phase = build?.status?.phase
  return ("Complete".equalsIgnoreCase(phase) || "Cancelled".equalsIgnoreCase(phase) || "Failed".equalsIgnoreCase(phase) || "Error".equalsIgnoreCase(phase))
}

/**
 * True only for a successfully completed build.
 * Fix: null-safe. Callers look the last build up by name and pass the result
 * straight in; when a BuildConfig has never produced a build that lookup is
 * null and this previously threw a NullPointerException instead of returning false.
 */
private def isBuildSuccesful(build) {
  return "Complete".equalsIgnoreCase(build?.status?.phase)
}

/** Base name of an ImageStream: the 'base-name' label when present, else the object name. */
@NonCPS
private def getImageStreamBaseName(res) {
  String baseName = res.metadata.name
  if (res.metadata && res.metadata.labels && res.metadata.labels['base-name']) {
    baseName = res.metadata.labels['base-name']
  }
  return baseName
}

/**
 * Blocks until the target environment has every externally-provided object in
 * place (TLS secrets referenced by Routes; source Secrets/ConfigMaps to copy),
 * prompting the operator to retry while anything is missing.
 */
void waitUntilEnvironmentIsReady(CpsScript script, Map context, String envKeyName) {
  OpenShiftDSL openshift = script.openshift
  script.unstash(name: 'openshift')
  initializeDeploymentContext(script, openshift, context, envKeyName)

  script.waitUntil {
    boolean isReady = false
    List errors = []
    try {
      Map deployCfg = context.deploy
      openshift.withCluster() {
        openshift.withProject(deployCfg.projectName) {
          Map models = loadObjectsFromTemplate(openshift, context.templates.deployment, context, 'deployment')

          for (Map m : models.values()) {
            Map annotations = m?.metadata?.annotations ?: [:]
            script.echo "Checking '${key(m)}'"
            script.echo " annotations:${annotations}"
            if ("Route".equalsIgnoreCase(m.kind)) {
              // Environment-specific annotation wins over the generic one.
              String secretName = (annotations[ANNOTATION_ROUTE_TLS_SECRET_NAME + ".${envKeyName}"]) ?: (annotations[ANNOTATION_ROUTE_TLS_SECRET_NAME])

              if (secretName != null) {
                def selector = openshift.selector("secrets/${secretName}")
                if (selector.count() == 0) {
                  errors.add("Missing 'secret/${secretName}'")
                }
              }
            } else if ("Secret".equalsIgnoreCase(m.kind) || "ConfigMap".equalsIgnoreCase(m.kind)) {
              String sourceName = annotations[ANNOTATION_AS_COPY_OF + ".${envKeyName}"] ?: annotations[ANNOTATION_AS_COPY_OF]
              if (sourceName != null) {
                def selector = openshift.selector("${m.kind}/${sourceName}")
                if (selector.count() == 0) {
                  errors.add("Missing '${m.kind}/${sourceName}'")
                }
              }
            }
          }

        } // end withProject
      } // end withCluster
      isReady = errors.size() == 0
    } catch (ex) {
      script.echo "Error: ${ex}"
      isReady = false
    }
if (!isReady) {
      // Surface everything that is missing, then let the operator fix it and retry.
      for (String err : errors) {
        script.echo "${err}"
      }
      script.input "Retry Environment Readiness Check?"
    }

    return isReady
  }

  clearDeploymentContext(script, openshift, context, envKeyName)
}

/** Builds the per-environment deploy context and registers it on the shared pipeline context. */
private void initializeDeploymentContext(CpsScript script, OpenShiftDSL openshift, Map context, String envKeyName) {
  Map deployCfg = createDeployContext(script, context, envKeyName)
  context['deploy'] = deployCfg
  context['DEPLOY_ENV_NAME'] = envKeyName
  context.deployments = context.deployments ?: [:]
  context.deployments[envKeyName] = deployCfg
}

/** Removes the transient deploy entries from the shared pipeline context. */
private void clearDeploymentContext(CpsScript script, OpenShiftDSL openshift, Map context, String envKeyName) {
  context.remove('deploy')
  context.remove('DEPLOY_ENV_NAME')
}

/**
 * Assembles the deployment descriptor for one environment.
 * DEV deployments are tied to the pull request ("dev-pr-<CHANGE_ID>") and
 * flagged transient so cleanup() can delete them later.
 */
private Map createDeployContext(CpsScript script, Map context, String envKeyName) {
  String envName = envKeyName.toLowerCase()
  boolean transientEnv = false
  if ("DEV".equalsIgnoreCase(envKeyName)) {
    envName = "dev-pr-${script.env.CHANGE_ID}"
    transientEnv = true
  }
  Map deployCfg = [
    'envName'    : envName,
    'projectName': context.env[envKeyName].project,
    'envKeyName' : envKeyName,
    'transient'  : transientEnv,
    'logUrl'     : "${script.env.BUILD_URL}"
  ]

  // Defaults for the DeploymentConfig name prefix/suffix.
  if (!deployCfg.dcPrefix) deployCfg.dcPrefix = context.name
  if (!deployCfg.dcSuffix) deployCfg.dcSuffix = "-${deployCfg.envName}"

  deployCfg['labels'] = ['app-name': context.name, 'env-name': envName]

  return deployCfg
}

/** Converts a label map into repeated `-l key=value` CLI arguments. */
@NonCPS
List labelsToArgs(Map labels) {
  List args = []
  labels.each { String key, String value ->
    args.addAll(['-l', "${key}=${value}"])
  }
  return args
}

/**
 * Deletes every transient deployment recorded in the context. Protected kinds
 * (secret, configmap, pvc) need a second pass because 'oc delete all' skips them.
 */
void cleanup(CpsScript script, Map context) {
  OpenShiftDSL openshift = script.openshift
  for (Map deployment : context.deployments.values()) {
    if (deployment.transient == true) {
      openshift.withCluster() {
        openshift.withProject(deployment.projectName) {
          def result = openshift.selector('all', deployment.labels).delete()
          script.echo "Output:\n${result.out}"

          def protectedSelector = openshift.selector('secret,configmap,pvc', deployment.labels)
          if (protectedSelector.count() > 0) {
            script.echo "Deleting: ${protectedSelector.names()}"
            result = protectedSelector.delete()
            script.echo "Output:\n${result.out}"
          }
        } // end withProject
      } // end withCluster
    }
  }
}

/**
 * Deploys the application to the given environment, reporting progress to
 * GitHub as a Deployment object plus a commit status.
 */
void deploy(CpsScript script, Map context, String envKeyName) {
  OpenShiftDSL openshift = script.openshift
  initializeDeploymentContext(script, openshift, context, envKeyName)

  Map deployCfg = context.deploy
  script.echo "Deploying to ${envKeyName.toUpperCase()} as ${deployCfg.envName}"

  def ghDeploymentId = new GitHubHelper().createDeployment(script, context.commitId, ['environment': "${envKeyName.toUpperCase()}", 'payload': toJsonString(deployCfg), 'task': "deploy:pull:${script.env.CHANGE_ID}"])
  deployCfg['ghDeploymentId'] = ghDeploymentId

  new GitHubHelper().createDeploymentStatus(script, ghDeploymentId, 'PENDING', ['targetUrl': "${deployCfg.logUrl}"])

  new GitHubHelper().createCommitStatus(script, context.commitId, 'PENDING', "${deployCfg.logUrl}", "Deployment to ${envKeyName.toUpperCase()}", "continuous-integration/jenkins/deployment/${envKeyName.toLowerCase()}")

  script.unstash(name: 'openshift')
  script.echo "Deploying '${context.name}' to '${context.deploy.envName}'"
  openshift.withCluster() {
script.echo "Connected to project '${openshift.project()}' as user '${openshift.raw('whoami').out}'"

    openshift.withProject(deployCfg.projectName) {
      script.echo "Connected to project '${openshift.project()}' as user '${openshift.raw('whoami').out}'"
      applyDeploymentConfig(script, openshift, context)
    } // end openshift.withProject()
  } // end openshift.withCluster()
  context.remove('deploy')
  context.deployments = context.deployments ?: [:]
  context.deployments[envKeyName] = deployCfg
  new GitHubHelper().createDeploymentStatus(script, ghDeploymentId, 'SUCCESS', ['targetUrl': "${deployCfg.environmentUrl}"])
  new GitHubHelper().createCommitStatus(script, context.commitId, 'SUCCESS', "${deployCfg.logUrl}", "Deployment to ${envKeyName.toUpperCase()}", "continuous-integration/jenkins/deployment/${envKeyName.toLowerCase()}")
} // end 'deploy' method

/**
 * Rewrites each container's image to the exact image currently resolved by its
 * ImageChange trigger, so the DeploymentConfig is pinned to a concrete image
 * rather than a floating tag.
 */
private def updateContainerImages(CpsScript script, OpenShiftDSL openshift, containers, triggers) {
  for (c in containers) {
    for (t in triggers) {
      if ('ImageChange'.equalsIgnoreCase(t['type'])) {
        for (cn in t.imageChangeParams.containerNames) {
          if (cn.equalsIgnoreCase(c.name)) {
            if (logLevel >= 4) script.echo "${t.imageChangeParams.from}"
            def dockerImageReference = ' '
            // (fix: removed a dead initial selector assignment — it was
            // unconditionally reassigned in both branches below before use)
            if (t.imageChangeParams.from['namespace'] != null && t.imageChangeParams.from['namespace'].length() > 0) {
              // The trigger points at an ImageStreamTag in another project.
              openshift.withProject(t.imageChangeParams.from['namespace']) {
                def selector = openshift.selector("istag/${t.imageChangeParams.from.name}")
                if (selector.count() == 1) {
                  dockerImageReference = selector.object().image.dockerImageReference
                }
              }
            } else {
              def selector = openshift.selector("istag/${t.imageChangeParams.from.name}")
              if (selector.count() == 1) {
                dockerImageReference = selector.object().image.dockerImageReference
              }
            }

            if (logLevel >= 4) script.echo "ImageReference is '${dockerImageReference}'"
            c.image = "${dockerImageReference}"
          }
        }
      }
    }
  }
}

/**
 * Applies the deployment-time objects for the current environment:
 *  - imports/tags the built images into the target project,
 *  - preserves the images of already-deployed containers,
 *  - wires Route TLS material from the annotated secret,
 *  - copies annotated Secrets/ConfigMaps from their source objects,
 *  - waits for rollouts and records the frontend route URL.
 */
private void applyDeploymentConfig(CpsScript script, OpenShiftDSL openshift, Map context) {
  Map deployCtx = context.deploy
  def labels = deployCtx.labels

  Map initDeploymentConfigStatus = loadDeploymentConfigStatus(openshift, labels)
  Map models = loadObjectsFromTemplate(openshift, context.templates.deployment, context, 'deployment')

  // Keep the image of already-deployed containers so that applying the DC does
  // not roll pods back to the template's (possibly stale) image reference.
  if (initDeploymentConfigStatus.size() > 0) {
    for (Map dc : initDeploymentConfigStatus.values()) {
      if ('DeploymentConfig'.equalsIgnoreCase(dc.kind)) {
        Map newDc = models["${key(dc)}"]
        if (newDc != null) {
          for (Map c : dc.spec.template.spec.containers) {
            String dcName = c.name
            for (Map newC : newDc.spec.template.spec.containers) {
              if (dcName.equalsIgnoreCase(newC.name)) {
                newC.image = c.image
                script.echo "Updating '${key(dc)}' containers['${dcName}'].image=${c.image}"
                break
              }
            }
          }
        }
      }
    }
  }

  List upserts = []
  List replaces = []

  // Pass 1: ImageStreams first, so the tags below have somewhere to land.
  for (Map m : models.values()) {
    if ('ImageStream'.equalsIgnoreCase(m.kind)) {
      upserts.add(m)
    }
  }
  script.echo "Applying ImageStream"
  openshift.apply(upserts)
  for (Map m : upserts) {
    String sourceImageStreamKey = context.build.status["BaseImageStream/${getImageStreamBaseName(m)}"]['ImageStream']
    Map sourceImageStream = context.build.status[sourceImageStreamKey]
    String sourceImage = sourceImageStream.status.tags[context.buildEnvName].items[0].image
    String sourceImageStreamRef = "${sourceImageStream.metadata.namespace}/${sourceImageStream.metadata.name}@${sourceImage}"
    String targetImageStreamRef = "${m.metadata.name}:${labels['env-name']}"
    String tempImageTagName = "tmp-${labels['env-name']}"
    String temp2ImageTagName = "tmp2-${labels['env-name']}"
    // The 2-step tagging (import, then tag) is required to create an `ImageStreamImage`
    // that is local to the target project.

    // Workaround: https://github.com/openshift/origin/issues/14631
    // Make sure there is at least one tag in the ImageStream; this tag is never
    // used and is deleted at the end.
    script.echo "(workaround) Tagging '${sourceImageStreamRef}' as 'tmp-${tempImageTagName}'"
    openshift.tag(sourceImageStreamRef, "${m.metadata.name}:tmp-${tempImageTagName}")

    // Import the source image to a temporary tag: source and destination projects may differ.
    script.echo "Importing Image '${sourceImageStreamRef}' as '${m.metadata.name}:${tempImageTagName}'"
    openshift.raw('import-image', "${m.metadata.name}:${tempImageTagName}", "--from=docker-registry.default.svc:5000/${sourceImageStream.metadata.namespace}/${sourceImageStream.metadata.name}@${sourceImage}", '--insecure=true', '--confirm=true')

    // Re-import from the temporary tag to a second temporary tag: source and
    // destination projects are now the same.
    script.echo "Importing Image '${m.metadata.name}:${tempImageTagName}' as '${m.metadata.name}:${temp2ImageTagName}'"
    openshift.raw('import-image', "${m.metadata.name}:${temp2ImageTagName}", "--from=docker-registry.default.svc:5000/${deployCtx.projectName}/${m.metadata.name}@${sourceImage}", '--insecure=true', '--confirm=true')

    // Update the final tag from the second temporary imported image.
    script.echo "Tagging '${m.metadata.name}@${temp2ImageTagName}' as '${targetImageStreamRef}'"
    openshift.tag("${m.metadata.name}:${temp2ImageTagName}", targetImageStreamRef)

    script.echo "Deleting temporary tag: '${m.metadata.name}:${tempImageTagName}'"
    openshift.tag("${m.metadata.name}:${tempImageTagName}", '-d')

    script.echo "Deleting temporary tag: '${m.metadata.name}:${temp2ImageTagName}'"
    openshift.tag("${m.metadata.name}:${temp2ImageTagName}", '-d')

    script.echo "Deleting temporary tag: '${m.metadata.name}:tmp-${tempImageTagName}'"
    openshift.tag("${m.metadata.name}:tmp-${tempImageTagName}", '-d')
  }
  script.echo "Applying Configurations"
  upserts.clear()
  // Pass 2: everything else. Routes are replaced; annotated copies are cloned
  // from their source objects; the rest is gated by allowCreateOrUpdate().
  for (Map m : models.values()) {
    Map annotations = m?.metadata?.annotations ?: [:]
    if ("Route".equalsIgnoreCase(m.kind)) {
      String secretName = (annotations[ANNOTATION_ROUTE_TLS_SECRET_NAME + ".${deployCtx.envKeyName}"]) ?: (annotations[ANNOTATION_ROUTE_TLS_SECRET_NAME])
      if (secretName != null) {
        script.echo "Applying TLS using secret/${secretName} for '${key(m)}'"
        m.spec.tls = m.spec.tls ?: [:]
        def selector = openshift.selector("secrets/${secretName}")
        if (selector.count() == 1) {
          script.echo "Modifying '${key(m)}'"
          Map secret = selector.object()
          m.spec.tls.caCertificate = new String(secret.data.caCertificate.decodeBase64())
          m.spec.tls.certificate = new String(secret.data.certificate.decodeBase64())
          m.spec.tls.key = new String(secret.data.key.decodeBase64())
        }
      }

      replaces.add(m)
    } else {
      String sourceName = annotations[ANNOTATION_AS_COPY_OF + ".${deployCtx.envKeyName}"] ?: annotations[ANNOTATION_AS_COPY_OF]
      if (sourceName != null && sourceName.length() > 0) {
        script.echo "Creating a copy of '${m.kind}/${sourceName}' as '${key(m)}'"
        def selector = openshift.selector("${m.kind}/${sourceName}")
        if (selector.count() == 1) {
          Map sourceModel = selector.object(exportable: true)
          sourceModel.metadata.name = m.metadata.name
          upserts.add(sourceModel)
        }
      } else {
        Map current = initDeploymentConfigStatus[key(m)]
        if (allowCreateOrUpdate(m, current)) {
          upserts.add(m)
        }
      }
    }
  }
  openshift.apply(upserts).label(['app': "${labels['app-name']}-${labels['env-name']}", 'app-name': labels['app-name'], 'env-name': labels['env-name']], "--overwrite")

  if (replaces.size() > 0) {
    openshift.apply(replaces, '--force=true').label(['app': "${labels['app-name']}-${labels['env-name']}", 'app-name': labels['app-name'], 'env-name': labels['env-name']], "--overwrite")
  }

  waitForDeploymentsToComplete(script, openshift, labels)

  // Publish the externally reachable URL of the 'frontend' route, if any.
  openshift.selector('route', labels + ['frontend': 'true']).withEach {
    Map route = it.object()
    if (route.spec.tls) {
      deployCtx['environmentUrl'] = "https://${route.spec.host}${route.spec.path ?: '/'}"
    } else {
      deployCtx['environmentUrl'] = "http://${route.spec.host}${route.spec.path ?: '/'}"
    }
  }
}

/**
 * Captures a compact status snapshot of every DeploymentConfig matching the
 * labels, plus its latest ReplicationController (keyed "ReplicationController/<dc>-<version>").
 */
private Map loadDeploymentConfigStatus(OpenShiftDSL openshift, Map labels) {
  Map buildOutput = [:]
  def selector = openshift.selector('dc', labels)

  if (selector.count() > 0) {
    for (Map dc : selector.objects()) {
      String rcName = "ReplicationController/${dc.metadata.name}-${dc.status.latestVersion}"
      def rcSelector = openshift.selector(rcName)
      if (rcSelector.count() > 0) {
        Map rc = rcSelector.object()
        buildOutput[rcName] = [
          'kind'    : rc.kind,
          'metadata': ['name': rc.metadata.name],
          'status'  : rc.status,
          'phase'   : rc.metadata.annotations['openshift.io/deployment.phase']
        ]
      }
      // Keep only name/image per container — that is all the visible consumer
      // (applyDeploymentConfig) reads.
      // Fix: this trimmed list was previously built but never used; the full
      // container spec was stored in the snapshot instead.
      List containers = []
      if (dc?.spec?.template?.spec?.containers != null) {
        for (Map c : dc.spec.template.spec.containers) {
          containers.add(['name': c.name, 'image': c.image])
        }
      }
      buildOutput["${key(dc)}"] = [
        'kind'    : dc.kind,
        'metadata': ['name': dc.metadata.name],
        'status'  : ['latestVersion': dc.status.latestVersion, 'latestReplicationControllerName': rcName],
        'spec'    : [
          'template': [
            'spec': [
              'containers': containers
            ]
          ]
        ]
      ]
    }
  }
  return buildOutput
}
} // end class