├── .gitattributes ├── .gitignore ├── Bamboo ├── README.md └── find_and_store_latest_tag.sh ├── LICENSE ├── README.md ├── artifactoryHandler ├── ArtifactoryHandler.groovy ├── ArtifactoryHandlerTest.groovy ├── README.md ├── logback.xml ├── resources │ ├── ArtifactoryDomainVersionArtifact.png │ └── deploy-scripts.tar.gz └── testArtifactory.sh ├── bitbucket ├── .netrc ├── README.md ├── _bitbucket_repo_functions.sh ├── bitbucket_create_repo-branch-restrictions.sh ├── bitbucket_get_repo_sizes.sh ├── bitbuket2bitbucket-migration.sh ├── create_project.sh ├── run_git_gc.sh └── scriptrunner │ └── checkJiraReference.groovy ├── crucible ├── README.md └── crucible_review_report.py ├── doCppCheck ├── do_cppcheck-settings-template.rb ├── do_cppcheck-suppressions-template.lst └── do_cppcheck.rb ├── exalate ├── ado-incoming-from-jira-cloud.groovy └── jiracloud-outgoing.groovy ├── git-repo-analyzer ├── .gitignore ├── README.md ├── git-find-sha1-of-leaf-tags.sh ├── git-object-sizes-in-repo-analyzer.sh ├── git-object-sizes-usages-snips.md ├── git-workspace-file-type-analyzer.sh ├── git_create-update_dir2gits.sh └── git_meassureGits.sh ├── git-utils ├── git-batch-push-process-pull.sh ├── git-retag.sh └── git-submodule-usage.sh ├── jenkins-autoupdate-plugins ├── Jenkinsfile ├── Readme.MD └── UpdatePlugins.sh ├── jenkins-job-dsl ├── readme.md └── seed.groovy ├── jenkins-tricks-examples ├── KeepLogForeverThisAndUpstreamBuilds.groovy ├── cancelDownStreamJobQueue │ ├── JobDSLcancelDownstreamQueue.groovy │ ├── README.md │ └── cancelDownstreamQueue.groovy └── jenkins-agent-scripts │ └── jenkins--agent-state-n-toogle.sh ├── jira ├── README.md ├── filters │ └── readme.md └── scripts │ ├── README.md │ ├── ScriptRunner │ ├── createEpicsFromRequests.groovy │ ├── createEvalsFromRequests.groovy │ ├── createEvalsFromRequests2ProductAndTeamsProjects.groovy │ ├── fields │ │ ├── childrenImplementedSolution.groovy │ │ ├── parentAccounting.groovy │ │ ├── parentCombustionType.groovy │ │ ├── 
parentDescription.groovy │ │ ├── parentEngineType.groovy │ │ ├── parentExpectedCost.groovy │ │ ├── parentExpectedWorkhours.groovy │ │ ├── parentFixVersions-console.groovy │ │ ├── parentFixVersions.groovy │ │ ├── parentID.groovy │ │ ├── parentIssueReason.groovy │ │ ├── parentPlatform.groovy │ │ ├── parentReferencedPCB.groovy │ │ ├── parentSiblings.groovy │ │ ├── parentSiteType.groovy │ │ ├── parentTargetProducts.groovy │ │ ├── siblings.groovy │ │ ├── sumEstOriginalTime.groovy │ │ ├── sumEstOriginalTimeStoryOnly.groovy │ │ ├── sumEstOriginalTimeStorySubtask.groovy │ │ └── sumEstStoryPoints.groovy │ ├── request2epicsValidatorNPostfuntion.groovy │ ├── validateCreateEpicsFromRequests.groovy │ └── workflow │ │ ├── epicValidationStoriesDone.groovy │ │ ├── implSolutionRequired.groovy │ │ ├── requestDoneEpicsDone.groovy │ │ ├── setEpicStatusDone.groovy │ │ └── validators │ │ ├── blockEpic2DoneRejectedIfChildsNotRejectedDone.groovy │ │ └── implSolutionRequiredv2.groovy │ ├── _jira_project_create_update_functions.sh │ ├── component_delete_from_parameters.sh │ ├── create-jira-project-from-shared-config.sh │ ├── create-projects-n-update-components.sh │ ├── create_component_from_parameters.sh │ ├── create_components_from_json_import.sh │ ├── create_releases_from_json_import.sh │ ├── jira-delete-users.sh │ ├── jira-delete-versions.sh │ ├── jira_attachments_import.sh │ ├── jqlScripts │ └── src │ │ └── com │ │ └── onresolve │ │ └── jira │ │ └── groovy │ │ └── jql │ │ └── MismatchedThemes.groovy │ ├── pom.xml │ ├── scriptedFields │ ├── LastComment.groovy │ └── TimeSpentAsHours.groovy │ ├── sql │ └── inactiveUser.sql │ ├── update_status_releases_from_json_import.sh │ ├── various │ ├── FindUsers.groovy │ └── fixResolutionStatus.groovy │ └── workflowScripts │ ├── BlockStoriesWithOpenBlockingIssues.groovy │ ├── CloneAndLink.groovy │ ├── OriginalEstimateValidation.groovy │ ├── SetDefaultAssignee.groovy │ ├── SetDevTeams.groovy │ ├── SetProduct.groovy │ ├── 
StoryTransitionInProgressSubtask.groovy │ ├── TransitionLinkedDefect.groovy │ ├── block-epics-open-stories.groovy │ ├── block-requests-with-open-epics.groovy │ ├── create-evals-in-separate-project-with-components-based-on-cf.groovy │ ├── scriptrunner-create-issues-based-on-multiselected-cf.groovy │ └── setEpicStatus.groovy ├── misc ├── checkFileSyncModified.sh ├── createStagingNo.rb └── jenkins-createUniqueArtifact.rb ├── powerping ├── README.md ├── config-example.yml └── powerPing.groovy ├── runXmlAccess ├── Command.groovy └── run_xmlaccess.groovy ├── setversion ├── setversion-headerfile-template.h ├── setversion-javafile-template.properties ├── setversion.rb ├── setversion.sh ├── setversion_scmInDevCommits.rb └── version_info.h ├── small-git-tricks └── commits-per-subfolder.md ├── testing-idea ├── functional │ ├── tests.inc │ └── transformAndRun.bats ├── readme.md └── run_tests_Linux.sh └── transformAndRun ├── config.yml ├── readme.md ├── story.txt ├── transform.yml └── transformAndRun.groovy /.gitattributes: -------------------------------------------------------------------------------- 1 | *.sh text eol=lf -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### JetBrains template 2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 3 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 4 | 5 | # User-specific stuff: 6 | .idea/workspace.xml 7 | .idea/tasks.xml 8 | .idea/dictionaries 9 | .idea/vcs.xml 10 | .idea/jsLibraryMappings.xml 11 | .idea/bashsupport_project.xml 12 | .idea/* 13 | *.iml 14 | # Sensitive or high-churn files: 15 | .idea/dataSources.ids 16 | .idea/dataSources.xml 17 | .idea/dataSources.local.xml 18 | .idea/sqlDataSources.xml 19 | .idea/dynamic.xml 20 | .idea/uiDesigner.xml 21 | 22 | # Gradle: 23 | .idea/gradle.xml 24 | 
.idea/libraries 25 | 26 | # Mongo Explorer plugin: 27 | .idea/mongoSettings.xml 28 | 29 | ## File-based project format: 30 | *.iws 31 | 32 | ## Plugin-specific files: 33 | 34 | # IntelliJ 35 | /out/ 36 | 37 | # mpeltonen/sbt-idea plugin 38 | .idea_modules/ 39 | 40 | ### JIRA plugin 41 | atlassian-ide-plugin.xml 42 | 43 | ### Crashlytics plugin (for Android Studio and IntelliJ) 44 | com_crashlytics_export_strings.xml 45 | crashlytics.properties 46 | crashlytics-build.properties 47 | fabric.properties 48 | 49 | ### Java template 50 | *.class 51 | 52 | # Mobile Tools for Java (J2ME) 53 | .mtj.tmp/ 54 | 55 | # Package Files # 56 | *.jar 57 | *.war 58 | *.ear 59 | 60 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 61 | hs_err_pid* 62 | # Created by .ignore support plugin (hsz.mobi) 63 | 64 | 65 | ### Maven Stuff 66 | /target/* -------------------------------------------------------------------------------- /Bamboo/README.md: -------------------------------------------------------------------------------- 1 | # Scripts for Bamboo stuff 2 | 3 | ## Find latest release tag and store it for later 4 | `find_and_store_latest_tag.sh` finds the latest tag matching a hardcoded pattern. 5 | It stores this as a property in a gitinfo.properties file. 6 | This allows for environment injection in a following build step. 7 | 8 | If no tag is found, it will actually create and push a Rel_0.0.0 tag 9 | -------------------------------------------------------------------------------- /Bamboo/find_and_store_latest_tag.sh: -------------------------------------------------------------------------------- 1 | # For Bamboo: Find and store latest release tag. 2 | # 3 | # Script to find the lastest release tag and store it in a properties file 4 | # 5 | # This allows for environment injection in a later step in the build. 6 | # (this is currently the easiest way in Bamboo to pass env values). 7 | # 8 | # The script find the latest tag matching "Rel.*". 
9 | # 10 | # NOTE: If no previous tag is found, 11 | # the script creates a Rel_0.0.0 tag on the initial commit. 12 | # This is an opinionated design choice and might not be what you want. 13 | # It was chosen in context as an easy alternative to make later steps always work. 14 | # 15 | 16 | # Find the latest release tag using git describe (command substitution, so $? below reflects git describe's exit code) 17 | tag=$(git describe --tags --match 'Rel.*' --abbrev=0) 18 | 19 | # If the above command fails, it is probably because no matching tag was found 20 | # So we actually solve this by adding a 0.0.0 tag to the initial commit. 21 | if [ $? -ne 0 ]; then 22 | # Find initial commit (find commit with zero parents) 23 | initial=$(git rev-list --max-parents=0 HEAD) 24 | tag=Rel_0.0.0 25 | # Tag the found initial commit as Rel_0.0.0 26 | git tag -m "Initial commit tagged as Rel_0.0.0" $tag $initial 27 | 28 | # Do the annoying workaround needed because Bamboo plan repos are 29 | # cloned from a local filesystem cache, so we can't push to "origin". 30 | # Luckily, Bamboo provides a variable with the location of the original repo 31 | # So we can add that as a new remote 32 | git remote add central ${bamboo.planRepository.repositoryUrl} 33 | git remote update central 34 | # Push the Rel_0.0.0 tag 35 | git push central $tag 36 | fi 37 | 38 | # write tag description as latest_tag to a gitinfo.properties file 39 | echo "latest_tag=$tag" > gitinfo.properties 40 | 41 | 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Praqma 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the
Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /artifactoryHandler/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | %d{HH:mm:ss.SSS} %-5level %-50logger{0} - %msg%n 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /artifactoryHandler/resources/ArtifactoryDomainVersionArtifact.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Praqma/code-utils/5f6bcf5e4836aa03666cc519e358285ff8e22f6b/artifactoryHandler/resources/ArtifactoryDomainVersionArtifact.png -------------------------------------------------------------------------------- /artifactoryHandler/resources/deploy-scripts.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Praqma/code-utils/5f6bcf5e4836aa03666cc519e358285ff8e22f6b/artifactoryHandler/resources/deploy-scripts.tar.gz -------------------------------------------------------------------------------- /artifactoryHandler/testArtifactory.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Hard coded values 4 | SCRIPT_VERSION="1.0.0" 5 | ARTIFACTORY_IMAGE="jfrog-docker-reg2.bintray.io/jfrog/artifactory-pro:latest" 6 | ARTIFACTORY_PORT="8081:8081" 7 | CONTAINER_NAME="artifactory_pro_latest_test" 8 | TEST_SCRIPT="ArtifactoryHandlerTest.groovy" 9 | 10 | #Commands: Change path if needed. 11 | ECHO="echo" 12 | WHOAMI="whoami" 13 | DOCKER="docker" 14 | GROOVY="groovy" 15 | SLEEP="sleep" 16 | SLEEP_INTERVAL=30 17 | 18 | 19 | function print_help() 20 | { 21 | $ECHO "" 22 | $ECHO "--------------------------------------------" 23 | $ECHO "It is expected that this script is located in the same directory as the test script!" 24 | $ECHO "" 25 | $ECHO "Usage: ./testArtifactory.sh" 26 | $ECHO "" 27 | $ECHO "SCRIPT_VERSION=$SCRIPT_VERSION" 28 | $ECHO "--------------------------------------------" 29 | 30 | } 31 | function usage() 32 | { 33 | print_help 34 | 1>&2; exit 1; 35 | } 36 | 37 | # TODO: THIS NEEDS WORK - We need to parse the output to see if it is there. 38 | function cleanupContainer(){ 39 | $ECHO "Check if container already exists." 40 | local CMD="$DOCKER ps --filter 'name=$CONTAINER_NAME'" 41 | $CMD 42 | if [ "$?" -ne 0 ]; then 43 | $ECHO "ERROR: Could not CHECK for container: $CONTAINER_NAME!!!" 44 | exit 1 45 | fi 46 | 47 | $ECHO "Removing $CONTAINER_NAME" 48 | CMD="$DOCKER rm $CONTAINER_NAME" 49 | $CMD 50 | if [ "$?" -ne 0 ]; then 51 | $ECHO "ERROR: Could not CHECK for container: $CONTAINER_NAME!!!" 52 | exit 1 53 | fi 54 | } 55 | 56 | function pullImage(){ 57 | $ECHO "Pulling latest image..." 58 | local CMD="$DOCKER pull $ARTIFACTORY_IMAGE" 59 | $CMD 60 | if [ "$?" -ne 0 ]; then 61 | $ECHO "ERROR: Could not PULL image: $ARTIFACTORY_IMAGE!!!" 62 | exit 1 63 | fi 64 | } 65 | 66 | function runContainer(){ 67 | local CMD="$DOCKER run -d --name $CONTAINER_NAME -p $ARTIFACTORY_PORT $ARTIFACTORY_IMAGE" 68 | $CMD 69 | if [ "$?" 
-ne 0 ]; then 70 | $ECHO "ERROR: Could not START container: $CONTAINER_NAME!!!" 71 | exit 1 72 | fi 73 | 74 | $ECHO "Waiting $SLEEP_INTERVAL for Artifactory to start..." 75 | CMD="$SLEEP $SLEEP_INTERVAL" 76 | $CMD 77 | } 78 | 79 | function setupDocker() 80 | { 81 | $ECHO "Setting up Docker..." 82 | #cleanupContainer 83 | pullImage 84 | runContainer 85 | } 86 | 87 | function tearDownDocker(){ 88 | $ECHO "Tearing down Docker..." 89 | local CMD="$DOCKER stop $CONTAINER_NAME" 90 | $CDM 91 | if [ "$?" -ne 0 ]; then 92 | $ECHO "ERROR: Could not STOP container: $CONTAINER_NAME!!!" 93 | exit 1 94 | fi 95 | 96 | $ECHO "Waiting $SLEEP_INTERVAL for Artifactory to stop..." 97 | CMD="$SLEEP $SLEEP_INTERVAL" 98 | $CMD 99 | 100 | CMD="$DOCKER rm $CONTAINER_NAME" 101 | $CMD 102 | if [ "$?" -ne 0 ]; then 103 | $ECHO "ERROR: Could not REMOVE container: $CONTAINER_NAME!!!" 104 | exit 1 105 | fi 106 | } 107 | 108 | function runTests(){ 109 | $ECHO "Running tests..." 110 | local REPO1="libs-content-test" 111 | local REPO2="libs-content-local" 112 | local CMD="$GROOVY ArtifactoryHandler.groovy --action create-repo --repository $REPO1 --web-server http://localhost:8081/ --userName admin --password password" 113 | $CMD 114 | if [ "$?" -ne 0 ]; then 115 | $ECHO "ERROR: Could not create REPO: $REPO1!!!" 116 | exit 1 117 | fi 118 | 119 | CMD="$GROOVY ArtifactoryHandler.groovy --action 'create-repo' --repository '$REPO2' --web-server 'http://localhost:8081/' --userName 'admin' --password 'password'" 120 | #$CMD 121 | if [ "$?" -ne 0 ]; then 122 | $ECHO "ERROR: Could not create REPO: $REPO2!!!" 123 | exit 1 124 | fi 125 | 126 | CMD="$GROOVY $TEST_SCRIPT" 127 | $CMD 128 | if [ "$?" -ne 0 ]; then 129 | $ECHO "ERROR: ARTIFACTORY tests have failures!!!" 
130 | exit 1 131 | fi 132 | } 133 | 134 | setupDocker 135 | runTests 136 | tearDownDocker -------------------------------------------------------------------------------- /bitbucket/.netrc: -------------------------------------------------------------------------------- 1 | machine localhost 2 | login admin 3 | password password 4 | -------------------------------------------------------------------------------- /bitbucket/README.md: -------------------------------------------------------------------------------- 1 | # bitbucket_create_repo-branch-restrictions.sh 2 | 3 | This script will create the following in BitBucket Server 4 | - a repository 5 | - potential push and already created git repository which is also a dir in `cwd` 6 | - create remote `stable` and `release` branches based on local `master` branch 7 | - push all tags 8 | - can push mirror 9 | - configure the repository with branches restriction (e.g. who can push to which branch and tag pattern ) 10 | 11 | Prework: 12 | - Modify the `.netrc` file or create your own 13 | - Modify the `bitbucket/bitbucket_create_repo-branch-restrictions.sh` 14 | - `bitbucket_admin_group="bitbucket-sys-admins"` 15 | - `bitbucket_url="https://localhost:7990"` 16 | - `ci_user="jenkins"` 17 | 18 | Example call: 19 | - `bitbucket_create_repo-branch-restrictions.sh [./.netrc|]` 20 | 21 | .. where `` can also be identical to a subdirectory that will be pushed. More than one are then comma separated. 
22 | 23 | It demonstrates protecting of branches in the Praqma Git Phlow model and multiple master branches in the same repository 24 | 25 | TODO: 26 | - Parameterize the needed `Prework` modifications 27 | -------------------------------------------------------------------------------- /bitbucket/bitbucket_create_repo-branch-restrictions.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | [[ ${debug:-} == "true" ]] && set -x 5 | set -u 6 | 7 | if [[ "${1}X" == "X" ]] ; then 8 | echo "Please parse bitbucket_project as parameter 1" 9 | exit 1 10 | else 11 | bitbucket_project=${1} # space seperated list 12 | fi 13 | 14 | 15 | if [[ "${2}X" == "X" ]] ; then 16 | echo "Please parse repo_names as parameter 2 as space separated list" 17 | exit 1 18 | else 19 | repo_names="${2}" # space seperated list 20 | fi 21 | 22 | if [[ "${3:-}" == "" ]] ; then 23 | echo "Please parse netrc_file as parameter 3" 24 | exit 1 25 | else 26 | netrc_file=${3} 27 | fi 28 | 29 | if [[ "${4:-}X" == "X" ]] ; then 30 | echo "Please parse bitbucket_url as parameter 4" 31 | exit 1 32 | else 33 | bitbucket_url="${4}" 34 | fi 35 | 36 | if [[ "${5:-}X" == "X" ]] ; then 37 | echo "Please parse ci_user as parameter 5 for branch permissions if needed" 38 | else 39 | ci_user="${5:-}" 40 | fi 41 | 42 | bitbucket_admin_group="bitbucket_admins" 43 | 44 | curl_PUT_cmd="curl --fail -D- --insecure --netrc-file ${netrc_file} -X PUT" 45 | curl_DELETE_cmd="curl --fail -D- --insecure --netrc-file ${netrc_file} -X DELETE" 46 | curl_POST_cmd="curl --fail -D- --insecure --netrc-file ${netrc_file} -X POST -H Content-Type:application/json" 47 | #bitbucket_admin_user="admin" 48 | #bitbucket_admin_password="password" 49 | #curl_PUT_cmd="curl --fail -D- --insecure -u ${bitbucket_admin_user}:${bitbucket_admin_password} -X PUT" 50 | #curl_POST_cmd="curl --fail -D- --insecure -u ${bitbucket_admin_user}:${bitbucket_admin_password} -X POST -H 
Content-Type:application/json" 51 | 52 | source ${BASH_SOURCE%/*}/_bitbucket_repo_functions.sh || source ./_bitbucket_repo_functions.sh 53 | 54 | for repo_name in $(echo ${repo_names} | sed -e 's/,/ /g'); do 55 | echo "#################################################################" 56 | echo " START: $repo_name " 57 | echo "#################################################################" 58 | create_repo ${bitbucket_url} ${bitbucket_project} ${repo_name} "--mirror" 59 | # repo_prereceive_force_push_hook_enable $repo_name $bitbucket_project 60 | 61 | # create_permission_set_restricted_groups "heads/*" "$bitbucket_admin_group" $bitbucket_project $repo_name # only admin creates new 'root' branches and 'name-spaces' 62 | # 63 | # create_permission_set_restricted "heads/**/master" ${ci_user} $bitbucket_project $repo_name 64 | # create_permission_set_restricted "heads/**/stable" $ci_user $bitbucket_project $repo_name 65 | # create_permission_set_restricted "heads/**/release" $ci_user $bitbucket_project $repo_name 66 | # create_permission_set_restricted "tags/**" $ci_user $bitbucket_project $repo_name 67 | # 68 | # create_permission_set_rewrite_history "heads/**/ready/*" "" $bitbucket_project $repo_name 69 | # create_permission_set_rewrite_history "heads/**/dev/*" "" $bitbucket_project $repo_name 70 | # create_permission_set_rewrite_history_deletion "heads/**/feature/*" $bitbucket_admin_group $bitbucket_project $repo_name 71 | echo "#################################################################" 72 | echo " DONE: $repo_name " 73 | echo "#################################################################" 74 | done 75 | -------------------------------------------------------------------------------- /bitbucket/bitbucket_get_repo_sizes.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -u 5 | [[ "${debug:-}" == "true" ]] && set -x 6 | 7 | [[ "${1:-}" == "" ]] && echo "Please set the 
server URL as param 1" && exit 1 8 | [[ "${2:-}" == "" ]] && echo "Please set netrc file as param 3" && exit 1 9 | 10 | set -euo pipefail 11 | 12 | url=${1} 13 | netrc_file=${2} 14 | [[ -f "${netrc_file}" ]] || { echo "Netrc file: ${netrc_file} does not exist" && exit 1; } 15 | limit=100000 16 | 17 | output_file_name="${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).projects.repos.txt" 18 | output_server_size_filename="${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).projects.repos.size.txt" 19 | echo "output_file_name : ${WORKSPACE:-.}/${output_file_name}" 20 | 21 | rm -rf ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1)*.* 22 | 23 | printf "%-60s : %-20s : %-10s : %-10s : %-5s : %-5s %s\n" "project/repo-path" "bytes" "mbytes" "gbytes" "LFS" "repo-id" "repo-URL" 24 | printf "%-60s : %-20s : %-10s : %-10s : %-5s : %-5s %s\n" "project/repo-path" "bytes" "mbytes" "gbytes" "LFS" "repo-id" "repo-URL" > $output_file_name 25 | IFS=$'\r\n' 26 | server_size_mb=0 27 | projects_count=0 28 | projects_slugs_counts=0 29 | for bitbucket_project in $(curl --fail --silent --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --url ${url}/rest/api/1.0/projects?limit=${limit} | jq -r .values[].key ); do 30 | project_size_mb=0 31 | projects_count=$(( ${projects_count:-0} + 1 )) 32 | for slug in $(curl --fail --silent --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --url ${url}/rest/api/1.0/projects/${bitbucket_project}/repos?limit=${limit} | jq -r .values[].slug ); do 33 | 34 | slugs_count=$(( ${slugs_count:-0} + 1 )) 35 | projects_slugs_counts=$(( ${projects_slugs_counts:-0} + 1 )) 36 | 37 | repo_id=$(curl --fail --silent --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --url ${url}/rest/api/1.0/projects/${bitbucket_project}/repos/${slug} | jq .id ) 38 | repo_url=${url}/projects/${bitbucket_project}/repos/${slug} 39 | size_bytes=$(curl --fail --silent 
--insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --url ${url}/projects/${bitbucket_project}/repos/${slug}/sizes | jq -r .repository) 40 | size_mb=$(awk '{printf "%d", $1/$2/$2}' <<< "$size_bytes 1024" ) 41 | size_gb=$(awk '{printf "%d", $1/$2/$2/$2}' <<< "$size_bytes 1024" ) 42 | 43 | project_size_mb=$(( ${project_size_mb} + ${size_mb} )) 44 | 45 | _lfs_exit_code=0 46 | lfs_output=$(curl --fail --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --url ${url}/rest/git-lfs/admin/projects/${bitbucket_project}/repos/${slug}/enabled 2>&1) || _lfs_exit_code=$? 47 | lfs_status="-" 48 | if [[ ${_lfs_exit_code} -eq 0 ]]; then 49 | lfs_status="+" 50 | fi 51 | 52 | printf "%-60s : %-20s : %-10s : %-10s : %-5s : %-5s : %s\n" "${bitbucket_project}/repos/$slug" "${size_bytes}" "${size_mb}" "${size_gb}" "${lfs_status}" "${repo_id}" "${repo_url}" 53 | printf "%-60s : %-20s : %-10s : %-10s : %-5s : %-5s : %s\n" "${bitbucket_project}/repos/$slug" "${size_bytes}" "${size_mb}" "${size_gb}" "${lfs_status}" "${repo_id}" "${repo_url}" >> $output_file_name 54 | printf "${bitbucket_project}/$slug\n" >> ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).${bitbucket_project}.repos.txt 55 | unset _lfs_exit_code 56 | done 57 | printf "Project count/size(MB): ${bitbucket_project} : ${slugs_count:-0} / ~${project_size_mb:-0} MB\n" > ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).${bitbucket_project}.size.mb.txt 58 | printf "Project count/size(MB): ${bitbucket_project} : ${slugs_count:-0} / ~${project_size_mb:-0} MB\n\n" 59 | server_size_mb=$(( ${server_size_mb:-0} + ${project_size_mb:-0} )) 60 | unset slugs_count 61 | done 62 | printf "Projects-count/repos-count/size(MB):: ${projects_count:-0} / ${projects_slugs_counts:-0} / ~${server_size_mb:-0} MB\n" > ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).size.mb.txt 63 | cat ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).size.mb.txt 
64 | 65 | cat ${WORKSPACE:-.}/$(echo $url | cut -d / -f 3 | cut -d : -f 1).*.repos.txt 66 | 67 | -------------------------------------------------------------------------------- /bitbucket/bitbuket2bitbucket-migration.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | df -h 4 | export GIT_SSL_NO_VERIFY=1 5 | 6 | if [[ -d ./code-utils ]] ; then 7 | cd ./code-utils 8 | git fetch -ap 9 | git reset --hard origin/master 10 | cd .. 11 | else 12 | git clone https://github.com/Praqma/code-utils.git 13 | fi 14 | 15 | if [[ "${debug:-}" == true ]]; then 16 | export GIT_TRACE=1 17 | set -x 18 | fi 19 | 20 | project=$(echo ${projects_slug} | cut -d / -f1) 21 | slug=$(echo ${projects_slug} | cut -d / -f 3 | cut -d : -f 1 ) 22 | lfs_statement=$(echo ${projects_slug} | cut -d : -f 2) 23 | 24 | [[ ${lfs_statement:-} != "" ]] && [[ ${lfs_statement:-} != "${projects_slug}" ]] && eval ${lfs_statement} 25 | 26 | if [[ -d ${slug}.git ]]; then 27 | cd ${slug}.git 28 | git fetch origin -ap 29 | git fetch origin --tags 30 | if [[ ${lfs_enabled:-true} == true ]]; then 31 | git lfs fetch --all || { 32 | exitcode_lfs_fetch=$? 33 | if [[ ${lfs_accept_missing:-false} == true ]]; then 34 | echo "INFO: we accept issues" 35 | git lfs uninstall --local 36 | else 37 | exit $exitcode_lfs_fetch 38 | fi 39 | } 40 | fi 41 | else 42 | mkdir ${slug}.git 43 | cd ${slug}.git 44 | git init --bare 45 | git config --add remote.origin.url ${old_server_url}/${project}/${slug}.git 46 | git config --add remote.origin.fetch +refs/heads/*:refs/heads/* 47 | git config --add remote.origin.fetch +refs/tags/*:refs/tags/* 48 | git config -l --local 49 | git fetch origin -ap 50 | git fetch origin --tags 51 | if [[ ${lfs_enabled:-true} == true ]]; then 52 | git lfs install --local 53 | git lfs fetch --all || { 54 | exitcode_lfs_fetch=$? 
55 | if [[ ${lfs_accept_missing:-false} == true ]]; then 56 | echo "INFO: we accept issues" 57 | git lfs uninstall --local 58 | else 59 | exit $exitcode_lfs_fetch 60 | fi 61 | } 62 | fi 63 | fi 64 | if [[ -d lfs ]] ; then 65 | du -sh lfs 66 | fi 67 | git show-ref 68 | 69 | if [[ ${bitbucket_server_type:-} == "bitbucketProd" ]]; then 70 | export bitbucket_server=$bitbucket_prod_server 71 | fi 72 | bitbucket_server_url=https://${bitbucket_username}:${bitbucket_password}@${bitbucket_server} 73 | git push ${bitbucket_server_url}/scm/${project}/${slug}.git --mirror || { 74 | cd ${WORKSPACE} 75 | export netrc_file=~/.netrc 76 | source ./code-utils/bitbucket/_bitbucket_repo_functions.sh 77 | bash ./code-utils/bitbucket/bitbucket_create_repo-branch-restrictions.sh "${project}" "${slug}" ${netrc_file} "${bitbucket_server_url}" "$(whoami)" || { 78 | echo "machine ${bitbucket_server}" >> ./.netrc 79 | echo "login ${bitbucket_username}" >> ./.netrc 80 | echo "password ${bitbucket_password}" >> ./.netrc 81 | export netrc_file=$(pwd)/.netrc 82 | source ./code-utils/bitbucket/_bitbucket_repo_functions.sh 83 | bash ./code-utils/bitbucket/bitbucket_create_repo-branch-restrictions.sh "${project}" "${slug}" ${netrc_file} "${bitbucket_server_url}" "$(whoami)" 84 | rm -rf ./.netrc 85 | } 86 | cd ${slug}.git 87 | git push ${bitbucket_server_url}/scm/${project}/${slug}.git --mirror 88 | } 89 | bitbucket_server_url=https://${bitbucket_username}:${bitbucket_password}@${bitbucket_server} 90 | if [[ ${lfs_enabled:-true} == true ]]; then 91 | git lfs ls-files -a -s 92 | git lfs push --all ${bitbucket_server_url}/scm/${project}/${slug}.git || { 93 | exitcode_lfs_push=$? 
94 | if [[ ${lfs_accept_missing:-false} == true ]]; then 95 | echo "INFO: we accept issues" 96 | else 97 | exit $exitcode_lfs_push 98 | fi 99 | } 100 | fi 101 | 102 | git ls-remote --heads origin > ${WORKSPACE}/origin_heads.txt 103 | git ls-remote --tags origin > ${WORKSPACE}/origin_tags.txt 104 | 105 | git ls-remote --heads ${bitbucket_server_url}/scm/${project}/${slug}.git > ${WORKSPACE}/aws_heads.txt 106 | git ls-remote --tags ${bitbucket_server_url}/scm/${project}/${slug}.git > ${WORKSPACE}/aws_tags.txt 107 | 108 | cd ${WORKSPACE} 109 | 110 | diff -y origin_heads.txt aws_heads.txt && echo "All good - heads are identical" || { 111 | echo "ERROR: heads differ" 112 | exit_code=2 113 | } 114 | 115 | diff -y origin_tags.txt aws_tags.txt && echo "All good - tags are identical" || { 116 | echo "ERROR: tags differ" 117 | exit_code=2 118 | } 119 | 120 | 121 | rm -rf ${slug}-test.git 122 | git clone ${bitbucket_server_url}/scm/${project}/${slug}.git --mirror ${slug}-test.git 123 | if [[ ${lfs_enabled:-true} == later ]]; then 124 | cd ${slug}-test.git 125 | git lfs install --local 126 | git lfs fetch --all || { 127 | exitcode_lfs_refetch=$? 
128 | if [[ ${lfs_accept_missing:-false} == true ]]; then 129 | echo "INFO: we accept issues" 130 | else 131 | exit $exitcode_lfs_refetch 132 | fi 133 | } 134 | fi 135 | 136 | 137 | exit ${exit_code:-0} 138 | -------------------------------------------------------------------------------- /bitbucket/run_git_gc.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | function end { 6 | powershell -Command Get-Volume || df -h 7 | date "+%Y-%m-%d %H:%M:%S" 8 | } 9 | 10 | trap 'end' 0 SIGINT SIGTERM SIGABRT SIGQUIT SIGHUP 11 | 12 | [[ ${debug:-} == true ]] && set -x 13 | [[ ${1:-} == "" ]] && { echo "Please specify the repos root or the bare repo directory to gc" ; exit 1 ; } 14 | 15 | [[ ${aggressive:-} == true ]] && { echo "Running aggressive" && aggressive="--aggressive"; } 16 | 17 | bitbucket_repo_root=$1 18 | [[ ! -d $bitbucket_repo_root ]] && { echo "$bitbucket_repo_root does not exist" ; exit 1; } 19 | cd $bitbucket_repo_root 20 | 21 | date "+%Y-%m-%d %H:%M:%S" 22 | 23 | if [[ $(git config core.bare) == "true" ]]; then 24 | repos=$(basename `pwd`) 25 | echo "Executing single repo: $repos" 26 | cd .. 27 | else 28 | repos=$(ls -1) 29 | printf "Executing server repos: %s\n" $(ls -1 | wc -l) 30 | fi 31 | 32 | powershell -Command Get-Volume || df -h 33 | 34 | IFS=$'\r\n' 35 | for repo_id in $repos; do 36 | echo 37 | echo "BEGIN: $repo_id" 38 | cd $repo_id 39 | cat repository-config 40 | printf "%s - before\n" "$(du -sh . )" 41 | printf "\nEmpty paths in refs/:\n" 42 | /usr/bin/find refs/ -type d -empty 43 | echo 44 | printf "count-objects:\n" 45 | git count-objects -v || { 46 | echo $? 47 | ls -la 48 | cd - 49 | echo "END: $repo_id" 50 | continue 51 | } 52 | echo 53 | git_gc_cmd="git gc ${aggressive:-}" 54 | if [[ ${dryrun:-} == true ]]; then 55 | echo "Dryrun: ${git_gc_cmd}" 56 | else 57 | eval ${git_gc_cmd} || { 58 | sleep 60 59 | eval ${git_gc_cmd} || { 60 | echo $?
61 | ls -la 62 | cd - 63 | echo "END: $repo_id" 64 | continue 65 | } 66 | } 67 | fi 68 | echo 69 | echo "Branches: after" 70 | /usr/bin/find refs/ -type d -empty 71 | printf "count-objects:\n" 72 | git count-objects -v 73 | printf "%s - after\n" "$(du -sh . )" 74 | cd - 75 | echo "END: $repo_id" 76 | echo 77 | done 78 | 79 | -------------------------------------------------------------------------------- /crucible/README.md: -------------------------------------------------------------------------------- 1 | # Create a review report based on a PAC markdown release note and Jira and Crucible 2 | 3 | The python script creates an ascii review report harvesting data from Crucible based on a report already generated by 4 | PAC (Praqmatic Automated Changelog) and this template: https://github.com/Praqma/Praqmatic-Automated-Changelog/blob/master/templates/default_id_report.md 5 | 6 | ## Instructions 7 | * Update the script with credentials 8 | * Update the script with server url 9 | * Run the script: `python crucible_review_report.py --pac_md_file [ --listaccum true ]` 10 | 11 | ## NOTES 12 | * Orignally developed at a customer some time ago, so the instructions are not verified 13 | -------------------------------------------------------------------------------- /crucible/crucible_review_report.py: -------------------------------------------------------------------------------- 1 | import re 2 | import argparse 3 | import requests 4 | import json 5 | from dotmap import DotMap 6 | 7 | 8 | user='' 9 | password='' 10 | 11 | crucible_rest_base_url='/rest-service/' #http_base_url 12 | crucible_reviewsForIssue_extension_url='search-v1/reviewsForIssue?jiraKey=' 13 | crucible_review_interface_extension_url='reviews-v1/' 14 | 15 | headers={'content-type':'application/json', 'accept':'application/json'} 16 | auth=(user,password) 17 | 18 | parser = argparse.ArgumentParser() 19 | parser.add_argument("-f", \ 20 | "--pac_md_file", \ 21 | required=True, \ 22 | help="The markdown file 
produced by PAC tools" ) 23 | parser.add_argument("-d", \ 24 | "--debug", \ 25 | action='store_true', \ 26 | default='false', \ 27 | help="Print debug information" ) 28 | parser.add_argument("-a", \ 29 | "--listaccum", \ 30 | action='store_true', \ 31 | default='false', \ 32 | help="Also list the commits for Accumulated .... merges from ready2master. String match based" ) 33 | 34 | args = parser.parse_args() 35 | 36 | pac_md_file=args.pac_md_file 37 | debug=args.debug 38 | listaccum=args.listaccum 39 | 40 | 41 | review_ok=[] 42 | review_review=[] 43 | review_draft=[] 44 | review_other=[] 45 | 46 | def get_issue_ok_list_crucible(issue_id): 47 | reviewsForIssue = crucible_rest_base_url 48 | reviewsForIssue += crucible_reviewsForIssue_extension_url 49 | reviewsForIssue += issue_id 50 | # print reviewsForIssue 51 | r = requests.get(reviewsForIssue, auth=auth, headers=headers) 52 | 53 | parsed_json = json.loads(r.text) 54 | 55 | # print json.dumps(parsed_json, sort_keys=True, indent=4) 56 | 57 | reviewData_list = parsed_json['reviewData'] 58 | reviews=[] 59 | for reviewitem in reviewData_list: 60 | permaId = reviewitem['permaId']['id'] 61 | reviews.append(permaId) 62 | 63 | 64 | for review in reviews: 65 | url_str = crucible_rest_base_url 66 | url_str += crucible_review_interface_extension_url 67 | url_str += review 68 | url_str += '/details' 69 | r = requests.get(url_str, auth=auth, headers=headers) 70 | parsed_json = json.loads(r.text) 71 | # print json.dumps(parsed_json, sort_keys=True, indent=4) 72 | 73 | jsonmap = DotMap(parsed_json) 74 | review_status = jsonmap.state 75 | if review_status == 'Closed': 76 | for revision, revision2 in jsonmap.reviewItems.items(): 77 | string = revision2[0]['expandedRevisions'][0]['revision'][:7] 78 | review_ok.append(string) 79 | elif review_status == 'Review': 80 | for revision, revision2 in jsonmap.reviewItems.items(): 81 | string = revision2[0]['expandedRevisions'][0]['revision'][:7] 82 | review_review.append(string) 83 | elif 
review_status == 'Draft' : 84 | for revision, revision2 in jsonmap.reviewItems.items(): 85 | string = revision2[0]['expandedRevisions'][0]['revision'][:7] 86 | review_draft.append(string) 87 | else: 88 | for revision, revision2 in jsonmap.reviewItems.items(): 89 | string = revision2[0]['expandedRevisions'][0]['revision'][:7] 90 | string += ':' + review_status 91 | review_other.append(string) 92 | 93 | 94 | pac_issuenone_regex = re.compile('^## [Unspecified|Nones].*$') 95 | pac_issueid_regex = re.compile('^##\s(SAM-\d+).*$') 96 | pac_issueid_accum_regex = re.compile('^-\s([a-z0-9]{7}):\s(Accumulated commit of the following from branch.*$)') 97 | pac_issueid_real_regex = re.compile('^-\s([a-z0-9]{7}):\s(.*$)') 98 | pac_issueid_stats_regex = re.compile('^##\sStatistics') 99 | 100 | with open(pac_md_file) as f: 101 | for line in f: 102 | m = pac_issueid_regex.match(line) 103 | if m: 104 | get_issue_ok_list_crucible(m.group().replace('## ','').split(' ')[0]) 105 | continue 106 | 107 | #print review_ok 108 | #print review_review 109 | #print review_draft 110 | #print review_other 111 | 112 | with open(pac_md_file) as f: 113 | for line in f: 114 | m = pac_issueid_regex.match(line) 115 | if m: 116 | print '\n' 117 | print m.group().replace('## ','').replace(' Unspecified','Unspecified').replace(' Nones', 'Nones') 118 | # Unspecified','Unspecified').replace(' Nones:) 119 | # get_issue(m.group().replace('##','')) 120 | continue 121 | 122 | m = pac_issueid_accum_regex.match(line) 123 | if m: 124 | if listaccum == True: 125 | print "( )Accum : " + m.group(1)[0:7] + " " + m.group(2) 126 | continue 127 | 128 | m = pac_issueid_real_regex.match(line) 129 | if m: 130 | if m.group(1)[0:7] in review_ok: 131 | print " (+)Real : " + m.group(1)[0:7] + " " + m.group(2) 132 | elif m.group(1)[0:7] in review_review: 133 | print " (r)Real : " + m.group(1)[0:7] + " " + m.group(2) 134 | elif m.group(1)[0:7] in review_draft: 135 | print " (d)Real : " + m.group(1)[0:7] + " " + m.group(2) 136 | 
else: 137 | print " ( )Real : " + m.group(1)[0:7] + " " + m.group(2) 138 | continue 139 | 140 | pac_issueid_stats_regex 141 | m = pac_issuenone_regex.match(line) 142 | if m: 143 | print 144 | print (m.group().replace('## ', '') + ':') 145 | continue 146 | 147 | if review_other: 148 | print "For some reason these reviews are in weird state, but refered:" 149 | for review in review_other: 150 | print review -------------------------------------------------------------------------------- /doCppCheck/do_cppcheck-settings-template.rb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/ruby 2 | 3 | module CppcheckSettings 4 | 5 | ############################# 6 | # do_cppcheck configuration # 7 | ############################# 8 | # Should this script fail with an error if cppcheck reports it can not 9 | # find every header file during analysis? 10 | # This mean the scripts abort if the following error is found in the results: 11 | # "error id="missingInclude" severity="style" msg="Cppcheck cannot find all the include files (use --check-config for details)" 12 | FAIL_ON_MISSING_HEADER_FILES = true 13 | # fail script if errors from commands are reported? 14 | FAIL_ON_STDERR_MSG = true 15 | 16 | ############################################################################### 17 | # Sources 18 | ############################################################################### 19 | ### Searching for source files configuration ### 20 | # Search recursively from this lists of paths (regexp or string). 21 | # Current path . (period) is allowed. 22 | SRC_SEARCH_PATH = [ "." ] 23 | # Find file matching this regexp: 24 | SRC_FILE_SEARCH_REGEXP = /^[a-zA-Z0-9_]+.cpp/ # might also use /^[a-zA-Z0-9_]+.cpp$/ 25 | 26 | ### Filtering sources and excluding sources ### 27 | # Whitelist of filename suffixes - regexp or string 28 | # MUST include the . 
(period) 29 | # This list can be empty, if SRC_FILE_SEARCH_REGEXP only find exactly those file 30 | # with correct ending. 31 | SRC_SUFFIX_LIST = [ /\.cpp$/ ] 32 | 33 | # Exclude all files and directories containing one of these string 34 | # or matching one the regexp. 35 | SRC_BLACKLIST_PATTERNS = [ /\.\/include\/msg_bus*/, "host", "vendor", "docs", "examples", "test", "tools", /\/moc_*/, /Adaptor.cpp$/, /Proxy.cpp$/ ] 36 | 37 | ############################################################################### 38 | # Headers - settings follows same conventions as the sources above 39 | ############################################################################### 40 | HEADER_SEARCH_PATH = [ "." ] 41 | HEADER_FILE_SEARCH_REGEXP = /^[a-zA-Z0-9_]+.\.h$/ 42 | HEADER_SUFFIX_LIST = [ ] 43 | 44 | HEADER_BLACKLIST_PATTERNS = [ /\.\/host\//, /\.\/docs\//, /\.\/examples\//, /\.\/tools\//, /\.\/test\// ] 45 | 46 | # Append these headers manually to Cppcheck. Eg. if they can not be found automatically. 47 | # The will be given to cppcheck with the prefix -I (for include dirs) 48 | HEADER_APPEND_DIR_LIST = [ "./msgbus_applications/view/include", "/usr/include/qt4/QtCore" ] 49 | 50 | 51 | ########################## 52 | # Cppcheck configuration # 53 | ########################## 54 | # Cppcheck executeable name 55 | CPPCHECK_EXEC="cppcheck" 56 | 57 | # Currently we execute cppcheck this way, first with --check-config then with --enable=all for the real analysis 58 | # cppcheck --enable=all" + " --file-list=cppcheckSourceFiles.lst" + " --includes-file=cppcheckHeaderFiles.lst" 59 | # The real check also have " --xml 2> cppcheck-results.xml" added for output to a file. 60 | # To check possible parameters, run cppcheck --help. 61 | # For example you could add -DQT_DEPRECATED -DQT3_SUPPORT to avoid checking those configurations. 
62 | CPPCHECK_ADDITIONAL_PARAMETERS = [ "-DQT3_support", "-DQT_DEPRECATED" ] 63 | 64 | # Note on threads: do_cppcheck.rb script will automatically look for env. var. CPPCHECK_THREAD_COUNT and use that 65 | # as -j $CPPCHECK_THREAD_COUNT for using more jobs in parallel when checking. It is not part of the additional 66 | # parameter above, as optimal thread count will differ from build host to build host, thus it better selecting it 67 | # automatically. 68 | 69 | # Reference to file with errors to whitelist 70 | # Read the file for how to suppress warnings from Cppcheck. 71 | # It it does not exist, a template should be available with the script 72 | # Comment out if not used! 73 | CPPCHECK_SUPPRESSION_FILE = "do_cppcheck-suppressions.lst" 74 | 75 | end 76 | -------------------------------------------------------------------------------- /doCppCheck/do_cppcheck-suppressions-template.lst: -------------------------------------------------------------------------------- 1 | // Cppcheck states in it's help the following way to whitelist error. 2 | // This can be used for error we either acknowledge is okay, or that we can not 3 | // fix due to third party code etc. 4 | // Cppcheck uses the following syntax. 5 | // -suppress= Suppress warnings that match . The format of 6 | // is: 7 | // [error id]:[filename]:[line] 8 | // The [filename] and [line] are optional. If [error id] 9 | // is a wildcard '*', all error ids match. 10 | // --suppressions-list= 11 | // Suppress warnings listed in the file. Each suppression 12 | // is in the same format as above. 13 | 14 | // This file is included, through the --suppression-list paramter, if configured in 15 | // the do_cppcheck-settings.rb file. 16 | // 17 | // Do not use linenumber - but always id and file. Note it supresses alle the warnings with the same id. 
18 | // 19 | // This it QT error, we can not fix 20 | // 21 | // 22 | // 23 | // We will exclude everything regarding gt4 stuff 24 | *:/usr/include/qt4* 25 | 26 | // You can also exclude less like just: 27 | // preprocessorErrorDirective:/usr/include/qt4/QtCore/qglobal.h 28 | 29 | 30 | // We do not have control over msgbus_applications/view/ViewMachine.h thus excluding this file also 31 | *:msgbus_applications/view/ViewMachine.h 32 | -------------------------------------------------------------------------------- /exalate/jiracloud-outgoing.groovy: -------------------------------------------------------------------------------- 1 | 2 | replica.reporter = issue.reporter 3 | replica.assignee = issue.assignee 4 | replica.labels = issue.labels 5 | replica.myLabels = issue.labels 6 | replica.descriptionHtml = nodeHelper.getHtmlField(issue, "description") 7 | replica.environmentHtml = nodeHelper.getHtmlField(issue, "Environment") 8 | 9 | replica.linkToJamaFeaturesMD = issue.customFields."Link to Jama Features".value 10 | replica.linkToJamaFeaturesHTML = nodeHelper.getHtmlField(issue, "customfield_11990") 11 | replica.jamaProxy = issue.customFields."Jama proxy".value 12 | 13 | replica.acceptanceCriteriaMD = issue.customFields."Acceptance Criteria".value 14 | replica.acceptanceCriteriaHTML = nodeHelper.getHtmlField(issue, "customfield_11972") 15 | 16 | replica.acceptanceCriteriaHTML = nodeHelper.getHtmlField(issue, "customfield_11972") 17 | 18 | replica.driverProjects = issue.customFields."Driver project(s)" 19 | 20 | replica.requestType = issue.customFields."Platform Request type".value 21 | 22 | replica.fixVersions = issue.fixVersions 23 | replica.affectsVersions = issue."Affects versions" 24 | replica.targetBuild = issue."Target Build"?.value 25 | replica.test = issue."Test" 26 | 27 | replica.userImpact = issue."User Impact"?.value 28 | 29 | replica.foundIn = issue.customFields."Found in".value 30 | replica.foundBy = issue.customFields."Found by"?.value 31 | 
replica.fixedIn = issue.customFields."Fixed in"?.value 32 | 33 | replica.function = issue.customFields."Function"?.value 34 | 35 | 36 | replica.reopenedDate = issue.customFields."Reopened date"?.value 37 | 38 | replica.issueLinks = issue.issueLinks 39 | replica.issueLinksString = issue.issueLinks.collect{ it -> 40 | def local_url='https://'+ jira_cloud_prefix +'.atlassian.net/browse/' + nodeHelper.getHubIssue(it.otherIssueId).key 41 | '' + local_url + ":" + it.linkName + "," + it.isOutward 42 | } 43 | //debug.error("${nodeHelper.getHubIssue(issue.issueLinks[0].otherIssueId).key}") 44 | 45 | replica.remoteIssueLinks = nodeHelper.getRemoteIssueLinks(issue) 46 | 47 | replica.weblinks = issue.weblinks.collect { weblink -> 48 | [ 49 | url: weblink.url, 50 | title: weblink.title, 51 | description: weblink.description 52 | ] 53 | } 54 | replica.weblinks2 = issue.get("issuelinks").findAll { link -> 55 | link.type.name == "Web Link" 56 | }.collect { webLink -> 57 | [ 58 | url: webLink.outwardIssue != null ? webLink.outwardIssue.get("url") : null, 59 | title: webLink.outwardIssue != null ? webLink.outwardIssue.get("title") : "", 60 | description: webLink.outwardIssue != null ? 
webLink.outwardIssue.get("description") : "" 61 | ] 62 | } 63 | 64 | replica.derefLinks = issue.issuelinks.collect { link -> 65 | [ 66 | id: link.id, // Get the link ID 67 | url: link.url, 68 | linkName: link.linkName 69 | ] 70 | } 71 | 72 | replica.status = issue.status 73 | replica.parentId = issue.parentId 74 | 75 | // COMMENTS 76 | replica.comments = nodeHelper.getHtmlComments(issue) 77 | //// 78 | 79 | replica.project = issue.project 80 | replica.key = issue.key 81 | replica.type = issue.type 82 | replica.summary = issue.summary 83 | replica.priority = issue.priority 84 | replica.attachments = issue.attachments 85 | 86 | replica.components = issue.components 87 | 88 | replica.storyPoints = issue.customFields."Story Points"?.value 89 | replica.originalEstimate = issue.originalEstimate 90 | 91 | replica.TeamEstimate1 = issue.customFields."Team Estimate 1".value 92 | replica.TeamEstimate2 = issue.customFields."Team Estimate 2".value 93 | replica.TeamEstimate3 = issue.customFields."Team Estimate 3".value 94 | replica.TeamEstimate4 = issue.customFields."Team Estimate 4".value 95 | replica.TeamEstimate5 = issue.customFields."Team Estimate 5".value 96 | 97 | replica.Estimate1 = issue.customFields."Estimate 1".value 98 | replica.Estimate2 = issue.customFields."Estimate 2".value 99 | replica.Estimate3 = issue.customFields."Estimate 3".value 100 | replica.Estimate4 = issue.customFields."Estimate 4".value 101 | replica.Estimate5 = issue.customFields."Estimate 5".value 102 | -------------------------------------------------------------------------------- /git-repo-analyzer/.gitignore: -------------------------------------------------------------------------------- 1 | *.txt 2 | *.tmp 3 | -------------------------------------------------------------------------------- /git-repo-analyzer/README.md: -------------------------------------------------------------------------------- 1 | # git-workspace-file-type-analyzer.sh 2 | 3 | The objective is to traverse a 'dir' to figure 
out how all files are interpreted by the unix tool `file` and how `git` sees the file. 4 | 5 | It is important when working with `git` ( or similar DVCS ) as all files and their revisions are by default distributed to everyone. It is up to you to decide what you should or should not do with each file and/or extension. 6 | 7 | The analyzer finds all files in the `pwd` and outputs: 8 | * a set of file lists with different aspects of the analysis: 9 | * ascii_files_size_sorted.txt 10 | * binary_files_size_sorted.txt 11 | * verdict_size_sorted.txt 12 | * verdict_type_sorted.txt 13 | * ascii_extension.txt 14 | * binary_extension.txt 15 | * Type legend: 16 | * gA: Git ascii 17 | * gB: Git binary 18 | * fA: `file` tool reported 'ASCII text' or similar 19 | * fB: `file` tool reported other than 'ASCII text' or similar 20 | * fE: `file` tool reported 'empty' 21 | 22 | Usage: `git-workspace-file-type-analyzer.sh ` 23 | 24 | # git-object-sizes-in-repo-analyzer.sh 25 | 26 | The objective is to analyze an already existing git repo for all files in the whole history. Each file is listed as its entry/entries in the internal data structure and their impact on the disc. For this reason the amount of revisions of a file does not correspond to the amount of entries in the output list. If each revision of a file is interesting, this is also available. 27 | 28 | It is supported to give it a sub-dir-path in case of submodules. 29 | 30 | It is designed to be executed from a Jenkins Freestyle or Matrix job and it stores the output files in the WORKSPACE variable dir. WORKSPACE should be an absolute path. If not set, it stores them in "." 
31 | 32 | Output files: 33 | * `bigtosmall_errors.txt` ( if an error occurred while parsing blobs directly stored in the .idx ) 34 | * `bigtosmall_errors_revision.txt` ( if an error occurred while parsing blobs stored as revisions/deltas in the .idx ) 35 | * `bigtosmall_sorted_size_files.txt` ( path/file impact in the repository which is stored directly in the .idx file - usually big and binary files (*) ) 36 | * `bigtosmall_sorted_size_files_revisions.txt` ( path/file impact in the repository which is stored directly in the .idx file - usually big and binary files (*) ) 37 | * `bigtosmall_sorted_size_total.txt` ( the sum of each path/file and amount of revisions in the `bigtosmall_sorted_size_files.txt` file (**) ) 38 | * `bigtosmall_sorted_size_total_revisions.txt` ( the sum of each path/file and amount of revisions in the `bigtosmall_sorted_size_files_revisions.txt` file (**) ) 39 | * `branches_embedded.txt` ( a list of branches which are embedded, hence not leaves in the history tree - hence the branches are targeted to be deleted (***) ) 40 | * `branches_embedded_tagged.txt` ( a list of branches which are embedded, hence not leaves in the history tree, but also tagged - hence targeted to be deleted (***) ) 41 | * `branches_leaves.txt` ( a list of branches which are leaves in the history tree, but not tagged - hence likely active (****) ) 42 | * `branches_leaves_tagged.txt` ( a list of branches which are leaves in the history tree and tagged - hence they could be targeted to be deleted (****) ) 43 | 44 | (*) : The H/B marker means the path/file is in the current revision HEAD(H) or secondarily in a branch(B); the blob check-sum makes the line unique; the size is in bytes. Path/file: all files are listed sorted at their largest(first) appearance. Remember to check both files for the total impact and to understand to what extent the path/file is packed partially/fully. 
45 | (**) : The total size in in bytes; H/B marker mean of the path/file is in current revision HEAD(H) or secondary in a branch(B); Amount of instances found; Path/file. Remember to check both files for total impack and understanding to which extended the path/file packed partially/fully. 46 | (***) : List the branches ; last sha1 - committer date ; refs pointing to sha1 ; git commit subject 47 | (****) : List the branches ; amount commit/files compared to default branch; last sha1 - committer date ; refs pointing to sha1 ; git commit subject 48 | 49 | Usage: `[debug=true] [repack=false] [invest_remote_branches=true=false] [WORKSPACE=``] git-object-sizes-in-repo-analyzer.sh []` 50 | 51 | # git-sizer (external tool) 52 | In combination with the above tools for deep analysis on object level it could also be interesting to get a overview of the stats of the repository. It is also advised to read the recommandations for working with git repositories. 53 | 54 | https://github.com/github/git-sizer 55 | 56 | 57 | -------------------------------------------------------------------------------- /git-repo-analyzer/git-find-sha1-of-leaf-tags.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu -o pipefail 4 | 5 | [[ ${debug:-} == true ]] && set -x 6 | 7 | cd $1 8 | 9 | 10 | declare -A leaf_sha1s 11 | 12 | for sha1 in $(git rev-list --all --children | grep -E "^[a-f0-9]{40}$" | sort -u) ; do 13 | tags_of_sha1=$( git tag --points-at $sha1 ) 14 | if [[ ${tags_of_sha1:-} != "" ]]; then 15 | leaf_sha1s[$sha1]="${tags_of_sha1}" 16 | fi 17 | done 18 | 19 | if [[ "${#leaf_sha1s[@]}" -eq 1 ]]; then 20 | found_sha1=${leaf_sha1s[@]} 21 | echo "found_sha1=${!leaf_sha1s[@]}" 22 | exit 0 23 | else 24 | printf "More than one leaf:\n" 25 | { 26 | for sha1 in "${!leaf_sha1s[@]}"; do 27 | printf "%s %s\n" "$sha1" "${leaf_sha1s[$sha1]}" 28 | done 29 | } | sort -h 30 | exit 1 31 | fi 32 | 
-------------------------------------------------------------------------------- /git-repo-analyzer/git-object-sizes-usages-snips.md: -------------------------------------------------------------------------------- 1 | # Installing 2 | I usually "install" the repo analyser in the CI(Jenkins etc.) job this way: 3 | ```#!/bin/bash 4 | set -euo pipefail 5 | 6 | if [[ -d code-utils ]]; then 7 | git -C code-utils fetch origin -ap 8 | git -C code-utils reset --hard origin/master 9 | else 10 | git clone https://github.com/Praqma/code-utils.git 11 | fi 12 | ``` 13 | 14 | # Get the repo to analyze 15 | Either you have already cloned/update the repo from the SCM plugin of your CI system. You can do bare, mirror or sparse as the workspace is not needed. Or you can now clone it: 16 | ``` 17 | #!/bin/bash 18 | set -euo pipefail 19 | 20 | if [[ -d ${repo}.git ]]; then 21 | git -C {trepo} fetch origin -ap 22 | else 23 | git clone --bare ${repo} --mirror ${repo}.git 24 | fi 25 | ``` 26 | You now have the repo to analyze in your workspace. 
27 | You can also at this stage add a `git lfs migrate` or `git filter-repo` to change the history and analyze the impack of your efforts ala 28 | 29 | ## LFS migrate 30 | ``` 31 | git lfs migrate import -y --everything \ 32 | --include="\ 33 | *.tar,*.bz2,*.mat,*.zip,*.wav,*.elf,*.exe,*.cof,*.f32,*.sdf,*.obj,*.dll,*.blob,*.pdb,*.a,*.dbg,*.bmp,*.pcm,*.yuv\ 34 | ,*.bsc,*.dfu,*.png,*.jpg,*.pdf,*.ai,*.doc,*.docx,*.ppt,*.pptx,*.xls,*.xlsx\ 35 | ,*.mp3,*.pyd,*.so,*.rom,*.mdl,*.jar,*.fig,*.bin,*.lib\ 36 | ,*.Lib,*.EXE,*.LIB,*.PCM,*.PNG\ 37 | ,*.Exe\ 38 | ,GRU512_res_mel_GRUweights_bestepoch\ 39 | ,C_voice_av_imag,C_voice_av_real,C_noise_av_imag,C_noise_av_real\ 40 | " 41 | ``` 42 | 43 | ## Filter-repo 44 | ``` 45 | filter_repo_file="../filter-repo-clean-file.txt" && rm -f ${filter_repo_file} 46 | 47 | IFS=' ' 48 | for split_path in ${split_paths}; do 49 | echo "${split_path}" >> $filter_repo_file 50 | done 51 | 52 | git filter-repo \ 53 | --force \ 54 | --replace-refs delete-no-add \ 55 | --paths-from-file $filter_repo_file 56 | 57 | git gc --prune 58 | 59 | du -sh ./objects 60 | ``` 61 | 62 | 63 | # Running it 64 | ``` 65 | bash code-utils/git-repo-analyzer/git-object-sizes-in-repo-analyzer.sh ${target_repo}.git 66 | ``` 67 | 68 | # Archiving 69 | I usually archive the `*.txt` for later analyzis and sharing etc 70 | -------------------------------------------------------------------------------- /git-repo-analyzer/git_create-update_dir2gits.sh: -------------------------------------------------------------------------------- 1 | set -x 2 | set -e 3 | 4 | root_folder=`pwd` 5 | git_folder=$1 6 | cd $git_folder 7 | 8 | if [ "${BUILD_URL}X" == "X" ] ; then 9 | export BUILD_URL="Test-run" 10 | fi 11 | 12 | export PATH=/cygdrive/c/Program\ Files\ \(x86\)/Git/bin:${PATH} 13 | export PATH=/cygdrive/c/Cygwin/bin:${PATH} 14 | 15 | #git config --global core.autocrlf false 16 | #git config --global user.name "Claus Schneider (Praqma)" 17 | #git config --global user.email 
"claus.schneider-ext@praqma.net" 18 | 19 | for dir in `find . -maxdepth 1 -mindepth 1 -type d` ; do 20 | echo $dir 21 | cd $dir 22 | 23 | if [ "${WIPE_GIT_FOLDER}X" == "trueX" ] ; then 24 | rm -rf .git* 25 | fi 26 | 27 | if [ -e .git ] ; then 28 | size_before_commit=`du -sm .git | awk -F" " '{ print $1 }' ` 29 | git add -A 30 | git status 31 | git commit --allow-empty -m "${BUILD_URL} " 32 | else 33 | git init 34 | touch .gitignore 35 | # echo "*.updt" > .gitignore 36 | # echo "view.dat" >> .gitignore 37 | # echo "*.csv" >> .gitignore 38 | git add .gitignore 39 | git commit -m "init: ${BUILD_URL}" 40 | git add -A 41 | git status 42 | git commit --amend --no-edit 43 | size_before_commit=`du -sm .git | awk -F" " '{ print $1 }' ` 44 | fi 45 | size_after_commit=`du -sm .git | awk -F" " '{ print $1 }' ` 46 | size_delta=`echo "${size_after_commit} - ${size_before_commit}" | bc -l` 47 | git commit --allow-empty --amend --no-edit -m "${BUILD_URL}: git size in Mb: ${size_after_commit} - ${size_before_commit} = ${size_delta}" 48 | 49 | if [ ! -e git_size.csv ] ; then 50 | echo "SizeInMb,Delta" > git_size.csv 51 | fi 52 | echo "${size_after_commit},${size_delta}" >> git_size.csv 53 | 54 | git status 55 | 56 | git log -2 57 | 58 | du -sk .git 59 | 60 | du -sh .git 61 | 62 | echo "Leaving: $dir" 63 | cd .. 64 | 65 | done 66 | cd $root_folder 67 | -------------------------------------------------------------------------------- /git-repo-analyzer/git_meassureGits.sh: -------------------------------------------------------------------------------- 1 | set -x 2 | set -e 3 | 4 | root_folder=`pwd` 5 | 6 | export PATH=/cygdrive/c/Program\ Files\ \(x86\)/Git/bin:${PATH} 7 | export PATH=/cygdrive/c/Cygwin/bin:${PATH} 8 | 9 | git config --global core.autocrlf false 10 | git config --global user.name "my name" 11 | git config --global user.email "my@email" 12 | 13 | for dir in `find . 
-maxdepth 1 -mindepth 1 -type d` ; do 14 | echo $dir 15 | cd $dir 16 | csv_file="${root_folder}/git_size_$(basename ${dir}).csv" 17 | size_after_commit=`du -sm .git | awk -F" " '{ print $1 }' ` 18 | 19 | if [ -e ${csv_file} ] ; then 20 | size_before_commit=`cat ${csv_file} | tail -1 | awk -F "," '{print $1}'` 21 | else 22 | size_before_commit="${size_after_commit}" 23 | fi 24 | 25 | size_delta=`echo "${size_after_commit} - ${size_before_commit}" | bc -l` 26 | 27 | if [ ! -e ${csv_file} ] ; then 28 | echo "SizeInMb,Delta" > ${csv_file} 29 | fi 30 | echo "${size_after_commit},${size_delta}" >> ${csv_file} 31 | 32 | git status 33 | 34 | git log -2 35 | 36 | du -sk .git 37 | 38 | du -sh .git 39 | 40 | echo "Leaving: $dir" 41 | cd .. 42 | 43 | done 44 | cd $root_folder 45 | -------------------------------------------------------------------------------- /git-utils/git-batch-push-process-pull.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "git checkout master" 4 | git checkout master 5 | 6 | echo "git creating new local branch from timestamp" 7 | INPUT_BRANCH_NAME=$(date +%s) 8 | git checkout -b $INPUT_BRANCH_NAME 9 | 10 | echo "Add new files to the staging area" 11 | git add . 12 | 13 | echo "Commit the new files" 14 | git commit -m "Add new files" 15 | 16 | echo "New files committed:" 17 | git diff --cached --name-only 18 | 19 | git push -u origin $INPUT_BRANCH_NAME 20 | 21 | echo "Branch '$INPUT_BRANCH_NAME' pushed to remote." 22 | 23 | echo "waiting for output from pipeline..." 24 | 25 | #it will have name like refs/heads/1739360601_20250212.24_output 26 | echo -e "\n\n\tif pipeline take longer than 60s check this page\n\t\n\n" 27 | while ! 
git ls-remote --exit-code --heads origin refs/heads/$INPUT_BRANCH_NAME*_output 28 | do echo 'Hit CTRL+C to stop';sleep 5; done 29 | 30 | if git ls-remote --exit-code --heads origin refs/heads/$INPUT_BRANCH_NAME*_output 2>&1 1>/dev/null 31 | then 32 | OUTPUT_BRANCH_NAME=$(git ls-remote --exit-code --heads origin refs/heads/$INPUT_BRANCH_NAME*_output| cut -d/ -f3-) 33 | echo "pipeline created $OUTPUT_BRANCH_NAME branch, running git pull and checkout" 34 | git pull 2>&1 1>/dev/null 35 | 36 | git checkout $OUTPUT_BRANCH_NAME 37 | git checkout master 38 | #it has strange bug, after git pull and checkout to master, git does not see files, so im checkouting to output branch, then to master 39 | 40 | git checkout $OUTPUT_BRANCH_NAME audio_files/*.wav 2>&1 1>/dev/null 41 | git checkout $OUTPUT_BRANCH_NAME audio_files/*.txt 2>&1 1>/dev/null 42 | 43 | echo "removing remote input branch" 44 | git push origin --delete $INPUT_BRANCH_NAME 45 | 46 | else 47 | echo "remote branch not found" 48 | exit -1 49 | fi 50 | -------------------------------------------------------------------------------- /git-utils/git-retag.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Enable strict mode for better error handling 4 | set -euo pipefail 5 | 6 | while getopts ":o:n:dph" opt; do 7 | case ${opt} in 8 | o ) 9 | tag_orig=$OPTARG 10 | ;; 11 | n ) 12 | tag_new=$OPTARG 13 | ;; 14 | p ) 15 | push_new_tag=true 16 | ;; 17 | d ) 18 | delete_old_tag=true 19 | ;; 20 | \?|h ) 21 | echo "Usage: $0 -o -n [-p [-d]]" 22 | echo " -o Specify the old tag to be retagged" 23 | echo " -n Specify the new tag name" 24 | echo " -p Push the new tag to remote" 25 | echo " -d Delete the old tag from remote after retagging: only if you use -p" 26 | echo " -h Show this help message" 27 | exit 1 28 | ;; 29 | esac 30 | done 31 | 32 | if [[ -z "${tag_orig:-}" ]] || [[ -z "${tag_new:-}" ]]; then 33 | $0 -h 34 | exit 1 35 | fi 36 | 37 | trap 'rm -f 
./tag_meta_data.txt' EXIT 38 | 39 | function get_old_tag_info() { 40 | 41 | git show refs/tags/$tag_orig > /dev/null || git fetch --force origin --no-tags refs/tags/$tag_orig:refs/tags/$tag_orig 42 | export GIT_AUTHOR_DATE="$(git tag -l --format="%(taggerdate:iso)" ${tag_orig})" 43 | git tag -l --format="%(taggerdate:raw)" ${tag_orig} 44 | export GIT_COMMITTER_DATE="${GIT_AUTHOR_DATE}" 45 | 46 | export GIT_AUTHOR_NAME=$(git tag -l --format="%(taggername)" ${tag_orig}) 47 | export GIT_COMMITTER_NAME=${GIT_AUTHOR_NAME} 48 | export GIT_AUTHOR_EMAIL=$(git tag -l --format="%(taggeremail)" ${tag_orig}) 49 | export GIT_COMMITTER_EMAIL=${GIT_AUTHOR_EMAIL} 50 | export GIT_TAGGER_NAME=${GIT_AUTHOR_NAME} 51 | export GIT_TAGGER_EMAIL=${GIT_AUTHOR_EMAIL} 52 | export GIT_TAGGER_DATE=${GIT_AUTHOR_DATE} 53 | git tag -l --format '%(contents)' ${tag_orig} > ./tag_meta_data.txt 54 | } 55 | 56 | function create_new_tag() { 57 | 58 | if [ ! -f ./tag_meta_data.txt ]; then 59 | echo "Error: Metadata file not found. Cannot create new tag." 
60 | exit 1 61 | fi 62 | 63 | echo "Creating new tag ${tag_new} with data and message from ${tag_orig}" 64 | git tag -f -a -F ./tag_meta_data.txt ${tag_new} ${tag_orig}^{} 65 | git tag -l --format="%(taggerdate:iso)" ${tag_new} 66 | git tag -l --format="%(taggerdate:raw)" ${tag_new} 67 | 68 | } 69 | 70 | function delete_old_tag_remotely() { 71 | echo "Deleting old tag ${tag_orig} from remote" 72 | git push origin :refs/tags/${tag_orig} 73 | echo "Deleting old tag ${tag_orig} locally" 74 | git tag -d ${tag_orig} 75 | } 76 | 77 | function push_new_tag() { 78 | echo "Pushing new tag ${tag_new} to remote" 79 | git push origin refs/tags/${tag_new} 80 | } 81 | 82 | 83 | get_old_tag_info ${tag_orig} 84 | create_new_tag ${tag_new} 85 | [[ ${push_new_tag:-false} == true ]] && push_new_tag ${tag_orig} && { 86 | [[ ${delete_old_tag:-false} == true ]] && delete_old_tag_remotely ${tag_orig} 87 | } 88 | -------------------------------------------------------------------------------- /git-utils/git-submodule-usage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Enable strict mode for better error handling 4 | set -euo pipefail 5 | 6 | while getopts "s:h" opt; do 7 | case ${opt} in 8 | s ) 9 | subm_repo_name="${OPTARG}" 10 | shift 11 | ;; 12 | \?|h ) 13 | echo "Usage: $0 -s " 14 | echo " -s Specify the old tag to be retagged" 15 | exit 1 16 | ;; 17 | esac 18 | done 19 | 20 | if [[ -z "${subm_repo_name:-}" ]]; then 21 | $0 -h 22 | exit 1 23 | fi 24 | 25 | for subm_path in $(git config get -f .gitmodules --all --regexp ".*/${subm_repo_name}.path$"); do 26 | printf "Submodule path: %s\n" "$subm_path" 27 | for sha1_root in $(git rev-list --all -- ${subm_path}) ; do 28 | sha1_sub="$(git ls-tree $sha1_root ${subm_path} | cut -d " " -f 3 | cut -d$'\t' -f 1 )" || true 29 | 30 | if [ -z "${sha1_sub:-}" ]; then 31 | sha1_sub="(no submodule)" 32 | else 33 | git -C ${subm_path} rev-parse --verify $sha1_sub > /dev/null 2>&1 || { 34 
| git -C ${subm_path} fetch origin $sha1_sub > /dev/null 35 | git artifact fetch-tags -s $sha1_sub > /dev/null 36 | git -C ${subm_path} rev-parse --verify $sha1_sub > /dev/null 2>&1 || { 37 | sha1_sub_ls_remote="(dead sha1)" 38 | } 39 | } 40 | 41 | sha1_sub_ls_remote=$(git -C ${subm_path} ls-remote --tags origin | grep $sha1_sub | cut -d / -f 3-) || true 42 | 43 | if [ -z "${sha1_sub_ls_remote:-}" ]; then 44 | sha1_sub_ls_remote="(no tag)" 45 | fi 46 | fi 47 | 48 | remote_branches_contains_count=$(git branch -r --contains $sha1_root | wc -l) 49 | tags_contains_count=$(git tag --contains $sha1_root | wc -l) 50 | 51 | printf "%14.14s %-60.60s %-80.80s %-20.20s\n" \ 52 | "$sha1_sub" \ 53 | "$sha1_sub_ls_remote" \ 54 | "$(git log --oneline -1 --decorate --format="%h %cd %s" $sha1_root | cut -c1-90)" \ 55 | "(ct.br:$remote_branches_contains_count ct.t:${tags_contains_count})" 56 | if [[ "${verbose_branches:-verbose_branches_false}" == true && "$remote_branches_contains_count" -gt 0 ]]; then 57 | git branch -r --contains $sha1_root | grep -e origin/master -e origin/main -e origin/products/.* || true 58 | fi 59 | 60 | done 61 | echo 62 | done 63 | -------------------------------------------------------------------------------- /jenkins-autoupdate-plugins/Jenkinsfile: -------------------------------------------------------------------------------- 1 | pipeline { 2 | agent any 3 | 4 | stages { 5 | stage('Install plugin updates') { 6 | steps { 7 | sh label: 'Get jenkins-cli.jar', script: 8 | ''' 9 | #!/usr/bin/bash 10 | set +x 11 | wget --no-verbose ${JENKINS_URL}jnlpJars/jenkins-cli.jar 12 | ''' 13 | withCredentials( 14 | [usernamePassword( 15 | credentialsId: 'AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA', 16 | passwordVariable: 'PASSWORD', 17 | usernameVariable: 'USERNAME' 18 | )]) 19 | { 20 | sh label: 'Check for updates and install', script: 21 | ''' 22 | bash ./jenkinsJobs/UpdatePlugins/UpdatePlugins.sh 23 | ''' 24 | } 25 | script { 26 | currentBuild.description = readFile 
'./description.txt' 27 | } 28 | archiveArtifacts artifacts: 'description.txt', followSymlinks: false 29 | } 30 | } 31 | } 32 | post { 33 | always { 34 | deleteDir() 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /jenkins-autoupdate-plugins/Readme.MD: -------------------------------------------------------------------------------- 1 | # Jenkins Pipeline to update Jenkins plugins 2 | 3 | This pipeline and script will install all updatable plugins. 4 | 5 | It can be a struggle to keep Jenkins plugin updated, and I have seen many installations running with plugins that has been outdated years ago, and the 6 | more versions you are behind the more reluctant you are to update. 7 | The cure is update when updates are available. 8 | 9 | It utilizes the [jenkins-cli](https://www.jenkins.io/doc/book/managing/cli/) `list-plugins` command to get the list 10 | of installed plugins, and utilizes that the version plugin's version number will be enclosed in parenthesis if there is an update version of the plugin. 11 | 12 | The output from `jenkins-cli list-plugins` is formatted like this example: 13 | 14 | ```text 15 | ansible Ansible plugin 403.v8d0ca_dcb_b_502 16 | ant Ant Plugin 511.v0a_a_1a_334f41b_ 17 | antisamy-markup-formatter OWASP Markup Formatter Plugin (162.v0e6ec0fcfcf6) 18 | apache-httpcomponents-client-4-api Apache HttpComponents Client 4.x API Plugin 4.5.14-208.v438351942757 19 | ... 20 | ``` 21 | 22 | in the example can be updated. An this pipeline *will* update to the new version. 23 | 24 | ## Requirements 25 | 26 | * Jenkins server configured to support [jenkins-cli](https://www.jenkins.io/doc/book/managing/cli/) 27 | * A user credential with permission to update plugins 28 | * The `Safe Restart Plugin` 29 | 30 | ## Setup 31 | 32 | 1. Configure a user with administrator permissions and create an api token for this user 33 | 2. Download jenkins-cli-jar from your 34 | 3. 
Verify jenkins responds to jenkins-cli by executing: 35 | 36 | ```bash 37 | java -jar jenkins-cli.jar -s -auth : list-plugins 38 | ``` 39 | 40 | 4. Create a Jenkins Secret type `username with password`, with the values from step 1. Note of it's ID value 41 | 5. Edit Jenkinsfile and replace `AAAAAAAA-AAAA-AAAA-AAAA-AAAAAAAAAAAA` with the ID value from step 4. 42 | 6. Ensure you have functional, regular back of Jenkins 43 | 7. Create the pipeline and make it run on. 44 | -------------------------------------------------------------------------------- /jenkins-autoupdate-plugins/UpdatePlugins.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | 3 | set +x 4 | 5 | AUTH=$USERNAME:$PASSWORD 6 | MYCLICMDLINE="java -jar ./jenkins-cli.jar -s ${JENKINS_URL} -auth $AUTH" 7 | 8 | TOINSTALL=$(${MYCLICMDLINE} list-plugins | grep -E ')$' | awk '{print $1}' | tr '\n' ' ') 9 | 10 | if [ -z "$TOINSTALL" ]; then 11 | echo "No plugins to update." | tee ./description.txt 12 | else 13 | echo "Updating the following plugins:
" | tee ./description.txt 14 | # shellcheck disable=SC2086 15 | echo ${TOINSTALL} | tr -s ' ' | sed -e 's/[[:space:]]/
/g' | tee --append ./description.txt 16 | echo '
' | tee --append ./description.txt 17 | # shellcheck disable=SC2086 18 | $MYCLICMDLINE install-plugin ${TOINSTALL} -restart 19 | # $MYCLICMDLINE safe-restart -message "Plugins have been updated, must restart" 20 | fi 21 | -------------------------------------------------------------------------------- /jenkins-job-dsl/readme.md: -------------------------------------------------------------------------------- 1 | # Jenkins job DSL 2 | 3 | The Jenkins job DSL groovy scripts creates the job for this project. 4 | 5 | You need a seed job to run the Job DSL script, that is for now manually created. 6 | 7 | Configuration: 8 | 9 | * on github push on this repo, start the job 10 | * process the Jenkins job DSL script: `jenkins-job-dsl/seed.groovy` 11 | * disable removed jobs 12 | * ignore removed views 13 | * NOTE is looks on ready branches, as we suppose job dsl changes comes in this way also. 14 | -------------------------------------------------------------------------------- /jenkins-job-dsl/seed.groovy: -------------------------------------------------------------------------------- 1 | REPOSITORY_URL = 'https://github.com/Praqma/code-utils.git' 2 | MAIN_BRANCH = 'master' 3 | REMOTE_NAME = 'origin' 4 | JOB_LABELS = 'dockerhost1' 5 | AUTOMATION_USER = 'ReleasePraqma' 6 | NUM_OF_BUILDS_TO_KEEP = 100 7 | // id of a credential created on the Jenkins master 8 | GITHUB_PRAQMA_CREDENTIALS = '100247a2-70f4-4a4e-a9f6-266d139da9db' 9 | 10 | INTEGRATION_JOB_NAME = 'code-utils_-_integrate_GEN' 11 | 12 | job(INTEGRATION_JOB_NAME) { 13 | logRotator { 14 | numToKeep(NUM_OF_BUILDS_TO_KEEP) 15 | } 16 | description("Integrate changes from ready branches to master, no verification, \nonly to ensure we follow the A Pragmatic Workflow: http://www.praqma.com/stories/a-pragmatic-workflow/" ) 17 | // Setting quit period as both seed job and the integration job are triggerede by Github push events 18 | // and we want the seed job to run first. 
19 | quietPeriod(15) 20 | label(JOB_LABELS) 21 | 22 | properties { 23 | ownership { 24 | primaryOwnerId('bue') 25 | coOwnerIds('bue') 26 | } 27 | } 28 | 29 | authorization { 30 | permission('hudson.model.Item.Read', 'anonymous') 31 | } 32 | 33 | scm { 34 | git { 35 | remote { 36 | name(REMOTE_NAME) 37 | url(REPOSITORY_URL) 38 | // Chose credential created on the Jenkins master globally with the id GITHUB_PRAQMA_CREDENTIALS 39 | credentials(GITHUB_PRAQMA_CREDENTIALS) 40 | } 41 | branch("$REMOTE_NAME/ready/**") 42 | 43 | extensions { 44 | wipeOutWorkspace() 45 | } 46 | } 47 | } 48 | 49 | triggers { 50 | githubPush() 51 | } 52 | 53 | steps { 54 | shell('echo "This job is only for integration of changes from ready branches. Currently no automated verification."') 55 | } 56 | 57 | wrappers { 58 | buildName('${BUILD_NUMBER}#${GIT_REVISION,length=8}(${GIT_BRANCH})') 59 | pretestedIntegration("SQUASHED", MAIN_BRANCH, REMOTE_NAME) 60 | } 61 | 62 | publishers { 63 | pretestedIntegration() 64 | mailer('bue@praqma.net', false, true) 65 | } 66 | 67 | } 68 | 69 | -------------------------------------------------------------------------------- /jenkins-tricks-examples/KeepLogForeverThisAndUpstreamBuilds.groovy: -------------------------------------------------------------------------------- 1 | manager.listener.logger.println ("") 2 | manager.listener.logger.println ("########################################################") 3 | manager.listener.logger.println ("# Groovy script: Keep Upstream Builds Forever: START ") 4 | manager.listener.logger.println ("########################################################") 5 | manager.listener.logger.println ("") 6 | 7 | import jenkins.model.* 8 | import hudson.model.* 9 | 10 | def setKeepLog(AbstractBuild build, boolean toggle_keep_forever ){ 11 | manager.listener.logger.println("") 12 | 
manager.listener.logger.println("----------------------------------------------------------------------------------------------------------------------------------------------") 13 | manager.listener.logger.println("Upstream:" + build.getParent().getFullName() + " : " + build.number+ ":") 14 | manager.listener.logger.println("----------------------------------------------------------------------------------------------------------------------------------------------") 15 | manager.listener.logger.println(" : canToggleLogKeep(CURRENT) = " + build.canToggleLogKeep()) 16 | manager.listener.logger.println(" : isKeepLog(CURRENT) = " + build.isKeepLog()) 17 | manager.listener.logger.println("") 18 | manager.listener.logger.println(" -> Update it" ) 19 | build.keepLog(toggle_keep_forever) 20 | manager.listener.logger.println("") 21 | manager.listener.logger.println(" : isKeepLog (UPDATED): " + build.isKeepLog()) 22 | manager.listener.logger.println("----------------------------------------------------------------------------------------------------------------------------------------------") 23 | manager.listener.logger.println("") 24 | } 25 | 26 | def handleUpstreamBuildCausesKeepLog(List causes, boolean toggle_keep_forever ) { 27 | if (causes != null && !causes.isEmpty() ) { 28 | for ( Cause current : causes ) { 29 | if ( current instanceof Cause.UpstreamCause) { 30 | final Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) current; 31 | final String projectName = upstreamCause.getUpstreamProject(); 32 | final Integer buildNumber = upstreamCause.getUpstreamBuild(); 33 | 34 | final AbstractProject upstreamProject = (AbstractProject) manager.hudson.getItemByFullName(projectName); 35 | AbstractBuild upstream_build = upstreamProject.getBuildByNumber(buildNumber) 36 | 37 | handleUpstreamBuildCausesKeepLog(upstream_build.getCauses(), toggle_keep_forever) 38 | setKeepLog(upstream_build, toggle_keep_forever) 39 | 40 | } else if ( current instanceof Cause.UserCause ) { 41 
| Cause.UserCause user_cause = (Cause.UserCause)current; 42 | manager.listener.logger.println("NOTE: Cause for the build: User executed " + user_cause.getUserName()); 43 | } 44 | } 45 | } else { 46 | manager.listener.logger.println("causes != null && !causes.isEmpty()"); 47 | } 48 | } 49 | 50 | def build = manager.build 51 | def toggle_keep_forever 52 | 53 | def parsed_toggle_keep_forever = build.buildVariableResolver.resolve("toggle_keep_forever") 54 | manager.listener.logger.println ("toggle_keep_forever parsed as env variable:" + parsed_toggle_keep_forever ) 55 | 56 | if ( parsed_toggle_keep_forever == null || parsed_toggle_keep_forever == "true" ) { 57 | toggle_keep_forever=true 58 | } 59 | if ( parsed_toggle_keep_forever == "false" ) { 60 | toggle_keep_forever=false 61 | } 62 | 63 | 64 | manager.listener.logger.println ("toggle_keep_forever:" + toggle_keep_forever ) 65 | 66 | handleUpstreamBuildCausesKeepLog(build.getCauses(), toggle_keep_forever) 67 | setKeepLog(build, toggle_keep_forever ) 68 | 69 | manager.listener.logger.println ("") 70 | manager.listener.logger.println ("########################################################") 71 | manager.listener.logger.println ("# Groovy script: Keep Upstream Builds Forever: END") 72 | manager.listener.logger.println ("########################################################") 73 | manager.listener.logger.println ("") 74 | -------------------------------------------------------------------------------- /jenkins-tricks-examples/cancelDownStreamJobQueue/JobDSLcancelDownstreamQueue.groovy: -------------------------------------------------------------------------------- 1 | package jobDSLUtils 2 | 3 | public class JobContentConfigurations { 4 | static void configurePostGroovyCancelDownstreamQueues (def this_groovyPostbuildRecorder ){ 5 | this_groovyPostbuildRecorder.with { 6 | script { 7 | script(''' 8 | manager.listener.logger.println ("") 9 | manager.listener.logger.println 
("####################################################################") 10 | manager.listener.logger.println ("# Groovy script: Cancelling downstream queue before triggering them ") 11 | manager.listener.logger.println ("####################################################################") 12 | manager.listener.logger.println ("") 13 | 14 | //import jenkins.model.Jenkins 15 | def jenkinsQueue = manager.hudson.instance.queue 16 | 17 | def downstream_jobs = manager.build.getParent().getDownstreamProjects() 18 | 19 | def downstream_job_name = [] 20 | downstream_jobs.each { job -> 21 | downstream_job_name.add( job.getFullName()) 22 | } 23 | 24 | downstream_job_name.each { job_name -> 25 | manager.listener.logger.println ("Downstream project: " + job_name) 26 | def queue = [] 27 | jenkinsQueue.getItems().each { queue_item -> 28 | if ( queue_item.task.getFullName() == job_name ) { 29 | queue.add(queue_item) 30 | } 31 | } 32 | 33 | def queue_list = [] 34 | queue.each { queue_item -> 35 | queue_list.add( queue_item.getId()) } 36 | 37 | if ( queue_list.size() == 0 ) { 38 | manager.listener.logger.println ("There is no jobs in the queue of: " + job_name ) 39 | } else { 40 | queue_list.each { queue_id -> 41 | manager.listener.logger.println ("Cancelling queue item: " + queue_id + " of job: " + job_name ) 42 | jenkinsQueue.doCancelItem(queue_id) 43 | } 44 | } 45 | } 46 | manager.listener.logger.println ("") 47 | manager.listener.logger.println ("####################################################################") 48 | manager.listener.logger.println ("# Groovy script: DONE ") 49 | manager.listener.logger.println ("####################################################################") 50 | manager.listener.logger.println ("") 51 | ''') 52 | sandbox(true) 53 | } 54 | behavior(1) 55 | runForMatrixParent(false) 56 | } 57 | 58 | } 59 | } -------------------------------------------------------------------------------- /jenkins-tricks-examples/cancelDownStreamJobQueue/README.md: 
-------------------------------------------------------------------------------- 1 | # Cancel a downstream job queue (before they are scheduled) 2 | 3 | Scenario: A setup where there are long running tests (for example on hardware) and resources are limited. The result is that a long queue 4 | is building up on the test job(s). The project is usually (only) concerned about the latest/newest in the queue rather than the next scheduled. 5 | 6 | ## Prequisites 7 | 8 | ## HowTo -------------------------------------------------------------------------------- /jenkins-tricks-examples/cancelDownStreamJobQueue/cancelDownstreamQueue.groovy: -------------------------------------------------------------------------------- 1 | manager.listener.logger.println ("") 2 | manager.listener.logger.println ("####################################################################") 3 | manager.listener.logger.println ("# Groovy script: Cancelling downstream queue before triggering them ") 4 | manager.listener.logger.println ("####################################################################") 5 | manager.listener.logger.println ("") 6 | 7 | //import jenkins.model.Jenkins 8 | def jenkinsQueue = manager.hudson.instance.queue 9 | 10 | def downstream_jobs = manager.build.getParent().getDownstreamProjects() 11 | 12 | def downstream_job_name = [] 13 | downstream_jobs.each { job -> 14 | downstream_job_name.add( job.getFullName()) 15 | } 16 | 17 | downstream_job_name.each { job_name -> 18 | manager.listener.logger.println ("Downstream project: " + job_name) 19 | def queue = [] 20 | jenkinsQueue.getItems().each { queue_item -> 21 | if ( queue_item.task.getFullName() == job_name ) { 22 | queue.add(queue_item) 23 | } 24 | } 25 | 26 | def queue_list = [] 27 | queue.each { queue_item -> 28 | queue_list.add( queue_item.getId()) } 29 | 30 | if ( queue_list.size() == 0 ) { 31 | manager.listener.logger.println ("There is no jobs in the queue of: " + job_name ) 32 | } else { 33 | queue_list.each { 
queue_id -> 34 | manager.listener.logger.println ("Cancelling queue item: " + queue_id + " of job: " + job_name ) 35 | jenkinsQueue.doCancelItem(queue_id) 36 | } 37 | } 38 | } 39 | manager.listener.logger.println ("") 40 | manager.listener.logger.println ("####################################################################") 41 | manager.listener.logger.println ("# Groovy script: DONE ") 42 | manager.listener.logger.println ("####################################################################") 43 | manager.listener.logger.println ("") 44 | -------------------------------------------------------------------------------- /jenkins-tricks-examples/jenkins-agent-scripts/jenkins--agent-state-n-toogle.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | [[ ${debug:-} == true ]] && set -x 5 | 6 | # inspiration 7 | # https://gist.github.com/scarytom/5910362 8 | # https://lemmster.de/check-jenkins-for-running-executors-remotely-via-curl.html 9 | # https://stackoverflow.com/questions/37227562/how-to-know-whether-the-jenkins-build-executors-are-idle-or-not-in-jenkins-using 10 | # https://docs.cloudbees.com/docs/cloudbees-ci-kb/latest/client-and-managed-masters/execute-groovy-with-a-rest-call 11 | # 12 | 13 | [[ ${1:-} == "" ]] && { 14 | echo "Please specify command as 1st parameter ( offline , online, state )" 15 | exit 1 16 | } 17 | command="$1" 18 | 19 | [[ ${2:-} == "" ]] && { 20 | echo "Please specify agent name as 2nd parameter" 21 | exit 1 22 | } 23 | jenkins_agent_name="$2" 24 | 25 | sleep_sec="5" 26 | jenkins_server="" 27 | 28 | if [[ ${netrc_file:-} != "" ]]; then 29 | echo "Using netrc file $netrc_file" 30 | if [[ ! -f $netrc_file ]]; then 31 | echo "ERROR: $netrc_file does not exist" 32 | exit 1 33 | fi 34 | netrc_file_option="--netrc-file ${netrc_file}" 35 | else 36 | echo "Using default netrc file option. 
--netrc" 37 | netrc_file_option="--netrc-file /cygdrive/c/builds/.netrc" 38 | fi 39 | 40 | jenkins_agent_url="${jenkins_server}/computer/${jenkins_agent_name}" 41 | 42 | function set_busy_executors() { 43 | local -n _busy_execeutors=$1 44 | _busy_execeutors=$(curl -s --insecure ${netrc_file_option:-} --silent ${jenkins_agent_url}/ajaxExecutors \ 45 | | sed -e 's//\n/g' \ 46 | | grep -E '^[0-9].+' \ 47 | | grep href \ 48 | | wc -l \ 49 | ) || { 50 | local exitcode=$? 51 | if [[ $exitcode == 1 ]]; then 52 | _busy_execeutors=0 53 | else 54 | echo "ERROR: Something when wrong - exit code $exitcode" 55 | exit 1 56 | fi 57 | } 58 | } 59 | 60 | function print_state() { 61 | is_offline=$( curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/api/json | jq .offline ) 62 | if [[ $is_offline == false ]]; then 63 | printf "offline=false\n" 64 | else 65 | printf "offline=true\n" 66 | fi 67 | } 68 | 69 | case "${command}" in 70 | offline) 71 | # Mark as offline 72 | is_offline=$( curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/api/json | jq .offline ) 73 | if [[ ${is_offline:-} == false ]]; then 74 | echo "$jenkins_agent_name is online - mark it offline" 75 | curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/toggleOffline --request 'POST' --data 'Marked offline to be able to manage agent / server' 76 | elif [[ ${is_offline:-} == true ]]; then 77 | echo "Agent $jenkins_agent_name is already offline" 78 | exit 79 | else 80 | echo "ERROR: offline is: ${is_offline:-}" 81 | exit 1 82 | fi 83 | 84 | # Assume it has busy executors 85 | busy_execeutors= 86 | set_busy_executors busy_execeutors 87 | while [[ $busy_execeutors -gt 0 ]]; do 88 | echo "There are $busy_execeutors busy executors - Wait $sleep_sec secs and test again" 89 | sleep $sleep_sec 90 | set_busy_executors busy_execeutors 91 | sleep_sec=$(( sleep_sec * 2)) 92 | done 93 | 94 | echo "All executors are done .. 
- Safe to proceed" 95 | 96 | is_offline=$( curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/api/json | jq .offline ) 97 | printf "Agent %s is offline: %s\n" "${jenkins_agent_name}" "$is_offline" 98 | ;; 99 | online) 100 | is_offline=$( curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/api/json | jq .offline ) 101 | printf "Agent %s offline: %s\n" "${jenkins_agent_name}" "$is_offline" 102 | if [[ $is_offline == true ]]; then 103 | echo "Setting online" 104 | curl -s --insecure ${netrc_file_option:-} ${jenkins_agent_url}/toggleOffline --request 'POST' --data 'Mark online' 105 | sleep 1 106 | print_state 107 | elif [[ ${is_offline:-} == false ]]; then 108 | echo "Agent ${jenkins_agent_name} is already online" 109 | exit 110 | else 111 | echo "ERROR: offline is: ${is_offline:-}" 112 | exit 1 113 | fi 114 | ;; 115 | state) 116 | print_state 117 | ;; 118 | *) 119 | echo "Unknown command: $command" 120 | exit 1 121 | ;; 122 | esac 123 | 124 | 125 | -------------------------------------------------------------------------------- /jira/README.md: -------------------------------------------------------------------------------- 1 | # Jira related contributions 2 | 3 | ## Jira filters 4 | 5 | Over time we have created many small snippets of Jira filter we keep re-using. 6 | 7 | You can use them also - check [Jira filter readme](/jira/filters/readme.md) and the subdir [filters](/filters). 8 | -------------------------------------------------------------------------------- /jira/filters/readme.md: -------------------------------------------------------------------------------- 1 | # Jira filters 2 | 3 | You have a good and re-useable filter, for a generic workflow? Share it here... 4 | 5 | * Please put a few lines of use-case for the filter 6 | * Post the filter itself 7 | * Mention any dependencies, e.g. plugins needed 8 | 9 | _The filters also serves as a good guideline on how to work in Jira as they follow many of our best-practices_. 
10 | 11 | ## Assignee resolves, team members close 12 | 13 | To help knowledge sharing and cross-functional team effort it can be a good idea to let team members close issue, after the assignee resolves them. 14 | That way another team member get to read what was done. It also helps to get new ideas and future improvements this workflow, so often new follow-up issues with even better ideas can be created. 15 | 16 | The following filter with find resolved issues withint the last 8 hours. Subscribe to th filter on daily basis, and when you get a mail go over the resolved issues to close those you didn't resolve yourself. 17 | 18 | CICD issues latest resolved: `project = CICD AND status = Resolved AND resolved >= -8h AND status != Closed` 19 | 20 | ## You want to know everything? 21 | 22 | If you truely want to know everything, here are all the issues you're not watching: 23 | 24 | `project = CICD AND creator not in (myjirausername) AND reporter not in (myjirausername) AND assignee not in (myjirausername) AND statusCategory not in (Done) AND watcher not in (myjirausername)` 25 | 26 | ## Filters in filters 27 | 28 | Tracking several filters in say one Kanban board, you can use filters in filter. But be carefull they tend to get really really slow: 29 | 30 | `filter = "12679" OR filter = "12692" OR filter = "12832" OR labels in (CICD-debt) ORDER BY Rank` 31 | 32 | ## All epic tasks 33 | 34 | This finds all tasks that have an epic assigned to them. Easy to get an overview, you can add columns in the view to see the epic. 35 | 36 | `project = "CICD" AND component in ("CoDE") AND status not in (Closed, Resolved) AND "Epic link" != EMPTY ORDER BY "Epic Link"` 37 | 38 | ## Active cases 39 | 40 | Once upon a time a project defined _active cases_ as cases in the CoDE component without epic link, or those with component CoDE but where there was an epic link and the epic in progress. 
41 | That allowed for omitting issues from epic not put in progress yet and do planning on epics wihout someone working on them. 42 | The labels are to remove some general issues with special labels. 43 | 44 | `(project = "CICD" AND component in ("CoDE") AND "Epic link" = EMPTY OR project = "CICD" AND component in ("CoDE") AND "Epic Link" != EMPTY AND issueFunction in linkedIssuesOf("issuetype = Epic AND status = \"In Progress\"")) AND status not in (Closed, Resolved) AND issueFunction not in issueFieldMatch("", labels, "CICD-*|assessment")` 45 | 46 | ## Ad-hoc work is issues without epics 47 | 48 | If we define ad-hoc work, like support requests coming, as those issues not really planned thus not belonging to an epic they can be found with: 49 | 50 | `project = "CICD" AND component = "CoDE" AND "Epic Link" = EMPTY AND statusCategory not in (Done) AND issueFunction not in issueFieldMatch("", labels, "CICD-*|assessment") AND issuetype not in (Epic) AND issueFunction not in linkedIssuesOf("issuetype = Epic")` 51 | 52 | Some labels are not considered workable issues also, thus omitted. 53 | -------------------------------------------------------------------------------- /jira/scripts/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | The scripts contained within this repository are intended to be used within Jira. There are both groovy scripts, 4 | which are dependent upon the [ScriptRunner](https://marketplace.atlassian.com/plugins/com.onresolve.jira.groovy.groovyrunner/server/overview) 5 | plugin, and other add-hoc scripts for administrative purposes. 6 | 7 | ## Jira Scripts 8 | 9 | These are Jira and Script Runner specific. 10 | 11 | They **could** be dependent upon a specific version of the ScriptRunner plugin. There is also the added complexity of the 12 | Jira version. As ScriptRunner provides access to Jira classes with [Groovy](http://www.groovy-lang.org/) this is also a dependency. 
13 | 14 | 15 | **NOTE:** These cannot be called production ready in any sense of the word. Use them as inspiration or a base for 16 | getting started. Most of them were hacked together with the earlier free version(s) of ScriptRunner on a Jira 6.X.X version. 17 | 18 | ### Workflow Scripts 19 | The scripts can be ran from a file or embedded **INLINE** into the post-function, validation or conditions on a transition. 20 | 21 | #### CloneAndLink.groovy And TransitionLinkedDefect.groovy 22 | **Use case** 23 | 24 | A 1st line error management project and multiple 2nd line projects. Defects come into the 1st line project. 1st line 25 | support verifies the defect and determine what 2nd line project(s) should handle it. 1st line support selects the project(s) from 26 | a multi-select list drop down and moves the issue to a status called awaiting action. A clone of the defect is generated 27 | into the selected project(s) and they are linked with a special link type for defects. 28 | 29 | The cloned defect is worked on in the 2nd line support team(s) and resolved. This triggers a transition in the 1st line project 30 | to a status "ready for test", if there are no other cloned defects not resolved. 31 | 32 | * The assignee is set to the project lead of the Target Project. 33 | * The summary is prepended with "CLONE of DEFECT KEY:", where key is the Key of the originating issue. 34 | * The creation date is set to the date of creation. 35 | * Links from Original issue are kept. 36 | * Estimates and time spent are zeroed out on cloned issue. 37 | 38 | ##### How to use 39 | 40 | * The Clone and link script is applied in 1st line support project. 41 | * The Transition linked defect script is applied in all applicable 2nd line support projects. 42 | 43 | **NOTE:** User permissions across the projects must be correct. There is a lot of hardcoded stuff in the scripts which will 44 | need to be cleaned up. 
45 | 46 | #### OriginalEstimateValidation.groovy 47 | The purpose of this script is to ensure sub-tasks Original Estimate field has a value. 48 | 49 | ##### How to use 50 | Use this script as a validator on a transition and give an error message so the user knows why. 51 | 52 | ### JQL Scripts 53 | Script runner provides a way to code JQL queries in scripts. That way we can do queries which are difficult, complex or 54 | impossible with the standard query formats. These scripts must be ran from the file system. Script Runner provides a 55 | script root under JIRA_HOME/scripts. 56 | 57 | #### MismatchedThemes.groovy 58 | The use case is trying to create a hierarchy of Theme->Epic->Story with Jira links. The idea being that Epics and Story's 59 | belong to a Theme. 60 | 61 | Purpose is to find all User Stories that have a linked "Theme" issue type where the Epic of the linked Theme is 62 | **different** than the Epic of the User Story. If the either the User Story or the linked Themes has an Epic Link and the 63 | other does not then this is seen as a mismatch. 64 | 65 | ##### How to use 66 | 67 | * hasEpicMismatchWithTheme(): This will return an error as there are no quotation marks ("") and thus no query. 68 | * hasEpicMismatchWithTheme(""): This will search all issues. 69 | * hasEpicMismatchWithTheme("project = CP"): This will return stories for a specific project. 70 | 71 | ### Scripted Fields 72 | Script Runner allows you to create custom fields based on a groovy script. These fields are **NON** editable and are 73 | calculated. 74 | 75 | #### LastComment.groovy 76 | Use case is showing the last comment of an issue from a JQL search. 77 | 78 | #### TimeSpentAsHours.groovy 79 | Very simple script to calculate value in hours. Jira stores these values in milliseconds and presents them according 80 | to wishes, ie. hours, minutes, etc. The exports to word and xml will use the default estimation set in global 81 | configuration. 
The excel export does not do this :-( Therefor we need a scripted field to be used as a column for JQL searches. 82 | 83 | ## Other 84 | Any other types of scripts that has to do with Jira. 85 | 86 | ### SQL 87 | SQL snippets for working with Jira's database. 88 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/childrenImplementedSolution.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.fields.CustomField 2 | import com.atlassian.jira.issue.IssueManager 3 | import com.atlassian.jira.issue.Issue 4 | import com.atlassian.jira.issue.IssueImpl 5 | import com.atlassian.jira.issue.link.IssueLink 6 | import com.atlassian.jira.issue.link.IssueLinkImpl 7 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 8 | import com.atlassian.jira.issue.link.IssueLinkManager 9 | import com.atlassian.jira.component.ComponentAccessor 10 | 11 | import com.atlassian.jira.component.ComponentAccessor 12 | import com.atlassian.jira.issue.RendererManager 13 | import com.atlassian.jira.issue.fields.renderer.IssueRenderContext 14 | import com.atlassian.jira.issue.fields.renderer.wiki.AtlassianWikiRenderer 15 | 16 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 17 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 18 | 19 | def rendererManager = ComponentAccessor.getComponent(RendererManager) 20 | def renderContext = new IssueRenderContext(issue) 21 | 22 | def EPIC_STORY_LINK = "Epic-Story Link" 23 | 24 | try { 25 | CustomField implementedSolutionCf = customFieldManager.getCustomFieldObjectsByName("Implemented solution")[0] 26 | 27 | def stories = issueLinkManager.getOutwardLinks(issue.id).findAll { 28 | it.issueLinkType.name == EPIC_STORY_LINK 29 | } 30 | 31 | def table = """| *Story* | *Implemented Solution* | 32 | """ 33 | def implSolutionMap = 
stories.collectEntries { IssueLink it -> 34 | [it.getDestinationObject().key, it.getDestinationObject().getCustomFieldValue(implementedSolutionCf)] 35 | } 36 | if(implSolutionMap.keySet().size() > 0){ 37 | for (entry in implSolutionMap) { 38 | def value = entry?.value?.replaceAll("\n", "* ") 39 | table = table.concat("""|${entry.key} | * ${value} | 40 | """) 41 | } 42 | } 43 | 44 | rendererManager.getRenderedContent(AtlassianWikiRenderer.RENDERER_TYPE, table, renderContext) 45 | } catch(Exception e){ 46 | e 47 | } 48 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentAccounting.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Accounting") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | def requirement = issue.getCustomFieldValue(requirementCustomField) 22 | 23 | def issueManager = ComponentAccessor.getIssueManager() 24 | IssueImpl requirementIssue = 
issueManager.getIssueObject(requirement.toString()) as IssueImpl 25 | 26 | def value = requirementIssue.getCustomFieldValue(custom_field) 27 | value 28 | } catch(Exception e){ 29 | null 30 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentCombustionType.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Combustion Type") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | def requirement = issue.getCustomFieldValue(requirementCustomField) 22 | 23 | def issueManager = ComponentAccessor.getIssueManager() 24 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 25 | 26 | def value = requirementIssue.getCustomFieldValue(custom_field) 27 | value 28 | } catch(Exception e){ 29 | null 30 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentDescription.groovy: 
-------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | import com.atlassian.jira.component.ComponentAccessor 13 | import com.atlassian.jira.issue.RendererManager 14 | import com.atlassian.jira.issue.fields.renderer.IssueRenderContext 15 | import com.atlassian.jira.issue.fields.renderer.wiki.AtlassianWikiRenderer 16 | 17 | def issue = issue as Issue 18 | 19 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 20 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 21 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Accounting") as ImmutableCustomField 22 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 23 | 24 | def rendererManager = ComponentAccessor.getComponent(RendererManager) 25 | def renderContext = new IssueRenderContext(issue) 26 | def commentManager = ComponentAccessor.commentManager 27 | 28 | try { 29 | customFieldManager.getCustomFieldObjects(issue) 30 | def requirement = issue.getCustomFieldValue(requirementCustomField) 31 | 32 | def issueManager = ComponentAccessor.getIssueManager() 33 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 34 | 35 | def value = requirementIssue.getDescription() 36 | 37 | if (value) { 38 | rendererManager.getRenderedContent(AtlassianWikiRenderer.RENDERER_TYPE, 
value, renderContext) 39 | } 40 | 41 | } catch(Exception e){ 42 | null 43 | } 44 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentEngineType.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField engine_type = customFieldManager.getCustomFieldObjectByName("Engine Type") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | 22 | def requirement = issue.getCustomFieldValue(requirementCustomField) 23 | 24 | def issueManager = ComponentAccessor.getIssueManager() 25 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 26 | 27 | def engine_type_value = requirementIssue.getCustomFieldValue(engine_type) 28 | engine_type_value 29 | } catch(Exception e){ 30 | null 31 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentExpectedCost.groovy: -------------------------------------------------------------------------------- 1 | import 
com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Expected Costs") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | def requirement = issue.getCustomFieldValue(requirementCustomField) 22 | 23 | def issueManager = ComponentAccessor.getIssueManager() 24 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 25 | 26 | def value = requirementIssue.getCustomFieldValue(custom_field) 27 | value 28 | } catch(Exception e){ 29 | null 30 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentExpectedWorkhours.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import 
com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Expected Work Hours") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | 22 | def requirement = issue.getCustomFieldValue(requirementCustomField) 23 | 24 | def issueManager = ComponentAccessor.getIssueManager() 25 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 26 | 27 | def value = requirementIssue.getCustomFieldValue(custom_field) 28 | value 29 | } catch(Exception e){ 30 | null 31 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentFixVersions-console.groovy: -------------------------------------------------------------------------------- 1 | import org.apache.log4j.Level 2 | import org.apache.log4j.Logger 3 | 4 | import com.atlassian.jira.issue.IssueManager 5 | import com.atlassian.jira.component.ComponentAccessor 6 | import com.atlassian.jira.issue.Issue 7 | import com.atlassian.jira.issue.IssueImpl 8 | import com.atlassian.jira.issue.link.IssueLink 9 | import com.atlassian.jira.issue.link.IssueLinkImpl 10 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 11 | import com.atlassian.jira.issue.link.IssueLinkManager 12 | 13 | import com.atlassian.jira.project.version.Version 14 | 15 | import com.atlassian.jira.issue.fields.CustomField 16 | import 
com.atlassian.jira.issue.fields.ImmutableCustomField 17 | 18 | IssueManager issueManager = ComponentAccessor.getIssueManager() 19 | IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() 20 | def projectComponentManager = ComponentAccessor.getProjectComponentManager() 21 | 22 | @com.onresolve.scriptrunner.parameters.annotation.ShortTextInput(description = "Enter Jira to execute the script on - leave empty for non-debug modes", label = "Jira Request Issue") 23 | String jiraDebugIssue 24 | 25 | Logger logger = Logger.getLogger("parent.FixVersions") 26 | 27 | logger.info("jiraDebugIssue=" + jiraDebugIssue) 28 | 29 | //Issue issue 30 | if ( jiraDebugIssue != null ){ 31 | issue = issueManager.getIssueObject(jiraDebugIssue) // Add an issue for testing 32 | logger.setLevel(Level.ALL) // ALL, WARN 33 | } 34 | 35 | 36 | def findParent(Issue issue, Logger logger, IssueLinkManager issueLinkManager){ 37 | if(issue.subTask){ 38 | return findParent(issue.getParentObject(), logger, issueLinkManager) 39 | } 40 | for(IssueLink issueLink : issueLinkManager.getInwardLinks(issue.id)){ 41 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 42 | logger.debug("Issue link type: " + issueLink.issueLinkType.name + " : " + issueLink.getSourceObject().getKey()) 43 | return issueLink.getSourceObject() 44 | } 45 | } 46 | } 47 | 48 | try { 49 | Issue parentIssue = findParent(issue, logger, issueLinkManager) as Issue 50 | logger.debug(parentIssue) 51 | 52 | parentIssue.getFixVersions() 53 | } catch(Exception e){ 54 | log.error(e) 55 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentFixVersions.groovy: -------------------------------------------------------------------------------- 1 | import org.apache.log4j.Level 2 | import org.apache.log4j.Logger 3 | 4 | import com.atlassian.jira.issue.IssueManager 5 | import com.atlassian.jira.component.ComponentAccessor 6 | import 
com.atlassian.jira.issue.Issue 7 | import com.atlassian.jira.issue.IssueImpl 8 | import com.atlassian.jira.issue.link.IssueLink 9 | import com.atlassian.jira.issue.link.IssueLinkImpl 10 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 11 | import com.atlassian.jira.issue.link.IssueLinkManager 12 | 13 | import com.atlassian.jira.project.version.Version 14 | 15 | import com.atlassian.jira.issue.fields.CustomField 16 | import com.atlassian.jira.issue.fields.ImmutableCustomField 17 | 18 | def findParent(Issue issue, Logger logger, IssueLinkManager issueLinkManager){ 19 | if(issue.subTask){ 20 | return findParent(issue.getParentObject(), logger, issueLinkManager) 21 | } 22 | for(IssueLink issueLink : issueLinkManager.getInwardLinks(issue.id)){ 23 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 24 | logger.debug("Issue link type: " + issueLink.issueLinkType.name + " : " + issueLink.getSourceObject().getKey()) 25 | return issueLink.getSourceObject() 26 | } 27 | } 28 | } 29 | 30 | try { 31 | Logger logger = Logger.getLogger("parent.FixVersions") 32 | IssueManager issueManager = ComponentAccessor.getIssueManager() 33 | 34 | IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() 35 | Issue parentIssue = findParent(issue, logger, issueLinkManager) as Issue 36 | logger.debug(parentIssue) 37 | 38 | parentIssue.getFixVersions() 39 | } catch(Exception e){ 40 | log.error(e) 41 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentID.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.link.IssueLink 3 | import com.atlassian.jira.issue.link.IssueLinkImpl 4 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 5 | import com.atlassian.jira.issue.link.IssueLinkManager 6 | import com.atlassian.jira.component.ComponentAccessor 7 | import 
com.atlassian.jira.issue.fields.CustomField 8 | import com.atlassian.jira.issue.fields.ImmutableCustomField 9 | 10 | class RequirementFinder { 11 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 12 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 13 | 14 | private Issue getRequirementForEpic(Issue epic){ 15 | if(epic == null){ 16 | return null; 17 | } 18 | Issue requirement = null; 19 | Collection links = issueLinkManager.getInwardLinks(epic.getId()) 20 | try { 21 | links = links.findAll({it -> it.getIssueLinkType().getName() == "Hierarchy" && it.sourceObject.getIssueType().getName() == 'Request'}) 22 | return links[0].sourceObject 23 | } catch (Exception e){ 24 | return null 25 | } 26 | } 27 | 28 | public String getRequirement(Issue issueObject){ 29 | if(issueObject?.getIssueType()?.getName() == "Request"){ 30 | return issueObject; // or null 31 | } 32 | if(issueObject?.getIssueType()?.getName() == "Epic"){ 33 | return this.getRequirementForEpic(issueObject) 34 | } 35 | return null; 36 | } 37 | } 38 | def issue = issue as Issue 39 | def requirementFinder = new RequirementFinder(); 40 | requirementFinder.getRequirement(issue) -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentIssueReason.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | 
DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Issue Reason") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | 22 | def requirement = issue.getCustomFieldValue(requirementCustomField) 23 | 24 | def issueManager = ComponentAccessor.getIssueManager() 25 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 26 | 27 | def value = requirementIssue.getCustomFieldValue(custom_field) 28 | value 29 | } catch(Exception e){ 30 | null 31 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentPlatform.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import com.atlassian.jira.issue.Issue 3 | import com.atlassian.jira.issue.IssueImpl 4 | import com.atlassian.jira.issue.link.IssueLink 5 | import com.atlassian.jira.issue.link.IssueLinkImpl 6 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 7 | import com.atlassian.jira.issue.link.IssueLinkManager 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Platform") as ImmutableCustomField 17 | ImmutableCustomField 
requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | 22 | def requirement = issue.getCustomFieldValue(requirementCustomField) 23 | 24 | def issueManager = ComponentAccessor.getIssueManager() 25 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 26 | 27 | def value = requirementIssue.getCustomFieldValue(custom_field) 28 | value 29 | } catch(Exception e){ 30 | null 31 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentReferencedPCB.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Referenced PCB-ID") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | try { 19 | customFieldManager.getCustomFieldObjects(issue) 20 | 21 | def requirement = issue.getCustomFieldValue(requirementCustomField) 22 | 23 | def issueManager = ComponentAccessor.getIssueManager() 24 | IssueImpl 
requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 25 | 26 | def value = requirementIssue.getCustomFieldValue(custom_field) 27 | value 28 | } catch(Exception e){ 29 | null 30 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentSiblings.groovy: -------------------------------------------------------------------------------- 1 | import org.apache.log4j.Level 2 | import org.apache.log4j.Logger 3 | 4 | import com.atlassian.jira.issue.IssueManager 5 | import com.atlassian.jira.component.ComponentAccessor 6 | import com.atlassian.jira.issue.Issue 7 | import com.atlassian.jira.issue.IssueImpl 8 | import com.atlassian.jira.issue.link.IssueLink 9 | import com.atlassian.jira.issue.link.IssueLinkImpl 10 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 11 | import com.atlassian.jira.issue.link.IssueLinkManager 12 | 13 | import com.atlassian.jira.project.version.Version 14 | 15 | import com.atlassian.jira.issue.fields.CustomField 16 | import com.atlassian.jira.issue.fields.ImmutableCustomField 17 | //import com.atlassian.jira.issue.RendererManager 18 | //import com.atlassian.jira.issue.fields.renderer.IssueRenderContext 19 | //import com.atlassian.jira.issue.fields.renderer.wiki.AtlassianWikiRenderer 20 | 21 | IssueManager issueManager = ComponentAccessor.getIssueManager() 22 | 23 | @com.onresolve.scriptrunner.parameters.annotation.ShortTextInput(description = "Enter Jira to execute the script on - leave empty for non-debug modes", label = "Jira Request Issue") 24 | String jiraDebugIssue 25 | 26 | Logger logger = Logger.getLogger("parent.parentSiblings") 27 | 28 | logger.info("jiraDebugIssue=" + jiraDebugIssue) 29 | 30 | //Issue issue 31 | if ( jiraDebugIssue != null ){ 32 | issue = issueManager.getIssueObject(jiraDebugIssue) // Add an issue for testing 33 | logger.setLevel(Level.ALL) // ALL, WARN 34 | } 35 | 36 | 37 | 38 | def findParent(Issue issue, Logger 
logger, IssueLinkManager issueLinkManager){ 39 | if(issue.subTask){ 40 | logger.info("Issue link type subtask - next: " + issue.subTask ) 41 | return findParent(issue.getParentObject(), logger, issueLinkManager) 42 | } 43 | for(IssueLink issueLink : issueLinkManager.getInwardLinks(issue.id)){ 44 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 45 | logger.info("Issue link type E: " + issueLink.issueLinkType.name + " : " + issueLink.getSourceObject().getKey()) 46 | return issueLink.getSourceObject() 47 | } 48 | } 49 | } 50 | 51 | try { 52 | 53 | IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() 54 | Issue parentIssue = findParent(issue, logger, issueLinkManager) as Issue 55 | Issue parentIssueNotNull 56 | if ( parentIssue ) { 57 | parentIssueNotNull=parentIssue 58 | logger.info("Returned: " + parentIssueNotNull) 59 | } else { 60 | logger.info("Returned null: " + parentIssueNotNull) 61 | } 62 | 63 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 64 | ImmutableCustomField siblings_cf = customFieldManager.getCustomFieldObjectsByName("Siblings")[0] as ImmutableCustomField 65 | def value 66 | if ( parentIssueNotNull ) { 67 | value = parentIssueNotNull.getCustomFieldValue(siblings_cf) 68 | logger.info("value: " + value) 69 | if (value) { 70 | value 71 | /* 72 | // It is already rendered in parent: 73 | def rendererManager = ComponentAccessor.getComponent(RendererManager) 74 | def renderContext = new IssueRenderContext(issue) 75 | rendererManager.getRenderedContent(AtlassianWikiRenderer.RENDERER_TYPE, value, renderContext) 76 | */ 77 | } 78 | } 79 | 80 | } catch(Exception e){ 81 | log.error(e) 82 | } 83 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentSiteType.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.issue.Issue 2 | import com.atlassian.jira.issue.IssueImpl 3 | import 
com.atlassian.jira.issue.link.IssueLink 4 | import com.atlassian.jira.issue.link.IssueLinkImpl 5 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.component.ComponentAccessor 8 | 9 | import com.atlassian.jira.issue.fields.CustomField 10 | import com.atlassian.jira.issue.fields.ImmutableCustomField 11 | 12 | def issue = issue as Issue 13 | 14 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 15 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 16 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Site Type") as ImmutableCustomField 17 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 18 | 19 | try { 20 | customFieldManager.getCustomFieldObjects(issue) 21 | 22 | def requirement = issue.getCustomFieldValue(requirementCustomField) 23 | 24 | def issueManager = ComponentAccessor.getIssueManager() 25 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 26 | 27 | def value = requirementIssue.getCustomFieldValue(custom_field) 28 | value 29 | } catch(Exception e){ 30 | null 31 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/parentTargetProducts.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import org.apache.log4j.Level 3 | import org.apache.log4j.Logger 4 | import com.atlassian.jira.issue.Issue 5 | import com.atlassian.jira.issue.IssueImpl 6 | import com.atlassian.jira.issue.link.IssueLink 7 | import com.atlassian.jira.issue.link.IssueLinkImpl 8 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 9 | import com.atlassian.jira.issue.link.IssueLinkManager 10 
| 11 | import com.atlassian.jira.issue.fields.CustomField 12 | import com.atlassian.jira.issue.fields.ImmutableCustomField 13 | 14 | Logger logger = Logger.getLogger("sacos.parent.Target products") 15 | //logger.setLevel(Level.DEBUG) 16 | 17 | //def issue = issue as Issue 18 | 19 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 20 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 21 | ImmutableCustomField custom_field = customFieldManager.getCustomFieldObjectByName("Target products") as ImmutableCustomField 22 | logger.info(custom_field.fieldName) 23 | ImmutableCustomField parentCustomField = customFieldManager.getCustomFieldObjectByName("Parent - ID") as ImmutableCustomField 24 | logger.info(parentCustomField.fieldName) 25 | 26 | try { 27 | customFieldManager.getCustomFieldObjects(issue) 28 | def issueManager = ComponentAccessor.getIssueManager() 29 | IssueImpl parentIssue = issueManager.getIssueObject(issue.getCustomFieldValue(parentCustomField).toString()) as IssueImpl 30 | logger.info(parentIssue) 31 | 32 | def value = parentIssue.getCustomFieldValue(custom_field) 33 | return value 34 | } catch(Exception e){ 35 | logger.error(e) 36 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/siblings.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | 3 | import com.atlassian.jira.issue.Issue 4 | import com.atlassian.jira.issue.IssueImpl 5 | import com.atlassian.jira.issue.RendererManager 6 | 7 | import com.atlassian.jira.issue.fields.renderer.IssueRenderContext 8 | import com.atlassian.jira.issue.fields.renderer.wiki.AtlassianWikiRenderer 9 | 10 | import com.atlassian.jira.issue.link.DefaultIssueLinkManager 11 | import com.atlassian.jira.issue.link.IssueLink 12 | import com.atlassian.jira.issue.link.IssueLinkImpl 13 | import 
com.atlassian.jira.issue.link.IssueLinkManager 14 | 15 | import com.atlassian.jira.issue.fields.CustomField 16 | import com.atlassian.jira.issue.fields.ImmutableCustomField 17 | 18 | try { 19 | DefaultIssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() as DefaultIssueLinkManager 20 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 21 | ImmutableCustomField requirementCustomField = customFieldManager.getCustomFieldObjectsByName("Parent - ID")[0] as ImmutableCustomField 22 | 23 | def rendererManager = ComponentAccessor.getComponent(RendererManager) 24 | def renderContext = new IssueRenderContext(issue) 25 | def commentManager = ComponentAccessor.commentManager 26 | def comment = commentManager.getLastComment(issue) 27 | 28 | def requirement = issue.getCustomFieldValue(requirementCustomField) 29 | 30 | def issueManager = ComponentAccessor.getIssueManager() 31 | IssueImpl requirementIssue = issueManager.getIssueObject(requirement.toString()) as IssueImpl 32 | 33 | def links = issueLinkManager.getOutwardLinks(requirementIssue.getId()) 34 | def linksMarkdown = links.findAll({ 35 | IssueLink it -> it.destinationObject.key != issue.key 36 | }).collect({ 37 | IssueLink it -> "[" + it.destinationObject.key + "] *Status*: " + 38 | it.destinationObject.getStatus().getName() + " *Fix version(s)*: " + 39 | it.destinationObject.getFixVersions().join(", ") + 40 | "\n" 41 | }).join(" ") 42 | 43 | if (linksMarkdown) { 44 | rendererManager.getRenderedContent(AtlassianWikiRenderer.RENDERER_TYPE, linksMarkdown, renderContext) 45 | } 46 | } catch(Exception e){ 47 | null 48 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/sumEstOriginalTime.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import 
com.atlassian.jira.component.ComponentAccessor; 4 | import org.apache.log4j.Level 5 | import org.apache.log4j.Logger 6 | 7 | import com.atlassian.jira.issue.search.SearchProvider 8 | import com.atlassian.jira.jql.parser.JqlQueryParser 9 | import com.atlassian.jira.web.bean.PagerFilter 10 | 11 | import com.atlassian.jira.issue.Issue 12 | import com.atlassian.jira.issue.IssueManager 13 | 14 | IssueManager issueManager = ComponentAccessor.getIssueManager() 15 | def issueLinkManager = ComponentAccessor.getIssueLinkManager() 16 | def cfManager = ComponentAccessor.getCustomFieldManager() 17 | 18 | Logger logger = Logger.getLogger("sacos.aggregate.estimation") 19 | logger.setLevel(Level.ALL) 20 | 21 | @com.onresolve.scriptrunner.parameters.annotation.ShortTextInput(description = "Enter Jira to execute the script on - leave empty for non-debug modes", label = "Jira Request Issue") 22 | String jiraDebugIssue 23 | 24 | logger.info("jiraDebugIssue=" + jiraDebugIssue) 25 | 26 | //Issue issue 27 | if ( jiraDebugIssue != null ){ 28 | issue = issueManager.getIssueObject(jiraDebugIssue) // Add an issue for testing 29 | logger.setLevel(Level.ALL) // ALL, WARN 30 | } 31 | 32 | 33 | long totalOrigTime = 0 34 | if (issue.getIssueTypeId() != "10000") { 35 | logger.info("Issue type is not executed on type: " + issue.getIssueType().getName()) 36 | return null 37 | } else { 38 | logger.info("Issue type is Epic - proceed: " + issue.getIssueType().getName()) 39 | } 40 | 41 | issueLinkManager.getOutwardLinks(issue.id)?.each {issueLink -> 42 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 43 | long origTime = issueLink.destinationObject.originalEstimate?:0 44 | if ( origTime != 0 ){ 45 | logger.info(issueLink.destinationObject.getIssueType().name + ": " + issueLink.destinationObject.getKey() + " : " + issueLink.issueLinkType.name + " : OK : originalEstimate=" + origTime ) 46 | totalOrigTime = origTime + totalOrigTime; 47 | logger.info("Aggregated : " + totalOrigTime ) 48 | } 49 | 
issueLink.destinationObject.getSubTaskObjects()?.each { issueSubtask -> 50 | long origTimeSubtask = issueSubtask.originalEstimate?:0 51 | if ( origTimeSubtask != 0 ){ 52 | logger.info("Subtask: " + issueSubtask.getKey() + " : OK : originalEstimate=" + origTimeSubtask ) 53 | totalOrigTime = origTimeSubtask + totalOrigTime; 54 | logger.info("Aggregated : " + totalOrigTime ) 55 | } 56 | } 57 | } else { 58 | logger.info("Issue link type: " + issueLink.issueLinkType.name + " : -skip") 59 | } 60 | 61 | } 62 | logger.info("Total time estimate: " + totalOrigTime ) 63 | 64 | return totalOrigTime 65 | 66 | 67 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/sumEstOriginalTimeStoryOnly.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import com.atlassian.jira.component.ComponentAccessor; 4 | import org.apache.log4j.Level 5 | import org.apache.log4j.Logger 6 | 7 | import com.atlassian.jira.issue.search.SearchProvider 8 | import com.atlassian.jira.jql.parser.JqlQueryParser 9 | import com.atlassian.jira.web.bean.PagerFilter 10 | 11 | def issueLinkManager = ComponentAccessor.getIssueLinkManager() 12 | def cfManager = ComponentAccessor.getCustomFieldManager() 13 | 14 | Logger logger = Logger.getLogger("sacos.aggregate.estimation") 15 | logger.setLevel(Level.ALL) 16 | 17 | long totalOrigTime = 0 18 | if (issue.getIssueTypeId() != "10000") { 19 | logger.info("Issue type is not executed on type: " + issue.getIssueType().getName()) 20 | return null 21 | } else { 22 | logger.info("Issue type is Epic - proceed: " + issue.getIssueType().getName()) 23 | } 24 | 25 | issueLinkManager.getOutwardLinks(issue.id)?.each {issueLink -> 26 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 27 | long origTime = issueLink.destinationObject.originalEstimate?:0 28 | 
logger.info("Issue link type: " + issueLink.issueLinkType.name + " : OK : originalEstimate=" + origTime ) 29 | totalOrigTime = origTime + totalOrigTime; 30 | } else { 31 | logger.info("Issue link type: " + issueLink.issueLinkType.name + " : -skip") 32 | } 33 | } 34 | logger.info("Total time estimate: " + totalOrigTime ) 35 | 36 | return totalOrigTime 37 | 38 | 39 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/sumEstOriginalTimeStorySubtask.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import com.atlassian.jira.component.ComponentAccessor; 4 | import org.apache.log4j.Level 5 | import org.apache.log4j.Logger 6 | 7 | import com.atlassian.jira.issue.search.SearchProvider 8 | import com.atlassian.jira.jql.parser.JqlQueryParser 9 | import com.atlassian.jira.web.bean.PagerFilter 10 | 11 | import com.atlassian.jira.issue.Issue 12 | import com.atlassian.jira.issue.IssueManager 13 | 14 | IssueManager issueManager = ComponentAccessor.getIssueManager() 15 | def issueLinkManager = ComponentAccessor.getIssueLinkManager() 16 | def cfManager = ComponentAccessor.getCustomFieldManager() 17 | 18 | Logger logger = Logger.getLogger("sacos.aggregate.estimation") 19 | logger.setLevel(Level.ALL) 20 | 21 | @com.onresolve.scriptrunner.parameters.annotation.ShortTextInput(description = "Enter Jira to execute the script on - leave empty for non-debug modes", label = "Jira Request Issue") 22 | String jiraDebugIssue 23 | 24 | logger.info("jiraDebugIssue=" + jiraDebugIssue) 25 | 26 | Issue issue 27 | if ( jiraDebugIssue != null ){ 28 | issue = issueManager.getIssueObject(jiraDebugIssue) // Add an issue for testing 29 | logger.setLevel(Level.ALL) // ALL, WARN 30 | } 31 | 32 | 33 | long totalOrigTime = 0 34 | if (issue.getIssueTypeId() != "10000") { 35 | logger.info("Issue type is 
not executed on type: " + issue.getIssueType().getName()) 36 | return null 37 | } else { 38 | logger.info("Issue type is Epic - proceed: " + issue.getIssueType().getName()) 39 | } 40 | 41 | issueLinkManager.getOutwardLinks(issue.id)?.each {issueLink -> 42 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 43 | long origTime = issueLink.destinationObject.originalEstimate?:0 44 | if ( origTime != 0 ){ 45 | logger.info(issueLink.destinationObject.getIssueType().name + ": " + issueLink.destinationObject.getKey() + " : " + issueLink.issueLinkType.name + " : OK : originalEstimate=" + origTime ) 46 | totalOrigTime = origTime + totalOrigTime; 47 | logger.info("Aggregated : " + totalOrigTime ) 48 | } 49 | issueLink.destinationObject.getSubTaskObjects()?.each { issueSubtask -> 50 | long origTimeSubtask = issueSubtask.originalEstimate?:0 51 | if ( origTimeSubtask != 0 ){ 52 | logger.info("Subtask: " + issueSubtask.getKey() + " : OK : originalEstimate=" + origTimeSubtask ) 53 | totalOrigTime = origTimeSubtask + totalOrigTime; 54 | logger.info("Aggregated : " + totalOrigTime ) 55 | } 56 | } 57 | } else { 58 | logger.info("Issue link type: " + issueLink.issueLinkType.name + " : -skip") 59 | } 60 | 61 | } 62 | logger.info("Total time estimate: " + totalOrigTime ) 63 | 64 | return totalOrigTime 65 | 66 | 67 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/fields/sumEstStoryPoints.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import com.atlassian.jira.component.ComponentAccessor; 4 | import org.apache.log4j.Level 5 | import org.apache.log4j.Logger 6 | 7 | import com.atlassian.jira.issue.search.SearchProvider 8 | import com.atlassian.jira.jql.parser.JqlQueryParser 9 | import com.atlassian.jira.web.bean.PagerFilter 10 | 11 | def issueLinkManager = 
ComponentAccessor.getIssueLinkManager() 12 | def cfManager = ComponentAccessor.getCustomFieldManager() 13 | 14 | Logger logger = Logger.getLogger("sacos.aggregate.estimation") 15 | logger.setLevel(Level.ALL) 16 | 17 | double totalSP = 0 18 | if (issue.getIssueTypeId() != "10000") { 19 | logger.info("Issue type is not executed on type: " + issue.getIssueType().getName()) 20 | return null 21 | } else { 22 | logger.info("Issue type is Epic - proceed: " + issue.getIssueType().getName()) 23 | } 24 | 25 | issueLinkManager.getOutwardLinks(issue.id)?.each {issueLink -> 26 | if (issueLink.issueLinkType.name == "Epic-Story Link" ) { 27 | def customFieldSP = ComponentAccessor.getCustomFieldManager().getCustomFieldObject("customfield_10006"); 28 | double SP = (double)(issueLink.destinationObject.getCustomFieldValue(customFieldSP) ?: 0) 29 | logger.info("Issue link type: " + issueLink.issueLinkType.name + " : OK : SP=" + SP ) 30 | totalSP = SP + totalSP; 31 | } else { 32 | logger.info("Issue link type: " + issueLink.issueLinkType.name + " : -skip") 33 | } 34 | } 35 | return totalSP 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/epicValidationStoriesDone.groovy: -------------------------------------------------------------------------------- 1 | // Not tested 2 | 3 | import com.atlassian.jira.component.ComponentAccessor 4 | import com.atlassian.jira.workflow.TransitionOptions 5 | 6 | // the name of the action you want to move the issue to 7 | final actionName = 'Close' 8 | 9 | //Name of the resolution children issues should have 10 | final resolutionName = 'In validation' 11 | 12 | // the name of the issue link 13 | final issueLinkName = 'Epic-Story Link' 14 | 15 | def workflow = ComponentAccessor.workflowManager.getWorkflow(issue) 16 | def actionId = workflow.allActions.findByName(actionName)?.id 17 | def linkManager = ComponentAccessor.issueLinkManager 18 | 19 | def epicIssue = 
linkManager.getInwardLinks(issue.id).find { it.issueLinkType.name == issueLinkName }?.sourceObject 20 | if (!epicIssue) { 21 | return 22 | } 23 | 24 | // Find all the linked - with the "Epic-Story Link" link - issues that their status is not the same as resolutionName 25 | 26 | def linkedIssues = linkManager 27 | .getOutwardLinks(epicIssue.id) 28 | .findAll { it.issueLinkType.name == issueLinkName } 29 | *.destinationObject?.findAll { it.resolution?.name != resolutionName } 30 | 31 | // If there are still open linked issues (except the one in transition) - then do nothing 32 | 33 | if (linkedIssues - issue) { 34 | return 35 | } 36 | 37 | def issueService = ComponentAccessor.issueService 38 | def inputParameters = issueService.newIssueInputParameters() 39 | 40 | inputParameters.setComment('This Epic closed automatically because all the issues in this Epic are closed.') 41 | inputParameters.setSkipScreenCheck(true) 42 | 43 | def transitionOptions = new TransitionOptions.Builder() 44 | .skipConditions() 45 | .skipPermissions() 46 | .skipValidators() 47 | .build() 48 | 49 | def loggedInUser = ComponentAccessor.jiraAuthenticationContext.loggedInUser 50 | def transitionValidationResult = issueService.validateTransition(loggedInUser, epicIssue.id, actionId, inputParameters, transitionOptions) 51 | assert transitionValidationResult.valid: transitionValidationResult.errorCollection 52 | 53 | def result = issueService.transition(loggedInUser, transitionValidationResult) 54 | assert result.valid: result.errorCollection -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/implSolutionRequired.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import com.opensymphony.workflow.InvalidInputException 4 | 5 | CustomFieldManager cfManager = 
ComponentAccessor.getCustomFieldManager() 6 | implSolutionCf = cfManager.getCustomFieldObjectsByName("Implemented solution")[0] 7 | 8 | log.warn(implSolutionCf) 9 | def implSolution = issue.getCustomFieldValue(implSolutionCf) 10 | log.warn(implSolution) 11 | 12 | if(["Bug", "Story", "Eval"].contains(issue.getIssueType().getName())){ 13 | log.warn("Story, Bug, Eval") 14 | if(!implSolution?.trim()){ 15 | throw new InvalidInputException("Implemented Solution is required for Story, Bug, Eval") 16 | false 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/requestDoneEpicsDone.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import com.atlassian.jira.workflow.TransitionOptions 3 | 4 | // the name of the action you want to move the issue to 5 | final actionName = 'Close' 6 | 7 | //Name of the resolution children issues should have 8 | final resolutionName = 'Done' 9 | 10 | // the name of the issue link 11 | final issueLinkName = 'Epic-Story Link' // FIX: was '' — no link type has an empty name, so epicIssue was never found and the script always returned without doing anything 12 | 13 | def workflow = ComponentAccessor.workflowManager.getWorkflow(issue) 14 | def actionId = workflow.allActions.findByName(actionName)?.id 15 | def linkManager = ComponentAccessor.issueLinkManager 16 | 17 | def epicIssue = linkManager.getInwardLinks(issue.id).find { it.issueLinkType.name == issueLinkName }?.sourceObject 18 | if (!epicIssue) { 19 | return 20 | } 21 | 22 | // Find all the linked - with the "Epic-Story Link" link - issues that their status is not the same as resolutionName 23 | 24 | def linkedIssues = linkManager 25 | .getOutwardLinks(epicIssue.id) 26 | .findAll { it.issueLinkType.name == issueLinkName } 27 | *.destinationObject?.findAll { it.resolution?.name != resolutionName } 28 | 29 | // If there are still open linked issues (except the one in transition) - then do nothing 30 | 31 | if (linkedIssues - issue) { 32 | return 33 | } 34 | 35 
| def issueService = ComponentAccessor.issueService 36 | def inputParameters = issueService.newIssueInputParameters() 37 | 38 | inputParameters.setComment('This Epic closed automatically because all the issues in this Epic are closed.') 39 | inputParameters.setSkipScreenCheck(true) 40 | 41 | def transitionOptions = new TransitionOptions.Builder() 42 | .skipConditions() 43 | .skipPermissions() 44 | .skipValidators() 45 | .build() 46 | 47 | def loggedInUser = ComponentAccessor.jiraAuthenticationContext.loggedInUser 48 | def transitionValidationResult = issueService.validateTransition(loggedInUser, epicIssue.id, actionId, inputParameters, transitionOptions) 49 | assert transitionValidationResult.valid: transitionValidationResult.errorCollection 50 | 51 | def result = issueService.transition(loggedInUser, transitionValidationResult) 52 | assert result.valid: result.errorCollection -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/setEpicStatusDone.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.ComponentManager; 2 | import com.atlassian.jira.component.ComponentAccessor; 3 | import com.atlassian.jira.issue.CustomFieldManager; 4 | import com.atlassian.jira.issue.fields.CustomField; 5 | import com.atlassian.jira.issue.IssueManager; 6 | import com.atlassian.jira.issue.MutableIssue; 7 | import com.atlassian.jira.issue.Issue; 8 | import com.atlassian.jira.user.ApplicationUser; 9 | import com.atlassian.jira.bc.issue.IssueService 10 | import com.atlassian.jira.component.ComponentAccessor 11 | import com.atlassian.jira.issue.IssueInputParametersImpl 12 | 13 | def currentUser = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 14 | def issueManager = ComponentAccessor.issueManager 15 | IssueService issueService = ComponentAccessor.getIssueService() 16 | def actionId = 31 // change this to the step that you want the issues to 
be transitioned to 17 | 18 | def transitionValidationResult 19 | def transitionResult 20 | 21 | 22 | def epicLink = ComponentAccessor.customFieldManager.getCustomFieldObjectByName("Epic Link") 23 | def epic = issue.getCustomFieldValue(epicLink) as String 24 | if(epic){ 25 | def issueE = issueManager.getIssueObject(epic); 26 | if(issueE.getStatus().name != "In Progress" ) 27 | { 28 | transitionValidationResult = issueService.validateTransition(currentUser, issueE.id, actionId,new IssueInputParametersImpl()) 29 | if (transitionValidationResult.isValid()) { 30 | transitionResult = issueService.transition(currentUser, transitionValidationResult) 31 | if (transitionResult.isValid()) 32 | { log.debug("Transitioned issue $issue through action $actionId") } 33 | else 34 | { log.debug("Transition result is not valid") } 35 | }else{ 36 | log.debug("The transitionValidation is not valid") 37 | } 38 | 39 | } 40 | } -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/validators/blockEpic2DoneRejectedIfChildsNotRejectedDone.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import com.atlassian.jira.issue.Issue 3 | import com.atlassian.jira.issue.IssueManager 4 | import com.atlassian.jira.issue.issuetype.IssueType 5 | import com.atlassian.jira.issue.link.IssueLink 6 | import com.atlassian.jira.issue.link.IssueLinkManager 7 | import com.atlassian.jira.issue.link.IssueLinkType 8 | import com.atlassian.jira.issue.status.Status 9 | import com.atlassian.jira.user.ApplicationUser 10 | import com.opensymphony.workflow.InvalidInputException 11 | 12 | IssueManager issueManager = ComponentAccessor.getIssueManager() 13 | ApplicationUser user = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 14 | IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() 15 | 16 | Issue epic = issue as Issue 
17 | 18 | List allOutIssueLink = issueLinkManager.getOutwardLinks(epic.getId()) 19 | 20 | allOutIssueLink.each { IssueLink it -> 21 | Issue linkedIssue = it.destinationObject 22 | Status status = linkedIssue.getStatus() 23 | IssueLinkType issueLinkType = it.getIssueLinkType() 24 | 25 | if(!["Rejected", "Done"].contains(status.getName()) && issueLinkType.getOutward() == "is Epic of"){ 26 | throw new InvalidInputException("All stories and bugs in epic must be either Rejected or Done"); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /jira/scripts/ScriptRunner/workflow/validators/implSolutionRequiredv2.groovy: -------------------------------------------------------------------------------- 1 | import com.atlassian.jira.component.ComponentAccessor 2 | import com.atlassian.jira.issue.CustomFieldManager 3 | import com.opensymphony.workflow.InvalidInputException 4 | 5 | CustomFieldManager cfManager = ComponentAccessor.getCustomFieldManager() 6 | implSolutionCf = cfManager.getCustomFieldObjectsByName("Implemented solution")[0] 7 | 8 | log.warn(implSolutionCf) 9 | def implSolution = issue.getCustomFieldValue(implSolutionCf) 10 | log.warn(implSolution) 11 | 12 | if(["Bug", "Story"].contains(issue.getIssueType().getName())){ 13 | log.warn("Story or Bug") 14 | if(!implSolution?.trim()){ 15 | throw new InvalidInputException("Implemented Solution is required for Story and Bug") 16 | false 17 | } 18 | } -------------------------------------------------------------------------------- /jira/scripts/component_delete_from_parameters.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -u 5 | 6 | [[ ${debug:-} == true ]] && set -x 7 | 8 | [[ ${jira_server:-} == true ]] && ( echo "jira_server is not set" && exit 1 ) 9 | 10 | jira_key="${1}" 11 | component_name="${2}" 12 | 13 | netrc_file=~/.netrc 14 | 15 | 16 | component_already_exists=$(curl --fail 
--insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/project/${jira_key}/components \ 17 | | jq -r ".[] | select(.name==\"${component_name}\").name" ) 18 | if [[ "${component_already_exists:-}" == "${component_name}" ]] ; then 19 | printf "Component: ${component_name} exists in project: ${jira_key} - delete :" 20 | else 21 | printf "Component: ${component_name} in project: ${jira_key} - does not exist - exit" 22 | exit 0 23 | fi 24 | 25 | component_id=$(curl --fail --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/project/${jira_key}/components \ 26 | | jq -r ".[] | select(.name==\"${component_name}\").id" ) 27 | 28 | # FIX: script previously stopped at a bare 'exit' here, followed by unreachable copy-pasted CREATE code (POST with a jira_component2.json this script never writes) — it never deleted anything. Now actually delete the component by id. 29 | if ! curl --fail --insecure --netrc-file ${netrc_file} -X DELETE -o - --silent --url ${jira_server}/rest/api/2/component/${component_id} > /dev/null; then 30 | echo "Failed.. deleting component: ${component_name} (id: ${component_id}) - exit 1" 31 | exit 1 32 | else 33 | printf "Done\n" 34 | fi 35 | -------------------------------------------------------------------------------- /jira/scripts/create-jira-project-from-shared-config.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | 5 | [[ ${debug:-} == true ]] && set -x 6 | set -u 7 | set -e 8 | 9 | 10 | [[ ${1:-} == "" ]] && echo "Please parse new project's desired key to as parameter 1" 11 | jira_project_new_key="$1" 12 | 13 | [[ ${2:-} == "" ]] && echo "Please parse new project's desired name to as parameter 2" 14 | jira_project_new_name="$2" 15 | 16 | [[ ${3:-} == "" ]] && echo "Please parse the project template to create from as parameter 3" 17 | jira_project_template_key="$3" 18 | 19 | [[ ${4:-} == "" ]] && echo "Please parse the project lead to create from as parameter 4" 20 | jira_project_lead="$4" 21 | 22 | if [[ ${create_mode:-} == "" ]]; then 23 | create_mode="skipOcreate" #deleteNcreate , skipNcreate(default), delete 24 | echo "Using default mode: create_mode=$create_mode - Options: deleteNcreate , skipOcreate(default), delete" 25 | else 26 | echo "Using create_mode: $create_mode - Options: deleteNcreate , skipOcreate(default), delete" 27 | fi 28 | 29 | netrc_file=~/.netrc # FIX: was $(echo ~).netrc, which expands to e.g. /home/user.netrc (missing slash) and never matches the real ~/.netrc used by the sibling scripts 30 | 31 | curl_GET_cmd="curl --fail --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - " 32 | curl_POST_cmd="curl --fail --insecure --netrc-file ${netrc_file} -X POST -H Content-Type:application/json -o - " 33 | curl_DELETE_cmd="curl --fail --insecure --netrc-file ${netrc_file} -X DELETE -H Content-Type:application/json -o - " 34 | 35 | project_key_found=$(${curl_GET_cmd} --silent --url "${jira_server}/rest/api/2/project/${jira_project_new_key}" | jq -r .key ) 36 | 37 | if [[ $create_mode == "delete" ]]; then 38 | if [[ "${project_key_found}" == "${jira_project_new_key}" ]] ; then 39 | printf "Project found: $project_key_found - 
delete it... and it might take some time depend on the amount of issues.." 40 | printf " - but sleep for 10 sec to offer abort...\n" 41 | sleep 10 42 | ${curl_DELETE_cmd} --url ${jira_server}/rest/api/2/project/${jira_project_new_key} 43 | exit 0 44 | else 45 | echo "Project not found: $jira_project_new_key - skip deleting" 46 | exit 0 47 | fi 48 | fi 49 | 50 | project_template_id=$(${curl_GET_cmd} --silent --url ${jira_server}/rest/api/2/project/${jira_project_template_key} | jq -r .id ) 51 | if [[ ${project_template_id} == "" ]] ; then 52 | echo "Template project: $jira_project_template_key NOT found on server: $jira_server" 53 | exit 1 54 | fi 55 | 56 | if [[ "${project_key_found}" == "${jira_project_new_key}" ]] ; then 57 | if [[ $create_mode == "deleteNcreate" ]]; then 58 | printf "Project found: $project_key_found - delete it first... and it might take some depend on the amount of issues.." 59 | if [[ ${force:-} == true ]] ; then 60 | printf " - run in force mode without sleeping\n" 61 | else 62 | printf " - but sleep for 10 sec to offer abort...\n" 63 | sleep 10 64 | fi 65 | echo ".. starting .." 
66 | ${curl_DELETE_cmd} --url ${jira_server}/rest/api/2/project/${jira_project_new_key} 67 | fi 68 | if [[ $create_mode == "skipOcreate" ]]; then 69 | echo "Project found: $project_key_found - skip" 70 | exit 0 71 | fi 72 | else 73 | echo "Project: $jira_project_new_key not found on server: $jira_server - create it" 74 | fi 75 | 76 | echo "Using project: $jira_project_template_key as template on $jira_server" 77 | 78 | echo "Create new project key:$jira_project_new_key name:\"$jira_project_new_name\" from $jira_project_template_key" 79 | $curl_POST_cmd -d "{ \"key\":\"${jira_project_new_key}\", \"name\":\"${jira_project_new_name}\",\"lead\":\"$jira_project_lead\" }" \ 80 | --url ${jira_server}/rest/project-templates/1.0/createshared/${project_template_id} 81 | echo 82 | 83 | #curl -D- -u : -H "Content-Type:application/json" -X POST -d '{"user":["username"]}' -k https://jira-stg.example.com/rest/api/2/project/ABC/role/10002 84 | -------------------------------------------------------------------------------- /jira/scripts/create-projects-n-update-components.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | set -u 4 | 5 | [[ ${debug:-} == true ]] && set -x 6 | 7 | export jira_server="https://[:8080]" 8 | 9 | echo "Using Jira server: $jira_server" 10 | 11 | # Load functions 12 | source ${BASH_SOURCE%/*}/_jira_project_create_update_functions.sh|| source ./_jira_project_create_update_functions.sh 13 | 14 | 15 | if [[ ${1:-} == "" ]] ; then 16 | echo "Import files not in use : - skip" 17 | else 18 | jira_import_files=$1 19 | IFS=" " 20 | for jira_import_file in $jira_import_files; do 21 | if [[ -e ${jira_import_file} ]]; then 22 | echo "Using $jira_import_file" 23 | else 24 | echo "file '${jira_import_file}' does not exists" 25 | exit 1 26 | fi 27 | done 28 | unset IFS 29 | fi 30 | 31 | 32 | ################# 33 | # 34 | # Team project 35 | # 36 | ################# 37 | export mode="skipOcreate" 38 
| #export create_mode="deleteNcreate" 39 | #create_team_project "" "" "" ["] 40 | 41 | unset mode 42 | 43 | ################# 44 | # 45 | # Product/Archive/Inbox project 46 | # 47 | ################# 48 | 49 | export mode="skipOcreate" 50 | #export create_mode="deleteNcreate" 51 | #create_jira_product_proj "" "" "" 52 | unset mode 53 | 54 | update_jira_proj_category_projs_w_components_of_teams_project_keys "SaCoS projects" "^S1.*\$|^S5K.*\$|^SACOS\$|^SMP.*\$" "^SCS.*\$" 55 | -------------------------------------------------------------------------------- /jira/scripts/create_component_from_parameters.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -u 5 | 6 | [[ ${debug:-} == true ]] && set -x 7 | 8 | [[ ${jira_server:-} == true ]] && ( echo "jira_server is not set" && exit 1 ) 9 | 10 | jira_key="${1}" 11 | component_name="${2}" 12 | component_lead="${3}" 13 | 14 | netrc_file=~/.netrc 15 | 16 | 17 | component_already_exists=$(curl --fail --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/project/${jira_key}/components \ 18 | | jq -r ".[] | select(.name==\"${component_name}\").name" ) 19 | if [[ "${component_already_exists:-}" == "${component_name}" ]] ; then 20 | echo "Component: ${component_name} already exists in project: ${jira_key} - skip" 21 | exit 22 | else 23 | printf "Component: ${component_name} in project: ${jira_key} - create: " 24 | fi 25 | 26 | echo "{ \"project\":\"$jira_key\"}" > jira_project.json 27 | 28 | if [[ ${component_lead:-} == "" ]]; then 29 | echo "{ \"name\": \"${component_name}\", \"assigneeType\": \"UNASSIGNED\" }" > jira_component.json 30 | else 31 | echo "{ \"name\": \"${component_name}\", \"leadUserName\": \"${component_lead}\", \"assigneeType\": \"COMPONENT_LEAD\" }" > jira_component.json 32 | fi 33 | jq -s '.[0] * .[1]' jira_project.json jira_component.json > jira_component2.json 
34 | rm jira_component.json 35 | rm jira_project.json 36 | 37 | if ! curl --fail --insecure --netrc-file ${netrc_file} -X POST -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/component --upload-file jira_component2.json > /dev/null; then 38 | echo "Failed.. Maybe the component is already in the project.. - exit 1" 39 | exit 1 40 | else 41 | printf "Done\n" 42 | fi 43 | rm jira_component2.json 44 | -------------------------------------------------------------------------------- /jira/scripts/create_components_from_json_import.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -u 5 | 6 | [[ ${debug:-} == true ]] && set -x 7 | [[ ${jira_server:-} == true ]] && ( echo "jira_server is not set" && exit 1 ) 8 | 9 | jira_key="${1}" 10 | reqex_components=${2} # '.*Sprint.*|.*6.*' 11 | import_file=${3} 12 | 13 | 14 | netrc_file=~/.netrc 15 | 16 | 17 | echo "{ \"project\":\"$jira_key\"}" > jira_project.json 18 | 19 | IFS=$'\r\n' 20 | for component in $(jq -r ".projects[0].components[] | select(.name? | match(\"${reqex_components}\")).name" $import_file ) ; do 21 | echo $component 22 | jq -r ".projects[0].components[] | select(.name == \"${component}\")" $import_file > jira_component.json 23 | jq -s '.[0] * .[1]' jira_project.json jira_component.json > jira_component2.json 24 | rm jira_component.json 25 | 26 | if ! curl --fail --insecure --netrc-file ${netrc_file} -X POST -H Content-Type:application/json -o - --url ${jira_server}/rest/api/2/component --upload-file jira_component2.json ; then 27 | echo "Failed.. Maybe the component is already in the project.. Exit code: $?" 
28 | exit 1 29 | fi 30 | echo 31 | rm -f jira_component2.json 32 | done 33 | rm jira_project.json 34 | -------------------------------------------------------------------------------- /jira/scripts/create_releases_from_json_import.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | set -u 5 | 6 | [[ ${debug:-} == true ]] && set -x 7 | [[ ${jira_server:-} == "" ]] && { echo "jira_server is not set" ; exit 1 ; } # FIX: previously compared against the literal string "true", and the 'exit 1' ran in a ( ) subshell so the script never actually aborted 8 | 9 | jira_key="${1}" 10 | reqex_releases=${2} # '.*Sprint.*|.*6.*' 11 | import_file=${3} 12 | 13 | netrc_file=~/.netrc 14 | 15 | 16 | echo "{ \"project\":\"$jira_key\"}" > jira_project.json 17 | 18 | jq -r ".projects[0].versions[] | select(.name? | match(\"${reqex_releases}\")).name" $import_file > releases.txt 19 | dos2unix releases.txt 20 | IFS=$'\r\n' 21 | for release in $(jq -r ".projects[0].versions[] | select(.name? | match(\"${reqex_releases}\")).name" $import_file ) ; do 22 | version_already_exists=$(curl --fail --insecure --netrc-file ${netrc_file} -X GET -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/project/${jira_key}/versions \ 23 | | jq -r ".[] | select(.name==\"${release}\").name" ) 24 | if [[ "${version_already_exists:-}" == "${release}" ]] ; then 25 | echo "Version: ${release} already exists in project: ${jira_key} - skip" 26 | continue 27 | else 28 | printf "Version: ${release} in project: ${jira_key} - create: " 29 | fi 30 | 31 | 32 | jq -r ".projects[0].versions[] | select(.name == \"${release}\")" $import_file > jira_release.json 33 | jq -s '.[0] * .[1]' jira_project.json jira_release.json > jira_release2.json 34 | 35 | # TODO: check update script to handle exit codes from curl 36 | if ! curl --fail --insecure --netrc-file ${netrc_file} -X POST -H Content-Type:application/json -o - --silent --url ${jira_server}/rest/api/2/version --upload-file jira_release2.json > /dev/null ; then 37 | exit_code=$? 
38 | if [[ $exit_code -eq 0 ]]; then 39 | printf " Failed.. Maybe the release is already in the project.. Exit code: $exit_code - same name lower / UPPER caps ?? - continue\n" 40 | continue 41 | else 42 | printf " Failed.. for unknown reason" 43 | cat jira_release2.json 44 | exit $exit_code 45 | fi 46 | else 47 | printf " $? : Done\n" 48 | fi 49 | rm -f jira_release.json 50 | rm -f jira_release2.json 51 | done 52 | 53 | rm -f jira_project.json -------------------------------------------------------------------------------- /jira/scripts/jira-delete-users.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #set -x 3 | 4 | input_file=$1 5 | 6 | IFS=$'\r\n' 7 | #for user in `jq .users[].name ${input_file} | sed -e 's/"//g'` 8 | #for user in $(ccm users -l) 9 | jira_user_group="change_synergy-import-unused-users" 10 | for user in $(curl --silent --fail --insecure --netrc-file /z//.netrc -X GET -H "Content-Type:application/json" --url "${jira_server}/rest/api/2/group/member?groupname=${jira_user_group}&includeInactiveUsers=true" | jq -r .values[].key) 11 | do 12 | echo "Delete: ${user}" 13 | curl --insecure --request DELETE --netrc-file ~/.netrc --url ${jira_server}/rest/api/2/user?username=$user 14 | sleep 1 15 | done 16 | -------------------------------------------------------------------------------- /jira/scripts/jira-delete-versions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #set -x 3 | 4 | jira_server=$1 5 | input_file=$2 6 | 7 | for id in `jq .[].id ${input_file} | sed -e 's/"//g'` 8 | do 9 | curl --insecure --request DELETE --netrc-file ./.netrc --url "https://${jira_server}/rest/api/2/version/${id}" # FIX: URL was single-quoted, so ${jira_server} was never expanded and the request went to the literal host '${jira_server}' 10 | done 11 | 12 | 13 | # NOTE(review): the following were bare scratch-note URLs that the shell would try to execute as commands — commented out 14 | # https://${jira_server}/rest/api/2/user/application?username=v7y1uvq&applicationKey= 15 | # https://${jira_server}/rest/plugins/applications/1.0/installed/jira-software/license 16 | 
https://${jira_server}/rest/api/2/group/change_synergy-import-unused-users 16 | groupname=change_synergy-import-unused-users 17 | /rest/api/1.0/admin/groups/more-members?context=change_synergy-import-unused-users&limit=1000 -------------------------------------------------------------------------------- /jira/scripts/jira_attachments_import.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Author: Jaime Kirch da Silveira (Atlassian Cloud Support) 4 | # Last update: April, 17th, 2015 5 | 6 | # This will import all attachments to JIRA issues 7 | # Check this KB for more information: 8 | # https://confluence.atlassian.com/display/JIRAKB/Bulk+import+attachments+to+JIRA+issues+via+REST+API 9 | 10 | if [[ $# != 4 ]] 11 | then 12 | 13 | echo "Format: $0 " 14 | echo "Please notice that the JIRA URL must include all the path to access JIRA, including anything after the '/' (like /jira) and the protocol as well (like https://)" 15 | exit 16 | fi 17 | 18 | USERNAME=$1 19 | PASSWORD=$2 20 | PROJECT_KEY=$3 21 | JIRA_URL=$4 22 | 23 | AUTH_TYPE=cookie 24 | #AUTH_TYPE=basic 25 | 26 | COOKIE_FILE=cookie.txt 27 | 28 | if [ "${AUTH_TYPE}" = 'cookie' ] 29 | then 30 | curl --cookie-jar ${COOKIE_FILE} -H "Content-Type: application/json" -d '{"username":"'${USERNAME}'", "password":"'${PASSWORD}'" }' -X POST ${JIRA_URL}/rest/auth/1/session 31 | fi 32 | 33 | 34 | for key in ${PROJECT_KEY}-* 35 | do 36 | if [ "$(ls -A ${key})" ] 37 | then 38 | echo "Importing attachments for issue $key" 39 | for file in $key/* 40 | do 41 | echo "Importing file: $file" 42 | if [ "${AUTH_TYPE}" = 'cookie' ] 43 | then 44 | curl -D- -b ${COOKIE_FILE} -X POST --header "X-Atlassian-Token: no-check" -F "file=@${file}" ${JIRA_URL}/rest/api/2/issue/${key}/attachments 45 | else 46 | if [ "${AUTH_TYPE}" = 'basic' ] 47 | then 48 | curl -D- -u ${USERNAME}:${PASSWORD} -X POST --header "X-Atlassian-Token: no-check" -F "file=@${file}" 
${JIRA_URL}/rest/api/2/issue/${key}/attachments 49 | fi 50 | fi 51 | done 52 | fi 53 | 54 | done 55 | -------------------------------------------------------------------------------- /jira/scripts/jqlScripts/src/com/onresolve/jira/groovy/jql/MismatchedThemes.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * Created by EXT.Tim.Harris on 20-02-2015. 3 | * 4 | * Purpose is to find all User Stories that have a linked "Theme" issue type 5 | * where the Epic of the linked Theme is different than the Epic of the User Story. 6 | * 7 | * If the either the User Story or the linked Themes has an Epic Link and the other 8 | * does not then this is seen as a mismatch. 9 | * 10 | */ 11 | 12 | package com.onresolve.jira.groovy.jql 13 | 14 | import com.atlassian.jira.component.ComponentAccessor 15 | import com.atlassian.jira.issue.Issue 16 | import com.atlassian.crowd.embedded.api.User 17 | import com.atlassian.jira.issue.link.IssueLink 18 | import com.atlassian.jira.issue.link.IssueLinkManager 19 | import com.atlassian.jira.jql.query.QueryCreationContext 20 | import com.atlassian.jira.util.MessageSet 21 | import com.atlassian.query.clause.TerminalClause 22 | import com.atlassian.query.operand.FunctionOperand 23 | import org.apache.lucene.index.Term 24 | import org.apache.lucene.search.BooleanClause 25 | import org.apache.lucene.search.BooleanQuery 26 | import org.apache.lucene.search.Query 27 | import org.apache.lucene.search.TermQuery 28 | 29 | class MismatchedThemes extends AbstractScriptedJqlFunction implements JqlQueryFunction{ 30 | Category log = Category.getInstance(MismatchedThemes.class) 31 | 32 | @Override 33 | String getDescription() { 34 | "Returns all stories where the epic link is not the same as the epic link of the linked Theme(if there is a linked Theme)." + 35 | " Non Linked Stories and themes are seen as mismatches!" 
36 | } 37 | 38 | @Override 39 | List getArguments() { 40 | [ 41 | [ 42 | "description": "Subquery", 43 | "optional": false, 44 | ] 45 | ] 46 | } 47 | 48 | @Override 49 | String getFunctionName() { 50 | "hasEpicMismatchWithTheme" 51 | } 52 | 53 | def String subquery = ""; 54 | @Override 55 | MessageSet validate(User user, FunctionOperand operand, TerminalClause terminalClause) { 56 | def messageSet = super.validate(user, operand, terminalClause) 57 | if(operand.args.size() <= 0){ 58 | messageSet.addErrorMessage("You must supply a sub-query! It may be an empty set of parenthesis if you wish." + 59 | " This will search ALL issues!") 60 | } else { 61 | subquery = operand.args[0] 62 | } 63 | return messageSet; 64 | 65 | } 66 | 67 | @Override 68 | Query getQuery(QueryCreationContext queryCreationContext, FunctionOperand operand, TerminalClause terminalClause) { 69 | def booleanQuery = new BooleanQuery() 70 | def themeEpic; 71 | def storyEpic; 72 | 73 | log.debug("This is the subquery: " + subquery + "\n") 74 | issues = getIssues(subquery) 75 | for (Issue currIssue in issues) { 76 | if (currIssue.getIssueTypeId() == "10001") { 77 | storyEpic = getEpicLinkField(currIssue); 78 | def Issue theme = getLinkedTheme(currIssue); 79 | if(theme && theme.getIssueTypeObject().getName() == "Theme") { 80 | themeEpic = getEpicLinkField(theme); 81 | if(storyEpic != themeEpic) { 82 | booleanQuery.add(new TermQuery(new Term("issue_id",currIssue.id as String)),BooleanClause.Occur.SHOULD) 83 | } 84 | } 85 | } 86 | } 87 | return booleanQuery; 88 | 89 | } 90 | 91 | static String getEpicLinkField(Issue issue) { 92 | def customFieldMgr = ComponentAccessor.getCustomFieldManager(); 93 | def epicLinkField = customFieldMgr.getCustomFieldObjects(issue).find 94 | {it.name == 'Epic Link'} 95 | return epicLinkField.getValue(issue); 96 | } 97 | 98 | static Issue getLinkedTheme(Issue story) { 99 | def IssueLinkManager linkMgr = ComponentAccessor.getIssueLinkManager(); 100 | def theme = null; 101 | for 
(IssueLink link in linkMgr.getOutwardLinks(story.getId())) { 102 | if (link.getIssueLinkType().getName() == "Theme") { 103 | theme = link.getDestinationObject(); 104 | break; 105 | } 106 | } 107 | return theme; 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /jira/scripts/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 4.0.0 6 | 7 | com.adaptavist 8 | scriptrunner-parent 9 | 7 10 | 11 | 12 | com.onresolve.scriptrunner.assets 13 | sr-sample-plugins-jira 14 | 1.0.4-SNAPSHOT 15 | 16 | 17 | Example Company 18 | http://www.example.com/ 19 | 20 | 21 | JIRA Sample Plugins for ScriptRunner 22 | Sample plugin for ScriptRunner, has listeners and JQL functions etc 23 | atlassian-plugin 24 | 25 | 26 | 27 | com.atlassian.jira 28 | jira-api 29 | ${jira.version} 30 | provided 31 | 32 | 33 | 34 | com.atlassian.jira 35 | jira-core 36 | ${jira.version} 37 | provided 38 | 39 | 40 | 41 | com.onresolve.jira.groovy 42 | groovyrunner 43 | ${scriptrunner.version} 44 | provided 45 | 46 | 47 | 48 | com.atlassian.sal 49 | sal-api 50 | 3.0.3 51 | provided 52 | 53 | 54 | 55 | 56 | 57 | 58 | org.codehaus.gmavenplus 59 | gmavenplus-plugin 60 | 61 | 62 | 63 | com.atlassian.maven.plugins 64 | maven-jira-plugin 65 | 66 | 67 | 68 | 69 | 70 | 71 | ${project.groupId}.${project.artifactId} 72 | 7.3.6 73 | 7.3.6 74 | 75 | 76 | -------------------------------------------------------------------------------- /jira/scripts/scriptedFields/LastComment.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * Sometimes it is nice to show the last comments in an issue search. 3 | * Add the field to a column in JQL searches. 
4 | */ 5 | 6 | import com.atlassian.jira.component.ComponentAccessor 7 | 8 | def commentManager = ComponentAccessor.getCommentManager() 9 | def comments = commentManager.getComments(issue) 10 | 11 | if (comments) { 12 | comments.last().author + ": " + comments.last().body 13 | } 14 | -------------------------------------------------------------------------------- /jira/scripts/scriptedFields/TimeSpentAsHours.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * Very simple script to calculate value in hours. 3 | * Jira stores these values in milliseconds and presents them according to wishes, ie. hours, minutes, etc. 4 | * The exports to word and xml will use the default estimation set in global configuration. 5 | * The excel export does not do this :-( 6 | * Therefor we need a scripted field to be used as a column for JQL searches. 7 | * 8 | * NOTE: The return must be a double as Script Runner expects this. 9 | */ 10 | def hours; 11 | if (issue.getTimeSpent() != null) { 12 | hours = issue.getTimeSpent().doubleValue() / 3600 13 | } else { 14 | hours = null 15 | } 16 | return hours 17 | -------------------------------------------------------------------------------- /jira/scripts/sql/inactiveUser.sql: -------------------------------------------------------------------------------- 1 | SELECT cwd_user.user_name, cwd_user.email_address, from_unixtime(round(cwd_user_attributes.attribute_value/1000)) FROM cwd_user, cwd_user_attributes WHERE cwd_user_attributes.user_id = cwd_user.id AND cwd_user_attributes.attribute_name = 'login.lastLoginMillis' AND from_unixtime(round(cwd_user_attributes.attribute_value/1000)) < CURDATE()-INTERVAL 31 DAY AND cwd_user.active = '1' INTO OUTFILE '/tmp/inactive_jira_user_$(date +%F).csv'; 2 | 3 | -------------------------------------------------------------------------------- /jira/scripts/update_status_releases_from_json_import.sh: 
#!/usr/bin/env bash

# Updates the "released" flag of Jira project versions so they match (the
# inverse of) the versions found in a Jira JSON import file.
#
# Usage:  jira_server=<base-url> [debug=true] $0 <jira_key> <regex_releases> <import_file>
# Auth:   credentials for ${jira_server} are read from ~/.netrc

set -e
set -u

[[ ${debug:-} == true ]] && set -x
# Abort early when jira_server is missing. The original compared against the
# literal string "true" and used a subshell, so the exit never terminated the
# script; use an emptiness test and a { …; } group instead.
[[ -z ${jira_server:-} ]] && { echo "jira_server is not set" >&2; exit 1; }

jira_key="${1}"
reqex_releases=${2} # e.g. '.*Sprint.*|.*6.*'
import_file=${3}

netrc_file=~/.netrc


echo "{ \"project\":\"$jira_key\"}" > jira_project.json
#jq -r ".projects[0].versions[] | select(.name? | match(\"${reqex_releases}\")).name" $import_file > releases.txt
#dos2unix releases.txt

echo "Getting versions"
# Fetch current versions; on a 503 (service briefly unavailable) retry once
# after a short pause.
http_code=$(curl --fail --insecure -w '%{http_code}\n' --netrc-file "${netrc_file}" -X GET -H Content-Type:application/json --silent --url "${jira_server}/rest/api/2/project/${jira_key}/versions" -o "${jira_key}-versions.json") || {
    if [[ $http_code -eq 503 ]]; then
        echo "Try again in two seconds"
        sleep 2
        curl --fail --insecure -w '%{http_code}\n' --netrc-file "${netrc_file}" -X GET -H Content-Type:application/json --silent --url "${jira_server}/rest/api/2/project/${jira_key}/versions" -o "${jira_key}-versions.json"
    fi
}
jira_versions=$(cat "${jira_key}-versions.json")
import_versions=$(jq -r ".projects[0].versions[]" "$import_file") # > import_versions.json

# PUT jira_release.json onto the version identified by the (global)
# release_jira_id; sets the global http_code for the caller to inspect.
function update_version {
    printf "."
    http_code=$(curl --fail --insecure -s -w '%{http_code}\n' --netrc-file "${netrc_file}" -X PUT -H Content-Type:application/json -o tmp.json --url "${jira_server}/rest/api/2/version/${release_jira_id}" --upload-file jira_release.json)
}

echo "loop them"
IFS=$'\r\n'
for release in $(jq -r ".projects[0].versions[] | select(.name? | match(\"${reqex_releases}\")).name" "$import_file" ) ; do
    # TODO: extract the release ones and then get the id and released from that
    version_exists=$(echo "$jira_versions" | jq -r ".[] | select(.name==\"${release}\").name" )
    if [[ "${version_exists:-}" == "${release}" ]] ; then
        release_jira_id=$(echo "$jira_versions" | jq -r ".[] | select(.name==\"${release}\").id" )
        released_jira_value=$(echo "$jira_versions" | jq -r ".[] | select(.name==\"${release}\").released" )
        released_import_value=$(echo "$import_versions" | jq -r ". | select(.name==\"${release}\").released" )
        # NOTE(review): the new value is deliberately the INVERSE of the
        # import file's "released" flag — confirm this matches the intended
        # migration semantics before reuse.
        if [[ ${released_import_value} == "false" ]]; then
            released_new_value="true"
        elif [[ ${released_import_value} == "true" ]]; then
            released_new_value="false"
        else
            echo "$version_exists - WHY here - exit 1"
            exit 1
        fi
    else
        # Skip releases if the list contains mix of upper and lower cases
        # [[ ${release} == "" ]] && { echo "${release} - WARNING: lower/Upper cap issue - skip"; continue ; }
        echo "WHY Heres - exit 2"
        exit 2
    fi
    [[ ${released_jira_value} == "${released_new_value}" ]] && {
        echo "$version_exists already as it should be : ${released_jira_value} == ${released_new_value}"
        continue
    }
    printf "%s to be updated: " "$version_exists"
    echo "{ \"released\": $released_new_value }" > jira_release.json

    # Retry on 503, but cap the attempts — the original looped forever when
    # the server kept answering 503.
    http_code=503
    try=0
    max_tries=5
    while [[ $http_code -eq 503 ]] ; do
        #sleep $try
        update_version || true
        try=$(( try + 1 ))
        [[ $try -ge $max_tries ]] && break
    done
    unset try
    if [[ $http_code -eq 200 ]]; then
        printf " - All good : "
    else
        printf " Failed.. for unknown reason: %s " "$http_code"
        cat jira_release.json
        exit "$http_code"
    fi
    printf " %s : Done\n" "$http_code"
    rm -f jira_release.json
    rm -f jira_release2.json
done

rm -f jira_project.json
groupManager.getGroup("second-level-group") 33 | ​ 34 | def allActiveUsers = ComponentAccessor.getComponent(UserSearchService).findUsers(jiraServiceContext, "", paramBuilder.build()) 35 | def directoryId = directoryManager.findAllDirectories()?.find { it.name.toLowerCase() == directoryToCheck.toLowerCase() }?.id 36 | ​ 37 | def idleUsers = allActiveUsers.findAll { user -> 38 | user.directoryId == directoryId && loginManager.getLoginInfo(user.username)?.lastLoginTime && groupManager.getGroupNamesForUser(user).contains("jira-software-users") && groupManager.getGroupNamesForUser(user).size() == 1 39 | } 40 | -------------------------------------------------------------------------------- /jira/scripts/various/fixResolutionStatus.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | This script can be used to fix the resolution status after a bad import. When 3 | bulk editing the resolution status, the resolution date will be changed to the 4 | time of the request. 5 | 6 | Specifically, this was used to fix a problem after importing from Redmine where 7 | the importer plugin from JIRA did not set the resolution status of closed tickets 8 | to either "Won't Fix" or "Done"/"Fixed". 
9 | */ 10 | 11 | import com.atlassian.jira.ComponentManager 12 | import com.atlassian.jira.component.ComponentAccessor 13 | import com.atlassian.jira.config.ResolutionManager 14 | import com.atlassian.jira.config.StatusManager 15 | import com.atlassian.jira.event.type.EventDispatchOption 16 | import com.atlassian.jira.issue.Issue 17 | import com.atlassian.jira.issue.IssueManager 18 | import com.atlassian.jira.issue.MutableIssue 19 | import com.atlassian.jira.issue.UpdateIssueRequest 20 | import com.atlassian.jira.issue.resolution.Resolution 21 | import com.atlassian.jira.issue.search.SearchProvider 22 | import com.atlassian.jira.jql.parser.JqlQueryParser 23 | import com.atlassian.jira.security.Permissions 24 | import com.atlassian.jira.user.ApplicationUser 25 | import com.atlassian.jira.web.bean.PagerFilter 26 | 27 | def resolutionManager = ComponentAccessor.getComponent(ResolutionManager) 28 | def issueManager = ComponentAccessor.getIssueManager() 29 | def resolution = resolutionManager.getResolutionByName("Done") 30 | def user = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 31 | def statusManager = ComponentAccessor.getComponent(StatusManager) 32 | def jqlQueryParser = ComponentAccessor.getComponent(JqlQueryParser.class) 33 | def searchProvider = ComponentAccessor.getComponent(SearchProvider.class) 34 | def query = jqlQueryParser.parseQuery("resolution = 1 and status = Done") 35 | def results = searchProvider.search(query, user, PagerFilter.getUnlimitedFilter()) 36 | 37 | results.getIssues().each {documentIssue -> 38 | //log.debug(documentIssue.key) 39 | def issue = issueManager.getIssueObject(documentIssue.id) 40 | setFixedResolution(issue.getKey(), resolution) 41 | } 42 | 43 | def void setFixedResolution(String issueKey, Resolution resolution) 44 | { 45 | def issueManager = ComponentAccessor.getIssueManager() 46 | MutableIssue issueObject = issueManager.getIssueObject(issueKey) 47 | def issueResolutionDate = issueObject.getResolutionDate() 
48 | def resolutionStatus = issueObject.getResolution().description 49 | def user = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 50 | def resolutionDate = issueObject.getResolutionDate() 51 | issueObject.setResolutionDate(issueResolutionDate) 52 | issueObject.setResolution(resolution) 53 | // remember that the updated-field can also be set, so it's even more transparent 54 | ComponentAccessor.getIssueManager().updateIssue((ApplicationUser)user, (MutableIssue)issueObject, UpdateIssueRequest.builder().build(), false) 55 | } 56 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/BlockStoriesWithOpenBlockingIssues.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | Stories with open blocking issues should not be set to Done. 3 | The button disappears from the issue view. 4 | 5 | This is meant to be added as a Condition on a transition. 6 | */ 7 | 8 | import com.atlassian.jira.issue.*; 9 | import com.atlassian.jira.component.ComponentAccessor; 10 | import com.atlassian.jira.issue.link.*; 11 | import java.util.List 12 | 13 | def issueLinkManager = ComponentAccessor.getIssueLinkManager() 14 | def issueManager = ComponentAccessor.getIssueManager() 15 | 16 | // issue is a special variable of the context of the transition 17 | def issue = issue 18 | 19 | // https://scriptrunner.adaptavist.com/latest/jira/custom-workflow-functions.html, see under Conditions 20 | passesCondition = true 21 | 22 | if(issue.getIssueType().getName() == "Story"){ 23 | List links = issueLinkManager.getInwardLinks(issue.id) 24 | for(IssueLink issueLink : links){ 25 | if(issueLink.issueLinkType.name == "Is blocked by"){ 26 | def status = issueLink.destinationObject.getStatus().getName() 27 | if(!["Rejected", "Done"].contains(status)){ 28 | passesCondition = false 29 | break; 30 | } 31 | } 32 | } 33 | } 34 | // functional programming style, no need for return 35 | 
passesCondition 36 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/CloneAndLink.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * Purpose is to clone an issue type to a user selected project. 3 | * The user performing the action must have permission to create issue 4 | * in the target project. The cloned issue will be linked to the original 5 | * defect with a "clones" link type. 6 | * 7 | * User selected project is a custom field called "Target Project". 8 | * The assignee is set to the project lead of the Target Project. 9 | * The summary is prepended with CLONE. 10 | * The creation date is set to the date of creation. 11 | * Links from Original issue are kept. 12 | * Estimates and time spent are zeroed out on cloned issue. 13 | * 14 | */ 15 | 16 | import com.atlassian.jira.component.ComponentAccessor 17 | import com.atlassian.jira.issue.CustomFieldManager 18 | import com.atlassian.jira.issue.IssueFactory 19 | import com.atlassian.jira.issue.IssueManager 20 | import com.atlassian.jira.issue.MutableIssue 21 | import com.atlassian.jira.issue.fields.CustomField 22 | import com.atlassian.jira.issue.link.IssueLink 23 | import com.atlassian.jira.issue.link.IssueLinkManager 24 | import com.atlassian.jira.project.Project 25 | import com.atlassian.jira.project.ProjectManager 26 | import com.atlassian.jira.security.JiraAuthenticationContext 27 | import com.atlassian.jira.issue.link.IssueLinkTypeManager 28 | import com.atlassian.jira.issue.link.IssueLinkType 29 | 30 | def final String LINK_TYPE_NAME = "Defect Cloners"; // Link types we care about 31 | 32 | CustomFieldManager cfMgr = ComponentAccessor.getCustomFieldManager(); 33 | ProjectManager projectMgr = ComponentAccessor.getProjectManager(); 34 | IssueLinkManager linkMgr = ComponentAccessor.getIssueLinkManager(); 35 | IssueLinkTypeManager linkTypeMgr = 
ComponentAccessor.getComponentOfType(IssueLinkTypeManager); 36 | IssueManager issueMgr = ComponentAccessor.getIssueManager(); 37 | IssueFactory issueFactory = ComponentAccessor.getIssueFactory(); 38 | JiraAuthenticationContext authContext = ComponentAccessor.getJiraAuthenticationContext(); 39 | 40 | final def currentUserObj = authContext.getUser().getDirectoryUser(); 41 | final def currentUserName = authContext.getUser().getName(); 42 | final def CustomField cf = cfMgr.getCustomFieldObjects(issue).find() {it.name == 'Target Project'} 43 | def Project projectObj; 44 | 45 | if(cf != null){ 46 | def Map projMap = issue.getCustomFieldValue(cf); 47 | if(projMap != null){ 48 | projectObj = projectMgr.getProjectByCurrentKey(projMap.get("key") as String); 49 | } 50 | } 51 | 52 | def MutableIssue newIssue = issueFactory.cloneIssue(issue); 53 | if (projectObj != null){ 54 | newIssue.setProjectId(projectObj.id); 55 | newIssue.setAssignee(projectMgr.getDefaultAssignee(projectObj,projectObj.getProjectComponents())); 56 | } 57 | newIssue.setSummary("CLONE of DEFECT " + '"' + issue.getKey() + '"' + ": " + newIssue.getSummary()); 58 | newIssue.setOriginalEstimate(0); 59 | newIssue.setEstimate(0); 60 | newIssue.setTimeSpent(0); 61 | created = new java.sql.Timestamp(Calendar.getInstance().getTime().getTime()); 62 | newIssue.setCreated(created); 63 | params = ["issue":newIssue]; 64 | newIssue = issueMgr.createIssueObject(currentUserName, params); 65 | 66 | def Collection linkTypesCollection = linkTypeMgr.getIssueLinkTypes(); 67 | def clonersID = 0; 68 | for (IssueLinkType linkType : linkTypesCollection) { 69 | if (linkType.getName() == LINK_TYPE_NAME) { 70 | clonersID = linkType.getId(); 71 | break; 72 | } 73 | } 74 | 75 | def sequence = 0; 76 | for (IssueLink link in linkMgr.getInwardLinks(issue.id)) { 77 | if(link.getIssueLinkType().getName() != LINK_TYPE_NAME) { 78 | linkMgr.createIssueLink(link.getSourceId(),newIssue.id, link.getLinkTypeId(),sequence,currentUserObj) 79 | 
/**
 * Workflow validator: checks that an Original Estimate is set on sub-task
 * issue types before the transition is allowed.
 *
 * Apply it as a validation on a transition; ScriptRunner raises the
 * configured error message when the script evaluates to false.
 *
 * Fixes in this revision: the original declared `def rteval = false` but then
 * executed `retval true;` (parsed as a call to a nonexistent retval() method,
 * throwing MissingMethodException) and returned the undeclared `retval`; it
 * also fell off the end (returning null) for non-sub-task issues.
 */

import com.atlassian.jira.issue.Issue

// `issue` is provided by the ScriptRunner transition context.
if (issue.isSubTask()) {
    // A sub-task passes validation only when an original estimate exists.
    return issue.getOriginalEstimate() != null
}
// Non sub-task issue types are not subject to this rule and always pass.
return true
It is intended to be added to a create transition. It will set CheckBox values of a 3 | * custom field called "Development Team" to an option that matches. This is a SD use case therefor the request channel part of 4 | * the query. 5 | * 6 | * It is EXPECTED that the label is in the summary and enclosed in brackets []. 7 | * If there is no matching option a warning will be put in the logs. Nothing else... 8 | * 9 | * NOTE: That order of the post functions matter. This script MUST be after the reindex post function. Also if the summary 10 | * contains more than one valid label then the last match function will override any previously set values. 11 | */ 12 | 13 | import com.atlassian.jira.event.type.EventDispatchOption 14 | import com.atlassian.jira.component.ComponentAccessor 15 | import com.atlassian.jira.issue.search.SearchProvider 16 | import com.atlassian.jira.jql.parser.JqlQueryParser 17 | import com.atlassian.jira.web.bean.PagerFilter 18 | 19 | def KEY = "key = " 20 | def ESCAPE = "\\" 21 | def QUOTE = '"' 22 | def LABELS = ["DEV", "SPU", "BIA", "No Option"] 23 | def BRACKET_OPEN = "[" 24 | def BRACKET_CLOSE = "]" 25 | def CHANNEL = "request-channel-type = email" 26 | def CONTAINS = " ~ " 27 | def OPERATOR = " AND " 28 | def FIELD = "summary" 29 | def user = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 30 | def issueManager = ComponentAccessor.getIssueManager() 31 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 32 | def productCF = customFieldManager.getCustomFieldObjectByName("Development Team") 33 | def optionsManager = ComponentAccessor.getOptionsManager() 34 | def jqlQueryParser = ComponentAccessor.getComponent(JqlQueryParser) 35 | def searchProvider = ComponentAccessor.getComponent(SearchProvider) 36 | 37 | // Mutable issue and array of checkbox options to set 38 | def issueMutable = issueManager.getIssueObject(issue.getKey()) 39 | def optionsToSelect = [] 40 | 41 | // Checkbox Options 42 | def cfConfig = 
productCF.getRelevantConfig(issueMutable) 43 | def optionsAvailable = optionsManager.getOptions(cfConfig) 44 | log.debug("Options Available: " + optionsAvailable.toString()) 45 | 46 | // Loop through the labels and see if they are in the summary. If so add them to the optionsToSelect array. 47 | LABELS.each { 48 | def LABEL = it 49 | def REGEX = ~".*(\\[${LABEL}\\]).*" 50 | if (issueMutable.getSummary() ==~ REGEX) { 51 | // Check that it iS an email channel, is the same key and has the label. Kind of overkill... 52 | def QUERY = KEY + issueMutable.getKey() + OPERATOR + FIELD + CONTAINS + QUOTE + ESCAPE + ESCAPE + 53 | BRACKET_OPEN + LABEL + ESCAPE + ESCAPE + BRACKET_CLOSE + QUOTE + OPERATOR + CHANNEL 54 | log.debug("--------------------") 55 | log.debug("QUERY is: " + QUERY) 56 | 57 | // This should only return one item. 58 | def query = jqlQueryParser.parseQuery(QUERY) 59 | def results = searchProvider.search(query, user, PagerFilter.getUnlimitedFilter()) 60 | if (results != null) { 61 | log.debug("Find Option: " + LABEL) 62 | def option = optionsAvailable.find {it.value in LABEL} 63 | if (option != null) { 64 | log.debug("Option to select: " + option) 65 | optionsToSelect.add(option) 66 | } else { 67 | log.warn("LABEL: " + LABEL + " not found as an option! Check the custom field!") 68 | } 69 | } 70 | log.debug("--------------------") 71 | } 72 | } 73 | 74 | log.debug("These options will be selected" + optionsToSelect.toString()) 75 | issueMutable.setCustomFieldValue(productCF,optionsToSelect) 76 | issueManager.updateIssue(user, issueMutable, EventDispatchOption.ISSUE_UPDATED, false) 77 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/SetProduct.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * This is a post function script. It is intended to be added to a create transition. 
It will set a single select list 3 | * custom field called "Product" to an option that matches. This is a SD use case therefor the request channel part of 4 | * the query. 5 | * 6 | * It is EXPECTED that the label is in the summary and enclosed in brackets []. 7 | * If there is no matching option a warning will be put in the logs. Nothing else... 8 | * 9 | * NOTE: That order of the post functions matter. This script MUST be after the reindex post function. Also if the summary 10 | * contains more than one valid label then the last match function will override any previously set values. 11 | */ 12 | 13 | import com.atlassian.jira.component.ComponentAccessor 14 | import com.atlassian.jira.event.type.EventDispatchOption 15 | import com.atlassian.jira.issue.search.SearchProvider 16 | import com.atlassian.jira.jql.parser.JqlQueryParser 17 | import com.atlassian.jira.web.bean.PagerFilter 18 | 19 | def KEY = "key = " 20 | def ESCAPE = "\\" 21 | def QUOTE = '"' 22 | def LABELS = ["SPU", "BIA", "No Option"] 23 | def BRACKET_OPEN = "[" 24 | def BRACKET_CLOSE = "]" 25 | def CHANNEL = "request-channel-type = email" 26 | def CONTAINS = " ~ " 27 | def OPERATOR = " AND " 28 | def FIELD = "summary" 29 | def user = ComponentAccessor.getJiraAuthenticationContext().getLoggedInUser() 30 | def issueManager = ComponentAccessor.getIssueManager() 31 | def customFieldManager = ComponentAccessor.getCustomFieldManager() 32 | def productCF = customFieldManager.getCustomFieldObjectByName("Product") 33 | def optionsManager = ComponentAccessor.getOptionsManager() 34 | def jqlQueryParser = ComponentAccessor.getComponent(JqlQueryParser) 35 | def searchProvider = ComponentAccessor.getComponent(SearchProvider) 36 | 37 | // Mutable issue 38 | def issueMutable = issueManager.getIssueObject(issue.getKey()) 39 | 40 | // Update issue if label is exact match 41 | LABELS.each { 42 | def LABEL = it 43 | def REGEX = ~".*(\\[${LABEL}\\]).*" 44 | log.debug("REGEX: " + REGEX) 45 | 46 | if 
(issueMutable.getSummary() ==~ REGEX) { 47 | // Check that it iS an email channel, is the same key and has the label. Kind of overkill... 48 | def QUERY = KEY + issueMutable.getKey() + OPERATOR + FIELD + CONTAINS + QUOTE + ESCAPE + ESCAPE + 49 | BRACKET_OPEN + LABEL + ESCAPE + ESCAPE + BRACKET_CLOSE + QUOTE + OPERATOR + CHANNEL 50 | 51 | log.debug("QUERY is: " + QUERY) 52 | def query = jqlQueryParser.parseQuery(QUERY) 53 | def results = searchProvider.search(query, user, PagerFilter.getUnlimitedFilter()) 54 | 55 | // This should only return one item 56 | results.getIssues().each { documentIssue -> 57 | def cfConfig = productCF.getRelevantConfig(issueMutable) 58 | def option = optionsManager.getOptions(cfConfig)?.find { it.toString() == LABEL } 59 | if (option != null) { 60 | log.debug("Updating field: " + productCF.getName() + " to value: " + option.toString() + " on issue: " + issueMutable.getKey()) 61 | issueMutable.setCustomFieldValue(productCF, option) 62 | issueManager.updateIssue(user, issueMutable, EventDispatchOption.ISSUE_UPDATED, false) 63 | } else { 64 | log.warn("No matching option for LABEL: " + LABEL) 65 | } 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/StoryTransitionInProgressSubtask.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | Set the story status to In Progress when a Sub-task is set to In progress. 
3 | */ 4 | 5 | import com.atlassian.jira.component.ComponentAccessor 6 | import com.atlassian.jira.config.SubTaskManager; 7 | import com.atlassian.jira.issue.Issue; 8 | import com.atlassian.jira.issue.MutableIssue; 9 | import com.atlassian.jira.workflow.WorkflowTransitionUtil; 10 | import com.atlassian.jira.workflow.WorkflowTransitionUtilImpl; 11 | import com.atlassian.jira.util.JiraUtils; 12 | import com.opensymphony.workflow.WorkflowContext; 13 | 14 | Issue issue = issue 15 | 16 | def issueService = ComponentAccessor.getIssueService() 17 | def currentUser = ComponentAccessor.jiraAuthenticationContext.getLoggedInUser() 18 | 19 | if(issue.isSubTask()) { 20 | // Need to use a mutable object to have read/write to parent, i.e., story 21 | MutableIssue parent = issue.getParentObject() as MutableIssue 22 | WorkflowTransitionUtil workflowTransitionUtil = (WorkflowTransitionUtil) JiraUtils.loadComponent(WorkflowTransitionUtilImpl.class) 23 | 24 | String originalParentStatus = parent.getStatus().getSimpleStatus().getName() 25 | def isDevBacklogStatus = originalParentStatus in ['To Do', 'Clarification'] 26 | 27 | if (isDevBacklogStatus) { 28 | workflowTransitionUtil.setIssue(parent) 29 | // 21 is the id of "In Progress", see Text of the Workflow 30 | // The state name can't be used directly 31 | workflowTransitionUtil.setAction(21) 32 | if (workflowTransitionUtil.validate()) { 33 | workflowTransitionUtil.progress() 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/block-epics-open-stories.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | It's convenient to block transitioning epics to done when there are 3 | still stories and bugs that are not yet closed. 4 | 5 | Validators have the advantage that they don't hide the transition 6 | buttons, but yield a warning. 7 | 8 | The script can be used on a transition that goes into the Done state. 
9 | It can easily be adapted to a condition as well. Please check 10 | examples under 11 | https://scriptrunner.adaptavist.com/5.9.1/jira/recipes/workflow/conditions.html 12 | */ 13 | 14 | import com.atlassian.jira.component.ComponentAccessor 15 | import com.atlassian.jira.issue.Issue 16 | import com.atlassian.jira.issue.IssueManager 17 | import com.atlassian.jira.issue.issuetype.IssueType 18 | import com.atlassian.jira.issue.link.IssueLink 19 | import com.atlassian.jira.issue.link.IssueLinkManager 20 | import com.atlassian.jira.issue.link.IssueLinkType 21 | import com.atlassian.jira.issue.status.Status 22 | 23 | import com.opensymphony.workflow.InvalidInputException 24 | 25 | IssueManager issueManager = ComponentAccessor.getIssueManager() 26 | IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager() 27 | 28 | Issue epic = issue as Issue // in context 29 | 30 | if(epic.getIssueType().getName() != "Epic"){ 31 | return; 32 | } 33 | 34 | List allOutIssueLink = issueLinkManager.getOutwardLinks(epic.getId()) 35 | 36 | allOutIssueLink.each { IssueLink it -> 37 | Issue linkedIssue = it.destinationObject 38 | Status status = linkedIssue.getStatus() 39 | IssueLinkType issueLinkType = it.getIssueLinkType() 40 | 41 | if(!["Rejected", "Done"].contains(status.getName()) && issueLinkType.getOutward() == "is Epic of"){ 42 | throw new InvalidInputException("All stories and bugs in epic must be have status set to either Done or Rejected"); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /jira/scripts/workflowScripts/block-requests-with-open-epics.groovy: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | For a client, we created a new layer on top of the epics. The 4 | parent-child (Hierarchy) link type is used to emulate this, in 5 | addition to configuring issue type schemes for the various projects. 6 | 7 | The script can also be used as a condition. 
// ScriptRunner workflow validator: block transitioning a Request to Done
// while child epics (linked via the "Hierarchy" link type, used to emulate
// a layer above epics) are still open. Can also be used as a condition, see
// https://scriptrunner.adaptavist.com/5.9.1/jira/recipes/workflow/conditions.html

import com.atlassian.jira.component.ComponentAccessor
import com.atlassian.jira.issue.Issue
import com.atlassian.jira.issue.IssueManager
import com.atlassian.jira.issue.issuetype.IssueType
import com.atlassian.jira.issue.link.IssueLink
import com.atlassian.jira.issue.link.IssueLinkManager
import com.atlassian.jira.issue.link.IssueLinkType
import com.atlassian.jira.issue.status.Status
import com.opensymphony.workflow.InvalidInputException

IssueLinkManager issueLinkManager = ComponentAccessor.getIssueLinkManager()

// 'issue' is bound by ScriptRunner in the workflow transition context
Issue request = issue as Issue

// Statuses that count as "closed" for epics under the request.
final List<String> CLOSED_STATUSES = ["Rejected", "Done"]

issueLinkManager.getOutwardLinks(request.getId()).each { IssueLink link ->
    Issue linkedIssue = link.destinationObject

    // Only open epics hanging off the "Hierarchy" link block the transition;
    // any other linked issue or link type is ignored.
    if (linkedIssue.getIssueType().getName() == "Epic"
            && link.getIssueLinkType().getName() == "Hierarchy"
            && !CLOSED_STATUSES.contains(linkedIssue.getStatus().getName())) {
        throw new InvalidInputException("All epics in the Request must have status Done or Rejected")
    }
}
// ScriptRunner workflow post-function: JIRA does not update the "Epic Status"
// custom field when an Epic is transitioned to Done, so closed epics keep
// showing up on the Scrum board. This script sets the field (a select-list
// custom field, typically with options To Do / In Progress / Done) to the
// value in 'epicStatus' below; adjust that value when using the script on
// transitions going OUT of the done state.

import com.atlassian.jira.component.ComponentAccessor
import com.atlassian.jira.issue.CustomFieldManager
import com.atlassian.jira.issue.Issue
import com.atlassian.jira.issue.IssueManager
import com.atlassian.jira.issue.ModifiedValue
import com.atlassian.jira.issue.customfields.CustomFieldType
import com.atlassian.jira.issue.customfields.manager.OptionsManager
import com.atlassian.jira.issue.customfields.option.Option
import com.atlassian.jira.issue.customfields.option.Options
import com.atlassian.jira.issue.fields.config.FieldConfig
import com.atlassian.jira.issue.util.DefaultIssueChangeHolder
import com.atlassian.jira.user.ApplicationUser

CustomFieldManager customFieldManager = ComponentAccessor.getCustomFieldManager()
OptionsManager optionsManager = ComponentAccessor.getOptionsManager()

// 'issue' is bound by ScriptRunner in the workflow transition context
Issue epic = issue as Issue

// Only applies to epics; other issue types are left untouched.
if (epic.getIssueType().getName() != "Epic") {
    return
}

// Target option value; use "To Do" / "In Progress" on outgoing transitions.
String epicStatus = "Done"

def epicStatusCf = customFieldManager.getCustomFieldObjectByName("Epic Status")
if (epicStatusCf == null) {
    // Field not configured in this instance - do nothing rather than NPE.
    // 'log' is bound by ScriptRunner.
    log.warn("Custom field 'Epic Status' not found - skipping epic status update")
    return
}

FieldConfig epicStatusFieldConfig = epicStatusCf.getRelevantConfig(epic)
Options epicStatusOptions = optionsManager.getOptions(epicStatusFieldConfig)
Option epicStatusOption = epicStatusOptions.getOptionForValue(epicStatus, null)
if (epicStatusOption == null) {
    log.warn("Option '${epicStatus}' not found for 'Epic Status' - skipping epic status update")
    return
}

epicStatusCf.updateValue(null, epic,
        new ModifiedValue(epic.getCustomFieldValue(epicStatusCf), epicStatusOption),
        new DefaultIssueChangeHolder())
#!/bin/bash

# This script monitors a list of files for coordinated changes and reports
# (exits non-zero) if some of the files changed but not all of them.
# The use case is to keep files in edit-sync, understood as: changes in one
# file impose changes in another file (the files are not identical, nor are
# the changes).
#
# Usage:
#   run the script with the list of files as ONE parameter:
#   ./script "file1 file2 file3"
#
# The script stores a <file>.sha1 checksum next to each file the first time
# it runs. On the next run, a changed file no longer matches its checksum and
# the changed-file count is incremented. If at the end the changed-file count
# is non-zero but does not equal the file count, not all files were changed.
#
# NOTE: the script is not idempotent - running it a second time after a
# change is detected reports no change (unless you changed the files again
# between the runs).

list=$1
echo "Checking if files have been changed: $list"
echo "If one file is changed, all must be changed"

file_changed_count=0
file_count=0
changed_files_file="changed_file.lst"
# plain file, so -f (not -rf) is sufficient
rm -f "$changed_files_file"

# $list deliberately unquoted: word splitting into individual file names.
for f in $list
do
    if [ ! -f "$f" ]
    then
        echo "Did not find $f - skipping file"
    else
        file_count=$((file_count + 1))
        echo "Monitoring file $f for changes"
        old_sha="$f.sha1"
        if [ -f "$old_sha" ]
        then
            # Use shasum for BOTH check and create, so the stored format
            # always matches the tool that verifies it.
            if ! shasum -c "$old_sha"
            then
                echo "Monitored file did not match old checksum"
                file_changed_count=$((file_changed_count + 1))
                echo "$f" >> "$changed_files_file"
            fi
        fi
        # create a new checksum for the next run of this script
        shasum "$f" > "$old_sha"
    fi
done

if [ "$file_changed_count" -gt 0 ] && [ "$file_count" -ne "$file_changed_count" ]
then
    echo "Some files did change - but not all. These files did change:"
    cat "$changed_files_file"
    # Contract from the header: exit false on a partial change.
    exit 1
else
    echo "OK - either no file or all files changed since last time"
fi
4 | 5 | Use if for example on a Jenkins build slave, to make sure all deployment target can be reach just before starting complex deployments. Typically relevant in cases with complex networking setup, and no so regularly deployment. Things can change over the weeks. 6 | 7 | Obviously monitoring can be an alternative, but this checks from one host to another, not from a monitoring server. 8 | 9 | ## Config file 10 | 11 | The configuration file is a YAML file with the following structure: 12 | 13 | ``` 14 | defaults: 15 | web_server_ports: [80] 16 | env_prod: 17 | http://www.google.com: [web_server_ports] 18 | http://artifactory.macrohard.com: [web_server_ports, 50000] 19 | env_qa: 20 | http://www.google.com: [web_server_ports] 21 | http://test.macrohard.com: [web_server_ports, 10000] 22 | ``` 23 | 24 | See [`config-example.yml`](config-example.yml) 25 | 26 | ## Usage 27 | 28 | `groovy powerPing.groovy myConfigFile.yml env_prod env_qa` 29 | 30 | 31 | ## Roadmap and improvements 32 | 33 | * support reporting in junit format, so each check is reported as a unit-test 34 | * allow a configuration, globally, or per target, to fail the script if a check fail - interesting if used in a Jenkins job 35 | * allow multiline host defintion, so the following repeated configuration becomes simpler: 36 | ``` 37 | testdb1.praqma.net: 38 | ports: [database_standard_ports] 39 | testdb2.praqma.net: 40 | ports: [database_standard_ports] 41 | testdb3.praqma.net: 42 | ports: [database_standard_ports] 43 | ``` 44 | simpler as 45 | ``` 46 | testdb1.praqma.net, testdb2.praqma.net, testdb2.praqma.net: 47 | ports: [database_standard_ports] 48 | ``` 49 | * allow a combination of configuration target, such as `GENERIC` in the `config-example.yml` or `QA` in `projectEnvironments.yml` to be executed based on environment variables. E.g. to be used in Jenkins job automation, to allow chosing configuration based on slave node name or something. 
50 | 51 | -------------------------------------------------------------------------------- /powerping/config-example.yml: -------------------------------------------------------------------------------- 1 | # Example configuration file for what the powerping script 2 | # will check. 3 | 4 | # ************************************************************************** 5 | # * Indentation matter - it is YML so you get list, set etc. based on this * 6 | # ************************************************************************** 7 | 8 | 9 | 10 | # Example on general re-useable configuration, to avoid specifying same ports many times 11 | configuration: 12 | database_standard_ports: [40006, 40012, 40018] 13 | portal_standard_ports: [8879, 9043, 10039, 22] 14 | web_ports: [80, 443] 15 | 16 | 17 | # 'GENERIC' is target to check, it is named and used as parameter to powerping: 18 | # `groovy powerPing.groovy config-example.yml EXAMPLE` 19 | EXAMPLE: 20 | # specify DNS host name here to check DNS look. 
package XmlAccess

/**
 * Small helper for running external commands, echoing the command line and
 * streaming combined stdout/stderr to this process' stdout.
 */
class Command {
    /**
     * Run {@code command} in the current working directory.
     * @return the process exit code
     */
    static def run(String command) {
        return run(command, new File(System.properties.'user.dir'))
    }

    /**
     * Run {@code command} in {@code workingDir} and return its exit code.
     *
     * The command line is split on whitespace because ProcessBuilder expects
     * the program and each argument as separate list elements - passing the
     * whole line as one string makes the OS look for an executable literally
     * named e.g. "cmd /c xmlaccess ..." and fail with an IOException.
     * Limitation: quoted arguments containing spaces are not supported.
     */
    static def run(String command, File workingDir) {
        println command
        List<String> argv = command.split(/\s+/) as List
        def process = new ProcessBuilder(argv)
                .directory(workingDir)
                .redirectErrorStream(true)  // merge stderr into stdout
                .start()
        process.inputStream.eachLine { println it }
        // waitFor() blocks until termination and returns the exit value
        return process.waitFor()
    }
}
url in 1st argument 12 | /* Requirements: The script assumes two environment variable as available 13 | /* DEPLOY_USER 14 | /* DEPLOY_PASS 15 | /* They represent the account used to deploy (run xmlaccess) 16 | /* 17 | /*****************************************************************************/ 18 | 19 | def cli = new CliBuilder(usage: 'groovy run_xmlaccess [OPTIONS] URL SRCDIR') 20 | cli.stopOnFailure('stop the script as soon as a command fails') 21 | 22 | def options = cli.parse(args) 23 | assert options 24 | 25 | assert options.arguments().size() == 2 : "Insufficient arguments. Requires Url and srcDir" 26 | def url = options.arguments()[0] 27 | def srcDir = options.arguments()[1] 28 | 29 | def deployUser = System.getenv('DEPLOY_USER') 30 | assert deployUser : "Requires DEPLOY_USER environment variable" 31 | def deployPass = System.getenv('DEPLOY_PASS') 32 | assert deployPass : "Requires DEPLOY_PASS environment variable" 33 | 34 | println "Portal URL: " + url 35 | println "xmlaccess directory: " + srcDir 36 | 37 | def files = [] 38 | def dir = new File(srcDir) 39 | println "Files found:" 40 | dir.eachFileMatch(~/\d-.*\.xml/) { file -> 41 | files << file 42 | println file.path 43 | } 44 | 45 | def failed 46 | // .find breaks on return true, continues on return false. 47 | // Clever trick to get stopOnFailure to work. 48 | files.find { file -> 49 | try{ 50 | String inPath = file.path 51 | String outPath = file.path + ".out" 52 | String command = "cmd /c xmlaccess -user $deployUser -password $deployPass -url $url -in $inPath -out $outPath" 53 | def exitCode = Command.run(command) 54 | if(exitCode != 0){ 55 | println "Failed for $file with exit code $exitCode" 56 | failed = true 57 | return options.stopOnFailure 58 | } 59 | } catch (Exception ex){ 60 | println "Failed for $file with Exception:\n$ex.message" 61 | println ex.stackTrace 62 | failed = true 63 | return options.stopOnFailure 64 | } 65 | } 66 | System.exit(failed ? 
1 : 0) -------------------------------------------------------------------------------- /setversion/setversion-headerfile-template.h: -------------------------------------------------------------------------------- 1 | // Version info header file template 2 | // Must be included in the application 3 | 4 | // The version_info.h file should be found in addition to this one 5 | // on the include path, as it contains VERSION_INFO define and the following: 6 | // #define VERSION_INFO VERSION_NUMBER "-" BUILD_NUMBER 7 | 8 | // Automatically we changes these with a shell script. They are not committed back. 9 | // Do not change any of these default values below 10 | // They are to be changed automatically only by the build system 11 | // NOTE that only values available to the build system are used, 12 | // so they can be verified to be correct in the compiled application. 13 | #define BUILD_NUMBER "xxxx" 14 | #define BUILD_TAG "unknown" 15 | #define BUILD_DATE_TIME "1970-01-01_00-00-00" 16 | #define BUILD_SCM_INFO "not_available" 17 | #define PROJECT_CONFIG "none" 18 | -------------------------------------------------------------------------------- /setversion/setversion-javafile-template.properties: -------------------------------------------------------------------------------- 1 | # Version info file template for Java 2 | # Must be included in the application via the JAR file. 3 | # Inclusion into JAR file is handle by ANT build script. 4 | 5 | # Automatically we changes these with a shell script. They are not committed back. 6 | # Do not change any of these default values below 7 | # They are to be changed automatically only by the build system 8 | # NOTE that only values available to the build system are used, 9 | # so they can be verified to be correct in the compiled application. 
#!ruby

# Usage: setversion.rb version_info_template version_info_automated.h
#
# Copies the version header template to the target file and stamps build and
# version information into it. When running on Jenkins (detected via the
# JENKINS_SERVER_COOKIE value) globally unique values are used, while local
# developer builds get less unique information.
# The generated target file should be on the git ignore list; the application
# should include both the default version_info.h and the generated file, so
# compilation fails if the generated file does not exist.
require "find"
require "fileutils"
require "open3"

# Read 'target', apply each old => new substitution in order, write it back.
# Order matters: e.g. 'unknown' must be replaced before a later substitution
# inserts the text 'unknown revision'.
def apply_substitutions(target, mapping)
  content = File.read(target)
  mapping.each { |old, new| content.gsub!(old, new) }
  File.open(target, "w+") { |f| f.write(content) }
end

# Parse input parameters: template file and target file (not validated).
if ARGV.length != 2
  puts <<-EOF
Please provide a template file and a target file

usage:
setversion.rb template_file target_file
  EOF
  abort("Wrong input parameters")
end
template = ARGV[0]
target = ARGV[1]
puts "Using:"
puts " templatefile - " + template
puts " targetfile - " + target

if File.exist?(template)
  FileUtils.cp template, target
else
  abort("Template file: " + template + " does not exist.")
end

# Note: by purpose this cookie is hardcoded, so only builds on the same known
# Jenkins server get unique version information. Building historical builds
# later, on another server, will not get version information without
# modification - that is by purpose! PROJECT_STAMP_VERSION can disable/enable
# stamping on Jenkins, and PROJECT_BUILD_NUMBER is used instead of Jenkins'
# BUILD_NUMBER to allow changing it a bit.
if ENV['JENKINS_SERVER_COOKIE'] == "uniquejenkinscookie-couldbecertcheckalso"
  # Guard explicitly: gsub! raises an opaque TypeError when handed nil.
  build_id = ENV['BUILD_ID'] || abort("BUILD_ID environment variable is required on Jenkins")
  if ENV['PROJECT_STAMP_VERSION'] == "yes"
    puts "Running on our JenkinsServer - setting version info to unique values"
    puts "Writing version information to file: " + target
    build_number = ENV['PROJECT_BUILD_NUMBER'] ||
                   abort("PROJECT_BUILD_NUMBER environment variable is required when PROJECT_STAMP_VERSION=yes")
    git_version = %x(git rev-parse --short HEAD).gsub(/\n/, '')
    puts "setversion setting git version: [" + git_version + "]"
    apply_substitutions(target,
                        'xxxx' => build_number,
                        'unknown' => 'jenkins',
                        '1970-01-01_00-00-00' => build_id,
                        'not_available' => git_version)
  else
    puts "Running on our JenkinsServer - by disabled setting unique values"
    puts "Writing version information to file: " + target
    apply_substitutions(target,
                        'xxxx' => 'ci-build',
                        'unknown' => 'jenkins',
                        '1970-01-01_00-00-00' => build_id,
                        'not_available' => 'unknown revision')
  end
else
  puts "Local/developer build (not Jenkins) therefore less unique version info"
  # USERNAME on Windows, USER elsewhere; fall back so gsub! never receives nil.
  user = ENV['USERNAME'] || ENV['USER'] || 'unknown-user'
  apply_substitutions(target,
                      'xxxx' => 'dev-snapshot',
                      'unknown' => user,
                      '1970-01-01_00-00-00' => Time.new.strftime("%Y-%m-%d_%H-%M-%S"),
                      'not_available' => 'unknown revision')
end

# Always stamp the PROJECT config information: a build-option marker that the
# application can use or ignore ("none" by default in the template). Its
# origin is the smart project (building either crawler or smart, the
# application uses this CONFIG to determine and reveal which one it is); it
# can also be used for TI projects building bootloader or non-bootloader
# versions. Replaced with the content of the environment variable
# PROJECT_CONFIG when that variable is set.
puts "Checking for PROJECT config information..."
yc = ENV['PROJECT_CONFIG'] # nil when unset; may be set but empty
if yc
  puts "Found PROJECT config information and inserting into automated version info"
  yc = "envVarEmpty" if yc.empty?
  apply_substitutions(target, 'none' => yc)
else
  puts "No PROJECT config information found in env. var. PROJECT_CONFIG, so not using it"
end
is not found, or a file"; 27 | exit 1; 28 | fi 29 | 30 | if [ "$JENKINS_SERVER_COOKIE" = "uniquejenkinscookie-couldbecertcheckalso" ]; then 31 | echo "Running on our JenkinsServer - setting version info to unique values"; 32 | echo "Writing version information to file: $OUT_FILE"; 33 | 34 | sed -i 's/#define BUILD_NUMBER "xxxx"/#define BUILD_NUMBER "'$BUILD_NUMBER'"/g' $OUT_FILE 35 | sed -i 's/#define BUILD_TAG "unknown"/#define BUILD_TAG "'$BUILD_TAG'"/g' $OUT_FILE 36 | sed -i 's/#define BUILD_DATE_TIME "1970-01-01_00-00-00"/#define BUILD_DATE_TIME "'$BUILD_ID'"/g' $OUT_FILE 37 | sed -i 's/#define BUILD_SCM_INFO "not_available"/#define BUILD_SCM_INFO "'$(git rev-list -n 1 HEAD)'"/g' $OUT_FILE 38 | # Not checking every search and replace sed-command output for exit code. Assumes it goes well. 39 | exit 0; 40 | else 41 | echo "Local/developer build (not Jenkins) therefore less unique version info"; 42 | # Note that we still substitute everything just to make sure we can see a difference to an 43 | # unmodified version_info_template. 44 | sed -i 's/#define BUILD_NUMBER "xxxx"/#define BUILD_NUMBER "dev-snapshot"/g' $OUT_FILE 45 | sed -i 's/#define BUILD_TAG "unknown"/#define BUILD_TAG "'$USER'"/g' $OUT_FILE 46 | sed -i 's/#define BUILD_DATE_TIME "1970-01-01_00-00-00"/#define BUILD_DATE_TIME "'$(date +%F_%T)'"/g' $OUT_FILE 47 | sed -i 's/#define BUILD_SCM_INFO "not_available"/#define BUILD_SCM_INFO "unknown revision"/g' $OUT_FILE 48 | # Not checking every search and replace sed-command output for exit code. Assumes it goes well. 
49 | exit 0; 50 | fi 51 | -------------------------------------------------------------------------------- /setversion/setversion_scmInDevCommits.rb: -------------------------------------------------------------------------------- 1 | #!ruby 2 | 3 | # compared to the normal setversion.rb this scripts add git sha 4 | # to developer builds including a dirty sign "+" if uncommited changes 5 | # It can not be used on windows, as git is not always in path 6 | 7 | # Usage: setversion.sh version_info_template version_info_automated.h 8 | # The script will copy the version header file template to the second file given 9 | # and insert build and version info automatically. 10 | # Script change the header file if running on Jenkins with global unique values, 11 | # while local developer build get less unique informations. 12 | # The automated file should be on the git ignore list, but the application should 13 | # depende on the includes from both the default version_info.h file, and the automated 14 | # one such the compilation will fail if the automated does not exists. 15 | require "find" 16 | require "fileutils" 17 | require "open3" 18 | 19 | # Parse input paramenter: 20 | # one mandatory settings file in ruby (not validating the file) 21 | if ARGV.length() != 2 then 22 | puts <<-EOF 23 | Please provide a settings file as only parameter 24 | 25 | usage: 26 | setversion.rb template_file target_file 27 | EOF 28 | abort("Wrong input parameters") 29 | else 30 | template = ARGV[0]; 31 | target = ARGV[1] 32 | # assume file is in correct format and load it 33 | puts "Using:" 34 | puts " templatefile - " + template 35 | puts " targetfile - " + target 36 | end 37 | 38 | if File.exist?(template) then 39 | FileUtils.cp template, target 40 | else 41 | abort("Template file: "+template +" did not exist.target!!!.") 42 | end 43 | 44 | # Note: By purpose this cookie is chosen to be hardcoded, so as long we 45 | # build on the same server we set version information. 
46 | # Building historical build later, on another server, will not be able 47 | # without modification to get version information. That by purpose! 48 | # Further by checking on PROJECT-STAMP-VERSION we can disable/enable 49 | # if version is applied when building on Jenkins. 50 | # The PROJECT-BUILD-NUMBER is instead of Jenkins BUILD_NUMBER and to allow 51 | # changing it a bit. 52 | if ENV['JENKINS_SERVER_COOKIE'] == "uniquejenkinscookie-couldbecertcheckalso" then 53 | if ENV['PROJECT_STAMP_VERSION'] == "yes" then 54 | puts "Running on our JenkinsServer - setting version info to unique values" 55 | puts "Writing version information to file: "+ target 56 | 57 | File.open(target){ |source_file| 58 | content = source_file.read 59 | content.gsub!('xxxx', ENV['PROJECT_BUILD_NUMBER']) 60 | content.gsub!('unknown', 'jenkins') 61 | content.gsub!('1970-01-01_00-00-00', ENV['BUILD_ID']) 62 | gitVersion = %x(git rev-parse --short HEAD).gsub(/\n/, '') 63 | puts "setversion setting git version: ["+gitVersion+"]" 64 | content.gsub!('not_available', gitVersion) 65 | File.open(target, "w+"){ |f| f.write(content)} 66 | } 67 | else 68 | puts "Running on our JenkinsServer - by disabled setting unique values" 69 | puts "Writing version information to file: "+ target 70 | 71 | File.open(target){ |source_file| 72 | content = source_file.read 73 | content.gsub!('xxxx', 'ci-build') 74 | content.gsub!('unknown', 'jenkins') 75 | content.gsub!('1970-01-01_00-00-00', ENV['BUILD_ID']) 76 | content.gsub!('not_available', 'unknown revision') 77 | File.open(target, "w+"){ |f| f.write(content)} 78 | } 79 | end 80 | 81 | else 82 | puts "Local/developer build (not Jenkins) therefore less unique version info"; 83 | time = Time.new 84 | if ENV['USERNAME'] == nil then 85 | user = ENV['USER'] 86 | else 87 | user = ENV['USERNAME'] 88 | end 89 | gitVersion = %x(git rev-parse --short HEAD).gsub(/\n/, '') 90 | puts "git version: ["+gitVersion+"]" 91 | # look if dirty 92 | %x(git status) 93 | puts %x(git 
status).include?("nothing to commit (working directory clean)") 94 | if (%x(git status).include?("nothing to commit (working directory clean)")) then 95 | dirty="" 96 | else 97 | dirty="+" 98 | end 99 | puts "dirty is " << dirty 100 | File.open(target){ |source_file| 101 | content = source_file.read 102 | content.gsub!('xxxx', 'dev-snapshot') 103 | content.gsub!('unknown', user) 104 | content.gsub!('1970-01-01_00-00-00', time.strftime("%Y-%m-%d_%H-%M-%S")) 105 | content.gsub!('not_available', gitVersion+dirty) 106 | #content.gsub!('not_available', 'unknown revision') 107 | File.open(target, "w+"){ |f| f.write(content)} 108 | } 109 | end 110 | -------------------------------------------------------------------------------- /setversion/version_info.h: -------------------------------------------------------------------------------- 1 | // Version info header file 2 | // Must be included in the application 3 | // This automated include file is made by a script and contains 4 | // the rest of the version information. 5 | #include "version_info_automated.h" 6 | 7 | 8 | 9 | // UPDATE THIS VERSION_NUMBER 10 | // It is supposed to be the next planned version of the main line 11 | // (master branch in git) 12 | #define VERSION_NUMBER "0.1.0" 13 | 14 | 15 | // Combination of information from this file and the automated one with 16 | // extended informaiton. 17 | #define VERSION_INFO VERSION_NUMBER "-" BUILD_NUMBER 18 | -------------------------------------------------------------------------------- /small-git-tricks/commits-per-subfolder.md: -------------------------------------------------------------------------------- 1 | # Find number of commits per subfolder 2 | 3 | Just wanted to share a oneliner I whipped up, for that use case where you want to analyse a big git repo 4 | and find out in which folder the activity has been. 5 | I.e. 
If you are considering splitting up a repo with a couple 100k commits, then it can be good to estimate 6 | the number of commits in the subfolders being split out. 7 | 8 | `for dir in ./*; do (echo "$dir " && git rev-list --count HEAD -- "$dir"); done` 9 | 10 | Yes, this also includes any direct files in root folder, but I didn’t bother excluding those. 11 | Feel free to improve and share back. 12 | -------------------------------------------------------------------------------- /testing-idea/functional/tests.inc: -------------------------------------------------------------------------------- 1 | transformAndRun 2 | -------------------------------------------------------------------------------- /testing-idea/functional/transformAndRun.bats: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bats 2 | 3 | @test "invoking transformAndRun without configuration file prints and error" { 4 | run groovy ../transformAndRun/transformAndRun.groovy 5 | [ "$status" -eq 1 ] 6 | [[ "$output" =~ "Missing YAML configuration file argument" ]] 7 | } 8 | -------------------------------------------------------------------------------- /testing-idea/readme.md: -------------------------------------------------------------------------------- 1 | # Tests 2 | 3 | This just describes an idea of testing code-utils. 4 | 5 | Idea: Be able to test all our code-utils, as a _black box_ - the say we use them on Jenkins and the automated setups. 6 | 7 | So typically this means, but is not limited to, calling a script with parameters. 8 | 9 | ## How to 10 | 11 | This directory contain a test script, to run tests towards the different scripts in the repository. 12 | 13 | Test script takes two parameters: 14 | 15 | * platform: Windows or Linux 16 | * testsuite: functional (later maybe unit, regression ...) 
17 | 18 | Since this is to be executed from a Jenkins job, upon changes in the repository, it needs to be cross platform as we want tests to run on the two platforms. 19 | 20 | groovy run_tests.groovy $platform $testsuite 21 | 22 | The groovy script will execute, based on platform either: 23 | 24 | ./run_tests_$platform.sh $testsuite 25 | ./run_tests_$platform.bat $testsuite 26 | 27 | The script will chose which tests to include in a test suite based on a include file (which is cross-platform): $testsuite/tests.inc`. E.g. `functional/tests.inc`. 28 | 29 | 30 | 31 | ## Info 32 | 33 | Unit testing in Bash: 34 | 35 | * http://stackoverflow.com/a/1339454 36 | * http://stackoverflow.com/a/27859950 37 | * http://stackoverflow.com/a/14009705 38 | * http://testanything.org/producers.html 39 | * https://github.com/sstephenson/bats 40 | 41 | TAP: 42 | > If Bats is not connected to a terminal—in other words, if you run it from a continuous integration system, or redirect its output to a file—the results are displayed in human-readable, machine-parsable TAP format. 43 | https://github.com/sstephenson/bats#running-tests 44 | 45 | 46 | Windows ??? 
47 | 48 | * http://blog.pluralsight.com/test-powershell-with-whatif 49 | 50 | -------------------------------------------------------------------------------- /testing-idea/run_tests_Linux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Usage: run_tests_Linux.sh $testsuite 4 | # E.g.: run_tests_Linux.sh functional 5 | 6 | TESTSUITE=$1 7 | 8 | # Directory of this script 9 | SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 10 | 11 | for l in `cat $TESTSUITE/tests.inc` 12 | do 13 | $SCRIPT_DIR/$TESTSUITE/$l.bats 14 | # FIXME - catch exception, and continue with next run.sh if it fails 15 | # run scripts are supposed to report in well know test format to jenkins 16 | done 17 | -------------------------------------------------------------------------------- /transformAndRun/config.yml: -------------------------------------------------------------------------------- 1 | files: 2 | - [story.txt, transform.yml] 3 | commands: 4 | - echo $USER is going to tell you a wonderful story. 5 | - echo This is a story involving $ENV_SECRET_WEAPON! 6 | - type story.txt 7 | -------------------------------------------------------------------------------- /transformAndRun/story.txt: -------------------------------------------------------------------------------- 1 | my-boss-$CLIENT cannot $DESIRED_ACTION(TM) yet. $SUPERIOR told him to $REQUIRED_TASK first. 2 | Luckily, $CLIENT can use his secret weapon, $ENV_SECRET_WEAPON, to $DESIRED_ACTION without having to $REQUIRED_TASK. 3 | 4 | I have a ${WEIRDNAME}.zip file I need to use! 5 | Please not using dollarsignWEIRDNAME without brackets do not work either. 
6 | -------------------------------------------------------------------------------- /transformAndRun/transform.yml: -------------------------------------------------------------------------------- 1 | CLIENT: Jimmy 2 | DESIRED_ACTION: play video games 3 | SUPERIOR: mother 4 | REQUIRED_TASK: clean his room 5 | WEIRDNAME: crayzfilename-with-postfix_and_misc 6 | --------------------------------------------------------------------------------