├── jira-users-report ├── .tool-versions ├── user-report.sh └── Jenkinsfile ├── .tool-versions ├── artifactory-users-report ├── .tool-versions ├── user-report.sh └── Jenkinsfile ├── maintainers-info-report ├── .tool-versions ├── maintainers-info-report.sh └── Jenkinsfile ├── Jenkinsfile_updatecli ├── updatecli ├── values.yaml └── updatecli.d │ ├── ruby.yaml │ └── nodejs.yaml ├── fork-report ├── Gemfile ├── Gemfile.lock ├── Jenkinsfile └── fork-report.rb ├── permissions-report ├── Gemfile ├── Gemfile.lock ├── Jenkinsfile └── permissions-report.rb ├── jenkins-infra-data ├── generate-infra-data.sh ├── Jenkinsfile ├── get-jenkins-io-data.json ├── updates-jenkins-io_mirrors.sh └── get-jenkins-io_mirrors.sh ├── plugin-health-scoring ├── Jenkinsfile └── fetch-report.sh ├── .gitignore └── README.md /jira-users-report/.tool-versions: -------------------------------------------------------------------------------- 1 | jq 1.6 2 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | ruby 3.4.7 2 | nodejs 22.21.1 3 | -------------------------------------------------------------------------------- /artifactory-users-report/.tool-versions: -------------------------------------------------------------------------------- 1 | jq 1.6 2 | -------------------------------------------------------------------------------- /maintainers-info-report/.tool-versions: -------------------------------------------------------------------------------- 1 | jq 1.6 2 | -------------------------------------------------------------------------------- /Jenkinsfile_updatecli: -------------------------------------------------------------------------------- 1 | updatecli(action: 'diff') 2 | if (env.BRANCH_IS_PRIMARY) { 3 | updatecli(action: 'apply', cronTriggerExpression: '@daily') 4 | } 5 | -------------------------------------------------------------------------------- /updatecli/values.yaml: -------------------------------------------------------------------------------- 1 | github: 2 | user: "Jenkins Infra Bot (updatecli)" 3 | email: "60776566+jenkins-infra-bot@users.noreply.github.com" 4 | token: "UPDATECLI_GITHUB_TOKEN" 5 | branch: "main" 6 | owner: "jenkins-infra" 7 | repository: "infra-reports" 8 | -------------------------------------------------------------------------------- /fork-report/Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source "https://rubygems.org" 4 | 5 | git_source(:github) {|repo_name| "https://github.com/#{repo_name}" } 6 | 7 | gem 'graphql-client' 8 | gem 'httparty' 9 | gem 'json' 10 | 11 | gem "csv", "~> 3.3" 12 | 13 | gem "logger", "~> 1.6" 14 | -------------------------------------------------------------------------------- /artifactory-users-report/user-report.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | 6 | curl -X GET -H 'Content-Length: 0' -u "${ARTIFACTORY_AUTH}" "https://repo.jenkins-ci.org/api/security/users" > artifactory-users-raw.json 7 | jq 'map(select(.realm | test("ldap"))) | [ .[].name ] | sort' artifactory-users-raw.json 8 | -------------------------------------------------------------------------------- /permissions-report/Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 
"https://rubygems.org" 4 | 5 | git_source(:github) {|repo_name| "https://github.com/#{repo_name}" } 6 | 7 | gem 'graphql-client' 8 | gem 'httparty' 9 | gem 'json' 10 | gem 'openssl' 11 | gem 'jwt' 12 | gem 'time' 13 | gem 'base64' 14 | 15 | gem "csv", "~> 3.3" 16 | 17 | gem "logger", "~> 1.6" 18 | -------------------------------------------------------------------------------- /jira-users-report/user-report.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | 7 | declare -a ALL_USERS 8 | 9 | URL="https://issues.jenkins.io/rest/api/2/group/member?groupname=jira-users" 10 | while true ; do 11 | echo "Querying URL: $URL" >&2 12 | curl --silent --fail -u "$JIRA_AUTH" "$URL" > jira-users-tmp.json 13 | 14 | output="$( jq '.values[] | .name' jira-users-tmp.json )" # Deliberately not raw because INFRA-2924 15 | readarray -t USERS <<< "$output" 16 | 17 | ALL_USERS+=( "${USERS[@]}" ) 18 | 19 | done="$( jq --raw-output '.isLast' jira-users-tmp.json )" 20 | if [[ "$done" = "true" ]] ; then 21 | break 22 | fi 23 | 24 | # Next URL is part of the output 25 | URL="$( jq --raw-output '.nextPage' jira-users-tmp.json )" 26 | done 27 | 28 | echo "${ALL_USERS[@]}" | jq --slurp '.' 29 | -------------------------------------------------------------------------------- /maintainers-info-report/maintainers-info-report.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | 7 | LIST_URL='https://reports.jenkins.io/maintainers.index.json' 8 | echo "Querying URL: $LIST_URL" >&2 9 | curl --silent --fail -u "$JIRA_AUTH" "$LIST_URL" > maintainers.index.json 10 | 11 | declare -a MAINTAINERS_LIST 12 | output="$( jq --raw-output '.[][]' maintainers.index.json | sort -u )" # Despite INFRA-2924 we can use raw because maintainers cannot have spaces per RPU 13 | readarray -t MAINTAINERS_LIST <<< "$output" 14 | 15 | echo '' > jira-users-list-tmp.json # clear file from any previous executions 16 | for USERNAME in "${MAINTAINERS_LIST[@]}" ; do 17 | 18 | URL="https://issues.jenkins.io/rest/api/2/user?username=$USERNAME" 19 | echo "Querying URL: $URL" >&2 20 | curl --silent --fail -u "$JIRA_AUTH" "$URL" > jira-user-tmp.json 21 | 22 | jq '{ name, displayName }' jira-user-tmp.json >> jira-users-list-tmp.json 23 | done 24 | 25 | jq --slurp '.' jira-users-list-tmp.json 26 | -------------------------------------------------------------------------------- /jenkins-infra-data/generate-infra-data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # generate-infra-data.sh: Generate a JSON report named $1 in the directory $2 (with a optional version $3). 3 | # Note: This script orchestrate generation by "parts": there are sub-scripts per services which generate partial content. 
4 | 5 | set -o nounset 6 | set -o errexit 7 | set -o pipefail 8 | set -x 9 | 10 | REPORT_NAME="$1" 11 | test -n "${REPORT_NAME}" 12 | DIST_DIR="${2%/}" 13 | test -n "${DIST_DIR}" 14 | rm -rf "${DIST_DIR}" 15 | mkdir -p "${DIST_DIR}" 16 | # Sub scripts need this 17 | export DIST_DIR 18 | VERSION="${3}" 19 | 20 | command -v "date" >/dev/null || { echo "[ERROR] no 'date' command found."; exit 1; } 21 | command -v "jq" >/dev/null || { echo "[ERROR] no 'jq' command found."; exit 1; } 22 | 23 | json="$(echo '{}' | jq --compact-output \ 24 | --argjson updatesJenkinsIoData "$(bash ./updates-jenkins-io_mirrors.sh)" \ 25 | --argjson getJenkinsIoData "$(bash ./get-jenkins-io_mirrors.sh)" \ 26 | --arg lastUpdate "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ 27 | --arg version "${VERSION}" \ 28 | '. + {"lastUpdate": $lastUpdate, "version": $version, "updates.jenkins.io": $updatesJenkinsIoData, "get.jenkins.io": $getJenkinsIoData}' \ 29 | )" 30 | 31 | ## Write report 32 | echo "${json}" > "${DIST_DIR}/${REPORT_NAME}" 33 | -------------------------------------------------------------------------------- /jira-users-report/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? 'H H/2 * * *' : '' 2 | def reportName = 'jira-users-report.json' 3 | 4 | pipeline { 5 | triggers { 6 | cron(cronExpr) 7 | } 8 | options { 9 | // This pipeline takes ~1.5 hours to run 10 | timeout(time: 2, unit: 'HOURS') 11 | lock(resource: "jira-users-report-${env.BRANCH_NAME}", inversePrecedence: true) 12 | buildDiscarder logRotator(daysToKeepStr: '90') 13 | } 14 | agent { 15 | label 'jnlp-linux-arm64' 16 | } 17 | stages { 18 | stage('Generate JIRA Users Report') { 19 | when { 20 | anyOf { 21 | changeset 'jira-users-report/**/*' 22 | expression { env.BRANCH_IS_PRIMARY } 23 | } 24 | } 25 | environment { 26 | JIRA_AUTH = credentials('jiraAuth') 27 | REPORT_NAME = "${reportName}" 28 | } 29 | steps { 30 | dir('jira-users-report') { 31 | sh 'bash ./user-report.sh > "${REPORT_NAME}"' 32 | archiveArtifacts reportName 33 | } 34 | } 35 | } 36 | stage('Publish JIRA Users Report') { 37 | when { 38 | expression { env.BRANCH_IS_PRIMARY } 39 | } 40 | steps { 41 | dir('jira-users-report') { 42 | publishReports ([reportName]) 43 | } 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /maintainers-info-report/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? 
'@daily' : '' 2 | def reportName = 'maintainers-info-report.json' 3 | 4 | pipeline { 5 | triggers { 6 | cron(cronExpr) 7 | } 8 | options { 9 | // This pipeline takes 30-40 minutes to execute 10 | timeout(time: 1, unit: 'HOURS') 11 | lock(resource: "maintainers-info-report-${env.BRANCH_NAME}", inversePrecedence: true) 12 | buildDiscarder logRotator(daysToKeepStr: '90') 13 | } 14 | agent { 15 | label 'jnlp-linux-arm64' 16 | } 17 | stages { 18 | stage('Generate Maintainers Info Report') { 19 | when { 20 | anyOf { 21 | changeset 'maintainers-info-report/**/*' 22 | expression { env.BRANCH_IS_PRIMARY } 23 | } 24 | } 25 | environment { 26 | JIRA_AUTH = credentials('jiraAuth') 27 | REPORT_NAME = "${reportName}" 28 | } 29 | steps { 30 | dir('maintainers-info-report') { 31 | sh 'bash ./maintainers-info-report.sh > "${REPORT_NAME}"' 32 | archiveArtifacts reportName 33 | } 34 | } 35 | } 36 | stage('Publish Maintainers Info Report') { 37 | when { 38 | expression { env.BRANCH_IS_PRIMARY } 39 | } 40 | steps { 41 | dir('maintainers-info-report') { 42 | publishReports ([reportName]) 43 | } 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /artifactory-users-report/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? '@hourly' : '' 2 | def reportName = 'artifactory-ldap-users-report.json' 3 | 4 | pipeline { 5 | triggers { 6 | cron(cronExpr) 7 | } 8 | options { 9 | // This pipeline takes 1-2 minutes max to execute 10 | timeout(time: 10, unit: 'MINUTES') 11 | lock(resource: "artifactory-ldap-users-report-${env.BRANCH_NAME}", inversePrecedence: true) 12 | buildDiscarder logRotator(daysToKeepStr: '90') 13 | } 14 | agent { 15 | label 'jnlp-linux-arm64' 16 | } 17 | stages { 18 | stage('Generate Artifactory Users Report') { 19 | when { 20 | anyOf { 21 | changeset 'artifactory-users-report/**/*' 22 | expression { env.BRANCH_IS_PRIMARY } 23 | } 24 | } 25 | environment { 26 | ARTIFACTORY_AUTH = credentials('artifactoryAdmin') 27 | REPORT_NAME = "${reportName}" 28 | } 29 | steps { 30 | dir('artifactory-users-report') { 31 | sh 'bash ./user-report.sh > "${REPORT_NAME}"' 32 | archiveArtifacts reportName 33 | } 34 | } 35 | } 36 | stage('Publish Artifactory Users Report') { 37 | when { 38 | expression { env.BRANCH_IS_PRIMARY } 39 | } 40 | steps { 41 | dir('artifactory-users-report') { 42 | publishReports ([reportName]) 43 | } 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /fork-report/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | activesupport (8.0.2) 5 | base64 6 | benchmark (>= 0.3) 7 | bigdecimal 8 | concurrent-ruby (~> 1.0, >= 1.3.1) 9 | connection_pool (>= 2.2.5) 10 | drb 11 | i18n (>= 1.6, < 2) 12 | logger (>= 1.4.2) 13 | minitest (>= 5.1) 14 | securerandom (>= 0.3) 15 | tzinfo (~> 2.0, >= 2.0.5) 16 | uri (>= 0.13.1) 17 | base64 (0.2.0) 18 | benchmark (0.4.0) 19 | bigdecimal (3.1.9) 20 | concurrent-ruby (1.3.5) 21 | connection_pool (2.5.0) 22 | csv (3.3.3) 23 | drb (2.2.1) 24 | fiber-storage (1.0.0) 25 | graphql (2.4.15) 26 | base64 27 | fiber-storage 28 | logger 29 | graphql-client (0.25.0) 30 | activesupport (>= 3.0) 31 | graphql (>= 1.13.0) 32 | httparty (0.22.0) 33 | csv 34 | mini_mime (>= 1.0.0) 35 | multi_xml (>= 0.5.2) 36 | i18n (1.14.7) 37 | concurrent-ruby (~> 1.0) 38 | json (2.10.2) 39 | 
logger (1.6.6) 40 | mini_mime (1.1.5) 41 | minitest (5.25.5) 42 | multi_xml (0.7.1) 43 | bigdecimal (~> 3.1) 44 | securerandom (0.4.1) 45 | tzinfo (2.0.6) 46 | concurrent-ruby (~> 1.0) 47 | uri (1.0.3) 48 | 49 | PLATFORMS 50 | ruby 51 | 52 | DEPENDENCIES 53 | csv (~> 3.3) 54 | graphql-client 55 | httparty 56 | json 57 | logger (~> 1.6) 58 | 59 | BUNDLED WITH 60 | 2.5.7 61 | -------------------------------------------------------------------------------- /fork-report/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? '@daily' : '' 2 | def reportName = 'github-jenkinsci-fork-report.json' 3 | 4 | pipeline { 5 | triggers { 6 | cron(cronExpr) 7 | } 8 | options { 9 | // This pipeline takes 1-2 minutes max to execute 10 | timeout(time: 10, unit: 'MINUTES') 11 | lock(resource: "github-jenkinsci-fork-report-${env.BRANCH_NAME}", inversePrecedence: true) 12 | buildDiscarder logRotator(daysToKeepStr: '90') 13 | } 14 | agent { 15 | label 'jnlp-linux-arm64' 16 | } 17 | stages { 18 | stage('Generate GitHub Forks Report') { 19 | when { 20 | anyOf { 21 | changeset 'fork-report/**/*' 22 | expression { env.BRANCH_IS_PRIMARY } 23 | } 24 | } 25 | environment { 26 | // Requires 'jenkins-infra-reports' to be of type GithubAppCredentials so $GITHUB_AUTH_PSW holds an IAT (Github Installation Access Token) valid for 1 hour 27 | GITHUB_AUTH = credentials('jenkins-infra-reports') 28 | REPORT_NAME = "${reportName}" 29 | } 30 | steps { 31 | dir('fork-report') { 32 | sh 'bundle install' 33 | sh 'bundle exec ruby ./fork-report.rb> "${REPORT_NAME}"' 34 | archiveArtifacts reportName 35 | } 36 | } 37 | } 38 | stage('Publish GitHub Forks Report') { 39 | when { 40 | expression { env.BRANCH_IS_PRIMARY } 41 | } 42 | steps { 43 | dir('fork-report') { 44 | publishReports ([reportName]) 45 | } 46 | } 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /permissions-report/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | activesupport (8.0.2) 5 | base64 6 | benchmark (>= 0.3) 7 | bigdecimal 8 | concurrent-ruby (~> 1.0, >= 1.3.1) 9 | connection_pool (>= 2.2.5) 10 | drb 11 | i18n (>= 1.6, < 2) 12 | logger (>= 1.4.2) 13 | minitest (>= 5.1) 14 | securerandom (>= 0.3) 15 | tzinfo (~> 2.0, >= 2.0.5) 16 | uri (>= 0.13.1) 17 | base64 (0.2.0) 18 | benchmark (0.4.0) 19 | bigdecimal (3.1.9) 20 | concurrent-ruby (1.3.5) 21 | connection_pool (2.5.0) 22 | csv (3.3.3) 23 | date (3.4.1) 24 | drb (2.2.1) 25 | fiber-storage (1.0.0) 26 | graphql (2.4.15) 27 | base64 28 | fiber-storage 29 | logger 30 | graphql-client (0.25.0) 31 | activesupport (>= 3.0) 32 | graphql (>= 1.13.0) 33 | httparty (0.22.0) 34 | csv 35 | mini_mime (>= 1.0.0) 36 | multi_xml (>= 0.5.2) 37 | i18n (1.14.7) 38 | concurrent-ruby (~> 1.0) 39 | json (2.10.2) 40 | jwt (2.10.1) 41 | base64 42 | logger (1.6.6) 43 | mini_mime (1.1.5) 44 | minitest (5.25.5) 45 | multi_xml (0.7.1) 46 | bigdecimal (~> 3.1) 47 | openssl (3.3.0) 48 | securerandom (0.4.1) 49 | time (0.4.1) 50 | date 51 | tzinfo (2.0.6) 52 | concurrent-ruby (~> 1.0) 53 | uri (1.0.3) 54 | 55 | PLATFORMS 56 | ruby 57 | 58 | DEPENDENCIES 59 | base64 60 | csv (~> 3.3) 61 | graphql-client 62 | httparty 63 | json 64 | jwt 65 | logger (~> 1.6) 66 | openssl 67 | time 68 | 69 | BUNDLED WITH 70 | 2.5.7 71 | -------------------------------------------------------------------------------- 
/permissions-report/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? '@daily' : '' 2 | def reportName = 'github-jenkinsci-permissions-report.json' 3 | 4 | pipeline { 5 | triggers { 6 | cron(cronExpr) 7 | } 8 | options { 9 | // This pipeline takes 9-10 hours max to execute 10 | timeout(time: 15, unit: 'HOURS') 11 | lock(resource: "github-jenkinsci-permissions-report-${env.BRANCH_NAME}", inversePrecedence: true) 12 | buildDiscarder logRotator(daysToKeepStr: '90') 13 | } 14 | agent { 15 | label 'jnlp-linux-arm64' 16 | } 17 | stages { 18 | stage('Generate GitHub Permissions Report') { 19 | when { 20 | anyOf { 21 | changeset 'permissions-report/**/*' 22 | expression { env.BRANCH_IS_PRIMARY } 23 | } 24 | } 25 | environment { 26 | GITHUB_APP_PRIVATE_KEY_B64 = credentials('githubapp-jenkins-infra-reports-private-key-b64') 27 | GITHUB_APP_ID = credentials('githubapp-jenkins-infra-reports-app-identifier') 28 | GITHUB_ORG_NAME = "jenkinsci" 29 | REPORT_NAME = "${reportName}" 30 | } 31 | steps { 32 | dir('permissions-report') { 33 | sh 'bundle install' 34 | sh 'bundle exec ruby ./permissions-report.rb > "${REPORT_NAME}"' 35 | archiveArtifacts reportName 36 | } 37 | } 38 | } 39 | stage('Publish GitHub Permissions Report') { 40 | when { 41 | expression { env.BRANCH_IS_PRIMARY } 42 | } 43 | steps { 44 | dir('permissions-report') { 45 | publishReports ([reportName]) 46 | } 47 | } 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /plugin-health-scoring/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? '@hourly' : '' 2 | def reportsFolder = 'plugin-health-scoring' 3 | def reportFile = 'scores.json' 4 | def reportLines = 0 5 | 6 | pipeline { 7 | agent { 8 | label 'jnlp-linux-arm64' 9 | } 10 | 11 | environment { 12 | REPORTS_FOLDER = "${reportsFolder}" 13 | REPORT_FILE = "${reportFile}" 14 | PHS_API_URL = 'https://plugin-health.jenkins.io/api/scores' 15 | } 16 | 17 | options { 18 | buildDiscarder logRotator(daysToKeepStr: '90') 19 | lock(resource: "plugin-health-scoring-${env.BRANCH_NAME}", inversePrecedence: true) 20 | timeout(time: 5, unit: 'MINUTES') 21 | disableConcurrentBuilds() 22 | } 23 | 24 | triggers { 25 | cron( cronExpr ) 26 | } 27 | 28 | stages { 29 | stage('Generate Report') { 30 | when { 31 | anyOf { 32 | changeset 'plugin-health-scoring/**/*' 33 | expression { env.BRANCH_IS_PRIMARY } 34 | } 35 | } 36 | steps { 37 | dir('plugin-health-scoring') { 38 | sh 'bash fetch-report.sh' 39 | archiveArtifacts reportFile 40 | } 41 | } 42 | } 43 | 44 | stage('Publish if report not empty') { 45 | when { 46 | anyOf { 47 | expression { env.BRANCH_IS_PRIMARY } 48 | } 49 | } 50 | steps { 51 | dir('plugin-health-scoring') { 52 | script { 53 | reportLines = sh(returnStdout:true, script: ''' 54 | wc -l "${REPORT_FILE}" | awk '{print $1}' 55 | ''').trim() 56 | echo "Found reportLines=${reportLines}" 57 | if (reportLines > 0) { 58 | publishReports([ "${reportsFolder}/${reportFile}" ]) 59 | } 60 | } 61 | } 62 | } 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /updatecli/updatecli.d/ruby.yaml: -------------------------------------------------------------------------------- 1 | name: Bump Ruby version in ASDF tools 2 | 3 | scms: 4 | default: 5 | kind: github 6 | spec: 7 | user: "{{ .github.user }}" 8 | email: "{{ .github.email }}" 9 | 
owner: "{{ .github.owner }}" 10 | repository: "{{ .github.repository }}" 11 | token: "{{ requiredEnv .github.token }}" 12 | username: "{{ .github.username }}" 13 | branch: "{{ .github.branch }}" 14 | 15 | sources: 16 | getInfraCIPackerImageVersion: 17 | kind: file 18 | name: Retrieve the current version of the Packer images used in production on infra.ci.jenkins.io 19 | spec: 20 | file: https://raw.githubusercontent.com/jenkins-infra/kubernetes-management/main/config/jenkins_infra.ci.jenkins.io.yaml 21 | # Prefiltering to avoid verbose output 22 | matchpattern: 'galleryImageVersion:\s"(.*)"' 23 | transformers: 24 | - findsubmatch: 25 | pattern: 'galleryImageVersion:\s"(.*)"' 26 | captureindex: 1 27 | getRubyVersionFromPackerImages: 28 | kind: yaml 29 | name: Get the latest Ruby version set in packer-images 30 | dependson: 31 | - getInfraCIPackerImageVersion 32 | spec: 33 | file: https://raw.githubusercontent.com/jenkins-infra/packer-images/{{ source "getInfraCIPackerImageVersion" }}/provisioning/tools-versions.yml 34 | key: $.ruby_version 35 | 36 | targets: 37 | updateAsdfToolsVersion: 38 | name: Update Ruby in the ASDF tools version file 39 | kind: file 40 | disablesourceinput: true 41 | spec: 42 | file: .tool-versions 43 | matchpattern: 'ruby\s(.*)' 44 | replacepattern: 'ruby {{ source "getRubyVersionFromPackerImages" }}' 45 | scmid: default 46 | 47 | actions: 48 | default: 49 | kind: github/pullrequest 50 | scmid: default 51 | spec: 52 | title: Bump Ruby version in ASDF tools to {{ source "getRubyVersionFromPackerImages" }} 53 | labels: 54 | - chore 55 | - ruby 56 | -------------------------------------------------------------------------------- /updatecli/updatecli.d/nodejs.yaml: -------------------------------------------------------------------------------- 1 | name: Bump NodeJS version in ASDF tools 2 | 3 | scms: 4 | default: 5 | kind: github 6 | spec: 7 | user: "{{ .github.user }}" 8 | email: "{{ .github.email }}" 9 | owner: "{{ .github.owner }}" 10 | repository: "{{ .github.repository }}" 11 | token: "{{ requiredEnv .github.token }}" 12 | username: "{{ .github.username }}" 13 | branch: "{{ .github.branch }}" 14 | 15 | sources: 16 | getInfraCIPackerImageVersion: 17 | kind: file 18 | name: Retrieve the current version of the Packer images used in production on infra.ci.jenkins.io 19 | spec: 20 | file: https://raw.githubusercontent.com/jenkins-infra/kubernetes-management/main/config/jenkins_infra.ci.jenkins.io.yaml 21 | # Prefiltering to avoid verbose output 22 | matchpattern: 'galleryImageVersion:\s"(.*)"' 23 | transformers: 24 | - findsubmatch: 25 | pattern: 'galleryImageVersion:\s"(.*)"' 26 | captureindex: 1 27 | getNodeJSVersionFromPackerImages: 28 | kind: yaml 29 | name: Get the latest NodeJS version set in packer-images 30 | dependson: 31 | - getInfraCIPackerImageVersion 32 | spec: 33 | file: https://raw.githubusercontent.com/jenkins-infra/packer-images/{{ source "getInfraCIPackerImageVersion" }}/provisioning/tools-versions.yml 34 | key: $.nodejs_linux_version 35 | 36 | targets: 37 | updateAsdfToolsVersion: 38 | name: Update NodeJS in the ASDF tools version file 39 | kind: file 40 | disablesourceinput: true 41 | spec: 42 | file: .tool-versions 43 | matchpattern: 'nodejs\s(.*)' 44 | replacepattern: 'nodejs {{ source "getNodeJSVersionFromPackerImages" }}' 45 | scmid: default 46 | 47 | actions: 48 | default: 49 | kind: github/pullrequest 50 | scmid: default 51 | spec: 52 | title: Bump NodeJS version in ASDF tools to {{ source "getNodeJSVersionFromPackerImages" }} 53 | labels: 54 | 
- chore 55 | - nodejs 56 | -------------------------------------------------------------------------------- /plugin-health-scoring/fetch-report.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euxo pipefail 3 | 4 | # 5 | # MIT License 6 | # 7 | # Copyright (c) 2024 Jenkins Infra 8 | # 9 | # Permission is hereby granted, free of charge, to any person obtaining a copy 10 | # of this software and associated documentation files (the "Software"), to deal 11 | # in the Software without restriction, including without limitation the rights 12 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | # copies of the Software, and to permit persons to whom the Software is 14 | # furnished to do so, subject to the following conditions: 15 | # 16 | # The above copyright notice and this permission notice shall be included in all 17 | # copies or substantial portions of the Software. 18 | # 19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 25 | # SOFTWARE. 26 | # 27 | 28 | ## Check for requirements 29 | for cli in curl jq mkdir cp 30 | do 31 | command -v "${cli}" || { echo "ERROR: command $cli is missing. Exiting."; exit 1; } 32 | done 33 | 34 | : "${REPORTS_FOLDER?Environment variable 'REPORTS_FOLDER' unset}" 35 | : "${REPORT_FILE?Environment variable 'REPORT_FILE' unset}" 36 | : "${PHS_API_URL?Environment variable 'PHS_API_URL' unset}" 37 | 38 | ### 39 | # using --compact-output to reduce output file by half. 40 | # adding report generation date in 'lastUpdate' key of the report. 41 | ### 42 | curl --location --fail --silent --show-error "${PHS_API_URL}" \ 43 | | jq --compact-output '. + { lastUpdate: (now | todate) }' > "${REPORT_FILE}" 44 | 45 | mkdir -p "${REPORTS_FOLDER}" 46 | cp "${REPORT_FILE}" "${REPORTS_FOLDER}/" 47 | -------------------------------------------------------------------------------- /jenkins-infra-data/Jenkinsfile: -------------------------------------------------------------------------------- 1 | def cronExpr = env.BRANCH_IS_PRIMARY ? 
'@hourly' : '' 2 | def reportName = 'index.json' 3 | def version = 'v3' 4 | def infraBaseFolder = 'infrastructure' 5 | def reportFolder = "${infraBaseFolder}/${version}" 6 | 7 | pipeline { 8 | triggers { 9 | cron(cronExpr) 10 | } 11 | options { 12 | // This pipeline takes 1-2 minutes max to execute 13 | timeout(time: 10, unit: 'MINUTES') 14 | lock(resource: "jenkins-infra-data-${env.BRANCH_NAME}", inversePrecedence: true) 15 | buildDiscarder logRotator(daysToKeepStr: '90') 16 | } 17 | agent { 18 | label 'jnlp-linux-arm64' 19 | } 20 | environment { 21 | DIST_DIR = "${WORKSPACE}/jenkins-infra-data/dist" 22 | PUBLISHED_REPORT = "${reportFolder}/${reportName}" 23 | REPORT_FOLDER = "${reportFolder}" 24 | REPORT_NAME = "${reportName}" 25 | VERSION = "${version}" 26 | INFRA_BASE_FOLDER = "${infraBaseFolder}" 27 | } 28 | stages { 29 | stage('Generate Jenkins Infrastructure Public Data report') { 30 | when { 31 | anyOf { 32 | changeset 'jenkins-infra-data/**/*' 33 | expression { env.BRANCH_IS_PRIMARY } 34 | } 35 | } 36 | steps { 37 | dir('jenkins-infra-data') { 38 | sh './generate-infra-data.sh "${REPORT_NAME}" "${DIST_DIR}" "${VERSION}"' 39 | sh 'ls -ltr "${DIST_DIR}"' 40 | 41 | archiveArtifacts artifacts: 'dist/**' 42 | } 43 | } 44 | } 45 | stage('Publish Jenkins Infrastructure Public Data report') { 46 | when { 47 | expression { env.BRANCH_IS_PRIMARY } 48 | } 49 | steps { 50 | dir('jenkins-infra-data') { 51 | sh ''' 52 | # Prepare directory/file structure for reports publication 53 | mkdir -p "${REPORT_FOLDER}" 54 | cp "${DIST_DIR}/${REPORT_NAME}" "${PUBLISHED_REPORT}" 55 | cp -r "${DIST_DIR}" "${INFRA_BASE_FOLDER}/latest" 56 | ''' 57 | 58 | publishReports ([env.INFRA_BASE_FOLDER]) 59 | } 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /jenkins-infra-data/get-jenkins-io-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "mirrors": { 3 | "ftp.halifax.rwth-aachen.de": { 4 | "outbound_ipv4": [ 5 | "137.226.34.46" 6 | ], 7 | "outbound_ipv6": [ 8 | "2a00:8a60:e012:a00::21" 9 | ] 10 | }, 11 | "ftp.belnet.be": { 12 | "outbound_ipv4": [ 13 | "193.190.198.27" 14 | ], 15 | "outbound_ipv6": [ 16 | "2001:6a8:3c80::27" 17 | ] 18 | }, 19 | "ftp-chi.osuosl.org": {}, 20 | "ftp-nyc.osuosl.org": {}, 21 | "mirror.xmission.com": {}, 22 | "mirrors.tuna.tsinghua.edu.cn": {}, 23 | "sg.mirror.servanamanaged.com": { 24 | "outbound_ipv4": [ 25 | "172.104.164.58" 26 | ], 27 | "outbound_ipv6": [ 28 | "2400:8901::2000:78ff:fee6:73fb" 29 | ] 30 | }, 31 | "2.mirrors.in.sahilister.net": { 32 | "outbound_ipv4": [ 33 | "82.180.146.159" 34 | ], 35 | "outbound_ipv6": [ 36 | "2400:d321:2217:1599::1" 37 | ] 38 | }, 39 | "mirror.bom.albony.in": {}, 40 | "mirror.bom2.albony.in": {}, 41 | "mirror.twds.com.tw": { 42 | "outbound_ipv4": [ 43 | "103.147.22.36" 44 | ], 45 | "outbound_ipv6": [ 46 | "2405:a640::36" 47 | ] 48 | }, 49 | "repo.jing.rocks": { 50 | "outbound_ipv4": [ 51 | "106.178.112.231" 52 | ], 53 | "outbound_ipv6": [ 54 | "240b:10:f00:1b00::/56" 55 | ] 56 | }, 57 | "mirror.ossplanet.net": { 58 | "outbound_ipv4": [ 59 | "163.22.17.70" 60 | ], 61 | "outbound_ipv6": [ 62 | "2001:e10:6840:17::70" 63 | ] 64 | }, 65 | "mirror.fi.ossplanet.net": { 66 | "outbound_ipv4": [ 67 | "65.21.197.134" 68 | ], 69 | "outbound_ipv6": [ 70 | "2a01:4f9:3b:1f22:5::2" 71 | ] 72 | }, 73 | "mirror.eu.ossplanet.net": { 74 | "outbound_ipv4": [ 75 | "65.21.121.31" 76 | ], 77 | "outbound_ipv6": [ 78 | "2a01:4f9:3b:4992::2" 79 | ] 80 | }, 81 
| "ftp.yz.yamagata-u.ac.jp": { 82 | "outbound_ipv4": [ 83 | "133.24.248.16", 84 | "133.24.248.17", 85 | "133.24.248.18", 86 | "133.24.248.19", 87 | "133.24.248.21" 88 | ], 89 | "outbound_ipv6": [ 90 | "2001:df0:25e:e100::3", 91 | "2001:df0:25e:e100::2" 92 | ] 93 | }, 94 | "mirror.del.albony.in": {}, 95 | "mirror.freedif.org": { 96 | "outbound_ipv4": [ 97 | "66.96.199.63" 98 | ] 99 | }, 100 | "mirror.yandex.ru": {}, 101 | "mirrors.hostico.ro": {} 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /jenkins-infra-data/updates-jenkins-io_mirrors.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | set -x 7 | 8 | command -v 'jq' >/dev/null || { echo "[ERROR] no 'jq' command found."; exit 1; } 9 | command -v 'xq' >/dev/null || { echo "[ERROR] no 'xq' command found."; exit 1; } 10 | command -v 'dig' >/dev/null || { echo "[ERROR] no 'dig' command found."; exit 1; } 11 | 12 | test -d "${DIST_DIR}" 13 | 14 | mirrorsSource="https://mirrors.updates.jenkins.io/current/update-center.json?mirrorlist" 15 | mirrorTableQuery='body > div > div > div > table' 16 | mirrorRowXPath='//table/tbody/tr' 17 | cellXPath='//td[2]' 18 | fallback='archives.jenkins.io' 19 | updateCenterHostname=updates.jenkins.io 20 | 21 | function getIPsFromHostname() { 22 | dig +short "${1}" "${2}" | jq --raw-input --slurp 'split("\n") | map(select(test("\\.$") | not)) | map(select(length > 0))' 23 | return 24 | } 25 | 26 | # Retrieve the source HTML 27 | sourceHTML="$(curl --silent --show-error --location "${mirrorsSource}")" 28 | 29 | # Retrieving all rows of the table containing all mirrors 30 | mirrorRows="$(echo "${sourceHTML}" \ 31 | | xq --node --query "${mirrorTableQuery}" \ 32 | | xq --node --xpath "${mirrorRowXPath}" \ 33 | )" 34 | 35 | if [[ -z "${mirrorRows}" ]]; then 36 | echo "Error: no mirror returned from ${mirrorsSource}" 37 | exit 1 38 | fi 39 | 40 | # Retrieve a list of hostnames, one per line. Keep only cells not finishing by " ago" (last update column) and that are not the fallback mirror. 41 | mirrorshostnames="$(echo "${mirrorRows}" | xq --xpath "${cellXPath}" | grep -v ' ago' | grep -v "${fallback}")" 42 | 43 | json='{"servers": []}' 44 | while IFS= read -r mirrorHostname 45 | do 46 | # As dig(1) can return CNAME values, we need to filter IPs from its result(s) (those not finishing by a ".") 47 | ipv4="$(getIPsFromHostname "${mirrorHostname}" 'A')" 48 | ipv6="$(getIPsFromHostname "${mirrorHostname}" 'AAAA')" 49 | json="$(echo "${json}" | jq \ 50 | --arg hostname "${mirrorHostname}" \ 51 | --argjson ipv4 "${ipv4}" \ 52 | --argjson ipv6 "${ipv6}" \ 53 | '.servers |= . + [{"hostname": $hostname, "ipv4": $ipv4, "ipv6": $ipv6}]')" 54 | 55 | done <<< "${mirrorshostnames}" 56 | 57 | if [[ "${json}" == '{"servers": []}' ]]; then 58 | echo "Error: no mirror returned from ${mirrorsSource}" 59 | exit 1 60 | fi 61 | 62 | 63 | updateCenterIpv4="$(getIPsFromHostname "${updateCenterHostname}" 'A')" 64 | updateCenterIpv6="$(getIPsFromHostname "${updateCenterHostname}" 'AAAA')" 65 | 66 | json="$(echo "${json}" | jq \ 67 | --arg hostname "${updateCenterHostname}" \ 68 | --argjson ipv4 "${updateCenterIpv4}" \ 69 | --argjson ipv6 "${updateCenterIpv6}" \ 70 | '.servers |= . 
+ [{"hostname": $hostname, "ipv4": $ipv4, "ipv6": $ipv6}]')" 71 | 72 | echo "${json}" 73 | exit 0 74 | -------------------------------------------------------------------------------- /fork-report/fork-report.rb: -------------------------------------------------------------------------------- 1 | # Usage: GITHUB_AUTH_PSW=abcdefabcdef ruby fork-report.rb > report.json 2 | 3 | require 'graphql/client' 4 | require 'graphql/client/http' 5 | require 'httparty' 6 | require 'pp' 7 | require 'json' 8 | 9 | $auth = "bearer #{ENV['GITHUB_AUTH_PSW']}" 10 | 11 | module GitHubGraphQL 12 | HTTP = GraphQL::Client::HTTP.new('https://api.github.com/graphql') do 13 | def headers(context) 14 | { 15 | 'Authorization' => $auth 16 | } 17 | end 18 | end 19 | Schema = GraphQL::Client.load_schema(HTTP) 20 | Client = GraphQL::Client.new(schema: Schema, execute: HTTP) 21 | end 22 | 23 | 24 | CollaboratorsQuery = GitHubGraphQL::Client.parse <<-'GRAPHQL' 25 | 26 | query($repository_cursor: String) { 27 | organization(login: "jenkinsci") { 28 | repositories(first: 100, after: $repository_cursor) { 29 | pageInfo { 30 | startCursor 31 | hasNextPage 32 | endCursor 33 | } 34 | edges { 35 | node { 36 | nameWithOwner 37 | isFork 38 | parent { 39 | nameWithOwner 40 | } 41 | } 42 | } 43 | } 44 | } 45 | rateLimit { 46 | limit 47 | cost 48 | remaining 49 | resetAt 50 | } 51 | } 52 | GRAPHQL 53 | 54 | 55 | $table_data = [] 56 | 57 | def record_repository(repo_name, source_repo) 58 | $table_data << [ repo_name, source_repo ] 59 | end 60 | 61 | def ratelimit_info(rate_limit) 62 | STDERR.puts "Rate limit: Cost: #{rate_limit.cost}, limit #{rate_limit.limit}, remaining: #{rate_limit.remaining}, reset at: #{rate_limit.reset_at}" 63 | end 64 | 65 | repository_cursor = nil 66 | collaborator_cursor = nil 67 | error_count = 0 68 | 69 | loop do 70 | STDERR.puts "Calling with cursors: repository #{repository_cursor}" 71 | result = GitHubGraphQL::Client.query(CollaboratorsQuery, variables: {repository_cursor: repository_cursor }) 72 | 73 | if !result.errors[:data].empty? 
then 74 | STDERR.puts result.errors[:data] 75 | sleep 5 76 | if error_count > 50 then 77 | # fatal 78 | STDERR.puts 'Consecutive error count limit reached, aborting' 79 | abort('Too many errors') 80 | else 81 | error_count += 1 82 | end 83 | else 84 | error_count = 0 85 | 86 | result.data.organization.repositories.edges.each { |repo| 87 | repo_name = repo.node.name_with_owner 88 | STDERR.puts "Processing #{repo_name}" 89 | if repo.node.is_fork then 90 | record_repository(repo_name, repo.node.parent&.name_with_owner ) 91 | end 92 | } 93 | 94 | ratelimit_info(result.data.rate_limit) 95 | 96 | repository_paging = result.data.organization.repositories.page_info 97 | if repository_paging.has_next_page 98 | collaborator_cursor = nil 99 | repository_cursor = repository_paging.end_cursor 100 | STDERR.puts "Next page of repositories, from #{repository_cursor}" 101 | else 102 | break 103 | end 104 | end 105 | end 106 | 107 | puts JSON.generate($table_data) 108 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/node 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=node 3 | 4 | ### Node ### 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | .pnpm-debug.log* 13 | 14 | # Diagnostic reports (https://nodejs.org/api/report.html) 15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 16 | 17 | # Runtime data 18 | pids 19 | *.pid 20 | *.seed 21 | *.pid.lock 22 | 23 | # Directory for instrumented libs generated by jscoverage/JSCover 24 | lib-cov 25 | 26 | # Coverage directory used by tools like istanbul 27 | coverage 28 | *.lcov 29 | 30 | # nyc test coverage 31 | .nyc_output 32 | 33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 34 | .grunt 35 | 36 | # Bower dependency directory (https://bower.io/) 37 | bower_components 38 | 39 | # node-waf configuration 40 | .lock-wscript 41 | 42 | # Compiled binary addons (https://nodejs.org/api/addons.html) 43 | build/Release 44 | 45 | # Dependency directories 46 | node_modules/ 47 | jspm_packages/ 48 | 49 | # Snowpack dependency directory (https://snowpack.dev/) 50 | web_modules/ 51 | 52 | # TypeScript cache 53 | *.tsbuildinfo 54 | 55 | # Optional npm cache directory 56 | .npm 57 | 58 | # Optional eslint cache 59 | .eslintcache 60 | 61 | # Optional stylelint cache 62 | .stylelintcache 63 | 64 | # Microbundle cache 65 | .rpt2_cache/ 66 | .rts2_cache_cjs/ 67 | .rts2_cache_es/ 68 | .rts2_cache_umd/ 69 | 70 | # Optional REPL history 71 | .node_repl_history 72 | 73 | # Output of 'npm pack' 74 | *.tgz 75 | 76 | # Yarn Integrity file 77 | .yarn-integrity 78 | 79 | # dotenv environment variable files 80 | .env 81 | .env.development.local 82 | .env.test.local 83 | .env.production.local 84 | .env.local 85 | 86 | # parcel-bundler cache (https://parceljs.org/) 87 | .cache 88 | .parcel-cache 89 | 90 | # Next.js build output 91 | .next 92 | out 93 | 94 | # Nuxt.js build / generate output 95 | .nuxt 96 | dist 97 | 98 | # Gatsby files 99 | .cache/ 100 | # Comment in the public line in if your project uses Gatsby and not Next.js 101 | # https://nextjs.org/blog/next-9-1#public-directory-support 102 | # public 103 | 104 | # vuepress build output 105 | .vuepress/dist 106 | 107 | # vuepress v2.x temp and cache directory 108 | .temp 109 | 110 | # Docusaurus cache and generated files 111 | 
.docusaurus 112 | 113 | # Serverless directories 114 | .serverless/ 115 | 116 | # FuseBox cache 117 | .fusebox/ 118 | 119 | # DynamoDB Local files 120 | .dynamodb/ 121 | 122 | # TernJS port file 123 | .tern-port 124 | 125 | # Stores VSCode versions used for testing VSCode extensions 126 | .vscode-test 127 | 128 | # yarn v2 129 | .yarn/cache 130 | .yarn/unplugged 131 | .yarn/build-state.yml 132 | .yarn/install-state.gz 133 | .pnp.* 134 | 135 | ### Node Patch ### 136 | # Serverless Webpack directories 137 | .webpack/ 138 | 139 | # Optional stylelint cache 140 | 141 | # SvelteKit build / generate output 142 | .svelte-kit 143 | 144 | # End of https://www.toptal.com/developers/gitignore/api/node 145 | # 146 | plugin-migration/output 147 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jenkins GitHub Reports 2 | 3 | If you want to execute these scripts locally, use the Docker image `jenkinsciinfra/jenkins-agent-ubuntu-22.04` (source code at ). 4 | It includes all the required dependencies: 5 | 6 | - Ruby 7 | - Ruby Gems such as [octokit.rb](http://octokit.github.io/octokit.rb/) and graphql, used to generate reports about the `jenkinsci` GitHub organization 8 | - Bash 9 | - jq, the Azure CLI, and other command-line tools used by the Bash scripts 10 | 11 | You can find the exact image version in the Jenkins agent pod template specified in `./JenkinsAgentPodTemplate.yaml`. 12 | 13 | ## Permissions Report 14 | 15 | Prints a two-dimensional JSON array optimized for use in [DataTables](https://www.datatables.net/) hosted at [Source Code Hosting](https://www.jenkins.io/doc/developer/publishing/source-code-hosting/). 16 | 17 | Format example: 18 | 19 | ```json 20 | [ 21 | [ 22 | "ldap-plugin", 23 | "olamy", 24 | "push" 25 | ], 26 | [ 27 | "ldap-plugin", 28 | "jglick", 29 | "push" 30 | ] 31 | ] 32 | ``` 33 | 34 | ### Usage 35 | 36 | We use a GitHub App for this; you'll need to define the following environment variables to run the script: 37 | 38 | - GITHUB_APP_PRIVATE_KEY_B64: The GitHub App private key in PEM format, encoded in base64 39 | - GITHUB_APP_ID: The GitHub App's identifier (type integer) set when registering an app 40 | - GITHUB_ORG_NAME: The GitHub organization name (ex: "jenkinsci") 41 | 42 | ```shell 43 | cd permissions-report/ 44 | ruby ./permissions-report.rb 45 | ``` 46 | 47 | ## Artifactory Users Report 48 | 49 | Creates a report listing all user accounts in Artifactory. 50 | 51 | Consumed by 52 | 53 | ### Usage 54 | 55 | This requires Artifactory admin user credentials. 56 | 57 | ```bash 58 | cd artifactory-users-report/ 59 | export ARTIFACTORY_AUTH=admin-username:admin-token 60 | bash ./user-report.sh 61 | ``` 62 | 63 | ## Jira Users Report 64 | 65 | Creates a report listing all user accounts in a Jira group containing plugin maintainers. 66 | Currently, we use `jira-users` for that, but may in the future use a more limited group. 67 | 68 | Consumed by 69 | 70 | ### Usage 71 | 72 | This requires Jira admin user credentials. 
73 | 74 | ```bash 75 | cd jira-users-report/ 76 | export JIRA_AUTH=admin-username:admin-token 77 | bash ./user-report.sh 78 | ``` 79 | 80 | ## Plugin Documentation Migration Report 81 | 82 | Creates an HTML file with the current state of the documentation migration project. 83 | 84 | Consumed by docs-sig 85 | 86 | ### Usage 87 | 88 | We use a GitHub App for this; you'll need to define the following environment variables to run the script: 89 | 90 | - GITHUB_APP_PRIVATE_KEY_B64: The GitHub App private key in PEM format, encoded in base64 91 | - GITHUB_APP_ID: The GitHub App's identifier (type integer) set when registering an app 92 | - GITHUB_ORG_NAME: The GitHub organization name (ex: "jenkinsci") 93 | 94 | ```bash 95 | cd plugin-migration 96 | npm install 97 | node index.js > index.html 98 | ``` 99 | 100 | ## Infrastructure > get.jenkins.io mirrors report 101 | 102 | Creates a report listing all get.jenkins.io mirrors. 103 | 104 | Not consumed yet. 105 | 106 | ### Usage 107 | 108 | This requires the curl(1), jq(1), and xq(1) command-line tools. 109 | 110 | ```bash 111 | cd jenkins-infra-data/ 112 | ./get-jenkins-io_mirrors.sh 113 | ``` 114 | -------------------------------------------------------------------------------- /jenkins-infra-data/get-jenkins-io_mirrors.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o nounset 4 | set -o errexit 5 | set -o pipefail 6 | set -x 7 | 8 | command -v 'jq' >/dev/null || { echo "[ERROR] no 'jq' command found."; exit 1; } 9 | command -v 'xq' >/dev/null || { echo "[ERROR] no 'xq' command found."; exit 1; } 10 | command -v 'dig' >/dev/null || { echo "[ERROR] no 'dig' command found."; exit 1; } 11 | 12 | test -d "${DIST_DIR}" 13 | 14 | mirrorsSource='https://get.jenkins.io/index.html?mirrorstats' 15 | mirrorTableQuery='body > div > div > div > table' 16 | mirrorRowXPath='//table/tbody/tr' 17 | cellXPath='//td[@rowspan=2]' 18 | fallback='archives.jenkins.io' 19 | 20 | # Retrieve the source HTML 21 | sourceHTML="$(curl --silent --show-error --location "${mirrorsSource}")" 22 | 23 | # Retrieving all rows of the table containing all mirrors 24 | mirrorRows="$(echo "${sourceHTML}" \ 25 | | xq --node --query "${mirrorTableQuery}" \ 26 | | xq --node --xpath "${mirrorRowXPath}"\ 27 | )" 28 | 29 | if [[ -z "${mirrorRows}" ]]; then 30 | echo "Error: no mirror returned from ${mirrorsSource}" 31 | exit 1 32 | fi 33 | 34 | # Retrieve list of hostnames, one per line. Keep only cells not finishing by " ago" (last update column) and that are not the fallback mirror. 
35 | hostnames="$(echo "${mirrorRows}" | xq --xpath "${cellXPath}" | grep -v ' ago' | grep -v "${fallback}")" 36 | additional_data="$(cd "$(dirname "$0")" && pwd -P)/get-jenkins-io-data.json" 37 | 38 | json='{"mirrors": []}' 39 | while IFS= read -r hostname 40 | do 41 | # As dig(1) can returns CNAME values, we need to filter IPs from its result(s) (those not finishing by a ".") 42 | ipv4="$(dig +short "${hostname}" A | jq --raw-input --slurp 'split("\n") | map(select(test("\\.$") | not)) | map(select(length > 0))')" 43 | ipv6="$(dig +short "${hostname}" AAAA | jq --raw-input --slurp 'split("\n") | map(select(test("\\.$") | not)) | map(select(length > 0))')" 44 | outbound_ipv4="$(jq --raw-output ".mirrors.\"${hostname}\".outbound_ipv4" "${additional_data}")" 45 | # Assume the same in and out IPv4s set if not specified in the additional data file 46 | if [ "${outbound_ipv4}" == "null" ] 47 | then 48 | outbound_ipv4="$ipv4" 49 | fi 50 | outbound_ipv6="$(jq --raw-output ".mirrors.\"${hostname}\".outbound_ipv6" "${additional_data}")" 51 | # Assume the same in and out IPv6s set if not specified in the additional data file 52 | if [ "${outbound_ipv6}" == "null" ] 53 | then 54 | outbound_ipv6="$ipv6" 55 | fi 56 | json="$(echo "${json}" | jq \ 57 | --arg hostname "${hostname}" \ 58 | --argjson ipv4 "${ipv4}" \ 59 | --argjson ipv6 "${ipv6}" \ 60 | --argjson outbound_ipv4 "${outbound_ipv4}" \ 61 | --argjson outbound_ipv6 "${outbound_ipv6}" \ 62 | '.mirrors |= . + [{"hostname": $hostname, "ipv4": $ipv4, "ipv6": $ipv6, "outbound_ipv4": $outbound_ipv4, "outbound_ipv6": $outbound_ipv6}]')" 63 | done <<< "${hostnames}" 64 | 65 | if [[ "${json}" == '{"mirrors": []}' ]]; then 66 | echo "Error: no mirror returned from ${mirrorsSource}" 67 | exit 1 68 | fi 69 | 70 | ## Provide outbound IPs for mirror providers to add in their allow-list for scanning 71 | 72 | # publick8s hosts the mirrorbits services which emit outbound requests to scan external mirrors 73 | publick8sIpv4List="$(curl --silent --show-error --location 'https://reports.jenkins.io/jenkins-infra-data-reports/azure.json' \ 74 | | jq '.["publick8s"].lb_outbound_ips.ipv4' \ 75 | )" 76 | publick8sIpv6List="$(curl --silent --show-error --location 'https://reports.jenkins.io/jenkins-infra-data-reports/azure.json' \ 77 | | jq '.["publick8s"].lb_outbound_ips.ipv6' \ 78 | )" 79 | # infra.ci.jenkins.io (controller and agents) may emit outbound requests to external mirrors for testing or setup purposes 80 | infraciIpv4List="$(curl --silent --show-error --location 'https://reports.jenkins.io/jenkins-infra-data-reports/azure-net.json' \ 81 | | jq '.["infra.ci.jenkins.io"].outbound_ips' \ 82 | )" 83 | json="$(echo "${json}" | jq \ 84 | --argjson publick8sIpv4List "${publick8sIpv4List}" \ 85 | --argjson publick8sIpv6List "${publick8sIpv6List}" \ 86 | --argjson infraciIpv4List "${infraciIpv4List}" \ 87 | '. 
+= {"outbound_ipv4": ([$publick8sIpv4List + $infraciIpv4List] | flatten | unique), "outbound_ipv6": ([$publick8sIpv6List] | flatten | unique)}' \ 88 | )" 89 | 90 | echo "${json}" 91 | exit 0 92 | -------------------------------------------------------------------------------- /permissions-report/permissions-report.rb: -------------------------------------------------------------------------------- 1 | # Usage: ruby permission-report.rb > report.json 2 | 3 | require 'graphql/client' 4 | require 'graphql/client/http' 5 | require 'httparty' 6 | require 'pp' 7 | require 'json' 8 | require 'openssl' 9 | require 'jwt' 10 | require 'time' 11 | require 'base64' 12 | 13 | # Expects that the private key in PEM format. Converts the newlines 14 | if ENV['GITHUB_APP_PRIVATE_KEY_B64'] == '' then 15 | abort "Error: the environment variable GITHUB_APP_PRIVATE_KEY_B64 is empty." 16 | else 17 | PRIVATE_KEY = OpenSSL::PKey::RSA.new(Base64.decode64(ENV['GITHUB_APP_PRIVATE_KEY_B64']).gsub('\n', "\n")) 18 | end 19 | 20 | # The GitHub App's identifier (type integer) set when registering an app. 21 | APP_IDENTIFIER = ENV['GITHUB_APP_ID'] 22 | if APP_IDENTIFIER == '' then 23 | abort "Error: the environment variable GITHUB_APP_ID is empty." 24 | end 25 | 26 | # The organization to scan 27 | GITHUB_ORG_NAME = ENV['GITHUB_ORG_NAME'] 28 | if GITHUB_ORG_NAME == '' then 29 | abort "Error: the environment variable GITHUB_ORG_NAME is empty." 30 | end 31 | 32 | # Saves the raw payload and converts the payload to JSON format 33 | def get_payload_request(request) 34 | # request.body is an IO or StringIO object 35 | # Rewind in case someone already read it 36 | request.body.rewind 37 | # The raw text of the body is required for webhook signature verification 38 | @payload_raw = request.body.read 39 | begin 40 | @payload = JSON.parse @payload_raw 41 | rescue => e 42 | fail "Invalid JSON (#{e}): #{@payload_raw}" 43 | end 44 | end 45 | 46 | $userAgent = "Jenkins Infra Github App permissions-report (id: #{APP_IDENTIFIER})" 47 | 48 | def get_auth_token 49 | # Generate a JWT to authenticate the Github App 50 | payload = { 51 | # The time that this JWT was issued, _i.e._ now. 52 | iat: Time.now.to_i, 53 | 54 | # JWT expiration time (10 minute maximum) 55 | exp: Time.now.to_i + (10 * 60), 56 | 57 | # Your GitHub App's identifier number 58 | iss: APP_IDENTIFIER 59 | } 60 | 61 | # Cryptographically sign the JWT. 
62 | jwt = "Bearer #{JWT.encode(payload, PRIVATE_KEY, 'RS256')}" 63 | 64 | # List installation for the Github App (ref: https://docs.github.com/en/rest/reference/apps#list-installations-for-the-authenticated-app) 65 | response = HTTParty.get('https://api.github.com/app/installations', :headers => { 66 | 'Authorization' => jwt, 67 | 'User-Agent' => $userAgent 68 | }) 69 | installationsResponse = response.parsed_response 70 | installationId = 0 71 | installationsResponse.each { |installation| 72 | if installation['account']['login'] == GITHUB_ORG_NAME then 73 | installationId = installation['id'] 74 | end 75 | } 76 | if installationId > 0 then 77 | STDERR.puts "Running permissions-report on the organization #{GITHUB_ORG_NAME}" 78 | else 79 | abort "Error: no Github App installation for the organization #{GITHUB_ORG_NAME}" 80 | end 81 | 82 | # Retrieve the Installation Access Token of the Github App (ref: https://docs.github.com/en/rest/reference/apps#create-an-installation-access-token-for-an-app) 83 | response = HTTParty.post("https://api.github.com/app/installations/#{installationId}/access_tokens", :headers => { 84 | 'Authorization' => jwt, 85 | 'User-Agent' => $userAgent 86 | }) 87 | auth = "Bearer #{response.parsed_response['token']}" 88 | end 89 | 90 | $auth = get_auth_token 91 | 92 | module GitHubGraphQL 93 | HTTP = GraphQL::Client::HTTP.new('https://api.github.com/graphql') do 94 | def headers(context) 95 | { 96 | 'Authorization' => context.has_key?(:authorization) ? context[:authorization] : $auth, 97 | 'User-Agent' => $userAgent 98 | } 99 | end 100 | end 101 | Schema = GraphQL::Client.load_schema(HTTP) 102 | Client = GraphQL::Client.new(schema: Schema, execute: HTTP) 103 | end 104 | 105 | CollaboratorsQuery = GitHubGraphQL::Client.parse <<-'GRAPHQL' 106 | 107 | query($github_org_name: String!, $repository_cursor: String, $collaborator_cursor: String) { 108 | organization(login: $github_org_name) { 109 | repositories(first: 10, after: $repository_cursor, privacy: PUBLIC) { 110 | pageInfo { 111 | startCursor 112 | hasNextPage 113 | endCursor 114 | } 115 | edges { 116 | node { 117 | name 118 | collaborators(first: 80, after: $collaborator_cursor) { 119 | totalCount 120 | pageInfo { 121 | startCursor 122 | hasNextPage 123 | endCursor 124 | } 125 | edges { 126 | permission 127 | node { 128 | login 129 | } 130 | } 131 | } 132 | } 133 | } 134 | } 135 | } 136 | rateLimit { 137 | limit 138 | cost 139 | remaining 140 | resetAt 141 | } 142 | } 143 | GRAPHQL 144 | 145 | 146 | $table_data = [] 147 | 148 | response = HTTParty.get("https://api.github.com/orgs/#{GITHUB_ORG_NAME}/members?role=admin", :headers => { 149 | 'Authorization' => $auth, 150 | 'User-Agent' => $userAgent 151 | }) 152 | 153 | $org_admins = response.parsed_response.map{|user| user['login']} 154 | 155 | def record_collaborator(repo_name, collaborator, permission) 156 | unless permission == 'READ' or $org_admins.include? 
collaborator then 157 | $table_data << [ repo_name, collaborator, permission ] 158 | end 159 | end 160 | 161 | def ratelimit_info(rate_limit) 162 | STDERR.puts "Rate limit: Cost: #{rate_limit.cost}, limit #{rate_limit.limit}, remaining: #{rate_limit.remaining}, reset at: #{rate_limit.reset_at}" 163 | end 164 | 165 | repository_cursor = nil 166 | collaborator_cursor = nil 167 | error_count = 0 168 | counter = 0 169 | max_rescue = 5 170 | 171 | loop do 172 | STDERR.puts "Calling with cursors: repository #{repository_cursor}, collaborator #{collaborator_cursor}" 173 | # Query with a new token every once in a while passed as context 174 | counter += 1 175 | if counter % 500 == 0 then 176 | STDERR.puts "Generating a new token (repo counter: #{counter})" 177 | $auth = get_auth_token 178 | end 179 | rescue_counter = 0 180 | begin 181 | result = GitHubGraphQL::Client.query(CollaboratorsQuery, variables: { 182 | github_org_name: GITHUB_ORG_NAME, 183 | repository_cursor: repository_cursor, 184 | collaborator_cursor: collaborator_cursor 185 | }, context: {authorization: $auth}) 186 | rescue EOFError => e 187 | STDERR.puts "Rescue of #{e.class}: #{e.message}" 188 | if rescue_counter < max_rescue then 189 | rescue_counter += 1 190 | STDERR.puts "Rescue #{rescue_counter}/#{max_rescue}, retrying in 10 seconds..." 191 | sleep 10 192 | retry 193 | end 194 | STDERR.puts 'Consecutive rescue count limit reached, aborting' 195 | abort('Too many rescues') 196 | end 197 | 198 | if !result.errors[:data].empty? then 199 | STDERR.puts result.errors[:data] 200 | sleep 5 201 | if error_count > 50 then 202 | # fatal 203 | STDERR.puts 'Consecutive error count limit reached, aborting' 204 | abort('Too many errors') 205 | else 206 | error_count += 1 207 | end 208 | else 209 | error_count = 0 210 | 211 | collaborator_paging = nil 212 | result.data.organization.repositories.edges.each { |repo| 213 | repo_name = repo.node.name 214 | STDERR.puts "Processing #{repo_name}" 215 | if repo.node.collaborators then 216 | collaborator_paging = repo.node.collaborators.page_info 217 | repo.node.collaborators.edges.each { |collaborator| 218 | record_collaborator(repo_name, collaborator.node.login, collaborator.permission) 219 | } 220 | else 221 | STDERR.puts "Nil collaborators, archived repo #{repo_name}?" 222 | end 223 | } 224 | 225 | ratelimit_info(result.data.rate_limit) 226 | 227 | if collaborator_paging&.has_next_page then 228 | collaborator_cursor = collaborator_paging.end_cursor 229 | STDERR.puts "Next page of collaborators, from #{collaborator_cursor}" 230 | else 231 | repository_paging = result.data.organization.repositories.page_info 232 | if repository_paging.has_next_page 233 | collaborator_cursor = nil 234 | repository_cursor = repository_paging.end_cursor 235 | STDERR.puts "Next page of repositories, from #{repository_cursor}" 236 | else 237 | break 238 | end 239 | end 240 | end 241 | end 242 | 243 | puts JSON.generate($table_data) 244 | --------------------------------------------------------------------------------