├── .ci
├── .docker-images.schema.json
├── .docker-images.yml
├── .yamlint.yml
├── docker
│ ├── Makefile
│ ├── dind-buildx
│ │ ├── Dockerfile
│ │ ├── build-example.sh
│ │ ├── config.json
│ │ └── docker-credential-env
│ ├── github-label-sync
│ │ └── Dockerfile
│ ├── jenkins-agent
│ │ └── Dockerfile
│ ├── kibana-yarn
│ │ └── Dockerfile
│ ├── shellcheck
│ │ └── Dockerfile
│ ├── tests
│ │ ├── test_helpers.bash
│ │ ├── tests.bats
│ │ └── tests_without_run.bats
│ ├── vmware-mock
│ │ └── Dockerfile
│ └── yamllint
│ │ └── Dockerfile
├── generate-releases.sh
├── generate-snapshots.sh
├── get-next-minor-version.sh
├── jobs
│ ├── apm-shared.yml
│ ├── apm-test-pipeline-mbp.yml
│ └── defaults.yml
├── packer_cache.sh
└── scripts
│ ├── build-beats-integrations-test-images.sh
│ ├── codecov
│ ├── gherkin-lint.sh
│ ├── markdown-lint.sh
│ ├── shellcheck
│ ├── validate-docker-images-config-yaml.sh
│ ├── validate-jjbb.sh
│ ├── validate.sh
│ └── yamllint
├── .editorconfig
├── .gcloudignore
├── .github
├── CODEOWNERS
├── PULL_REQUEST_TEMPLATE.md
├── actions
│ ├── await-maven-artifact
│ │ ├── README.md
│ │ └── action.yml
│ ├── buildkite
│ │ ├── README.md
│ │ ├── action.yml
│ │ ├── cancel.sh
│ │ ├── download_artifacts.py
│ │ └── run.sh
│ ├── check-dependent-jobs
│ │ ├── README.md
│ │ └── action.yml
│ ├── comment-reaction
│ │ ├── README.md
│ │ └── action.yml
│ ├── deploy-my-kibana
│ │ ├── README.md
│ │ └── action.yml
│ ├── docker-layer-caching
│ │ └── action.yml
│ ├── docker-login
│ │ ├── README.md
│ │ └── action.yml
│ ├── elastic-stack-snapshot-branches
│ │ ├── README.md
│ │ ├── action.yml
│ │ └── script.py
│ ├── github-token
│ │ ├── README.md
│ │ └── action.yml
│ ├── is-admin
│ │ ├── README.md
│ │ └── action.yml
│ ├── is-member-elastic-org
│ │ ├── README.md
│ │ └── action.yml
│ ├── is-pr-author-member-elastic-org
│ │ ├── README.md
│ │ └── action.yml
│ ├── kibana-docker-image
│ │ ├── README.md
│ │ ├── action.yml
│ │ ├── build-and-push.sh
│ │ └── setup-vars.sh
│ ├── notify-build-status
│ │ ├── .eslintignore
│ │ ├── .eslintrc.json
│ │ ├── .gitattributes
│ │ ├── .gitignore
│ │ ├── .node-version
│ │ ├── .prettierignore
│ │ ├── .prettierrc.json
│ │ ├── README.md
│ │ ├── action.yml
│ │ ├── dist
│ │ │ ├── index.js
│ │ │ └── licenses.txt
│ │ ├── jest.config.js
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ ├── index.ts
│ │ │ └── main.ts
│ │ ├── tests
│ │ │ └── it
│ │ │ │ └── main.test.ts
│ │ └── tsconfig.json
│ ├── oblt-cli-cluster-credentials
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli-cluster-name-validation
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli-create-ccs
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli-create-custom
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli-create-serverless
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli-destroy-cluster
│ │ ├── README.md
│ │ └── action.yml
│ ├── oblt-cli
│ │ ├── README.md
│ │ └── action.yml
│ ├── opentelemetry
│ │ ├── README.md
│ │ └── action.yml
│ ├── pre-commit
│ │ └── action.yml
│ ├── publish-report
│ │ ├── README.md
│ │ ├── action.yml
│ │ └── example.png
│ ├── setup-git
│ │ ├── README.md
│ │ └── action.yml
│ ├── setup-npmrc
│ │ ├── README.md
│ │ └── action.yml
│ ├── setup-oblt-cli
│ │ ├── README.md
│ │ ├── action.yml
│ │ └── download.sh
│ ├── setup-vault-cli
│ │ ├── README.md
│ │ └── action.yml
│ ├── slack-message
│ │ ├── README.md
│ │ └── action.yml
│ ├── snapshoty-simple
│ │ ├── README.md
│ │ └── action.yml
│ ├── snapshoty
│ │ ├── .node-version
│ │ ├── README.md
│ │ ├── action.yml
│ │ ├── dist
│ │ │ ├── index.js
│ │ │ └── licenses.txt
│ │ ├── index.js
│ │ ├── package-lock.json
│ │ └── package.json
│ ├── test-report
│ │ └── action.yml
│ ├── undeploy-my-kibana
│ │ ├── README.md
│ │ └── action.yml
│ ├── updatecli
│ │ ├── README.md
│ │ └── action.yml
│ ├── validate-github-comment
│ │ ├── README.md
│ │ └── action.yml
│ ├── version-framework
│ │ └── action.yml
│ └── workflow-run
│ │ ├── README.md
│ │ └── action.yml
├── dependabot.yml
├── paths-labeller.yml
├── release-drafter.yml
└── workflows
│ ├── actions-test.yml
│ ├── build-test-no-jenkins.yml
│ ├── build-test.yml
│ ├── bump-aliases.yml
│ ├── codeql.yml
│ ├── licenses.yml
│ ├── lint.yml
│ ├── pre-commit.yml
│ ├── publish-docker-images.yml
│ ├── pytest_otel.yml
│ ├── release.yml
│ ├── scorecard.yml
│ ├── test-comment-reaction.yml
│ ├── test-deploy-my-kibana.yml
│ ├── test-elastic-stack-snapshot-branches.yml
│ ├── test-is-admin.yml
│ ├── test-is-member-elastic-org.yml
│ ├── test-is-pr-author-member-elastic-org.yml
│ ├── test-kibana-docker-image-action.yml
│ ├── test-notify-build-status.yml
│ ├── test-oblt-cli-cluster-credentials.yml
│ ├── test-oblt-cli-cluster-name-validation.yml
│ ├── test-oblt-cli-create-ccs.yml
│ ├── test-oblt-cli-create-custom.yml
│ ├── test-oblt-cli-create-serverless.yml
│ ├── test-oblt-cli.yml
│ ├── test-reporter.yml
│ ├── test-updatecli.yml
│ └── test-workflow-run.yml
├── .gitignore
├── .mergify.yml
├── .mvn
└── wrapper
│ ├── MavenWrapperDownloader.java
│ ├── maven-wrapper.jar
│ └── maven-wrapper.properties
├── .pre-commit-config.yaml
├── .pre-commit-hooks.yaml
├── CHANGELOG.md
├── INTEGRATIONS.md
├── JENKINS.md
├── LICENSE
├── README.md
├── catalog-info.yaml
├── change-on-master
├── docs
├── CLOUD.md
├── CONTRIBUTING.md
├── DEVELOPMENT.md
├── GUIDELINES.md
├── INTERNAL_DOCKER_IMAGES.md
├── JENKINS_GUIDELINES.md
├── PRECOMMIT.md
├── RELEASE.md
└── STORAGE.md
├── local
├── .gitignore
├── Dockerfile
├── Makefile
├── README.md
├── configs
│ ├── google.yaml
│ ├── jenkins.yaml
│ └── plugins.txt
├── docker-compose.yml
├── jenkins_jobs.ini
├── test-jjbb.sh
└── workers
│ ├── linux
│ └── Vagrantfile
│ ├── macosx
│ └── Vagrantfile
│ └── windows
│ ├── setup.ps1
│ ├── swarm.bat
│ ├── windows-2016
│ └── Vagrantfile
│ └── windows-2019
│ └── Vagrantfile
├── mvnw
├── mvnw.cmd
├── pom.xml
├── resources
├── JenkinsfileTemplate.groovy
├── approval-list
│ ├── apm-agent-dotnet.yml
│ ├── apm-agent-go.yml
│ ├── apm-agent-java.yml
│ ├── apm-agent-nodejs.yml
│ ├── apm-agent-php.yml
│ ├── apm-agent-python.yml
│ ├── apm-agent-ruby.yml
│ ├── apm-agent-rum-js.yml
│ ├── apm-agent-server.yml
│ └── beats.yml
├── buildKibana.groovy
├── ci-builds-mapping.json
├── ci-tests-mapping.json
├── co
│ └── elastic
│ │ └── README.md
├── commands-github-comment-markdown.template
├── custom-build-report-mapping.json
├── docs
│ ├── index.asciidoc
│ └── test.asciidoc
├── flaky-github-comment-markdown.template
├── flaky-github-issue.template
├── gitBaseCommit.groovy
├── github-comment-markdown.template
├── groovy-html-custom.template
├── pods
│ ├── dind-golang.yml
│ ├── dind-python.yml
│ ├── golang.yml
│ └── python.yml
├── reviewApproved.json
├── runbld
│ ├── build.json
│ ├── failure.json
│ └── log.json
├── scripts
│ ├── apm-cli
│ │ ├── .editorconfig
│ │ ├── .gitignore
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── apm-cli.py
│ │ ├── apm
│ │ │ ├── ApmCli.py
│ │ │ └── __init__.py
│ │ ├── pytest.ini
│ │ ├── requirements.txt
│ │ ├── setup.py
│ │ └── tests
│ │ │ ├── __init__.py
│ │ │ ├── features
│ │ │ └── cli_params.feature
│ │ │ └── test_cli.py
│ ├── artifacts-api-7.x-version.sh
│ ├── artifacts-api-latest-release-versions.sh
│ ├── artifacts-api-latest-versions.sh
│ ├── beats
│ │ ├── filebeat.yml
│ │ ├── metricbeat-logs.yml
│ │ ├── metricbeat.yml
│ │ ├── run_filebeat.sh
│ │ ├── run_metricbeat.sh
│ │ ├── run_metricbeat_logs.sh
│ │ └── wait_for_beat.sh
│ ├── build-kibana-docker-image.sh
│ ├── bundlesize.sh
│ ├── count-data.sh
│ ├── coverage.sh
│ ├── docker-logs.sh
│ ├── generate-build-data.sh
│ ├── generateReadme.sh
│ ├── git_base_comit_prepare.sh
│ ├── install-tools.bat
│ ├── install-with-choco.ps1
│ ├── install
│ │ ├── kind-setup.sh
│ │ ├── kind.sh
│ │ └── kubectl.sh
│ ├── jenkins
│ │ └── validateJenkinsfile.sh
│ ├── junit2otel.sh
│ ├── processJinjaTemplate.py
│ ├── pytest_apm
│ │ ├── .editorconfig
│ │ ├── .gitignore
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── pytest_apm.py
│ │ ├── requirements.txt
│ │ ├── setup.py
│ │ └── test_pytest_apm.py
│ ├── pytest_otel
│ │ ├── .editorconfig
│ │ ├── .gitignore
│ │ ├── .pre-commit-config.yaml
│ │ ├── CHANGELOG.md
│ │ ├── LICENSE.txt
│ │ ├── Makefile
│ │ ├── README.md
│ │ ├── docs
│ │ │ └── demos
│ │ │ │ ├── elastic
│ │ │ │ ├── README.md
│ │ │ │ ├── demo.env
│ │ │ │ ├── docker-compose.yml
│ │ │ │ ├── elastic-stack.yml
│ │ │ │ ├── elasticsearch
│ │ │ │ │ ├── roles.yml
│ │ │ │ │ ├── service_tokens
│ │ │ │ │ ├── users
│ │ │ │ │ └── users_roles
│ │ │ │ ├── images
│ │ │ │ │ ├── elastic-overview.png
│ │ │ │ │ ├── elastic-services.png
│ │ │ │ │ ├── elastic-span-details.png
│ │ │ │ │ └── elastic-transactions.png
│ │ │ │ └── kibana
│ │ │ │ │ └── kibana.yml
│ │ │ │ ├── jaeger
│ │ │ │ ├── README.md
│ │ │ │ ├── config
│ │ │ │ │ └── otel-collector-config.yaml
│ │ │ │ ├── demo.env
│ │ │ │ ├── docker-compose.yml
│ │ │ │ └── images
│ │ │ │ │ ├── jaeger-spans-all.png
│ │ │ │ │ ├── jaeger-spans-detail.png
│ │ │ │ │ └── jaeger-spans.png
│ │ │ │ └── test
│ │ │ │ └── test_demo.py
│ │ ├── mypy.ini
│ │ ├── pyproject.toml
│ │ ├── requirements.txt
│ │ ├── setup.cfg
│ │ ├── setup.py
│ │ ├── src
│ │ │ └── pytest_otel
│ │ │ │ └── __init__.py
│ │ ├── tests
│ │ │ ├── docker-compose.yml
│ │ │ ├── it
│ │ │ │ ├── conftest.py
│ │ │ │ ├── test_basic_plugin.py
│ │ │ │ ├── test_failure_code_plugin.py
│ │ │ │ ├── test_failure_plugin.py
│ │ │ │ ├── test_skip_plugin.py
│ │ │ │ ├── test_success_plugin.py
│ │ │ │ ├── test_xfail_no_run_plugin.py
│ │ │ │ ├── test_xfail_plugin.py
│ │ │ │ └── utils
│ │ │ │ │ └── __init__.py
│ │ │ ├── otel-collector.yaml
│ │ │ └── test_pytest_otel.py
│ │ └── tox.ini
│ ├── release-manager-analyser.sh
│ ├── release-manager.sh
│ └── setup-git-release.sh
├── slack-markdown.template
├── templates
│ ├── bundlesize.md.j2
│ └── coverage.md.j2
├── testAPMCli.groovy
├── versions
│ └── releases.properties
└── webhook-template.groovy
├── src
├── co
│ └── elastic
│ │ ├── LogReportQueueAnnotator.groovy
│ │ ├── Nexus.groovy
│ │ ├── NotificationManager.groovy
│ │ ├── README.md
│ │ ├── beats
│ │ └── BeatsFunction.groovy
│ │ └── matrix
│ │ ├── DefaultParallelTaskGenerator.groovy
│ │ └── ResultsProcessor.groovy
├── pipeline.gdsl
└── test
│ ├── groovy
│ ├── AbortBuildStepTest.groovy
│ ├── AgentMappingStepTests.groovy
│ ├── ApmBasePipelineTest.groovy
│ ├── ApmCliStepTests.groovy
│ ├── ArtifactsApiStepTests.groovy
│ ├── AxisStepTests.groovy
│ ├── Base64decodeStepTests.groovy
│ ├── Base64encodeStepTests.groovy
│ ├── BeatsStagesStepTests.groovy
│ ├── BeatsWhenStepTests.groovy
│ ├── BuildKibanaDockerImageStepTests.groovy
│ ├── BuildStatusTests.groovy
│ ├── BumpUtilsStepTests.groovy
│ ├── CancelPreviousRunningBuildsStepTests.groovy
│ ├── CheckLicensesStepTests.groovy
│ ├── CheckoutStepTests.groovy
│ ├── CmdStepTests.groovy
│ ├── CodecovStepTests.groovy
│ ├── ConvertGoTestResultsStepTests.groovy
│ ├── CoverageReportStepTests.groovy
│ ├── CreateFileFromTemplateStepTests.groovy
│ ├── DetailsURLStepTests.groovy
│ ├── DockerImageExistsStepTests.groovy
│ ├── DockerLoginStepTests.groovy
│ ├── DockerLogsStepTests.groovy
│ ├── DownloadStepTests.groovy
│ ├── DownloadWithCurlStepTests.groovy
│ ├── DownloadWithWgetStepTests.groovy
│ ├── DummyStepTests.groovy
│ ├── EchoColorStepTests.groovy
│ ├── ErrorIfEmptyStepTests.groovy
│ ├── FastCheckoutStepTests.groovy
│ ├── FilebeatStepTests.groovy
│ ├── FindOldestSupportedVersionStepTests.groovy
│ ├── FlattenMapStepTests.groovy
│ ├── GenerateBuildDataIntegrationTests.groovy
│ ├── GenerateChangelogTests.groovy
│ ├── GenerateGoBenchmarkDiffStepTests.groovy
│ ├── GenerateReportStepTests.groovy
│ ├── GetBlueoceanDisplayURLStepTests.groovy
│ ├── GetBlueoceanRestURLJobStepTests.groovy
│ ├── GetBlueoceanTabURLStepTests.groovy
│ ├── GetBranchNameFromArtifactsAPIStepTests.groovy
│ ├── GetBranchesFromAliasesStepTests.groovy
│ ├── GetBuildInfoJsonFilesStepTests.groovy
│ ├── GetCurrentBuildTimeStepTests.groovy
│ ├── GetFlakyJobNameStepTests.groovy
│ ├── GetGitCommitShaStepTests.groovy
│ ├── GetGitMatchingGroupStepTests.groovy
│ ├── GetGitRepoURLStepTests.groovy
│ ├── GetGithubTokenStepTests.groovy
│ ├── GetModulesFromCommentTriggerStepTests.groovy
│ ├── GetStageIdStepTests.groovy
│ ├── GetTraditionalPageURLStepTests.groovy
│ ├── GetVaultSecretStepTests.groovy
│ ├── GhStepTests.groovy
│ ├── GitChangelogStepTests.groovy
│ ├── GitCheckoutStepTests.groovy
│ ├── GitCmdStepTests.groovy
│ ├── GitCreateTagStepTests.groovy
│ ├── GitDeleteTagStepTests.groovy
│ ├── GitPushStepTests.groovy
│ ├── GitStepTests.groovy
│ ├── GithubApiCallStepTests.groovy
│ ├── GithubAppTokenStepTests.groovy
│ ├── GithubBranchRefStepTests.groovy
│ ├── GithubCheckStepTests.groovy
│ ├── GithubCommentIssueStepTests.groovy
│ ├── GithubCreateIssueStepTests.groovy
│ ├── GithubCreatePullRequestStepTests.groovy
│ ├── GithubEnvStepTests.groovy
│ ├── GithubIssuesStepTests.groovy
│ ├── GithubPrCheckApprovedStepTests.groovy
│ ├── GithubPrCommentStepTests.groovy
│ ├── GithubPrExistsStepTests.groovy
│ ├── GithubPrInfoStepTests.groovy
│ ├── GithubPrLabelsStepTests.groovy
│ ├── GithubPrLatestCommentStepTests.groovy
│ ├── GithubPrReviewsStepTests.groovy
│ ├── GithubPullRequestsStepTests.groovy
│ ├── GithubReleaseCreateTests.groovy
│ ├── GithubReleasePublishTests.groovy
│ ├── GithubRepoGetUserPermissionStepTests.groovy
│ ├── GithubTraditionalPrCommentStepTests.groovy
│ ├── GithubWorkflowRunTests.groovy
│ ├── GoDefaultVersionStepTests.groovy
│ ├── GoTestJUnitStepTests.groovy
│ ├── GoVersionStepTests.groovy
│ ├── GoogleStorageUploadExtStepTests.groovy
│ ├── GsutilStepTests.groovy
│ ├── HasCommentAuthorWritePermissionsTests.groovy
│ ├── HttpRequestStepTests.groovy
│ ├── InstallToolsStepTests.groovy
│ ├── Is32ArmStepTests.groovy
│ ├── Is32StepTests.groovy
│ ├── Is32x86StepTests.groovy
│ ├── Is64ArmStepTests.groovy
│ ├── Is64StepTests.groovy
│ ├── Is64x86StepTests.groovy
│ ├── IsArmStepTests.groovy
│ ├── IsBeforeGo1_16StepTests.groovy
│ ├── IsBranchIndexTriggerStepTests.groovy
│ ├── IsBranchStepTests.groovy
│ ├── IsBranchUnifiedReleaseAvailableStepTests.groovy
│ ├── IsBuildFailureStepTests.groovy
│ ├── IsCommentTriggerStepTests.groovy
│ ├── IsDarwinStepTests.groovy
│ ├── IsEmptyStepTests.groovy
│ ├── IsGitRegionMatchStepTests.groovy
│ ├── IsInstalledStepTests.groovy
│ ├── IsInternalCIStepTests.groovy
│ ├── IsMemberOfOrgStepTests.groovy
│ ├── IsMemberOfStepTests.groovy
│ ├── IsPRStepTests.groovy
│ ├── IsStaticWorkerStepTests.groovy
│ ├── IsTagStepTests.groovy
│ ├── IsTimerTriggerStepTests.groovy
│ ├── IsUpstreamTriggerStepTests.groovy
│ ├── IsUserTriggerStepTests.groovy
│ ├── IsX86StepTests.groovy
│ ├── Junit2OtelStepTests.groovy
│ ├── JunitAndStoreStepTests.groovy
│ ├── ListGithubReleasesStepTests.groovy
│ ├── LogStepTests.groovy
│ ├── LookForGitHubIssuesStepTests.groovy
│ ├── MatchesPrLabelStepTests.groovy
│ ├── MatrixStepTests.groovy
│ ├── MetricbeatStepTests.groovy
│ ├── MvnVersionTests.groovy
│ ├── NexusCloseStagingRepositoryTests.groovy
│ ├── NexusCreateStagingRepositoryTests.groovy
│ ├── NexusDropStagingRepositoryTests.groovy
│ ├── NexusFindStagingIdTests.groovy
│ ├── NexusReleaseStagingRepositoryTests.groovy
│ ├── NexusTests.groovy
│ ├── NexusUploadStagingArtifactTests.groovy
│ ├── NodeArchStepTests.groovy
│ ├── NodeJSDefaultVersionStepTests.groovy
│ ├── NodeOSStepTests.groovy
│ ├── NotificationManagerStepTests.groovy
│ ├── NotifyBuildResultStepTests.groovy
│ ├── NotifyStalledBeatsBumpsStepTests.groovy
│ ├── OpbeansPipelineStepTests.groovy
│ ├── PipelineManagerStepTests.groovy
│ ├── PreCommitStepTests.groovy
│ ├── PreCommitToJunitStepTests.groovy
│ ├── PromptStepTests.groovy
│ ├── PublishToCDNStepTests.groovy
│ ├── PushDockerImagesStepTests.groovy
│ ├── RandomNumberStepTests.groovy
│ ├── RandomStringStepTest.groovy
│ ├── ReleaseManagerAnalyserStepTests.groovy
│ ├── ReleaseManagerNotificationStepTests.groovy
│ ├── ReleaseManagerStepTests.groovy
│ ├── ReleaseNotificationStepTests.groovy
│ ├── RetryWithSleepStepTests.groovy
│ ├── RubygemsLoginStepTests.groovy
│ ├── RunE2EStepTests.groovy
│ ├── RunWatcherStepTests.groovy
│ ├── RunbldStepTests.groovy
│ ├── SendBenchmarksStepTests.groovy
│ ├── SendDataToElasticsearchStepTests.groovy
│ ├── SetEnvVarStepTests.groovy
│ ├── SetGithubCommitStatusStepTests.groovy
│ ├── SetupAPMGitEmailStepTests.groovy
│ ├── SnapshotyStepTests.groovy
│ ├── StackVersionsStepTests.groovy
│ ├── StageStatusCacheTests.groovy
│ ├── StashV2StepTests.groovy
│ ├── SuperLinterStepTests.groovy
│ ├── Tap2JunitStepTests.groovy
│ ├── TarStepTests.groovy
│ ├── ToJSONStepTests.groovy
│ ├── UnstashV2StepTests.groovy
│ ├── UntarStepTests.groovy
│ ├── UpdateGithubCommitStatusStepTests.groovy
│ ├── UploadPackagesToGoogleBucketExtStepTests.groovy
│ ├── WhenFalseStepTests.groovy
│ ├── WhenTrueStepTests.groovy
│ ├── WithAPMEnvStepTests.groovy
│ ├── WithAPMStepTests.groovy
│ ├── WithAWSEnvStepTests.groovy
│ ├── WithAzureCredentialsStepTests.groovy
│ ├── WithAzureEnvStepTests.groovy
│ ├── WithCloudEnvStepTests.groovy
│ ├── WithClusterEnvStepTests.groovy
│ ├── WithDockerEnvStepTests.groovy
│ ├── WithElasticsearchDeploymentEnvStepTests.groovy
│ ├── WithEnvMaskStepTests.groovy
│ ├── WithEsEnvStepTests.groovy
│ ├── WithFleetDeploymentEnvStepTests.groovy
│ ├── WithGCPEnvStepTests.groovy
│ ├── WithGhEnvStepTests.groovy
│ ├── WithGitReleaseStepTests.groovy
│ ├── WithGithubCheckStepTests.groovy
│ ├── WithGithubNotifyStepTests.groovy
│ ├── WithGithubStatusStepTests.groovy
│ ├── WithGoEnvStepTests.groovy
│ ├── WithGoEnvUnixStepTests.groovy
│ ├── WithGoEnvWindowsStepTests.groovy
│ ├── WithHubCredentialsStepTests.groovy
│ ├── WithKibanaDeploymentEnvStepTests.groovy
│ ├── WithKindEnvStepTests.groovy
│ ├── WithMageEnvStepTests.groovy
│ ├── WithNodeJSEnvStepTests.groovy
│ ├── WithNodeJSEnvUnixStepTests.groovy
│ ├── WithNodeStepTests.groovy
│ ├── WithNpmrcStepTests.groovy
│ ├── WithOtelEnvStepTests.groovy
│ ├── WithSecretVaultStepTests.groovy
│ ├── WithTerraformEnvStepTests.groovy
│ ├── WithTotpVaultStepTests.groovy
│ ├── WithVaultTokenStepTests.groovy
│ ├── WriteVaultSecretStepTests.groovy
│ ├── co
│ │ └── elastic
│ │ │ ├── TestUtils.groovy
│ │ │ └── mock
│ │ │ ├── DockerMock.groovy
│ │ │ ├── GetVaultSecretMock.groovy
│ │ │ ├── GithubEnvMock.groovy
│ │ │ ├── OtelHelperMock.groovy
│ │ │ ├── PullRequestMock.groovy
│ │ │ ├── RawBuildMock.groovy
│ │ │ ├── RunMock.groovy
│ │ │ ├── RunWrapperMock.groovy
│ │ │ ├── StepsMock.groovy
│ │ │ ├── WithGithubCheckMock.groovy
│ │ │ └── beats
│ │ │ ├── GetProjectDependencies.groovy
│ │ │ └── RunCommand.groovy
│ └── dummyDeclarativePipelineStepTests.groovy
│ └── resources
│ ├── 1744
│ ├── build-info.json
│ └── steps-errors.json
│ ├── __files
│ ├── .gitkeep
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-GDGfc.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-XJfvg.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-VgsWI.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-artifacts-p5tA3.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-blueTestSummary-A2SNo.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-changeSet-EJ5Ac.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-log-iMg03.txt
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-steps-7nqjX.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-0WDC4.json
│ ├── body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-C8sFC.json
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-abort-1-cobertura-api-json-QPHoF.txt
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-cobertura-1-cobertura-api-json-nxqFS.json
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-connectionRefused-1-cobertura-api-json-0OGve.txt
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-empty-1-cobertura-api-json-w9hla.txt
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-error-1-cobertura-api-json-w9hl2.txt
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-multiTestFailures-1-cobertura-api-json-A8nw5.txt
│ ├── body-job-it-job-getBuildInfoJsonFiles-job-success-1-cobertura-api-json-ZkKp7.txt
│ └── body-job-it-job-getBuildInfoJsonFiles-job-unstable-1-cobertura-api-json-vpDs1.txt
│ ├── artifacts-info.json
│ ├── build-info-manual.json
│ ├── build-info.json
│ ├── build-info_aborted.json
│ ├── build-info_aborted_allowed_to_run.json
│ ├── build-report.json
│ ├── buildKibana
│ └── package.json
│ ├── changeSet-info-empty-issues.json
│ ├── changeSet-info-manual.json
│ ├── changeSet-info.json
│ ├── corrupted
│ ├── build-info.json
│ ├── changeSet-info.json
│ ├── steps-errors.json
│ ├── tests-errors.json
│ └── tests-summary.json
│ ├── empty
│ ├── build-info.json
│ ├── changeSet-info.json
│ └── tests-summary.json
│ ├── env-info.json
│ ├── filebeatTest
│ └── filebeat_container_worker-0676d01d9601f8191.json
│ ├── filebeatTest_1
│ └── .empty
│ ├── filebeatTest_2
│ └── filebeat_container_worker-0676d01d9601f8191.json
│ ├── flake-empty-results.json
│ ├── flake-results.json
│ ├── flake-tests-errors-without-match.json
│ ├── flake-tests-errors.json
│ ├── flake-tests-summary.json
│ ├── folders
│ ├── beats.dsl
│ ├── getBuildInfoJsonFiles.dsl
│ └── it.dsl
│ ├── github-app-private-key-tests.pem
│ ├── job-info.json
│ ├── jobs
│ ├── beats
│ │ ├── beatsStages.dsl
│ │ └── beatsWhen.dsl
│ ├── cancelPreviousRunningBuilds.dsl
│ ├── cmd.dsl
│ ├── dockerLogin.dsl
│ ├── dockerLogs.dsl
│ ├── downstream.dsl
│ ├── gce-test-orchestrator.dsl
│ ├── gce-test.dsl
│ ├── getBuildInfoJsonFiles
│ │ ├── abort.dsl
│ │ ├── cobertura.dsl
│ │ ├── connectionRefused.dsl
│ │ ├── error.dsl
│ │ ├── multiTestFailures.dsl
│ │ ├── success.dsl
│ │ └── unstable.dsl
│ ├── getGitMatchingGroup.dsl
│ ├── gh.dsl
│ ├── git.dsl
│ ├── gitBaseCommit.dsl
│ ├── gitCheckout.dsl
│ ├── githubCheck.dsl
│ ├── githubCreateIssue.dsl
│ ├── githubCreatePullRequest.dsl
│ ├── githubEnv.dsl
│ ├── githubEnvSCM.dsl
│ ├── googleStorageUploadExt.dsl
│ ├── installTools.dsl
│ ├── isTimerTrigger.dsl
│ ├── isUserTrigger.dsl
│ ├── log.dsl
│ ├── matrix.dsl
│ ├── matrixNoAgent.dsl
│ ├── parentstream.dsl
│ ├── pipelineManager.dsl
│ ├── runWatcher.dsl
│ ├── runbld.dsl
│ ├── stashV2.dsl
│ ├── superLinter.dsl
│ ├── tar.dsl
│ ├── untar.dsl
│ ├── withAWSEnv.dsl
│ ├── withGitRelease.dsl
│ ├── withTotpVault.dsl
│ └── writeVaultSecret.dsl
│ ├── mappings
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort-062e59bc-aee5-43a9-b51d-675840836041.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort-d38e98f8-c876-4623-9320-b4ce5bd28a11.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1-c5c09898-834c-424f-9ca8-9f4e91e7fc8b.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_artifacts-f043eb1e-0073-48da-9893-dd59461efb3f.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_bluetestsummary-41b90449-f83e-42b2-8f22-4ee5efc95444.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_changeset-90fae60a-9bb0-4fff-9113-2f367b0b9408.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_log-8af6681b-d3a1-4a7e-9a4f-217d6ff17541.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_steps-f9074b7b-7552-4ad1-b753-22349b0f4aaf.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_tests-41732b44-c29c-45cd-a15c-cd2a5dd58832.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_abort_runs_1_tests-fa9db0ea-31b4-4dea-8ff2-529b6ae1051b.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty-e7245fc6-832c-4956-ad5f-ffe34cc47e9f.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty-f0372a29-0595-443e-b48d-307d40f71049.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1-b08cc966-0da5-4729-af36-6ff06aff9c83.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_artifacts-95d720ee-7fca-4e1b-944b-c0a331460ca5.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_bluetestsummary-e19c40b2-b92a-4a43-9825-5ee8d80188da.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_changeset-23cd877c-e408-46dc-ae5c-cb6acccf0708.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_log-5112a750-8f3f-4fde-8051-7d88f508da5f.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_steps-d0c9398e-e195-4670-a60c-42f6137c3e6a.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_tests-36ba286f-707b-40e0-b5ae-572b8abfc7e2.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_empty_runs_1_tests-c1fa48c7-e337-4393-998a-7040168b6d4c.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error-2c3a2576-4e16-4edf-b2f7-0788ecac483c.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error-3017b94a-413e-461a-a7f8-02f8ee07f2cd.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1-265a8c02-2f8f-46e5-8b9a-c5d9c2c7f75e.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_artifacts-e7860b67-f182-4323-af71-44bb34eceb3b.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_bluetestsummary-99dd832b-b8ee-4db2-9982-dca134cae081.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_changeset-92c31070-396c-47e7-b1fa-c997c717d279.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_log-844f7f75-8fcf-41e4-80f8-4fa1bd4be506.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_steps-61d5f603-0b12-44f1-a722-b476a42c0cdc.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_tests-6b4c64e1-08e4-4741-8a99-3eed927add1a.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_error_runs_1_tests-8e222025-8ab8-4bba-94b5-2b6a11127185.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures-a6e01d95-1c5d-44f8-a6fc-c97d89a54bcc.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures-b6ac8d3a-d153-4f70-afdf-edff5fa3c013.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1-73293896-1f81-4a49-9353-de3ade828c18.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1-a4bc67e2-46f1-4f55-aedd-27b8b0d7df62.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_artifacts-85ae4332-1700-476b-98a2-8da3b39c9a02.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_bluetestsummary-651dd676-7497-4cef-a114-f75ce1295fa5.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_changeset-d7963f13-7b6f-4ed9-a790-38420a365fa7.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_log-f78c0b1b-c892-4c95-8b42-0fd3a0cf5204.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_steps-d56fb1f2-cfe1-4f48-9b1a-5040f0109bef.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_tests-78b81daa-a2cd-4364-8d61-7bb0ea569daa.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_multitestfailures_runs_1_tests-f101af53-e5f6-4361-9a94-73cd3fb7f327.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success-951eb641-b8a4-4e00-af66-fe49d9d300ab.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success-bdbe8994-139e-4998-b4a4-3c517d841215.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1-1e1f97fa-e511-4db4-b588-f42600e08d81.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_artifacts-d535d999-487d-4dd4-8355-92b0b8c6b493.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_bluetestsummary-02390f58-e530-4635-adba-7401a88979c2.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_changeset-9c643d09-973d-4006-94cd-4666a9805cd8.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_log-dd0fd32c-55a5-45e3-880c-25d188dfde63.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_steps-fb5412aa-feea-4ccd-a33f-60d6bc76e741.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_tests-8aa319a8-dd29-45f0-b369-47737f0225fa.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_success_runs_1_tests-b502bfa7-b12d-42d8-83bf-d32783df3c7a.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable-4e78d485-81c1-4e30-90b9-8087ecf4556e.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable-5fec85fb-b2f9-45e3-825b-44ed5764c7ed.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1-c5b56bf5-50f8-4fab-994a-f1b8c8cab2fa.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_artifacts-a8843df0-75ef-4730-ab68-292fdee1e864.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_bluetestsummary-5a7821d9-8e71-4812-942f-4dd7322074b9.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_changeset-b94d99a2-a954-40d7-92b9-2d622e8baf53.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_log-5cd4b2df-95a8-49af-8443-9732239d5959.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_steps-0152ed4b-b083-4072-830e-7768364a3597.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_tests-134d5192-5383-40c8-9570-5a258d73f950.json
│ ├── blue_rest_organizations_jenkins_pipelines_it_getbuildinfojsonfiles_unstable_runs_1_tests-f64d7545-f136-4096-ab47-bf94114fbe7a.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-GDGfc.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-XJfvg.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-VgsWI.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-artifacts-p5tA3.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-blueTestSummary-A2SNo.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-changeSet-EJ5Ac.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-log-iMg03.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-steps-7nqjX.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-0WDC4.json
│ ├── mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-C8sFC.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-abort-1-cobertura-api-json-QPHoF.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-cobertura-1-cobertura-api-json-nxqFS.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-connectionRefused-1-cobertura-api-json-0OGve.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-empty-1-cobertura-api-json-w9hla.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-error-1-cobertura-api-json-w9hl2.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-multiTestFailures-1-cobertura-api-json-A8nw5.json
│ ├── mapping-job-it-job-getBuildInfoJsonFiles-job-success-1-cobertura-api-json-ZkKp7.json
│ └── mapping-job-it-job-getBuildInfoJsonFiles-job-unstable-1-cobertura-api-json-vpDs1.json
│ ├── metricbeatTest
│ └── metricbeat_container_worker-0676d01d9601f8191.json
│ ├── metricbeatTest_1
│ └── .empty
│ ├── metricbeatTest_2
│ └── metricbeat_container_worker-0676d01d9601f8191.json
│ ├── mockito-extensions
│ └── org.mockito.plugins.MockMaker
│ ├── pipeline-log.txt
│ ├── preCommitToJunit
│ ├── gherkin.txt
│ ├── output
│ │ ├── gherkin.xml
│ │ ├── pre-commit.xml
│ │ ├── simple.xml
│ │ └── skipped.xml
│ ├── pre-commit.txt
│ ├── simple.txt
│ └── skipped.txt
│ ├── steps-errors-allowed-to-run.json
│ ├── steps-errors-with-also-github-environmental-issue.json
│ ├── steps-errors-with-deleteDir-issue.json
│ ├── steps-errors-with-github-environmental-issue.json
│ ├── steps-errors-with-multiline.json
│ ├── steps-errors-with-multiple.json
│ ├── steps-errors.json
│ ├── steps-info.json
│ ├── tests-cobertura.json
│ ├── tests-errors-with-long-stacktrace.json
│ ├── tests-errors.json
│ ├── tests-info.json
│ ├── tests-summary.json
│ ├── tests-summary_failed.json
│ ├── watcher-output-no-log.json
│ ├── watcher-output-without-test-failures.json
│ └── watcher-output.json
├── updatecli
├── updatecli.d
│ └── bump-aliases.yml
└── values.yml
└── vars
├── README.md
├── abortBuild.groovy
├── abortBuild.txt
├── agentMapping.groovy
├── agentMapping.txt
├── apmCli.groovy
├── apmCli.txt
├── artifactsApi.groovy
├── artifactsApi.txt
├── axis.groovy
├── axis.txt
├── base64decode.groovy
├── base64decode.txt
├── base64encode.groovy
├── base64encode.txt
├── beatsStages.groovy
├── beatsStages.txt
├── beatsWhen.groovy
├── beatsWhen.txt
├── buildKibanaDockerImage.groovy
├── buildKibanaDockerImage.txt
├── buildStatus.groovy
├── buildStatus.txt
├── bumpUtils.groovy
├── bumpUtils.txt
├── cancelPreviousRunningBuilds.groovy
├── cancelPreviousRunningBuilds.txt
├── checkGitChanges.txt
├── checkLicenses.groovy
├── checkLicenses.txt
├── checkout.groovy
├── checkout.txt
├── cmd.groovy
├── cmd.txt
├── codecov.groovy
├── codecov.txt
├── convertGoTestResults.groovy
├── convertGoTestResults.txt
├── coverageReport.groovy
├── coverageReport.txt
├── createFileFromTemplate.groovy
├── createFileFromTemplate.txt
├── detailsURL.groovy
├── detailsURL.txt
├── dockerContext.groovy
├── dockerContext.txt
├── dockerImageExists.groovy
├── dockerImageExists.txt
├── dockerLogin.groovy
├── dockerLogin.txt
├── dockerLogs.groovy
├── dockerLogs.txt
├── download.groovy
├── download.txt
├── downloadWithCurl.groovy
├── downloadWithCurl.txt
├── downloadWithWget.groovy
├── downloadWithWget.txt
├── dummy.groovy
├── dummy.txt
├── dummyDeclarativePipeline.groovy
├── dummyDeclarativePipeline.txt
├── echoColor.groovy
├── echoColor.txt
├── errorIfEmpty.groovy
├── errorIfEmpty.txt
├── fastCheckout.groovy
├── fastCheckout.txt
├── filebeat.groovy
├── filebeat.txt
├── findOldestSupportedVersion.groovy
├── findOldestSupportedVersion.txt
├── flattenMap.groovy
├── flattenMap.txt
├── generateChangelog.groovy
├── generateChangelog.txt
├── generateGoBenchmarkDiff.groovy
├── generateGoBenchmarkDiff.txt
├── generateReport.groovy
├── generateReport.txt
├── getBlueoceanDisplayURL.groovy
├── getBlueoceanDisplayURL.txt
├── getBlueoceanRestURLJob.groovy
├── getBlueoceanRestURLJob.txt
├── getBlueoceanTabURL.groovy
├── getBlueoceanTabURL.txt
├── getBranchNameFromArtifactsAPI.groovy
├── getBranchNameFromArtifactsAPI.txt
├── getBranchUnifiedRelease.groovy
├── getBranchUnifiedRelease.txt
├── getBranchesFromAliases.groovy
├── getBranchesFromAliases.txt
├── getBuildInfoJsonFiles.groovy
├── getBuildInfoJsonFiles.txt
├── getCurrentBuildTime.groovy
├── getCurrentBuildTime.txt
├── getFlakyJobName.groovy
├── getFlakyJobName.txt
├── getGitCommitSha.groovy
├── getGitCommitSha.txt
├── getGitMatchingGroup.groovy
├── getGitMatchingGroup.txt
├── getGitRepoURL.groovy
├── getGitRepoURL.txt
├── getGithubToken.groovy
├── getGithubToken.txt
├── getModulesFromCommentTrigger.groovy
├── getModulesFromCommentTrigger.txt
├── getStageId.groovy
├── getStageId.txt
├── getTestClusterSecret.groovy
├── getTestClusterSecret.txt
├── getTraditionalPageURL.groovy
├── getTraditionalPageURL.txt
├── getVaultSecret.groovy
├── getVaultSecret.txt
├── gh.groovy
├── gh.txt
├── git.groovy
├── git.txt
├── gitChangelog.groovy
├── gitChangelog.txt
├── gitCheckout.groovy
├── gitCheckout.txt
├── gitCmd.groovy
├── gitCmd.txt
├── gitCreateTag.groovy
├── gitCreateTag.txt
├── gitDeleteTag.groovy
├── gitDeleteTag.txt
├── gitPush.groovy
├── gitPush.txt
├── githubApiCall.groovy
├── githubApiCall.txt
├── githubAppToken.groovy
├── githubAppToken.txt
├── githubBranchRef.groovy
├── githubBranchRef.txt
├── githubCheck.groovy
├── githubCheck.txt
├── githubCommentIssue.groovy
├── githubCommentIssue.txt
├── githubCreateIssue.groovy
├── githubCreateIssue.txt
├── githubCreatePullRequest.groovy
├── githubCreatePullRequest.txt
├── githubEnv.groovy
├── githubEnv.txt
├── githubIssues.groovy
├── githubIssues.txt
├── githubPrCheckApproved.groovy
├── githubPrCheckApproved.txt
├── githubPrComment.groovy
├── githubPrComment.txt
├── githubPrExists.groovy
├── githubPrExists.txt
├── githubPrInfo.groovy
├── githubPrInfo.txt
├── githubPrLabels.groovy
├── githubPrLabels.txt
├── githubPrLatestComment.groovy
├── githubPrLatestComment.txt
├── githubPrReviews.groovy
├── githubPrReviews.txt
├── githubPullRequests.groovy
├── githubPullRequests.txt
├── githubReleaseCreate.groovy
├── githubReleaseCreate.txt
├── githubReleasePublish.groovy
├── githubReleasePublish.txt
├── githubRepoGetUserPermission.groovy
├── githubRepoGetUserPermission.txt
├── githubTraditionalPrComment.groovy
├── githubTraditionalPrComment.txt
├── githubWorkflowRun.groovy
├── githubWorkflowRun.txt
├── goDefaultVersion.groovy
├── goDefaultVersion.txt
├── goTestJUnit.groovy
├── goTestJUnit.txt
├── goVersion.groovy
├── goVersion.txt
├── googleStorageUploadExt.groovy
├── googleStorageUploadExt.txt
├── gsutil.groovy
├── gsutil.txt
├── hasCommentAuthorWritePermissions.groovy
├── hasCommentAuthorWritePermissions.txt
├── httpRequest.groovy
├── httpRequest.txt
├── installTools.groovy
├── installTools.txt
├── is32.groovy
├── is32.txt
├── is32arm.groovy
├── is32arm.txt
├── is32x86.groovy
├── is32x86.txt
├── is64.groovy
├── is64.txt
├── is64arm.groovy
├── is64arm.txt
├── is64x86.groovy
├── is64x86.txt
├── isArm.groovy
├── isArm.txt
├── isBeforeGo1_16.groovy
├── isBeforeGo1_16.txt
├── isBranch.groovy
├── isBranch.txt
├── isBranchIndexTrigger.groovy
├── isBranchIndexTrigger.txt
├── isBranchUnifiedReleaseAvailable.groovy
├── isBranchUnifiedReleaseAvailable.txt
├── isBuildFailure.groovy
├── isBuildFailure.txt
├── isCommentTrigger.groovy
├── isCommentTrigger.txt
├── isDarwin.groovy
├── isDarwin.txt
├── isEmpty.groovy
├── isEmpty.txt
├── isGitRegionMatch.groovy
├── isGitRegionMatch.txt
├── isInstalled.groovy
├── isInstalled.txt
├── isInternalCI.groovy
├── isInternalCI.txt
├── isMemberOf.groovy
├── isMemberOf.txt
├── isMemberOfOrg.groovy
├── isMemberOfOrg.txt
├── isPR.groovy
├── isPR.txt
├── isPluginInstalled.groovy
├── isPluginInstalled.txt
├── isStaticWorker.groovy
├── isStaticWorker.txt
├── isTag.groovy
├── isTag.txt
├── isTimerTrigger.groovy
├── isTimerTrigger.txt
├── isUpstreamTrigger.groovy
├── isUpstreamTrigger.txt
├── isUserTrigger.groovy
├── isUserTrigger.txt
├── isX86.groovy
├── isX86.txt
├── junit2otel.groovy
├── junit2otel.txt
├── junitAndStore.groovy
├── junitAndStore.txt
├── listGithubReleases.groovy
├── listGithubReleases.txt
├── log.groovy
├── log.txt
├── lookForGitHubIssues.groovy
├── lookForGitHubIssues.txt
├── matchesPrLabel.groovy
├── matchesPrLabel.txt
├── matrix.groovy
├── matrix.txt
├── metricbeat.groovy
├── metricbeat.txt
├── mvnVersion.groovy
├── mvnVersion.txt
├── nexusCloseStagingRepository.groovy
├── nexusCloseStagingRepository.txt
├── nexusCreateStagingRepository.groovy
├── nexusCreateStagingRepository.txt
├── nexusDropStagingRepository.groovy
├── nexusDropStagingRepository.txt
├── nexusFindStagingId.groovy
├── nexusFindStagingId.txt
├── nexusReleaseStagingRepository.groovy
├── nexusReleaseStagingRepository.txt
├── nexusUploadStagingArtifact.groovy
├── nexusUploadStagingArtifact.txt
├── nodeArch.groovy
├── nodeArch.txt
├── nodeJSDefaultVersion.groovy
├── nodeJSDefaultVersion.txt
├── nodeOS.groovy
├── nodeOS.txt
├── notifyBuildResult.groovy
├── notifyBuildResult.txt
├── notifyStalledBeatsBumps.groovy
├── notifyStalledBeatsBumps.txt
├── obltGitHubComments.groovy
├── obltGitHubComments.txt
├── opbeansPipeline.groovy
├── opbeansPipeline.txt
├── otelHelper.groovy
├── otelHelper.txt
├── pipelineManager.groovy
├── pipelineManager.txt
├── preCommit.groovy
├── preCommit.txt
├── preCommitToJunit.groovy
├── preCommitToJunit.txt
├── prompt.groovy
├── prompt.txt
├── publishToCDN.groovy
├── publishToCDN.txt
├── pushDockerImages.groovy
├── pushDockerImages.txt
├── randomNumber.groovy
├── randomNumber.txt
├── randomString.groovy
├── randomString.txt
├── releaseManager.groovy
├── releaseManager.txt
├── releaseManagerAnalyser.groovy
├── releaseManagerAnalyser.txt
├── releaseManagerNotification.groovy
├── releaseManagerNotification.txt
├── releaseNotification.groovy
├── releaseNotification.txt
├── retryWithSleep.groovy
├── retryWithSleep.txt
├── rubygemsLogin.groovy
├── rubygemsLogin.txt
├── runE2E.groovy
├── runE2E.txt
├── runWatcher.groovy
├── runWatcher.txt
├── runbld.groovy
├── runbld.txt
├── sendBenchmarks.groovy
├── sendBenchmarks.txt
├── sendDataToElasticsearch.groovy
├── sendDataToElasticsearch.txt
├── setEnvVar.groovy
├── setEnvVar.txt
├── setGithubCommitStatus.groovy
├── setGithubCommitStatus.txt
├── setupAPMGitEmail.groovy
├── setupAPMGitEmail.txt
├── snapshoty.groovy
├── snapshoty.txt
├── stackVersions.groovy
├── stackVersions.txt
├── stageStatusCache.groovy
├── stageStatusCache.txt
├── stashV2.groovy
├── stashV2.txt
├── superLinter.groovy
├── superLinter.txt
├── tap2Junit.groovy
├── tap2Junit.txt
├── tar.groovy
├── tar.txt
├── toJSON.groovy
├── toJSON.txt
├── unstashV2.groovy
├── unstashV2.txt
├── untar.groovy
├── untar.txt
├── updateGithubCommitStatus.groovy
├── updateGithubCommitStatus.txt
├── uploadPackagesToGoogleBucket.groovy
├── uploadPackagesToGoogleBucket.txt
├── whenFalse.groovy
├── whenFalse.txt
├── whenTrue.groovy
├── whenTrue.txt
├── withAPM.groovy
├── withAPM.txt
├── withAPMEnv.groovy
├── withAPMEnv.txt
├── withAWSEnv.groovy
├── withAWSEnv.txt
├── withAzureCredentials.groovy
├── withAzureCredentials.txt
├── withAzureEnv.groovy
├── withAzureEnv.txt
├── withCloudEnv.groovy
├── withCloudEnv.txt
├── withClusterEnv.groovy
├── withClusterEnv.txt
├── withDockerEnv.groovy
├── withDockerEnv.txt
├── withElasticsearchDeploymentEnv.groovy
├── withElasticsearchDeploymentEnv.txt
├── withEnvMask.groovy
├── withEnvMask.txt
├── withEsEnv.groovy
├── withEsEnv.txt
├── withFleetDeploymentEnv.groovy
├── withFleetDeploymentEnv.txt
├── withGCPEnv.groovy
├── withGCPEnv.txt
├── withGhEnv.groovy
├── withGhEnv.txt
├── withGitRelease.groovy
├── withGitRelease.txt
├── withGithubCheck.groovy
├── withGithubCheck.txt
├── withGithubNotify.groovy
├── withGithubNotify.txt
├── withGithubStatus.groovy
├── withGithubStatus.txt
├── withGoEnv.groovy
├── withGoEnv.txt
├── withGoEnvUnix.groovy
├── withGoEnvUnix.txt
├── withGoEnvWindows.groovy
├── withGoEnvWindows.txt
├── withHubCredentials.groovy
├── withHubCredentials.txt
├── withKibanaDeploymentEnv.groovy
├── withKibanaDeploymentEnv.txt
├── withKindEnv.groovy
├── withKindEnv.txt
├── withMageEnv.groovy
├── withMageEnv.txt
├── withNode.groovy
├── withNode.txt
├── withNodeJSEnv.groovy
├── withNodeJSEnv.txt
├── withNodeJSEnvUnix.groovy
├── withNodeJSEnvUnix.txt
├── withNpmrc.groovy
├── withNpmrc.txt
├── withOtelEnv.groovy
├── withOtelEnv.txt
├── withPackerEnv.groovy
├── withPackerEnv.txt
├── withSecretVault.groovy
├── withSecretVault.txt
├── withTerraformEnv.groovy
├── withTerraformEnv.txt
├── withTotpVault.groovy
├── withTotpVault.txt
├── withVaultToken.groovy
├── withVaultToken.txt
├── writeVaultSecret.groovy
└── writeVaultSecret.txt
/.ci/.yamlint.yml:
--------------------------------------------------------------------------------
1 | extends: default
2 |
3 | rules:
4 | # 120 chars should be enough, but don't fail if a line is longer
5 | line-length:
6 | max: 120
7 | level: warning
8 |
9 | indentation:
10 | level: warning
11 |
--------------------------------------------------------------------------------
/.ci/docker/dind-buildx/config.json:
--------------------------------------------------------------------------------
1 | { "credsStore": "env" }
2 |
--------------------------------------------------------------------------------
/.ci/docker/dind-buildx/docker-credential-env:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # Basic credential helper,
3 | # it uses the environment variables DOCKER_REGISTRY, DOCKER_USER, and DOCKER_PASSWORD to authenticate.
 4 | # inspired by https://gist.github.com/jasonk/480d87b49e4c8caf51932f184ff764b2
5 |
6 | die() {
7 | echo "$@" 1>&2
8 | exit 1
9 | }
10 |
11 | if [ -z "$DOCKER_REGISTRY" ]; then die "DOCKER_REGISTRY not set in environment"; fi
12 | case "$1" in
13 | get)
14 | read -r HOST
15 | if [ "$HOST" = "$DOCKER_REGISTRY" ]; then
16 | printf '{"ServerURL":"%s","Username":"%s","Secret":"%s"}\n' "$HOST" "$DOCKER_USER" "$DOCKER_PASSWORD"
17 | else
18 | die "No credentials available for $HOST"
19 | fi
20 | ;;
21 | store)
22 | ;;
23 | erase)
24 | ;;
25 | *)
26 | die "Unsupported operation"
27 | ;;
28 | esac
29 |
--------------------------------------------------------------------------------
/.ci/docker/github-label-sync/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:15.5.0-alpine3.12
2 |
3 | RUN npm install github-label-sync@2.0.0 -g
4 | WORKDIR /app
5 | ENTRYPOINT [ "/usr/local/bin/github-label-sync" ]
6 |
--------------------------------------------------------------------------------
/.ci/docker/jenkins-agent/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM docker.elastic.co/infra/jenkins-swarm:alma8
2 |
3 | # Need to switch to the `root` user, as the default is `jenkins`.
4 | USER root
5 |
6 | # Install jq
7 | RUN dnf install -qy jq
8 |
9 | # Install wget
10 | RUN dnf install -qy wget
11 |
12 | # Install hub
13 | RUN wget https://github.com/github/hub/releases/download/v2.14.2/hub-linux-amd64-2.14.2.tgz -O /tmp/hub.tgz \
14 | && mkdir /hub \
15 | && tar xvf /tmp/hub.tgz -C /hub --strip-components 1 \
16 | && ln -fs /hub/bin/hub /usr/local/bin/hub \
17 | && rm /tmp/hub.tgz
18 |
19 | # Install gh
20 | RUN dnf -qy install 'dnf-command(config-manager)' \
21 | && dnf config-manager --add-repo https://cli.github.com/packages/rpm/gh-cli.repo \
22 | && dnf install -qy gh \
23 | && dnf -qy clean all
24 |
25 | # Switch back to the `jenkins` user, which is the user the agent runs as.
26 | USER jenkins
27 |
--------------------------------------------------------------------------------
/.ci/docker/shellcheck/Dockerfile:
--------------------------------------------------------------------------------
1 | # Resulting Alpine image
2 | FROM alpine:3.10.1
3 | COPY --from=koalaman/shellcheck:v0.9.0@sha256:a527e2077f11f28c1c1ad1dc784b5bc966baeb3e34ef304a0ffa72699b01ad9c /bin/shellcheck /bin
4 | RUN /bin/shellcheck -V
5 | WORKDIR /mnt
6 |
7 | ENTRYPOINT ["/bin/shellcheck"]
8 | CMD ["-V"]
9 |
--------------------------------------------------------------------------------
/.ci/docker/tests/test_helpers.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bats
2 | # shellcheck shell=bash
3 |
4 | # check dependencies
 5 | {
 6 |   type docker &>/dev/null || { echo "docker is not available"; exit 1; }
 7 |   type curl &>/dev/null || { echo "curl is not available"; exit 1; }
 8 | } >&2
9 |
10 | function cleanup {
11 | docker kill "$1" &>/dev/null ||:
12 | docker rm -fv "$1" &>/dev/null ||:
13 | }
14 |
--------------------------------------------------------------------------------
/.ci/docker/tests/tests_without_run.bats:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bats
2 |
3 | load 'test_helper/bats-support/load'
4 | load 'test_helper/bats-assert/load'
5 | load test_helpers
6 |
7 | IMAGE="docker.elastic.co/observability-ci/${DOCKERFILE//\//-}"
8 | CONTAINER="${DOCKERFILE//\//-}"
9 |
10 | @test "${DOCKERFILE} - build image" {
 11 |   cd "$BATS_TEST_DIRNAME/.."
 12 |   # Simplify the makefile as it does fail with '/bin/sh: 1: Bad substitution' in the CI
 13 |   if [ ! -e "${DOCKERFILE}" ] ; then
 14 |     DOCKERFILE="${DOCKERFILE//-//}"
 15 |   fi
 16 |   run docker build --rm -t "${IMAGE}" "${DOCKERFILE}"
17 | assert_success
18 | }
19 |
20 | @test "${DOCKERFILE} - clean test containers" {
 21 |   cleanup "$CONTAINER"
22 | }
23 |
--------------------------------------------------------------------------------
/.ci/docker/vmware-mock/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM golang:1.17.7-alpine3.15
2 |
3 | # download and compile vcsim and govc
4 | # govc for govc find -l debug purposes
 5 | RUN go install github.com/vmware/govmomi/vcsim@v0.27.4 \
 6 |     && go install github.com/vmware/govmomi/govc@v0.27.4
7 |
8 | # default exposed port is 443
9 | EXPOSE 443
10 |
11 | # run start command
12 | ENTRYPOINT ["vcsim"]
13 | CMD ["-l", "0.0.0.0:443"]
14 |
--------------------------------------------------------------------------------
/.ci/docker/yamllint/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7.4-alpine3.10
2 |
3 | WORKDIR /yaml
4 |
5 | RUN pip install yamllint==1.16.0 && \
6 | rm -rf ~/.cache/pip
7 |
8 | ENTRYPOINT ["yamllint"]
9 | CMD ["--version"]
10 |
--------------------------------------------------------------------------------
/.ci/jobs/apm-shared.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - job:
3 | name: apm-shared
4 | description: apm-shared
5 | project-type: folder
6 |
--------------------------------------------------------------------------------
/.ci/jobs/defaults.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | ##### GLOBAL METADATA
4 |
5 | - meta:
6 | cluster: apm-ci
7 |
8 | ##### JOB DEFAULTS
9 |
10 | - job:
11 | logrotate:
12 | numToKeep: 20
13 | node: linux
14 | concurrent: true
15 | publishers:
16 | - email:
17 | recipients: infra-root+build@elastic.co
 18 |       # Webhook based rather than polling, otherwise the GitHub API quota
 19 |       # would be exhausted. For that reason, periodic-folder-trigger is not
 20 |       # needed anymore, so we keep the comment below for clarity.
21 | # periodic-folder-trigger: 1w
22 | prune-dead-branches: true
23 |
--------------------------------------------------------------------------------
/.ci/scripts/codecov:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 |
4 | IMAGE="docker.elastic.co/observability-ci/codecov"
5 | docker pull "${IMAGE}" > /dev/null || true
6 |
7 | docker run --rm -t -v "$(pwd):/app" -u "$(id -u):$(id -g)" "${IMAGE}" "$@"
8 |
--------------------------------------------------------------------------------
/.ci/scripts/markdown-lint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
 2 | set -e; shopt -s globstar nullglob  # globstar so **/*.md recurses into subdirectories
3 | IMAGE="ghcr.io/tcort/markdown-link-check:3.8.5"
4 | docker pull "${IMAGE}" > /dev/null || true
5 |
6 | for f in **/*.md
7 | do
8 | docker run --rm -t -v "${PWD}:/markdown:ro" -u "$(id -u):$(id -g)" "${IMAGE}" "--progress" "/markdown/${f}"
9 | done
10 |
--------------------------------------------------------------------------------
/.ci/scripts/shellcheck:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 | IMAGE="docker.elastic.co/observability-ci/shellcheck"
4 | docker pull "${IMAGE}" > /dev/null || true
5 | docker run --rm -t -v "$PWD:/mnt" -u "$(id -u):$(id -g)" "${IMAGE}" "$@"
6 |
--------------------------------------------------------------------------------
/.ci/scripts/validate-docker-images-config-yaml.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eo pipefail
3 |
4 | docker run --rm -t -v "${PWD}:/mnt:ro" -w /mnt node:19-alpine npx -y ajv-cli@5.0.0 test -s .ci/.docker-images.schema.json -d .ci/.docker-images.yml --valid --verbose
5 |
--------------------------------------------------------------------------------
/.ci/scripts/validate.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eo pipefail
3 |
4 | if [ -z "${JENKINS_URL}" ] ; then
5 | JENKINS_URL=https://beats-ci.elastic.co
6 | else
7 | # See https://jenkins.io/doc/book/pipeline/development/#linter
8 | JENKINS_URL=${JENKINS_URL%%/} ## Remove last / if it's defined
9 | JENKINS_CRUMB=$(curl --silent "${JENKINS_URL}/crumbIssuer/api/xml?xpath=concat(//crumbRequestField,\":\",//crumb)")
10 | fi
11 |
12 | ## Validate whether the URL is reachable before running anything else
 13 | curl --silent "${JENKINS_URL}/" > /dev/null
14 |
15 | ## Iterate for each file without failing fast.
 16 | set +e; exit_status=0
17 | for file in "$@"; do
 18 |   if curl --silent -X POST -H "${JENKINS_CRUMB}" -F "jenkinsfile=<${file}" "${JENKINS_URL}/pipeline-model-converter/validate" | grep -i -v successfully ; then
19 | echo "ERROR: jenkinslint failed for the file '${file}'"
20 | exit_status=1
21 | fi
22 | done
23 |
24 | exit $exit_status
25 |
--------------------------------------------------------------------------------
/.ci/scripts/yamllint:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -e
3 | IMAGE="docker.elastic.co/observability-ci/yamllint"
4 | docker pull "${IMAGE}" > /dev/null || true
5 | docker run --rm -t -v "$PWD:/yaml" -u "$(id -u):$(id -g)" "${IMAGE}" --config-file .ci/.yamlint.yml "$@"
6 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # editorconfig.org
2 | root = true
3 |
4 | [*]
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | charset = utf-8
9 | trim_trailing_whitespace = true
10 | insert_final_newline = true
11 |
12 | [*.asciidoc]
13 | trim_trailing_whitespace = false
14 |
15 | [Jenkinsfile]
16 | indent_style = space
17 | indent_size = 2
18 | end_of_line = lf
19 | charset = utf-8
20 | trim_trailing_whitespace = true
21 | insert_final_newline = true
22 |
23 | [*.groovy]
24 | indent_style = space
25 | indent_size = 2
26 | end_of_line = lf
27 | charset = utf-8
28 | trim_trailing_whitespace = true
29 | insert_final_newline = true
30 |
31 | [*.dsl]
32 | indent_style = space
33 | indent_size = 2
34 | end_of_line = lf
35 | charset = utf-8
36 | trim_trailing_whitespace = true
37 | insert_final_newline = true
38 |
39 | [{Makefile,**.mk}]
40 | # Use tabs for indentation (Makefiles require tabs)
41 | indent_style = tab
42 |
--------------------------------------------------------------------------------
/.gcloudignore:
--------------------------------------------------------------------------------
1 | # Ignore generated credentials from google-github-actions/auth
2 | gha-creds-*.json
3 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # GitHub CODEOWNERS definition
2 | # See: https://help.github.com/articles/about-codeowners/
3 |
4 | # APM agents
5 | /resources/approval-list/apm-agent-*.yml @graphaelli
6 |
7 | # APM server
8 | /resources/approval-list/apm-server.yml @axw
9 |
10 | # Beats
11 | /resources/approval-list/beats.yml @andresrc
12 |
13 | # pytest_otel
14 | resources/scripts/pytest_otel @kuisathaverat
15 |
16 | # Auto assign reviews
17 | * @elastic/observablt-ci
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## What does this PR do?
2 |
3 |
6 |
7 | ## Why is it important?
8 |
9 |
12 |
13 | ## Related issues
14 | Closes #ISSUE
15 |
--------------------------------------------------------------------------------
/.github/actions/buildkite/cancel.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Cancel the running build
4 | #
5 | set -euo pipefail
6 |
7 | MSG="parameter missing."
8 | ORG=${1:?$MSG}
9 | PIPELINE=${2:?$MSG}
10 | NUMBER=${3:?$MSG}
11 | BK_TOKEN=${4:?$MSG}
12 |
13 | curl \
14 | --no-progress-meter \
15 | -H "Authorization: Bearer $BK_TOKEN" \
16 | -X "PUT" \
17 | "https://api.buildkite.com/v2/organizations/$ORG/pipelines/$PIPELINE/builds/$NUMBER/cancel"
18 |
--------------------------------------------------------------------------------
/.github/actions/kibana-docker-image/setup-vars.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | docker_image="kibana-cloud"
6 | if [[ "${SERVERLESS}" == "true" ]]; then
7 | docker_image="kibana-serverless"
8 | fi
9 |
10 | kibana_commit_sha=$(git rev-parse HEAD)
11 | kibana_stack_version="$(jq -r .version package.json)-SNAPSHOT"
12 | docker_tag="${kibana_stack_version}-${kibana_commit_sha}"
13 | docker_reference="${DOCKER_REGISTRY}/${DOCKER_NAMESPACE}/${docker_image}:${docker_tag}"
14 | {
15 | echo "kibana-stack-version=${kibana_stack_version}"
16 | echo "kibana-commit-sha=${kibana_commit_sha}"
17 | echo "docker-registry=${DOCKER_REGISTRY}"
18 | echo "docker-namespace=${DOCKER_NAMESPACE}"
19 | echo "docker-image=${docker_image}"
20 | echo "docker-tag=${docker_tag}"
21 | echo "docker-reference=${docker_reference}"
22 | } >> "${GITHUB_OUTPUT}"
23 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/.eslintignore:
--------------------------------------------------------------------------------
1 | dist/
2 | lib/
3 | node_modules/
4 | jest.config.js
5 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/.gitattributes:
--------------------------------------------------------------------------------
1 | dist/** -diff linguist-generated=true
2 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/.node-version:
--------------------------------------------------------------------------------
1 | 18.17.0
2 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/.prettierignore:
--------------------------------------------------------------------------------
1 | dist/
2 | lib/
3 | node_modules/
4 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 120,
3 | "tabWidth": 2,
4 | "useTabs": false,
5 | "semi": false,
6 | "singleQuote": true,
7 | "trailingComma": "none",
8 | "bracketSpacing": false,
9 | "arrowParens": "avoid"
10 | }
11 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest',
3 | testEnvironment: 'node'
4 | }
5 |
--------------------------------------------------------------------------------
/.github/actions/notify-build-status/src/index.ts:
--------------------------------------------------------------------------------
1 | import {run} from './main'
2 |
3 | // noinspection JSIgnoredPromiseFromCall
4 | run()
5 |
--------------------------------------------------------------------------------
/.github/actions/publish-report/example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/.github/actions/publish-report/example.png
--------------------------------------------------------------------------------
/.github/actions/snapshoty/.node-version:
--------------------------------------------------------------------------------
1 | 20
2 |
--------------------------------------------------------------------------------
/.github/actions/snapshoty/action.yml:
--------------------------------------------------------------------------------
1 | name: 'Snapshoty'
2 | description: 'The best way to handle snapshot lifecycle.'
3 | inputs:
4 | config:
5 | description: 'Path to configuration file'
6 | required: true
7 | bucketName:
8 | description: 'Name of the bucket to use'
9 | required: true
10 | gcsClientEmail:
11 | description: 'Google Cloud email of the service account'
12 | required: true
13 | gcsPrivateKey:
14 | description: 'Google Cloud private key of the service account'
15 | required: true
16 | gcsPrivateKeyId:
17 | description: 'Google Cloud private key id of the service account'
18 | required: true
19 | gcsProject:
20 | description: 'Google Cloud project id of the service account'
21 | required: true
22 | runs:
23 | using: 'node20'
24 | main: 'dist/index.js'
25 |
--------------------------------------------------------------------------------
/.github/actions/snapshoty/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "snapshoty",
3 | "version": "1.0.0",
4 | "description": "Snapshoty Github Action",
5 | "main": "index.js",
6 | "scripts": {
7 | "lint": "eslint .",
8 | "prepare": "ncc build index.js -m -o dist --no-source-map-register --license licenses.txt",
9 | "test": "jest",
10 | "all": "npm run lint && npm run prepare && npm run test"
11 | },
12 | "keywords": [
13 | "GitHub",
14 | "Actions",
15 | "JavaScript"
16 | ],
17 | "author": "",
18 | "license": "Apache-2.0",
19 | "dependencies": {
20 | "@actions/core": "^1.10.1",
21 | "@actions/exec": "^1.1.1"
22 | },
23 | "devDependencies": {
24 | "@vercel/ncc": "^0.38.1",
25 | "eslint": "^8.29.0",
26 | "jest": "^29.3.1"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/.github/actions/validate-github-comment/README.md:
--------------------------------------------------------------------------------
1 | ## About
2 |
3 | GitHub Action to validate whether the GitHub comment was triggered by a user with write permissions. Otherwise, it will report a message.
4 |
5 | * [Usage](#usage)
6 | * [Configuration](#configuration)
7 |
8 | ## Usage
9 |
10 | ### Configuration
11 |
12 | Given the CI GitHub action:
13 |
14 | ```yaml
15 | ---
16 | name: Is GitHub comment allowed
17 | on:
18 | issue_comment:
19 | types: [created]
20 | jobs:
21 | run-action-if-comment:
22 | if: github.event.issue.pull_request && startsWith(github.event.comment.body, '/run-test')
23 | runs-on: ubuntu-latest
24 | steps:
25 | - uses: elastic/apm-pipeline-library/.github/actions/validate-github-comment@current
26 | ...
27 | ```
28 |
--------------------------------------------------------------------------------
/.github/paths-labeller.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - "build-steps":
3 | - "resources/*"
4 | - "vars/*"
5 | - "ci":
6 | - ".ci/*"
7 | - "groovy":
8 | - "vars/*"
9 | - "**/*.groovy"
10 | - "tests":
11 | - "src/test/*"
12 | - "automation":
13 | - ".ci/*"
14 | - ".github/*"
15 | - ".mvn/*"
16 | - "local/*"
17 | - "main":
18 | - "local/Dockerfile"
19 | - "local/docker-compose.yml"
20 | - "local/Makefile"
21 | - "local/configs/*"
22 | - "linux":
23 | - "local/workers/linux/*"
24 | - "mac":
25 | - "local/workers/macosx/*"
26 | - "windows":
27 | - "local/workers/windows/*"
28 |
--------------------------------------------------------------------------------
/.github/workflows/build-test-no-jenkins.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # This workflow sets the test / build check to success in case it's a no Jenkins related PR and build-test.yml is not triggered
3 | # https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks
4 | name: build-test # The name must be the same as in build-test.yml
5 |
6 | on:
7 | pull_request:
8 | paths-ignore: # This expression needs to match the paths ignored on build-test.yml.
9 | - '**'
10 | - '!resources/**'
11 | - '!src/**'
12 | - '!vars/**'
13 | - '!.github/workflows/build-test.yml'
14 | - '!.mvn/*'
15 | - '!pom.xml'
16 | - '!mvnw'
17 |
18 | permissions:
19 | contents: read
20 |
21 | jobs:
22 | build:
23 | runs-on: ubuntu-latest
24 | steps:
25 | - run: 'echo "No build required"'
26 |
--------------------------------------------------------------------------------
/.github/workflows/licenses.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: licenses
3 |
4 | on:
5 | push:
6 | branches: [main]
7 | pull_request:
8 |
9 | # limit the access of the generated GITHUB_TOKEN
10 | permissions:
11 | contents: read
12 |
13 | jobs:
14 | licenses:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 | - uses: actions/setup-go@v5
19 | - name: License
20 | run: |
21 | go install github.com/elastic/go-licenser@latest
22 | ls -ltra "$(go env GOPATH)"/bin
23 | "$(go env GOPATH)"/bin/go-licenser -d -ext .groovy
24 |
--------------------------------------------------------------------------------
/.github/workflows/pre-commit.yml:
--------------------------------------------------------------------------------
1 | name: pre-commit
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [main]
7 |
8 | permissions:
9 | contents: read
10 |
11 | # Those checks require access to the internal docker registry.
12 | # As we are migrating to GH then we don't need to lint those files.
13 | env:
14 | SKIP: 'check-jenkins-pipelines,check-jjbb'
15 |
16 | jobs:
17 | pre-commit:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: elastic/apm-pipeline-library/.github/actions/pre-commit@main
21 |
--------------------------------------------------------------------------------
/.github/workflows/test-elastic-stack-snapshot-branches.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: test-elastic-stack-snapshot-branches
3 |
4 | on:
5 | workflow_dispatch:
6 |
7 | permissions:
8 | contents: read
9 |
10 | jobs:
11 | test:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - id: generator
15 | uses: elastic/apm-pipeline-library/.github/actions/elastic-stack-snapshot-branches@main
16 |
17 | test-with-exclude-branches:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - id: generator
21 | uses: elastic/apm-pipeline-library/.github/actions/elastic-stack-snapshot-branches@main
22 | with:
23 | exclude-branches: '7.17,main'
24 |
--------------------------------------------------------------------------------
/.github/workflows/test-oblt-cli.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: test-oblt-cli
3 |
4 | on:
5 | workflow_dispatch:
6 |
7 | permissions:
8 | contents: read
9 |
10 | jobs:
11 |
12 | run-oblt-cli:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v4
16 |
17 | - name: Setup Git
18 | uses: elastic/apm-pipeline-library/.github/actions/setup-git@main
19 |
20 | - uses: elastic/apm-pipeline-library/.github/actions/github-token@main
21 | with:
22 | url: ${{ secrets.VAULT_ADDR }}
23 | roleId: ${{ secrets.VAULT_ROLE_ID }}
24 | secretId: ${{ secrets.VAULT_SECRET_ID }}
25 |
26 | - uses: elastic/apm-pipeline-library/.github/actions/oblt-cli@main
27 | with:
28 | token: ${{ env.GITHUB_TOKEN }}
29 | command: 'cluster create ccs --remote-cluster=dev-oblt --cluster-name-prefix testgithubaction'
30 |
--------------------------------------------------------------------------------
/.github/workflows/test-reporter.yml:
--------------------------------------------------------------------------------
1 | ---
2 | ## Workflow to process the JUnit test results and add a report to the checks.
3 | name: Test Report
4 |
5 | on:
6 | workflow_run:
7 | workflows: [build-test, job-dsl, pytest_otel-build-test]
8 | types: [completed]
9 |
10 | permissions:
11 | contents: read
12 |
13 | jobs:
14 | report:
15 | runs-on: ubuntu-latest
16 | permissions:
17 | statuses: write
18 | checks: write
19 | contents: write
20 | pull-requests: write
21 | actions: write
22 | steps:
23 | - uses: elastic/apm-pipeline-library/.github/actions/test-report@main
24 | with:
25 | artifact: test-results # artifact name
26 | name: JUnit Tests # Name of the check run which will be created
27 | path: "**/*.xml" # Path to test results (inside artifact .zip)
28 | reporter: java-junit # Format of test results
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .gradle
3 | build
4 | *.iml
5 | release.properties
6 | target
7 | jenkins-cli.jar
8 |
9 | # Mac filesystem management files
10 | **/.DS_Store
11 |
12 | # For the BATS testing
13 | bats-core/
14 | target/
15 | .ci/docker/tests/test_helper/
16 |
17 | # For the vagrant
18 | .vagrant
19 |
20 | mail*.html
21 | metadata.txt
22 |
23 | out.html
24 | .vscode
25 |
26 | venv
27 |
28 | agent.jar
29 | local.pid
30 | local.log
31 | .classpath
32 | .project
33 | .settings/
34 | .venv
35 |
36 | latest-versions.json
37 |
38 | ## Wiremock
39 | wiremock-standalone.jar
40 |
41 | ## For the bump automation
42 | *.bck
43 | **/__pycache__
44 |
45 | # For github actions
46 | node_modules/
47 |
48 | snapshots/
49 | releases/
50 |
51 | # Ignore generated credentials from google-github-actions/auth
52 | gha-creds-*.json
53 |
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.1/apache-maven-3.8.1-bin.zip
2 | wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
3 |
--------------------------------------------------------------------------------
/catalog-info.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/catalog-info.yaml
--------------------------------------------------------------------------------
/change-on-master:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/change-on-master
--------------------------------------------------------------------------------
/docs/GUIDELINES.md:
--------------------------------------------------------------------------------
1 | # Guidance on coding patterns
2 |
3 | ## EditorConfig
4 |
5 | To ensure a common file format, there is a `.editorconfig` file [in place](../.editorconfig). To respect this file, [check](http://editorconfig.org/#download) whether your editor supports it natively or you need to download a plugin.
6 |
7 | ### Commit Message Style
8 |
9 | Write [meaningful commit messages](http://who-t.blogspot.de/2009/12/on-commit-messages.html) and [adhere to standard formatting](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
10 |
11 | Good commit messages speed up the review process and help to keep this project maintainable in the long term.
12 |
13 | ## Code Style
14 |
15 | ### GitHub
16 |
17 | :innocent: **WiP**
18 |
19 | ### Jenkins
20 |
21 | See [coding guidelines of this project][apm-pipeline-library-guidelines]
22 |
23 | [apm-pipeline-library-guidelines]: JENKINS_GUIDELINES.md
24 |
--------------------------------------------------------------------------------
/docs/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release a version
2 |
3 | Every time there are enough changes, we would release a new version. A version
4 | has a name like v[:number:].[:number:].[:number:] see [Semantic Versioning](https://semver.org/).
5 |
6 | ## Automated release process :rocket: (preferred)
7 |
8 | Follow the below steps:
9 |
10 | * Make sure your PRs contain the proper Github labels to group them under the proper changelog section, as defined in [Release-Drafter's configuration file](../.github/release-drafter.yml).
11 | * Navigate to the [GitHub job](https://github.com/elastic/apm-pipeline-library/actions/workflows/release.yml)
12 | * Choose `Run workflow` and what type of release.
13 | * Click `Run workflow` and wait for a few minutes to complete
14 |
--------------------------------------------------------------------------------
/local/.gitignore:
--------------------------------------------------------------------------------
1 | __files
2 | mappings
3 |
--------------------------------------------------------------------------------
/local/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM docker.elastic.co/infra/jenkins:202205181458.f1daa9ac6ec5
2 |
3 |
4 | COPY configs/plugins.txt /usr/share/jenkins/ref/plugins.txt
5 | RUN jenkins-plugin-cli -f /usr/share/jenkins/ref/plugins.txt
6 |
--------------------------------------------------------------------------------
/local/configs/plugins.txt:
--------------------------------------------------------------------------------
1 | disable-github-multibranch-status
2 | filesystem_scm
3 | google-compute-engine
4 | job-dsl
5 | metrics
6 | monitoring
7 | opentelemetry
8 | plot
9 | ssh-agent
10 |
11 | blueocean
12 | blueocean-jira
13 | configuration-as-code
14 | git
15 | git-client
16 | github-branch-source
17 | pipeline-model-definition
18 | workflow-cps-global-lib
19 |
--------------------------------------------------------------------------------
/local/jenkins_jobs.ini:
--------------------------------------------------------------------------------
1 | [job_builder]
2 | allow_duplicates=True
3 |
4 | [jenkins]
5 | url=http://jenkins:8080
6 |
--------------------------------------------------------------------------------
/local/workers/windows/setup.ps1:
--------------------------------------------------------------------------------
1 | Write-Host "Installing chocolatey..."
2 | Set-ExecutionPolicy Bypass -Scope Process -Force; iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
3 |
4 | Write-Host "Installing java..."
5 | & choco install adoptopenjdk8 -y --no-progress
6 |
7 | Write-Host "Installing docker-desktop..."
8 | & choco install docker-desktop -y --no-progress
9 |
10 | Write-Host "Downloading swarm..."
11 | [Net.ServicePointManager]::SecurityProtocol = "tls12"
12 | (New-Object Net.WebClient).DownloadFile('https://repo.jenkins-ci.org/releases/org/jenkins-ci/plugins/swarm-client/3.9/swarm-client-3.9.jar', 'swarm-client.jar')
13 | Copy-Item -Path swarm-client.jar -Destination c:\\Users\\vagrant
14 |
--------------------------------------------------------------------------------
/local/workers/windows/swarm.bat:
--------------------------------------------------------------------------------
1 | set "str=%COMPUTERNAME%"
2 | for /f "usebackq delims=" %%I in (`powershell "\"%str%\".toLower()"`) do set "lower=%%~I"
3 | java -jar c:\\Users\\vagrant\\swarm-client.jar -labels "windows %lower% %lower%-immutable windows-immutable" -master http://10.0.2.2:18080 -fsroot c:\\jenkins -deleteExistingClients
4 |
--------------------------------------------------------------------------------
/local/workers/windows/windows-2016/Vagrantfile:
--------------------------------------------------------------------------------
1 | # -*- mode: ruby -*-
2 | # vi: set ft=ruby :
3 |
4 | Vagrant.configure("2") do |config|
5 | config.vm.box = "elastic/windows-2016-x86_64"
6 |
7 | config.vm.network "private_network", ip: "192.168.33.10"
8 | config.vm.hostname = "windows-2016"
9 |
10 | config.vm.provider "virtualbox" do |vb|
11 | vb.name = "windows-2016-apm-pipeline-library"
12 | vb.gui = false
13 | vb.memory = "6144"
14 | vb.cpus = 2
15 | end
16 |
17 | config.vm.provision "shell", privileged: "false", powershell_elevated_interactive: "true", path: "../setup.ps1"
18 | config.vm.provision "shell", privileged: "false", powershell_elevated_interactive: "true", path: "../swarm.bat"
19 | end
20 |
--------------------------------------------------------------------------------
/local/workers/windows/windows-2019/Vagrantfile:
--------------------------------------------------------------------------------
1 | # -*- mode: ruby -*-
2 | # vi: set ft=ruby :
3 |
4 | Vagrant.configure("2") do |config|
5 | config.vm.box = "StefanScherer/windows_2019"
6 |
7 | config.vm.network "private_network", ip: "192.168.33.20"
8 | config.vm.hostname = "windows-2019"
9 |
10 | config.vm.provider "virtualbox" do |vb|
11 | vb.name = "windows-2019-apm-pipeline-library"
12 | vb.gui = false
13 | vb.memory = "6144"
14 | vb.cpus = 2
15 | end
16 |
17 | ## https://github.com/hashicorp/vagrant/issues/9138
18 | config.winrm.username = 'vagrant\vagrant'
19 | ## https://github.com/hashicorp/vagrant/issues/6430
20 | config.winrm.retry_limit = 30
21 | config.winrm.retry_delay = 10
22 | config.vm.provision "shell", privileged: "false", powershell_elevated_interactive: "true", path: "../setup.ps1"
23 | config.vm.provision "shell", privileged: "false", powershell_elevated_interactive: "false", path: "../swarm.bat"
24 | end
25 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-dotnet.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-go.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-java.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-nodejs.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-php.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-python.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-ruby.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-rum-js.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/apm-agent-server.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/approval-list/beats.yml:
--------------------------------------------------------------------------------
1 | # A list of trusted GitHub users that can build a job in the CI when
2 | # they submit a pull request
3 | # The name of the YAML file should match the name of the GitHub repo.
4 | USERS:
5 | - v1v
6 |
--------------------------------------------------------------------------------
/resources/co/elastic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/resources/co/elastic/README.md
--------------------------------------------------------------------------------
/resources/commands-github-comment-markdown.template:
--------------------------------------------------------------------------------
1 | <%if (githubCommands?.size() > 0) {%>
2 | ## :robot: GitHub comments
3 |
4 | Expand to view the GitHub comments
5 |
6 |
7 | To re-run your PR in the CI, just comment with:
8 |
9 | <% githubCommands?.each { githubCommand, description -> %>
10 | <% githubFormattedCommand = githubCommand.contains('`') ? "${githubCommand}" : "`${githubCommand}`" %>
11 | - ${githubFormattedCommand} : ${description}
12 | <%}%>
13 |
14 | <%}%>
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/resources/docs/index.asciidoc:
--------------------------------------------------------------------------------
1 | :branch: current
2 | :server-branch: 6.5
3 | include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
4 |
5 | = Test Docs
6 |
7 | include::./test.asciidoc[Test Document]
8 |
--------------------------------------------------------------------------------
/resources/docs/test.asciidoc:
--------------------------------------------------------------------------------
1 | [[title]]
2 |
3 | == Title
4 |
5 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed mattis molestie tortor sed efficitur. In euismod imperdiet enim nec imperdiet. Maecenas id ex at tortor ultricies molestie. Nulla facilisi. Curabitur eget turpis ut nibh porttitor hendrerit. Suspendisse ullamcorper, sapien quis feugiat auctor, orci lorem posuere sapien, nec suscipit ipsum leo sit amet lacus. Nunc in odio ultrices, aliquet purus non, laoreet orci. Quisque semper tellus sed lacus congue sagittis. Ut sed dignissim turpis. Cras ut faucibus ante, in laoreet nunc. Etiam non risus finibus, imperdiet nulla in, commodo enim. Donec lobortis aliquam vulputate. Proin at fermentum mi, quis tristique ipsum. Mauris ultricies lectus various molestie dictum. Proin libero urna, condimentum ultricies nulla eget, mollis dapibus neque. Praesent dignissim porttitor augue commodo condimentum.
6 |
--------------------------------------------------------------------------------
/resources/flaky-github-issue.template:
--------------------------------------------------------------------------------
1 | ## Flaky Test
2 |
3 | * **Test Name:** `${testName}`
4 | * **Artifact Link:** ${jobUrl}
5 | * **PR:** ${PR?.trim() ?: 'None'}
6 | * **Commit:** ${commit?.trim() ?: 'None'}
7 |
8 | <% if (testData?.errorDetails?.trim()) {%>
9 | ### Error details
10 | ```
11 | ${testData.errorDetails}
12 | ```
13 | <% }%>
14 | <% if (testData?.errorStackTrace?.trim()) {%>
15 | ### Stack Trace
16 | ```
17 | ${testData.errorStackTrace}
18 | ```
19 | <% }%>
20 |
--------------------------------------------------------------------------------
/resources/pods/golang.yml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Pod
3 | spec:
4 | securityContext:
5 | runAsUser: 1000 # default UID of jenkins user in agent image
6 | containers:
7 | - name: golang
8 | image: docker.elastic.co/beats-dev/golang-crossbuild:1.17.5-main-debian10
9 | command:
10 | - sleep
11 | args:
12 | - infinity
13 | resources:
14 | limits:
15 | cpu: 2
16 | memory: 4Gi
17 | requests:
18 | cpu: 1
19 | memory: 4Gi
20 |
--------------------------------------------------------------------------------
/resources/pods/python.yml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Pod
3 | spec:
4 | securityContext:
5 | runAsUser: 1000 # default UID of jenkins user in agent image
6 | containers:
7 | - name: python
8 | image: python:3.9
9 | command:
10 | - sleep
11 | args:
12 | - infinity
13 | resources:
14 | limits:
15 | cpu: 2
16 | memory: 4Gi
17 | requests:
18 | cpu: 1
19 | memory: 4Gi
20 |
--------------------------------------------------------------------------------
/resources/reviewApproved.json:
--------------------------------------------------------------------------------
1 | [{
2 | "id": 186086539,
3 | "node_id": "MDE3OlB1bGxSZXF1ZXN0UmV2aWV3MTg2MDg2NTM5",
4 | "user": {
5 | "login": "githubusername",
6 | "type": "User",
7 | "site_admin": false
8 | },
9 | "body": "",
10 | "state": "APPROVED",
11 | "pull_request_url": "https://api.github.com/repos/org/repo/pulls/1",
12 | "author_association": "MEMBER",
13 | "submitted_at": "2018-12-18T14:13:16Z",
14 | "commit_id": "4457d4e98f91501bb7914cbb29e440a857972fee"
15 | }]
16 |
--------------------------------------------------------------------------------
/resources/runbld/log.json:
--------------------------------------------------------------------------------
1 | {
2 | "_index": "log-1542249257015",
3 | "_type": "t",
4 | "_id": "rF5RGGcBZI9dC_t5tN24",
5 | "_version": 1,
6 | "_score": null,
7 | "_source": {
8 | "time": "2018-11-15T17:09:47.090Z",
9 | "stream": "stderr",
10 | "log": "+ echo 'Cleanup complete.'",
11 | "size": 27,
12 | "ord": {
13 | "total": 113,
14 | "stream": 49
15 | },
16 | "build-id": "20181115170808-220B2A0D"
17 | },
18 | "fields": {
19 | "time": [
20 | "2018-11-15T17:09:47.090Z"
21 | ]
22 | },
23 | "sort": [
24 | 1542301787090
25 | ]
26 | }
27 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/.editorconfig:
--------------------------------------------------------------------------------
1 | [*]
2 | charset = utf-8
3 | end_of_line = lf
4 | indent_size = 4
5 | indent_style = space
6 | insert_final_newline = true
7 | max_line_length = 120
8 | tab_width = 4
9 |
10 | [Makefile]
11 | indent_style = tab
12 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | .coverage
3 | .idea
4 | .pytest_cache
5 | .venv
6 | *.pyc
7 | **/*.egg-info
8 | **/build
9 | **/dist
10 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/apm/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed to Elasticsearch B.V. under one or more contributor
2 | # license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright
4 | # ownership. Elasticsearch B.V. licenses this file to you under
5 | # the Apache License, Version 2.0 (the "License"); you may
6 | # not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | #     http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied. See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | junit_family=xunit2
3 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/requirements.txt:
--------------------------------------------------------------------------------
1 | ConfigArgParse
2 | coverage
3 | elastic-apm
4 | psutil
5 | pytest
6 | pytest-bdd
7 | pytest-mock
8 |
--------------------------------------------------------------------------------
/resources/scripts/apm-cli/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed to Elasticsearch B.V. under one or more contributor
2 | # license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright
4 | # ownership. Elasticsearch B.V. licenses this file to you under
5 | # the Apache License, Version 2.0 (the "License"); you may
6 | # not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | #     http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied. See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 |
--------------------------------------------------------------------------------
/resources/scripts/beats/filebeat.yml:
--------------------------------------------------------------------------------
1 | ---
2 | filebeat.autodiscover:
3 | providers:
4 | - type: docker
5 | templates:
6 | - config:
7 | - type: container
8 | paths:
9 | - /var/lib/docker/containers/${data.docker.container.id}/*.log
10 | processors:
11 | - add_host_metadata: ~
12 | - add_cloud_metadata: ~
13 | - add_docker_metadata: ~
14 | - add_kubernetes_metadata: ~
15 | output.file:
16 | path: "/output"
17 | filename: ${OUTPUT_FILE}
18 | permissions: 0644
19 | codec.format:
20 | string: '[%{[container.name]}][%{[container.image.name]}][%{[container.id]}][%{[@timestamp]}] %{[message]}'
21 |
--------------------------------------------------------------------------------
/resources/scripts/beats/metricbeat-logs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | metricbeat.modules:
3 | - module: docker
4 | metricsets:
5 | - "container"
6 | - "cpu"
7 | - "diskio"
8 | - "event"
9 | - "healthcheck"
10 | - "info"
11 | - "memory"
12 | - "network"
13 | hosts: ["unix:///var/run/docker.sock"]
14 | period: 10s
15 | enabled: true
16 |
17 |
18 | processors:
19 | - add_host_metadata: ~
20 | - add_cloud_metadata: ~
21 | - add_docker_metadata: ~
22 | - add_kubernetes_metadata: ~
23 |
24 | output.file:
25 | path: "/output"
26 | filename: ${OUTPUT_FILE}
27 | permissions: 0644
28 |
--------------------------------------------------------------------------------
/resources/scripts/beats/run_filebeat.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eu
3 | echo "OUTPUT_DIR=${OUTPUT_DIR}"
4 | echo "OUTPUT_FILE=${OUTPUT_FILE}"
5 | echo "CONFIG_PATH=${CONFIG_PATH}"
6 | echo "DOCKER_IMAGE=${DOCKER_IMAGE}"
7 |
8 | docker run \
9 | --detach \
10 | -v "${OUTPUT_DIR}:/output" \
11 | -v "${CONFIG_PATH}:/usr/share/filebeat/filebeat.yml" \
12 | -u 0:0 \
13 | -v /var/lib/docker/containers:/var/lib/docker/containers \
14 | -v /var/run/docker.sock:/var/run/docker.sock \
15 | -e OUTPUT_FILE="${OUTPUT_FILE}" \
16 | "${DOCKER_IMAGE}" \
17 | --strict.perms=false \
18 | -environment container \
19 | -E http.enabled=true > filebeat_docker_id
20 |
--------------------------------------------------------------------------------
/resources/scripts/beats/run_metricbeat.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eu
3 |
4 | echo "CONFIG_PATH=${CONFIG_PATH}"
5 | echo "DOCKER_IMAGE=${DOCKER_IMAGE}"
6 |
7 | docker run \
8 | --detach \
9 | -v "${CONFIG_PATH}:/usr/share/metricbeat/metricbeat.yml" \
10 | -u 0:0 \
11 | --mount type=bind,source=/proc,target=/hostfs/proc,readonly \
12 | --mount type=bind,source=/sys/fs/cgroup,target=/hostfs/sys/fs/cgroup,readonly \
13 | --mount type=bind,source=/,target=/hostfs,readonly \
14 | --net=host \
15 | -e ES_URL="${ES_URL}" \
16 | -e ES_USERNAME="${ES_USERNAME}" \
17 | -e ES_PASSWORD="${ES_PASSWORD}" \
18 | "${DOCKER_IMAGE}" \
19 | --strict.perms=false \
20 | -environment container \
21 | -E http.enabled=true \
22 | -e -system.hostfs=/hostfs > metricbeat_docker_id
23 |
--------------------------------------------------------------------------------
/resources/scripts/beats/wait_for_beat.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ID=${1:?"Missing parameter"}
4 | URL=${2:-"http://localhost:5066/stats?pretty"}
5 |
6 | echo "INFO: print existing docker context"
7 | docker ps -a || true
8 |
9 | echo "INFO: print docker inspect"
10 | docker inspect "${ID}"
11 |
12 | echo "INFO: wait for the docker container to be available"
13 | N=0
14 | until docker exec "${ID}" curl -sSfI --retry 10 --retry-delay 5 --max-time 5 "${URL}"
15 | do
16 | sleep 5
17 | if [ "${N}" -gt 6 ]; then
18 | break;
19 | fi
20 | N=$(("${N}" + 1))
21 | done
22 |
--------------------------------------------------------------------------------
/resources/scripts/install/kind-setup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -exuo pipefail
3 |
 4 | kind create cluster --image "kindest/node:${K8S_VERSION}" --config - <<EOF
[NOTE(review): extraction garbling here fused kind-setup.sh line 4 with the middle of
the next file — the heredoc body of kind-setup.sh, the file separator, and the head of
pyproject.toml (license header and the `[build-system]` table opener) were lost. The
dump resumes mid pyproject.toml below; reconstructed boundary, TODO confirm against the
upstream repository.]
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/pyproject.toml:
--------------------------------------------------------------------------------
 8 | [build-system]
 9 | requires = ["setuptools >= 44.0.0", "wheel >= 0.30"]
10 | build-backend = "setuptools.build_meta"
11 |
12 | [tool.black]
13 | line-length = 120
14 | include = '\.pyi?$'
15 | exclude = '''
16 | /(
17 | \.git
18 | | _build
19 | | build
20 | | dist
21 | | elasticapm/utils/wrapt
22 | # The following are specific to Black, you probably don't want those.
23 | | blib2to3
24 | | tests/data
25 | )/
26 | '''
27 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/requirements.txt:
--------------------------------------------------------------------------------
1 | coverage==7.0.5
2 | opentelemetry-api==1.15.0
3 | opentelemetry-exporter-otlp==1.15.0
4 | opentelemetry-sdk==1.15.0
5 | psutil==5.9.3
6 | pytest==7.2.1
7 | pre-commit==2.21.0
8 | mypy==0.982
9 | pytest-docker==1.0.1
10 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Copyright The OpenTelemetry Authors
3 | # SPDX-License-Identifier: Apache-2.0
4 |
5 | from setuptools import setup
6 |
7 | setup()
8 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 | ---
4 | version: "3.7"
5 | services:
6 | otel-collector:
7 | image: otel/opentelemetry-collector:latest
8 | user: ${UID}:${GID}
9 | command: ["--config=/etc/otel-collector.yaml"]
10 | volumes:
11 | - ./otel-collector.yaml:/etc/otel-collector.yaml
12 | - ./:/tmp
13 | ports:
14 | - "4317:4317" # OTLP gRPC receiver
15 | networks:
16 | - default_net
17 |
18 | volumes:
19 | default_net:
20 | driver: local
21 | esdata:
22 | driver: local
23 |
24 | networks:
25 | default_net:
26 | driver: bridge
27 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from utils import is_portListening
3 |
4 |
5 | @pytest.fixture(scope="session")
6 | def otel_service(docker_ip, docker_services):
7 | """Ensure that port is listening."""
8 |
9 | # `port_for` takes a container port and returns the corresponding host port
10 | port = docker_services.port_for("otel-collector", 4317)
11 | docker_services.wait_until_responsive(
12 | timeout=30.0, pause=5, check=lambda: is_portListening(docker_ip, port)
13 | )
14 | return True
15 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_basic_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_OK
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_basic_plugin(pytester, otel_service):
19 | """test a simple test"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | def test_basic():
24 | time.sleep(5)
25 | pass
26 | """)
27 | assertTest(pytester, "test_basic", "passed", STATUS_CODE_OK, "passed", STATUS_CODE_OK)
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_failure_code_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_ERROR
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_failure_code_plugin(pytester, otel_service):
19 | """test a test with a code exception"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | def test_failure_code():
24 | d = 1/0
25 | pass
26 | """)
27 | assertTest(pytester, "test_failure_code", "failed", STATUS_CODE_ERROR, "failed", STATUS_CODE_ERROR)
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_failure_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_ERROR
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_failure_plugin(pytester, otel_service):
19 | """test a failed test"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | def test_failure():
24 | assert 1 < 0
25 | """)
26 | assertTest(pytester, "test_failure", "failed", STATUS_CODE_ERROR, "failed", STATUS_CODE_ERROR)
27 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_skip_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_OK
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_skip_plugin(pytester, otel_service):
19 | """test a skipped test"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | @pytest.mark.skip
24 | def test_skip():
25 | assert True
26 | """)
27 | assertTest(pytester, None, "passed", STATUS_CODE_OK, None, None)
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_success_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_OK
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_success_plugin(pytester, otel_service):
19 | """test a success test"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | def test_success():
24 | assert True
25 | """)
26 | assertTest(pytester, "test_success", "passed", STATUS_CODE_OK, "passed", STATUS_CODE_OK)
27 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_xfail_no_run_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_OK
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_xfail_no_run_plugin(pytester, otel_service):
19 | """test a marked as xfail test with run==false"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | @pytest.mark.xfail(run=False)
24 | def test_xfail_no_run():
25 | assert False
26 | """)
27 | assertTest(pytester, None, "passed", STATUS_CODE_OK, None, None)
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/it/test_xfail_plugin.py:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 |
4 | import pytest
5 | from utils import assertTest, STATUS_CODE_OK
6 |
7 | pytest_plugins = ["pytester"]
8 |
9 | common_code = """
10 | import os
11 | import time
12 | import logging
13 | import pytest
14 |
15 | """
16 |
17 |
18 | def test_xfail_plugin(pytester, otel_service):
19 | """test a marked as xfail test"""
20 | pytester.makepyfile(
21 | common_code
22 | + """
23 | @pytest.mark.xfail(reason="foo bug")
24 | def test_xfail():
25 | assert False
26 | """)
27 | assertTest(pytester, None, "passed", STATUS_CODE_OK, None, None)
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tests/otel-collector.yaml:
--------------------------------------------------------------------------------
1 | # Copyright The OpenTelemetry Authors
2 | # SPDX-License-Identifier: Apache-2.0
3 | ---
4 | receivers:
5 | otlp:
6 | protocols:
7 | grpc:
8 | endpoint: 0.0.0.0:4317
9 |
10 | exporters:
11 | logging:
12 | logLevel: debug
13 | file:
14 | path: /tmp/tests.json
15 | processors:
16 | batch:
17 |
18 | service:
19 | pipelines:
20 | traces:
21 | receivers:
22 | - otlp
23 | processors:
24 | - batch
25 | exporters:
26 | - logging
27 | - file
28 |
--------------------------------------------------------------------------------
/resources/scripts/pytest_otel/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | py311
4 | py310
5 | py39
6 | py38
7 |
8 | [testenv]
9 | deps =
10 | pytest==7.1.3
11 | pytest-docker==1.0.1
12 | commands =
13 | pytest {tty:--color=yes} --capture=no \
14 | -p pytester --runpytest=subprocess \
15 | --junitxml {toxworkdir}{/}junit-{envname}.xml \
16 | tests/test_pytest_otel.py
17 |
18 | [testenv:linting]
19 | basepython = python3
20 | skip_install = true
21 | deps =
22 | pre-commit==2.20.0
23 | commands =
24 | pre-commit run
25 |
--------------------------------------------------------------------------------
/resources/templates/bundlesize.md.j2:
--------------------------------------------------------------------------------
1 | ### :package: Bundlesize report
2 |
3 | Filename | Size(bundled) | Size(gzip) | Diff(gzip)
4 | --- | --- | --- | ---
5 | {% for item in data -%}
6 | {%- if item.previousGzipSize -%}
7 | {%- if item.gzipSize - item.previousGzipSize > 0 -%}
8 | {%- set status = ":warning:" -%}
9 | {% else %}
10 | {%- set status = ":green_heart:" -%}
11 | {%- endif -%}
12 | {%- endif -%}
13 | {{ item.label }} | {{ item.parsedSize | int | filesizeformat(true) }} | {{ item.gzipSize | int | filesizeformat(true) }} | {% if item.previousGzipSize %} {{ status }} {{ ( (item.gzipSize | int) - (item.previousGzipSize | int) ) | filesizeformat(true) }} {% endif %}
14 | {% endfor %}
15 |
--------------------------------------------------------------------------------
/resources/templates/coverage.md.j2:
--------------------------------------------------------------------------------
1 | {%- if data -%}
2 | ### :globe_with_meridians: Coverage report
3 |
4 | Name | Metrics % (`covered/total`) | Diff
5 | --- | --- | ---
6 | {% for key, value in data.items() -%}
7 | {%- set status = ":grey_exclamation:" -%}
8 | {%- set diff = "" -%}
9 | {%- if value.previousRatio -%}
10 | {%- set diff = ( value.ratio|float - value.previousRatio|float ) |round(3) -%}
11 | {%- if value.previousRatio > value.ratio -%}
12 | {%- set status = ":-1:" -%}
13 | {% else %}
14 | {%- set status = ":+1:" -%}
15 | {%- endif -%}
16 | {%- endif -%}
17 | {%- if value.ratio|int >= 100 -%}
18 | {%- set status = ":green_heart:" -%}
19 | {%- endif -%}
20 | {%- if diff == 0 -%}
21 | {%- set diff = "" -%}
22 | {%- endif -%}
23 | {{ key }} | {{ value.ratio|float|round(3) }}% (`{{ value.numerator }}/{{ value.denominator }}`) | {{ status }} {{ diff }}
24 | {% endfor %}
25 | {%- endif -%}
26 |
--------------------------------------------------------------------------------
/resources/versions/releases.properties:
--------------------------------------------------------------------------------
1 | current_6=6.8.23
2 | current_7=7.17.22
3 | next_minor_7=7.17.23
4 | next_patch_7=7.17.23
5 | current_8=8.14.2
6 | next_minor_8=8.15.0
7 | next_patch_8=8.14.3
8 | edge_8=8.15.0
--------------------------------------------------------------------------------
/src/co/elastic/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/src/co/elastic/README.md
--------------------------------------------------------------------------------
/src/test/resources/1744/build-info.json:
--------------------------------------------------------------------------------
1 | {
2 | "artifactsZipFile": "https://beats-ci.elastic.co//job/e2e-tests/job/e2e-testing-mbp/job/PR-2688/1/artifact/*zip*/archive.zip",
3 | "causeOfBlockage": null,
4 | "causes": {
5 | "shortDescription": "Pull request #2688 opened"
6 | },
7 | "description": null,
8 | "durationInMillis": 6654442,
9 | "enQueueTime": "2022-06-23T05:13:07.176+0000",
10 | "endTime": null,
11 | "estimatedDurationInMillis": -1,
12 | "id": "1",
13 | "name": null,
14 | "organization": "jenkins",
15 | "pipeline": "PR-2688",
16 | "result": "FAILURE",
17 | "runSummary": "?",
18 | "startTime": "2022-06-23T05:13:07.185+0000",
19 | "state": "FINISHED",
20 | "type": "WorkflowRun",
21 | "commitId": "bd80c7a45645953d2eb2915026752c2879044a55+892e951c25615c5a004bed1e0e65050e86009061 (c856b4f734455f5543c04f1fb99aef752318f710)",
22 | "commitUrl": null
23 | }
24 |
--------------------------------------------------------------------------------
/src/test/resources/__files/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/src/test/resources/__files/.gitkeep
--------------------------------------------------------------------------------
/src/test/resources/__files/body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-blueTestSummary-A2SNo.json:
--------------------------------------------------------------------------------
1 | {"_class":"io.jenkins.blueocean.rest.model.BlueTestSummary","_links":{"self":{"_class":"io.jenkins.blueocean.rest.hal.Link","href":"/blue/rest/organizations/jenkins/pipelines/it/pipelines/getBuildInfoJsonFiles/pipelines/cobertura/runs/1//blueTestSummary/"}},"existingFailed":0,"failed":0,"fixed":0,"passed":0,"regressions":0,"skipped":0,"total":0}
--------------------------------------------------------------------------------
/src/test/resources/__files/body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-changeSet-EJ5Ac.json:
--------------------------------------------------------------------------------
1 | []
--------------------------------------------------------------------------------
/src/test/resources/__files/body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-0WDC4.json:
--------------------------------------------------------------------------------
1 | {
2 | "message" : "no tests",
3 | "code" : 404,
4 | "errors" : [ ]
5 | }
--------------------------------------------------------------------------------
/src/test/resources/__files/body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-C8sFC.json:
--------------------------------------------------------------------------------
1 | {
2 | "message" : "no tests",
3 | "code" : 404,
4 | "errors" : [ ]
5 | }
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-abort-1-cobertura-api-json-QPHoF.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-connectionRefused-1-cobertura-api-json-0OGve.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-empty-1-cobertura-api-json-w9hla.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-error-1-cobertura-api-json-w9hl2.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-multiTestFailures-1-cobertura-api-json-A8nw5.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-success-1-cobertura-api-json-ZkKp7.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/__files/body-job-it-job-getBuildInfoJsonFiles-job-unstable-1-cobertura-api-json-vpDs1.txt:
--------------------------------------------------------------------------------
1 | Not found
2 |
--------------------------------------------------------------------------------
/src/test/resources/build-info_aborted_allowed_to_run.json:
--------------------------------------------------------------------------------
1 | {
2 | "artifactsZipFile": "https://apm-ci.elastic.co//job/apm-agent-nodejs/job/ecs-logging-nodejs-mbp/job/PR-101/1/artifact/*zip*/archive.zip",
3 | "causeOfBlockage": null,
4 | "causes": {
5 | "shortDescription": "Pull request #101 opened"
6 | },
7 | "description": "The PR is not allowed to run in the CI yet",
8 | "durationInMillis": 189755,
9 | "enQueueTime": "2021-09-22T09:29:02.777+0000",
10 | "endTime": null,
11 | "estimatedDurationInMillis": -1,
12 | "id": "1",
13 | "name": null,
14 | "organization": "jenkins",
15 | "pipeline": "PR-101",
16 | "result": "ABORTED",
17 | "runSummary": "?",
18 | "startTime": "2021-09-22T09:29:02.785+0000",
19 | "state": "FINISHED",
20 | "type": "WorkflowRun",
21 | "commitId": "f61cab1e9b150b9fe1216efb8431849fa6224099+ae3cd0763415e131cee39d13539addead2b13665 (91d9f874aa47b1d2b93e7bdc36afe02ff5627221)",
22 | "commitUrl": null
23 | }
24 |
--------------------------------------------------------------------------------
/src/test/resources/buildKibana/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "kibana",
3 | "version": "8.0.0"
4 | }
5 |
--------------------------------------------------------------------------------
/src/test/resources/changeSet-info-empty-issues.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "affectedPaths": [
4 | "pipeline.yml"
5 | ],
6 | "author": {
7 | "avatar": null,
8 | "email": null,
9 | "fullName": "Lola Flores",
10 | "id": "Lola.Flores",
11 | "permission": null
12 | },
13 | "checkoutCount": 0,
14 | "commitId": "abcdefg",
15 | "issues": [ ],
16 | "msg": "indicator type url is in upper case (#1234)",
17 | "timestamp": "2021-02-22T10:23:51.000+0000"
18 | }
19 | ]
20 |
--------------------------------------------------------------------------------
/src/test/resources/changeSet-info-manual.json:
--------------------------------------------------------------------------------
1 | [ ]
2 |
--------------------------------------------------------------------------------
/src/test/resources/changeSet-info.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "affectedPaths": [
4 | "pipeline.yml"
5 | ],
6 | "author": {
7 | "avatar": null,
8 | "email": null,
9 | "fullName": "Lola Flores",
10 | "id": "Lola.Flores",
11 | "permission": null
12 | },
13 | "checkoutCount": 0,
14 | "commitId": "abcdefg",
15 | "issues": [
16 | {
17 | "_class": "io.jenkins.blueocean.blueocean_github_pipeline.GithubIssue",
18 | "id": "#1234",
19 | "url": "https://github.com/org/acme/issues/1234"
20 | }
21 | ],
22 | "msg": "indicator type url is in upper case (#1234)",
23 | "timestamp": "2021-02-22T10:23:51.000+0000"
24 | }
25 | ]
26 |
--------------------------------------------------------------------------------
/src/test/resources/corrupted/build-info.json:
--------------------------------------------------------------------------------
1 | {
2 | "result": "FAILURE",
3 | "durationInMillis": 1334790,
4 | "state": "FINISHED",
5 | "causes": null,
6 | "artifactsZipFile": "https://beats-ci.elastic.co/null"
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/resources/corrupted/changeSet-info.json:
--------------------------------------------------------------------------------
1 | []
2 |
--------------------------------------------------------------------------------
/src/test/resources/corrupted/steps-errors.json:
--------------------------------------------------------------------------------
1 | []
2 |
--------------------------------------------------------------------------------
/src/test/resources/corrupted/tests-errors.json:
--------------------------------------------------------------------------------
1 | []
2 |
--------------------------------------------------------------------------------
/src/test/resources/corrupted/tests-summary.json:
--------------------------------------------------------------------------------
1 | {
2 | "total": 0,
3 | "passed": 0,
4 | "failed": 0,
5 | "skipped": 0
6 | }
7 |
--------------------------------------------------------------------------------
/src/test/resources/empty/build-info.json:
--------------------------------------------------------------------------------
1 | {
2 | "result": "UNSTABLE",
3 | "durationInMillis": 1,
4 | "state": "FINISHED",
5 | "causes": null,
6 | "artifactsZipFile": "foo"
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/resources/empty/changeSet-info.json:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/src/test/resources/empty/tests-summary.json:
--------------------------------------------------------------------------------
1 | {
2 | "existingFailed": 0,
3 | "failed": 0,
4 | "fixed": 0,
5 | "passed": 0,
6 | "regressions": 0,
7 | "skipped": 0,
8 | "total": 0
9 | }
10 |
--------------------------------------------------------------------------------
/src/test/resources/env-info.json:
--------------------------------------------------------------------------------
1 | {
2 | "BRANCH_NAME": "main",
3 | "BUILD_DISPLAY_NAME": "#1",
4 | "BUILD_ID": "1",
5 | "BUILD_NUMBER": "1",
6 | "BUILD_TAG": "jenkins-project-main-1",
7 | "BUILD_URL": "http://localhost:18081/job/project/job/main/1/",
8 | "GIT_COMMIT": "4f0aea0e892678e46d62fd0a156f9c9c4b670995",
9 | "GIT_PREVIOUS_COMMIT": "4f0aea0e892678e46d62fd0a156f9c9c4b670995",
10 | "GIT_PREVIOUS_SUCCESSFUL_COMMIT": "4f0aea0e892678e46d62fd0a156f9c9c4b670995",
11 | "JOB_BASE_NAME": "main",
12 | "JOB_DISPLAY_URL": "http://localhost:18081/job/project/job/main/display/redirect",
13 | "JOB_NAME": "project/main",
14 | "JOB_URL": "http://localhost:18081/job/project/job/main/",
15 | "ORG_NAME": "acme",
16 | "REPO_NAME": "project"
17 | }
18 |
--------------------------------------------------------------------------------
/src/test/resources/filebeatTest/filebeat_container_worker-0676d01d9601f8191.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "fooID",
3 | "output": "foo.log",
4 | "config": "bar.xml",
5 | "image": "foo: latest",
6 | "workdir": "fooDir",
7 | "timeout": "30"
8 | }
9 |
--------------------------------------------------------------------------------
/src/test/resources/filebeatTest_1/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/src/test/resources/filebeatTest_1/.empty
--------------------------------------------------------------------------------
/src/test/resources/filebeatTest_2/filebeat_container_worker-0676d01d9601f8191.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "fooID",
3 | "output": "foo.log",
4 | "config": "bar.xml",
5 | "image": "foo: latest",
6 | "workdir": "fooDir",
7 | "timeout": "30",
8 | "archiveOnlyOnFail": "true"
9 | }
10 |
--------------------------------------------------------------------------------
/src/test/resources/flake-empty-results.json:
--------------------------------------------------------------------------------
1 | {
2 | "took" : 3,
3 | "timed_out" : false,
4 | "_shards" : {
5 | "total" : 1,
6 | "successful" : 1,
7 | "skipped" : 0,
8 | "failed" : 0
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/src/test/resources/flake-tests-errors-without-match.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "age": 1,
4 | "duration": 0,
5 | "errorDetails": "collection failure",
6 | "errorStackTrace": "My superduper stacktrace",
7 | "id": "io.jenkins.blueocean.service.embedded.rest.junit.BlueJUnitTestResult:%3Ajunit%2F(root)%2F(empty)%2FTest___Python_python_3_7_2___",
8 | "name": "Test / windows-3.6-none / test_send - notfound",
9 | "status": "FAILED"
10 | }
11 | ]
12 |
--------------------------------------------------------------------------------
/src/test/resources/flake-tests-errors.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "age": 1,
4 | "duration": 0,
5 | "errorDetails": "collection failure",
6 | "errorStackTrace": "My superduper stacktrace",
7 | "id": "io.jenkins.blueocean.service.embedded.rest.junit.BlueJUnitTestResult:%3Ajunit%2F(root)%2F(empty)%2FTest___Python_python_3_7_2___",
8 | "name": "Test / windows-3.6-none / test_send - tests.transports.test_urllib3",
9 | "status": "FAILED"
10 | }
11 | ]
12 |
13 |
--------------------------------------------------------------------------------
/src/test/resources/flake-tests-summary.json:
--------------------------------------------------------------------------------
1 | {
2 | "_class": "io.jenkins.blueocean.rest.model.BlueTestSummary",
3 | "_links": {
4 | "self": {
5 | "_class": "io.jenkins.blueocean.rest.hal.Link",
6 | "href": "/blue/rest/organizations/jenkins/pipelines/folder/pipelines/mbp/pipelines/branch/runs/49/blueTestSummary/"
7 | }
8 | },
9 | "existingFailed": 0,
10 | "failed": 1,
11 | "fixed": 0,
12 | "passed": 120,
13 | "regressions": 0,
14 | "skipped": 0,
15 | "total": 121
16 | }
17 |
--------------------------------------------------------------------------------
/src/test/resources/folders/beats.dsl:
--------------------------------------------------------------------------------
1 | folder('it/beats') {
2 | displayName('beats')
3 | description('beats ITs for the APM shared library')
4 | }
5 |
--------------------------------------------------------------------------------
/src/test/resources/folders/getBuildInfoJsonFiles.dsl:
--------------------------------------------------------------------------------
1 | folder('it/getBuildInfoJsonFiles') {
2 | displayName('getBuildInfoJsonFiles')
3 | description('getBuildInfoJsonFiles ITs for the APM shared library')
4 | }
5 |
--------------------------------------------------------------------------------
/src/test/resources/folders/it.dsl:
--------------------------------------------------------------------------------
1 | folder('it') {
2 | displayName('IT')
3 | description('ITs for the APM shared library')
4 | }
5 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/cancelPreviousRunningBuilds.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/cancelPreviousRunningBuilds'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('cancelPreviousRunningBuilds') {
6 | steps {
7 | sleep randomNumber(min: 5, max: 30)
8 | cancelPreviousRunningBuilds()
9 | }
10 | }
11 | }
12 | }'''
13 |
14 | pipelineJob(NAME) {
15 | definition {
16 | cps {
17 | script(DSL.stripIndent())
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/dockerLogin.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/dockerLogin'
2 | DSL = '''pipeline {
3 | agent none
4 | environment {
5 | DOCKER_REGISTRY = 'docker.elastic.co'
6 | DOCKER_SECRET = 'secret/observability-team/ci/docker-registry/prod'
7 | }
8 | stages {
9 | stage('linux') {
10 | agent { label 'linux && immutable' }
11 | steps {
12 | dockerLogin(secret: "${DOCKER_SECRET}", registry: "${DOCKER_REGISTRY}")
13 | }
14 | }
15 | stage('windows') {
16 | agent { label 'windows-immutable' }
17 | steps {
18 | dockerLogin(secret: "${DOCKER_SECRET}", registry: "${DOCKER_REGISTRY}")
19 | }
20 | }
21 | }
22 | }'''
23 |
24 | pipelineJob(NAME) {
25 | definition {
26 | cps {
27 | script(DSL.stripIndent())
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/dockerLogs.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/dockerLogs'
2 | DSL = '''pipeline {
3 | agent { label 'linux && immutable' }
4 | stages {
5 | stage('Docker run') {
6 | steps {
7 | sh 'docker run hello-world'
8 | }
9 | }
10 | stage('DockerLogs') {
11 | steps {
12 | dockerLogs(step: 'docker-logs', failNever: true)
13 | }
14 | }
15 | }
16 | }'''
17 |
18 | pipelineJob(NAME) {
19 | definition {
20 | cps {
21 | script(DSL.stripIndent())
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/downstream.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/downstream'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('error') {
6 | steps {
7 | error 'force a build error'
8 | }
9 | }
10 | }
11 | }'''
12 |
13 | pipelineJob(NAME) {
14 | definition {
15 | cps {
16 | script(DSL.stripIndent())
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/gh.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/gh'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('checkout') {
6 | steps {
7 | gitCheckout(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
8 | repo: 'https://github.com/elastic/apm-pipeline-library.git',
9 | branch: 'master',
10 | basedir: 'sub-folder')
11 | dir('sub-folder') {
12 | gh(command: 'issue list', flags: [ label: 'invalid'])
13 | }
14 | }
15 | }
16 | }
17 | }'''
18 |
19 | pipelineJob(NAME) {
20 | definition {
21 | cps {
22 | script(DSL.stripIndent())
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/git.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/git'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('git') {
6 | steps {
7 | git 'https://github.com/jglick/simple-maven-project-with-tests.git'
8 | deleteDir()
9 | }
10 | }
11 | stage('git_with_credentials') {
12 | steps {
13 | git credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
14 | url: 'https://github.com/jglick/simple-maven-project-with-tests.git',
15 | branch: 'master'
16 | }
17 | }
18 | }
19 | }'''
20 |
21 | pipelineJob(NAME) {
22 | definition {
23 | cps {
24 | script(DSL.stripIndent())
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/gitCheckout.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/gitCheckout'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('checkout') {
6 | steps {
7 | gitCheckout(basedir: 'sub-folder', branch: 'master',
8 | credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
9 | repo: 'https://github.com/octocat/Hello-World.git')
10 | sh 'ls -ltra'
11 | }
12 | }
13 | }
14 | }'''
15 |
16 | pipelineJob(NAME) {
17 | definition {
18 | cps {
19 | script(DSL.stripIndent())
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/githubCreateIssue.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/githubCreateIssue'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('checkout') {
6 | steps {
7 | gitCheckout(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
8 | repo: 'https://github.com/elastic/apm-pipeline-library.git',
9 | branch: 'master',
10 | basedir: 'sub-folder')
11 | dir('sub-folder') {
12 | githubCreateIssue(title: 'Foo', description: 'Bar', labels: 'invalid', milestone: 'chore')
13 | }
14 | }
15 | }
16 | }
17 | }'''
18 |
19 | pipelineJob(NAME) {
20 | definition {
21 | cps {
22 | script(DSL.stripIndent())
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/githubEnvSCM.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/githuEnvSCM'
2 |
3 | pipelineJob(NAME) {
4 | definition {
5 | cpsScm {
6 | scm {
7 | git('https://github.com/kuisathaverat/test.git')
8 | }
9 | scriptPath('githubEnv.groovy')
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/googleStorageUploadExt.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/googleStorageUploadExt'
2 | DSL = '''pipeline {
3 | agent { label "master" }
4 | environment {
5 | JOB_GCS_BUCKET = 'apm-ci-temp'
6 | JOB_GCS_CREDENTIALS = 'apm-ci-gcs-plugin-file-credentials'
7 | PIPELINE_LOG_LEVEL = 'DEBUG'
8 | }
9 | stages {
10 | stage('google-storage') {
11 | steps {
12 | touch file: 'file.txt', timestamp: 0
13 | googleStorageUploadExt(bucket: "gs://${env.JOB_GCS_BUCKET}/test-${env.BUILD_ID}/", pattern: 'file.txt', sharedPublicly: true)
14 | }
15 | }
16 | }
17 | }'''
18 |
19 | pipelineJob(NAME) {
20 | definition {
21 | cps {
22 | script(DSL.stripIndent())
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/installTools.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/installTools'
2 | DSL = '''pipeline {
3 | agent none
4 | stages {
5 | stage('windows') {
6 | agent { label 'windows-immutable' }
7 | steps {
8 | deleteDir()
9 | installTools([ [ tool: 'python3', version: '3.8'] ])
10 | }
11 | }
12 | }
13 | }'''
14 |
15 | pipelineJob(NAME) {
16 | definition {
17 | cps {
18 | script(DSL.stripIndent())
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/isTimerTrigger.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/isTimerTrigger'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('checkout') {
6 | steps {
7 | gitCheckout(basedir: 'sub-folder', branch: 'master',
8 | credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
9 | repo: 'https://github.com/octocat/Hello-World.git')
10 | }
11 | }
12 | stage('isTimerTrigger') {
13 | steps {
14 | script {
15 | if (isTimerTrigger()) {
16 | echo 'found'
17 | } else {
18 | echo 'not found'
19 | }
20 | }
21 | }
22 | }
23 | }
24 | }'''
25 |
26 | pipelineJob(NAME) {
27 | definition {
28 | cps {
29 | script(DSL.stripIndent())
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/isUserTrigger.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/isUserTrigger'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('checkout') {
6 | steps {
7 | gitCheckout(basedir: 'sub-folder', branch: 'master',
8 | credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
9 | repo: 'https://github.com/octocat/Hello-World.git')
10 | }
11 | }
12 | stage('isUserTrigger') {
13 | steps {
14 | script {
15 | if (isUserTrigger()) {
16 | echo 'found'
17 | } else {
18 | echo 'not found'
19 | }
20 | }
21 | }
22 | }
23 | }
24 | }'''
25 |
26 | pipelineJob(NAME) {
27 | definition {
28 | cps {
29 | script(DSL.stripIndent())
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/log.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/log'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('log with info') {
6 | steps { log(level: 'INFO', text: 'message') }
7 | }
8 | stage('log with debug') {
9 | stages {
10 | stage('debug disabled') {
11 | steps { log(level: 'DEBUG', text: 'message') }
12 | }
13 | stage('debug enabled') {
14 | environment { PIPELINE_LOG_LEVEL = 'DEBUG' }
15 | steps { log(level: 'DEBUG', text: 'message') }
16 | }
17 | }
18 | }
19 | }
20 | }'''
21 |
22 | pipelineJob(NAME) {
23 | definition {
24 | cps {
25 | script(DSL.stripIndent())
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/matrix.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/matrixAgent'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('Matrix sample') {
6 | steps {
7 | matrix(
8 | agent: 'linux',
9 | axes:[
10 | axis('VAR_NAME_00', [ 1, 2 ]),
11 | axis('VAR_NAME_01', [ 'a', 'b', 'c', 'd', 'e' ])
12 | ],
13 | excludes: [
14 | axis('VAR_NAME_00', [ 1 ]),
15 | axis('VAR_NAME_01', [ 'd', 'e' ]),
16 | ]
17 | ) {
18 | echo "${VAR_NAME_00} - ${VAR_NAME_01}"
19 | }
20 | }
21 | }
22 | }
23 | }'''
24 |
25 | pipelineJob(NAME) {
26 | definition {
27 | cps {
28 | script(DSL.stripIndent())
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/matrixNoAgent.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/matrixNoAgent'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('Matrix sample') {
6 | steps {
7 | matrix(
8 | axes:[
9 | axis('VAR_NAME_00', [ 1, 2 ]),
10 | axis('VAR_NAME_01', [ 'a', 'b', 'c', 'd', 'e' ])
11 | ],
12 | excludes: [
13 | axis('VAR_NAME_00', [ 1 ]),
14 | axis('VAR_NAME_01', [ 'd', 'e' ]),
15 | ]
16 | ) {
17 | echo "${VAR_NAME_00} - ${VAR_NAME_01}"
18 | }
19 | }
20 | }
21 | }
22 | }'''
23 |
24 | pipelineJob(NAME) {
25 | definition {
26 | cps {
27 | script(DSL.stripIndent())
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/parentstream.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/parentstream'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('trigger downstream') {
6 | steps {
7 | script {
8 | try {
9 | build job: 'downstream'
10 | } catch(e) {
11 | println e.getCauses()[0]?.getShortDescription()
12 | }
13 | }
14 | }
15 | }
16 | }
17 | }'''
18 |
19 | pipelineJob(NAME) {
20 | definition {
21 | cps {
22 | script(DSL.stripIndent())
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/pipelineManager.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/pipelineManager'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('pipelineManager') {
6 | steps {
7 | sleep randomNumber(min: 5, max: 30)
8 | pipelineManager(cancelPreviousRunningBuilds: [ when: 'ALWAYS' ])
9 | }
10 | }
11 | }
12 | }'''
13 |
14 | pipelineJob(NAME) {
15 | definition {
16 | cps {
17 | script(DSL.stripIndent())
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/runWatcher.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/runWatcher'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('runWatcher') {
6 | steps {
7 | runWatcher(watcher: '17635395-61cd-439a-963d-8e7bb6ab22b7')
8 | }
9 | }
10 | }
11 | }'''
12 |
13 | pipelineJob(NAME) {
14 | definition {
15 | cps {
16 | script(DSL.stripIndent())
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/superLinter.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/superLinter'
2 | DSL = '''
3 | pipeline {
4 | agent any
5 | stages {
6 | stage('super-linter') {
7 | steps {
8 | deleteDir()
9 | gitCheckout(credentialsId: '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken',
10 | repo: 'https://github.com/elastic/apm-pipeline-library.git',
11 | branch: 'master',
12 | basedir: 'sub-folder')
13 | dir('sub-folder') {
14 | superLinter(envs: [ 'VALIDATE_GO=false' ], failNever: true)
15 | }
16 | }
17 | }
18 | }
19 | }
20 | '''
21 |
22 | pipelineJob(NAME) {
23 | definition {
24 | cps {
25 | script(DSL.stripIndent())
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/withAWSEnv.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/withAWSEnv'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('withAWSEnv') {
6 | steps {
7 | withAWSEnv(secret: 'secret/observability-team/ci/service-account/aws-create-user') {
8 | sh 'aws --version'
9 | }
10 | }
11 | }
12 | }
13 | }'''
14 |
15 | pipelineJob(NAME) {
16 | definition {
17 | cps {
18 | script(DSL.stripIndent())
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/withTotpVault.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/withTotpVault'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('query totp in vault') {
6 | steps {
7 | withTotpVault(secret: 'totp-apm/code/v1v', code_var_name: 'VAULT_TOTP'){
8 | sh 'echo "VAULT_TOTP=${VAULT_TOTP}" > file.txt'
9 | }
10 | sh 'cat file.txt'
11 | }
12 | }
13 | }
14 | }'''
15 |
16 | pipelineJob(NAME) {
17 | definition {
18 | cps {
19 | script(DSL.stripIndent())
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/test/resources/jobs/writeVaultSecret.dsl:
--------------------------------------------------------------------------------
1 | NAME = 'it/writeVaultSecret'
2 | DSL = '''pipeline {
3 | agent any
4 | stages {
5 | stage('writeVaultSecret') {
6 | steps {
7 | writeVaultSecret(secret: 'secret/observability-team/ci/temp/github-comment',
8 | data: ['secret': "${BUILD_ID}"] )
9 | script {
10 | data = getVaultSecret(secret: 'secret/observability-team/ci/temp/github-comment')
11 | if (data.data.secret.contains("${BUILD_ID}")) {
12 | echo 'Assertion passed'
13 | } else {
14 | error('Assertion failed')
15 | }
16 | }
17 | }
18 | }
19 | }
20 | }'''
21 |
22 | pipelineJob(NAME) {
23 | definition {
24 | cps {
25 | script(DSL.stripIndent())
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-0WDC4.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "f47b6672-c981-39cd-bd28-a00433ec2ff7",
3 | "request" : {
4 | "url" : "/blue/rest/organizations/jenkins/pipelines/it/getBuildInfoJsonFiles/cobertura/runs/1/tests/?limit=10000000",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-0WDC4.json",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 10:37:10 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "X-Blueocean-Refresher" : "6f05bffd",
14 | "Cache-Control" : "no-cache, no-store, no-transform",
15 | "Content-Type" : "application/json",
16 | "Server" : "Jetty(9.4.39.v20210325)"
17 | }
18 | },
19 | "uuid" : "f47b6672-c981-39cd-bd28-a00433ec2ff7"
20 | }
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-C8sFC.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "fd5b68da-af6f-344b-a520-56db877abefc",
3 | "request" : {
4 | "url" : "/blue/rest/organizations/jenkins/pipelines/it/getBuildInfoJsonFiles/cobertura/runs/1/tests/?status=FAILED&limit=1000",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-blue-rest-organizations-jenkins-pipelines-it-getBuildInfoJsonFiles-cobertura-runs-1-tests-C8sFC.json",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 10:37:27 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "X-Blueocean-Refresher" : "6f05bffd",
14 | "Cache-Control" : "no-cache, no-store, no-transform",
15 | "Content-Type" : "application/json",
16 | "Server" : "Jetty(9.4.39.v20210325)"
17 | }
18 | },
19 | "uuid" : "fd5b68da-af6f-344b-a520-56db877abefc"
20 | }
21 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-abort-1-cobertura-api-json-QPHoF.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "5dc9106c-59b4-3e53-8f6b-4f797cb0fccd",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/abort/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-abort-1-cobertura-api-json-QPHoF.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:18 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "5dc9106c-59b4-3e53-8f6b-4f797cb0fccd"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-cobertura-1-cobertura-api-json-nxqFS.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "736401f2-2f0a-37a7-9dab-e01f4662b55c",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/cobertura/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 200,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-cobertura-1-cobertura-api-json-nxqFS.json",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:48:59 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "X-Jenkins" : "2.289",
14 | "X-Jenkins-Session" : "6f05bffd",
15 | "X-Frame-Options" : "deny",
16 | "Content-Type" : "application/json;charset=utf-8",
17 | "Server" : "Jetty(9.4.39.v20210325)"
18 | }
19 | },
20 | "uuid" : "736401f2-2f0a-37a7-9dab-e01f4662b55c"
21 | }
22 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-connectionRefused-1-cobertura-api-json-0OGve.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "77ffaf00-180e-327d-9a23-153a841a59f6",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/connectionRefused/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-connectionRefused-1-cobertura-api-json-0OGve.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:54:06 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "77ffaf00-180e-327d-9a23-153a841a59f6"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-empty-1-cobertura-api-json-w9hla.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "befbe74c-2212-3b00-a68a-778c770928ac",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/empty/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-empty-1-cobertura-api-json-w9hla.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:35 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "befbe74c-2212-3b00-a68a-778c770928ac"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-error-1-cobertura-api-json-w9hl2.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "befbe74c-2212-3b00-a686-778c770928ac",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/error/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-error-1-cobertura-api-json-w9hl2.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:35 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "befbe74c-2212-3b00-a686-778c770928ac"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-multiTestFailures-1-cobertura-api-json-A8nw5.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "6d1869fa-29fd-3601-b6d8-68d6f81e480a",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/multiTestFailures/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-multiTestFailures-1-cobertura-api-json-A8nw5.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:56 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "6d1869fa-29fd-3601-b6d8-68d6f81e480a"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-success-1-cobertura-api-json-ZkKp7.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "a3437932-8105-3d44-86c4-89f94277f3d5",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/success/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-success-1-cobertura-api-json-ZkKp7.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:41 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "a3437932-8105-3d44-86c4-89f94277f3d5"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/mappings/mapping-job-it-job-getBuildInfoJsonFiles-job-unstable-1-cobertura-api-json-vpDs1.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "957605a9-270a-377a-a890-e3efe41cbb87",
3 | "request" : {
4 | "url" : "/job/it/job/getBuildInfoJsonFiles/job/unstable/1/cobertura/api/json?tree=results[elements[name,ratio,denominator,numerator]]&depth=3",
5 | "method" : "GET"
6 | },
7 | "response" : {
8 | "status" : 404,
9 | "bodyFileName" : "body-job-it-job-getBuildInfoJsonFiles-job-unstable-1-cobertura-api-json-vpDs1.txt",
10 | "headers" : {
11 | "Date" : "Wed, 21 Jul 2021 09:53:47 GMT",
12 | "X-Content-Type-Options" : "nosniff",
13 | "Content-Type" : "text/html;charset=utf-8",
14 | "Server" : "Jetty(9.4.39.v20210325)"
15 | }
16 | },
17 | "uuid" : "957605a9-270a-377a-a890-e3efe41cbb87"
18 | }
19 |
--------------------------------------------------------------------------------
/src/test/resources/metricbeatTest/metricbeat_container_worker-0676d01d9601f8191.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "fooID",
3 | "config": "bar.xml",
4 | "image": "foo: latest",
5 | "workdir": "fooDir",
6 | "timeout": "30"
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/resources/metricbeatTest_1/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/elastic/apm-pipeline-library/7c77d1f04691c17e22facbbbbd16d0ce4dbec6a8/src/test/resources/metricbeatTest_1/.empty
--------------------------------------------------------------------------------
/src/test/resources/metricbeatTest_2/metricbeat_container_worker-0676d01d9601f8191.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "fooID",
3 | "config": "bar.xml",
4 | "image": "foo: latest",
5 | "output": "foo.log",
6 | "workdir": "fooDir",
7 | "timeout": "30",
8 | "archiveOnlyOnFail": "true"
9 | }
10 |
--------------------------------------------------------------------------------
/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker:
--------------------------------------------------------------------------------
1 | mock-maker-inline
2 |
--------------------------------------------------------------------------------
/src/test/resources/preCommitToJunit/output/simple.xml:
--------------------------------------------------------------------------------
1 | ![CDATA[hookid: forbid-en-dashes
2 | ]]
--------------------------------------------------------------------------------
/src/test/resources/preCommitToJunit/output/skipped.xml:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/test/resources/preCommitToJunit/simple.txt:
--------------------------------------------------------------------------------
1 | Check for case conflicts........................................................................Passed
2 | Check that executables have shebangs............................................................Skipped
3 | isort................................................(no files to check)Skipped
4 | Detect the EXTREMELY confusing unicode character U+2013.........................................Failed
5 | hookid: forbid-en-dashes
6 |
--------------------------------------------------------------------------------
/src/test/resources/steps-errors-with-github-environmental-issue.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "displayDescription": "Server returned HTTP response code: 500, message: '500 Internal Server Error' for URL: https://api.g",
4 | "displayName": "Notifies GitHub of the status of a Pull Request",
5 | "durationInMillis": 447,
6 | "id": "6672",
7 | "input": null,
8 | "result": "FAILURE",
9 | "startTime": "2021-01-19T10:47:45.782+0000",
10 | "state": "FINISHED",
11 | "type": "STEP",
12 | "url": "https://beats-ci.elastic.co//blue/rest/organizations/jenkins/pipelines/Beats/pipelines/beats/pipelines/PR-23560/runs/1/steps/6672/log"
13 | }
14 | ]
15 |
--------------------------------------------------------------------------------
/src/test/resources/tests-cobertura.json:
--------------------------------------------------------------------------------
1 | {
2 | "Classes": {
3 | "denominator": 0,
4 | "numerator": 0,
5 | "ratio": 100
6 | },
7 | "Conditionals": {
8 | "denominator": 0,
9 | "numerator": 0,
10 | "ratio": 100
11 | },
12 | "Lines": {
13 | "denominator": 0,
14 | "numerator": 0,
15 | "ratio": 100
16 | },
17 | "Files": {
18 | "denominator": 0,
19 | "numerator": 0,
20 | "ratio": 100
21 | },
22 | "Packages": {
23 | "denominator": 0,
24 | "numerator": 0,
25 | "ratio": 100
26 | },
27 | "Methods": {
28 | "denominator": 0,
29 | "numerator": 0,
30 | "ratio": 100
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/test/resources/tests-summary.json:
--------------------------------------------------------------------------------
1 | {
2 | "_class": "io.jenkins.blueocean.rest.model.BlueTestSummary",
3 | "_links": {
4 | "self": {
5 | "_class": "io.jenkins.blueocean.rest.hal.Link",
6 | "href": "/blue/rest/organizations/jenkins/pipelines/folder/pipelines/mbp/pipelines/branch/runs/49/blueTestSummary/"
7 | }
8 | },
9 | "existingFailed": 0,
10 | "failed": 0,
11 | "fixed": 0,
12 | "passed": 121,
13 | "regressions": 0,
14 | "skipped": 0,
15 | "total": 121
16 | }
17 |
--------------------------------------------------------------------------------
/src/test/resources/tests-summary_failed.json:
--------------------------------------------------------------------------------
1 | {
2 | "_class": "io.jenkins.blueocean.rest.model.BlueTestSummary",
3 | "_links": {
4 | "self": {
5 | "_class": "io.jenkins.blueocean.rest.hal.Link",
6 | "href": "/blue/rest/organizations/jenkins/pipelines/folder/pipelines/mbp/pipelines/branch/runs/49/blueTestSummary/"
7 | }
8 | },
9 | "existingFailed": 1,
10 | "failed": 1,
11 | "fixed": 0,
12 | "passed": 120,
13 | "regressions": 0,
14 | "skipped": 0,
15 | "total": 121
16 | }
17 |
--------------------------------------------------------------------------------
/updatecli/values.yml:
--------------------------------------------------------------------------------
1 | github:
2 | owner: "elastic"
3 | repository: "apm-pipeline-library"
4 |
--------------------------------------------------------------------------------
/vars/abortBuild.txt:
--------------------------------------------------------------------------------
1 | Abort the given build with the given message
2 |
3 | ```
4 | // Kill the current build with the default message.
5 | abortBuild(build: currentBuild)
6 |
7 | // Kill the previous build for the current run and set its description message.
8 | abortBuild(build: currentBuild.getPreviousBuild(), message: 'Abort previous build')
9 | ```
10 |
11 | * build: the RunBuild to be aborted. Mandatory
12 | * message: what's the message to be exposed as an error and in the build description. Optional. Default to 'Force to abort the build'
13 |
--------------------------------------------------------------------------------
/vars/agentMapping.txt:
--------------------------------------------------------------------------------
1 | Return the value for the given key.
2 |
3 | ```
4 | agentMapping.envVar('dotnet')
5 | agentMapping.agentVar('.NET')
6 | agentMapping.app('Python')
7 | agentMapping.id('All')
8 | agentMapping.opbeansApp('Python')
9 | agentMapping.yamlVersionFile('UI')
10 | ```
11 |
--------------------------------------------------------------------------------
/vars/artifactsApi.txt:
--------------------------------------------------------------------------------
1 | This step helps to query the artifacts-api Rest API and returns
2 | a JSON object.
3 |
4 | ```
5 | import groovy.transform.Field
6 |
7 | @Field def latestVersions
8 |
9 | script {
10 | versions = artifactsApi(action: 'latest-versions')
11 | }
12 | ```
13 |
14 | * action: What's the action to be triggered. Mandatory
15 |
16 | _NOTE_: It only supports *nix.
17 |
--------------------------------------------------------------------------------
/vars/axis.txt:
--------------------------------------------------------------------------------
1 | Build a vector of pairs [ name: "VAR_NAME", value: "VALUE" ]
2 | from a variable name (VAR_NAME) and a vector of values ([1,2,3,4,5]).
3 |
4 | ```
5 | def v = axis('MY_VAR', [1, 2, 3, 4, 5])
6 | def vs = axis('MY_VAR', ["1", "2", "3", "4", "5"])
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/base64decode.txt:
--------------------------------------------------------------------------------
1 | Decode a base64 input to string
2 |
3 | ```
4 | base64decode(input: "ZHVtbXk=", encoding: "UTF-8")
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/base64encode.txt:
--------------------------------------------------------------------------------
1 | Encode a text to base64
2 |
3 | ```
4 | base64encode(text: "text to encode", encoding: "UTF-8")
5 | ```
6 |
7 | * *text:* Text to encode as base64.
8 | * *padding:* if true, padding is applied (default: true)
9 |
--------------------------------------------------------------------------------
/vars/beatsStages.txt:
--------------------------------------------------------------------------------
1 |
2 | Given the YAML definition then it creates all the stages
3 |
4 | The list of step's params and the related default values are:
5 |
6 | - project: the name of the project. Mandatory
7 | - content: the content with all the stages and commands to be transformed. Mandatory
8 | - function: the function to be called. Should implement the class BeatsFunction. Mandatory
9 | - filterStage: the name of the stage to be filtered. Optional
10 |
11 |
12 |
13 |
14 | script {
15 | def mapParallelTasks = [:]
16 | beatsStages(project: 'auditbeat', content: readYaml(file: 'auditbeat/Jenkinsfile.yml'), function: this.&myFunction)
17 | parallel(mapParallelTasks)
18 | }
19 |
20 | def myFunction(Map args = [:]) {
21 | ...
22 | }
23 |
24 |
--------------------------------------------------------------------------------
/vars/buildStatus.txt:
--------------------------------------------------------------------------------
1 | Fetch the current build status for a given job
2 | ```
3 | def status = buildStatus(host: 'localhost', job: ['apm-agent-java', 'apm-agent-java-mbp', 'main'], as_bool: false)
4 | ```
5 |
6 | * host: The Jenkins server to connect to. Defaults to `localhost`.
7 | * job: The job to fetch status for. This should be a list consisting of the path to job. For example, when viewing the Jenkins
8 | CI, in the upper-left of the browser, one might see a path to a job with a URL as follows:
9 |
10 | https://apm-ci.elastic.co/job/apm-agent-java/job/apm-agent-java-mbp/job/main/
11 |
12 | In this case, the corresponding list would be formed as:
13 |
14 | ['apm-agent-java', 'apm-agent-java-mbp', 'main']
15 |
16 | * as_bool: Returns `true` if the job status is `Success`. Any other job status returns `false`.
17 | * ssl: Set to `false` to disable SSL. Default is `true`.
18 |
--------------------------------------------------------------------------------
/vars/cancelPreviousRunningBuilds.txt:
--------------------------------------------------------------------------------
1 | **DEPRECATED**: use `disableConcurrentBuilds(abortPrevious: isPR())`
2 |
3 | Abort any previously running builds as soon as a new build starts
4 |
5 | ```
6 | cancelPreviousRunningBuilds()
7 | ```
8 |
9 | See https://issues.jenkins-ci.org/browse/JENKINS-43353
10 |
11 | * maxBuildsToSearch: number of previous builds to be searched and aborted if so. Default to 10.
12 |
--------------------------------------------------------------------------------
/vars/checkGitChanges.txt:
--------------------------------------------------------------------------------
1 | use git diff to check the changes on a path, then return true or false.
2 |
3 | ```
4 | def numOfChanges = checkGitChanges(target: env.CHANGE_TARGET, commit: env.GIT_SHA, prefix: '_beats')
5 | ```
6 |
7 | * target: branch or commit to use as reference to check the changes.
8 | * commit: branch or commit to compare target to
9 | * prefix: text to find at the beginning of file changes.
10 |
--------------------------------------------------------------------------------
/vars/checkLicenses.txt:
--------------------------------------------------------------------------------
1 | Use the elastic licenser
2 |
3 | ```
4 | checkLicenses()
5 |
6 | checkLicenses(ext: '.groovy')
7 |
8 | checkLicenses(skip: true, ext: '.groovy')
9 |
10 | checkLicenses(ext: '.groovy', exclude: './target', license: 'Elastic', licensor: 'Elastic A.B.')
11 |
12 | ```
13 |
14 | * skip: Skips rewriting files and returns exitcode 1 if any discrepancies are found. Default: false.
15 | * junit: Whether to generate a JUnit report. It does require the skip flag. Default: false.
16 | * exclude: path to exclude. (Optional)
17 | * ext: sets the file extension to scan for. (Optional)
18 | * license string: sets the license type to check: ASL2, Elastic, Cloud (default "ASL2"). (Optional)
19 | * licensor: sets the name of the licensor. (Optional)
20 |
21 | [Docker pipeline plugin](https://plugins.jenkins.io/docker-workflow)
22 |
--------------------------------------------------------------------------------
/vars/checkout.txt:
--------------------------------------------------------------------------------
1 | Override the `checkout` step to retry the checkout up to 3 times.
2 |
3 | ```
4 | checkout scm
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/cmd.txt:
--------------------------------------------------------------------------------
1 | Wrapper to run bat or sh steps based on the OS system.
2 |
3 | _NOTE_: bat with returnStdout requires @echo off to bypass the known issue
4 | https://issues.jenkins-ci.org/browse/JENKINS-44569
5 | Therefore it will be included automatically!
6 |
7 | For instance:
8 | ```
9 | if (isUnix()) {
10 | sh(label: 'foo', script: 'git fetch --all')
11 | } else {
12 | bat(label: 'foo', script: 'git fetch --all')
13 | }
14 | ```
15 |
16 | Could be simplified with:
17 |
18 | ```
19 | cmd(label: 'foo', script: 'git fetch --all')
20 | ```
21 |
22 | Parameters:
23 | * See `sh` and `bat` steps
24 |
--------------------------------------------------------------------------------
/vars/codecov.txt:
--------------------------------------------------------------------------------
1 | Submits coverage information to codecov.io using their [bash script](https://codecov.io/bash)
2 |
3 | ```
4 | codecov(basedir: "${WORKSPACE}", repo: 'apm-agent-go', secret: 'secret/observability-team/ci/apm-agent-go-codecov')
5 | ```
6 | *repo*: The repository name (for example apm-agent-go), it is needed
7 | *basedir*: the folder to search into (the default value is '.').
8 | *flags*: a string holding arbitrary flags to pass to the codecov bash script
9 | *secret*: Vault secret where the CodeCov project token is stored.
10 |
11 | It requires to initialise the pipeline with githubEnv() first.
12 |
13 | [Original source](https://github.com/docker/jenkins-pipeline-scripts/blob/master/vars/codecov.groovy)
14 |
--------------------------------------------------------------------------------
/vars/convertGoTestResults.txt:
--------------------------------------------------------------------------------
1 | Converts the Go test result output to JUnit result file
2 |
3 | ```
4 | sh(label: 'Run test', script: 'go test -v ./...|tee unit-report.txt')
5 | convertGoTestResults(input: 'unit-report.txt', output: 'junit-report.xml')
6 | ```
7 |
8 | * input: file contains the verbose Go test output.
9 | * output: where to save the JUnit report.
10 |
--------------------------------------------------------------------------------
/vars/coverageReport.txt:
--------------------------------------------------------------------------------
1 | Grab the coverage files, and create the report in Jenkins.
2 |
3 | ```
4 | coverageReport("path_to_base_folder")
5 | coverageReport(baseDir: "path_to_base_folder", reportFiles: 'coverage*.html', coverageFiles: 'coverage*.xml')
6 | ```
7 |
8 | * baseDir: The path to the report directory relative to the workspace. Mandatory
9 | * reportFiles: Report Files. Mandatory
10 | * coverageFiles: Coverage Files. Mandatory
11 |
--------------------------------------------------------------------------------
/vars/createFileFromTemplate.txt:
--------------------------------------------------------------------------------
1 |
2 | Create a file given a Jinja template and the data in a JSON format
3 |
4 | ```
5 | // if the template to be used is the one in the shared library
6 | createFileFromTemplate(data: 'my-data.json', template: 'my-template.md.j2', output: 'file.md')
7 |
8 | // if the template to be used is another one in the local workspace
9 | createFileFromTemplate(data: 'my-data.json', template: 'src/foo/templates/my-template.md.j2', output: 'file.md', localTemplate: true)
10 | ```
11 |
12 | * data: JSON file with the data to be consumed in the template. Mandatory.
13 | * template: jinja template to be used. Mandatory.
14 | * output: the name of the file to be transformed. Mandatory.
15 | * localTemplate: whether to use the template in the local workspace. Optional. Default `false`.
16 |
--------------------------------------------------------------------------------
/vars/detailsURL.txt:
--------------------------------------------------------------------------------
1 | Generate the details URL to be added to the GitHub notifications. When possible it will look for the stage logs URL in BlueOcean.
2 |
3 | ```
4 | def url = detailsURL(tab: 'artifacts', isBlueOcean: true)
5 | ```
6 |
7 | * tab: What kind of details links will be used. Enum type: tests, changes, artifacts, pipeline or a URL. Default `pipeline`.
8 | * isBlueOcean: Whether to use the BlueOcean URLs. Default `false`.
9 |
--------------------------------------------------------------------------------
/vars/dockerContext.txt:
--------------------------------------------------------------------------------
1 | Fetch the docker environment in the current context using filebeat and metricbeat
2 |
3 | ```
4 | // Archive all the docker logs in the current context
5 | dockerContext(filebeatOutput: 'logs.txt', metricbeatOutput: 'health.txt') {
6 | //
7 | }
8 | ```
9 |
10 | * *filebeatOutput*: log file to save all Docker logs details (docker-filebeat.log). Optional
11 | * *metricbeatOutput*: log file to save all Docker metricbeat details (docker-metricbeat.log). Optional
12 | * *archiveOnlyOnFail:* if true only archive the files in case of failure.
13 |
14 | _NOTE_: Windows is not supported.
15 |
--------------------------------------------------------------------------------
/vars/dockerImageExists.txt:
--------------------------------------------------------------------------------
1 | Checks if the given Docker image exists.
2 |
3 | ```
4 | dockerImageExists(image: 'hello-world:latest')
5 | ```
6 |
7 | * image: Fully qualified name of the image
8 |
--------------------------------------------------------------------------------
/vars/dockerLogin.txt:
--------------------------------------------------------------------------------
1 | Login to hub.docker.com with authentication credentials from a Vault secret.
2 | The vault secret contains `user` and `password` fields with the authentication details.
3 |
4 | ```
5 | dockerLogin(secret: 'secret/team/ci/secret-name')
6 | ```
7 |
8 | ```
9 | dockerLogin(secret: 'secret/team/ci/secret-name', registry: "docker.io")
10 | ```
11 |
12 | * secret: Vault secret where the user and password stored.
13 | * registry: Registry to login into.
14 | * role_id: vault role ID (Optional).
15 | * secret_id: vault secret ID (Optional).
16 |
--------------------------------------------------------------------------------
/vars/download.txt:
--------------------------------------------------------------------------------
1 | Download the given URL regardless of the tool.
2 |
3 | ```
4 | download(url: 'https://....', output: 'gsutil.tar.gz')
5 | ```
6 |
7 | * url: The URL to be downloaded. Mandatory
8 | * output: The file where the output will be written to. Mandatory.
9 |
--------------------------------------------------------------------------------
/vars/downloadWithCurl.txt:
--------------------------------------------------------------------------------
1 | Download the given URL using curl, if possible. It returns true if possible otherwise false
2 |
3 | ```
4 | downloadWithCurl(url: 'https://....', output: 'gsutil.tar.gz')
5 | ```
6 |
7 | * url: The URL to be downloaded. Mandatory
8 | * output: The file where the curl output will be written to. Mandatory.
9 |
--------------------------------------------------------------------------------
/vars/downloadWithWget.txt:
--------------------------------------------------------------------------------
1 | Download the given URL using Wget, if possible. It returns true if possible otherwise false
2 |
3 | ```
4 | downloadWithWget(url: 'https://....', output: 'gsutil.tar.gz')
5 | ```
6 |
7 | * url: The URL to be downloaded. Mandatory
8 | * output: The file where the wget output will be written to. Mandatory.
9 |
--------------------------------------------------------------------------------
/vars/dummy.txt:
--------------------------------------------------------------------------------
1 | A sample of a step implementation.
2 |
3 | ```
4 | dummy(text: 'hello world')
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/dummyDeclarativePipeline.txt:
--------------------------------------------------------------------------------
1 | A sample of a step implementation as a declarative pipeline.
2 |
3 | ```
4 | dummyDeclarativePipeline()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/echoColor.txt:
--------------------------------------------------------------------------------
1 | Print a text on color on a xterm.
2 |
3 | ```
4 | echoColor(text: '[ERROR]', colorfg: 'red', colorbg: 'black')
5 | ```
6 | * *text*: Text to print.
7 | * *colorfg*: Foreground color.(default, red, green, yellow,...)
8 | * *colorbg*: Background color.(default, red, green, yellow,...)
9 |
--------------------------------------------------------------------------------
/vars/errorIfEmpty.txt:
--------------------------------------------------------------------------------
1 | If the given value is empty or null then it throws an error
2 |
3 | ```
4 | errorIfEmpty(my_value, "it is not supported.")
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/findOldestSupportedVersion.txt:
--------------------------------------------------------------------------------
1 | Find the oldest stack version given the condition to compare with.
2 |
3 | If the version doesn't exist yet, it will try to use the closest snapshot, for example
4 | if 7.14.1 doesn't exist, it will try to use 7.14.1-SNAPSHOT or 7.x-SNAPSHOT,
5 | this will allow to develop integrations with unreleased features.
6 |
7 |
8 | ```
9 | findOldestSupportedVersion(versionCondition: "^7.14.0")
10 | ```
11 |
12 | * versionCondition: The condition to compare with. Mandatory
13 |
14 | NOTE: Current implementation only supports the `^` operator for version conditions
15 |
--------------------------------------------------------------------------------
/vars/flattenMap.txt:
--------------------------------------------------------------------------------
1 | Return the given map as a flattened map. By default, it uses the
2 | dot as a separator between keys. This separator can be customized too.
3 |
4 |
5 | ```
6 | def m = ["a": ["b": ["c":[1,2,3], "d":[4,5,6] ] ] ]
7 |
8 | def flattened = flattenMap(map: m)
9 |
10 | // flattened == ["a.b.c": [1,2,3], "a.b.d": [4,5,6] ]
11 |
12 | def flattenedDash = flattenMap(map: m, separator: "-")
13 |
14 | // flattenedDash == ["a-b-c": [1,2,3], "a-b-d": [4,5,6] ]
15 | ```
16 |
17 | * map: Map to be flattened.
18 | * separator: Character to be used to separate the keys of the map.
19 |
--------------------------------------------------------------------------------
/vars/generateChangelog.txt:
--------------------------------------------------------------------------------
1 | Programmatically generate a CHANGELOG
2 |
3 | ```
4 | generateChangelog(
5 | user: 'elastic',
6 | repo: 'apm-pipeline-library'
7 | )
8 | ```
9 |
10 | * user: The GitHub user the repo belongs to. (Default: elastic)
11 | * repo: The GitHub repo to generate the CHANGELOG for. If this
12 | is not present, the `REPO_NAME` environment variable is
13 | used.
14 |
15 | [GitHub Changelog Generator documentation](https://github.com/github-changelog-generator/github-changelog-generator)
16 |
--------------------------------------------------------------------------------
/vars/generateGoBenchmarkDiff.txt:
--------------------------------------------------------------------------------
1 | Generate a Go benchmark report by comparing the existing benchmark with
2 | the `CHANGE_TARGET` variable if exists.
3 |
4 | This particular step is quite opinionated:
5 | - It relies on the `CHANGE_TARGET` to generate the diff to compare with.
6 | - CI builds archive their go benchmark report in the root folder.
7 | - It uses Golang to run the benchmark.
8 | - It produces a file with the name `bench.diff`
9 |
10 | ```
11 | // This will create a diff report with the name `bench.diff` in the build folder.
12 | generateGoBenchmarkDiff(file: 'bench.out', filter: 'exclude')
13 | ```
14 |
15 | * file: The name of the file to be compared with. Mandatory
16 | * filter: Whether to apply a filter in the diff. Values: `none`, `exclude`. Optional (default: `none`)
17 |
18 | _NOTE_: It only supports *nix.
19 |
--------------------------------------------------------------------------------
/vars/getBlueoceanDisplayURL.txt:
--------------------------------------------------------------------------------
1 | Provides the Blueocean URL for the current build/run
2 |
3 | ```
4 | def URL = getBlueoceanDisplayURL()
5 | ```
6 |
7 | [Powershell plugin](https://plugins.jenkins.io/powershell)
8 |
--------------------------------------------------------------------------------
/vars/getBlueoceanRestURLJob.txt:
--------------------------------------------------------------------------------
1 | Given the job URL then returns its BlueOcean Rest URL
2 |
3 | ```
4 | def URL = getBlueoceanRestURLJob(jobURL: env.JOB_URL)
5 | ```
6 |
7 | * jobURL: the job URL. Mandatory
8 |
--------------------------------------------------------------------------------
/vars/getBlueoceanTabURL.txt:
--------------------------------------------------------------------------------
1 | Provides the specific Blueocean URL tab for the current build/run
2 |
3 | Tab refers to the kind of available tabs in the BO view. So far:
4 | * pipeline
5 | * tests
6 | * changes
7 | * artifacts
8 |
9 | ```
10 | def testURL = getBlueoceanTabURL('tests')
11 | def artifactURL = getBlueoceanTabURL('artifacts')
12 | ```
13 |
--------------------------------------------------------------------------------
/vars/getBranchUnifiedRelease.txt:
--------------------------------------------------------------------------------
1 | Download the properties file for the given branch in the unified release
2 |
3 | ```
4 | // Download the properties file for the 8.1 branch
5 | getBranchUnifiedRelease('8.1')
6 | ```
7 |
--------------------------------------------------------------------------------
/vars/getBranchesFromAliases.txt:
--------------------------------------------------------------------------------
1 | This step parses the given list of branch aliases and return
2 | the branch name.
3 |
4 | This is handy to support a dynamic branch generation without the need to
5 | update the name of the branch when a new minor release branch is created.
6 |
7 | This format supports passing an index, separated by the minus operator: '<minor-1>', which will retrieve the previous
8 | version for the last minor. If the index overflows the number of the total existing minors, the first minor will be retrieved (i.e.
9 | '<minor-10>').
10 |
11 | ```
12 | // Return the branch name for the main, 8.minor and 8.next-minor branches
13 | def branches = getBranchesFromAliases(aliases: ['main', '8.<minor>', '8.<next-minor>'])
14 |
15 | ```
16 |
17 |
18 | * aliases: the branch aliases (supported format major.<minor>, major.<next-minor>, major.<minor-N>, major.x). Mandatory
19 |
--------------------------------------------------------------------------------
/vars/getBuildInfoJsonFiles.txt:
--------------------------------------------------------------------------------
1 | Grab build related info from the Blueocean REST API and store it on JSON files.
2 | Then put all together in a simple JSON file.
3 |
4 | ```
5 | getBuildInfoJsonFiles(jobURL: env.JOB_URL, buildNumber: env.BUILD_NUMBER)
6 | ```
7 |
8 | * jobURL: the job URL. Mandatory
9 | * buildNumber: the build id. Mandatory
10 | * returnData: whether to return a data structure with the build details then other steps can consume them. Optional. Default false
11 |
--------------------------------------------------------------------------------
/vars/getCurrentBuildTime.txt:
--------------------------------------------------------------------------------
1 | Returns the timestamp formatted in xs:dateTime.
2 |
3 | See https://javadoc.jenkins-ci.org/hudson/model/Run.html#getTimestampString2--
4 |
5 | ```
6 | getCurrentBuildTime()
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/getFlakyJobName.txt:
--------------------------------------------------------------------------------
1 | Get the flaky job name in a given multibranch pipeline.
2 |
3 | ```
4 | getFlakyJobName(withBranch: 'main')
5 | ```
6 |
7 | * withBranch: the job base name to compare with. Mandatory
8 |
--------------------------------------------------------------------------------
/vars/getGitCommitSha.txt:
--------------------------------------------------------------------------------
1 | Get the current commit SHA from the .git folder.
2 | If the checkout was made by Jenkins, you would use the environment variable GIT_COMMIT.
3 | In other cases, you probably have to use this step.
4 |
5 | ```
6 | def sha = getGitCommitSha()
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/getGitRepoURL.txt:
--------------------------------------------------------------------------------
1 | Get the current git repository url from the .git folder.
2 | If the checkout was made by Jenkins, you would use the environment variable GIT_URL.
3 | In other cases, you probably have to use this step.
4 |
5 | ```
6 | def repoUrl = getGitRepoURL()
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/getGithubToken.txt:
--------------------------------------------------------------------------------
1 | return the Github token.
2 |
3 | ```
4 | def token = getGithubToken()
5 | ```
6 |
7 | * credentialsId: it is possible to pass a credentials ID as parameter, by default use a hardcoded ID
8 |
--------------------------------------------------------------------------------
/vars/getModulesFromCommentTrigger.txt:
--------------------------------------------------------------------------------
1 | If the build was triggered by a comment in GitHub then get the sorted list of
2 | modules which were referenced in the comment.
3 |
4 | Supported format:
5 | - `jenkins run the tests for the module foo`
6 | - `jenkins run the tests for the module foo,bar,xyz`
7 | - `jenkins run the tests for the module _ALL_`
8 |
9 | ```
10 | def modules = getModulesFromCommentTrigger()
11 | def modules = getModulesFromCommentTrigger(regex: 'module\\W+(.+)')
12 | ```
13 |
14 |
15 | * *regex*: the regex to search in the comment. The default one is the `'(?i).*(?:jenkins\\W+)?run\\W+(?:the\\W+)?tests\\W+for\\W+the\\W+module\\W+(.+)'`. Optional
16 | * *delimiter*: the delimiter to use. The default one is the `,`. Optional
17 |
--------------------------------------------------------------------------------
/vars/getStageId.txt:
--------------------------------------------------------------------------------
1 | Get the stage ID in the current context.
2 |
3 | ```
4 | def stage = getStageId()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/getTestClusterSecret.txt:
--------------------------------------------------------------------------------
1 | Get the Vault location where the test cluster secrets are stored
2 |
3 | ```
4 | def secret = "${getTestClusterSecret()}/my-test-cluster"
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/getTraditionalPageURL.txt:
--------------------------------------------------------------------------------
1 | Provides the specific traditional URL tab for the current build/run
2 |
3 | Tab refers to the kind of available pages in the traditional view. So far:
4 | * pipeline -> aka the build run (for BO compatibilities)
5 | * tests
6 | * changes
7 | * artifacts
8 | * cobertura
9 | * gcs
10 |
11 |
12 | ```
13 | def testURL = getTraditionalPageURL('tests')
14 | def artifactURL = getTraditionalPageURL('artifacts')
15 | ```
16 |
--------------------------------------------------------------------------------
/vars/getVaultSecret.txt:
--------------------------------------------------------------------------------
1 | Get a secret from the Vault.
2 | You will need some credentials created to use the vault :
3 | * vault-addr : the URL of the vault (https://vault.example.com:8200)
4 | * vault-role-id : the role to authenticate (db02de05-fa39-4855-059b-67221c5c2f63)
5 | * vault-secret-id : the secret to authenticate (6a174c20-f6de-a53c-74d2-6018fcceff64)
6 |
7 | ```
8 | def jsonValue = getVaultSecret('secret-name')
9 | ```
10 |
11 | ```
12 | def jsonValue = getVaultSecret(secret: 'secret/team/ci/secret-name')
13 | ```
14 |
15 | * *secret-name*: Name of the secret on the vault root path.
16 | * role_id: vault role ID (Optional). Default 'vault-role-id'
17 | * secret_id: vault secret ID (Optional). Default 'vault-secret-id'
18 |
--------------------------------------------------------------------------------
/vars/git.txt:
--------------------------------------------------------------------------------
1 | Override the `git` step to retry the checkout up to 3 times.
2 |
3 | ```
4 | git scm
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/gitChangelog.txt:
--------------------------------------------------------------------------------
1 | Return the changes between the parent commit and the current commit.
2 |
3 | ```
4 | def changelog = gitChangelog()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/gitCmd.txt:
--------------------------------------------------------------------------------
1 | Execute a git command against the git repo, using the credentials passed.
2 | It requires to initialise the pipeline with githubEnv() first.
3 |
4 | ```
5 | gitCmd(credentialsId: 'my_credentials', cmd: 'push', args: '-f')
6 | ```
7 |
8 | * credentialsId: the credentials to access the repo.
9 | * cmd: Git command (tag, push, ...)
10 | * args: additional arguments passed to `git` command.
11 | * store: Whether to redirect the output to a file and archive it. Optional. Default value 'false'
12 |
--------------------------------------------------------------------------------
/vars/gitCreateTag.txt:
--------------------------------------------------------------------------------
1 | Create a git TAG named ${BUILD_TAG} and push it to the git repo.
2 | It requires to initialise the pipeline with githubEnv() first.
3 |
4 | ```
5 | gitCreateTag()
6 | ```
7 |
8 | ```
9 | gitCreateTag(tag: 'tagName', credentialsId: 'my_credentials')
10 | ```
11 |
12 | * tag: name of the new tag.
13 | * tagArgs: what arguments are passed to the tag command
14 | * credentialsId: the credentials to access the repo.
15 | * pushArgs: what arguments are passed to the push command
16 |
--------------------------------------------------------------------------------
/vars/gitDeleteTag.txt:
--------------------------------------------------------------------------------
1 | Delete a git TAG named ${BUILD_TAG} and push it to the git repo.
2 | It requires to initialise the pipeline with githubEnv() first.
3 |
4 | ```
5 | gitDeleteTag()
6 | ```
7 |
8 |
9 | ```
10 | gitDeleteTag(tag: 'tagName', credentialsId: 'my_credentials')
11 | ```
12 |
13 | * tag: name of the new tag.
14 | * credentialsId: the credentials to access the repo.
15 |
--------------------------------------------------------------------------------
/vars/gitPush.txt:
--------------------------------------------------------------------------------
1 | Push changes to the git repo.
2 | It requires to initialise the pipeline with githubEnv() first.
3 |
4 | ```
5 | gitPush()
6 | ```
7 |
8 | ```
9 | gitPush(args: '-f', credentialsId: 'my_credentials')
10 | ```
11 |
12 | * args: additional arguments passed to `git push` command.
13 | * credentialsId: the credentials to access the repo.
14 |
--------------------------------------------------------------------------------
/vars/githubApiCall.txt:
--------------------------------------------------------------------------------
1 |
2 | Make a REST API call to Github. It manages to hide the call and the token in the console output.
3 |
4 | ```
5 | githubApiCall(token: '4457d4e98f91501bb7914cbb29e440a857972fee', url: "https://api.github.com/repos/${repoName}/pulls/${prID}")
6 | ```
7 |
8 | * token: String to use as authentication token.
9 | * url: URL of the Github API call.
10 | * allowEmptyResponse: whether to allow empty responses. Default false.
11 | * method: what kind of request. Default 'POST' when using the data parameter. Optional.
12 | * data: Data to post to the API. Pass as a Map.
13 | * noCache: whether to force the API call without the already cached data if any. Default false.
14 | * failNever: NEVER fail the step, regardless of step result
15 |
16 | [Github REST API](https://developer.github.com/v3/)
17 |
--------------------------------------------------------------------------------
/vars/githubAppToken.txt:
--------------------------------------------------------------------------------
1 | Get the GitHub APP token given the vault secret
2 |
3 | ```
4 | def token = githubAppToken()
5 | ```
6 |
7 | * secret: vault secret used to interact with the GitHub App, it should have the `key`, `installation_id` and `app_id` fields. Default: 'secret/observability-team/ci/github-app'
8 |
9 | [GitHub Check docs](https://docs.github.com/en/free-pro-team@latest/rest/reference/checks#runs)
10 |
--------------------------------------------------------------------------------
/vars/githubBranchRef.txt:
--------------------------------------------------------------------------------
1 | return the branch name, if we are in a branch, or the git ref, if we are in a PR.
2 |
3 | ```
4 | def ref = githubBranchRef()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/githubCommentIssue.txt:
--------------------------------------------------------------------------------
1 | Comment an existing GitHub issue
2 |
3 | ```
4 | // Add a new comment to the issue 123 using the REPO_NAME and ORG_NAME env variables
5 | githubCommentIssue(id: 123, comment: 'My new comment')
6 |
7 | // Add a new comment to the issue 123 from foo/repo
8 | githubCommentIssue(org: 'foo', repo: 'repo', id: 123, comment: 'My new comment')
9 | ```
10 |
11 | * comment: The comment. Mandatory
12 | * id: The GitHub issue. Mandatory
13 | * org: The GitHub organisation. Optional. Default the ORG_NAME env variable
14 | * repo: The GitHub repository. Optional. Default the REPO_NAME env variable
15 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7
16 |
17 | _NOTE_:
18 | * Windows is not supported yet.
19 | * It uses hub. Not supported yet by gh, see https://github.com/cli/cli/issues/517
20 |
--------------------------------------------------------------------------------
/vars/githubCreateIssue.txt:
--------------------------------------------------------------------------------
1 | Create an Issue in GitHub as long as the command runs in the git repo.
2 |
3 | ```
4 | githubCreateIssue(title: 'Foo')
5 | githubCreateIssue(title: 'Foo', description: 'Something else to be added', assign: 'v1v', labels: 'automation')
6 | ```
7 |
8 | * title: The issue title. Mandatory
9 | * description: The issue description. Optional.
10 | * assign: A comma-separated list (no spaces around the comma) to assign to the created issue. Optional.
11 | * milestone: The milestone name to add to the created issue. Optional
12 | * labels: A comma-separated list (no spaces around the comma) of labels to add to this issue. Optional.
13 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7
14 |
15 | _NOTE_: Windows is not supported yet.
16 |
--------------------------------------------------------------------------------
/vars/githubEnv.txt:
--------------------------------------------------------------------------------
1 | Creates some environment variables to identify the repo and the change type (change, commit, PR, ...)
2 |
3 | ```
4 | githubEnv()
5 | ```
6 |
7 | * `GIT_URL`: if it is not set, it will create the environment variable GIT_URL, getting it from local repo.
8 | * `ORG_NAME`: is the organization name in the git URL, it sets this environment variable processing the GIT_URL.
9 | * `REPO_NAME`: repository name in the git URL, it sets this environment variable processing the GIT_URL.
10 | * `GIT_SHA`: current commit SHA1, it sets this getting it from local repo.
11 | * `GIT_BUILD_CAUSE`: build cause can be a pull request(pr), a commit, or a merge
12 | * `GIT_BASE_COMMIT`: On PRs it points to the commit before the merge; on branches it is the same as GIT_COMMIT and GIT_SHA
13 |
--------------------------------------------------------------------------------
/vars/githubIssues.txt:
--------------------------------------------------------------------------------
1 | Look for the GitHub issues in the current project given the labels to be filtered with. It returns
2 | a dictionary with the issue id as primary key and then the status, title, labels and date values.
3 |
4 | ```
5 | // Look for all the open GitHub issues with labels foo and bar
6 | githubIssues(labels: [ 'foo', 'bar' ])
7 | ```
8 |
9 | * *labels*: list of labels to be filtered. Optional
10 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken
11 |
--------------------------------------------------------------------------------
/vars/githubPrCheckApproved.txt:
--------------------------------------------------------------------------------
1 | If the current build is a PR, it would check if it is approved or created
2 | by a user with write/admin permission on the repo or a trusted user.
3 |
4 | If it is not approved, the method will throw an error.
5 |
6 | ```
7 | githubPrCheckApproved()
8 | ```
9 |
10 | NOTE: `REPO_NAME` env variable is required, so gitHubEnv step is the one in charge
11 |
12 | ```
13 | githubPrCheckApproved(org: 'elastic', repo: 'apm-pipeline-library', changeId: 1000, token: "env.GITHUB_TOKEN")
14 | ```
15 |
16 | * *org:* GitHub organization/owner of the repository (by default ORG_NAME).
17 | * *repo:* GitHub repository name (by default REPO_NAME).
18 | * *changeId:* Pull request ID number (by default CHANGE_ID).
19 | * *token:* GitHub token to access to the API (by default [getGithubToken()](#getGithubToken)).
20 |
--------------------------------------------------------------------------------
/vars/githubPrComment.txt:
--------------------------------------------------------------------------------
1 | Add a comment or edit an existing comment in the GitHub.
2 |
3 | ```
4 | // Use default message
5 | githubPrComment()
6 |
7 | // Use default message and append the details to the message.
8 | githubPrComment(details: "${env.BUILD_URL}artifact/docs.txt")
9 |
10 | // Overrides the default message with 'foo bar'
11 | githubPrComment(message: 'foo bar')
12 | ```
13 |
14 | _NOTE_: Editing an existing comment requires these environment variables: `CHANGE_ID`
15 |
16 |
17 | Arguments:
18 |
19 | * details: URL of the details report to be reported as a comment. Default ''
20 | * commentFile: the file that will store the comment id. Default 'comment.id'
21 | * message: message to be used rather than the default message. Optional
22 |
23 | [Pipeline GitHub plugin](https://plugins.jenkins.io/pipeline-github)
24 |
--------------------------------------------------------------------------------
/vars/githubPrExists.txt:
--------------------------------------------------------------------------------
1 | Search if there are any Pull Request that matches the given
2 | Pull Request details.
3 |
4 | ```
5 | whenTrue(githubPrExists(title: 'my-title')) {
6 | echo "I'm a Pull Request"
7 | }
8 | ```
9 |
10 | * *labels*: Filter by labels. Optional
11 | * *title*: Filter by title (contains format). Mandatory
12 | * *state*: Filter by state {open|closed|merged|all} (default "open"). Optional
13 |
14 | NOTE: It uses `githubPullRequests`
15 |
--------------------------------------------------------------------------------
/vars/githubPrInfo.txt:
--------------------------------------------------------------------------------
1 | Get the Pull Request details from the Github REST API.
2 |
3 | ```
4 | def pr = githubPrInfo(token: token, repo: 'org/repo', pr: env.CHANGE_ID)
5 | ```
6 |
7 | * token: Github access token.
8 | * repo: String composed by the organization and the repository name ('org/repo').
9 | * pr: Pull Request number.
10 |
11 | [Github API call](https://developer.github.com/v3/pulls/#get-a-single-pull-request)
12 |
--------------------------------------------------------------------------------
/vars/githubPrLabels.txt:
--------------------------------------------------------------------------------
1 | If the current build is a PR, it would return the list of labels that
2 | are assigned to the PR.
3 |
4 | ```
5 | def labels = githubPrLabels()
6 | ```
7 |
8 | NOTE: `ORG_NAME` and `REPO_NAME` environment variables are required, so `gitHubEnv` step is the one in charge
9 |
--------------------------------------------------------------------------------
/vars/githubPrLatestComment.txt:
--------------------------------------------------------------------------------
1 | Search in the current Pull Request context the latest comment from the given list of
2 | users and pattern to match with.
3 |
4 | ```
5 | // Return the comment that matches the pattern '' and the owner of the comment is
6 | // elasticmachine
7 | githubPrLatestComment(pattern: '', users: [ 'elasticmachine' ])
8 | ```
9 |
10 | Arguments:
11 |
12 | * pattern: what's the pattern to be matched in the comments with. Mandatory.
13 | * users: the list of users that create the comment to be filtered with. Mandatory.
14 |
15 | _NOTE_: Editing an existing comment requires these environment variables: `ORG_NAME`, `REPO_NAME` and `CHANGE_ID`
16 |
--------------------------------------------------------------------------------
/vars/githubPrReviews.txt:
--------------------------------------------------------------------------------
1 | Get the Pull Request reviews from the Github REST API.
2 |
3 | ```
4 | def pr = githubPrReviews(token: token, repo: 'org/repo', pr: env.CHANGE_ID)
5 | ```
6 |
7 | * token: Github access token.
8 | * repo: String composed by the organization and the repository name ('org/repo').
9 | * pr: Pull Request number.
10 |
11 | [Github API call](https://developer.github.com/v3/pulls/reviews/#list-reviews-on-a-pull-request)
12 |
--------------------------------------------------------------------------------
/vars/githubPullRequests.txt:
--------------------------------------------------------------------------------
1 | Look for the GitHub Pull Requests in the current project given the labels to be
2 | filtered with. It returns a dictionary with the Pull Request id as primary key and
3 | then the title and branch values.
4 |
5 | ```
6 | // Look for all the open GitHub pull requests with titleContains: foo and
7 | // the foo and bar labels
8 | githubPullRequests(labels: [ 'foo', 'bar' ], titleContains: 'foo')
9 | ```
10 |
11 | * *labels*: Filter by labels. Optional
12 | * *titleContains*: Filter by title (contains format). Optional
13 | * *state*: Filter by state: {open|closed|merged|all}. Optional. Default "open"
14 | * *limit*: Maximum number of items to fetch. Optional. Default 200
15 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7
16 |
--------------------------------------------------------------------------------
/vars/githubRepoGetUserPermission.txt:
--------------------------------------------------------------------------------
1 | Get a user's permission level on a Github repo.
2 |
3 | ```
4 | githubRepoGetUserPermission(token: token, repo: 'org/repo', user: 'username')
5 | ```
6 | * token: Github access token.
7 | * repo: String composed by the organization and the repository name ('org/repo').
8 | * user: Github username.
9 |
10 | [Github API call](https://developer.github.com/v3/repos/collaborators/#review-a-users-permission-level)
11 |
--------------------------------------------------------------------------------
/vars/githubTraditionalPrComment.txt:
--------------------------------------------------------------------------------
1 | Add a comment or edit an existing comment in the GitHub Pull Request
2 | using the GitHub API.
3 |
4 | ```
5 | // create a new comment
6 | githubTraditionalPrComment(message: 'foo bar')
7 |
8 | // edit an existing comment
9 | githubTraditionalPrComment(message: 'foo bar', id: 12323)
10 | ```
11 |
12 | Arguments:
13 |
14 | * message: the comment message to be added or edited. Mandatory
15 | * id: the comment id to be edited. Optional
16 |
17 | _NOTE_: Editing an existing comment requires these environment variables:
18 | - `CHANGE_ID`
19 | - `ORG_NAME`
20 | - `REPO_NAME`
21 |
--------------------------------------------------------------------------------
/vars/goDefaultVersion.txt:
--------------------------------------------------------------------------------
1 |
2 | Return the value of the variable GO_VERSION, the value in the file `.go-version`, or a default value
3 |
4 | ```
5 | goDefaultVersion()
6 | ```
7 |
--------------------------------------------------------------------------------
/vars/goTestJUnit.txt:
--------------------------------------------------------------------------------
1 | Run Go unit tests and generate a JUnit report.
2 |
3 | ```
4 | goTestJUnit(options: '-v ./...', output: 'build/junit-report.xml')
5 | ```
6 |
7 | * *options:* Arguments used for `go test` see [gotestsum](https://pkg.go.dev/gotest.tools/gotestsum)
8 | * *output:* file path and name for the JUnit report output.
9 | * *version:* Go version to install, see [withgoenv](#withgoenv)
10 |
11 | ```
12 | pipeline {
13 | agent { label 'ubuntu' }
14 |
15 | stages {
16 | stage('GoTestJUnit') {
17 | steps {
18 | dir('src'){
19 | git 'https://github.com/elastic/ecs-logging-go-zap.git'
20 | goTestJUnit(options: '-v ./...', output: 'junit-report.xml', version: '1.14.2')
21 | }
22 | }
23 | post{
24 | cleanup{
25 | junit(testResults: 'src/junit-report.xml', allowEmptyResults: true)
26 | }
27 | }
28 | }
29 | }
30 | }
31 | ```
32 |
--------------------------------------------------------------------------------
/vars/goVersion.txt:
--------------------------------------------------------------------------------
1 | This step helps to query what golang versions have been released.
2 |
3 | ```
4 |
5 | // Get the latest stable release
6 | def latestGoRelease = goVersion(action: 'latest', unstable: false)
7 |
8 | // Get the latest release
9 | def latestGoVersion = goVersion(action: 'latest', unstable: true)
10 |
11 | // Get all the latest releases for the go1.15
12 | def latestGo115Releases = goVersion(action: 'versions', unstable: false, glob: '1.15')
13 | ```
14 |
15 | * action: What's the action to be triggered. Mandatory
16 | * glob: What's the filter, glob format, to be applied to the list of versions. Optional. Default 'none'
17 | * unstable: Whether to list the rc/beta releases. Optional. Default false.
18 |
--------------------------------------------------------------------------------
/vars/googleStorageUploadExt.txt:
--------------------------------------------------------------------------------
1 | Upload the given pattern files to the given bucket.
2 |
3 | ```
4 | // Copy file.txt into the bucket
5 | googleStorageUploadExt(pattern: 'file.txt', bucket: 'gs://bucket/folder/', credentialsId: 'foo', sharedPublicly: false)
6 |
7 | ```
8 |
9 | * bucket: The Google Storage bucket format gs://bucket/folder/subfolder/. Mandatory
10 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default to `JOB_GCS_CREDENTIALS`
11 | * pattern: The file to pattern to search and copy. Mandatory.
12 | * sharedPublicly: Whether to share those objects publicly. Optional. Default false.
13 | * extraFlags: Extra flags to use with gsutil cp. Optional
14 |
--------------------------------------------------------------------------------
/vars/gsutil.txt:
--------------------------------------------------------------------------------
1 | Wrapper to interact with the gsutil command line. It returns the stdout output.
2 |
3 | ```
4 | // Copy file.txt into the bucket using the Jenkins credentials
5 | gsutil(command: 'cp file.txt gs://bucket/folder/', credentialsId: 'foo')
6 |
7 | // Copy file.txt into the bucket using Vault
8 | gsutil(command: 'cp file.txt gs://bucket/folder/', secret: 'foo')
9 | ```
10 |
11 | * command: The gsutil command to be executed. Mandatory
12 | * credentialsId: The credentials to login to GCP. (Optional). See [withGCPEnv](#withgcpenv)
13 | * secret: Name of the secret on the vault root path. (Optional). See [withGCPEnv](#withgcpenv)
14 |
--------------------------------------------------------------------------------
/vars/hasCommentAuthorWritePermissions.txt:
--------------------------------------------------------------------------------
1 |
2 | Check if the author of a GitHub comment has admin or write permissions in the repository.
3 |
4 | ```
5 | if(!hasCommentAuthorWritePermissions(repoName: "elastic/kibana", commentId: env.GT_COMMENT_ID)){
6 | error("Only Elasticians can deploy Docker images")
7 | }
8 | ```
9 |
10 | * *repoName:* organization and name of the repository (Organization/Repository)
11 | * *commentId:* ID of the comment we want to check.
12 |
--------------------------------------------------------------------------------
/vars/httpRequest.txt:
--------------------------------------------------------------------------------
1 | Step to make HTTP request and get the result.
2 | If the return code is >= 400, it would throw an error.
3 |
4 | ```
5 | def body = httpRequest(url: "https://www.google.com")
6 | ```
7 |
8 | ```
9 | def body = httpRequest(url: "https://www.google.com", method: "GET", headers: ["User-Agent": "dummy"])
10 | ```
11 |
12 | ```
13 | def body = httpRequest(url: "https://duckduckgo.com", method: "POST", headers: ["User-Agent": "dummy"], data: "q=value&other=value")
14 | ```
15 |
16 | To return the response code instead of the body:
17 | ```
18 | def response_code = httpRequest(url: "https://www.google.com", response_code_only: true)
19 | ```
20 |
--------------------------------------------------------------------------------
/vars/is32.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a 32 bits using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is32()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/is32arm.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is an arm 32 bits based using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is32arm()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/is32x86.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a x86 32 bits using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is32x86()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/is64.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a 64 bits using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is64()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/is64arm.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is an arm 64 bits based using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is64arm()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/is64x86.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a x86 64 bits using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(is64x86()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isArm.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is an arm based using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(isArm()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isBeforeGo1_16.txt:
--------------------------------------------------------------------------------
1 | Whether the given Golang version is before 1.16.
2 |
3 | ```
4 | whenTrue(isBeforeGo1_16(version: '1.17')) {
5 | ...
6 | }
7 | ```
8 |
9 | * version: Go version to install, if it is not set, it'll use GO_VERSION env var or [default version](#goDefaultVersion)
10 |
--------------------------------------------------------------------------------
/vars/isBranch.txt:
--------------------------------------------------------------------------------
1 | Whether the build is based on a Branch or not
2 |
3 | ```
4 | // Assign to a variable
5 | def branch = isBranch()
6 |
7 | // Use whenTrue condition
8 | whenTrue(isBranch()) {
9 | echo "I'm a Branch"
10 | }
11 | ```
12 |
--------------------------------------------------------------------------------
/vars/isBranchIndexTrigger.txt:
--------------------------------------------------------------------------------
1 | Check if the build was triggered by a Branch index.
2 |
3 | ```
4 | def branchIndexTrigger = isBranchIndexTrigger()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/isBranchUnifiedReleaseAvailable.txt:
--------------------------------------------------------------------------------
1 | Whether the given branch is an active branch in the unified release
2 |
3 | ```
4 | whenTrue(isBranchUnifiedReleaseAvailable('main')) {
5 | //
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isBuildFailure.txt:
--------------------------------------------------------------------------------
1 |
2 | Return true if the build status is FAILURE or UNSTABLE
3 | The status of the build changes when a stage ends,
4 | This means that the `isBuildFailure` step will not return the status of the build after the current stage;
5 | it returns the status of the build after the previous stage.
6 | If you use this step in `post` stages the result is accurate,
7 | but in these cases it is better to use the [post stages](https://www.jenkins.io/doc/book/pipeline/syntax/#post)
8 |
9 | ```
10 | if(isBuildFailure()){
11 | echo("The build failed")
12 | }
13 | ```
14 |
--------------------------------------------------------------------------------
/vars/isCommentTrigger.txt:
--------------------------------------------------------------------------------
1 | Check if the build was triggered by a comment in GitHub and the user is an Elastic user.
2 | It stores the comment owner username in the GITHUB_COMMENT_AUTHOR environment variable and the
3 | comment itself in the GITHUB_COMMENT environment variable.
4 |
5 | ```
6 | def commentTrigger = isCommentTrigger()
7 | ```
8 |
9 | It requires [Github Pipeline plugin](https://plugins.jenkins.io/pipeline-github/) (>2.5)
10 |
11 | * *author:* GitHub comment author (by default `env.GITHUB_COMMENT_AUTHOR`).
12 | * *comment:* GitHub comment (by default `env.GITHUB_COMMENT`).
13 | * *repository*: The GitHub repository (by default `env.REPO_NAME`).
14 | * *org*: the GitHub organisation (by default `elastic`).
15 |
--------------------------------------------------------------------------------
/vars/isDarwin.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a Darwin based using the `nodeOS` step
2 |
3 | ```
4 | whenTrue(isDarwin()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isEmpty.txt:
--------------------------------------------------------------------------------
1 | If the given value is empty or null
2 |
3 | ```
4 | whenTrue(isEmpty("")) {
5 | //
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isInstalled.txt:
--------------------------------------------------------------------------------
1 | Whether the given tool is installed and available. It also supports specifying the version
2 | for the validation.
3 |
4 | ```
5 | // if docker is installed, the validation uses docker --version
6 | whenTrue(isInstalled(tool: 'docker', flag: '--version')) {
7 | // ...
8 | }
9 |
10 | // if 7zip is installed, the validation uses 7z
11 | whenTrue(isInstalled(tool: '7z')) {
12 | // ...
13 | }
14 | ```
15 |
16 | * tool: The name of the tool to check whether it is installed and available. Mandatory.
17 | * flag: The flag to be added to the validation. For instance `--version`. Optional.
18 | * version: The version of the tool to check with. Optional.
19 |
--------------------------------------------------------------------------------
/vars/isInternalCI.txt:
--------------------------------------------------------------------------------
1 | Whether the CI instance is the internal-ci one.
2 |
3 | ```
4 | whenTrue(isInternalCI()) {
5 | //
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isMemberOf.txt:
--------------------------------------------------------------------------------
1 | Check if the given GitHub user is member of the given GitHub team.
2 |
3 | ```
4 | whenTrue(isMemberOf(user: 'my-user', team: 'my-team')) {
5 | //...
6 | }
7 |
8 | whenTrue(isMemberOf(user: 'my-user', team: ['my-team', 'another-team'])) {
9 | //...
10 | }
11 |
12 | // using another organisation
13 | whenTrue(isMemberOf(user: 'my-user', team: 'my-team', org: 'acme')) {
14 | //...
15 | }
16 |
17 | ```
18 |
19 | * user: the GitHub user. Mandatory
20 | * team: the GitHub team or list of GitHub teams. Mandatory
21 | * org: the GitHub organisation. Optional. Default: 'elastic'
22 |
--------------------------------------------------------------------------------
/vars/isMemberOfOrg.txt:
--------------------------------------------------------------------------------
1 | Check if the given GitHub user is member of the given GitHub org.
2 |
3 | ```
4 | whenTrue(isMemberOfOrg(user: 'my-user')) {
5 | //...
6 | }
7 |
8 | whenTrue(isMemberOfOrg(user: 'my-user')) {
9 | //...
10 | }
11 |
12 | // using another organisation
13 | whenTrue(isMemberOfOrg(user: 'my-user', org: 'acme')) {
14 | //...
15 | }
16 |
17 | ```
18 |
19 | * *user*: the GitHub user. Mandatory
20 | * *org*: the GitHub organisation. Optional. Default: 'elastic'
21 |
--------------------------------------------------------------------------------
/vars/isPR.txt:
--------------------------------------------------------------------------------
1 | Whether the build is based on a Pull Request or not
2 |
3 | ```
4 | // Assign to a variable
5 | def pr = isPR()
6 |
7 | // Use whenTrue condition
8 | whenTrue(isPR()) {
9 | echo "I'm a Pull Request"
10 | }
11 | ```
12 |
--------------------------------------------------------------------------------
/vars/isPluginInstalled.txt:
--------------------------------------------------------------------------------
1 | Given the pluginName it validates whether it's installed and available.
2 |
3 | ```
4 | whenTrue(isPluginInstalled(pluginName: 'foo')) {
5 | echo "Foo plugin is installed"
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/isStaticWorker.txt:
--------------------------------------------------------------------------------
1 | Whether the existing worker is a static one
2 |
3 | ```
4 | // Assign to a variable
5 | def isStatic = isStaticWorker(labels: 'linux&&immutable')
6 |
7 | // Use whenTrue condition
8 | whenTrue(isStaticWorker(labels: 'linux&&immutable')) {
9 | echo "I'm a static worker"
10 | }
11 | ```
12 |
13 | TODO: as soon as macOS workers are ephemerals then we need to change this method
14 |
--------------------------------------------------------------------------------
/vars/isTag.txt:
--------------------------------------------------------------------------------
1 | Whether the build is based on a Tag or not
2 |
3 | ```
4 | // Assign to a variable
5 | def tag = isTag()
6 |
7 | // Use whenTrue condition
8 | whenTrue(isTag()) {
9 | echo "I'm a Tag"
10 | }
11 | ```
12 |
--------------------------------------------------------------------------------
/vars/isTimerTrigger.txt:
--------------------------------------------------------------------------------
1 | Check if the build was triggered by a timer (scheduled job).
2 |
3 | ```
4 | def timerTrigger = isTimerTrigger()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/isUpstreamTrigger.txt:
--------------------------------------------------------------------------------
1 | Check if the build was triggered by an upstream job, being it possible to add some filters.
2 |
3 | ```
4 | def upstreamTrigger = isUpstreamTrigger()
5 | def upstreamTrigger = isUpstreamTrigger(filter: 'PR-')
6 | ```
7 |
8 | * filter: The string filter to be used when selecting the upstream build cause. If no filter is set, then 'all' will be used.
9 |
--------------------------------------------------------------------------------
/vars/isUserTrigger.txt:
--------------------------------------------------------------------------------
1 | Check if the build was triggered by a user.
2 | It stores the username in the BUILD_CAUSE_USER environment variable.
3 |
4 | ```
5 | def userTrigger = isUserTrigger()
6 | ```
7 |
--------------------------------------------------------------------------------
/vars/isX86.txt:
--------------------------------------------------------------------------------
1 | Whether the architecture is a x86 based using the `nodeArch` step
2 |
3 | ```
4 | whenTrue(isX86()) {
5 | ...
6 | }
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/listGithubReleases.txt:
--------------------------------------------------------------------------------
1 | List the GitHub releases in the current project. It returns
2 | a dictionary with the release id as primary key and then the whole information.
3 |
4 | ```
5 | listGithubReleases()
6 | ```
7 |
8 | * credentialsId: The credentials to access the repo (repo permissions). Optional. Default: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7
9 | * failNever: whether to fail the step in case on any failures when interacting with the GH cli tool. Default true.
10 |
--------------------------------------------------------------------------------
/vars/log.txt:
--------------------------------------------------------------------------------
1 | Allows printing messages with different levels of verbosity. It will show all messages whose
2 | log level is equal to or higher than the defined one.
3 | You can define the environment variable PIPELINE_LOG_LEVEL to select
4 | the log level; the default level is INFO.
5 |
6 | Levels: DEBUG, INFO, WARN, ERROR
7 |
8 | ```
9 | log(level: 'INFO', text: 'message')
10 | ```
11 |
12 | * `level`: sets the verbosity of the messages (DEBUG, INFO, WARN, ERROR)
13 | * `text`: Message to print. The color of the messages depends on the level.
14 |
--------------------------------------------------------------------------------
/vars/matchesPrLabel.txt:
--------------------------------------------------------------------------------
1 | If the current build is a PR, it would return true if the given label
2 | matches with the list of assigned labels in the PR.
3 |
4 | ```
5 | whenTrue(matchesPrLabel(label: 'foo')) {
6 | ...
7 | }
8 | ```
9 |
10 | NOTE: `ORG_NAME` and `REPO_NAME` environment variables are required, so `gitHubEnv` step is the one in charge
11 |
--------------------------------------------------------------------------------
/vars/mvnVersion.txt:
--------------------------------------------------------------------------------
1 | Get a project version from Maven
2 |
3 | ```
4 | mvnVersion(
5 | showQualifiers: true
6 | )
7 | ```
8 | * showQualifiers: Show any non-numerical text that may be present after MAJOR.MINOR.PATCH,
9 | such as additional labels for pre-release or build metadata. Specifically,
10 | this means the IncrementalVersion, BuildNumber, and Qualifier sections from
11 | the Maven version as specified in the Maven versioning guide.
12 |
13 | This script should be run from the root of a Maven-based project.
14 |
15 | [Maven versioning guide](https://docs.oracle.com/middleware/1212/core/MAVEN/maven_version.htm)
16 | [Semantic Versioning Specification](https://semver.org/)
17 |
--------------------------------------------------------------------------------
/vars/nexusCloseStagingRepository.txt:
--------------------------------------------------------------------------------
1 | Close a Nexus staging repository
2 |
3 | ```
4 | nexusCreateStagingRepository(
5 | url: "https://oss.sonatype.org",
6 | stagingProfileId: "comexampleapplication-1010",
7 | stagingId: "staging_id",
8 | secret: secret/release/nexus,
9 | role_id: apm-vault-role-id,
10 | secret_id: apm-vault-secret-id
11 | )
12 | ```
13 |
14 | * url: The URL to the repository. Usually https://oss.sonatype.org
15 | * stagingProfileId: Identifier for the staging profile
16 | * stagingId: Identifier for staging
17 | * secret: Vault secret (Optional)
18 | * role_id: vault role ID (Optional)
19 | * secret_id: vault secret ID (Optional)
20 |
21 |
22 | [Nexus staging documentation](https://help.sonatype.com/repomanager2/staging-releases)
23 | [Nexus OSSRH](https://oss.sonatype.org)
24 |
--------------------------------------------------------------------------------
/vars/nexusCreateStagingRepository.txt:
--------------------------------------------------------------------------------
1 | Create a Nexus staging repository
2 |
3 | ```
4 | nexusCreateStagingRepository(
5 | stagingProfileId: my_profile,
6 | description: "My new staging repo",
7 | url: https://oss.sonatype.org,
8 | retries: 20,
9 | secret: secret/release/nexus,
10 | role_id: apm-vault-role-id,
11 | secret_id: apm-vault-secret-id
12 | ```
13 |
14 | * stagingProfileId: The staging identifier to use when creating the repository
15 | * description: A description of the new staging repository
16 | * url: Nexus URL (default: https://oss.sonatype.org)
17 | * retries: Number of times to retry the remote API before giving up
18 | * secret: Vault secret (Optional)
19 | * role_id: vault role ID (Optional)
20 | * secret_id: vault secret ID (Optional)
21 |
22 |
23 | [Nexus staging documentation](https://help.sonatype.com/repomanager2/staging-releases)
24 | [Nexus OSSRH](https://oss.sonatype.org)
25 |
--------------------------------------------------------------------------------
/vars/nexusDropStagingRepository.txt:
--------------------------------------------------------------------------------
1 | Drop a Nexus staging repository
2 | ```
3 | nexusDropStagingRepository(
4 | url: "https://oss.sonatype.org",
5 | stagingProfileId: "comexampleapplication-1010",
6 | stagingId: "staging_id",
7 | secret: secret/release/nexus,
8 | role_id: apm-vault-role-id,
9 | secret_id: apm-vault-secret-id
10 | )
11 | ```
12 |
13 | * url: The URL to the repository. Usually https://oss.sonatype.org
14 | * stagingProfileId: Identifier for the staging profile
15 | * stagingId: Identifier for staging
16 | * secret: Vault secret (Optional)
17 | * role_id: vault role ID (Optional)
18 | * secret_id: vault secret ID (Optional)
19 |
20 |
21 | [Nexus staging documentation](https://help.sonatype.com/repomanager2/staging-releases)
22 | [Nexus OSSRH](https://oss.sonatype.org)
23 |
--------------------------------------------------------------------------------
/vars/nexusFindStagingId.txt:
--------------------------------------------------------------------------------
1 | Find a Nexus staging repository
2 |
3 | ```
4 | nexusFindStagingRepository(
5 | url: "https://oss.sonatype.org",
6 | stagingProfileId: "comexampleapplication-1010",
7 | groupId: "co.elastic.apm",
8 | secret: 'secret/release/nexus',
9 | role_id: apm-vault-role-id,
10 | secret_id: apm-vault-secret-id
11 | )
12 | ```
13 |
14 | * url: The URL to the repository. Usually https://oss.sonatype.org
15 | * stagingProfileId: Identifier for the staging profile
16 | * groupId: Our group id
17 | * secret: Vault secret (Optional)
18 | * role_id: vault role ID (Optional)
19 | * secret_id: vault secret ID (Optional)
20 |
21 |
22 | [Nexus staging documentation](https://help.sonatype.com/repomanager2/staging-releases)
23 | [Nexus OSSRH](https://oss.sonatype.org)
24 |
--------------------------------------------------------------------------------
/vars/nexusReleaseStagingRepository.txt:
--------------------------------------------------------------------------------
1 | Release a Nexus staging repository
2 |
3 | ```
4 | nexusReleaseStagingRepository(
5 | url: "https://oss.sonatype.org",
6 | stagingProfileId: "comexampleapplication-1010",
7 | stagingId: "co.elastic.foo",
8 | secret: secret/release/nexus,
9 | role_id: apm-vault-role-id,
10 | secret_id: apm-vault-secret-id
11 | )
12 | ```
12 |
13 | * url: The URL to the repository. Usually https://oss.sonatype.org
14 | * stagingProfileId: Identifier for the staging profile
15 | * stagingId: Identifier of staging repository
16 | * secret: Vault secret (Optional)
17 | * role_id: vault role ID (Optional)
18 | * secret_id: vault secret ID (Optional)
19 |
20 |
21 | [Nexus staging documentation](https://help.sonatype.com/repomanager2/staging-releases)
22 | [Nexus OSSRH](https://oss.sonatype.org)
23 |
--------------------------------------------------------------------------------
/vars/nodeArch.txt:
--------------------------------------------------------------------------------
1 | Return the architecture in the current worker using the labels as the source of truth
2 |
3 | ```
4 | def arch = nodeArch()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/nodeJSDefaultVersion.txt:
--------------------------------------------------------------------------------
1 | Return the value in the file `.nvmrc`, or a default value.
2 |
3 | ```
4 | nodeJSDefaultVersion()
5 | ```
6 |
--------------------------------------------------------------------------------
/vars/nodeOS.txt:
--------------------------------------------------------------------------------
1 | Return the name of the Operating system based on the labels of the Node [linux, windows, darwin].
2 |
3 | NOTE: arm architecture is linux.
4 |
5 | ```
6 | def os = nodeOS()
7 | ```
8 |
--------------------------------------------------------------------------------
/vars/notifyStalledBeatsBumps.txt:
--------------------------------------------------------------------------------
1 | Evaluate if the latest bump update was merged a few days ago and if so
2 | send an email if configured for such an action.
3 |
4 | ```
5 | notifyStalledBeatsBumps(branch: '8.0', sendEmail: true, to: 'foo@acme.com')
6 | ```
7 |
8 | * *sendEmail*: whether to send an email. Optional. Default false
9 | * *to*: who should receive the email. Optional.
10 | * *subject*: what's the email subject. Optional. Default: `[Autogenerated]`
11 | * *branch*: what branch to be searched for. Mandatory.
12 | * *days*: search for any changes before those days. Optional. Default 7
13 |
--------------------------------------------------------------------------------
/vars/obltGitHubComments.txt:
--------------------------------------------------------------------------------
1 | The list of GitHub comments supported to be used in conjunction with the
2 | `triggers { issueCommentTrigger ... }` in order to trigger builds based on
3 | the given GitHub comments.
4 |
5 | ```
6 | pipeline {
7 | ...
8 | triggers {
9 | issueCommentTrigger("(${obltGitHubComments()}|/run benchmark tests)")
10 | }
11 | }
12 | ```
13 |
--------------------------------------------------------------------------------
/vars/opbeansPipeline.txt:
--------------------------------------------------------------------------------
1 | Opbeans Pipeline
2 |
3 | ```
4 | opbeansPipeline()
5 | opbeansPipeline(downstreamJobs: ['job1', 'folder/job1', 'mbp/PR-1'])
6 | ```
7 |
8 | * downstreamJobs: What downstream pipelines should be triggered once the release has been done. Default: []
9 |
--------------------------------------------------------------------------------
/vars/otelHelper.txt:
--------------------------------------------------------------------------------
1 | Helper method to interact with the OpenTelemetry Jenkins plugin
2 |
3 | ```
4 | withOtelEnv() {
5 | // block
6 | }
7 |
8 | // If you'd like to use a different credentials
9 | withOtelEnv(credentialsId: 'foo') {
10 | // block
11 | }
12 | ```
13 |
14 | **NOTE**: It requires the [OpenTelemetry plugin](https://plugins.jenkins.io/opentelemetry)
15 |
--------------------------------------------------------------------------------
/vars/pipelineManager.txt:
--------------------------------------------------------------------------------
1 | This step adds certain validations which might be required to be done once per build; for that
2 | purpose it uses other steps.
3 |
4 | ```
5 | pipelineManager([ cancelPreviousRunningBuilds: [ when: 'PR', params: [ maxBuildsToSearch: 5 ] ],
6 | firstTimeContributor: [ when: 'ALWAYS' ] ])
7 | ```
8 |
9 | * key: the name of the step.
10 | * key.value('when'): what condition should be evaluated to run the above step. Default 'always'. Possible values: 'PR', 'BRANCH', 'TAG' and 'ALWAYS'
11 | * key.value('params'): the arguments that the step can have.
12 |
--------------------------------------------------------------------------------
/vars/preCommitToJunit.txt:
--------------------------------------------------------------------------------
1 | Parse the pre-commit log file and generates a junit report
2 |
3 | ```
4 | preCommitToJunit(input: 'pre-commit.log', output: 'pre-commit-junit.xml')
5 | ```
6 |
7 | * input: the pre-commit output. Mandatory
8 | * output: the junit output. Mandatory
9 | * enableSkipped: whether to report skipped linting stages. Optional. Default false
10 |
--------------------------------------------------------------------------------
/vars/prompt.txt:
--------------------------------------------------------------------------------
1 | Wrapper to request an input approval and wait for the outcome
2 | It returns true or false
3 |
4 | ```
5 | stage('Approve to Release') {
6 | steps {
7 | setEnvVar('RELEASE', prompt(message: "You are about to release version ${env.TAG_NAME}. Do you wish to release it?"))
8 | }
9 | }
10 | stage('Release') {
11 | when {
12 | expression { return env.RELEASE == 'true' }
13 | }
14 | ...
15 |
16 | ```
17 |
--------------------------------------------------------------------------------
/vars/randomNumber.txt:
--------------------------------------------------------------------------------
1 | It generates a random number; by default the number is between 1 and 100.
2 |
3 | ```
4 | def i = randomNumber()
5 | ```
6 |
7 | ```
8 | def i = randomNumber(min: 1, max: 99)
9 | ```
10 |
--------------------------------------------------------------------------------
/vars/randomString.txt:
--------------------------------------------------------------------------------
1 | Generate a random string (alphanumeric and dash are allowed, but it will not end with a dash)
2 |
3 | ```
4 | // Create a random string of 15 chars (alphanumeric)
5 | def value = randomString(size: 15)
6 | ```
7 |
8 | * size: the random string size.
9 |
--------------------------------------------------------------------------------
/vars/releaseManagerAnalyser.txt:
--------------------------------------------------------------------------------
1 | Given the release manager output then it analyses the failure if any, and returns
2 | the digested output to the end user.
3 |
4 | ```
5 | // analyse the release manager build output
6 | def output = releaseManagerAnalyser(file: 'release-manager.out')
7 |
8 | ```
9 |
10 | * file: the file with the release manager output. Mandatory.
11 |
--------------------------------------------------------------------------------
/vars/retryWithSleep.txt:
--------------------------------------------------------------------------------
1 | Retry a command for a specified number of times until the command exits successfully.
2 |
3 | ```
4 | retryWithSleep(retries: 2) {
5 | //
6 | }
7 |
8 | // Retry up to 3 times with a 5 seconds wait period
9 | retryWithSleep(retries: 3, seconds: 5, backoff: true) {
10 | //
11 | }
12 |
13 | // Retry up to 3 times and on each retry, execute a closure
14 | def myEffect = { echo 'Side effect!' }
15 | retryWithSleep(retries: 3, sideEffect: myEffect) {
16 |   //
17 | }
18 |
19 | ```
20 |
21 | * retries: the number of retries. Mandatory
22 | * seconds: the seconds to wait for. Optional. Default 10.
23 | * backoff: whether the wait period backs off after each retry. Optional. Default false
24 | * sleepFirst: whether to sleep before running the command. Optional. Default false
25 | * sideEffect: A closure to run after every retry
26 |
--------------------------------------------------------------------------------
/vars/rubygemsLogin.txt:
--------------------------------------------------------------------------------
1 | Login to Rubygems.com with authentication credentials from a Vault secret.
2 | The vault secret contains `user` and `password` fields with the authentication details. Or, if using `withApi`,
3 | the vault secret must contain an `apiKey` field.
4 |
5 | ```
6 | rubygemsLogin(secret: 'secret/team/ci/secret-name') {
7 | sh 'gem push x.y.z'
8 | }
9 |
10 | rubygemsLogin.withApi(secret: 'secret/team/ci/secret-name') {
11 | sh 'gem push x.y.z'
12 | }
13 | ```
14 |
15 | * secret: Vault secret where the user, password or apiKey are stored.
16 |
--------------------------------------------------------------------------------
/vars/sendDataToElasticsearch.txt:
--------------------------------------------------------------------------------
1 | Send the JSON report file to Elasticsearch. It returns the response body.
2 |
3 | ```
4 | def body = sendDataToElasticsearch(es: "https://ecs.example.com:9200", secret: "secret", data: '{"field": "value"}')
5 | ```
6 |
7 | ```
8 | def body = sendDataToElasticsearch(es: "https://ecs.example.com:9200",
9 | secret: "secret",
10 | data: '{"field": "value"}',
11 | restCall: '/jenkins-builds/_doc/',
12 | contentType: 'application/json',
13 | method: 'POST')
14 | ```
15 |
16 | * es: URL to Elasticsearch service.
17 | * secret: Path to the secret in the Vault, it should have `user` and `password` fields.
18 | * data: JSON data to insert in Elasticsearch.
19 | * restCall: REST call PATH to use, by default `/jenkins-builds/_doc/`
20 | * contentType: Content Type header, by default `application/json`
21 | * method: HTTP method used to send the data, by default `POST`
22 |
--------------------------------------------------------------------------------
/vars/setEnvVar.txt:
--------------------------------------------------------------------------------
1 |
2 | It sets an environment variable with either a string or boolean value as a parameter, it simplifies the declarative syntax.
3 |
4 | ```
5 | // Support string value
6 | setEnvVar('MY_ENV_VAR', 'value')
7 |
8 | // Support boolean value
9 | setEnvVar('MY_ENV_VAR', true)
10 | ```
11 |
12 | it replaces the following code
13 |
14 | ```
15 | script {
16 |   env.MY_ENV_VAR = 'value'
17 | }
18 | ```
19 |
20 | NOTE: It creates a new environment variable, but it is not possible to overwrite
21 | the value of an environment variable defined in a `environment block`
22 | see https://stackoverflow.com/questions/53541489/updating-environment-global-variable-in-jenkins-pipeline-from-the-stage-level
23 |
--------------------------------------------------------------------------------
/vars/setGithubCommitStatus.txt:
--------------------------------------------------------------------------------
1 | Set the commit status on GitHub with a status passed as parameter or SUCCESS by default.
2 |
3 | ```
4 | setGithubCommitStatus(
5 | repoUrl: "${GIT_URL}",
6 | commitSha: "${GIT_COMMIT}",
7 | message: 'Build result.',
8 | state: "SUCCESS"
9 | )
10 | ```
11 |
12 | ```
13 | setGithubCommitStatus()
14 | ```
15 |
16 | ```
17 | setGithubCommitStatus(message: 'Build result.', state: "FAILURE")
18 | ```
19 |
20 | ```
21 | setGithubCommitStatus(message: 'Build result.', state: "UNSTABLE")
22 | ```
23 | * *repoUrl*: Repository URL.
24 | * *commitSha*: Commit SHA1.
25 | * *message*: message to post.
26 | * *state*: Status to report to Github.
27 |
28 | It requires [Github plugin](https://plugins.jenkins.io/github)
29 |
--------------------------------------------------------------------------------
/vars/setupAPMGitEmail.txt:
--------------------------------------------------------------------------------
1 | Configure the git email for the current workspace or globally.
2 |
3 | ```
4 | setupAPMGitEmail()
5 |
6 | // globally
7 | setupAPMGitEmail(global: true)
8 | ```
9 |
10 | * *global*: to configure the user and email account globally. Optional.
11 |
--------------------------------------------------------------------------------
/vars/snapshoty.txt:
--------------------------------------------------------------------------------
1 | Given the bucket and google secrets then run the snapshoty to upload the artifacts to the
2 | google bucket
3 |
4 | ```
5 | snapshoty(bucket: 'my-bucket',
6 | gcsAccountSecret: 'secrets/my-team/my-gcs-secret',
7 | dockerRegistry: 'my-docker-registry',
8 | dockerSecret: 'secrets/my-team/mydocker-secret')
9 | ```
10 |
11 | * *bucket*: The google bucket where to upload the artifacts to. Mandatory
12 | * *gcsAccountSecret*: Vault secret with the Google Cloud Storage service account credentials. Mandatory
13 | * *dockerRegistry*: Registry to login into Docker.
14 | * *dockerSecret*: Vault secret where the user and password are stored.
15 |
16 | **NOTE**: Windows is not supported
17 |
--------------------------------------------------------------------------------
/vars/stackVersions.txt:
--------------------------------------------------------------------------------
1 |
2 | Return the version currently used for testing.
3 |
4 | ```
5 | stackVersions() // [ '8.1.0', '8.0.0', '7.11.0', '7.10.2' ]
6 | stackVersions(snapshot: true) // [ '8.1.0-SNAPSHOT', '8.0.0-SNAPSHOT', '7.11.0-SNAPSHOT', '7.10.2-SNAPSHOT' ]
7 |
8 | stackVersions.edge() // '8.1.0'
9 | stackVersions.dev() // '7.11.0'
10 | stackVersions.release() // '8.0.0'
11 | stackVersions.release7() // '7.10.2'
12 | stackVersions.snapshot('7.11.1') // '7.11.1-SNAPSHOT'
13 | stackVersions.edge(snapshot: true) // '8.1.0-SNAPSHOT'
14 | ```
15 |
--------------------------------------------------------------------------------
/vars/superLinter.txt:
--------------------------------------------------------------------------------
1 | Run the github/super-linter step
2 |
3 | ```
4 | superLinter(envs: [ 'VALIDATE_GO=false' ])
5 | ```
6 |
7 | * *envs*: the list of new env variables to use, format variable=value. Optional
8 | * *failNever*: Never fail the build, regardless of the step result. Optional. Default 'false'
9 | * *dockerImage*: What's the docker image to use. Optional. Default: 'github/super-linter:latest'
10 | * junit: whether to generate the JUnit report. Default: true. Optional
11 |
--------------------------------------------------------------------------------
/vars/tap2Junit.txt:
--------------------------------------------------------------------------------
1 | Transform the TAP to JUnit, for such it uses some parameters
2 | to customise the generated output.
3 |
4 | ```
5 | // Use default setup
6 | tap2Junit()
7 |
8 | // Convert TAP files to JUnit using the suffix junit.xml
9 | tap2Junit(pattern: '*.TAP', suffix: 'junit.xml')
10 | ```
11 |
12 | * *package*: Name of the package in the JUnit report. Default 'co.elastic'.
13 | * *pattern*: What files that are TAP based should be searched. Default '*.tap'.
14 | * *suffix*: The suffix in the JUnit output files. Default 'junit-report.xml'
15 | * *nodeVersion*: What docker image used for transforming the tap to junit. Default 'node:12-alpine'
16 | * *failNever*: Never fail the build, regardless of the step result. Optional. Default 'false'
17 |
--------------------------------------------------------------------------------
/vars/tar.txt:
--------------------------------------------------------------------------------
1 | Compress a folder into a tar file.
2 |
3 | ```
4 | tar(file: 'archive.tgz', archive: true, dir: '.')
5 | ```
6 |
7 | * *file*: Name of the tar file to create.
8 | * *archive*: If true the file will be archive in Jenkins (default true).
9 | * *dir*: The folder to compress (default .), it should not contain the compress file.
10 | * *allowMissing*: whether to report UNSTABLE if tar command failed. Optional. Default 'true'
11 | * *failNever*: Never fail the build, regardless of the step result. Optional. Default 'true'
12 |
--------------------------------------------------------------------------------
/vars/toJSON.txt:
--------------------------------------------------------------------------------
1 | This step converts a JSON string or a POJO to net.sf.json.JSON.
2 | readJSON shows the JSON in the Blue Ocean console output so it cannot be used.
3 | [JENKINS-54248](https://issues.jenkins-ci.org/browse/JENKINS-54248)
4 |
5 | ```
6 | net.sf.json.JSON obj = toJSON("{property: value, property1: value}")
7 | ```
8 |
9 | ```
10 | Person p = new Person();
11 | p.setName("John");
12 | p.setAge(50);
13 | net.sf.json.JSON obj = toJSON(p)
14 | ```
15 |
--------------------------------------------------------------------------------
/vars/untar.txt:
--------------------------------------------------------------------------------
1 | Extract the given tar file in the given folder if any, otherwise in the
2 | current directory.
3 |
4 | ```
5 | untar(file: 'src.tgz', dir: 'src')
6 | ```
7 |
8 | * *file*: Name of the tar file to extract. Optional (default 'archive.tgz').
9 | * *dir*: The folder where the extract will be done to. Optional (default '.').
10 | * *failNever*: Never fail the build, regardless of the step result. Optional (default 'true')
11 |
--------------------------------------------------------------------------------
/vars/updateGithubCommitStatus.txt:
--------------------------------------------------------------------------------
1 | Update the commit status on GitHub with the current status of the build.
2 |
3 | ```
4 | updateGithubCommitStatus(
5 |   repoUrl: "${GIT_URL}",
6 |   commitSha: "${GIT_COMMIT}",
7 |   message: 'Build result.'
8 | )
9 | ```
10 |
11 | ```
12 | updateGithubCommitStatus()
13 | ```
14 |
15 | ```
16 | updateGithubCommitStatus(message: 'Build result.')
17 | ```
18 | * *repoUrl*: "${GIT_URL}"
19 | * *commitSha*: "${GIT_COMMIT}"
20 | * *message*: 'Build result.'
21 |
22 | It requires [Github plugin](https://plugins.jenkins.io/github)
23 |
--------------------------------------------------------------------------------
/vars/whenFalse.txt:
--------------------------------------------------------------------------------
1 | This step replaces those small scripts step blocks to check some condition,
2 | it simplifies Declarative syntax
3 |
4 | ```
5 | whenFalse(variable != 100){
6 | echo('Hello world')
7 | }
8 | ```
9 |
10 | it would replace the following code
11 |
12 | ```
13 | script{
14 | if(variable != 100){
15 | echo('Hello world')
16 | }
17 | }
18 | ```
19 |
--------------------------------------------------------------------------------
/vars/whenTrue.txt:
--------------------------------------------------------------------------------
1 | This step replaces those small scripts step blocks to check some condition,
2 | it simplifies Declarative syntax
3 |
4 | ```
5 | whenTrue(variable == 100){
6 | echo('Hello world')
7 | }
8 | ```
9 |
10 | it would replace the following code
11 |
12 | ```
13 | script{
14 | if(variable == 100){
15 | echo('Hello world')
16 | }
17 | }
18 | ```
19 |
--------------------------------------------------------------------------------
/vars/withAPM.txt:
--------------------------------------------------------------------------------
1 | It encloses a set of commands in a APM reporting context.
2 | This will generate APM data related with the block of code executed.
3 | The parameters accepted by withAPM are the same of [apmCli](#apmcli) step
4 |
5 | ```
6 | withAPM(serviceName: 'apm-traces', transactionName: 'test') {
7 | echo "OK"
8 | }
9 | ```
10 |
--------------------------------------------------------------------------------
/vars/withAPMEnv.txt:
--------------------------------------------------------------------------------
1 | Prepare the context with the ELASTIC_APM_SERVER_URL, ELASTIC_APM_SECRET_TOKEN,
2 | OTEL_EXPORTER_OTLP_ENDPOINT and OTEL_EXPORTER_OTLP_HEADERS environment
3 | variables that are consumed by the body in order to send the data to the APM Server.
4 |
5 | ```
6 | withAPMEnv(secret: 'secrets/my-secret-apm') {
7 | // the command that consumes those env variables.
8 | }
9 | ```
10 |
11 | * secret: vault secret used to interact with the APM server. Default: 'secret/observability-team/ci/jenkins-stats'
12 | * tokenFieldName: the field in the vault secret that contains the APM Server token. Default 'apmServerToken'
13 | * urlFieldName: the field in the vault secret that contains the APM Server URL. Default 'apmServerUrl'
14 |
--------------------------------------------------------------------------------
/vars/withAWSEnv.txt:
--------------------------------------------------------------------------------
1 | Configure the AWS context to run the given body closure. The AWS_PROFILE environment variable
2 | is also configured with the profile to be used.
3 |
4 | ```
5 | withAWSEnv(secret: 'secret/team/ci/service-account/aws-provisioner') {
6 | // block
7 | }
8 | ```
9 |
10 | * version: The aws CLI version to be installed. Optional (2.4.2)
11 | * forceInstallation: Whether to install aws regardless. Optional (false)
12 | * secret: Name of the secret on the vault root path. (Optional).
13 | * role_id: vault role ID (Optional). Default 'vault-role-id'
14 | * secret_id: vault secret ID (Optional). Default 'vault-secret-id'
15 |
--------------------------------------------------------------------------------
/vars/withAzureCredentials.txt:
--------------------------------------------------------------------------------
1 | Wrap the azure credentials
2 |
3 | ```
4 | withAzureCredentials() {
5 | // block
6 | }
7 |
8 | withAzureCredentials(path: '/foo', credentialsFile: '.credentials.json') {
9 | // block
10 | }
11 | ```
12 |
13 | * path: root folder where the credentials file will be stored. (Optional). Default: ${HOME} env variable
14 | * credentialsFile: name of the file with the credentials. (Optional). Default: .credentials.json
15 | * secret: Name of the secret on the vault root path. (Optional). Default: 'secret/apm-team/ci/apm-agent-dotnet-azure'
16 |
--------------------------------------------------------------------------------
/vars/withAzureEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the azure credentials in environment variables to be consumed within the body
2 |
3 | ```
4 | withAzureEnv(secret: 'secret/acme') {
5 | // block
6 | }
7 | ```
8 |
9 | * secret: Name of the secret on the vault root path. (Optional). Default: 'secret/observability-team/ci/service-account/azure-vm-extension'
10 |
--------------------------------------------------------------------------------
/vars/withCloudEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the cloud credentials and entrypoints as environment variables that are masked
2 |
3 | ```
4 | withCloudEnv(cluster: 'test-cluster-azure') {
5 | // block
6 | }
7 | ```
8 |
9 | * cluster: Name of the cluster that was already created. Mandatory
10 |
11 | Environment variables:
12 | * `CLOUD_ID`
13 | * `CLOUD_PASSWORD`
14 | * `CLOUD_USERNAME`
15 |
16 | NOTE: secrets for the test clusters are located in Vault, see `getTestClusterSecret`
17 |
--------------------------------------------------------------------------------
/vars/withClusterEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the credentials and entrypoints as environment variables that are masked
2 | for the Cloud deployments, aka clusters.
3 |
4 | ```
5 | withClusterEnv(cluster: 'test-cluster-azure') {
6 | // block
7 | }
8 | ```
9 |
10 | * cluster: Name of the cluster that was already created. Mandatory
11 | * elasticsearch: Whether to configure the environment variables with the Elasticsearch URL/User/Pass. Optional
12 | * kibana: Whether to configure the environment variables with the Kibana URL. Optional
13 | * fleet: Whether to configure the environment variables with the Fleet URL and secret. Optional
14 |
15 | NOTE: secrets for the test clusters are located in Vault, see `getTestClusterSecret`
16 |
--------------------------------------------------------------------------------
/vars/withDockerEnv.txt:
--------------------------------------------------------------------------------
1 | Configure the Docker context to run the body closure, logging in to hub.docker.com with
2 | authentication credentials from a Vault secret. The vault secret contains `user` and `password`
3 | fields with the authentication details, exposed through the below environment variables:
4 |
5 | * `DOCKER_USER`
6 | * `DOCKER_PASSWORD`
7 |
8 | ```
9 | withDockerEnv() {
10 | // block
11 | }
12 | withDockerEnv(secret: 'secret/team/ci/secret-name') {
13 | // block
14 | }
15 | withDockerEnv(secret: 'secret/team/ci/secret-name', registry: "docker.io") {
16 | // block
17 | }
18 | ```
19 |
--------------------------------------------------------------------------------
/vars/withElasticsearchDeploymentEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the Elasticsearch credentials and entrypoints as environment variables that are masked
2 | for the Elastic Cloud deployment
3 |
4 | ```
5 | withElasticsearchDeploymentEnv(cluster: 'test-cluster-azure') {
6 | // block
7 | }
8 | ```
9 |
10 | * cluster: Name of the cluster that was already created. Mandatory
11 |
12 | Environment variables:
13 | * `ELASTICSEARCH_URL`
14 | * `ELASTICSEARCH_USERNAME`
15 | * `ELASTICSEARCH_PASSWORD`
16 | * `ES_URL` - (deprecated)
17 | * `ES_USERNAME` - (deprecated)
18 | * `ES_PASSWORD` - (deprecated)
19 |
20 | NOTE: secrets for the test clusters are located in Vault, see `getTestClusterSecret`
21 |
--------------------------------------------------------------------------------
/vars/withEsEnv.txt:
--------------------------------------------------------------------------------
1 | Grab a secret from the vault and define some environment variables to access a URL
2 |
3 | the secret must have this format
4 | `{ data: { user: 'username', password: 'user_password'} }`
5 |
6 | The following environment variables will be exported and masked in logs
7 | * `CLOUD_URL`: URL for basic authentication "https://${user}:${password}@${url}"
8 | * `CLOUD_ADDR`: only the URL
9 | * `CLOUD_USERNAME`: username
10 | * `CLOUD_PASSWORD`: user password
11 |
12 | ```
13 | withEsEnv(){
14 | //block
15 | }
16 | ```
17 |
18 | ```
19 | withEsEnv(url: 'https://url.example.com', secret: 'secret-name'){
20 | //block
21 | }
22 | ```
23 |
--------------------------------------------------------------------------------
/vars/withFleetDeploymentEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the Fleet cluster credentials and entrypoints as environment variables that are masked
2 |
3 | ```
4 | withFleetDeploymentEnv(cluster: 'test-cluster-azure') {
5 | // block
6 | }
7 | ```
8 |
9 | * cluster: Name of the cluster that was already created. Mandatory
10 |
11 | Environment variables:
12 | * `FLEET_URL`
13 | * `FLEET_TOKEN`
14 |
15 | NOTE: secrets for the test clusters are located in Vault, see `getTestClusterSecret`
16 |
--------------------------------------------------------------------------------
/vars/withGCPEnv.txt:
--------------------------------------------------------------------------------
1 | Configure the GCP context to run the given body closure
2 |
3 | ```
4 | withGCPEnv(credentialsId: 'foo') {
5 | // block
6 | }
7 |
8 | withGCPEnv(secret: 'secret/team/ci/service-account/gcp-provisioner') {
9 | // block
10 | }
11 | ```
12 |
13 | * credentialsId: The credentials to login to GCP. (Optional).
14 | * secret: Name of the secret on the vault root path (supported fields: credentials and value). (Optional).
15 | * role_id: vault role ID if using the secret argument (Optional). Default 'vault-role-id'
16 | * secret_id: vault secret ID if using the secret argument (Optional). Default 'vault-secret-id'
17 |
--------------------------------------------------------------------------------
/vars/withGhEnv.txt:
--------------------------------------------------------------------------------
1 |
2 | Configure the Gh context to run the given body closure
3 |
4 | withGhEnv(credentialsId: 'foo') {
5 | // block
6 | }
7 |
8 | * credentialsId: the secret type credential ID that contains the GITHUB_TOKEN.
9 |
--------------------------------------------------------------------------------
/vars/withGitRelease.txt:
--------------------------------------------------------------------------------
1 | Configure the git release context to run the body closure.
2 |
3 | ```
4 | withGitRelease() {
5 | // block
6 | }
7 |
8 |
9 | withGitRelease(credentialsId: 'some-credentials') {
10 | // block
11 | }
12 | ```
13 |
14 | * credentialsId: the credentials ID for the git user and token. Default '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken'
15 |
16 |
17 | _NOTE:_
18 | * This particular implementation requires to checkout with the step gitCheckout
19 | * Windows agents are not supported.
20 |
--------------------------------------------------------------------------------
/vars/withGithubNotify.txt:
--------------------------------------------------------------------------------
1 | Wrap the GitHub notify step either for GitHub status check or GitHub check, for such,
2 | it uses the `GITHUB_CHECK` environment variable to enable the GitHub Check.
3 |
4 | ```
5 | withGithubNotify(context: 'Build', description: 'Execute something') {
6 | // block
7 | }
8 |
9 | withGithubNotify(context: 'Test', description: 'UTs', tab: 'tests') {
10 | // block
11 | }
12 |
13 | withGithubNotify(context: 'Release', tab: 'artifacts') {
14 | // block
15 | }
16 | ```
17 |
18 | * context: Name of the GitHub check context. (Mandatory).
19 | * description: Description of the GitHub check. If unset then it will use the context.
20 | * Further parameters are defined in [withGithubCheck](#withGithubCheck) and [withGithubStatus](#withGithubStatus).
21 |
--------------------------------------------------------------------------------
/vars/withGoEnv.txt:
--------------------------------------------------------------------------------
1 | Install Go and run some command in a pre-configured multiplatform environment. For such
2 | it's recommended to use the `cmd` step.
3 |
4 | ```
5 | withGoEnv(version: '1.14.2'){
6 | sh(label: 'Go version', script: 'go version')
7 | }
8 | ```
9 |
10 | ```
11 | withGoEnv(version: '1.14.2', pkgs: [
12 | "github.com/magefile/mage",
13 | "github.com/elastic/go-licenser",
14 | "golang.org/x/tools/cmd/goimports",
15 | ]){
16 |   sh(label: 'Run mage', script: 'mage -version')
17 | }
19 | ```
20 |
21 | * version: Go version to install, if it is not set, it'll use GO_VERSION env var or [default version](#goDefaultVersion)
22 | * pkgs: Go packages to install with Go get before to execute any command.
23 | * os: OS to use. (Example: `linux`). This is an optional argument and if not set, the worker label will be used.
24 |
--------------------------------------------------------------------------------
/vars/withGoEnvWindows.txt:
--------------------------------------------------------------------------------
1 | Install Go and run some command in a pre-configured environment for Windows.
2 |
3 | ```
4 | withGoEnvWindows(version: '1.14.2'){
5 | bat(label: 'Go version', script: 'go version')
6 | }
7 | ```
8 |
9 | ```
10 | withGoEnvWindows(version: '1.14.2', pkgs: [
11 | "github.com/magefile/mage",
12 | "github.com/elastic/go-licenser",
13 | "golang.org/x/tools/cmd/goimports",
14 | ]){
15 |   bat(label: 'Run mage', script: 'mage -version')
16 | }
18 | ```
19 |
20 | * version: Go version to install, if it is not set, it'll use GO_VERSION env var or [default version](#goDefaultVersion)
21 | * pkgs: Go packages to install with `go get` before executing any command.
22 | * os: OS to use. (Example: `windows`). This is an optional argument and if not set, the worker label will be used.
23 |
--------------------------------------------------------------------------------
/vars/withHubCredentials.txt:
--------------------------------------------------------------------------------
1 | Configure the hub app to run the body closure.
2 |
3 | ```
4 | withHubCredentials(credentialsId: 'some-credentials') {
5 | // block
6 | }
7 | ```
8 |
9 | * credentialsId: the credentials ID for the git user and token. Default '2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken'
10 |
11 | _NOTE:_
12 | * Windows agents are not supported.
13 |
--------------------------------------------------------------------------------
/vars/withKibanaDeploymentEnv.txt:
--------------------------------------------------------------------------------
1 | Wrap the Kibana credentials and entrypoints as environment variables that are masked
2 | for the Elastic Cloud deployment
3 |
4 | ```
5 | withKibanaDeploymentEnv(cluster: 'test-cluster-azure') {
6 | // block
7 | }
8 | ```
9 |
10 | * cluster: Name of the cluster that was already created. Mandatory
11 |
12 | Environment variables:
13 | * `KIBANA_URL`
14 | * `KIBANA_USERNAME`
15 | * `KIBANA_PASSWORD`
16 |
17 | NOTE: secrets for the test clusters are located in Vault, see `getTestClusterSecret`
18 |
--------------------------------------------------------------------------------
/vars/withKindEnv.txt:
--------------------------------------------------------------------------------
1 | Install Kind, Kubectl and configure Kind to run some command within the kind/kubectl context
2 |
3 | ```
4 | withKindEnv(k8sVersion: 'v0.11.1', kindVersion: 'v1.23.0'){
5 | ..
6 | }
7 | ```
8 |
9 | * k8sVersion: K8s version to install. Optional
10 | * kindVersion: Kind version to install. Optional
11 |
--------------------------------------------------------------------------------
/vars/withMageEnv.txt:
--------------------------------------------------------------------------------
1 |
2 | Install Go and mage and run some command in a pre-configured environment.
3 |
4 | ```
5 | withMageEnv(version: '1.14.2'){
6 | sh(label: 'Go version', script: 'go version')
7 | }
8 | ```
9 |
10 | ```
11 | withMageEnv(version: '1.14.2', pkgs: [
12 | "github.com/elastic/go-licenser",
13 | "golang.org/x/tools/cmd/goimports",
14 | ]){
15 |   sh(label: 'Run mage', script: 'mage -version')
16 | }
18 | ```
19 |
20 | * version: Go version to install, if it is not set, it'll use GO_VERSION env var or the default one set in the withGoEnv step
21 | * pkgs: Go packages to install with `go get` before executing any command.
22 |
--------------------------------------------------------------------------------
/vars/withNodeJSEnv.txt:
--------------------------------------------------------------------------------
1 | Install Node.js with NVM and run some command in a pre-configured, multiplatform environment. For this,
2 | it's recommended to use the `cmd` step.
3 |
4 | ```
5 | withNodeJSEnv(version: '14.17.5'){
6 | cmd(label: 'Node version', script: 'node --version')
7 | }
8 | ```
9 |
10 | * version: Node.js version to install, if it is not set, it'll use [default version](#nodeJSDefaultVersion)
11 |
--------------------------------------------------------------------------------
/vars/withNodeJSEnvUnix.txt:
--------------------------------------------------------------------------------
1 | Install Node.js with NVM and run some command in a pre-configured environment for Unix.
2 |
3 | ```
4 | withNodeJSEnvUnix(version: '14.17.5'){
5 | sh(label: 'Node version', script: 'node --version')
6 | }
7 | ```
8 | * version: Node.js version to install, if it is not set, it'll use [default version](#nodeJSDefaultVersion)
9 |
--------------------------------------------------------------------------------
/vars/withNpmrc.txt:
--------------------------------------------------------------------------------
1 | Wrap the npmrc token
2 |
3 | ```
4 | withNpmrc() {
5 | // block
6 | }
7 |
8 | withNpmrc(path: '/foo', npmrcFile: '.npmrc') {
9 | // block
10 | }
11 | ```
12 |
13 | * path: root folder where the npmrc token will be stored. (Optional). Default: ${HOME} env variable
14 | * npmrcFile: name of the file with the token. (Optional). Default: .npmrc
15 | * registry: NPM registry. (Optional). Default: registry.npmjs.org
16 | * secret: Name of the secret on the vault root path. (Optional). Default: 'secret/apm-team/ci/elastic-observability-npmjs'
17 |
--------------------------------------------------------------------------------
/vars/withOtelEnv.txt:
--------------------------------------------------------------------------------
1 | Configure the OpenTelemetry Jenkins context to run the body closure with the below
2 | environment variables:
3 |
4 | * `JENKINS_OTEL_SERVICE_NAME`
5 | * `OTEL_EXPORTER_OTLP_ENDPOINT`, opentelemetry 0.19 already provides this environment variable.
6 | * `OTEL_EXPORTER_OTLP_HEADERS`, opentelemetry 0.19 already provides this environment variable.
7 | * `ELASTIC_APM_SECRET_TOKEN`
8 | * `ELASTIC_APM_SERVER_URL`
9 | * `ELASTIC_APM_SERVICE_NAME`
10 | * `TRACEPARENT`, opentelemetry 0.19 already provides this environment variable.
11 |
12 | ```
13 | withOtelEnv() {
14 | // block
15 | }
16 |
17 | // If you'd like to use different credentials
18 | withOtelEnv(credentialsId: 'foo') {
19 | // block
20 | }
21 | ```
22 |
23 | * credentialsId: the name of the credentials. Optional.
24 |
25 | **NOTE**: It requires the [OpenTelemetry plugin](https://plugins.jenkins.io/opentelemetry) to be installed.
26 |
--------------------------------------------------------------------------------
/vars/withPackerEnv.txt:
--------------------------------------------------------------------------------
1 | Configure Packer context to run the given body closure
2 |
3 | ```
4 | withPackerEnv(version: '1.8.4') {
5 | // block
6 | }
7 | ```
8 |
9 | * version: The packer CLI version to be installed. Optional (1.8.4)
10 | * forceInstallation: Whether to install packer regardless. Optional (false)
11 |
--------------------------------------------------------------------------------
/vars/withTerraformEnv.txt:
--------------------------------------------------------------------------------
1 | Configure the Terraform context to run the given body closure
2 |
3 | ```
4 | withTerraformEnv(version: '0.15.1') {
5 | // block
6 | }
7 | ```
8 |
9 | * version: The terraform CLI version to be installed. Optional (1.1.9)
10 | * forceInstallation: Whether to install terraform regardless. Optional (false)
11 |
--------------------------------------------------------------------------------
/vars/withTotpVault.txt:
--------------------------------------------------------------------------------
1 | Get the [TOTP](https://en.wikipedia.org/wiki/Time-based_One-time_Password_algorithm) code from the vault, define the environment variables which have been
2 | passed as parameters and mask the secrets
3 |
4 | The TOTP response must have this format:
5 | ```
6 | {
7 | "request_id": "abcdef4a-f9d6-ce93-2536-32c3bb915ab7",
8 | "lease_id": "",
9 | "lease_duration": 0,
10 | "renewable": false,
11 | "data": {
12 | "code": "123456"
13 | },
14 | "warnings": null
15 | }
16 | ```
17 |
18 | The value for code_var_name will be exported as a variable and masked in the logs
19 |
20 | ```
21 | withTotpVault(secret: 'secret', code_var_name: 'VAULT_TOTP'){
22 | //block
23 | }
24 | ```
25 |
--------------------------------------------------------------------------------
/vars/withVaultToken.txt:
--------------------------------------------------------------------------------
1 | Wrap the vault token
2 |
3 | ```
4 | withVaultToken() {
5 | // block
6 | }
7 |
8 | withVaultToken(path: '/foo', tokenFile: '.myfile') {
9 | // block
10 | }
11 | ```
12 |
13 | * path: root folder where the vault token will be stored. (Optional). Default: ${WORKSPACE} env variable
14 | * tokenFile: name of the file with the token. (Optional). Default: .vault-token
15 |
--------------------------------------------------------------------------------
/vars/writeVaultSecret.txt:
--------------------------------------------------------------------------------
1 | Write the given data in vault for the given secret.
2 |
3 | ```
4 | writeVaultSecret(secret: 'secret/team/ci/temp/github-comment', data: ['secret': 'foo'] )
5 | ```
6 |
7 | * secret: Name of the secret on the vault root path. Mandatory
8 | * data: What's the data to be written. Mandatory
9 |
--------------------------------------------------------------------------------