├── .azure-pipelines ├── azure-pipelines.yml ├── common-steps.yml ├── github-release │ ├── .npmrc │ ├── github-release.js │ ├── package-lock.json │ └── package.json └── release-pipeline.yml ├── .config └── 1espt │ └── PipelineAutobaseliningConfig.yml ├── .gitattributes ├── .github ├── CODEOWNERS └── workflows │ └── autoAssignABTT.yml ├── .gitignore ├── .npmrc ├── .vscode-test.mjs ├── .vscode ├── extensions.json ├── launch.json ├── settings.json └── tasks.json ├── .vscodeignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── RELEASE.md ├── SECURITY.md ├── assets ├── pipelines-file-icon.svg └── pipelines.png ├── eslint.config.mjs ├── examples ├── .vscode │ └── settings.json ├── extracted │ ├── .vscode │ │ └── settings.json │ ├── JobCancelTimeoutInMinutes_FromImpliedJob_LegacyQueue.0.yml │ ├── JobCancelTimeoutInMinutes_FromImpliedJob_LegacyQueue.1.yml │ ├── JobCancelTimeoutInMinutes_FromImpliedJob_LegacyServer.0.yml │ ├── JobCancelTimeoutInMinutes_FromImpliedJob_LegacyServer.1.yml │ ├── JobCancelTimeoutInMinutes_FromJob.0.yml │ ├── JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyQueue.0.yml │ ├── JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyQueue.1.yml │ ├── JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyServer.0.yml │ ├── JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyServer.1.yml │ ├── JobCondition_FromJob.0.yml │ ├── JobCondition_FromLegacyPhase.0.yml │ ├── JobCondition_FromLegacyPhase.1.yml │ ├── JobContainer_FromImpliedJob.0.yml │ ├── JobContainer_FromImpliedJob_LegacyQueue.0.yml │ ├── JobContainer_FromImpliedJob_LegacyQueue.1.yml │ ├── JobContainer_FromJob.0.yml │ ├── JobContainer_FromLegacyPhase.0.yml │ ├── JobContainer_FromLegacyPhase.1.yml │ ├── JobContinueOnError_FromImpliedJob_Boolean.0.yml │ ├── JobContinueOnError_FromImpliedJob_Expression.0.yml │ ├── JobContinueOnError_FromJob.0.yml │ ├── JobDependsOn_FromJob.0.yml │ ├── JobDependsOn_FromLegacyPhase.0.yml │ ├── JobDependsOn_FromLegacyPhase.1.yml │ ├── JobDisplayName_FromJob.0.yml │ ├── JobDisplayName_FromLegacyPhase.0.yml │ ├── JobDisplayName_FromLegacyPhase.1.yml │ ├── JobPool_FromImpliedJob.0.yml │ ├── JobPool_FromImpliedJob_LegacyQueue.0.yml │ ├── JobPool_FromImpliedJob_LegacyQueue.1.yml │ ├── JobPool_FromImpliedJob_LegacyServer.0.yml │ ├── JobPool_FromImpliedJob_LegacyServer.1.yml │ ├── JobPool_FromJob.0.yml │ ├── JobPool_FromLegacyPhase_LegacyQueue.0.yml │ ├── JobPool_FromLegacyPhase_LegacyQueue.1.yml │ ├── JobPool_FromLegacyPhase_LegacyServer.0.yml │ ├── JobPool_FromLegacyPhase_LegacyServer.1.yml │ ├── JobSteps_Bash.0.yml │ ├── JobSteps_Checkout.0.yml │ ├── JobSteps_CheckoutNone.0.yml │ ├── JobSteps_CheckoutNone.1.yml │ ├── JobSteps_CheckoutWithRepoDefined.0.yml │ ├── JobSteps_CheckoutWithRepoDefined.1.yml │ ├── JobSteps_FromImpliedJob.0.yml │ ├── JobSteps_FromJob.0.yml │ ├── JobSteps_FromLegacyPhase.0.yml │ ├── JobSteps_FromLegacyPhase.1.yml │ ├── JobSteps_PowerShell.0.yml │ ├── JobSteps_Script.0.yml │ ├── JobSteps_Task.0.yml │ ├── JobStrategy_Matrix_FromImpliedJob.0.yml │ ├── JobStrategy_Matrix_FromImpliedJob_LegacyQueue.0.yml │ ├── JobStrategy_Matrix_FromImpliedJob_LegacyQueue.1.yml │ ├── JobStrategy_Matrix_FromImpliedJob_LegacyServer.0.yml │ ├── JobStrategy_Matrix_FromImpliedJob_LegacyServer.1.yml │ ├── JobStrategy_Matrix_FromJob.0.yml │ ├── JobStrategy_Matrix_FromLegacyPhase_LegacyQueue.0.yml │ ├── JobStrategy_Matrix_FromLegacyPhase_LegacyQueue.1.yml │ ├── JobStrategy_Matrix_FromLegacyPhase_LegacyServer.0.yml │ ├── JobStrategy_Matrix_FromLegacyPhase_LegacyServer.1.yml │ ├── 
JobStrategy_Parallel_FromImpliedJob.0.yml │ ├── JobStrategy_Parallel_FromImpliedJob_LegacyQueue.0.yml │ ├── JobStrategy_Parallel_FromImpliedJob_LegacyQueue.1.yml │ ├── JobStrategy_Parallel_FromImpliedJob_LegacyServer.0.yml │ ├── JobStrategy_Parallel_FromImpliedJob_LegacyServer.1.yml │ ├── JobStrategy_Parallel_FromJob.0.yml │ ├── JobStrategy_Parallel_FromLegacyPhase_LegacyQueue.0.yml │ ├── JobStrategy_Parallel_FromLegacyPhase_LegacyQueue.1.yml │ ├── JobStrategy_Parallel_FromLegacyPhase_LegacyServer.0.yml │ ├── JobStrategy_Parallel_FromLegacyPhase_LegacyServer.1.yml │ ├── JobTimeoutInMinutes_FromImpliedJob_LegacyQueue.0.yml │ ├── JobTimeoutInMinutes_FromImpliedJob_LegacyQueue.1.yml │ ├── JobTimeoutInMinutes_FromImpliedJob_LegacyServer.0.yml │ ├── JobTimeoutInMinutes_FromImpliedJob_LegacyServer.1.yml │ ├── JobTimeoutInMinutes_FromJob.0.yml │ ├── JobTimeoutInMinutes_FromLegacyPhase_LegacyQueue.0.yml │ ├── JobTimeoutInMinutes_FromLegacyPhase_LegacyQueue.1.yml │ ├── JobTimeoutInMinutes_FromLegacyPhase_LegacyServer.0.yml │ ├── JobTimeoutInMinutes_FromLegacyPhase_LegacyServer.1.yml │ ├── JobVariables_FromJob.0.yml │ ├── JobVariables_FromJob.1.yml │ ├── JobVariables_FromLegacyPhase.0.yml │ ├── JobVariables_FromLegacyPhase.1.yml │ ├── JobWorkspace_FromImpliedJob.0.yml │ ├── JobWorkspace_FromImpliedJob_LegacyQueue.0.yml │ ├── JobWorkspace_FromImpliedJob_LegacyQueue.1.yml │ ├── JobWorkspace_FromJob.0.yml │ ├── JobWorkspace_FromLegacyPhase.0.yml │ ├── JobWorkspace_FromLegacyPhase.1.yml │ ├── MaxFileSize.0.yml │ ├── MaxFiles.0.yml │ ├── MaxFiles.1.yml │ ├── MaxFiles.2.yml │ ├── MaxResultSize_AcrossFiles.0.yml │ ├── MaxResultSize_AcrossFiles.1.yml │ ├── PipelineName_ImpliedJob.0.yml │ ├── PipelineName_ImpliedStage.0.yml │ ├── PipelineName_ImpliedStage_LegacyPhases.0.yml │ ├── PipelineName_ImpliedStage_LegacyPhases.1.yml │ ├── PipelineResources_BackCompat.0.yml │ ├── PipelineResources_BackCompat.1.yml │ ├── PipelineResources_BackCompat_OnlySupportsSelfRepo.0.yml │ ├── PipelineResources_BackCompat_RestrictsObjects.0.yml │ ├── PipelineResources_BackCompat_RestrictsUnknownProperties.0.yml │ ├── PipelineResources_Containers.0.yml │ ├── PipelineResources_Repositories.0.yml │ ├── PipelineTrigger.0.yml │ ├── PipelineTrigger_Minimal.0.yml │ ├── PipelineTrigger_None.0.yml │ ├── PipelineTrigger_None.1.yml │ ├── PipelineTrigger_RetrieveTriggersOnly.0.yml │ ├── PipelineTrigger_RetrieveTriggersOnly.1.yml │ ├── PipelineVariables_Mapping_ImpliedJob.0.yml │ ├── PipelineVariables_Mapping_ImpliedStage.0.yml │ ├── PipelineVariables_Mapping_ImpliedStage_LegacyPhases.0.yml │ ├── PipelineVariables_Mapping_ImpliedStage_LegacyPhases.1.yml │ ├── PipelineVariables_Sequence_ImpliedJob.0.yml │ ├── PipelineVariables_Sequence_ImpliedJob.1.yml │ ├── PipelineVariables_Sequence_ImpliedStage.0.yml │ ├── PipelineVariables_Sequence_ImpliedStage.1.yml │ ├── PipelineVariables_Sequence_ImpliedStage_LegacyPhases.0.yml │ ├── PipelineVariables_Sequence_ImpliedStage_LegacyPhases.1.yml │ ├── PipelinesResources_Repositories_CheckoutOptions.0.yml │ ├── Templates_General_ReferenceToAnotherRepository.0.yml │ ├── Templates_General_ReferenceToAnotherRepository.1.yml │ ├── Templates_General_ReferenceToAnotherRepository.2.yml │ ├── Templates_General_ReferenceToAnotherRepository.3.yml │ ├── Templates_General_ReferenceToSelfRepository.0.yml │ ├── Templates_General_ReferenceToSelfRepository.1.yml │ ├── Templates_General_ReferenceToSelfRepository.2.yml │ ├── Templates_General_ReferenceToSelfRepository.3.yml │ ├── 
Templates_JobTemplates_CanReferenceJobsTemplate.0.yml │ ├── Templates_JobTemplates_CanReferenceStepsTemplate.0.yml │ ├── Templates_JobTemplates_CanSpecifySelfSource.0.yml │ ├── Templates_JobTemplates_CannotDoubleHopRepositories.0.yml │ ├── Templates_JobTemplates_CannotSpecifySourceWithinAnotherRepository.0.yml │ ├── Templates_JobTemplates_DoesNotLoadTemplateWhenAlreadyHasErrors.0.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.1.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.11.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.12.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.3.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.5.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.7.yml │ ├── Templates_JobTemplates_FollowsCurrentDirectory.9.yml │ ├── Templates_JobTemplates_LegacyPhases.0.yml │ ├── Templates_JobTemplates_PreservesJobOrder.0.yml │ ├── Templates_JobTemplates_PreservesJobOrder.1.yml │ ├── Templates_JobTemplates_PreservesJobOrder.2.yml │ ├── Templates_JobTemplates_PreservesJobOrder_NestedTemplates.0.yml │ ├── Templates_JobTemplates_PreservesJobOrder_NestedTemplates.1.yml │ ├── Templates_JobTemplates_PreservesJobOrder_NestedTemplates.2.yml │ ├── Templates_JobTemplates_PreservesJobOrder_NestedTemplates.3.yml │ ├── Templates_JobTemplates_RequiresLiteralKeys.0.yml │ ├── Templates_StepTemplates_CanReferenceStepsTemplate.0.yml │ ├── Templates_StepTemplates_CanSpecifySelfSource.0.yml │ ├── Templates_StepTemplates_CannotDoubleHopRepositories.0.yml │ ├── Templates_StepTemplates_CannotSpecifySourceWithinAnotherRepository.0.yml │ ├── Templates_StepTemplates_ConditionallyInsertMapping.0.yml │ ├── Templates_StepTemplates_ConditionallyInsertSequence.0.yml │ ├── Templates_StepTemplates_DefaultParameters_RestrictExpressions.0.yml │ ├── Templates_StepTemplates_DoesNotLoadTemplateWhenAlreadyHasErrors.0.yml │ ├── Templates_StepTemplates_EvaluateMapping.0.yml │ ├── Templates_StepTemplates_EvaluateSequence.0.yml │ ├── Templates_StepTemplates_EvaluatelLiteral.0.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.1.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.11.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.12.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.3.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.5.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.7.yml │ ├── Templates_StepTemplates_FollowsCurrentDirectory.9.yml │ ├── Templates_StepTemplates_InsertMapping.0.yml │ ├── Templates_StepTemplates_InsertSequence.0.yml │ ├── Templates_StepTemplates_PreservesStepOrder.0.yml │ ├── Templates_StepTemplates_PreservesStepOrder.1.yml │ ├── Templates_StepTemplates_PreservesStepOrder.2.yml │ ├── Templates_StepTemplates_PreservesStepOrder_NestedTemplates.0.yml │ ├── Templates_StepTemplates_PreservesStepOrder_NestedTemplates.1.yml │ ├── Templates_StepTemplates_PreservesStepOrder_NestedTemplates.2.yml │ ├── Templates_StepTemplates_PreservesStepOrder_NestedTemplates.3.yml │ ├── Templates_StepTemplates_RecursionDoesNotStackOverflow.0.yml │ └── Templates_StepTemplates_RequiresLiteralKeys.0.yml ├── templateMappingInsert.yml ├── templateSequenceConditionalInsert.yml └── templateSequenceInsert.yml ├── language-configuration.json ├── package-lock.json ├── package.json ├── service-schema.json ├── src ├── clients │ └── devOps │ │ └── organizationsClient.ts ├── extension.ts ├── extensionApis.ts ├── helpers │ ├── azureDevOpsHelper.ts │ ├── controlProvider.ts │ ├── telemetryHelper.ts │ └── telemetryKeys.ts ├── 
logger.ts ├── messages.ts ├── schema-association-service-1espt.ts ├── schema-association-service.ts ├── schema-contributor.ts ├── test │ ├── helper.ts │ ├── suite │ │ ├── azureDevOpsHelper.test.ts │ │ ├── completion.test.ts │ │ ├── configuration.test.ts │ │ ├── diagnostics.test.ts │ │ ├── fromserver.test.ts │ │ └── testdata │ │ │ ├── schemas │ │ │ ├── all-inputs-schema.json │ │ │ ├── npm-schema.json │ │ │ └── special-characters-schema.json │ │ │ └── tasks │ │ │ ├── all-inputs-task.json │ │ │ ├── missing-input-mapping-exception-task.json │ │ │ ├── npm-task.json │ │ │ └── special-characters-task.json │ └── workspace │ │ ├── .vscode │ │ └── settings.json │ │ ├── autocomplete.yml │ │ ├── emptyfile.yml │ │ ├── invalidfile.txt │ │ ├── invalidfile.yml │ │ └── validfile.yml └── typings │ └── git.d.ts ├── syntaxes └── yaml.tmLanguage.json ├── tools └── extract-yaml-testcases │ ├── .npmrc │ ├── ExampleTests.cs │ ├── package-lock.json │ ├── package.json │ ├── readme.md │ ├── src │ └── main.ts │ └── tsconfig.json ├── tsconfig.json ├── tsconfig.test.json └── webpack.config.mjs /.azure-pipelines/azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | # CI and PR build script 2 | # 3 | # There should be no deep magic here. The developer experience and CI experience 4 | # must remain as close to one another as possible. 5 | # 6 | # Developer experience: 7 | # npm install 8 | # (make changes) 9 | # npm test 10 | # vsce package 11 | # (give VSIX to someone for buddy testing) 12 | 13 | # This Yaml Document has been converted by ESAI Yaml Pipeline Conversion Tool. 14 | # This pipeline will be extended to the OneESPT template 15 | trigger: 16 | - main 17 | 18 | # no `pr` keyword because we want all PRs to run this 19 | resources: 20 | repositories: 21 | - repository: 1ESPipelineTemplates 22 | type: git 23 | name: 1ESPipelineTemplates/1ESPipelineTemplates 24 | ref: refs/tags/release 25 | extends: 26 | template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates 27 | parameters: 28 | settings: 29 | skipBuildTagsForGitHubPullRequests: true 30 | # featureFlags: 31 | # autoBaseline: false 32 | sdl: 33 | baseline: 34 | baselineSet: default 35 | baselineFile: $(Build.SourcesDirectory)/.gdn/.gdnbaselines 36 | sourceAnalysisPool: 37 | name: 1ES-ABTT-Shared-Pool 38 | image: abtt-windows-2022 39 | os: windows 40 | pool: 41 | name: 1ES-ABTT-Shared-Pool 42 | image: abtt-ubuntu-2204 43 | os: linux 44 | customBuildTags: 45 | - ES365AIMigrationTooling 46 | stages: 47 | - stage: stage 48 | jobs: 49 | - job: job 50 | templateContext: 51 | outputs: 52 | - output: pipelineArtifact 53 | displayName: 'Publish VSIX' 54 | artifactName: azure-pipelines-vscode 55 | targetPath: $(Build.ArtifactStagingDirectory) 56 | steps: 57 | # for convenience, we tag CI-produced packages with a version number 58 | # pointing to the commit which was built. for PRs, also include the PR #. 
59 | - bash: | 60 | PACKAGE_VERSION=$(node -p "require('./package.json').version") 61 | 62 | if [ -n "$SYSTEM_PULLREQUEST_PULLREQUESTNUMBER" ]; then 63 | VERSION_STRING=${PACKAGE_VERSION}-pr-${SYSTEM_PULLREQUEST_PULLREQUESTNUMBER}-$(git rev-parse --short HEAD) 64 | else 65 | VERSION_STRING=${PACKAGE_VERSION}-ci-$(git rev-parse --short HEAD) 66 | fi 67 | 68 | npm --no-git-tag-version version $VERSION_STRING 69 | echo "##vso[build.updatebuildnumber]${VERSION_STRING}_${BUILD_BUILDID}" 70 | echo "$PACKAGE_VERSION" > version.txt 71 | displayName: Set version number of package and build 72 | 73 | - template: /.azure-pipelines/common-steps.yml@self 74 | -------------------------------------------------------------------------------- /.azure-pipelines/common-steps.yml: -------------------------------------------------------------------------------- 1 | # Common steps template 2 | # 3 | # Things which happen regardless of CI, PR, or release builds 4 | steps: 5 | - task: NodeTool@0 6 | displayName: Install Node 16 LTS or greater 7 | inputs: 8 | versionSpec: ">=16.13.0" 9 | 10 | - task: NpmAuthenticate@0 11 | inputs: 12 | workingFile: .npmrc 13 | 14 | - script: npm ci 15 | displayName: npm ci 16 | 17 | - script: npm run compile 18 | displayName: Build extension 19 | 20 | # TODO: call the schema endpoint to generate the shipped schema file 21 | # - script: | 22 | # echo no-op 23 | # displayName: Generate service-schema.json 24 | 25 | - script: Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 & 26 | displayName: Start xvfb 27 | 28 | - script: npm run test 29 | displayName: Run tests 30 | env: 31 | DISPLAY: ':99.0' 32 | 33 | - script: npm run lint 34 | displayName: Lint 35 | 36 | # Acquire the `vsce` tool and use it to package 37 | - script: | 38 | npm install -g @vscode/vsce 39 | displayName: Install VSCE 40 | 41 | - script: | 42 | npx @vscode/vsce package -o extension.vsix 43 | displayName: Create VSIX 44 | 45 | - script: | 46 | npx @vscode/vsce generate-manifest -i extension.vsix -o extension.manifest 47 | displayName: Create VSIX Manifest 48 | 49 | - script: | 50 | cp extension.manifest extension.signature.p7s 51 | displayName: Prepare Manifest Signature 52 | 53 | - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5 54 | inputs: 55 | ConnectedServiceName: $(ConnectedServiceName) 56 | UseMSIAuthentication: true 57 | AppRegistrationClientId: $(AppRegistrationClientId) 58 | AppRegistrationTenantId: $(AppRegistrationTenantId) 59 | EsrpClientId: $(EsrpClientId) 60 | AuthAKVName: $(AuthAKVName) 61 | AuthSignCertName: $(AuthSignCertName) 62 | FolderPath: '$(Build.SourcesDirectory)' 63 | Pattern: 'extension.signature.p7s' 64 | signConfigType: inlineSignParams 65 | inlineOperation: | 66 | [ 67 | { 68 | "keyCode": "CP-401405", 69 | "operationSetCode": "VSCodePublisherSign", 70 | "parameters" : [], 71 | "toolName": "sign", 72 | "toolVersion": "1.0" 73 | } 74 | ] 75 | SessionTimeout: 90 76 | MaxConcurrency: 25 77 | MaxRetryAttempts: 5 78 | PendingAnalysisWaitTimeoutMinutes: 5 79 | displayName: Sign Extension 80 | condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) 81 | 82 | - script: | 83 | npm run vscode:prepublish 84 | cat /home/vsts/.npm/_logs/*.log 85 | displayName: Echo npm error logs on failure 86 | condition: failed() 87 | 88 | # For releasable builds, we'll want the branch and the changelog 89 | # Expects that a 'version.txt' has been laid down by a previous step 90 | - bash: | 91 | echo $(Build.SourceBranch) | sed "s|refs/[^/]*/||" > branch.txt 92 | PACKAGE_VERSION=$(cat 
version.txt) 93 | VERSION_REGEX="## $(echo $PACKAGE_VERSION | sed 's/\./\\./g')" 94 | sed -n "/$VERSION_REGEX/,/## 1\..*/p" CHANGELOG.md | head -n -2 > minichangelog.txt 95 | displayName: Get branch and mini-changelog 96 | 97 | # Choose files to publish 98 | - task: CopyFiles@2 99 | displayName: Stage VSIX for publishing 100 | inputs: 101 | contents: |- 102 | extension.vsix 103 | version.txt 104 | branch.txt 105 | minichangelog.txt 106 | extension.signature.p7s 107 | extension.manifest 108 | targetFolder: $(Build.ArtifactStagingDirectory) 109 | -------------------------------------------------------------------------------- /.azure-pipelines/github-release/.npmrc: -------------------------------------------------------------------------------- 1 | registry=https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/ 2 | 3 | always-auth=true -------------------------------------------------------------------------------- /.azure-pipelines/github-release/github-release.js: -------------------------------------------------------------------------------- 1 | const octokit = require('@octokit/rest')({ 2 | headers: { 3 | 'user-agent': 'azure-pipelines/vscode-release-pipeline v1.0' 4 | } 5 | }); 6 | const util = require('util'); 7 | const exec = util.promisify(require('child_process').exec); 8 | const fs = require('fs'); 9 | 10 | const DEBUG_LOGGING = process.env.SYSTEM_DEBUG && process.env.SYSTEM_DEBUG == 'true'; 11 | let vsixName = process.argv[2] || null; 12 | let version = process.argv[3] || null; 13 | let token = process.argv[4] || null; 14 | let signature = process.argv[5] || null; 15 | let manifest = process.argv[6] || null; 16 | 17 | if (token === null) { 18 | console.log(`Usage: 19 | 20 | github-release.js <vsixName> <version> <token> <signature> <manifest> 21 | 22 | This will create a new release and tag on GitHub at the current HEAD commit. 23 | 24 | USE AT YOUR OWN RISK.
25 | This is intended to be run by the release pipeline only.`); 26 | process.exit(1); 27 | } 28 | 29 | async function createRelease() { 30 | let target_commitish; 31 | if (process.env.BUILD_SOURCEBRANCH) { 32 | target_commitish = process.env.BUILD_SOURCEBRANCH; 33 | } else { 34 | const { stdout: head_commit } = await exec('git rev-parse --verify HEAD'); 35 | target_commitish = head_commit.trim(); 36 | } 37 | 38 | const { stdout: body } = await exec('cat minichangelog.txt'); 39 | 40 | octokit.authenticate({ 41 | type: 'token', 42 | token: token 43 | }); 44 | 45 | console.log('Creating release...'); 46 | let createReleaseResult; 47 | try { 48 | createReleaseResult = await octokit.repos.createRelease({ 49 | owner: 'Microsoft', 50 | repo: 'azure-pipelines-vscode', 51 | tag_name: `v${version}`, 52 | target_commitish: target_commitish, 53 | name: `${version}`, 54 | body: body 55 | }); 56 | } catch (e) { 57 | throw e; 58 | } 59 | console.log('Created release.'); 60 | 61 | if (DEBUG_LOGGING) { 62 | console.log(createReleaseResult); 63 | } 64 | 65 | // Upload the VSIX 66 | const vsixSize = fs.statSync(vsixName).size; 67 | console.log('Uploading VSIX...'); 68 | let vsixUploadResult; 69 | try { 70 | vsixUploadResult = await octokit.repos.uploadAsset({ 71 | url: createReleaseResult.data.upload_url, 72 | headers: { 73 | 'content-length': vsixSize, 74 | 'content-type': 'application/zip', 75 | }, 76 | name: vsixName, 77 | file: fs.createReadStream(vsixName) 78 | }); 79 | } catch (e) { 80 | throw e; 81 | } 82 | console.log('Uploaded VSIX.'); 83 | 84 | // Upload the Manifest 85 | const manifestSize = fs.statSync(manifest).size; 86 | console.log('Uploading Manifest...'); 87 | let manifestUploadResult; 88 | try { 89 | manifestUploadResult = await octokit.repos.uploadAsset({ 90 | url: createReleaseResult.data.upload_url, 91 | headers: { 92 | 'content-length': manifestSize, 93 | 'content-type': 'application/xml', 94 | }, 95 | name: manifest, 96 | file: fs.createReadStream(manifest) 97 | }); 98 | } catch (e) { 99 | throw e; 100 | } 101 | console.log('Uploaded Manifest.'); 102 | 103 | // Upload the Signature 104 | const signatureSize = fs.statSync(signature).size; 105 | console.log('Uploading Signature...'); 106 | let signatureUploadResult; 107 | try { 108 | signatureUploadResult = await octokit.repos.uploadAsset({ 109 | url: createReleaseResult.data.upload_url, 110 | headers: { 111 | 'content-length': signatureSize, 112 | 'content-type': 'application/pkcs7-signature', 113 | }, 114 | name: signature, 115 | file: fs.createReadStream(signature) 116 | }); 117 | } catch (e) { 118 | throw e; 119 | } 120 | console.log('Uploaded Signature.'); 121 | 122 | if (DEBUG_LOGGING) { 123 | console.log("VSIX Upload Result:" + vsixUploadResult); 124 | console.log("Manifest Upload Result:" + manifestUploadResult); 125 | console.log("Signature Upload Result:" + signatureUploadResult); 126 | } 127 | } 128 | 129 | createRelease().catch(err => { 130 | console.error(err); 131 | process.exit(1); 132 | }); 133 | -------------------------------------------------------------------------------- /.azure-pipelines/github-release/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "github-release", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "github-release.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "Microsoft", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@octokit/rest": "^15.18.2" 13 
| } 14 | } 15 | -------------------------------------------------------------------------------- /.azure-pipelines/release-pipeline.yml: -------------------------------------------------------------------------------- 1 | # Release build script 2 | # 3 | # Uses the common build logic, but also gains capabilities related to releasing the product. 4 | 5 | # Only trigger manually 6 | 7 | trigger: none 8 | 9 | pr: none 10 | 11 | resources: 12 | repositories: 13 | - repository: 1ESPipelineTemplates 14 | type: git 15 | name: 1ESPipelineTemplates/1ESPipelineTemplates 16 | ref: refs/tags/release 17 | 18 | extends: 19 | template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates 20 | parameters: 21 | settings: 22 | skipBuildTagsForGitHubPullRequests: true 23 | # featureFlags: 24 | # autoBaseline: false 25 | sdl: 26 | baseline: 27 | baselineSet: default 28 | baselineFile: $(Build.SourcesDirectory)/.gdn/.gdnbaselines 29 | sourceAnalysisPool: 30 | name: 1ES-ABTT-Shared-Pool 31 | image: abtt-windows-2022 32 | os: windows 33 | pool: 34 | name: 1ES-ABTT-Shared-Pool 35 | image: abtt-ubuntu-2204 36 | os: linux 37 | customBuildTags: 38 | - ES365AIMigrationTooling 39 | 40 | stages: 41 | - stage: stage 42 | jobs: 43 | - job: job 44 | templateContext: 45 | outputs: 46 | - output: pipelineArtifact 47 | displayName: 'Publish VSIX' 48 | artifactName: azure-pipelines-vscode 49 | targetPath: $(Build.ArtifactStagingDirectory) 50 | 51 | steps: 52 | # release version should be correctly set in package.json 53 | - bash: | 54 | PACKAGE_VERSION=$(node -p "require('./package.json').version") 55 | echo "##vso[build.updatebuildnumber]${PACKAGE_VERSION}_release_${BUILD_BUILDID}" 56 | echo "$PACKAGE_VERSION" > version.txt 57 | displayName: Set version number of build 58 | 59 | # do all the normal build stuff 60 | - template: /.azure-pipelines/common-steps.yml@self 61 | 62 | # if the mini changelog is empty, complain 63 | - bash: | 64 | echo minichangelog.txt: 65 | cat minichangelog.txt 66 | LINE_COUNT=$(cat minichangelog.txt | wc -l) 67 | if [ "$LINE_COUNT" -lt 3 ]; then 68 | echo Mini changelog is too short. Did you use the wrong version number in CHANGELOG.txt? 69 | exit 1 70 | fi 71 | displayName: Check for length of mini-changelog 72 | 73 | # create a GitHub Release 74 | - bash: | 75 | export npm_config_cache=$(Build.SourcesDirectory)/.azure-pipelines/github-release/npm-cache 76 | npm install 77 | displayName: Prepare to create GitHub Release 78 | workingDirectory: '$(Build.SourcesDirectory)/.azure-pipelines/github-release' 79 | condition: eq(variables['Build.SourceBranchName'], 'main') 80 | 81 | - bash: | 82 | SCRIPT=.azure-pipelines/github-release/github-release.js 83 | VSIX=extension.vsix 84 | VERSION=$(node -p "require('./package.json').version") 85 | VSIX_SIGNATURE=extension.signature.p7s 86 | VSIX_MANIFEST=extension.manifest 87 | node $SCRIPT $VSIX $VERSION $GITHUB_TOKEN $VSIX_SIGNATURE $VSIX_MANIFEST 88 | displayName: Create GitHub Release 89 | condition: eq(variables['Build.SourceBranchName'], 'main') 90 | env: 91 | GITHUB_TOKEN: $(GitHubSecret) 92 | -------------------------------------------------------------------------------- /.config/1espt/PipelineAutobaseliningConfig.yml: -------------------------------------------------------------------------------- 1 | ## DO NOT MODIFY THIS FILE MANUALLY. This is part of auto-baselining from 1ES Pipeline Templates. Go to [https://aka.ms/1espt-autobaselining] for more details. 
2 | 3 | pipelines: 4 | 17236: 5 | retail: 6 | source: 7 | credscan: 8 | lastModifiedDate: 2024-09-09 9 | eslint: 10 | lastModifiedDate: 2024-09-09 11 | psscriptanalyzer: 12 | lastModifiedDate: 2024-09-09 13 | armory: 14 | lastModifiedDate: 2024-09-09 15 | binary: 16 | credscan: 17 | lastModifiedDate: 2025-02-03 18 | binskim: 19 | lastModifiedDate: 2025-02-03 20 | spotbugs: 21 | lastModifiedDate: 2025-02-03 22 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Set default behavior to automatically normalize line endings. 2 | * text=auto 3 | 4 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Global rule: 2 | * @microsoft/azure-pipelines-tasks-and-agent @microsoft/azure-pipelines-platform 3 | -------------------------------------------------------------------------------- /.github/workflows/autoAssignABTT.yml: -------------------------------------------------------------------------------- 1 | name: Auto Assign ABTT to Project Board 2 | 3 | on: 4 | issues: 5 | types: 6 | - opened 7 | 8 | jobs: 9 | assign_one_project: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | issues: write 13 | name: Assign to ABTT Project 14 | steps: 15 | - name: "Add triage and area labels" 16 | uses: actions-ecosystem/action-add-labels@v1 17 | with: 18 | github_token: ${{ secrets.GITHUB_TOKEN }} 19 | labels: | 20 | Area: VsCodeExtension 21 | triage 22 | 23 | - name: "Assign issues with 'Area: ABTT' label to project board" 24 | uses: actions/add-to-project@v0.4.1 25 | with: 26 | project-url: https://github.com/orgs/microsoft/projects/755 27 | github-token: ${{ secrets.ABTT_TOKEN }} 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | coverage 2 | dist 3 | out 4 | node_modules 5 | .vscode-test/ 6 | *.vsix 7 | .DS_Store 8 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | registry=https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/ 2 | 3 | always-auth=true -------------------------------------------------------------------------------- /.vscode-test.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | 3 | import { defineConfig } from '@vscode/test-cli'; 4 | import path from 'path'; 5 | 6 | export default defineConfig({ 7 | files: 'out/test/**/*.test.js', 8 | workspaceFolder: path.join(import.meta.dirname, 'src', 'test', 'workspace'), 9 | mocha: { 10 | timeout: 100000, 11 | }, 12 | coverage: { 13 | reporter: ['cobertura', 'text', 'html'], 14 | output: './coverage', 15 | } 16 | }); 17 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // See http://go.microsoft.com/fwlink/?LinkId=827846 3 | // for the documentation about the extensions.json format 4 | "recommendations": [ 5 | "dbaeumer.vscode-eslint", 6 | "ms-vscode.extension-test-runner" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- 
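For reference, the `.vscode-test.mjs` configuration above is what the `@vscode/test-cli` runner consumes: it points the test runner at the compiled tests under `out/test/**/*.test.js` and at the sample workspace in `src/test/workspace`. A minimal local sketch of exercising it, using the npm scripts that CI runs in `common-steps.yml` earlier in this dump (the link between `npm run test` and the test CLI is an assumption here, not spelled out in these files):

```bash
# Rough local equivalent of the CI test steps in common-steps.yml.
npm ci           # install dependencies
npm run compile  # build first, as CI does
npm run test     # presumably drives @vscode/test-cli, which reads .vscode-test.mjs above
```

On a headless Linux agent, CI additionally starts `Xvfb` and sets `DISPLAY` before the test step, since the tests launch a real VS Code window.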
/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | // A launch configuration that compiles the extension and then opens it inside a new window 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | { 6 | "version": "0.2.0", 7 | "configurations": [ 8 | { 9 | "name": "Extension", 10 | "type": "extensionHost", 11 | "request": "launch", 12 | "runtimeExecutable": "${execPath}", 13 | "args": [ 14 | "--extensionDevelopmentPath=${workspaceFolder}" 15 | ], 16 | "outFiles": [ 17 | "${workspaceFolder}/dist/**/*.js" 18 | ] 19 | }, 20 | { 21 | "name": "Extension Tests", 22 | "type": "extensionHost", 23 | "request": "launch", 24 | "testConfiguration": "${workspaceFolder}/.vscode-test.mjs", 25 | "args": [ 26 | "${workspaceFolder}/src/test/workspace", 27 | "--extensionDevelopmentPath=${workspaceFolder}", 28 | "--extensionTestsPath=${workspaceFolder}/out/test/index" 29 | ], 30 | "outFiles": [ 31 | "${workspaceFolder}/dist/**/*.js", 32 | "${workspaceFolder}/out/**/*.js" 33 | ], 34 | "preLaunchTask": "npm: compile:test", 35 | }, 36 | { 37 | "name": "Attach to Server", 38 | "type": "node", 39 | "request": "attach", 40 | "port": 6009, 41 | "restart": true, 42 | "outFiles": [ 43 | "${workspaceFolder}/../azure-pipelines-language-server/language-server/out/**/*.js", 44 | "${workspaceFolder}/../azure-pipelines-language-server/language-service/lib/**/*.js" 45 | ] 46 | } 47 | ], 48 | "compounds": [ 49 | { 50 | "name": "Launch Extension & Attach to Server", 51 | "configurations": [ 52 | "Extension", 53 | "Attach to Server" 54 | ] 55 | } 56 | ] 57 | } 58 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | // Place your settings in this file to overwrite default and user settings.
2 | { 3 | "files.exclude": { 4 | "out": false // set this to true to hide the "out" folder with the compiled JS files 5 | }, 6 | "search.exclude": { 7 | "out": true // set this to false to include "out" folder in search results 8 | }, 9 | // Turn off tsc task auto detection since we have the necessary tasks as npm scripts 10 | "typescript.tsc.autoDetect": "off" 11 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | // See https://go.microsoft.com/fwlink/?LinkId=733558 2 | // for the documentation about the tasks.json format 3 | { 4 | "version": "2.0.0", 5 | "tasks": [ 6 | { 7 | "type": "npm", 8 | "script": "watch", 9 | "problemMatcher": "$tsc-watch", 10 | "isBackground": true, 11 | "presentation": { 12 | "reveal": "never" 13 | }, 14 | "group": { 15 | "kind": "build", 16 | "isDefault": true 17 | } 18 | } 19 | ] 20 | } -------------------------------------------------------------------------------- /.vscodeignore: -------------------------------------------------------------------------------- 1 | .gitattributes 2 | .gitignore 3 | .npmrc 4 | .vscode-test.mjs 5 | eslint.config.mjs 6 | tsconfig.json 7 | tsconfig.test.json 8 | webpack.config.mjs 9 | .azure-pipelines/ 10 | .github/ 11 | .vscode/ 12 | .vscode-test/ 13 | coverage/ 14 | dist/**/*.map 15 | examples/ 16 | node_modules/ 17 | out/ 18 | src/ 19 | tools/ 20 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to the Azure Pipelines extension will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](http://keepachangelog.com/). Versioning follows an internal Azure DevOps format that is not compatible with SemVer. 5 | 6 | ## 1.249.0 7 | ### Updated 8 | - M247 schema 9 | ### Fixed 10 | - Errors when trying to sign in for enhanced IntelliSense 11 | 12 | ## 1.247.3 13 | ### Added 14 | - Added ESRP Esrp CodeSigning step 15 | - Updated build generation to sign the VSIX 16 | - Updated upload steps to upload VSIX, signature & manifest 17 | 18 | ## 1.237.0 19 | ### Added 20 | - Added go-to-definition support for local templates (thanks @Stuart-Wilcox!) 21 | ### Updated 22 | - M235 schema 23 | 24 | ## 1.228.0 25 | ### Added 26 | - Added support for using [1ES Pipeline Template schema Intellisense](https://aka.ms/1espt) for users working on pipelines extending 1ES Pipeline Templates. This feature is available for users with `@microsoft.com` account only. 27 | 28 | ## 1.220.0 29 | ### Updated 30 | - M218 schema 31 | 32 | ## 1.208.0 33 | ### Added 34 | - Schema auto-detection now works for all workspaces 35 | ### Fixed 36 | - Updated dependencies to fix security vulnerabilities 37 | ### Updated 38 | - M206 schema 39 | 40 | ## 1.205.0 41 | ### Fixed 42 | - Fixed a bug in 1.204.0 that prevented schema auto-detection from working (thanks @krokofant!) 
43 | ### Updated 44 | - All templates have been updated to use the latest versions of agents and tooling 45 | - M203 schema 46 | 47 | ## 1.204.0 48 | ### Fixed 49 | - Fixed Configure Pipeline flow 50 | - Updated dependencies to fix security vulnerabilities 51 | ### Updated 52 | - M202 schema 53 | 54 | ## 1.202.0 55 | ### Added 56 | - Your organization's schema will now be auto-detected and used if your repo is hosted in Azure Repos 57 | ### Fixed 58 | - Updated dependencies to fix security vulnerabilities 59 | ### Updated 60 | - M200 schema 61 | 62 | ## 1.195.0 63 | ### Fixed 64 | - Emojis no longer cause validation to fail (thanks @PaulTaykalo!) 65 | - The "Azure Pipelines: Configure Pipeline" command should work again 66 | ### Updated 67 | - M195 schema 68 | 69 | ## 1.194.1 70 | ### Fixed 71 | - Actually includes the changes intended for 1.194.0 72 | 73 | ## 1.194.0 74 | ### Fixed 75 | - Property autocompletion no longer adds a duplicate colon if one already exists 76 | - Fixed two crashes around conditional variables 77 | ### Updated 78 | - M194 schema 79 | 80 | ## 1.191.0 81 | ### Added 82 | - Supports [template expressions](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/expressions?view=azure-devops)! 83 | - Note: while expressions will no longer be marked as errors, there may still be some incorrect warnings. 84 | - Many thanks to @50Wliu for this long-awaited feature. 85 | ### Updated 86 | - M190 schema 87 | 88 | ## 1.188.1 89 | ### Fixed 90 | - Fixed regression finding default schema 91 | 92 | ## 1.188.0 93 | ### Fixed 94 | - Improved startup performance by 80% 95 | - Reduced extension size by 90% 96 | - Resolved several Dependabot alerts 97 | ### Updated 98 | - M187 schema 99 | - `azure-pipelines.customSchemaFile` can now point to a remote URL, as long as it does not require authentication 100 | - Declared "limited" support for untrusted workspaces (all features will work except for `azure-pipelines.customSchemaFile`) 101 | 102 | ## 1.183.0 103 | ### Breaking change 104 | - Configuration namespace has moved. If you added `customSchemaFile` or had keybindings to commands, you'll need to update your config. Wherever it says `[azure-pipelines].thing`, it should now read `azure-pipelines.thing`. And if you assigned a keybinding to `configure-pipeline`, change it to `azure-pipelines.configure-pipeline` instead. Sorry for the inconvenience, but this fixes several bugs and yields a better config experience. (Thanks @50Wliu!) 105 | 106 | ### Fixed 107 | - Extension readme points to correct branch (thanks @AtOMiCNebula!) 108 | - Several dependabot alerts 109 | 110 | ## 1.182.0 111 | 112 | All of the material changes in this version were courtesy of @50Wliu. Thanks! 
113 | ### Fixed 114 | - Improve debuggability when working on extension + language server packages 115 | - Removed need to prompt for extension restart on schema change 116 | - Document the language client implementation 117 | 118 | ### Updated 119 | - M181 YAML schema 120 | 121 | ## 1.177.0 122 | ### Fixed 123 | - hopefully fixed some of the startup performance problems (hat tip to @50Wliu) 124 | ### Updated 125 | - M176 YAML schema 126 | 127 | ## 1.174.2 128 | ### Fixed 129 | - stopped passing null to `path.isAbsolute()` 130 | 131 | ## 1.174.1 132 | ### Added 133 | - allow relative paths to custom schema 134 | ### Fixed 135 | - over-notification when schema changes 136 | 137 | ## 1.174.0 138 | ### Fixed 139 | - updated schema to M174 140 | 141 | ## 1.170.0 142 | ### Added 143 | - Added an option to use a custom schema file 144 | ### Fixed 145 | - updated schema to M169 146 | 147 | ## 1.165.1 148 | ### Fixed 149 | - update a few dependencies 150 | 151 | ## 1.165.0 152 | ### Fixed 153 | - updated schema to M163 154 | - editor.autoIndent setting is updated from a bool to a string 155 | 156 | ## 1.157.5 157 | ### Added 158 | - Added YAML Templates and detection logic for Function App 159 | 160 | ## 1.157.4 161 | ### Added 162 | - Added an option to browse the pipeline targeting Azure Resource. 163 | 164 | ## 1.157.3 165 | ### Fixed 166 | - Azure Repos scenario for `Configure Pipeline` where url contains DefaultCollection 167 | 168 | ## 1.157.2 169 | ### Added 170 | - Added troubleshooting steps in README 171 | 172 | ## 1.157.1 173 | ### Fixed 174 | - Azure Repos scenario for `Configure Pipeline` 175 | 176 | ## 1.157.0 177 | ### Added 178 | - Added "Configure Pipeline" option in Command Palette (Ctrl+Shift+P) and File Explorer. This will configure a continuous integration (CI) and deployment (CD) pipeline to Azure Windows Web App 179 | 180 | ## 1.155.0 181 | ### Fixed 182 | - Updated to M155 schema including some new tasks 183 | - Several improvements to validation and auto-complete 184 | 185 | ## 1.152.0 186 | ### Added 187 | - Support for `stages` and other new schema 188 | 189 | ## 1.147.2 190 | ### Fixed 191 | - Partial support for expressions 192 | 193 | ## 1.147.1 194 | ### Fixed 195 | - Support aliases for task inputs 196 | 197 | ## 1.145.2 198 | ### Fixed 199 | - Identify required task inputs 200 | 201 | ## 1.145.1 202 | ### Fixed 203 | - updated to latest tasks in schema 204 | - allow expressions in some properties, including "condition" 205 | 206 | ## 1.145.0 207 | ### Fixed 208 | - link to correct GitHub repo in package.json 209 | - several YAML correctness bugs 210 | 211 | ## 1.144.0 212 | ### Fixed 213 | - LF vs CRLF line endings caused the validator to lose its place 214 | - removed several invalid auto-complete suggestions 215 | - enforce first property in some constructs 216 | 217 | ## 1.141.0 - 2018-09-05 218 | ### Added 219 | - Initial release 220 | - Syntax highlighting for Azure Pipelines files 221 | - Intellisense for Azure Pipelines files 222 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This project welcomes contributions and suggestions. Most contributions require you to agree to a 4 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us 5 | the rights to use your contribution. For details, visit https://cla.microsoft.com. 
6 | 7 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide 8 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions 9 | provided by the bot. You will only need to do this once across all repos using our CLA. 10 | 11 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 12 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or 13 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://dev.azure.com/mseng/PipelineTools/_apis/build/status%2FVSCode%20Extension%2Fvscode-extension-ci?repoName=microsoft%2Fazure-pipelines-vscode&branchName=main)](https://dev.azure.com/mseng/PipelineTools/_build/latest?definitionId=17236&repoName=microsoft%2Fazure-pipelines-vscode&branchName=main) 2 | 3 | # Azure Pipelines for VS Code 4 | 5 | [Get it on the VS Code Marketplace!](https://marketplace.visualstudio.com/items?itemName=ms-azure-devops.azure-pipelines) 6 | 7 | This VS Code extension adds syntax highlighting and autocompletion for Azure Pipelines YAML to VS Code. It also helps you set up continuous build and deployment for Azure WebApps without leaving VS Code. 8 | 9 | ## Validation 10 | 11 | Basic YAML validation is built in to VS Code, but now you can have syntax highlighting that's aware of the Pipelines YAML schema. This means that you get red squigglies if you say `tasks:` where you meant `task:`. IntelliSense is also schema-aware. Wherever you are in the file, press Ctrl-Space to see what options you have at that point. 12 | 13 | By default, the extension will highlight known Azure Pipelines files in the root of your workspace. You can change the language mode at the lower right to work with one file at a time. Click the language picker, then choose "Azure Pipelines". 
If you have files which should always use this extension, set your user or workspace settings to match those file paths with this extension. For example: 14 | 15 | ```json 16 | { 17 | "files.associations": { 18 | "**/ci/*.yml": "azure-pipelines" 19 | } 20 | } 21 | ``` 22 | 23 | ### Schema auto-detection 24 | 25 | Out of the box, the extension has a generic schema file that includes only in-box tasks. 26 | You probably have custom tasks installed in your organization. 27 | 28 | To provide the most relevant IntelliSense, the extension will automatically detect and use your organization's schema! All you need to do is follow the instructions when prompted. 29 | 30 | ### Using a different Microsoft Entra tenant 31 | 32 | The extension uses your account's default tenant to connect to Azure DevOps. 33 | Sometimes, this is not what you want as your Azure DevOps organization is linked to a different tenant. 34 | 35 | To get the extension to use the correct tenant, go to the [Tenant](vscode://settings/azure-pipelines.tenant) setting and enter the Microsoft Entra tenant ID that your organization uses. 36 | You can view which tenant your organization is connected to by going to `https://dev.azure.com/YOUR-ORG-HERE/_settings/organizationAad`. 37 | 38 | ### Specific schema 39 | 40 | If you need to use a specific schema, that is also possible. 41 | 42 | 1. Visit `https://dev.azure.com/YOUR-ORG-HERE/_apis/distributedtask/yamlschema` and save the output as `my-schema.json`. 43 | 2. Edit your workspace's `settings.json` to include this: 44 | ```json 45 | { 46 | "azure-pipelines.customSchemaFile": "./path/to/my-schema.json" 47 | } 48 | ``` 49 | 50 | ## Document formatting 51 | 52 | Since this extension defines a new file type ("`azure-pipelines`"), any YAML formatter you've installed no longer applies to pipelines documents. 53 | Hat tip to @mgexm and @dotnetcanuck for [sharing how they restored this functionality](https://github.com/microsoft/azure-pipelines-vscode/issues/209#issuecomment-718168926). 54 | We'll demonstrate with the [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) VS Code extension: 55 | 56 | Add this to your `settings.json`: 57 | ```json 58 | "[azure-pipelines]": { 59 | "editor.defaultFormatter": "esbenp.prettier-vscode" 60 | }, 61 | ``` 62 | 63 | Both format on save and the `Format document` command should now work! 64 | 65 | ## Telemetry 66 | 67 | VS Code collects usage data and sends it to Microsoft to help improve our products and services. Read our [privacy statement](https://go.microsoft.com/fwlink/?LinkID=528096&clcid=0x409) to learn more. If you don’t wish to send usage data to Microsoft, you can set the `telemetry.enableTelemetry` setting to `false`. Learn more in our [FAQ](https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting). 68 | 69 | ## Extension Development 70 | 71 | If you are only working on the extension (i.e. 
syntax highlighting, configure pipeline, and the language client): 72 | - Run `npm install` to install all necessary dependencies 73 | - Run `npm run watch` to automatically rebuild the extension whenever you make changes 74 | - Run the "Extension" debug configuration to launch a VS Code window using your modified version of the extension 75 | 76 | If you are also working on the language server: 77 | - Follow the first two steps above 78 | - Clone the [azure-pipelines-language-server](https://github.com/microsoft/azure-pipelines-language-server) repository alongside this repository 79 | - Run `npm link ../azure-pipelines-language-server/language-server` 80 | - Follow the instructions in the language server README to link the language service to the language server 81 | - Add the `azure-pipelines-language-server` folder to your VS Code workspace 82 | - Run the "Launch Extension & Attach to Server" debug configuration 83 | - Note: In order to attach to the server, the extension must be activated (in other words, make sure you are editing an Azure Pipelines file) 84 | - In case the attach request times out before the server can start, wait for it to start and then run the "Attach to Server" debug configuration 85 | 86 | # Contributing 87 | 88 | See [CONTRIBUTING.md](CONTRIBUTING.md) if you want to jump in! 89 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Releasing the extension 2 | 3 | 0. Find the current sprint using https://whatsprintis.it. 4 | 0. Work in a branch. I sometimes go with `ship-`, for example, `ship-191`. 5 | 0. Update the version to the current sprint number using `npm version --no-git-tag-version 1.THE_SPRINT_VERSION.0`. 6 | - Replace `1.THE_SPRINT_VERSION.0` with `patch` if you are doing a bugfix release. 7 | 0. Ensure the [CHANGELOG](CHANGELOG.md) is up to date. 8 | 0. Update the [service schema](#bumping-service-schema). 9 | 0. Create a PR on GitHub, mostly for tracking reasons. 10 | 0. Manually queue a [Release build](https://dev.azure.com/mseng/PipelineTools/_build?definitionId=17237) against your PR branch. 11 | - This will create a GitHub release at the commit you've specified! 12 | 0. Ship the resulting package to the [Marketplace](https://marketplace.visualstudio.com/manage/publishers/ms-azure-devops). 13 | - You can grab it from either the pipeline run or off GitHub itself. 14 | 0. Run `npm version --no-git-tag-version patch` so that packages produced by CI are treated as newer than the released version. 15 | 0. Push that change and merge the PR. You can now delete the branch. 16 | 17 | ## Bumping service schema 18 | 19 | 0. Go to a personal Azure DevOps organization that is not joined to a work-related AAD organization 20 | 0. Get the new schema from https://dev.azure.com/YOUR-PERSONAL-ORG/_apis/distributedtask/yamlschema 21 | 0. Replace `service-schema.json` with the results of that endpoint. 22 | 0. In VS Code, run `Format document` to keep the diff readable. 23 | 0. Update `$comment` with the Azure DevOps sprint info (you can see the sprint number by going to https://dev.azure.com/YOUR-PERSONAL-ORG/_home/about).
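As a concrete illustration of the fetch-and-format steps in "Bumping service schema" above, the download can also be scripted. This is only a sketch: it assumes `curl` and `jq` are available and that `YOUR-PERSONAL-ORG` is replaced with a real organization name; the steps above use a browser plus VS Code's `Format document` instead, which works just as well.

```bash
# Download the current YAML schema from a personal Azure DevOps organization
# and overwrite the checked-in service-schema.json with a formatted copy.
curl -fsSL "https://dev.azure.com/YOUR-PERSONAL-ORG/_apis/distributedtask/yamlschema" | jq '.' > service-schema.json

# Review the diff, then update $comment with the sprint info and commit.
git diff --stat service-schema.json
```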
24 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd). 
40 | 41 | -------------------------------------------------------------------------------- /assets/pipelines-file-icon.svg: -------------------------------------------------------------------------------- /assets/pipelines.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/ace8a40d53d11dfcdd9ac0dc8e83718e35fd3e8f/assets/pipelines.png -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | 3 | import eslint from '@eslint/js'; 4 | import tseslint from 'typescript-eslint'; 5 | 6 | export default tseslint.config( 7 | eslint.configs.recommended, 8 | ...tseslint.configs.strictTypeChecked, 9 | ...tseslint.configs.stylisticTypeChecked, 10 | { 11 | ignores: [ 12 | '.azure-pipelines/', 13 | '.vscode-test/', 14 | 'dist/', 15 | 'out/', 16 | ], 17 | }, 18 | { 19 | languageOptions: { 20 | parserOptions: { 21 | projectService: { 22 | allowDefaultProject: ['*.?(m)js'], 23 | }, 24 | tsconfigRootDir: import.meta.dirname, 25 | } 26 | }, 27 | rules: { 28 | // Allow numbers and booleans in template expressions 29 | '@typescript-eslint/restrict-template-expressions': ['error', { 30 | allowNumber: true, 31 | allowBoolean: true, 32 | }], 33 | // Always use `return await` in async functions 34 | '@typescript-eslint/return-await': ['error', 'always'], 35 | // Prefer using String.match 36 | '@typescript-eslint/prefer-regexp-exec': 'off', 37 | }, 38 | }, 39 | { 40 | files: ['**/*.?(m)js'], 41 | extends: [tseslint.configs.disableTypeChecked], 42 | }, 43 | ); 44 | -------------------------------------------------------------------------------- /examples/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "*.yml": "azure-pipelines", 4 | "**/*.yml": "azure-pipelines" 5 | } 6 | } -------------------------------------------------------------------------------- /examples/extracted/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "**/*.yml": "azure-pipelines" 4 | } 5 | } -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | cancelTimeoutInMinutes: 5 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | cancelTimeoutInMinutes: 5 2 | pool: myPool 3 | steps: 4 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromImpliedJob_LegacyServer.0.yml:
-------------------------------------------------------------------------------- 1 | server: 2 | cancelTimeoutInMinutes: 5 3 | steps: 4 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromImpliedJob_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | cancelTimeoutInMinutes: 5 2 | pool: server 3 | steps: 4 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | cancelTimeoutInMinutes: 5 5 | - job: job3 6 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: 5 | name: myPool 6 | cancelTimeoutInMinutes: 5 7 | - phase: job3 8 | queue: 9 | name: myPool 10 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | cancelTimeoutInMinutes: 5 5 | pool: myPool 6 | - job: job3 7 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] 8 | pool: myPool -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | server: 5 | cancelTimeoutInMinutes: 5 6 | steps: 7 | - task: Bash@3 8 | - phase: job3 9 | server: 10 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] 11 | steps: 12 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobCancelTimeoutInMinutes_FromLegacyPhase_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | cancelTimeoutInMinutes: 5 5 | pool: server 6 | steps: 7 | - task: Bash@3 8 | - job: job3 9 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] 10 | pool: server 11 | steps: 12 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobCondition_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | dependsOn: job1 5 | condition: variables.condition1 6 | - job: job3 7 | condition: variables.condition2 -------------------------------------------------------------------------------- /examples/extracted/JobCondition_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | dependsOn: job1 5 | condition: variables.condition1 6 | - phase: job3 7 | condition: variables.condition2 
-------------------------------------------------------------------------------- /examples/extracted/JobCondition_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | dependsOn: job1 5 | condition: variables.condition1 6 | - job: job3 7 | condition: variables.condition2 -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | pool: myPool 7 | container: dev1 8 | steps: 9 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | queue: 7 | name: myPool 8 | container: dev1 9 | steps: 10 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | pool: myPool 7 | container: dev1 8 | steps: 9 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | jobs: 7 | - job: job1 8 | - job: job2 9 | container: dev1 10 | - job: job3 11 | pool: myPool 12 | container: $[ variables.container ] 13 | steps: 14 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | phases: 7 | - phase: job1 8 | - phase: job2 9 | queue: 10 | name: myPool 11 | container: dev1 12 | - phase: job3 13 | queue: 14 | name: myPool 15 | container: $[ variables.container ] 16 | steps: 17 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContainer_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | type: docker 6 | jobs: 7 | - job: job1 8 | - job: job2 9 | pool: myPool 10 | container: dev1 11 | - job: job3 12 | pool: myPool 13 | container: $[ variables.container ] 14 | steps: 15 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobContinueOnError_FromImpliedJob_Boolean.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | continueOnError: true -------------------------------------------------------------------------------- /examples/extracted/JobContinueOnError_FromImpliedJob_Expression.0.yml: 
-------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | continueOnError: $[ foo ] -------------------------------------------------------------------------------- /examples/extracted/JobContinueOnError_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | continueOnError: true 5 | - job: job3 6 | continueOnError: $[ foo ] -------------------------------------------------------------------------------- /examples/extracted/JobDependsOn_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | dependsOn: job1 5 | - job: job3 6 | dependsOn: 7 | - job1 8 | - job2 -------------------------------------------------------------------------------- /examples/extracted/JobDependsOn_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | dependsOn: job1 5 | - phase: job3 6 | dependsOn: 7 | - job1 8 | - job2 -------------------------------------------------------------------------------- /examples/extracted/JobDependsOn_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | dependsOn: job1 5 | - job: job3 6 | dependsOn: 7 | - job1 8 | - job2 -------------------------------------------------------------------------------- /examples/extracted/JobDisplayName_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | displayName: My fancy job name -------------------------------------------------------------------------------- /examples/extracted/JobDisplayName_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: myJob 3 | displayName: My fancy job name -------------------------------------------------------------------------------- /examples/extracted/JobDisplayName_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | displayName: My fancy job name -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | pool: 2 | name: myPool 3 | demands: 4 | - demand1 -equals value1 5 | - demand2 -equals value2 6 | steps: 7 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | demands: 4 | - demand1 -equals value1 5 | - demand2 -equals value2 6 | steps: 7 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | pool: 2 | name: myPool 3 | demands: 4 | - demand1 -equals value1 5 | - demand2 -equals value2 6 | steps: 7 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromImpliedJob_LegacyServer.0.yml: 
-------------------------------------------------------------------------------- 1 | server: true 2 | steps: 3 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromImpliedJob_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | pool: server 2 | steps: 3 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | pool: pool2 5 | - job: job3 6 | pool: 7 | name: pool3 8 | - job: job4 9 | pool: 10 | name: pool4 11 | demands: one -eq one 12 | - job: job5 13 | pool: 14 | name: pool5 15 | demands: abc -eq one 16 | - job: job6 17 | pool: 18 | name: pool6 19 | demands: 20 | - demand1 -equals value1 21 | - demand2 -equals value2 22 | - job: job7 23 | pool: 24 | demands: 25 | - demand1 -equals value1 26 | - demand2 -equals value2 27 | - job: job8 28 | pool: 29 | name: azure pipelines 30 | vmImage: vs2017 -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromLegacyPhase_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: pool2 5 | - phase: job3 6 | queue: 7 | name: pool3 8 | - phase: job4 9 | queue: 10 | name: pool4 11 | demands: one -eq one 12 | - phase: job5 13 | queue: 14 | name: pool5 15 | demands: abc -eq one 16 | - phase: job6 17 | queue: 18 | name: pool6 19 | demands: 20 | - demand1 -equals value1 21 | - demand2 -equals value2 22 | - phase: job7 23 | queue: 24 | demands: 25 | - demand1 -equals value1 26 | - demand2 -equals value2 -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromLegacyPhase_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | pool: pool2 5 | - job: job3 6 | pool: pool3 7 | - job: job4 8 | pool: 9 | name: pool4 10 | demands: one -eq one 11 | - job: job5 12 | pool: 13 | name: pool5 14 | demands: abc -eq one 15 | - job: job6 16 | pool: 17 | name: pool6 18 | demands: 19 | - demand1 -equals value1 20 | - demand2 -equals value2 21 | - job: job7 22 | pool: 23 | demands: 24 | - demand1 -equals value1 25 | - demand2 -equals value2 -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromLegacyPhase_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | server: true -------------------------------------------------------------------------------- /examples/extracted/JobPool_FromLegacyPhase_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | pool: server -------------------------------------------------------------------------------- /examples/extracted/JobSteps_Bash.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - bash: echo hello from bash 3 | - bash: echo hello again from bash 4 | displayName: Fancy script 5 | name: fancyScript 6 | enabled: false 7 | condition: always() 8 | continueOnError: true 9 | timeoutInMinutes: 123 10 | failOnStderr: 
$(failOnStderrVariable) 11 | workingDirectory: $(workingDirectoryVariable) 12 | env: 13 | MY_VAR: value -------------------------------------------------------------------------------- /examples/extracted/JobSteps_Checkout.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | steps: 4 | - checkout: none 5 | - job: job2 6 | steps: 7 | - checkout: self 8 | - job: job3 9 | steps: 10 | - checkout: self 11 | clean: $(cleanVariable) 12 | fetchDepth: $(fetchDepthVariable) 13 | lfs: $(fetchDepthVariable) 14 | submodules: $(submodulesVariable) 15 | persistCredentials: $(persistCredentialsVariable) -------------------------------------------------------------------------------- /examples/extracted/JobSteps_CheckoutNone.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | steps: 4 | - checkout: none 5 | condition: true -------------------------------------------------------------------------------- /examples/extracted/JobSteps_CheckoutNone.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | steps: 4 | - checkout: none -------------------------------------------------------------------------------- /examples/extracted/JobSteps_CheckoutWithRepoDefined.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: self 3 | clean: true 4 | phases: 5 | - phase: job1 6 | steps: 7 | - checkout: none 8 | - phase: job2 9 | steps: 10 | - checkout: self 11 | - phase: job3 12 | steps: 13 | - checkout: self 14 | clean: $(cleanVariable) 15 | fetchDepth: $(fetchDepthVariable) 16 | lfs: $(fetchDepthVariable) 17 | submodules: $(submodulesVariable) 18 | persistCredentials: $(persistCredentialsVariable) -------------------------------------------------------------------------------- /examples/extracted/JobSteps_CheckoutWithRepoDefined.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: self 4 | clean: true 5 | jobs: 6 | - job: job1 7 | steps: 8 | - checkout: none 9 | - job: job2 10 | steps: 11 | - checkout: self 12 | - job: job3 13 | steps: 14 | - checkout: self 15 | clean: $(cleanVariable) 16 | fetchDepth: $(fetchDepthVariable) 17 | lfs: $(fetchDepthVariable) 18 | submodules: $(submodulesVariable) 19 | persistCredentials: $(persistCredentialsVariable) -------------------------------------------------------------------------------- /examples/extracted/JobSteps_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - task: Bash@3 3 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobSteps_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | steps: 4 | - task: Bash@3 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobSteps_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: myJob 3 | steps: 4 | - task: Bash@3 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobSteps_FromLegacyPhase.1.yml: 
-------------------------------------------------------------------------------- 1 | jobs: 2 | - job: myJob 3 | steps: 4 | - task: Bash@3 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobSteps_PowerShell.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - powershell: write-host 'hello from powershell' 3 | - powershell: write-host 'hello again from powershell' 4 | displayName: Fancy script 5 | name: fancyScript 6 | enabled: false 7 | condition: always() 8 | continueOnError: true 9 | timeoutInMinutes: 123 10 | errorActionPreference: $(errorActionPreferenceVariable) 11 | failOnStderr: $(failOnStderrVariable) 12 | ignoreLASTEXITCODE: $(ignoreLASTEXITCODEVariable) 13 | workingDirectory: $(workingDirectoryVariable) 14 | env: 15 | MY_VAR: value -------------------------------------------------------------------------------- /examples/extracted/JobSteps_Script.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hello from script 1 3 | - script: echo hello from script 2 4 | displayName: Fancy script 5 | name: fancyScript 6 | enabled: false 7 | condition: always() 8 | continueOnError: true 9 | timeoutInMinutes: 123 10 | failOnStderr: $(failOnStderrVariable) 11 | workingDirectory: $(workingDirectoryVariable) 12 | env: 13 | MY_VAR: value -------------------------------------------------------------------------------- /examples/extracted/JobSteps_Task.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - task: Bash@3 3 | - task: Bash@3 4 | displayName: Fancy task 5 | name: fancyTask 6 | enabled: false 7 | condition: always() 8 | continueOnError: true 9 | timeoutInMinutes: 123 10 | inputs: 11 | myInput: input value 12 | env: 13 | MY_VAR: val -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | matrix: 3 | x64_release: 4 | arch: x64 5 | config: release 6 | x86_debug: 7 | arch: x86 8 | config: debug 9 | steps: 10 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | matrix: 4 | x64_release: 5 | arch: x64 6 | config: release 7 | x86_debug: 8 | arch: x86 9 | config: debug 10 | parallel: 2 11 | steps: 12 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | matrix: 3 | x64_release: 4 | arch: x64 5 | config: release 6 | x86_debug: 7 | arch: x86 8 | config: debug 9 | maxParallel: 2 10 | pool: myPool 11 | steps: 12 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromImpliedJob_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | server: 2 | matrix: 3 | x64_release: 4 | arch: x64 5 | config: release 6 | x86_debug: 7 | arch: x86 8 | config: debug 9 | parallel: 2 10 | steps: 11 | - task: Bash@3 
-------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromImpliedJob_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | matrix: 3 | x64_release: 4 | arch: x64 5 | config: release 6 | x86_debug: 7 | arch: x86 8 | config: debug 9 | maxParallel: 2 10 | pool: server 11 | steps: 12 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | matrix: 6 | x64_release: 7 | arch: x64 8 | config: release 9 | x86_debug: 10 | arch: x86 11 | config: debug 12 | - job: job3 13 | strategy: 14 | matrix: 15 | x64_release: 16 | arch: x64 17 | config: release 18 | x86_debug: 19 | arch: x86 20 | config: debug 21 | maxParallel: 2 22 | - job: job4 23 | strategy: 24 | matrix: $[ variables.matrix ] 25 | maxParallel: $[ variables.maxParallel ] 26 | steps: 27 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromLegacyPhase_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: 5 | name: myPool2 6 | matrix: 7 | x64_release: 8 | arch: x64 9 | config: release 10 | x86_debug: 11 | arch: x86 12 | config: debug 13 | - phase: job3 14 | queue: 15 | name: myPool3 16 | matrix: 17 | x64_release: 18 | arch: x64 19 | config: release 20 | x86_debug: 21 | arch: x86 22 | config: debug 23 | parallel: 2 24 | - phase: job4 25 | queue: 26 | name: job4 27 | matrix: $[ variables.matrix ] 28 | parallel: $[ variables.maxParallel ] 29 | steps: 30 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromLegacyPhase_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | matrix: 6 | x64_release: 7 | arch: x64 8 | config: release 9 | x86_debug: 10 | arch: x86 11 | config: debug 12 | pool: myPool2 13 | - job: job3 14 | strategy: 15 | matrix: 16 | x64_release: 17 | arch: x64 18 | config: release 19 | x86_debug: 20 | arch: x86 21 | config: debug 22 | maxParallel: 2 23 | pool: myPool3 24 | - job: job4 25 | strategy: 26 | matrix: $[ variables.matrix ] 27 | maxParallel: $[ variables.maxParallel ] 28 | pool: job4 29 | steps: 30 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromLegacyPhase_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | server: 5 | matrix: 6 | x64_release: 7 | arch: x64 8 | config: release 9 | x86_debug: 10 | arch: x86 11 | config: debug 12 | - phase: job3 13 | server: 14 | matrix: 15 | x64_release: 16 | arch: x64 17 | config: release 18 | x86_debug: 19 | arch: x86 20 | config: debug 21 | parallel: 2 22 | - phase: job4 23 | server: 24 | matrix: $[ variables.matrix ] 25 | parallel: $[ variables.maxParallel ] 26 | steps: 27 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Matrix_FromLegacyPhase_LegacyServer.1.yml: 
-------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | matrix: 6 | x64_release: 7 | arch: x64 8 | config: release 9 | x86_debug: 10 | arch: x86 11 | config: debug 12 | pool: server 13 | - job: job3 14 | strategy: 15 | matrix: 16 | x64_release: 17 | arch: x64 18 | config: release 19 | x86_debug: 20 | arch: x86 21 | config: debug 22 | maxParallel: 2 23 | pool: server 24 | - job: job4 25 | strategy: 26 | matrix: $[ variables.matrix ] 27 | maxParallel: $[ variables.maxParallel ] 28 | pool: server 29 | steps: 30 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | parallel: 5 3 | steps: 4 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | parallel: 5 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | parallel: 5 3 | pool: myPool 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromImpliedJob_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | server: 2 | parallel: 5 3 | steps: 4 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromImpliedJob_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | strategy: 2 | parallel: 5 3 | pool: server 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | parallel: 5 6 | - job: job3 7 | strategy: 8 | parallel: $[ variables.parallel ] -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromLegacyPhase_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: 5 | name: myPool2 6 | parallel: 5 7 | - phase: job3 8 | queue: 9 | name: myPool3 10 | parallel: $[ variables.parallel ] -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromLegacyPhase_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | parallel: 5 6 | pool: myPool2 7 | - job: job3 8 | strategy: 9 | parallel: $[ variables.parallel ] 10 | pool: myPool3 -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromLegacyPhase_LegacyServer.0.yml: 
-------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | server: 5 | parallel: 5 6 | - phase: job3 7 | server: 8 | parallel: $[ variables.parallel ] -------------------------------------------------------------------------------- /examples/extracted/JobStrategy_Parallel_FromLegacyPhase_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | strategy: 5 | parallel: 5 6 | pool: server 7 | - job: job3 8 | strategy: 9 | parallel: $[ variables.parallel ] 10 | pool: server -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | timeoutInMinutes: 5 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromImpliedJob_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | timeoutInMinutes: 5 2 | pool: myPool 3 | steps: 4 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromImpliedJob_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | server: 2 | timeoutInMinutes: 5 3 | steps: 4 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromImpliedJob_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | timeoutInMinutes: 5 2 | pool: server 3 | steps: 4 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | timeoutInMinutes: 5 5 | - job: job3 6 | timeoutInMinutes: $[ variables.theTimeoutInMinutes ] -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromLegacyPhase_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: 5 | name: myPool 6 | timeoutInMinutes: 5 7 | - phase: job3 8 | queue: 9 | name: myPool 10 | timeoutInMinutes: $[ variables.theTimeoutInMinutes ] -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromLegacyPhase_LegacyQueue.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | timeoutInMinutes: 5 5 | pool: myPool 6 | - job: job3 7 | timeoutInMinutes: $[ variables.theTimeoutInMinutes ] 8 | pool: myPool -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromLegacyPhase_LegacyServer.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | server: 5 | timeoutInMinutes: 5 6 | steps: 7 | - task: Bash@3 8 | - phase: job3 9 | server: 10 | timeoutInMinutes: $[ variables.theTimeoutInMinutes ] 11 | steps: 12 | - task: 
Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobTimeoutInMinutes_FromLegacyPhase_LegacyServer.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | timeoutInMinutes: 5 5 | pool: server 6 | steps: 7 | - task: Bash@3 8 | - job: job3 9 | timeoutInMinutes: $[ variables.theTimeoutInMinutes ] 10 | pool: server 11 | steps: 12 | - task: Bash@3 -------------------------------------------------------------------------------- /examples/extracted/JobVariables_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | variables: 5 | variableOne: value1 6 | variableTwo: value2 7 | - job: job3 8 | variables: 9 | - name: variableOne 10 | value: value1 11 | - name: variableTwo 12 | value: value2 13 | - job: job4 14 | variables: 15 | - name: variableOne 16 | value: value1 17 | - group: myVariableGroup 18 | - name: variableTwo 19 | value: value2 -------------------------------------------------------------------------------- /examples/extracted/JobVariables_FromJob.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | variables: 5 | variableOne: value1 6 | variableTwo: value2 7 | - job: job3 8 | variables: 9 | variableOne: value1 10 | variableTwo: value2 11 | - job: job4 12 | variables: 13 | - name: variableOne 14 | value: value1 15 | - group: myVariableGroup 16 | - name: variableTwo 17 | value: value2 -------------------------------------------------------------------------------- /examples/extracted/JobVariables_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | variables: 5 | variableOne: value1 6 | variableTwo: value2 7 | - phase: job3 8 | variables: 9 | - name: variableOne 10 | value: value1 11 | - name: variableTwo 12 | value: value2 13 | - phase: job4 14 | variables: 15 | - name: variableOne 16 | value: value1 17 | - group: myVariableGroup 18 | - name: variableTwo 19 | value: value2 -------------------------------------------------------------------------------- /examples/extracted/JobVariables_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | variables: 5 | variableOne: value1 6 | variableTwo: value2 7 | - job: job3 8 | variables: 9 | variableOne: value1 10 | variableTwo: value2 11 | - job: job4 12 | variables: 13 | - name: variableOne 14 | value: value1 15 | - group: myVariableGroup 16 | - name: variableTwo 17 | value: value2 -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | workspace: 2 | clean: all 3 | steps: 4 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromImpliedJob_LegacyQueue.0.yml: -------------------------------------------------------------------------------- 1 | queue: 2 | name: myPool 3 | workspace: 4 | clean: all 5 | steps: 6 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromImpliedJob_LegacyQueue.1.yml: 
-------------------------------------------------------------------------------- 1 | pool: myPool 2 | workspace: 3 | clean: all 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromJob.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | workspace: 5 | clean: all 6 | steps: 7 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromLegacyPhase.0.yml: -------------------------------------------------------------------------------- 1 | phases: 2 | - phase: job1 3 | - phase: job2 4 | queue: 5 | name: myPool 6 | workspace: 7 | clean: all 8 | steps: 9 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/JobWorkspace_FromLegacyPhase.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - job: job2 4 | pool: myPool 5 | workspace: 6 | clean: all 7 | steps: 8 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/MaxFileSize.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/MaxFiles.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: steps1.yml 3 | - template: steps2.yml -------------------------------------------------------------------------------- /examples/extracted/MaxFiles.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi 1 -------------------------------------------------------------------------------- /examples/extracted/MaxFiles.2.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi 2 -------------------------------------------------------------------------------- /examples/extracted/MaxResultSize_AcrossFiles.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: steps.yml 3 | - template: steps.yml 4 | - template: error.yml -------------------------------------------------------------------------------- /examples/extracted/MaxResultSize_AcrossFiles.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: | 3 | 12345678901234567890123456789012345678901234567890 # 50 4 | 12345678901234567890123456789012345678901234567890 # 100 5 | 12345678901234567890123456789012345678901234567890 # 150 6 | 12345678901234567890123456789012345678901234567890 # 200 -------------------------------------------------------------------------------- /examples/extracted/PipelineName_ImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | name: my pipline name format 2 | steps: 3 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineName_ImpliedStage.0.yml: -------------------------------------------------------------------------------- 1 | name: my pipline name format 2 | jobs: 3 | - job: jobOne 4 | steps: 5 | - script: echo hello 
-------------------------------------------------------------------------------- /examples/extracted/PipelineName_ImpliedStage_LegacyPhases.0.yml: -------------------------------------------------------------------------------- 1 | name: my pipline name format 2 | phases: 3 | - phase: jobOne 4 | steps: 5 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineName_ImpliedStage_LegacyPhases.1.yml: -------------------------------------------------------------------------------- 1 | name: my pipline name format 2 | jobs: 3 | - job: jobOne 4 | steps: 5 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_BackCompat.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: self 3 | clean: true 4 | fetchDepth: 10 5 | lfs: true 6 | steps: 7 | - checkout: self -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_BackCompat.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: self 4 | clean: true 5 | fetchDepth: 10 6 | lfs: true 7 | steps: 8 | - checkout: self -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_BackCompat_OnlySupportsSelfRepo.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: foo -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_BackCompat_RestrictsObjects.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: self 3 | clean: 4 | - foo 5 | steps: 6 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_BackCompat_RestrictsUnknownProperties.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: self 3 | abc: def 4 | steps: 5 | - script: echo hi -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_Containers.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | containers: 3 | - container: dev1 4 | image: ubuntu:16.04 5 | - container: dev2 6 | image: ubuntu:17.10 7 | registry: privatedockerhub 8 | - container: dev3 9 | image: ubuntu:17.10 10 | options: --cpu-count 4 11 | - container: dev4 12 | image: ubuntu:17.10 13 | options: --hostname container-test --env test=foo --ip 192.168.0.1 14 | registry: privatedockerhub 15 | - container: dev5 16 | image: localimage:latest 17 | localImage: true 18 | options: --hostname container-test --env test=foo --ip 192.168.0.1 19 | registry: privatedockerhub -------------------------------------------------------------------------------- /examples/extracted/PipelineResources_Repositories.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: vso 4 | clean: true 5 | ref: main 6 | type: tfsgit 7 | - repository: vso.vc 8 | mappings: 9 | - localPath: $(Agent.SourcesDir) 10 | serverPath: $/Foo 11 | - cloak: true 12 | serverPath: $/Foo/Bar 13 | type: tfsversioncontrol 14 | steps: 15 | - task: 
Bash@3 -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger.0.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | batch: true 3 | branches: 4 | include: 5 | - main 6 | - releases/* 7 | exclude: 8 | - releases/old* 9 | paths: 10 | include: 11 | - '*' 12 | exclude: 13 | - readme.md 14 | - docs/* 15 | steps: 16 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger_Minimal.0.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | - main 3 | - releases/* 4 | steps: 5 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger_None.0.yml: -------------------------------------------------------------------------------- 1 | trigger: none -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger_None.1.yml: -------------------------------------------------------------------------------- 1 | trigger: none -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger_RetrieveTriggersOnly.0.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | - main 3 | steps: 4 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineTrigger_RetrieveTriggersOnly.1.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | - main -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Mapping_ImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | steps: 4 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Mapping_ImpliedStage.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | jobs: 4 | - job: jobOne 5 | steps: 6 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Mapping_ImpliedStage_LegacyPhases.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | phases: 4 | - phase: jobOne 5 | steps: 6 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Mapping_ImpliedStage_LegacyPhases.1.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | jobs: 4 | - job: jobOne 5 | steps: 6 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedJob.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | - name: var1 3 | value: val1 4 | steps: 5 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedJob.1.yml: 
-------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | steps: 4 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedStage.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | - name: var1 3 | value: val1 4 | jobs: 5 | - job: jobOne 6 | steps: 7 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedStage.1.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | jobs: 4 | - job: jobOne 5 | steps: 6 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedStage_LegacyPhases.0.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | - name: var1 3 | value: val1 4 | phases: 5 | - phase: jobOne 6 | steps: 7 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelineVariables_Sequence_ImpliedStage_LegacyPhases.1.yml: -------------------------------------------------------------------------------- 1 | variables: 2 | var1: val1 3 | jobs: 4 | - job: jobOne 5 | steps: 6 | - script: echo hello -------------------------------------------------------------------------------- /examples/extracted/PipelinesResources_Repositories_CheckoutOptions.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: self 4 | checkoutOptions: 5 | clean: true 6 | fetchDepth: 10 7 | lfs: true 8 | submodules: true 9 | steps: 10 | - checkout: self -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToAnotherRepository.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: vso 4 | clean: true 5 | ref: main 6 | type: tfsgit 7 | steps: 8 | - task: Bash@3 9 | - template: my-steps.yml@vso -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToAnotherRepository.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: this one is wrong! -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToAnotherRepository.2.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: this one is right! -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToAnotherRepository.3.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: vso 4 | clean: true 5 | ref: main 6 | type: tfsgit 7 | steps: 8 | - task: Bash@3 9 | - script: this one is right! 
-------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToSelfRepository.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: vso 4 | clean: true 5 | ref: main 6 | type: tfsgit 7 | steps: 8 | - task: Bash@3 9 | - template: my-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToSelfRepository.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: this one is right! -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToSelfRepository.2.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: this one is wrong! -------------------------------------------------------------------------------- /examples/extracted/Templates_General_ReferenceToSelfRepository.3.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: vso 4 | clean: true 5 | ref: main 6 | type: tfsgit 7 | steps: 8 | - task: Bash@3 9 | - script: this one is right! -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_CanReferenceJobsTemplate.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: my-jobs.yml 3 | parameters: 4 | env: 5 | entryfile1: aaa 6 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_CanReferenceStepsTemplate.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: my-jobs.yml 3 | parameters: 4 | env: 5 | ccc: ddd 6 | eee: fff -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_CanSpecifySelfSource.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: my-jobs.yml@self 3 | parameters: 4 | env: 5 | entryfile1: aaa 6 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_CannotDoubleHopRepositories.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | jobs: 5 | - template: my-jobs.yml@tools 6 | parameters: 7 | env: 8 | entryfile1: aaa 9 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_CannotSpecifySourceWithinAnotherRepository.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | jobs: 5 | - template: my-jobs.yml@tools 6 | parameters: 7 | env: 8 | entryfile1: aaa 9 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_DoesNotLoadTemplateWhenAlreadyHasErrors.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: error.yml 3 | - template: error.yml 
-------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | jobs: 5 | - template: aaa-jobs.yml 6 | - template: tools-entry-dir\tools-jobs.yml@tools 7 | - template: zzz-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.11.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: ttt 3 | steps: 4 | - script: echo hi from ttt-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.12.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | jobs: 5 | - job: bbb 6 | steps: 7 | - script: echo hi from bbb-jobs.yml 8 | - job: ttt 9 | steps: 10 | - script: echo hi from ttt-jobs.yml 11 | - job: zzz 12 | steps: 13 | - script: echo hi from zzz-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.3.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: bbb-dir\bbb-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.5.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: bbb 3 | steps: 4 | - script: echo hi from bbb-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.7.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: zzz 3 | steps: 4 | - script: echo hi from zzz-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_FollowsCurrentDirectory.9.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: ttt-dir\ttt-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_LegacyPhases.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - template: my-jobs.yml 3 | parameters: 4 | env: 5 | ccc: ddd 6 | eee: fff -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | steps: 4 | - script: echo hello from entry file 1 5 | - template: my-jobs.yml 6 | - job: job2 7 | steps: 8 | - script: echo hello from entry file 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: nestedJob1 3 | steps: 4 | - script: echo hello from template job 1 5 | - job: nestedJob2 6 | steps: 7 | - script: echo hello from template job 2 
-------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder.2.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | steps: 4 | - script: echo hello from entry file 1 5 | - job: nestedJob1 6 | steps: 7 | - script: echo hello from template job 1 8 | - job: nestedJob2 9 | steps: 10 | - script: echo hello from template job 2 11 | - job: job2 12 | steps: 13 | - script: echo hello from entry file 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder_NestedTemplates.0.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: nestedJob1 3 | - job: nestedJob2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder_NestedTemplates.1.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: job1 3 | - template: nested-jobs.yml 4 | - job: job2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder_NestedTemplates.2.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: 1 3 | - template: jobs.yml 4 | - job: 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_PreservesJobOrder_NestedTemplates.3.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | - job: 1 3 | - job: job1 4 | - job: nestedJob1 5 | - job: nestedJob2 6 | - job: job2 7 | - job: 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_JobTemplates_RequiresLiteralKeys.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-jobs.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_CanReferenceStepsTemplate.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | env: 5 | entryfile1: aaa 6 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_CanSpecifySelfSource.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml@self 3 | parameters: 4 | env: 5 | entryfile1: aaa 6 | entryfile2: bbb -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_CannotDoubleHopRepositories.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | - repository: tools2 5 | steps: 6 | - template: my-steps.yml@tools -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_CannotSpecifySourceWithinAnotherRepository.0.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | steps: 5 | - template: my-steps.yml@tools 
-------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_ConditionallyInsertMapping.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | insertExtraEnv1: true -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_ConditionallyInsertSequence.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | sign: true -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_DefaultParameters_RestrictExpressions.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_DoesNotLoadTemplateWhenAlreadyHasErrors.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: error.yml 3 | - template: error.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_EvaluateMapping.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | env: 5 | var1: val1 6 | var2: val2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_EvaluateSequence.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_EvaluatelLiteral.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | greeting1: echo hi john doe -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.1.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | steps: 5 | - template: aaa-steps.yml 6 | - template: tools-entry-dir\tools-steps.yml@tools 7 | - template: zzz-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.11.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi from ttt-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.12.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | repositories: 3 | - repository: tools 4 | steps: 5 | - script: echo hi from bbb-steps.yml 6 | - script: echo hi from ttt-steps.yml 7 | - script: echo hi from zzz-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.3.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: 
bbb-dir\bbb-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.5.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi from bbb-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.7.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hi from zzz-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_FollowsCurrentDirectory.9.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: ttt-dir\ttt-steps.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_InsertMapping.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | env: 5 | ccc: ddd 6 | eee: fff -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_InsertSequence.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml 3 | parameters: 4 | preBuild: 5 | - script: pre-build 1 6 | - script: pre-build 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hello from entry file 1 3 | - template: my-steps.yml 4 | - script: echo hello from entry file 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hello from template step 1 3 | - script: echo hello from template step 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder.2.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo hello from entry file 1 3 | - script: echo hello from template step 1 4 | - script: echo hello from template step 2 5 | - script: echo hello from entry file 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder_NestedTemplates.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo nested-steps 1 3 | - script: echo nested-steps 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder_NestedTemplates.1.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo steps 1 3 | - template: nested-steps.yml 4 | - script: echo steps 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder_NestedTemplates.2.yml: 
-------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo 1 3 | - template: steps.yml 4 | - script: echo 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_PreservesStepOrder_NestedTemplates.3.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - script: echo 1 3 | - script: echo steps 1 4 | - script: echo nested-steps 1 5 | - script: echo nested-steps 2 6 | - script: echo steps 2 7 | - script: echo 2 -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_RecursionDoesNotStackOverflow.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: pipeline.yml -------------------------------------------------------------------------------- /examples/extracted/Templates_StepTemplates_RequiresLiteralKeys.0.yml: -------------------------------------------------------------------------------- 1 | steps: 2 | - template: my-steps.yml -------------------------------------------------------------------------------- /examples/templateMappingInsert.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | variables: {} 3 | 4 | phases: 5 | - phase: build 6 | variables: 7 | configuration: debug 8 | arch: x86 9 | ${{ insert }}: ${{ parameters.variables }} 10 | steps: 11 | - task: MSBuild@1 12 | - task: VSTest@2 -------------------------------------------------------------------------------- /examples/templateSequenceConditionalInsert.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | toolset: msbuild 3 | 4 | steps: 5 | # msbuild 6 | - ${{ if eq(parameters.toolset, 'msbuild') }}: 7 | - task: MSBuild@1 8 | - task: VSTest@2 9 | 10 | # dotnet 11 | - ${{ if eq(parameters.toolset, 'dotnet') }}: 12 | - task: DotNetCoreCLI@1 13 | inputs: 14 | command: build 15 | - task: DotNetCoreCLI@1 16 | inputs: 17 | command: test -------------------------------------------------------------------------------- /examples/templateSequenceInsert.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | preBuild: [] 3 | preTest: [] 4 | preSign: [] 5 | 6 | phases: 7 | - phase: Build 8 | queue: Hosted VS2017 9 | steps: 10 | - script: cred-scan 11 | - ${{ parameters.preBuild }} 12 | - task: MSBuild@1 13 | - ${{ parameters.preTest }} 14 | - task: VSTest@2 15 | - ${{ parameters.preSign }} 16 | - script: sign -------------------------------------------------------------------------------- /language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | "lineComment": "#" 4 | }, 5 | "brackets": [ 6 | ["{", "}"], 7 | ["[", "]"], 8 | ["(", ")"] 9 | ], 10 | "autoClosingPairs": [ 11 | ["{", "}"], 12 | ["[", "]"], 13 | ["(", ")"], 14 | ["\"", "\""], 15 | ["'", "'"] 16 | ], 17 | "surroundingPairs": [ 18 | ["{", "}"], 19 | ["[", "]"], 20 | ["(", ")"], 21 | ["\"", "\""], 22 | ["'", "'"] 23 | ], 24 | "folding": { 25 | "offSide": true 26 | }, 27 | "indentationRules": { 28 | "increaseIndentPattern": "^\\s*.*(:|-) ?(&\\w+)?(\\{[^}\"']*|\\([^)\"']*)?$", 29 | "decreaseIndentPattern": "^\\s+\\}$" 30 | } 31 | } -------------------------------------------------------------------------------- /package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "azure-pipelines", 3 | "displayName": "Azure Pipelines", 4 | "description": "Syntax highlighting, IntelliSense, and more for Azure Pipelines YAML", 5 | "version": "1.249.0", 6 | "publisher": "ms-azure-devops", 7 | "aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217", 8 | "repository": { 9 | "type": "git", 10 | "url": "https://github.com/Microsoft/azure-pipelines-vscode" 11 | }, 12 | "homepage": "https://github.com/Microsoft/azure-pipelines-vscode/blob/main/README.md", 13 | "bugs": "https://github.com/Microsoft/azure-pipelines-vscode/issues/", 14 | "license": "MIT", 15 | "icon": "assets/pipelines.png", 16 | "galleryBanner": { 17 | "color": "#D4DCEC", 18 | "theme": "light" 19 | }, 20 | "engines": { 21 | "vscode": "^1.82.0" 22 | }, 23 | "categories": [ 24 | "Programming Languages", 25 | "Azure" 26 | ], 27 | "tags": [ 28 | "azure-pipelines", 29 | "Azure Pipelines", 30 | "YAML" 31 | ], 32 | "keywords": [ 33 | "YAML", 34 | "Azure Pipelines", 35 | "continuous integration", 36 | "CI/CD" 37 | ], 38 | "main": "./dist/extension", 39 | "capabilities": { 40 | "untrustedWorkspaces": { 41 | "supported": "limited", 42 | "restrictedConfigurations": [ 43 | "azure-pipelines.customSchemaFile" 44 | ] 45 | } 46 | }, 47 | "contributes": { 48 | "languages": [ 49 | { 50 | "id": "azure-pipelines", 51 | "configuration": "./language-configuration.json", 52 | "filenamePatterns": [ 53 | "azure-pipelines.{yml,yaml}", 54 | ".azure-pipelines.{yml,yaml}", 55 | "**/azure-pipelines/**/*.{yml,yaml}", 56 | "**/.azure-pipelines/**/*.{yml,yaml}", 57 | "**/.pipelines/**/*.{yml,yaml}", 58 | "vsts-ci.{yml,yaml}", 59 | ".vsts-ci.{yml,yaml}" 60 | ], 61 | "aliases": [ 62 | "Azure Pipelines" 63 | ], 64 | "icon": { 65 | "light": "./assets/pipelines-file-icon.svg", 66 | "dark": "./assets/pipelines-file-icon.svg" 67 | } 68 | } 69 | ], 70 | "grammars": [ 71 | { 72 | "language": "azure-pipelines", 73 | "scopeName": "source.yaml", 74 | "path": "./syntaxes/yaml.tmLanguage.json" 75 | } 76 | ], 77 | "configuration": { 78 | "title": "Azure Pipelines", 79 | "properties": { 80 | "azure-pipelines.1ESPipelineTemplatesSchemaFile": { 81 | "type": "boolean", 82 | "default": false, 83 | "description": "Use 1ES Pipeline Template schema file", 84 | "markdownDescription": "If enabled, [1ES Pipeline Template schema file](https://aka.ms/1espt) will take precedence over custom schema file when user is signed in with `@microsoft.com` account and the schema is available in the ADO organization." 85 | }, 86 | "azure-pipelines.customSchemaFile": { 87 | "type": "string", 88 | "description": "Use a different schema file", 89 | "scope": "machine-overridable" 90 | }, 91 | "azure-pipelines.tenant": { 92 | "type": "string", 93 | "description": "Microsoft Entra tenant ID to use when connecting to Azure DevOps. 
Leave empty to use your account's default tenant.", 94 | "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$|^$", 95 | "patternErrorMessage": "The tenant must be a valid GUID.", 96 | "scope": "resource" 97 | } 98 | } 99 | }, 100 | "configurationDefaults": { 101 | "[azure-pipelines]": { 102 | "editor.insertSpaces": true, 103 | "editor.tabSize": 2, 104 | "editor.quickSuggestions": { 105 | "other": true, 106 | "comments": false, 107 | "strings": true 108 | }, 109 | "editor.autoIndent": "full" 110 | } 111 | }, 112 | "commands": [ 113 | { 114 | "command": "azure-pipelines.reset-state", 115 | "title": "Reset 'do not ask again' messages", 116 | "category": "Azure Pipelines" 117 | } 118 | ] 119 | }, 120 | "extensionDependencies": [ 121 | "vscode.git" 122 | ], 123 | "scripts": { 124 | "vscode:prepublish": "npm run compile", 125 | "compile": "webpack --mode production --progress --color", 126 | "compile:dev": "webpack --mode development --progress --color", 127 | "compile:test": "tsc --project ./tsconfig.test.json", 128 | "lint": "eslint", 129 | "watch": "webpack --mode development --progress --color --watch", 130 | "test": "npm run compile:test && vscode-test" 131 | }, 132 | "devDependencies": { 133 | "@eslint/js": "^9.15.0", 134 | "@types/eslint__js": "^8.42.3", 135 | "@types/mocha": "^9.0.0", 136 | "@types/node": "~20.15.0", 137 | "@types/vscode": "~1.82.0", 138 | "@vscode/test-cli": "^0.0.10", 139 | "@vscode/test-electron": "^2.4.1", 140 | "eslint": "^9.15.0", 141 | "mocha": "^11.1.0", 142 | "ts-loader": "^8.0.14", 143 | "typescript": "~5.7.2", 144 | "typescript-eslint": "^8.16.0", 145 | "webpack": "^5.76.0", 146 | "webpack-cli": "^4.4.0" 147 | }, 148 | "dependencies": { 149 | "@vscode/extension-telemetry": "^0.5.1", 150 | "azure-devops-node-api": "^11.0.1", 151 | "azure-pipelines-language-server": "0.8.0", 152 | "vscode-languageclient": "^7.0.0", 153 | "vscode-uri": "^3.0.2" 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/clients/devOps/organizationsClient.ts: -------------------------------------------------------------------------------- 1 | import { ConnectionData } from 'azure-devops-node-api/interfaces/LocationsInterfaces'; 2 | 3 | import { telemetryHelper, extensionVersion } from '../../helpers/telemetryHelper'; 4 | 5 | export interface Organization { 6 | accountId: string; 7 | accountName: string; 8 | accountUri: string; 9 | properties: Record<string, unknown>; 10 | } 11 | 12 | export class OrganizationsClient { 13 | private organizations?: Organization[]; 14 | 15 | constructor(private token: string) { } 16 | 17 | public async listOrganizations(forceRefresh?: boolean): Promise<Organization[]> { 18 | if (this.organizations && !forceRefresh) { 19 | return this.organizations; 20 | } 21 | 22 | const { authenticatedUser } = await this.fetch<ConnectionData>("https://app.vssps.visualstudio.com/_apis/connectiondata"); 23 | if (authenticatedUser === undefined) { 24 | return []; 25 | } 26 | 27 | const { value: organizations } = await this.fetch<{ value: Organization[] }>(`https://app.vssps.visualstudio.com/_apis/accounts?memberId=${authenticatedUser.id}&api-version=7.0`); 28 | this.organizations = organizations.sort((org1, org2) => { 29 | const account1 = org1.accountName.toLowerCase(); 30 | const account2 = org2.accountName.toLowerCase(); 31 | if (account1 < account2) { 32 | return -1; 33 | } else if (account1 > account2) { 34 | return 1; 35 | } 36 | return 0; 37 | }); 38 | 39 | return this.organizations; 40 | } 41 | 42 | private async
fetch<T>(...[request, init]: Parameters<typeof fetch>): Promise<T> { 43 | const response = await fetch(request, { 44 | ...init, 45 | headers: { 46 | ...init?.headers, 47 | 'Authorization': `Bearer ${this.token}`, 48 | 'Content-Type': 'application/json', 49 | 'User-Agent': `azure-pipelines-vscode ${extensionVersion}`, 50 | 'X-TFS-Session': telemetryHelper.getJourneyId(), 51 | } 52 | }); 53 | return (await response.json()) as T; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/extension.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import * as path from 'path'; 7 | import * as vscode from 'vscode'; 8 | import * as languageclient from 'vscode-languageclient/node'; 9 | 10 | import * as logger from './logger'; 11 | import { getSchemaAssociation, locateSchemaFile, onDidSelectOrganization, resetDoNotAskState, SchemaAssociationNotification } from './schema-association-service'; 12 | import { schemaContributor, CUSTOM_SCHEMA_REQUEST, CUSTOM_CONTENT_REQUEST } from './schema-contributor'; 13 | import { telemetryHelper } from './helpers/telemetryHelper'; 14 | 15 | /** 16 | * The unique string that identifies the Azure Pipelines language. 17 | */ 18 | const LANGUAGE_IDENTIFIER = 'azure-pipelines'; 19 | 20 | /** 21 | * The document selector to use when deciding whether to activate Azure Pipelines-specific features. 22 | */ 23 | const DOCUMENT_SELECTOR = [ 24 | { language: LANGUAGE_IDENTIFIER, scheme: 'file' }, 25 | { language: LANGUAGE_IDENTIFIER, scheme: 'untitled' } 26 | ] 27 | 28 | export async function activate(context: vscode.ExtensionContext) { 29 | telemetryHelper.setTelemetry('isActivationEvent', 'true'); 30 | await telemetryHelper.callWithTelemetryAndErrorHandling('azurePipelines.activate', async () => { 31 | await activateYmlContributor(context); 32 | }); 33 | 34 | context.subscriptions.push(telemetryHelper); 35 | 36 | logger.log('Extension has been activated!', 'ExtensionActivated'); 37 | return schemaContributor; 38 | } 39 | 40 | async function activateYmlContributor(context: vscode.ExtensionContext) { 41 | const serverOptions: languageclient.ServerOptions = getServerOptions(context); 42 | const clientOptions: languageclient.LanguageClientOptions = getClientOptions(); 43 | const client = new languageclient.LanguageClient(LANGUAGE_IDENTIFIER, 'Azure Pipelines Language', serverOptions, clientOptions); 44 | 45 | const disposable = client.start(); 46 | context.subscriptions.push(disposable); 47 | 48 | // If this throws, the telemetry event in activate() will catch & log it 49 | await client.onReady(); 50 | 51 | // Fired whenever the server is about to validate a YAML file (e.g. on content change), 52 | // and allows us to return a custom schema to use for validation. 53 | client.onRequest(CUSTOM_SCHEMA_REQUEST, (resource: string) => { 54 | // TODO: Have a single instance for the extension but don't return a global from this namespace 55 | return schemaContributor.requestCustomSchema(resource); 56 | }); 57 | 58 | // Fired whenever the server encounters a URI scheme that it doesn't recognize, 59 | // and allows us to use the URI to determine the schema's content.
60 | client.onRequest(CUSTOM_CONTENT_REQUEST, (uri: string) => { 61 | return schemaContributor.requestCustomSchemaContent(uri); 62 | }); 63 | 64 | // TODO: Can we get rid of this since it's set in package.json? 65 | vscode.languages.setLanguageConfiguration(LANGUAGE_IDENTIFIER, { wordPattern: /("(?:[^\\"]*(?:\\.)?)*"?)|[^\s{}[\],:]+/ }); 66 | 67 | // Let the server know of any schema changes. 68 | context.subscriptions.push(vscode.workspace.onDidChangeConfiguration(async event => { 69 | if (event.affectsConfiguration('azure-pipelines.customSchemaFile') || 70 | event.affectsConfiguration('azure-pipelines.1ESPipelineTemplatesSchemaFile') || 71 | event.affectsConfiguration('azure-pipelines.tenant')) { 72 | await loadSchema(context, client); 73 | } 74 | })); 75 | 76 | // Load the schema if we were activated because an Azure Pipelines file. 77 | if (vscode.window.activeTextEditor?.document.languageId === LANGUAGE_IDENTIFIER) { 78 | await loadSchema(context, client); 79 | } 80 | 81 | // And subscribe to future open events, as well. 82 | context.subscriptions.push(vscode.window.onDidChangeActiveTextEditor(async () => { 83 | await loadSchema(context, client); 84 | })); 85 | 86 | // Or if the active editor's language changes. 87 | context.subscriptions.push(vscode.workspace.onDidOpenTextDocument(async textDocument => { 88 | // Ensure this event is due to a language change. 89 | // Since onDidOpenTextDocument is fired *before* activeTextEditor changes, 90 | // if the URIs are the same we know that the new text document must be 91 | // due to a language change. 92 | if (textDocument.uri !== vscode.window.activeTextEditor?.document.uri) { 93 | return; 94 | } 95 | 96 | await loadSchema(context, client); 97 | })); 98 | 99 | // Re-request the schema when sessions change since auto-detection is dependent on 100 | // being able to query ADO organizations, check if 1ESPT schema can be used using session credentials. 101 | context.subscriptions.push(vscode.authentication.onDidChangeSessions(async session => { 102 | if (session.provider.id === 'microsoft') { 103 | await loadSchema(context, client); 104 | } 105 | })); 106 | 107 | // We now have an organization for a non-Azure Repo folder, 108 | // so we can try auto-detecting the schema again. 109 | context.subscriptions.push(onDidSelectOrganization(async workspaceFolder => { 110 | await loadSchema(context, client, workspaceFolder); 111 | })); 112 | 113 | // eslint-disable-next-line @typescript-eslint/no-confusing-void-expression 114 | context.subscriptions.push(vscode.commands.registerCommand("azure-pipelines.reset-state", async () => await resetDoNotAskState(context))); 115 | } 116 | 117 | // Find the schema and notify the server. 
118 | async function loadSchema( 119 | context: vscode.ExtensionContext, 120 | client: languageclient.LanguageClient, 121 | workspaceFolder?: vscode.WorkspaceFolder): Promise { 122 | if (workspaceFolder === undefined) { 123 | const textDocument = vscode.window.activeTextEditor?.document; 124 | if (textDocument?.languageId !== LANGUAGE_IDENTIFIER) { 125 | return; 126 | } 127 | 128 | workspaceFolder = vscode.workspace.getWorkspaceFolder(textDocument.uri); 129 | } 130 | 131 | const schemaFilePath = await locateSchemaFile(context, workspaceFolder); 132 | const schema = getSchemaAssociation(schemaFilePath); 133 | client.sendNotification(SchemaAssociationNotification.type, schema); 134 | } 135 | 136 | function getServerOptions(context: vscode.ExtensionContext): languageclient.ServerOptions { 137 | // TODO: Figure out a way to get sourcemaps working with webpack so that we can always 138 | // use the webpacked version. 139 | const languageServerPath = context.extensionMode === vscode.ExtensionMode.Development ? 140 | context.asAbsolutePath(path.join('node_modules', 'azure-pipelines-language-server', 'out', 'server.js')) : 141 | context.asAbsolutePath(path.join('dist', 'server.js')); 142 | 143 | return { 144 | run: { module: languageServerPath, transport: languageclient.TransportKind.ipc }, 145 | debug: { module: languageServerPath, transport: languageclient.TransportKind.ipc, options: { execArgv: ["--nolazy", "--inspect=6009"] } } 146 | }; 147 | } 148 | 149 | function getClientOptions(): languageclient.LanguageClientOptions { 150 | return { 151 | // Register the server for Azure Pipelines documents 152 | documentSelector: DOCUMENT_SELECTOR, 153 | synchronize: { 154 | // TODO: Switch to handling the workspace/configuration request 155 | configurationSection: ['yaml', 'http.proxy', 'http.proxyStrictSSL'], 156 | // Notify the server about file changes to YAML files in the workspace 157 | fileEvents: [ 158 | vscode.workspace.createFileSystemWatcher('**/*.?(e)y?(a)ml') 159 | ] 160 | }, 161 | }; 162 | } 163 | 164 | // this method is called when your extension is deactivated 165 | export function deactivate() { 166 | telemetryHelper.dispose(); 167 | } 168 | -------------------------------------------------------------------------------- /src/extensionApis.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import * as Messages from './messages'; 3 | import { API, GitExtension } from './typings/git'; 4 | 5 | let gitExtensionApi: API | undefined; 6 | export async function getGitExtensionApi(): Promise { 7 | if (gitExtensionApi === undefined) { 8 | const gitExtension = vscode.extensions.getExtension("vscode.git"); 9 | if (!gitExtension) { 10 | throw new Error(Messages.gitExtensionUnavailable); 11 | } 12 | 13 | if (!gitExtension.isActive) { 14 | await gitExtension.activate(); 15 | } 16 | 17 | if (!gitExtension.exports.enabled) { 18 | throw new Error(Messages.gitExtensionNotEnabled); 19 | } 20 | 21 | return gitExtension.exports.getAPI(1); 22 | } 23 | 24 | return gitExtensionApi; 25 | } 26 | -------------------------------------------------------------------------------- /src/helpers/azureDevOpsHelper.ts: -------------------------------------------------------------------------------- 1 | import * as Messages from '../messages'; 2 | 3 | // https://dev.azure.com/ OR https://org@dev.azure.com/ 4 | const AzureReposUrl = 'dev.azure.com/'; 5 | 6 | // git@ssh.dev.azure.com:v3/ 7 | const SSHAzureReposUrl = 'ssh.dev.azure.com:v3/'; 8 | 9 | // 
https://org.visualstudio.com/ 10 | const VSOUrl = '.visualstudio.com/'; 11 | 12 | // org@vs-ssh.visualstudio.com:v3/ 13 | const SSHVsoReposUrl = 'vs-ssh.visualstudio.com:v3/'; 14 | 15 | export function isAzureReposUrl(remoteUrl: string): boolean { 16 | return remoteUrl.includes(AzureReposUrl) || 17 | remoteUrl.includes(VSOUrl) || 18 | remoteUrl.includes(SSHAzureReposUrl) || 19 | remoteUrl.includes(SSHVsoReposUrl); 20 | } 21 | 22 | /** 23 | * Gets the organization name, project name, and repository name from a given Azure Repos URL. 24 | * 25 | * Note that Azure Repos supports _many_ formats. 26 | * Here are all the ones we know of: 27 | * * New-style HTTPS: https://dev.azure.com/<organization>/<project>/_git/<repository> 28 | * * Old-style HTTPS: https://<organization>.visualstudio.com/<project>/_git/<repository> 29 | * * New-style shorthand HTTPS: https://dev.azure.com/<organization>/_git/<repository> 30 | * * Old-style shorthand HTTPS: https://<organization>.visualstudio.com/_git/<repository> 31 | * * Old-style default collection HTTPS: https://<organization>.visualstudio.com/DefaultCollection/<project>/_git/<repository> 32 | * * Old-style default collection shorthand HTTPS: https://<organization>.visualstudio.com/DefaultCollection/_git/<repository> 33 | * * New-style SSH: git@ssh.dev.azure.com:v3/<organization>/<project>/<repository> 34 | * * Old-style SSH: <organization>@vs-ssh.visualstudio.com:v3/<organization>/<project>/<repository> 35 | * 36 | * @param remoteUrl The Azure Repos URL to parse. 37 | * @returns Details about the URL. 38 | */ 39 | export function getRepositoryDetailsFromRemoteUrl(remoteUrl: string): { organizationName: string, projectName: string, repositoryName: string } { 40 | if (remoteUrl.includes(AzureReposUrl)) { 41 | const part = remoteUrl.substring(remoteUrl.indexOf(AzureReposUrl) + AzureReposUrl.length); 42 | const parts = part.split('/'); 43 | 44 | if (parts.length === 3) { 45 | // Shorthand URL: project & repository are the same, project is not specified. 46 | // https://dev.azure.com/<organization>/_git/<repository> 47 | return { 48 | organizationName: parts[0].trim(), 49 | projectName: parts[2].trim(), 50 | repositoryName: parts[2].trim() 51 | }; 52 | } 53 | 54 | if (parts.length !== 4) { 55 | throw new Error(Messages.failedToDetermineAzureRepoDetails); 56 | } 57 | 58 | // https://dev.azure.com/<organization>/<project>/_git/<repository> 59 | return { 60 | organizationName: parts[0].trim(), 61 | projectName: parts[1].trim(), 62 | repositoryName: parts[3].trim() 63 | }; 64 | } else if (remoteUrl.includes(VSOUrl)) { 65 | const part = remoteUrl.substring(remoteUrl.indexOf(VSOUrl) + VSOUrl.length); 66 | const organizationName = remoteUrl.substring(remoteUrl.indexOf('https://') + 'https://'.length, remoteUrl.indexOf('.visualstudio.com')); 67 | const parts = part.split('/'); 68 | 69 | if (parts[0].toLowerCase() === 'defaultcollection') { 70 | // Remove DefaultCollection from the URL. 71 | // Luckily, projects can't be named DefaultCollection, so this is always safe. 72 | parts.shift(); 73 | } 74 | 75 | if (parts.length === 2) { 76 | // Shorthand URL: project & repository are the same, project is not specified. 77 | // https://<organization>.visualstudio.com/_git/<repository> 78 | return { 79 | organizationName: organizationName, 80 | projectName: parts[1].trim(), 81 | repositoryName: parts[1].trim() 82 | }; 83 | } 84 | 85 | if (parts.length !== 3) { 86 | throw new Error(Messages.failedToDetermineAzureRepoDetails); 87 | } 88 | 89 | // https://<organization>.visualstudio.com/<project>/_git/<repository> 90 | return { 91 | organizationName: organizationName, 92 | projectName: parts[0].trim(), 93 | repositoryName: parts[2].trim() 94 | }; 95 | } else if (remoteUrl.includes(SSHAzureReposUrl) || remoteUrl.includes(SSHVsoReposUrl)) { 96 | const urlFormat = remoteUrl.includes(SSHAzureReposUrl) ?
SSHAzureReposUrl : SSHVsoReposUrl; 97 | const part = remoteUrl.substring(remoteUrl.indexOf(urlFormat) + urlFormat.length); 98 | const parts = part.split('/'); 99 | if (parts.length !== 3) { 100 | throw new Error(Messages.failedToDetermineAzureRepoDetails); 101 | } 102 | 103 | return { 104 | organizationName: parts[0].trim(), 105 | projectName: parts[1].trim(), 106 | repositoryName: parts[2].trim() 107 | }; 108 | } else { 109 | throw new Error(Messages.notAzureRepoUrl); 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /src/helpers/controlProvider.ts: -------------------------------------------------------------------------------- 1 | import { InputBoxOptions, QuickPickItem, QuickPickOptions, window } from 'vscode'; 2 | import { telemetryHelper } from './telemetryHelper'; 3 | import * as TelemetryKeys from './telemetryKeys'; 4 | 5 | export async function showQuickPick(listName: string, listItems: T[] | Thenable, options: QuickPickOptions, itemCountTelemetryKey?: string): Promise { 6 | try { 7 | telemetryHelper.setTelemetry(TelemetryKeys.CurrentUserInput, listName); 8 | return await window.showQuickPick(listItems, { 9 | ignoreFocusOut: true, 10 | ...options 11 | }); 12 | } 13 | finally { 14 | if (itemCountTelemetryKey) { 15 | telemetryHelper.setTelemetry(itemCountTelemetryKey, (await listItems).length.toString()); 16 | } 17 | } 18 | } 19 | 20 | export async function showInputBox(inputName: string, options: InputBoxOptions): Promise { 21 | telemetryHelper.setTelemetry(TelemetryKeys.CurrentUserInput, inputName); 22 | return await window.showInputBox({ 23 | ignoreFocusOut: true, 24 | ...options 25 | }); 26 | } 27 | -------------------------------------------------------------------------------- /src/helpers/telemetryHelper.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import * as crypto from 'crypto'; 3 | 4 | import TelemetryReporter from '@vscode/extension-telemetry'; 5 | 6 | import * as TelemetryKeys from './telemetryKeys'; 7 | import * as logger from '../logger'; 8 | 9 | 10 | const extensionName = 'ms-azure-devops.azure-pipelines'; 11 | /* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access */ 12 | const packageJSON = vscode.extensions.getExtension(extensionName)?.packageJSON; // Guaranteed to exist 13 | export const extensionVersion: string = packageJSON.version; 14 | const aiKey: string = packageJSON.aiKey; 15 | /* eslint-enable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access */ 16 | 17 | type TelemetryProperties = Record; 18 | 19 | class TelemetryHelper { 20 | private journeyId: string = crypto.randomUUID(); 21 | 22 | private properties: TelemetryProperties = { 23 | [TelemetryKeys.JourneyId]: this.journeyId, 24 | }; 25 | 26 | private static reporter = new TelemetryReporter(extensionName, extensionVersion, aiKey); 27 | 28 | public dispose() { 29 | void TelemetryHelper.reporter.dispose(); 30 | } 31 | 32 | public getJourneyId(): string { 33 | return this.journeyId; 34 | } 35 | 36 | public setTelemetry(key: string, value: string): void { 37 | this.properties[key] = value; 38 | } 39 | 40 | // Log an error. 41 | // No custom properties are logged alongside the error. 
42 | // FIXME: This should really be sendTelemetryException but I'm maintaining 43 | // backwards-compatibility with how it used to be sent, especially because 44 | // I don't have access to the Application Insights logs :D (winstonliu). 45 | public logError(layer: string, tracePoint: string, error: Error): void { 46 | TelemetryHelper.reporter.sendTelemetryErrorEvent( 47 | tracePoint, { 48 | [TelemetryKeys.JourneyId]: this.journeyId, 49 | layer, 50 | errorMessage: error.message, 51 | stack: error.stack ?? '', 52 | }, undefined, ['errorMessage', 'stack']); 53 | } 54 | 55 | // Executes the given function, timing how long it takes. 56 | // This *does NOT* send any telemetry and must be called within the context 57 | // of an ongoing `callWithTelemetryAndErrorHandling` session to do anything useful. 58 | // Helpful for reporting fine-grained timing of individual functions. 59 | // TODO: Rename to something with less potential for confusion, like 'time' or 'timeFunction'? 60 | public async executeFunctionWithTimeTelemetry<T>(callback: () => Promise<T>, telemetryKey: string): Promise<T> { 61 | const startTime = Date.now(); 62 | try { 63 | return await callback(); 64 | } 65 | finally { 66 | this.setTelemetry(telemetryKey, ((Date.now() - startTime) / 1000).toString()); 67 | } 68 | } 69 | 70 | // Wraps the given function in a telemetry event. 71 | // The telemetry event sent after function execution will contain how long the function took as well as any custom properties 72 | // supplied through initialize() or setTelemetry(). 73 | public async callWithTelemetryAndErrorHandling<T>(command: string, callback: () => Promise<T>): Promise<T | undefined> { 74 | try { 75 | return await this.executeFunctionWithTimeTelemetry(callback, 'duration'); 76 | } catch (error) { 77 | TelemetryHelper.reporter.sendTelemetryErrorEvent( 78 | command, { 79 | ...this.properties, 80 | [TelemetryKeys.JourneyId]: this.journeyId, 81 | }); 82 | 83 | const message = error instanceof Error ? error.message : String(error); 84 | 85 | logger.log(message, command); 86 | 87 | if (message.includes('\n')) { 88 | void vscode.window.showErrorMessage('An error has occurred. Check the output window for more details.'); 89 | } else { 90 | void vscode.window.showErrorMessage(message); 91 | } 92 | } 93 | 94 | return undefined; 95 | } 96 | } 97 | 98 | export const telemetryHelper = new TelemetryHelper(); 99 | -------------------------------------------------------------------------------- /src/helpers/telemetryKeys.ts: -------------------------------------------------------------------------------- 1 | export const CurrentUserInput = 'currentUserInput'; 2 | export const JourneyId = 'journeyId'; 3 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { window } from "vscode"; 7 | 8 | // TODO: How can we write this to disk too so that we can remotely debug issues? 9 | // TODO: Set env var or something to turn logging on/off?
10 | 11 | const outputChannel = window.createOutputChannel('Azure Pipelines'); 12 | export function log(message: string, event?: string): void { 13 | let logMessage = `(${new Date().toLocaleString()}) `; 14 | 15 | if (event) { 16 | logMessage += `[${event}] `; 17 | } 18 | 19 | logMessage += message; 20 | outputChannel.appendLine(logMessage); 21 | } 22 | -------------------------------------------------------------------------------- /src/messages.ts: -------------------------------------------------------------------------------- 1 | export const gitExtensionUnavailable = 'Git extension could not be fetched. Please ensure it\'s installed and activated.'; 2 | export const gitExtensionNotEnabled = 'Git extension is not enabled. Please change the `git.enabled` setting to true.'; 3 | export const failedToDetermineAzureRepoDetails = 'Failed to determine Azure Repo details from remote url. Please ensure that the remote points to a valid Azure Repos url.'; 4 | export const notAzureRepoUrl = 'The repo isn\'t hosted with Azure Repos.'; 5 | export const selectOrganizationForEnhancedIntelliSense = 'Select Azure DevOps organization associated with the %s repository for enhanced Azure Pipelines IntelliSense.'; 6 | export const selectOrganizationLabel = 'Select organization'; 7 | export const selectOrganizationPlaceholder = 'Select Azure DevOps organization associated with the %s repository'; 8 | export const signInLabel = 'Sign In'; 9 | export const signInWithADifferentAccountLabel = 'Sign in with a different account'; 10 | export const changeTenantLabel = 'Change active tenant'; 11 | export const unableToAccessOrganization = 'Unable to access the "%s" organization. Make sure you\'re signed into the right Microsoft account or using the right tenant.'; 12 | export const signInForEnhancedIntelliSense = 'Sign in to Microsoft for enhanced Azure Pipelines IntelliSense'; 13 | export const userEligibleForEnahanced1ESPTIntellisense = 'Enable 1ESPT Schema in Azure Pipelines Extension settings for enhanced Intellisense'; 14 | export const notUsing1ESPTSchemaAsUserNotSignedInMessage = '1ESPT Schema is not used for Intellisense as you are not signed in with a `@microsoft.com` account'; 15 | export const enable1ESPTSchema = 'Enable'; 16 | export const doNotAskAgain = "Don't Ask Again"; 17 | -------------------------------------------------------------------------------- /src/schema-association-service-1espt.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import * as vscode from 'vscode'; 7 | import { URI, Utils } from 'vscode-uri'; 8 | import * as azdev from 'azure-devops-node-api'; 9 | import * as logger from './logger'; 10 | import * as Messages from './messages'; 11 | import { getAzureDevOpsSession } from './schema-association-service'; 12 | 13 | const milliseconds24hours = 86400000; 14 | 15 | export async function get1ESPTSchemaUri(azureDevOpsClient: azdev.WebApi, organizationName: string, session: vscode.AuthenticationSession, context: vscode.ExtensionContext, repoId1espt: string): Promise { 16 | try { 17 | if (session.account.label.endsWith("@microsoft.com")) { 18 | const gitApi = await azureDevOpsClient.getGitApi(); 19 | // Using getItem from GitApi: getItem(repositoryId: string, path: string, project?: string, scopePath?: string, recursionLevel?: GitInterfaces.VersionControlRecursionType, includeContentMetadata?: boolean, latestProcessedChange?: boolean, download?: boolean, versionDescriptor?: GitInterfaces.GitVersionDescriptor, includeContent?: boolean, resolveLfs?: boolean, sanitize?: boolean): Promise; 20 | const schemaFile = await gitApi.getItem(repoId1espt, "schema/1espt-base-schema.json", "1ESPipelineTemplates", undefined, undefined, true, true, true, undefined, true, true); 21 | 22 | const { content } = schemaFile; 23 | if (content === undefined) { 24 | logger.log(`File was retrieved without content for org: ${organizationName}`, 'SchemaDetection'); 25 | return undefined; 26 | } 27 | 28 | const schemaUri = Utils.joinPath(context.globalStorageUri, '1ESPTSchema', `${organizationName}-1espt-schema.json`); 29 | await vscode.workspace.fs.writeFile(schemaUri, Buffer.from(content)); 30 | return schemaUri; 31 | } 32 | else { 33 | // if user is signed in with account other than microsoft, then delete the 1ESPT schema file 34 | await delete1ESPTSchemaFileIfPresent(context); 35 | } 36 | } 37 | catch (error) { 38 | logger.log(`Error: ${error instanceof Error ? error.message : String(error)} while fetching 1ESPT schema for org: ${organizationName} : `, 'SchemaDetection'); 39 | } 40 | return undefined; 41 | } 42 | 43 | /** 44 | * Fetch cached 1ESPT schema if: 45 | 1) User is signed in with microsoft account 46 | 2) 1ESPT schema is enabled 47 | 3) last fetched 1ESPT schema is less than 24 hours old 48 | 4) Schema file exists 49 | * @param context 50 | * @param organizationName 51 | * @param session 52 | * @param lastUpdated1ESPTSchema 53 | * @returns 54 | */ 55 | export async function getCached1ESPTSchema(context: vscode.ExtensionContext, organizationName: string, session: vscode.AuthenticationSession, lastUpdated1ESPTSchema: Map): Promise { 56 | const lastUpdatedDate = lastUpdated1ESPTSchema.get(organizationName); 57 | if (!lastUpdatedDate) { 58 | return undefined; 59 | } 60 | 61 | const schemaUri1ESPT = Utils.joinPath(context.globalStorageUri, '1ESPTSchema', `${organizationName}-1espt-schema.json`); 62 | 63 | try { 64 | if (session.account.label.endsWith("@microsoft.com")) { 65 | if ((new Date().getTime() - lastUpdatedDate.getTime()) < milliseconds24hours) { 66 | try { 67 | await vscode.workspace.fs.stat(schemaUri1ESPT); 68 | logger.log("Returning cached schema for 1ESPT", 'SchemaDetection'); 69 | return schemaUri1ESPT; 70 | } catch { 71 | // Expected failure if file doesn't exist. 
72 | } 73 | } 74 | // schema is older than 24 hours, fetch schema file again 75 | else { 76 | logger.log(`Skipping cached 1ESPT schema for ${organizationName} as it is older than 24 hours`, `SchemaDetection`); 77 | } 78 | } 79 | else { 80 | void vscode.window.showInformationMessage(Messages.notUsing1ESPTSchemaAsUserNotSignedInMessage, Messages.signInWithADifferentAccountLabel) 81 | .then(async action => { 82 | if (action === Messages.signInWithADifferentAccountLabel) { 83 | await getAzureDevOpsSession(context, { 84 | clearSessionPreference: true, 85 | createIfNone: true, 86 | }); 87 | } 88 | }); 89 | logger.log(`Skipping cached 1ESPT schema for ${organizationName} as user is not signed in with Microsoft account`, `SchemaDetection`); 90 | } 91 | } 92 | catch (error) { 93 | logger.log(`Error: ${error instanceof Error ? error.message : String(error)} while fetching cached 1ESPT schema for org: ${organizationName}. It's possible that the schema does not exist.`, 'SchemaDetection'); 94 | } 95 | 96 | return undefined; 97 | } 98 | 99 | /** 100 | * User is eligible for 1ESPT schema if 1ESPT schema is available in ADO organization 101 | * @param azureDevOpsClient 102 | * @param organizationName 103 | * @returns 104 | */ 105 | export async function get1ESPTRepoIdIfAvailable(azureDevOpsClient: azdev.WebApi, organizationName: string): Promise { 106 | try { 107 | const gitApi = await azureDevOpsClient.getGitApi(); 108 | const repository = await gitApi.getRepository('1ESPipelineTemplates', '1ESPipelineTemplates'); 109 | // Types are wrong and getRepository cah return null. 110 | // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition 111 | if (repository?.id === undefined) { 112 | logger.log(`1ESPipelineTemplates repo not found for org ${organizationName}`, `SchemaDetection`); 113 | return ""; // 1ESPT repo not found 114 | } 115 | 116 | return repository.id; 117 | } 118 | catch (error) { 119 | logger.log(`Error: ${error instanceof Error ? error.message : String(error)} while checking eligibility for enhanced Intellisense for 1ESPT schema for org: ${organizationName}.`, 'SchemaDetection'); 120 | return ""; 121 | } 122 | } 123 | 124 | export async function delete1ESPTSchemaFileIfPresent(context: vscode.ExtensionContext) { 125 | try { 126 | await vscode.workspace.fs.delete(Utils.joinPath(context.globalStorageUri, '1ESPTSchema'), { recursive: true }); 127 | } 128 | catch (error) { 129 | logger.log(`Error: ${error instanceof Error ? error.message : String(error)} while deleting 1ESPT schema. It's possible that the schema file does not exist`, 'SchemaDetection'); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/schema-contributor.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import { URI } from 'vscode-uri'; 7 | 8 | interface SchemaContributorProvider { 9 | readonly requestSchema: (resource: string) => string; 10 | readonly requestSchemaContent: (uri: string) => string; 11 | } 12 | 13 | // TODO: Add tests for this class. 14 | // TODO: Can we just get rid of this class? 15 | // registerContributor is never called, which means the other two methods always throw. 
16 | class SchemaContributor { 17 | private _customSchemaContributors = new Map(); 18 | 19 | /** 20 | * Register a custom schema provider. 21 | * TODO: We might be able to use this to intelligently grab the schema for projects using Azure Repos. 22 | * 23 | * @param {string} schema the provider's name 24 | * @param requestSchema the requestSchema function 25 | * @param requestSchemaContent the requestSchemaContent function 26 | * @returns {boolean} 27 | */ 28 | public registerContributor(schema: string, 29 | requestSchema: (resource: string) => string, 30 | requestSchemaContent: (uri: string) => string): boolean { 31 | if (this._customSchemaContributors.has(schema)) { 32 | return false; 33 | } 34 | 35 | this._customSchemaContributors.set(schema, { 36 | requestSchema, 37 | requestSchemaContent 38 | }); 39 | 40 | return true; 41 | } 42 | 43 | /** 44 | * Asks each schema provider whether it has a schema for the given resource, 45 | * and returns the URI to the schema if it does. 46 | * 47 | * @param {string} resource the file to be validated 48 | * @returns {string} the schema uri 49 | */ 50 | public requestCustomSchema(resource: string): string { 51 | for (const contributor of this._customSchemaContributors.values()) { 52 | const uri = contributor.requestSchema(resource); 53 | if (uri) { 54 | return uri; 55 | } 56 | } 57 | 58 | // TODO: This is currently the only way to fallback to the default schema provider. 59 | // The upstream Red Hat server also falls back when receiving a falsy value, 60 | // so sync with their changes and change this to return false or something. 61 | // eslint-disable-next-line @typescript-eslint/only-throw-error 62 | throw `Unable to find custom schema for resource: '${resource}'`; 63 | } 64 | 65 | /** 66 | * If there is a schema provider that can handle the given URI, 67 | * returns the schema content corresponding to the URI. 68 | * TODO: If we stick to just local files and http(s), I doubt we need this. 69 | * 70 | * @param {string} uri the schema uri returned from requestSchema. 71 | * @returns {string} the schema content 72 | */ 73 | public requestCustomSchemaContent(uri: string): string { 74 | const { scheme } = URI.parse(uri); 75 | const contributor = this._customSchemaContributors.get(scheme); 76 | if (contributor) { 77 | return contributor.requestSchemaContent(uri); 78 | } 79 | 80 | // eslint-disable-next-line @typescript-eslint/only-throw-error 81 | throw `Unable to find custom schema content for uri: '${uri}'`; 82 | } 83 | } 84 | 85 | // global instance 86 | // TODO: Do this differently... why not instantiate? Static? Something else. 
87 | const schemaContributor = new SchemaContributor(); 88 | 89 | export const CUSTOM_SCHEMA_REQUEST = 'custom/schema/request'; 90 | export const CUSTOM_CONTENT_REQUEST = 'custom/schema/content'; 91 | 92 | export { schemaContributor } ; 93 | -------------------------------------------------------------------------------- /src/test/helper.ts: -------------------------------------------------------------------------------- 1 | // From https://github.com/microsoft/vscode-extension-samples/blob/main/lsp-sample/client/src/test/helper.ts 2 | 3 | import * as vscode from 'vscode'; 4 | import * as path from 'path'; 5 | 6 | export let doc: vscode.TextDocument; 7 | export let editor: vscode.TextEditor; 8 | export let documentEol: string; 9 | export let platformEol: string; 10 | 11 | /** 12 | * Activates the vscode.lsp-sample extension 13 | */ 14 | export async function activate(docUri: vscode.Uri) { 15 | // The extensionId is `publisher.name` from package.json 16 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 17 | const ext = vscode.extensions.getExtension('ms-azure-devops.azure-pipelines')!; 18 | await ext.activate(); 19 | try { 20 | doc = await vscode.workspace.openTextDocument(docUri); 21 | editor = await vscode.window.showTextDocument(doc); 22 | await sleep(5000); // Wait for server activation 23 | } catch (e) { 24 | console.error(e); 25 | } 26 | } 27 | 28 | async function sleep(ms: number) { 29 | return await new Promise(resolve => setTimeout(resolve, ms)); 30 | } 31 | 32 | export const getDocPath = (p: string) => { 33 | return path.resolve(__dirname, '../../src/test/workspace', p); 34 | }; 35 | 36 | export const getDocUri = (p: string) => { 37 | return vscode.Uri.file(getDocPath(p)); 38 | }; 39 | 40 | export async function setTestContent(content: string): Promise { 41 | const all = new vscode.Range( 42 | doc.positionAt(0), 43 | doc.positionAt(doc.getText().length) 44 | ); 45 | return await editor.edit(eb => { 46 | eb.replace(all, content) 47 | }); 48 | } 49 | -------------------------------------------------------------------------------- /src/test/suite/azureDevOpsHelper.test.ts: -------------------------------------------------------------------------------- 1 | import * as assert from 'assert'; 2 | import { getRepositoryDetailsFromRemoteUrl, isAzureReposUrl } from '../../helpers/azureDevOpsHelper'; 3 | 4 | suite('Azure DevOps Helpers', () => { 5 | suite('isAzureReposUrl', () => { 6 | test('Returns true for dev.azure.com URLs', () => { 7 | assert.ok(isAzureReposUrl('https://dev.azure.com/organization/project/_git/repo')); 8 | }); 9 | 10 | test('Returns true for visualstudio.com URLs', () => { 11 | assert.ok(isAzureReposUrl('https://organization.visualstudio.com/project/_git/repo')); 12 | }); 13 | 14 | test('Returns true for SSH dev.azure.com URLs', () => { 15 | assert.ok(isAzureReposUrl('git@ssh.dev.azure.com:v3/organization/project/repo')); 16 | }); 17 | 18 | test('Returns true for SSH visualstudio.com URLs', () => { 19 | assert.ok(isAzureReposUrl('organization@vs-ssh.visualstudio.com:v3/organization/project/repo')); 20 | }); 21 | 22 | test('Returns false for other URLs', () => { 23 | assert.ok(!isAzureReposUrl('https://azure.com/organization/project/_git/repo')); 24 | assert.ok(!isAzureReposUrl('https://visualstudio.com/project/_git/repo')); 25 | assert.ok(!isAzureReposUrl('https://github.com/owner/repo.git')); 26 | }); 27 | }); 28 | 29 | suite('getRepositoryDetailsFromRemoteUrl', () => { 30 | test('Returns correct details for dev.azure.com URLs', () => { 31 | const 
details = getRepositoryDetailsFromRemoteUrl('https://dev.azure.com/organization/project/_git/repo'); 32 | assert.strictEqual(details.organizationName, 'organization'); 33 | assert.strictEqual(details.projectName, 'project'); 34 | assert.strictEqual(details.repositoryName, 'repo'); 35 | }); 36 | 37 | test('Returns correct details for shorthand dev.azure.com URLs', () => { 38 | const details = getRepositoryDetailsFromRemoteUrl('https://dev.azure.com/organization/_git/repo'); 39 | assert.strictEqual(details.organizationName, 'organization'); 40 | assert.strictEqual(details.projectName, 'repo'); 41 | assert.strictEqual(details.repositoryName, 'repo'); 42 | }); 43 | 44 | test('Returns correct details for visualstudio.com URLs', () => { 45 | const details = getRepositoryDetailsFromRemoteUrl('https://organization.visualstudio.com/project/_git/repo'); 46 | assert.strictEqual(details.organizationName, 'organization'); 47 | assert.strictEqual(details.projectName, 'project'); 48 | assert.strictEqual(details.repositoryName, 'repo'); 49 | }); 50 | 51 | test('Returns correct details for shorthand visualstudio.com URLs', () => { 52 | const details = getRepositoryDetailsFromRemoteUrl('https://organization.visualstudio.com/_git/repo'); 53 | assert.strictEqual(details.organizationName, 'organization'); 54 | assert.strictEqual(details.projectName, 'repo'); 55 | assert.strictEqual(details.repositoryName, 'repo'); 56 | }); 57 | 58 | test('Returns correct details for DefaultCollection visualstudio.com URLs', () => { 59 | const details = getRepositoryDetailsFromRemoteUrl('https://organization.visualstudio.com/DeFaUlTcOlLeCtIoN/project/_git/repo'); 60 | assert.strictEqual(details.organizationName, 'organization'); 61 | assert.strictEqual(details.projectName, 'project'); 62 | assert.strictEqual(details.repositoryName, 'repo'); 63 | }); 64 | 65 | test('Returns correct details for shorthand DefaultCollection visualstudio.com URLs', () => { 66 | const details = getRepositoryDetailsFromRemoteUrl('https://organization.visualstudio.com/DeFaUlTcOlLeCtIoN/_git/repo'); 67 | assert.strictEqual(details.organizationName, 'organization'); 68 | assert.strictEqual(details.projectName, 'repo'); 69 | assert.strictEqual(details.repositoryName, 'repo'); 70 | }); 71 | 72 | test('Returns correct details for SSH dev.azure.com URLs', () => { 73 | const details = getRepositoryDetailsFromRemoteUrl('git@ssh.dev.azure.com:v3/organization/project/repo'); 74 | assert.strictEqual(details.organizationName, 'organization'); 75 | assert.strictEqual(details.projectName, 'project'); 76 | assert.strictEqual(details.repositoryName, 'repo'); 77 | }); 78 | 79 | test('Returns correct details for SSH visualstudio.com URLs', () => { 80 | const details = getRepositoryDetailsFromRemoteUrl('organization@vs-ssh.visualstudio.com:v3/organization/project/repo'); 81 | assert.strictEqual(details.organizationName, 'organization'); 82 | assert.strictEqual(details.projectName, 'project'); 83 | assert.strictEqual(details.repositoryName, 'repo'); 84 | }); 85 | 86 | test('Throws error for invalid URLs', () => { 87 | assert.throws(() => { 88 | getRepositoryDetailsFromRemoteUrl('https://invalid.url/organization/project/_git/repo'); 89 | }, "The repo isn't hosted with Azure Repos"); 90 | 91 | assert.throws(() => { 92 | getRepositoryDetailsFromRemoteUrl('https:/dev.azure.com/DefaultCollection/organization/project/_git/repo'); 93 | }, /Failed to determine Azure Repo details/); 94 | 95 | assert.throws(() => { 96 | 
getRepositoryDetailsFromRemoteUrl('https://organization.visualstudio.com/invalid'); 97 | }, /Failed to determine Azure Repo details/); 98 | }); 99 | }); 100 | }); 101 | -------------------------------------------------------------------------------- /src/test/suite/completion.test.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import * as assert from 'assert'; 3 | import { getDocUri, activate } from '../helper'; 4 | 5 | // This suite performs autocomplete tests that look for what options are available for autocompletion 6 | // depending on where we are in a file, what the contents of that file are, and what the schema is. 7 | // 8 | // https://github.com/Microsoft/vscode/issues/23814 9 | // ALso useful: 10 | // https://github.com/Microsoft/vscode/issues/111 11 | // https://github.com/ipatalas/ngComponentUtility/blob/master/test/providers/componentCompletionProvider.test.ts 12 | // 13 | // TODO: Do we need to create a proper completion item provider? How does the language server tie in right now? 14 | // It may be impossible to check the completion items in the UI and we might have to check them in the server. 15 | // Or we can take the first suggestion and make sure it works... then we know the options are in the list. 16 | // Then for the specific recommendations we test that in the completion provider. 17 | // 18 | suite('Autocomplete Tests', () => { 19 | // empty file, beginning of file, end of file, middle of file 20 | // within a broken file, within a working file(in terms of validation results) 21 | 22 | test('When I use intellisense on a task then I am shown task names', async () => { 23 | // Arrange 24 | const docUri = getDocUri('autocomplete.yml'); 25 | 26 | // Act 27 | const completionsList = await getCompletions(docUri, new vscode.Position(15, 12)); 28 | 29 | // Assert 30 | 31 | // We expect a lot of tasks 32 | assert.ok(completionsList.items.length >= 100); 33 | 34 | // All of them should be task names 35 | assert.ok(completionsList.items.every(item => (item.label as string).match(/^[\w-]+@\d+$/))); 36 | }); 37 | }); 38 | 39 | async function getCompletions( 40 | docUri: vscode.Uri, 41 | position: vscode.Position, 42 | triggerCharacter?: string, 43 | ): Promise { 44 | await activate(docUri); 45 | 46 | // Executing the command `vscode.executeCompletionItemProvider` to simulate triggering completion 47 | // NOTE: This returns *all* completions without filtering, unlike editor.action.triggerSuggest 48 | return await vscode.commands.executeCommand( 49 | 'vscode.executeCompletionItemProvider', 50 | docUri, 51 | position, 52 | triggerCharacter 53 | ); 54 | } 55 | -------------------------------------------------------------------------------- /src/test/suite/configuration.test.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import * as assert from 'assert'; 3 | 4 | function getWorkspaceFolder() { 5 | return vscode.workspace.workspaceFolders?.[0] ?? 
assert.fail('No workspace folder'); 6 | } 7 | 8 | suite('Language configuration', () => { 9 | suite('Filename patterns', () => { 10 | const trackedUris: vscode.Uri[] = []; 11 | 12 | async function assertFileIsAzurePipelines(...pathComponents: string[]): Promise<void> { 13 | const uri = vscode.Uri.joinPath(getWorkspaceFolder().uri, ...pathComponents); 14 | await vscode.workspace.fs.writeFile(uri, new Uint8Array()); 15 | trackedUris.push(uri); 16 | 17 | const doc = await vscode.workspace.openTextDocument(uri); 18 | assert.strictEqual(doc.languageId, 'azure-pipelines'); 19 | } 20 | 21 | suiteTeardown(async () => { 22 | await Promise.all(trackedUris.map(uri => vscode.workspace.fs.delete(uri))); 23 | }); 24 | 25 | for (const extension of ['yml', 'yaml']) { 26 | test(`Detects azure-pipelines.${extension}`, async () => { 27 | await assertFileIsAzurePipelines(`azure-pipelines.${extension}`); 28 | }); 29 | 30 | test(`Detects .azure-pipelines.${extension}`, async () => { 31 | await assertFileIsAzurePipelines(`.azure-pipelines.${extension}`); 32 | }); 33 | 34 | test(`Detects azure-pipelines/anything.${extension}`, async () => { 35 | await assertFileIsAzurePipelines('azure-pipelines', `anything.${extension}`); 36 | }); 37 | 38 | test(`Detects .azure-pipelines/anything.${extension}`, async () => { 39 | await assertFileIsAzurePipelines('.azure-pipelines', `anything.${extension}`); 40 | }); 41 | 42 | test(`Detects .pipelines/anything.${extension}`, async () => { 43 | await assertFileIsAzurePipelines('.pipelines', `anything.${extension}`); 44 | }); 45 | } 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /src/test/suite/diagnostics.test.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import * as assert from 'assert'; 7 | import * as vscode from 'vscode'; 8 | import { getDocUri, activate } from '../helper'; 9 | 10 | // This suite performs validation tests on YAML files. 11 | // The tests check whether a file has any diagnostics and, if so, what they are. 12 | // 13 | // These tests need to ensure diagnostics are propagated to the UI; we do not need to test 14 | // every type and permutation of diagnostics, as that should be handled in unit tests. 15 | 16 | // Helpers 17 | // 1. Workspace configuration settings are not as expected 18 | // console.log('workspace configuration: ' + JSON.stringify(vscode.workspace.getConfiguration())); 19 | // 2.
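A related technique for the goal stated above (making sure diagnostics actually reach the UI) is to await the diagnostics change event instead of relying on activation timing or a fixed delay. The helper below is a hypothetical sketch, not part of this extension's test suite; it uses only the standard vscode API (vscode.languages.onDidChangeDiagnostics and vscode.languages.getDiagnostics).

function waitForDiagnosticsChange(uri: vscode.Uri, timeoutMs = 5000): Promise<vscode.Diagnostic[]> {
    return new Promise<vscode.Diagnostic[]>(resolve => {
        // Resolve with whatever diagnostics exist once they change for this URI (or the timeout elapses).
        const finish = () => {
            subscription.dispose();
            clearTimeout(timer);
            resolve(vscode.languages.getDiagnostics(uri));
        };
        const subscription = vscode.languages.onDidChangeDiagnostics(e => {
            if (e.uris.some(changed => changed.toString() === uri.toString())) {
                finish();
            }
        });
        const timer = setTimeout(finish, timeoutMs);
    });
}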
20 | 21 | suite('Diagnostics Tests', () => { 22 | test('Given an empty document, there should be no diagnostics', async () => { 23 | // Arrange 24 | const emptyFile = getDocUri('emptyfile.yml'); 25 | 26 | // Act 27 | const diagnostics = await getDiagnostics(emptyFile); 28 | 29 | // Assert 30 | assert.strictEqual(diagnostics.length, 0); 31 | }); 32 | 33 | test('Given a valid document, there should be no diagnostics', async () => { 34 | // Arrange 35 | const validFile = getDocUri('validfile.yml'); 36 | 37 | // Act 38 | const diagnostics = await getDiagnostics(validFile); 39 | 40 | // Assert 41 | assert.strictEqual(diagnostics.length, 0); 42 | }); 43 | 44 | test('Given an invalid document, there should be diagnostics', async () => { 45 | // Arrange 46 | const invalidFile = getDocUri('invalidfile.yml'); 47 | 48 | // Act 49 | const diagnostics = await getDiagnostics(invalidFile); 50 | 51 | // Assert 52 | assert.strictEqual(diagnostics.length, 1); 53 | assert.strictEqual(diagnostics[0].message, 'Incorrect type. Expected "object".'); 54 | assert.strictEqual(diagnostics[0].severity, vscode.DiagnosticSeverity.Warning); 55 | assert.deepStrictEqual(diagnostics[0].range, toRange(0, 0, 5, 0)); 56 | }); 57 | 58 | test('Manually selecting file type as Azure Pipelines works', async () => { 59 | // TODO: Write this test. I have not been able to find a way to manually set the file type through the vscode api. 60 | 61 | }); 62 | 63 | test('When manually activating an invalid file there should be diagnostics', async () => { 64 | // TODO: Write this test. I have not been able to find a way to manually set the file type through the vscode api. 65 | 66 | }); 67 | 68 | test('When manually activating a valid file there should not be diagnostics', async () => { 69 | // TODO: Write this test. I have not been able to find a way to manually set the file type through the vscode api. 70 | 71 | }); 72 | }); 73 | 74 | async function getDiagnostics(docUri: vscode.Uri): Promise<vscode.Diagnostic[]> { 75 | await activate(docUri); 76 | 77 | return vscode.languages.getDiagnostics(docUri); 78 | } 79 | 80 | function toRange(sLine: number, sChar: number, eLine: number, eChar: number) { 81 | const start = new vscode.Position(sLine, sChar); 82 | const end = new vscode.Position(eLine, eChar); 83 | return new vscode.Range(start, end); 84 | } 85 | -------------------------------------------------------------------------------- /src/test/suite/fromserver.test.ts: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. 4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | import * as assert from 'assert'; 7 | import * as vscode from 'vscode'; 8 | 9 | // This suite performs validation tests on YAML files. 10 | // The tests check whether a file has any validation errors and, if so, what they are. 11 | // 12 | // These tests need to ensure validation errors are propagated to the UI; we do not need to test 13 | // every type and permutation of validation errors, as that should be handled in unit tests. 14 | 15 | // Helpers 16 | // 1. Workspace configuration settings are not as expected 17 | // console.log('workspace configuration: ' + JSON.stringify(vscode.workspace.getConfiguration())); 18 | // 2.
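A note on scale for the suite that follows: it kicks off validation of every file under examples/extracted at once. If that ever becomes too heavy, one option is to validate in small batches. The sketch below is hypothetical and not part of this repository; it reuses the testFileIsValid helper defined later in this file.

async function validateInBatches(files: vscode.Uri[], batchSize = 5): Promise<void> {
    for (let i = 0; i < files.length; i += batchSize) {
        // Open and validate a handful of documents at a time instead of hundreds simultaneously.
        await Promise.all(files.slice(i, i + batchSize).map(file => testFileIsValid(file)));
    }
}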
19 | 20 | suite('Validation Tests From Server', function() { 21 | this.timeout(1000000); 22 | 23 | test ('Validate all files from server', async function () { 24 | const validFiles: vscode.Uri[] = await vscode.workspace.findFiles('**/extracted/*.yml'); 25 | 26 | const promises = validFiles.map(async function(testFile) { 27 | await testFileIsValid(testFile); 28 | }); 29 | await Promise.all(promises); 30 | }); 31 | }); 32 | 33 | async function testFileIsValid(file: vscode.Uri) { 34 | // Arrange and Act 35 | const emptyDocument: vscode.TextDocument = await vscode.workspace.openTextDocument(file); 36 | await vscode.window.showTextDocument(emptyDocument); 37 | await sleep(1000); // Give it time to show the validation errors, if any 38 | const diagnostics: vscode.Diagnostic[] = vscode.languages.getDiagnostics(file); 39 | 40 | // Assert 41 | assert.strictEqual(emptyDocument.languageId, 'azure-pipelines'); 42 | assert.strictEqual(diagnostics.length, 0, 'File: ' + file.path + ' Error: ' + JSON.stringify(diagnostics)); 43 | } 44 | 45 | async function sleep(ms: number) { 46 | return await new Promise((resolve) => setTimeout(resolve, ms)); 47 | } 48 | -------------------------------------------------------------------------------- /src/test/suite/testdata/schemas/all-inputs-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstProperty": [ "task" ], 3 | "properties": { 4 | "task": { 5 | "pattern": "^AllInputsTask@0$", 6 | "description": "All Inputs Task\n\nAll inputs task", 7 | "ignoreCase": "value" 8 | }, 9 | "inputs": { 10 | "properties": { 11 | "a1": { 12 | "description": "a1", 13 | "type": "integer", 14 | "ignoreCase": "key" 15 | }, 16 | "a2": { 17 | "description": "a2", 18 | "type": "integer", 19 | "ignoreCase": "key" 20 | }, 21 | "a3": { 22 | "description": "a3", 23 | "type": "boolean", 24 | "ignoreCase": "key" 25 | }, 26 | "a4": { 27 | "description": "a4", 28 | "type": "boolean", 29 | "ignoreCase": "key" 30 | }, 31 | "a5": { 32 | "description": "a5", 33 | "type": "string", 34 | "ignoreCase": "key" 35 | }, 36 | "a6": { 37 | "description": "a6", 38 | "type": "string", 39 | "ignoreCase": "key" 40 | }, 41 | "a7": { 42 | "description": "a7", 43 | "type": "string", 44 | "ignoreCase": "key" 45 | }, 46 | "a8": { 47 | "description": "a8", 48 | "type": "string", 49 | "ignoreCase": "key" 50 | }, 51 | "a9": { 52 | "description": "a9", 53 | "type": "string", 54 | "ignoreCase": "key" 55 | }, 56 | "a10": { 57 | "description": "a10", 58 | "type": "string", 59 | "ignoreCase": "key" 60 | }, 61 | "a11": { 62 | "description": "a11", 63 | "type": "string", 64 | "ignoreCase": "key" 65 | }, 66 | "a12": { 67 | "description": "a12", 68 | "type": "string", 69 | "ignoreCase": "key" 70 | }, 71 | "a13": { 72 | "description": "a13", 73 | "type": "string", 74 | "ignoreCase": "key" 75 | }, 76 | "a14": { 77 | "description": "a14", 78 | "type": "string", 79 | "ignoreCase": "key" 80 | }, 81 | "a15": { 82 | "description": "a15", 83 | "type": "string", 84 | "ignoreCase": "key" 85 | }, 86 | "a16": { 87 | "description": "a16", 88 | "type": "string", 89 | "ignoreCase": "key" 90 | }, 91 | "a17": { 92 | "description": "a17", 93 | "type": "string", 94 | "ignoreCase": "key" 95 | }, 96 | "a18": { 97 | "description": "a18", 98 | "type": "string", 99 | "ignoreCase": "key" 100 | }, 101 | "a21": { 102 | "description": "a21", 103 | "type": "string", 104 | "ignoreCase": "key" 105 | }, 106 | "a22": { 107 | "description": "a22", 108 | "type": "string", 109 | "ignoreCase": "key" 110 | }, 111 
| "a23": { 112 | "description": "a23", 113 | "enum": [ 114 | "Key 1", 115 | "Key 2" 116 | ], 117 | "ignoreCase": "all" 118 | }, 119 | "a24": { 120 | "description": "a24", 121 | "enum": [ 122 | "Key 1", 123 | "Key 2" 124 | ], 125 | "ignoreCase": "all" 126 | }, 127 | "a25": { 128 | "description": "a25", 129 | "enum": [ 130 | "Key 1", 131 | "Key 2" 132 | ], 133 | "ignoreCase": "all" 134 | }, 135 | "a26": { 136 | "description": "a26", 137 | "enum": [ 138 | "Key 1", 139 | "Key 2" 140 | ], 141 | "ignoreCase": "all" 142 | }, 143 | "a27": { 144 | "description": "a27", 145 | "type": "string", 146 | "ignoreCase": "key" 147 | }, 148 | "a28": { 149 | "description": "a28", 150 | "type": "string", 151 | "ignoreCase": "key" 152 | } 153 | }, 154 | "description": "All Inputs Task inputs", 155 | "additionalProperties": false, 156 | "required": [] 157 | } 158 | }, 159 | "required": ["task"] 160 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/schemas/npm-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstProperty": [ "task" ], 3 | "properties": { 4 | "task": { 5 | "pattern": "^Npm@0$", 6 | "description": "npm\n\nRun an npm command", 7 | "ignoreCase": "value" 8 | }, 9 | "inputs": { 10 | "properties": { 11 | "cwd": { 12 | "description": "working folder", 13 | "type": "string", 14 | "ignoreCase": "key" 15 | }, 16 | "command": { 17 | "description": "npm command", 18 | "type": "string", 19 | "ignoreCase": "key" 20 | }, 21 | "arguments": { 22 | "description": "arguments", 23 | "type": "string", 24 | "ignoreCase": "key" 25 | } 26 | }, 27 | "description": "npm inputs", 28 | "additionalProperties": false, 29 | "required": ["cwd"] 30 | } 31 | }, 32 | "required": ["task", "inputs"] 33 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/schemas/special-characters-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstProperty": [ "task" ], 3 | "properties": { 4 | "task": { 5 | "pattern": "^SpecialCharactersTask@0$", 6 | "description": "friendly name\n\ndescription details", 7 | "ignoreCase": "value" 8 | }, 9 | "inputs": { 10 | "properties": { 11 | "input name": { 12 | "description": "my input label", 13 | "type": "string", 14 | "ignoreCase": "key" 15 | } 16 | }, 17 | "description": "friendly name inputs", 18 | "additionalProperties": false, 19 | "required": ["input name"] 20 | } 21 | }, 22 | "required": ["task", "inputs"] 23 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/tasks/all-inputs-task.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "00000000-0000-0000-0000-000000000000", 3 | "name": "AllInputsTask", 4 | "version": { 5 | "major": 0, 6 | "minor": 0, 7 | "patch": 0, 8 | "isTest": false 9 | }, 10 | "friendlyName": "All Inputs Task", 11 | "description": "All inputs task", 12 | "inputs": [{ 13 | "name": "a1", 14 | "label": "a1", 15 | "defaultValue": "", 16 | "type": "int", 17 | "helpMarkDown": "" 18 | }, 19 | { 20 | "name": "a2", 21 | "label": "a2", 22 | "defaultValue": "", 23 | "type": "INT", 24 | "helpMarkDown": "" 25 | }, 26 | { 27 | "name": "a3", 28 | "label": "a3", 29 | "defaultValue": "", 30 | "type": "boolean", 31 | "helpMarkDown": "" 32 | }, 33 | { 34 | "name": "a4", 35 | "label": "a4", 36 | "defaultValue": "", 37 | "type": "BOOLEAN", 38 | "helpMarkDown": "" 39 | }, 
40 | { 41 | "name": "a5", 42 | "label": "a5", 43 | "defaultValue": "", 44 | "type": "multiline", 45 | "helpMarkDown": "" 46 | }, 47 | { 48 | "name": "a6", 49 | "label": "a6", 50 | "defaultValue": "", 51 | "type": "MULTILINE", 52 | "helpMarkDown": "" 53 | }, 54 | { 55 | "name": "a7", 56 | "label": "a7", 57 | "defaultValue": "", 58 | "type": "string", 59 | "helpMarkDown": "" 60 | }, 61 | { 62 | "name": "a8", 63 | "label": "a8", 64 | "defaultValue": "", 65 | "type": "STRING", 66 | "helpMarkDown": "" 67 | }, 68 | { 69 | "name": "a9", 70 | "label": "a9", 71 | "defaultValue": "", 72 | "type": "filepath", 73 | "helpMarkDown": "" 74 | }, 75 | { 76 | "name": "a10", 77 | "label": "a10", 78 | "defaultValue": "", 79 | "type": "FILEPATH", 80 | "helpMarkDown": "" 81 | }, 82 | { 83 | "name": "a11", 84 | "label": "a11", 85 | "defaultValue": "", 86 | "type": "securefile", 87 | "helpMarkDown": "" 88 | }, 89 | { 90 | "name": "a12", 91 | "label": "a12", 92 | "defaultValue": "", 93 | "type": "SECUREFILE", 94 | "helpMarkDown": "" 95 | }, 96 | { 97 | "name": "a13", 98 | "label": "a13", 99 | "defaultValue": "", 100 | "type": "identities", 101 | "helpMarkDown": "" 102 | }, 103 | { 104 | "name": "a14", 105 | "label": "a14", 106 | "defaultValue": "", 107 | "type": "IDENTITIES", 108 | "helpMarkDown": "" 109 | }, 110 | { 111 | "name": "a15", 112 | "label": "a15", 113 | "defaultValue": "", 114 | "type": "connectedservice", 115 | "helpMarkDown": "" 116 | }, 117 | { 118 | "name": "a16", 119 | "label": "a16", 120 | "defaultValue": "", 121 | "type": "CONNECTEDSERVICE", 122 | "helpMarkDown": "" 123 | }, 124 | { 125 | "name": "a17", 126 | "label": "a17", 127 | "defaultValue": "", 128 | "type": "connectedservicewithmoretext", 129 | "helpMarkDown": "" 130 | }, 131 | { 132 | "name": "a18", 133 | "label": "a18", 134 | "defaultValue": "", 135 | "type": "CONNECTEDSERVICEWITHMORETEXT", 136 | "helpMarkDown": "" 137 | }, 138 | { 139 | "name": "a21", 140 | "label": "a21", 141 | "defaultValue": "", 142 | "type": "querycontrol", 143 | "helpMarkDown": "" 144 | }, 145 | { 146 | "name": "a22", 147 | "label": "a22", 148 | "defaultValue": "", 149 | "type": "QUERYCONTROL", 150 | "helpMarkDown": "" 151 | }, 152 | { 153 | "name": "a23", 154 | "label": "a23", 155 | "defaultValue": "", 156 | "type": "picklist", 157 | "helpMarkDown": "", 158 | "options": { 159 | "Key 1": "Value 1", 160 | "Key 2": "Value 2" 161 | } 162 | }, 163 | { 164 | "name": "a24", 165 | "label": "a24", 166 | "defaultValue": "", 167 | "type": "PICKLIST", 168 | "helpMarkDown": "", 169 | "options": { 170 | "Key 1": "Value 1", 171 | "Key 2": "Value 2" 172 | } 173 | }, 174 | { 175 | "name": "a25", 176 | "label": "a25", 177 | "defaultValue": "", 178 | "type": "radio", 179 | "helpMarkDown": "", 180 | "options": { 181 | "Key 1": "Value 1", 182 | "Key 2": "Value 2" 183 | } 184 | }, 185 | { 186 | "name": "a26", 187 | "label": "a26", 188 | "defaultValue": "", 189 | "type": "RADIO", 190 | "helpMarkDown": "", 191 | "options": { 192 | "Key 1": "Value 1", 193 | "Key 2": "Value 2" 194 | } 195 | }, 196 | { 197 | "name": "a27", 198 | "label": "a27", 199 | "defaultValue": "", 200 | "type": "RADIO", 201 | "helpMarkDown": "" 202 | }, 203 | { 204 | "name": "a28", 205 | "label": "a28", 206 | "defaultValue": "", 207 | "type": "radio", 208 | "helpMarkDown": "" 209 | } 210 | ] 211 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/tasks/missing-input-mapping-exception-task.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "id": "00000000-0000-0000-0000-000000000000", 3 | "name": "TaskWithUnknownInputType", 4 | "version": { 5 | "major": 0, 6 | "minor": 0, 7 | "patch": 0, 8 | "isTest": false 9 | }, 10 | "friendlyName": "name", 11 | "description": "Test with missing input type", 12 | "inputs": [{ 13 | "name": "command", 14 | "label": "npm command", 15 | "defaultValue": "install", 16 | "required": true, 17 | "type": "input-type-that-doesnt-exist", 18 | "helpMarkDown": "" 19 | } 20 | ] 21 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/tasks/npm-task.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "fe47e961-9fa8-4106-8639-368c022d43ad", 3 | "name": "Npm", 4 | "version": { 5 | "major": 0, 6 | "minor": 2, 7 | "patch": 22, 8 | "isTest": false 9 | }, 10 | "friendlyName": "npm", 11 | "description": "Run an npm command", 12 | "inputs": [{ 13 | "name": "cwd", 14 | "label": "working folder", 15 | "defaultValue": "", 16 | "required": true, 17 | "type": "filePath", 18 | "helpMarkDown": "Working directory where the npm command is run. Defaults to the root of the repo." 19 | }, { 20 | "name": "command", 21 | "label": "npm command", 22 | "defaultValue": "install", 23 | "required": true, 24 | "type": "string", 25 | "helpMarkDown": "" 26 | }, { 27 | "name": "arguments", 28 | "label": "arguments", 29 | "defaultValue": "", 30 | "type": "string", 31 | "helpMarkDown": "Additional arguments passed to npm." 32 | } 33 | ] 34 | } -------------------------------------------------------------------------------- /src/test/suite/testdata/tasks/special-characters-task.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "00000000-0000-0000-0000-000000000000", 3 | "name": "SpecialCharactersTask", 4 | "version": { 5 | "major": 0, 6 | "minor": 0, 7 | "patch": 0, 8 | "isTest": false 9 | }, 10 | "friendlyName": "\r\n\"friendly \r\n\"name\r\n\"", 11 | "description": "\r\n\"description \r\n\"details\r\n\"", 12 | "inputs": [{ 13 | "name": "\r\n\"input \r\n\"name\r\n\"", 14 | "label": "my input label", 15 | "defaultValue": "", 16 | "required": true, 17 | "type": "string", 18 | "helpMarkDown": "" 19 | } 20 | ] 21 | } -------------------------------------------------------------------------------- /src/test/workspace/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "*.yml": "azure-pipelines", 4 | "**/*.yml": "azure-pipelines" 5 | } 6 | } -------------------------------------------------------------------------------- /src/test/workspace/autocomplete.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | - name: os 3 | displayName: OS 4 | type: string 5 | default: '' 6 | 7 | steps: 8 | 9 | # Test 10 | - script: node make.js test 11 | displayName: Test with node 6 12 | - script: node make.js testLegacy --task "$(task_pattern)" 13 | displayName: Legacy tests with node 6 14 | 15 | # Authenticate with npm 16 | - task: npmA 17 | displayName: Authenticate with npm 18 | inputs: 19 | workingFile: .npmrc 20 | 21 | # Test with node 5 22 | - script: node make.js test --node 5 --runner ts 23 | displayName: Test with node 5 24 | - script: node make.js testLegacy --node 5 --runner ts --task "$(task_pattern)" 25 | displayName: Legacy tests with node 5 26 | - task: 
PublishTestResults@2 27 | displayName: Publish Test Results test-*.xml 28 | inputs: 29 | testResultsFiles: 'test-*.xml' 30 | testRunTitle: 'Node 5 Test Results' 31 | searchFolder: '$(System.DefaultWorkingDirectory)/testresults' 32 | -------------------------------------------------------------------------------- /src/test/workspace/emptyfile.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/azure-pipelines-vscode/ace8a40d53d11dfcdd9ac0dc8e83718e35fd3e8f/src/test/workspace/emptyfile.yml -------------------------------------------------------------------------------- /src/test/workspace/invalidfile.txt: -------------------------------------------------------------------------------- 1 | this is an invalid yaml schema file and should fail 2 | 3 | more text 4 | 5 | more 6 | -------------------------------------------------------------------------------- /src/test/workspace/invalidfile.yml: -------------------------------------------------------------------------------- 1 | this is an invalid yaml schema file and should fail 2 | 3 | more text 4 | 5 | more 6 | -------------------------------------------------------------------------------- /src/test/workspace/validfile.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | - name: os 3 | displayName: OS 4 | type: string 5 | default: '' 6 | 7 | steps: 8 | 9 | # Test 10 | - script: node make.js test 11 | displayName: Test with node 6 12 | - script: node make.js testLegacy --task "$(task_pattern)" 13 | displayName: Legacy tests with node 6 14 | 15 | # Publish test results 16 | - task: PublishTestResults@2 17 | displayName: Publish Test Results test-*.xml 18 | inputs: 19 | testResultsFiles: 'test-*.xml' 20 | testRunTitle: 'Node 6 Test Results' 21 | searchFolder: '$(System.DefaultWorkingDirectory)/testresults' 22 | 23 | # Test with node 5 24 | - script: node make.js test --node 5 --runner ts 25 | displayName: Test with node 5 26 | - script: node make.js testLegacy --node 5 --runner ts --task "$(task_pattern)" 27 | displayName: Legacy tests with node 5 28 | - task: PublishTestResults@2 29 | displayName: Publish Test Results test-*.xml 30 | inputs: 31 | testResultsFiles: 'test-*.xml' 32 | testRunTitle: 'Node 5 Test Results' 33 | searchFolder: '$(System.DefaultWorkingDirectory)/testresults' 34 | -------------------------------------------------------------------------------- /src/typings/git.d.ts: -------------------------------------------------------------------------------- 1 | // https://github.com/microsoft/vscode/blob/1.64.0/extensions/git/src/api/git.d.ts 2 | // Keep this in sync with the VS Code engine version in package.json. 3 | 4 | /*--------------------------------------------------------------------------------------------- 5 | * Copyright (c) Microsoft Corporation. All rights reserved. 6 | * Licensed under the MIT License. See License.txt in the project root for license information. 
7 | *--------------------------------------------------------------------------------------------*/ 8 | 9 | import { Uri, Event, Disposable, ProviderResult } from 'vscode'; 10 | export { ProviderResult } from 'vscode'; 11 | 12 | export interface Git { 13 | readonly path: string; 14 | } 15 | 16 | export interface InputBox { 17 | value: string; 18 | } 19 | 20 | export const enum ForcePushMode { 21 | Force, 22 | ForceWithLease 23 | } 24 | 25 | export const enum RefType { 26 | Head, 27 | RemoteHead, 28 | Tag 29 | } 30 | 31 | export interface Ref { 32 | readonly type: RefType; 33 | readonly name?: string; 34 | readonly commit?: string; 35 | readonly remote?: string; 36 | } 37 | 38 | export interface UpstreamRef { 39 | readonly remote: string; 40 | readonly name: string; 41 | } 42 | 43 | export interface Branch extends Ref { 44 | readonly upstream?: UpstreamRef; 45 | readonly ahead?: number; 46 | readonly behind?: number; 47 | } 48 | 49 | export interface Commit { 50 | readonly hash: string; 51 | readonly message: string; 52 | readonly parents: string[]; 53 | readonly authorDate?: Date; 54 | readonly authorName?: string; 55 | readonly authorEmail?: string; 56 | readonly commitDate?: Date; 57 | } 58 | 59 | export interface Submodule { 60 | readonly name: string; 61 | readonly path: string; 62 | readonly url: string; 63 | } 64 | 65 | export interface Remote { 66 | readonly name: string; 67 | readonly fetchUrl?: string; 68 | readonly pushUrl?: string; 69 | readonly isReadOnly: boolean; 70 | } 71 | 72 | export const enum Status { 73 | INDEX_MODIFIED, 74 | INDEX_ADDED, 75 | INDEX_DELETED, 76 | INDEX_RENAMED, 77 | INDEX_COPIED, 78 | 79 | MODIFIED, 80 | DELETED, 81 | UNTRACKED, 82 | IGNORED, 83 | INTENT_TO_ADD, 84 | 85 | ADDED_BY_US, 86 | ADDED_BY_THEM, 87 | DELETED_BY_US, 88 | DELETED_BY_THEM, 89 | BOTH_ADDED, 90 | BOTH_DELETED, 91 | BOTH_MODIFIED 92 | } 93 | 94 | export interface Change { 95 | 96 | /** 97 | * Returns either `originalUri` or `renameUri`, depending 98 | * on whether this change is a rename change. When 99 | * in doubt always use `uri` over the other two alternatives. 100 | */ 101 | readonly uri: Uri; 102 | readonly originalUri: Uri; 103 | readonly renameUri: Uri | undefined; 104 | readonly status: Status; 105 | } 106 | 107 | export interface RepositoryState { 108 | readonly HEAD: Branch | undefined; 109 | readonly refs: Ref[]; 110 | readonly remotes: Remote[]; 111 | readonly submodules: Submodule[]; 112 | readonly rebaseCommit: Commit | undefined; 113 | 114 | readonly mergeChanges: Change[]; 115 | readonly indexChanges: Change[]; 116 | readonly workingTreeChanges: Change[]; 117 | 118 | readonly onDidChange: Event; 119 | } 120 | 121 | export interface RepositoryUIState { 122 | readonly selected: boolean; 123 | readonly onDidChange: Event; 124 | } 125 | 126 | /** 127 | * Log options. 128 | */ 129 | export interface LogOptions { 130 | /** Max number of log entries to retrieve. If not specified, the default is 32. 
*/ 131 | readonly maxEntries?: number; 132 | readonly path?: string; 133 | } 134 | 135 | export interface CommitOptions { 136 | all?: boolean | 'tracked'; 137 | amend?: boolean; 138 | signoff?: boolean; 139 | signCommit?: boolean; 140 | empty?: boolean; 141 | noVerify?: boolean; 142 | requireUserConfig?: boolean; 143 | } 144 | 145 | export interface FetchOptions { 146 | remote?: string; 147 | ref?: string; 148 | all?: boolean; 149 | prune?: boolean; 150 | depth?: number; 151 | } 152 | 153 | export interface BranchQuery { 154 | readonly remote?: boolean; 155 | readonly pattern?: string; 156 | readonly count?: number; 157 | readonly contains?: string; 158 | } 159 | 160 | export interface Repository { 161 | 162 | readonly rootUri: Uri; 163 | readonly inputBox: InputBox; 164 | readonly state: RepositoryState; 165 | readonly ui: RepositoryUIState; 166 | 167 | getConfigs(): Promise<{ key: string; value: string; }[]>; 168 | getConfig(key: string): Promise; 169 | setConfig(key: string, value: string): Promise; 170 | getGlobalConfig(key: string): Promise; 171 | 172 | getObjectDetails(treeish: string, path: string): Promise<{ mode: string, object: string, size: number }>; 173 | detectObjectType(object: string): Promise<{ mimetype: string, encoding?: string }>; 174 | buffer(ref: string, path: string): Promise; 175 | show(ref: string, path: string): Promise; 176 | getCommit(ref: string): Promise; 177 | 178 | add(paths: string[]): Promise; 179 | clean(paths: string[]): Promise; 180 | 181 | apply(patch: string, reverse?: boolean): Promise; 182 | diff(cached?: boolean): Promise; 183 | diffWithHEAD(): Promise; 184 | diffWithHEAD(path: string): Promise; 185 | diffWith(ref: string): Promise; 186 | diffWith(ref: string, path: string): Promise; 187 | diffIndexWithHEAD(): Promise; 188 | diffIndexWithHEAD(path: string): Promise; 189 | diffIndexWith(ref: string): Promise; 190 | diffIndexWith(ref: string, path: string): Promise; 191 | diffBlobs(object1: string, object2: string): Promise; 192 | diffBetween(ref1: string, ref2: string): Promise; 193 | diffBetween(ref1: string, ref2: string, path: string): Promise; 194 | 195 | hashObject(data: string): Promise; 196 | 197 | createBranch(name: string, checkout: boolean, ref?: string): Promise; 198 | deleteBranch(name: string, force?: boolean): Promise; 199 | getBranch(name: string): Promise; 200 | getBranches(query: BranchQuery): Promise; 201 | setBranchUpstream(name: string, upstream: string): Promise; 202 | 203 | getMergeBase(ref1: string, ref2: string): Promise; 204 | 205 | tag(name: string, upstream: string): Promise; 206 | deleteTag(name: string): Promise; 207 | 208 | status(): Promise; 209 | checkout(treeish: string): Promise; 210 | 211 | addRemote(name: string, url: string): Promise; 212 | removeRemote(name: string): Promise; 213 | renameRemote(name: string, newName: string): Promise; 214 | 215 | fetch(options?: FetchOptions): Promise; 216 | fetch(remote?: string, ref?: string, depth?: number): Promise; 217 | pull(unshallow?: boolean): Promise; 218 | push(remoteName?: string, branchName?: string, setUpstream?: boolean, force?: ForcePushMode): Promise; 219 | 220 | blame(path: string): Promise; 221 | log(options?: LogOptions): Promise; 222 | 223 | commit(message: string, opts?: CommitOptions): Promise; 224 | } 225 | 226 | export interface RemoteSource { 227 | readonly name: string; 228 | readonly description?: string; 229 | readonly url: string | string[]; 230 | } 231 | 232 | export interface RemoteSourceProvider { 233 | readonly name: string; 234 | readonly 
icon?: string; // codicon name 235 | readonly supportsQuery?: boolean; 236 | getRemoteSources(query?: string): ProviderResult; 237 | getBranches?(url: string): ProviderResult; 238 | publishRepository?(repository: Repository): Promise; 239 | } 240 | 241 | export interface RemoteSourcePublisher { 242 | readonly name: string; 243 | readonly icon?: string; // codicon name 244 | publishRepository(repository: Repository): Promise; 245 | } 246 | 247 | export interface Credentials { 248 | readonly username: string; 249 | readonly password: string; 250 | } 251 | 252 | export interface CredentialsProvider { 253 | getCredentials(host: Uri): ProviderResult; 254 | } 255 | 256 | export interface PushErrorHandler { 257 | handlePushError(repository: Repository, remote: Remote, refspec: string, error: Error & { gitErrorCode: GitErrorCodes }): Promise; 258 | } 259 | 260 | export type APIState = 'uninitialized' | 'initialized'; 261 | 262 | export interface PublishEvent { 263 | repository: Repository; 264 | branch?: string; 265 | } 266 | 267 | export interface API { 268 | readonly state: APIState; 269 | readonly onDidChangeState: Event; 270 | readonly onDidPublish: Event; 271 | readonly git: Git; 272 | readonly repositories: Repository[]; 273 | readonly onDidOpenRepository: Event; 274 | readonly onDidCloseRepository: Event; 275 | 276 | toGitUri(uri: Uri, ref: string): Uri; 277 | getRepository(uri: Uri): Repository | null; 278 | init(root: Uri): Promise; 279 | openRepository(root: Uri): Promise 280 | 281 | registerRemoteSourcePublisher(publisher: RemoteSourcePublisher): Disposable; 282 | registerRemoteSourceProvider(provider: RemoteSourceProvider): Disposable; 283 | registerCredentialsProvider(provider: CredentialsProvider): Disposable; 284 | registerPushErrorHandler(handler: PushErrorHandler): Disposable; 285 | } 286 | 287 | export interface GitExtension { 288 | 289 | readonly enabled: boolean; 290 | readonly onDidChangeEnablement: Event; 291 | 292 | /** 293 | * Returns a specific API version. 294 | * 295 | * Throws error if git extension is disabled. You can listed to the 296 | * [GitExtension.onDidChangeEnablement](#GitExtension.onDidChangeEnablement) event 297 | * to know when the extension becomes enabled/disabled. 298 | * 299 | * @param version Version number. 
300 | * @returns API instance 301 | */ 302 | getAPI(version: 1): API; 303 | } 304 | 305 | export const enum GitErrorCodes { 306 | BadConfigFile = 'BadConfigFile', 307 | AuthenticationFailed = 'AuthenticationFailed', 308 | NoUserNameConfigured = 'NoUserNameConfigured', 309 | NoUserEmailConfigured = 'NoUserEmailConfigured', 310 | NoRemoteRepositorySpecified = 'NoRemoteRepositorySpecified', 311 | NotAGitRepository = 'NotAGitRepository', 312 | NotAtRepositoryRoot = 'NotAtRepositoryRoot', 313 | Conflict = 'Conflict', 314 | StashConflict = 'StashConflict', 315 | UnmergedChanges = 'UnmergedChanges', 316 | PushRejected = 'PushRejected', 317 | RemoteConnectionError = 'RemoteConnectionError', 318 | DirtyWorkTree = 'DirtyWorkTree', 319 | CantOpenResource = 'CantOpenResource', 320 | GitNotFound = 'GitNotFound', 321 | CantCreatePipe = 'CantCreatePipe', 322 | PermissionDenied = 'PermissionDenied', 323 | CantAccessRemote = 'CantAccessRemote', 324 | RepositoryNotFound = 'RepositoryNotFound', 325 | RepositoryIsLocked = 'RepositoryIsLocked', 326 | BranchNotFullyMerged = 'BranchNotFullyMerged', 327 | NoRemoteReference = 'NoRemoteReference', 328 | InvalidBranchName = 'InvalidBranchName', 329 | BranchAlreadyExists = 'BranchAlreadyExists', 330 | NoLocalChanges = 'NoLocalChanges', 331 | NoStashFound = 'NoStashFound', 332 | LocalChangesOverwritten = 'LocalChangesOverwritten', 333 | NoUpstreamBranch = 'NoUpstreamBranch', 334 | IsInSubmodule = 'IsInSubmodule', 335 | WrongCase = 'WrongCase', 336 | CantLockRef = 'CantLockRef', 337 | CantRebaseMultipleBranches = 'CantRebaseMultipleBranches', 338 | PatchDoesNotApply = 'PatchDoesNotApply', 339 | NoPathFound = 'NoPathFound', 340 | UnknownPath = 'UnknownPath', 341 | } 342 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/.npmrc: -------------------------------------------------------------------------------- 1 | registry=https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/ 2 | 3 | always-auth=true -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/ExampleTests.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Globalization; 4 | using System.Threading; 5 | 6 | namespace Example 7 | { 8 | [TestClass] 9 | public sealed class PipelineParserBaselineTests 10 | { 11 | [TestInitialize] 12 | public void TestInitialize() 13 | { 14 | m_fileProvider = new YamlFileProvider(); 15 | m_fileProviderFactory = new YamlFileProviderFactory(m_fileProvider); 16 | SetupPipelineParser(); 17 | } 18 | 19 | [TestMethod] 20 | public void PipelineParserBaselineTests_JobCancelTimeoutInMinutes_FromImpliedJob_LegacyQueue() 21 | { 22 | // Arrange 23 | m_fileProvider.FileContent["ci.yml"] = @" 24 | queue: 25 | name: myPool 26 | cancelTimeoutInMinutes: 5 27 | steps: 28 | - script: echo hi 29 | "; 30 | var expected = @" 31 | cancelTimeoutInMinutes: 5 32 | pool: myPool 33 | steps: 34 | - script: echo hi 35 | "; 36 | 37 | // Act 38 | var actual = Load("ci.yml"); 39 | 40 | // Assert 41 | Assert.AreEqual(expected.Trim(), actual.Trim()); 42 | } 43 | 44 | [TestMethod] 45 | public void PipelineParserBaselineTests_JobCancelTimeoutInMinutes_FromImpliedJob_LegacyServer() 46 | { 47 | // Arrange 48 | m_fileProvider.FileContent["ci.yml"] = @" 49 | server: 50 | cancelTimeoutInMinutes: 5 51 | steps: 52 | - task: foo@1 53 | "; 54 | var expected = @" 
55 | cancelTimeoutInMinutes: 5 56 | pool: server 57 | steps: 58 | - task: foo@1 59 | "; 60 | 61 | // Act 62 | var actual = Load("ci.yml"); 63 | 64 | // Assert 65 | Assert.AreEqual(expected.Trim(), actual.Trim()); 66 | } 67 | 68 | [TestMethod] 69 | public void PipelineParserBaselineTests_JobCancelTimeoutInMinutes_FromJob() 70 | { 71 | // Arrange 72 | var expected = @" 73 | jobs: 74 | - job: job1 75 | - job: job2 76 | cancelTimeoutInMinutes: 5 77 | - job: job3 78 | cancelTimeoutInMinutes: $[ variables.theCancelTimeoutInMinutes ] 79 | "; 80 | m_fileProvider.FileContent["ci.yml"] = expected; 81 | 82 | // Act 83 | var actual = Load("ci.yml"); 84 | 85 | // Assert 86 | Assert.AreEqual(expected.Trim(), actual.Trim()); 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "extract-yaml-testcases", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "extract-yaml-testcases", 9 | "version": "1.0.0", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "@types/node": "~20.9.2", 13 | "typescript": "~5.2.2" 14 | } 15 | }, 16 | "node_modules/@types/node": { 17 | "version": "20.9.5", 18 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/@types/node/-/node-20.9.5.tgz", 19 | "integrity": "sha1-u0QQFLy5HGN0Kw4f4luQL11YH6o=", 20 | "dev": true, 21 | "license": "MIT", 22 | "dependencies": { 23 | "undici-types": "~5.26.4" 24 | } 25 | }, 26 | "node_modules/typescript": { 27 | "version": "5.2.2", 28 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/typescript/-/typescript-5.2.2.tgz", 29 | "integrity": "sha1-XrteWlt18IXyK8P4Rg+6MIMQ+ng=", 30 | "dev": true, 31 | "license": "Apache-2.0", 32 | "bin": { 33 | "tsc": "bin/tsc", 34 | "tsserver": "bin/tsserver" 35 | }, 36 | "engines": { 37 | "node": ">=14.17" 38 | } 39 | }, 40 | "node_modules/undici-types": { 41 | "version": "5.26.5", 42 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/undici-types/-/undici-types-5.26.5.tgz", 43 | "integrity": "sha1-vNU5iT0AtW6WT9JlekhmsiGmVhc=", 44 | "dev": true, 45 | "license": "MIT" 46 | } 47 | }, 48 | "dependencies": { 49 | "@types/node": { 50 | "version": "20.9.5", 51 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/@types/node/-/node-20.9.5.tgz", 52 | "integrity": "sha1-u0QQFLy5HGN0Kw4f4luQL11YH6o=", 53 | "dev": true, 54 | "requires": { 55 | "undici-types": "~5.26.4" 56 | } 57 | }, 58 | "typescript": { 59 | "version": "5.2.2", 60 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/typescript/-/typescript-5.2.2.tgz", 61 | "integrity": "sha1-XrteWlt18IXyK8P4Rg+6MIMQ+ng=", 62 | "dev": true 63 | }, 64 | "undici-types": { 65 | "version": "5.26.5", 66 | "resolved": "https://pkgs.dev.azure.com/mseng/PipelineTools/_packaging/PipelineTools_PublicPackages/npm/registry/undici-types/-/undici-types-5.26.5.tgz", 67 | "integrity": "sha1-vNU5iT0AtW6WT9JlekhmsiGmVhc=", 68 | "dev": true 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "extract-yaml-testcases", 3 | "version": "1.0.0", 4 | "description": "A tool for extracting YAML test cases from a C# file", 5 | "main": "./out/main.js", 6 | "type": "module", 7 | "scripts": { 8 | "prestart": "npm run compile", 9 | "start": "node ./out/main.js", 10 | "compile": "tsc -p ./", 11 | "watch": "tsc -watch -p ./" 12 | }, 13 | "author": "Microsoft", 14 | "license": "MIT", 15 | "devDependencies": { 16 | "@types/node": "~20.9.2", 17 | "typescript": "~5.2.2" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/readme.md: -------------------------------------------------------------------------------- 1 | # Extract YAML Testcases 2 | 3 | The Azure Pipelines source code (accessible only to Microsoft employees) has a 4 | large set of unit tests for the YAML parser. This tool will extract those test 5 | cases into discrete files for use in unit testing the language server. 6 | 7 | This tool is open source, but the input data is not. From time to time, an 8 | Azure Pipelines employee should re-run the tool to extract the latest unit 9 | tests. TODO: follow up with Microsoft CELA to ensure we can ship the extracted 10 | test cases. 11 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/src/main.ts: -------------------------------------------------------------------------------- 1 | import fs from 'node:fs/promises'; 2 | import path from 'node:path'; 3 | 4 | if (process.argv.length <= 2) { 5 | usage(); 6 | } else { 7 | const options = parseOptions(process.argv); 8 | 9 | const data = await fs.readFile(options.input, 'utf-8'); 10 | await fs.mkdir(options.outputDir, { recursive: true }); 11 | await extractYaml(data, options.outputDir); 12 | } 13 | 14 | function usage(): void { 15 | console.log("usage: extract-yaml-testcases []"); 16 | } 17 | 18 | function parseOptions(rawArgs: string[]): { input: string, outputDir: string } { 19 | const interestingArgs = rawArgs.slice(2); 20 | if (interestingArgs.length > 2) { 21 | usage(); 22 | process.exit(1); 23 | } 24 | 25 | return { 26 | input: interestingArgs[0], 27 | outputDir: interestingArgs[1] ?? 
"./output" 28 | }; 29 | } 30 | 31 | async function extractYaml(fileData: string, outputDir: string): Promise { 32 | // find the TestMethod-attributed code blocks, extract their name and contents 33 | const matcher = /\[TestMethod\][^]*?public void (.*?)\([^]*?\{([^]*?)\}/g; 34 | let chunk = matcher.exec(fileData); 35 | while (chunk !== null) { 36 | await outputYaml(chunk[1], chunk[2], outputDir); 37 | chunk = matcher.exec(fileData); 38 | } 39 | } 40 | 41 | async function outputYaml(name: string, body: string, outputDir: string): Promise { 42 | const testCaseName = name.split('_').slice(1).join('_'); 43 | const outputBaseName = path.join(outputDir, testCaseName); 44 | 45 | const multilineStringMatcher = /@"([^]*?)"/g; 46 | let mlString = multilineStringMatcher.exec(body); 47 | let number = 0; 48 | while (mlString !== null) { 49 | const finalFileName = [outputBaseName, number.toString(), 'yml'].join('.'); 50 | const rawContents = mlString[1].trim(); 51 | // replace artificial tasks "foo@1", "myTask@1", and "myOtherTask@2" with a real task name 52 | const cookedContents = rawContents.replace(/foo@1|myTask@1|myOtherTask@2/gi, "Bash@3"); 53 | await fs.writeFile(finalFileName, cookedContents, 'utf8'); 54 | mlString = multilineStringMatcher.exec(body); 55 | number++; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /tools/extract-yaml-testcases/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "Node16", 4 | "target": "ES2022", 5 | "outDir": "out", 6 | "sourceMap": true, 7 | "rootDir": "src", 8 | "strict": true, 9 | "noUnusedLocals": true, 10 | "noImplicitReturns": true, 11 | "noFallthroughCasesInSwitch": true, 12 | "noUnusedParameters": true, 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Specify module code generation. */ 4 | "module": "ES2022", 5 | 6 | /* */ 7 | "moduleResolution": "Bundler", 8 | 9 | /* Report errors for fallthrough cases in switch statement. */ 10 | "noFallthroughCasesInSwitch": true, 11 | 12 | /* Report error when not all code paths in function return a value. */ 13 | "noImplicitReturns": true, 14 | 15 | /* Report errors on unused locals. */ 16 | "noUnusedLocals": true, 17 | 18 | /* Report errors on unused parameters. */ 19 | //"noUnusedParameters": true, 20 | 21 | /* Redirect output structure to the directory. */ 22 | "outDir": "out", 23 | 24 | /* Specifies the root directory of input files. Only use to control the output directory structure with --outDir. */ 25 | "rootDir": "src", 26 | 27 | /* Generates corresponding .map file. */ 28 | "sourceMap": true, 29 | 30 | /* Enable all strict type checking options. 31 | Enabling --strict enables 32 | --noImplicitAny, 33 | --noImplicitThis, 34 | --alwaysStrict, 35 | --strictNullChecks, 36 | --strictFunctionTypes 37 | --strictPropertyInitialization. 38 | */ 39 | "strict": true, 40 | 41 | /* Specify ECMAScript target version. 
*/ 42 | "target": "ES2022", 43 | }, 44 | "include": [ 45 | "src" 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "./tsconfig.json" 4 | ], 5 | "compilerOptions": { 6 | // Tests are run directly by VS Code, so we need them in CommonJS format. 7 | // Node16 will look at package.json to determine the actual module format, 8 | // and since we don't have a "type" field, it will default to CommonJS. 9 | "module": "Node16", 10 | "moduleResolution": "Node16" 11 | }, 12 | "include": [ 13 | "src/test" 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /webpack.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | 3 | import path from 'path'; 4 | 5 | /** @type {import('webpack').Configuration} */ 6 | export default { 7 | target: 'node', 8 | entry: { 9 | extension: './src/extension.ts', 10 | server: './node_modules/azure-pipelines-language-server/out/server.js' 11 | }, 12 | output: { 13 | path: path.resolve(import.meta.dirname, 'dist'), 14 | filename: '[name].js', 15 | libraryTarget: 'commonjs2', 16 | devtoolModuleFilenameTemplate: '../[resource-path]' 17 | }, 18 | devtool: 'source-map', 19 | externals: { 20 | vscode: 'commonjs vscode', 21 | 'applicationinsights-native-metrics': 'commonjs applicationinsights-native-metrics', // we're not native 22 | }, 23 | mode: 'production', 24 | resolve: { 25 | extensions: ['.ts', '.js'] 26 | }, 27 | module: { 28 | rules: [ 29 | { 30 | test: /\.ts$/, 31 | exclude: /node_modules/, 32 | use: [ 33 | { 34 | loader: 'ts-loader' 35 | } 36 | ] 37 | } 38 | ] 39 | }, 40 | }; 41 | --------------------------------------------------------------------------------
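The extracted examples referenced throughout this repository (examples/extracted/*.yml, validated by fromserver.test.ts) are produced by the two regular expressions in tools/extract-yaml-testcases/src/main.ts: one finds each [TestMethod]-attributed method in a C# baseline test file such as ExampleTests.cs, and the other pulls every C# verbatim string (@"...") out of that method's body. The following standalone sketch walks through that flow using the same patterns; the inline sample string and console output are illustrative only and are not part of the repository.

const sample = `
[TestMethod]
public void PipelineParserBaselineTests_JobSteps_Script()
{
    m_fileProvider.FileContent["ci.yml"] = @"
steps:
- script: echo hi
";
}
`;

// Same pattern as extractYaml() in main.ts: capture the method name and body of each [TestMethod].
const methodMatcher = /\[TestMethod\][^]*?public void (.*?)\([^]*?\{([^]*?)\}/g;
// Same pattern as outputYaml() in main.ts: capture each C# verbatim string inside the body.
const multilineStringMatcher = /@"([^]*?)"/g;

for (const [, name, body] of sample.matchAll(methodMatcher)) {
    // 'PipelineParserBaselineTests_JobSteps_Script' becomes 'JobSteps_Script', matching the
    // TestCase.<n>.yml naming visible under examples/extracted.
    const testCaseName = name.split('_').slice(1).join('_');
    [...body.matchAll(multilineStringMatcher)].forEach(([, yaml], n) => {
        console.log(`${testCaseName}.${n}.yml\n${yaml.trim()}\n`);
    });
}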