├── .eslintignore
├── .eslintrc
├── .github
└── workflows
│ └── ci.yml
├── .gitignore
├── .prettierignore
├── .prettierrc
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Jenkinsfile
├── LICENSE
├── README.md
├── ci
├── README.md
├── integration.yml
└── integration_job_v2.yml
├── docs
├── details.md
├── development.md
└── getting-started.md
├── header.js
├── images
├── adding_config_map_task.png
├── adding_install_oc_task.png
├── adding_oc_cmd_task.png
├── basic_authentication.png
├── cmd_exec_config.png
├── conditional_cmd_exec_config.png
├── configure_config_map_task.png
├── configure_install_oc_task.png
├── file-path-connection-type.png
├── inline-config-connection-type.png
├── kubeconfig_authentication.png
├── kubernetes_service_connection.png
├── logo.png
├── oc_with_command_line_task.png
├── service-connection-types.png
└── token_authentication.png
├── package-lock.json
├── package.json
├── scripts
└── vsix-manifest-overrides.json
├── src
├── config-map-task.ts
├── config-map.ts
├── constants.ts
├── oc-auth.ts
├── oc-condition.ts
├── oc-conditional-exec-task.ts
├── oc-exec-task.ts
├── oc-exec.ts
├── oc-install.ts
├── oc-setup-task.ts
├── oc-utils.json
└── utils
│ ├── exec_helper.ts
│ └── zip_helper.ts
├── tasks
├── config-map
│ ├── icon.png
│ └── task.json
├── oc-cmd
│ ├── icon.png
│ └── task.json
├── oc-conditional-cmd
│ ├── icon.png
│ └── task.json
└── oc-setup
│ ├── icon.png
│ └── task.json
├── test
├── config-map.test.ts
├── fixtures
│ ├── openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
│ ├── openshift-origin-client-tools-v3.11.0-0cbc58b-mac.zip
│ └── openshift-origin-client-tools-v3.11.0-0cbc58b-windows.zip
├── index.coverage.ts
├── index.debug.ts
├── index.ts
├── oc-auth.test.ts
├── oc-exec.test.ts
├── oc-install.test.ts
└── toolrunnerStub.ts
├── tsconfig.json
└── vss-extension.json
/.eslintignore:
--------------------------------------------------------------------------------
1 | tasks
2 | scripts
3 | out
4 | node_modules
5 | images
6 | docs
7 | ci
8 | test
9 | header.js
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "env": {
4 | "browser": true,
5 | "es6": true,
6 | "jasmine": true
7 | },
8 | "extends": [
9 | "airbnb-base",
10 | "eslint:recommended",
11 | "plugin:@typescript-eslint/eslint-recommended",
12 | "plugin:@typescript-eslint/recommended",
13 | "plugin:@typescript-eslint/recommended-requiring-type-checking",
14 | "prettier",
15 | "prettier/@typescript-eslint"
16 | ],
17 | "parser": "@typescript-eslint/parser",
18 | "parserOptions": {
19 | "ecmaVersion": 2018,
20 | "comment": true,
21 | "project": "./tsconfig.json",
22 | "sourceType": "module"
23 | },
24 | "plugins": [
25 | "@typescript-eslint",
26 | "prettier",
27 | "header",
28 | "import"],
29 | "settings": {
30 | "import/core-modules": [ "vscode" ],
31 | "import/parsers": {
32 | "@typescript-eslint/parser": [".ts", ".tsx"]
33 | },
34 | "import/resolver": {
35 | "typescript": {},
36 | "node": {
37 | "extensions": [
38 | ".js",
39 | ".jsx",
40 | ".ts"
41 | ]
42 | }
43 | },
44 | "import/extensions": [
45 | ".js",
46 | ".jsx",
47 | ".ts"
48 | ]
49 | },
50 | "rules": {
51 | "header/header": [2, "./header.js"],
52 | "camelcase": 2,
53 | "consistent-return": 0,
54 | "consistent-this": [1, "that"],
55 | "curly": ["error", "multi-line"],
56 | "default-case": [2],
57 | "dot-notation": [2],
58 | "no-multiple-empty-lines": [1, { "max": 1, "maxEOF": 0 }],
59 | "eqeqeq": [2, "allow-null"],
60 | "guard-for-in": 2,
61 | "import/no-unresolved": ["error"],
62 | "import/no-duplicates": ["error"],
63 | "import/prefer-default-export": 0,
64 | "max-nested-callbacks": [1, 4],
65 | "max-classes-per-file": [0],
66 | "no-alert": 2,
67 | "no-caller": 2,
68 | "no-console": 0,
69 | "no-constant-condition": 2,
70 | "no-debugger": 2,
71 | "no-else-return": ["error"],
72 | "no-global-strict": 0,
73 | "no-irregular-whitespace": ["error"],
74 | "no-param-reassign": ["off", { "props": false }],
75 | "no-plusplus": "off",
76 | "no-restricted-syntax": "off",
77 | "no-shadow": ["error"],
78 | "no-underscore-dangle": 0,
79 | "no-useless-constructor": "off",
80 | "@typescript-eslint/no-use-before-define": 2,
81 | "@typescript-eslint/await-thenable": "error",
82 | "@typescript-eslint/unbound-method": ["error", { "ignoreStatic": true }],
83 | "no-var": 2,
84 | "object-shorthand": ["error", "properties"],
85 | "prefer-const": ["error", { "destructuring": "all" }],
86 | "prefer-template": 2,
87 | "radix": 2,
88 | "no-trailing-spaces": "error",
89 | "@typescript-eslint/prefer-regexp-exec": 0
90 | },
91 | "overrides": [
92 | {
93 | "files": ["*.test.ts"],
94 | "rules": {
95 | "no-unused-expressions": "off",
96 | "@typescript-eslint/unbound-method" : "off"
97 | }
98 | }
99 | ]
100 | }
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 |
2 | # -----------------------------------------------------------------------------------------------
3 | # Copyright (c) Red Hat, Inc. All rights reserved.
4 | # Licensed under the MIT License. See LICENSE file in the project root for license information.
5 | # -----------------------------------------------------------------------------------------------
6 |
7 | name: OpenShift VSTS CI
8 |
9 | on:
10 | push:
11 | branches: [ master ]
12 | pull_request:
13 | branches: [ master ]
14 |
15 | jobs:
16 | Linux:
17 | runs-on: ubuntu-latest
18 | strategy:
19 | matrix:
20 | node: [ '16', '14', '10' ]
21 | name: Node ${{ matrix.node }}
22 |
23 | steps:
24 | - uses: actions/checkout@v3
25 | - name: Setup Node.js environment
26 | uses: actions/setup-node@v3
27 | with:
28 | node-version: ${{ matrix.node }}
29 | - run: npm run setup
30 | - run: npm install
31 | - run: npm run compile
32 | - run: npm test
33 |
34 | Windows:
35 | runs-on: windows-latest
36 |
37 | steps:
38 | - uses: actions/checkout@v3
39 | - name: Setup Node.js environment
40 | uses: actions/setup-node@v3
41 | with:
42 | node-version: '10.x'
43 | - run: npm run setup
44 | - run: npm install
45 | - run: npm run build
46 | - run: npm test
47 |
48 | MacOS:
49 | runs-on: macos-latest
50 |
51 | steps:
52 | - uses: actions/checkout@v3
53 | - name: Setup Node.js environment
54 | uses: actions/setup-node@v3
55 | with:
56 | node-version: '10.x'
57 | - run: npm run setup
58 | - run: npm install
59 | - run: npm run build
60 | - run: npm test
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated
2 | node_modules
3 | lib
4 | out
5 | *.js
6 | *.js.map
7 | .taskkey
8 | *.vsix
9 |
10 | # Artefacts
11 | *.vsix
12 |
13 | # IDE
14 | .vscode
15 | .idea
16 |
17 | # Misc
18 | .DS_Store
19 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | **/lib
2 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "arrowParens": "always",
3 | "printWidth": 100,
4 | "singleQuote": true,
5 | "trailingComma": "all"
6 | }
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | ## 2.0.1 (February 2, 2023)
4 |
5 | * [#215](https://github.com/redhat-developer/openshift-vsts/pull/215) Add support to unzip .tar.gz archive
6 |
7 | ## 2.0.0 (December 20, 2022)
8 |
9 | This release updates the minimum node version supported to 10. Old workflows might break when switching to it
10 | * [#196](https://github.com/redhat-developer/openshift-vsts/pull/196) Make extension works with node 10 by default
11 | * [#202](https://github.com/redhat-developer/openshift-vsts/pull/202) Add support to unzip .tar archive
12 | * [#194](https://github.com/redhat-developer/openshift-vsts/pull/194) Wrong bundle name cached in windows
13 | * [#200](https://github.com/redhat-developer/openshift-vsts/pull/200) Remove travis CI
14 |
15 | ## 1.6.0 (April 24, 2020)
16 |
17 | * [#153](https://github.com/redhat-developer/openshift-vsts/pull/153) Added cache support
18 | * [#152](https://github.com/redhat-developer/openshift-vsts/issues/152) Added new task to perform conditional commands
19 | * [#160](https://github.com/redhat-developer/openshift-vsts/pull/160) Added support for command interpolation
20 | * [#146](https://github.com/redhat-developer/openshift-vsts/issues/146) Added support to define connection config at runtime
21 | * [#165](https://github.com/redhat-developer/openshift-vsts/pull/165) Updated oc versions file
22 | * [#156](https://github.com/redhat-developer/openshift-vsts/pull/156) Refactor how errors are handled by extension
23 | * [#158](https://github.com/redhat-developer/openshift-vsts/pull/158) Switched from Azure pipelines to Github Actions
24 |
25 | Thanks to Rinor Maloku for his contribution
26 |
27 | ## 1.5.0 (February 17, 2020)
28 |
29 | * [#135](https://github.com/redhat-developer/openshift-vsts/issues/135) Added proxy support
30 | * [#130](https://github.com/redhat-developer/openshift-vsts/issues/130) Added support for pipes and redirector operators
31 | * [#136](https://github.com/redhat-developer/openshift-vsts/issues/136) Fixed issue when typing oc version in format x.xx
32 | * [#142](https://github.com/redhat-developer/openshift-vsts/issues/142) Moved to eslint
33 |
34 | ## 1.4.2 (December 3, 2019)
35 |
36 | This release brings to you:
37 | * [#120](https://github.com/redhat-developer/openshift-vsts/issues/120) Added Certificate Authority flag to connect to Openshift by using an external certificate authority file
38 | * [#124](https://github.com/redhat-developer/openshift-vsts/issues/124) Exported constants to external JSON file
39 |
40 | ## 1.4.1 - (November 20, 2019)
41 |
42 | * Updated marketplace description/fixed wrong links
43 |
44 | ## 1.4.0 - (November 12, 2019)
45 |
46 | This release brings to you:
47 |
48 | * [#46](https://github.com/redhat-developer/openshift-vsts/issues/46) Added new flag to ignore non success return value
49 | * [#97](https://github.com/redhat-developer/openshift-vsts/issues/97) Check if oc cli already exists locally and prevent from downloading new version
50 | * [#115](https://github.com/redhat-developer/openshift-vsts/issues/115) Changed way oc cli is downloaded. Now extension uses mirror.openshift.com
51 | * [#99](https://github.com/redhat-developer/openshift-vsts/issues/99) Added new Azure Devops badge
52 | * [#96](https://github.com/redhat-developer/openshift-vsts/issues/96) Changed way to create download directory
53 | * [#98](https://github.com/redhat-developer/openshift-vsts/pull/98) Use sinon to make test and preventing downloading useless archive
54 | * [#104](https://github.com/redhat-developer/openshift-vsts/issues/104) Create Jenkins task to publish new version into marketplace
55 | * [#74](https://github.com/redhat-developer/openshift-vsts/issues/74) Updated Azure Devops CI to work on all Oses
56 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | * Using welcoming and inclusive language
12 | * Being respectful of differing viewpoints and experiences
13 | * Gracefully accepting constructive criticism
14 | * Focusing on what is best for the community
15 | * Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances
20 | * Trolling, insulting/derogatory comments, and personal or political attacks
21 | * Public or private harassment
22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Scope
32 |
33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34 |
35 | ## Enforcement
36 |
37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38 |
39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40 |
41 | ## Attribution
42 |
43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44 |
45 | [homepage]: http://contributor-covenant.org
46 | [version]: http://contributor-covenant.org/version/1/4/
47 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contribution guidelines
2 |
3 |
4 |
5 | - [Filing issues](#filing-issues)
6 | - [Contributing patches](#contributing-patches)
7 |
8 |
9 |
10 | ## Filing issues
11 |
12 | File issues using the standard [Github issue tracker](https://github.com/redhat-developer/openshift-vsts/issues) for this repository.
13 | Before you submit a new issue, please search the list of issues to see if anyone already submitted a similar issue.
14 |
15 | ## Contributing patches
16 |
17 | Thank you for your contributions! Please follow this process to submit a patch:
18 |
19 | - Create an issue describing your proposed change to the repository.
20 | - Fork the repository and create a topic branch.
21 | See also [Understanding the GitHub Flow](https://guides.github.com/introduction/flow/).
22 | - Refer to the [README](./README.md) for how to build and test _openshift-vsts_.
23 | - Submit a pull request with the proposed changes.
24 | To make reviewing and integration as smooth as possible, please run the `npm test` target prior to submitting the pull request.
25 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env groovy
2 |
3 | node('rhel8'){
4 | stage('Checkout repo') {
5 | deleteDir()
6 | git url: 'https://github.com/redhat-developer/openshift-vsts',
7 | branch: "${BRANCH}"
8 | }
9 |
10 | stage('Install requirements') {
11 | def nodeHome = tool 'nodejs-lts'
12 | env.PATH="${env.PATH}:${nodeHome}/bin"
13 | sh "npm run setup"
14 | }
15 |
16 | stage('Build') {
17 | sh "npm install"
18 | sh "npm run build"
19 | }
20 |
21 | withEnv(['JUNIT_REPORT_PATH=report.xml']) {
22 | stage('Test') {
23 | wrap([$class: 'Xvnc']) {
24 | sh "npm run test:report:ci"
25 | junit '**/test-report.xml'
26 | }
27 | }
28 | }
29 |
30 | stage('Package') {
31 | sh "npm run extension:create"
32 | }
33 |
34 | if(params.UPLOAD_LOCATION) {
35 | stage('Snapshot') {
36 | def filesToPush = findFiles(glob: '**/*.vsix')
37 | def extensionJson = readJSON file: 'vss-extension.json'
38 | sh "sftp -C ${UPLOAD_LOCATION}/snapshots/openshift-vsts/ <<< \$'put -p \"${filesToPush[0].path}\"'"
39 | }
40 | }
41 |
42 | if(publishToMarketPlace.equals('true')){
43 | timeout(time:5, unit:'DAYS') {
44 | input message:'Approve deployment?', submitter: 'jmaury,lstocchi'
45 | }
46 |
47 | stage("Publish to Marketplace") {
48 | withCredentials([[$class: 'StringBinding', credentialsId: 'vscode_java_marketplace', variable: 'TOKEN']]) {
49 | def vsix = findFiles(glob: '**/*.vsix')
50 | sh "npm run extension:publish"
51 | }
52 | archive includes:"**/*.vsix"
53 |
54 | stage "Promote the build to stable"
55 | def vsix = findFiles(glob: '**/*.vsix')
56 | def extensionJson = readJSON file: 'vss-extension.json'
57 | sh "sftp -C ${UPLOAD_LOCATION}/stable/openshift-vsts/ <<< \$'put -p \"${vsix[0].path}\"'"
58 | }
59 | }
60 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2016 The Kubernetes Authors All rights reserved.
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenShift Extension for Azure DevOps
2 |
3 | | System | Status |
4 | | ------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
5 | | Build ([master](https://github.com/redhat-developer/openshift-vsts/tree/master) branch) |  |
6 | | [Marketplace](https://marketplace.visualstudio.com/items?itemName=redhat.openshift-vsts) | [](https://marketplace.visualstudio.com/items?itemName=redhat.openshift-vsts)
7 |
8 | ## What is it
9 |
10 | The OpenShift Extension for Azure DevOps offers tasks for integrating [OpenShift](https://github.com/openshift/origin) into your Azure DevOps build and release pipelines, for example by executing user defined `oc` commands.
11 |
12 | The extension is distributed via the [Azure DevOps Marketplace](https://marketplace.visualstudio.com/azuredevops) and can be found [here](https://marketplace.visualstudio.com/items?itemName=redhat.openshift-vsts).
13 |
14 | ## Getting Started
15 |
16 | Below you can find a collection of resources to better understand how the extension works.
17 |
18 | * [Getting Started](./docs/getting-started.md)
19 | * [Blog on developers.redhat.com](https://developers.redhat.com/blog/2019/12/05/introduction-to-the-red-hat-openshift-deployment-extension-for-microsoft-azure-devops/)
20 | * [Demo Video](https://www.youtube.com/watch?v=RBwpedmkvow)
21 |
22 | ## Known Issue
23 |
24 | New version 2.0.13 changed all the task definitions. This might cause old pipelines, created using plugin 2.0.12 or older, to fail during execution. The issue can be easily solved by loading, modifying (any irrelevant change is good enough), and saving the old pipeline so that a new definition is created and saved. [#170](https://github.com/redhat-developer/openshift-vsts/issues/170) Thanks to Ludovit Varga.
25 |
26 | ## How to contribute
27 |
28 | If you want to contribute, make sure to follow the [contribution guidelines](./CONTRIBUTING.md) when you open issues or submit pull requests.
29 |
30 | You find all the information you need to get coding in the [Development](./docs/development.md) documentation.
31 |
32 |
--------------------------------------------------------------------------------
/ci/README.md:
--------------------------------------------------------------------------------
1 | # Extension sanity check tests
2 |
3 | This directory contains a set of YAML configuration files for sanity testing the latest development release of this extension published by [OpenShift VSTS](https://marketplace.visualstudio.com/manage/publishers/openshiftvsts).
4 | To see how to deploy a development version refer to [deploying to staging](../docs/development.md#to-staging).
5 | The tests need to be triggered manually and the cluster connection (`integration_test_connection_openshift`) needs to be updated prior to doing so.
6 | There is no default/standby OpenShift cluster configured at the moment.
7 |
--------------------------------------------------------------------------------
/ci/integration.yml:
--------------------------------------------------------------------------------
1 | trigger: none
2 | pr: none
3 |
4 | jobs:
5 | - template: integration_job_v2.yml
6 | parameters:
7 | name: mac
8 | vmImage: 'macOS-10.13'
9 | - template: integration_job_v2.yml
10 | parameters:
11 | name: linux
12 | vmImage: 'ubuntu-16.04'
13 | - template: integration_job_v2.yml
14 | parameters:
15 | name: windows
16 | vmImage: 'vs2017-win2016'
17 |
--------------------------------------------------------------------------------
/ci/integration_job_v2.yml:
--------------------------------------------------------------------------------
1 | parameters:
2 | name: ''
3 | vmImage: ''
4 |
5 | jobs:
6 | - job: ${{parameters.name}}
7 | displayName: ${{parameters.name}}
8 | pool:
9 | vmImage: ${{parameters.vmImage}}
10 | steps:
11 | - task: oc-setup@2
12 | displayName: Setup oc
13 | inputs:
14 | openshiftService: 'integration_test_connection_openshift'
15 | - script: |
16 | oc new-project integration-test-${{parameters.name}}
17 | oc get pods
18 | displayName: Create namespace and list pods
19 | - task: oc-cmd@2
20 | displayName: Create ConfigMap
21 | inputs:
22 | openshiftService: 'integration_test_connection_openshift'
23 | cmd: 'oc create configmap test-config --from-literal=foo=bar -n integration-test-${{parameters.name}}'
24 | - task: config-map@2
25 | displayName: Update ConfigMap
26 | inputs:
27 | openshiftService: 'integration_test_connection_openshift'
28 | configMapName: 'test-config'
29 | namespace: 'integration-test-${{parameters.name}}'
30 | properties: '-foo snafu'
31 | - script: 'oc get configmaps test-config -o yaml -n integration-test-${{parameters.name}} | grep snafu'
32 | displayName: Check ConfigMap
33 | - bash: |
34 | oc delete project integration-test-${{parameters.name}}
35 | displayName: Delete namespace
36 | condition: always()
37 |
--------------------------------------------------------------------------------
/docs/details.md:
--------------------------------------------------------------------------------
1 | # OpenShift Extension for Azure DevOps
2 |
3 | 
4 |
5 | This Azure DevOps extension offers tasks for integrating [OpenShift](https://github.com/openshift/origin) into your build and release pipelines, for example by executing user defined `oc` commands.
6 |
7 | Refer to the [Get Started](https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md) guide to see the full list of tasks and how to use them.
8 |
9 | **NOTE:** This Azure DevOps extension assumes that cURL is already installed on the Agent that is running the build. If cURL is not located on the Agent, an error will be thrown, and the task will fail.
10 |
11 | This extension is currently in preview state.
12 | We are looking forward to your feedback, either on the [Marketplace](https://marketplace.visualstudio.com/items?itemName=redhat.openshift-vsts#review-details) or as feature requests on [GitHub](https://github.com/redhat-developer/openshift-vsts/issues).
13 |
--------------------------------------------------------------------------------
/docs/development.md:
--------------------------------------------------------------------------------
1 | # How to develop openshift-vsts
2 |
3 |
4 |
5 | - [Prerequisites](#prerequisites)
6 | - [npm build tasks](#npm-build-tasks)
7 | - [Transpile](#transpile)
8 | - [Test](#test)
9 | - [Create extension](#create-extension)
10 | - [Publish extension](#publish-extension)
11 | - [To staging](#to-staging)
12 | - [To production](#to-production)
13 | - [Write docs](#write-docs)
14 | - [CI](#ci)
15 | - [References](#references)
16 |
17 |
18 |
19 |
20 | ## Prerequisites
21 |
22 | - Install [Node.js](https://nodejs.org/en/)
23 | - Run npm setup script
24 |
25 | `> npm run setup`
26 |
27 |
28 | ## npm build tasks
29 |
30 | The various build tasks are driven via `npm`.
31 | Check [_package.json_](https://github.com/redhat-developer/openshift-vsts/blob/master/package.json) for the defined run scripts.
32 | To get a list of all available tasks run:
33 |
34 | `> npm run`
35 |
36 |
37 | ### Transpile
38 |
39 | To transpile TypeScript to JavaScript:
40 |
41 | `> npm run build`
42 |
43 | To watch your TypeScript files for changes and transpile on the fly:
44 |
45 | `> npm run build:watch`
46 |
47 |
48 | ### Test
49 |
50 | Test are written using [mocha](https://mochajs.org/) and live in the *_test_* directory of the checkout. You can run the tests via:
51 |
52 | `> npm test`
53 |
54 | There are a couple of test which go over the network and access the GitHub API.
55 | You can exclude them by setting the `MOCHA_TAG` environment variable like so:
56 |
57 | `MOCHA_TAG='--grep @network --invert' npm run test`
58 |
59 | For more information regarding test tagging refer to Mocha's [Tagging](https://github.com/mochajs/mocha/wiki/Tagging) documentation.
60 |
61 | You can get an HTML version of the test results into the _out_ directory by running:
62 |
63 | `> npm run test:report`
64 |
65 | If you are running the tests a lot, you might reach the GitHub API rate limit.
66 | In this case you can create a [GitHub access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line) and export it under the environment variable `GITHUB_ACCESS_TOKEN`.
67 |
68 |
69 | ### Create extension
70 |
71 | To create the extension (vsix file):
72 |
73 | `> npm run extension:create`
74 |
75 | To create the extension for a deploy to a staging publisher:
76 |
77 | `> npm run extension:create:dev`
78 |
79 | During development it can be handy to push patch releases to a test publisher.
80 | The following command will create the extension bumping the version of the extension as well as all as the versions of all tasks:
81 |
82 | `> npm run extension:create:patch`
83 |
84 |
85 |
86 | ### Publish extension
87 |
88 | Prerequisite for publishing from the command line is a [personal access token](https://docs.microsoft.com/en-us/azure/devops/extend/publish/command-line?view=vsts#acquire-the-tfs-cross-platform-command-line-interface).
89 | Once you have setup your token, you can chose to publish to a test/staging publisher or the production publisher 'redhat'.
90 |
91 |
92 | #### To staging
93 |
94 | To do a staging deploy you can specify a staging publisher by setting the `DEV_PUBLISHER` environment variable:
95 |
96 | ```bash
97 | > export TOKEN=
98 | > export DEV_PUBLISHER=
99 | > npm run clean
100 | > npm run build
101 | > npm run extension:create:dev
102 | > npm run extension:publish:dev
103 | ```
104 |
105 | The [OpenShift VSTS](https://marketplace.visualstudio.com/manage/publishers/openshiftvsts) (OpenShiftVSTS) can be used as a shared development publisher.
106 |
107 | Once the extension is installed, you can share it with a given user:
108 |
109 | ```bash
110 | > export EXT_SHARES=
111 | > export DEV_PUBLISHER=
112 | > npm run extension:share:dev
113 | ```
114 |
115 | To unshare:
116 |
117 | ```bash
118 | > export EXT_SHARES=
119 | > export DEV_PUBLISHER=
120 | > npm run extension:unshare:dev
121 | ```
122 |
123 |
124 | #### To production
125 |
126 | To do a production deploy under the [Red Hat](https://marketplace.visualstudio.com/manage/publishers/redhat) publisher follow these steps.
127 | They need to be executed from the _master_ branch.
128 |
129 | ```bash
130 | > export TOKEN=
131 | > npm run clean
132 | > npm install
133 | > npm run test
134 | > EXT_VERSION= npm run extension:version:set
135 | > npm run extension:create
136 | > npm run extension:publish
137 | > npm run extension:publish:commit
138 | ```
139 |
140 | You need to be member of this publisher!
141 |
142 |
143 | ### Write docs
144 |
145 | To write on the docs you can leverage [markserv](https://www.npmjs.com/package/markserv) npm, rendering and updating the docs as you go.
146 | Just run:
147 |
148 | ```bash
149 | > npm run docs
150 | ```
151 |
152 |
153 | ## CI
154 |
155 | There is one CI system configured for this project and it uses [GitHub Actions](https://github.com/redhat-developer/openshift-vsts/actions).
156 |
157 |
158 | ## References
159 |
160 | - Microsoft's [Develop Extensions](https://docs.microsoft.com/en-us/azure/devops/extend/?view=vsts) guide
161 | - [Getting Started](https://docs.microsoft.com/en-us/azure/devops/extend/get-started/node?view=vsts)
162 | - [Tutorials](https://docs.microsoft.com/en-us/azure/devops/extend/get-started/tutorials?view=vsts)
163 | - [Available pipeline environment variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=vsts)
164 | - [Publish from the command line](https://docs.microsoft.com/en-us/azure/devops/extend/publish/command-line?view=vsts)
165 | - [vsts-task-lib TypeScript API](https://github.com/Microsoft/vsts-task-lib/blob/master/node/docs/vsts-task-lib.md#toolrunnerToolRunnerargIf)
166 | - Microsoft's [Azure Pipelines tasks](https://github.com/Microsoft/vsts-tasks)
167 | - [Step by Step: Node Task with Typescript API](https://github.com/Microsoft/vsts-task-lib/blob/master/node/docs/stepbystep.md)
168 | - [How to Use npm as a Build Tool](https://www.keithcirkel.co.uk/how-to-use-npm-as-a-build-tool)
169 | - [Use Git within a pipeline script task](https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/git-commands?view=vsts&tabs=yaml)
170 | - [Custom authentication schemes](https://jessehouwing.net/vsts-build-release-define-custom-authentication-scheme/)
--------------------------------------------------------------------------------
/docs/getting-started.md:
--------------------------------------------------------------------------------
1 | # Getting started
2 |
3 | The OpenShift extension for Azure DevOps allows you to connect and interact with an [OpenShift](https://www.okd.io/) cluster as part of your build or release pipeline.
4 | The following paragraphs guide you through the process of using this extension.
5 |
6 |
7 |
8 | - [Connect to your OpenShift cluster](#connect-to-your-openshift-cluster)
9 | - [Configuring the OpenShift service connection](#configuring-the-openshift-service-connection)
10 | - [Basic Authentication](#basic-authentication)
11 | - [Token Authentication](#token-authentication)
12 | - [Kubeconfig](#kubeconfig)
13 | - [Set up the OpenShift connection at runtime](#set-up-the-openshift-connection-at-runtime)
14 | - [Pipeline Tasks](#pipeline-tasks)
15 | - [Install and setup oc](#install-and-setup-oc)
16 | - [Executing single oc commands](#executing-single-oc-commands)
17 | - [Executing single conditional oc commands](#executing-single-conditional-oc-commands)
18 | - [Updating a ConfigMap](#updating-a-configmap)
19 | - [YAML configuration](#yaml-configuration)
20 |
21 |
22 |
23 |
24 | ## Connect to your OpenShift cluster
25 |
26 | To use any of the pipeline tasks, you first need a way to connect to your cluster.
27 | In Azure DevOps, access to external and remote services is configured in [service connections](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=vsts).
28 |
29 | The OpenShift extension for Azure DevOps provides two ways to set up a connection: create a custom OpenShift service connection type which allows you to connect to your cluster using various authentication forms or by defining it at Task level when creating your pipeline.
30 |
31 | 
32 |
33 |
34 | ### Configuring the OpenShift service connection
35 |
36 | To configure an OpenShift connection, select the project settings (cogwheel icon).
37 | From there choose _Service connections_, followed by _New service connection_.
38 | Select the OpenShift service connection and use one of the following methods to configure authentication:
39 |
40 |
41 | ### Basic Authentication
42 |
43 | 
44 |
45 |
46 | - Server URL
47 | - Required. The URL of the Openshift cluster.
48 | - Username
49 | - Required. OpenShift username.
50 | - Password
51 | - Required. Password for the specified user.
52 | - Accept untrusted SSL certificates
53 | - Whether it is ok to accept self-signed (untrusted) certificates.
54 | - Certificate Authority File
55 | - The path where the certificate authority file is stored.
56 | - Service Connection Name
57 | - Required. The name you will use to refer to this service connection.
58 | - Grant Access permission to all pipelines
59 | - Allow all pipelines to use this connection. It allows YAML defined pipelines, which are not automatically authorized for service connections, to use this service connection.
60 |
61 |
62 |
63 |
64 | ### Token Authentication
65 |
66 | 
67 |
68 |
69 | - Server URL
70 | - Required. The URL of the Openshift cluster.
71 | - Accept untrusted SSL certificates
72 | - Whether it is ok to accept self-signed (untrusted) certificates.
73 | - Certificate Authority File
74 | - The path where the certificate authority file is stored.
75 | - API Token
76 | - Required. The API token used for authentication.
77 | - Service Connection Name
78 | - Required. The name you will use to refer to this service connection.
79 | - Grant Access permission to all pipelines
80 | - Allow all pipelines to use this connection. It allows YAML defined pipelines, which are not automatically authorized for service connections, to use this service connection.
81 |
82 |
83 |
84 | ### Kubeconfig
85 |
86 | 
87 |
88 |
89 | - Server URL
90 | - Required. The URL of the Openshift cluster.
91 | - Kubeconfig
92 | - The contents of the kubectl configuration file.
93 | - Service Connection Name
94 | - Required. The name you will use to refer to this service connection.
95 | - Grant Access permission to all pipelines
96 | - Allow all pipelines to use this connection. It allows YAML defined pipelines, which are not automatically authorized for service connections, to use this service connection.
97 |
98 |
99 | ---
100 |
101 | _**Note:** In version 1.\* of this extension the Azure DevOps built-in [Kubernetes service connection](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=vsts#sep-kuber) was used.
102 | If you want to keep using this service connection you need to select the 1.* version when configuring a task._
103 |
104 | ---
105 |
106 |
107 | ### Set up the OpenShift connection at runtime
108 |
109 | To set up an OpenShift connection at runtime, select the _Set Up Configuration on Runtime_ option in the _Service connection type_ field.
110 | You will be presented with two options: File Path and Inline Configuration.
111 |
112 | - File Path allows you to add a path where the agent will find the config file to use during the execution
113 |
114 | 
115 |
116 | - Inline Configuration expects you to copy the content of your config. The extension will create a new config file with the content inserted
117 |
118 | 
119 |
120 |
121 | ## Pipeline Tasks
122 |
123 | The following paragraphs describe each of the provided pipeline tasks and their use.
124 |
125 | Based on the options used, a task could need cURL to download the oc bundle requested.
126 | Each task assumes that cURL is already installed on the Agent that is running the build.
127 | If cURL is not located on the Agent, an error will be thrown, and the task will fail.
128 |
129 |
130 | ### Install and setup oc
131 |
132 | The most generic task is the _Install and setup oc_ task.
133 | This task allows you to install a specific version of the OpenShift CLI (`oc`).
134 | The installed binary matches the OS of your agent.
135 | The task also adds `oc` to the `PATH` and creates a kubeconfig file for authentication against the OpenShift cluster.
136 |
137 | After adding and configuring a _Install and setup oc_ task in your pipeline, you can use `oc` directly within your _Command Line_ task, for example:
138 |
139 | 
140 |
141 | To add the _Install and setup oc_ task to your pipeline, you can filter the appearing task list by searching for _Install oc_.
142 | The _Install oc_ has three configuration options.
143 |
144 | 
145 |
146 |
147 | - Service Connection Type
148 | - Required. Allows to set up a connection at runtime or by choosing an existing service connection. See Connect to your OpenShift cluster.
149 | - Version of oc to use
150 | - Allows to specify the version of oc to use for command execution, eg v3.10.0. If left blank the latest stable version is used. You can also specify a direct URL to the oc release bundle. See How the cache works
151 | - Proxy
152 | - Allows to specify a proxy (host:port) to use to download oc cli
153 |
154 |
155 | ---
156 |
157 |
158 | ### Executing single oc commands
159 |
160 | In case you want to execute a single `oc` command you can use the _Execute OpenShift command_ task.
161 |
162 | To add this task, you can filter the appearing task list by searching for _Execute oc command_.
163 | The _Execute oc command_ has six configuration options.
164 |
165 | 
166 |
167 |
168 | - Service Connection Type
169 | - Required. Allows to set up a connection at runtime or by choosing an existing service connection. See Connect to your OpenShift cluster.
170 | - Version of oc to use
171 | - Allows to specify the version of oc to use for command execution, eg v3.10.0. If left blank the latest stable version is used. You can also specify a direct URL to the oc release bundle. See How the cache works
172 | - Command to run
173 | - The actual oc command to run starting with the oc sub-command, eg "rollout latest dc/my-app -n production". Check the notes below to find out more features supported by the extension.
174 | - Ignore on success return value
175 | - It ignores a non-success return value from the current step and keeps executing the pipeline if it fails. If you are executing a step which contains a command like create/delete/patch but the resource has already been created/deleted/patched the pipeline could fail. By checking this option this error will be skipped and the execution will keep going.
176 | - Use local oc executable
177 | - It forces the extension to use, if present, the oc cli found in the machine where the agent is running. If no version is specified, the extension will use the local oc cli regardless of its version. If a version is specified then the extension will first check if the oc cli installed has the same version requested by the user, if not the correct oc cli will be downloaded.
178 | - Proxy
179 | - Allows to specify a proxy (host:port) to use to download oc cli
180 |
181 |
182 |
183 | ---
184 |
185 | _**Note:** It is possible to use variables defined in the agent.
186 | For example, to reference a file in the artefact \_my\_sources you could do:_
187 |
188 | ```bash
189 | apply -f ${SYSTEM_DEFAULTWORKINGDIRECTORY}/_my_sources/my-openshift-config.yaml
190 | ```
191 |
192 | ---
193 |
194 | ---
195 |
196 | _**Note:** The extension support command interpolation.
197 | For example, to execute a command inside another one you can execute:
198 |
199 | ```
200 | oc logs $(oc get pod -l app=test -o name)
201 | ```
202 |
203 | ---
204 |
205 | ---
206 |
207 | _**Note:** The extension supports the pipe (|) operator. Due to a limitation of the Azure library the extension only supports a single pipe per command. The pipe operator also allows the use of a different ToolRunner than oc (i.e. grep - the tool must be visible to the extension).
208 |
209 | ```
210 | oc describe pod/nodejs-ex | grep kubernetes
211 | ```
212 |
213 | ---
214 |
215 | ---
216 |
217 | _**Note:** The extension supports redirection (>, >>, 2>) operators. A redirection operator expects a valid path as its argument.
218 |
219 |
220 | - > (write): create the file if it does not exist and write on it. If it exists, its content will be overwritten.
221 | - >> (append): append text to the file
222 | - 2> (write stderr): redirect stderr to a file
223 |
224 |
225 | ```
226 | oc describe pod/nodejs-ex | grep kubernetes > /path/log.txt
227 | ```
228 |
229 | ---
230 |
231 | ---
232 |
233 |
234 | ### Executing conditional oc commands
235 |
236 | In case you want to execute a single conditional `oc` command you can use the _Execute conditional oc command_ task.
237 |
238 | To add this task, you can filter the appearing task list by searching for _Execute conditional oc command_.
239 | The _Execute conditional oc command_ has ten configuration options.
240 |
241 | 
242 |
243 |
244 | - Service Connection Type
245 | - Required. Allows to set up a connection at runtime or by choosing an existing service connection. See Connect to your OpenShift cluster.
246 | - Version of oc to use
247 | - Allows to specify the version of oc to use for command execution, eg v3.10.0. If left blank the latest stable version is used. You can also specify a direct URL to the oc release bundle. See How the cache works
248 | - Command to run
249 | - The oc command to run whenever the condition is met, eg "rollout latest dc/my-app -n production". Check the additional features supported by the extension.
250 | - Ignore on success return value
251 | - It ignores a non-success return value from the current step and keeps executing the pipeline if it fails. If you are executing a step which contains a command like create/delete/patch but the resource has already been created/deleted/patched the pipeline could fail. By checking this option this error will be skipped and the execution will keep going.
252 | - Condition type
253 | - The condition type to be checked over the resource specified. The condition types supported in the current release are `Exists` and `Not_exists`.
254 | - Resource on which to verify the condition
255 | - The extension expects a clear name of the resource/resources to be checked (E.g pods -l app=test). In the case shown in the example the extension, based on the condition type chosen, will check if there is at least one pod (Exists) or no pods at all (Not_exists) with that label.
256 |
- Time (in milliseconds) after which to stop the execution
257 | - The time the extension will wait before it stops checking the condition status. If the condition is not met before the timeout elapses the task will fail with an error. N.B: The default timeout is 5 minutes.
258 |
- Skip timed out error
259 | - If checked it allows the extension to execute the command even if the timeout elapses. In this case the task will not error when the timeout elapses and the task output will be displayed based on the result of the command execution
260 | - Use local oc executable
261 | - It forces the extension to use, if present, the oc cli found in the machine where the agent is running. If no version is specified, the extension will use the local oc cli regardless of its version. If a version is specified then the extension will first check if the oc cli installed has the same version requested by the user, if not the correct oc cli will be downloaded.
262 | - Proxy
263 | - Allows to specify a proxy (host:port) to use to download oc cli
264 |
265 |
266 | ---
267 |
268 | _**Note:** An example of conditional command task can be found here.
269 |
270 | ---
271 |
272 | ---
273 |
274 |
275 | ### Updating a ConfigMap
276 |
277 | An even more specific task offered by this extension is the _Update ConfigMap_ task.
278 | It allows you to update the properties of a given ConfigMap using a grid.
279 |
280 | To add this task, select the _+_ to add a task to your pipeline.
281 | You can filter the appearing task list by searching for _Update ConfigMap_.
282 | Add the _Update ConfigMap_ task to your pipeline using the _Add_ button.
283 |
284 | 
285 |
286 | The _Update ConfigMap_ task has six configuration options.
287 |
288 | 
289 |
290 |
291 | - Service Connection Type
292 | - Required. Allows to set up a connection at runtime or by choosing an existing service connection. See Connect to your OpenShift cluster.
293 | - Version of oc to use
294 | - Allows to specify the version of oc to use for command execution, eg v3.10.0. If left blank the latest stable version is used. You can also specify a direct URL to the oc release bundle. See How the cache works
295 | - Name of ConfigMap
296 | - Required. The name of the ConfigMap to update.
297 | - Namespace of ConfigMap
298 | - The namespace in which to find the ConfigMap. The current namespace is used if none is specified.
299 | - ConfigMap Properties
300 | - The properties to set/update. Only the properties which need creating/updating need to be listed. Space separated values need to be surrounded by quotes (").
301 | - Use local oc executable
302 | - It forces the extension to use, if present, the oc cli found in the machine where the agent is running. If no version is specified, the extension will use the local oc cli regardless of its version. If a version is specified then the extension will first check if the oc cli installed has the same version requested by the user, if not the correct oc cli will be downloaded.
303 | - Proxy
304 | - Allows to specify a proxy (host:port) to use to download oc cli
305 |
306 |
307 | ---
308 |
309 | _**Note:** It is possible to use variables defined in the agent.
310 | For example, to reference a variable MY_VAR defined in the pipeline configuration, you can use ${MY_VAR} as the property value._
311 |
312 |
313 | ## How the cache works in OpenShift VSTS extension
314 |
315 | OpenShift VSTS extension supports oc executable caching based on its version to avoid downloading the same bundle over and over when executing different pipelines.
316 |
317 | The cache is only enabled when the version is clearly specified in the task (e.g 4.1, 3.1.28..). If the version is defined as a URL or left blank (when wanting to use the latest oc version available) the extension will try to download the requested oc version without checking the cache.
318 |
319 | The oc executable will be cached inside the `_work/_tool/oc` folder.
320 |
321 |
322 | ## YAML configuration
323 |
324 | You can also use the tasks of the OpenShift extension as part of a YAML defined pipeline.
325 | The following configuration shows an example for each of the provided tasks:
326 |
327 |
328 | ```yaml
329 | jobs:
330 | - job: myjob
331 | displayName: MyJob
332 | pool:
333 | vmImage: 'vs2017-win2016'
334 | steps:
335 | # Install oc so that it can be used within a 'script' or bash 'task'
336 | - task: oc-setup@2
337 | displayName: Setup oc
338 | inputs:
339 | openshiftService: 'my_openshift_connection'
340 | # A script task making use of 'oc'
341 | - script: |
342 | oc new-project my-project
343 | oc apply -f ${SYSTEM_DEFAULTWORKINGDIRECTORY}/openshift/config.yaml -n my-project
344 | displayName:
345 | # Single shot 'oc' command
346 | - task: oc-cmd@2
347 | displayName: Wait for deployment
348 | inputs:
349 | openshiftService: 'my_openshift_connection'
350 | cmd: 'rollout status -w deployment/my-app'
351 | # Updating an existing ConfigMap
352 | - task: config-map@2
353 | displayName: Update ConfigMap
354 | inputs:
355 | openshiftService: 'my_openshift_connection'
356 | configMapName: 'my-config'
357 | namespace: 'my-project'
358 | properties: '-my-key1 my-value1 -my-key2 my-value2'
359 | ```
360 |
361 | ---
362 |
363 |
364 | This example shows how to use the conditional command task.
365 | In this case an application will be deployed and its build logs will be retrieved when the deployment process succeeds.
366 |
367 | ```yaml
368 | steps:
369 | - task: oc-cmd@2
370 | inputs:
371 | connectionType: 'Runtime Configuration'
372 | configurationPath: '/path/testconfig'
373 | version: '3.9.103'
374 | cmd: 'oc new-app https://github.com/sclorg/nodejs-ex -l app=test'
375 | - task: oc-conditional-cmd@2
376 | inputs:
377 | connectionType: 'Runtime Configuration'
378 | configurationPath: '/path/testconfig'
379 | version: '3.9.103'
380 | cmd: 'logs $(oc get bc -l app=test -o name)'
381 | condition: 'not_exists'
382 | resource: 'pods -l app=test'
383 | ```
384 |
385 | _**Note:** With Azure DevOps YAML defined pipelines are currently only available for build pipelines.
386 | Configuration as code for release pipelines is under development.
387 | See [here](https://stackoverflow.com/questions/52323065/azure-devops-yaml-release-pipelines) and [here](https://dev.azure.com/mseng/Azure%20DevOps%20Roadmap/_workitems/edit/1221170)._
388 |
389 |
390 |
391 |
--------------------------------------------------------------------------------
/header.js:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 |
--------------------------------------------------------------------------------
/images/adding_config_map_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/adding_config_map_task.png
--------------------------------------------------------------------------------
/images/adding_install_oc_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/adding_install_oc_task.png
--------------------------------------------------------------------------------
/images/adding_oc_cmd_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/adding_oc_cmd_task.png
--------------------------------------------------------------------------------
/images/basic_authentication.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/basic_authentication.png
--------------------------------------------------------------------------------
/images/cmd_exec_config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/cmd_exec_config.png
--------------------------------------------------------------------------------
/images/conditional_cmd_exec_config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/conditional_cmd_exec_config.png
--------------------------------------------------------------------------------
/images/configure_config_map_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/configure_config_map_task.png
--------------------------------------------------------------------------------
/images/configure_install_oc_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/configure_install_oc_task.png
--------------------------------------------------------------------------------
/images/file-path-connection-type.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/file-path-connection-type.png
--------------------------------------------------------------------------------
/images/inline-config-connection-type.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/inline-config-connection-type.png
--------------------------------------------------------------------------------
/images/kubeconfig_authentication.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/kubeconfig_authentication.png
--------------------------------------------------------------------------------
/images/kubernetes_service_connection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/kubernetes_service_connection.png
--------------------------------------------------------------------------------
/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/logo.png
--------------------------------------------------------------------------------
/images/oc_with_command_line_task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/oc_with_command_line_task.png
--------------------------------------------------------------------------------
/images/service-connection-types.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/service-connection-types.png
--------------------------------------------------------------------------------
/images/token_authentication.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/images/token_authentication.png
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "openshift-vsts",
3 | "version": "1.0.0",
4 | "description": "OpenShift TFS/VSTS Marketplace Extension",
5 | "main": "index.js",
6 | "engines": {
7 | "vscode": "^1.25.0"
8 | },
9 | "activationEvents": [
10 | "onDebug"
11 | ],
12 | "scripts": {
13 | "all": "npm install && npm run build && npm run test",
14 | "setup": "npm install -g tfx-cli typescript json del-cli copy-node-modules markserv ts-mocha mocha mocha-junit-reporter",
15 | "clean": "del-cli **/out **/node_modules **/.taskkey",
16 | "compile": "tsc -b .",
17 | "build": "npm run lint && tsc -b . && npm run copy-utils-file",
18 | "build:watch": "tsc -b tsconfig.json -w",
19 | "docs": "markserv .",
20 | "pretest": "npm run compile",
21 | "watch": "tsc -watch -p ./",
22 | "lint": "node_modules/.bin/eslint . --ext .ts --quiet",
23 | "test": "ts-mocha -p tsconfig.json test/*.test.ts --timeout 5000 ${MOCHA_TAG}",
24 | "test:report": "npm run test -- --reporter mocha-simple-html-reporter --reporter-options output=out/test-report.html",
25 | "test:report:ci": "npm run test -- --reporter mocha-junit-reporter --reporter-options mochaFile=out/test-report.xml",
26 | "prettier:base": "prettier --parser typescript --single-quote",
27 | "prettier:check": "npm run prettier:base -- --list-different \"src/*.ts\" \"test/*.ts\"",
28 | "prettier:write": "npm run prettier:base -- --write \"**/**/*.ts\"",
29 | "copy-node-modules": "for i in `ls tasks` ; do copy-node-modules . tasks/$i; done",
30 | "copy-utils-file": "cp src/oc-utils.json out/src/",
31 | "copy-task-utils": "for i in `ls tasks` ; do cp -r src/oc-utils.json tasks/$i/lib/; done",
32 | "copy-task-lib": "for i in `ls tasks` ; do cp -r out/src/. tasks/$i/lib/; done",
33 | "bump-task-version": "for i in `ls tasks` ; do TASK=$(cat tasks/$i/task.json | json -e 'this.version.Patch++'); echo \"$TASK\" > tasks/$i/task.json; done",
34 | "extension:version:set": "JSON=$(cat vss-extension.json | json -e \"this.version='${EXT_VERSION}'\") && echo \"$JSON\" > vss-extension.json",
35 | "extension:version:view": "cat vss-extension.json | json version",
36 | "preextension:create": "npm run copy-task-lib && npm run copy-node-modules && npm run copy-task-utils",
37 | "extension:create": "tfx extension create --token $TOKEN --output-path out",
38 | "preextension:create:dev": "npm run extension:create",
39 | "preextension:create:patch": "npm run bump-task-version && npm run extension:create -- --rev-version",
40 | "extension:create:dev": "tfx extension create --token $TOKEN --output-path out --publisher $DEV_PUBLISHER --overrides-file ./scripts/vsix-manifest-overrides.json",
41 | "extension:create:patch": "tfx extension create --token $TOKEN --output-path out --publisher $DEV_PUBLISHER --overrides-file ./scripts/vsix-manifest-overrides.json",
42 | "extension:publish": "tfx extension publish --token $TOKEN --vsix out/redhat.openshift-vsts-$(json -f vss-extension.json version).vsix",
43 | "extension:publish:dev": "tfx extension publish --token $TOKEN --vsix out/$DEV_PUBLISHER.openshift-vsts-$(json -f vss-extension.json version).vsix",
44 | "extension:share:dev": "tfx extension share --token $TOKEN --share-with $EXT_SHARES --publisher $DEV_PUBLISHER --extension-id openshift-vsts",
45 | "extension:unshare:dev": "tfx extension unshare --token $TOKEN --unshare-with $EXT_SHARES --publisher $DEV_PUBLISHER --extension-id openshift-vsts",
46 | "extension:publish:commit": "export EXT_VERSION=$(json -f vss-extension.json version) && git commit -a -m \"cut v${EXT_VERSION}\" && git tag v${EXT_VERSION} && git push --tags origin HEAD:master"
47 | },
48 | "repository": {
49 | "type": "git",
50 | "url": "https://github.com/redhat-developer/openshift-vsts.git"
51 | },
52 | "keywords": [],
53 | "author": "Red Hat",
54 | "license": "Apache-2.0",
55 | "bugs": {
56 | "url": "https://github.com/redhat-developer/openshift-vsts/issues"
57 | },
58 | "homepage": "https://github.com/redhat-developer/openshift-vsts#readme",
59 | "dependencies": {
60 | "@types/q": "^1.5.1",
61 | "@types/valid-url": "^1.0.2",
62 | "adm-zip": "^0.4.13",
63 | "argv-split": "^2.0.1",
64 | "azure-pipelines-task-lib": "^3.1.7",
65 | "azure-pipelines-tool-lib": "^1.3.2",
66 | "decompress": "^4.2.1",
67 | "decompress-tar": "^4.1.1",
68 | "decompress-targz": "^4.1.1",
69 | "del-cli": "^5.0.0",
70 | "node-fetch": "^2.6.7",
71 | "q": "^1.5.1",
72 | "semver": "^7.1.3",
73 | "substituter": "^1.3.0",
74 | "valid-url": "^1.0.9"
75 | },
76 | "devDependencies": {
77 | "@types/chai": "^4.1.6",
78 | "@types/glob": "^7.1.1",
79 | "@types/mocha": "^5.2.7",
80 | "@types/node": "^12.12.7",
81 | "@types/sinon": "^7.0.13",
82 | "@types/uuid": "^8.3.0",
83 | "@typescript-eslint/eslint-plugin": "^2.16.0",
84 | "@typescript-eslint/parser": "^2.16.0",
85 | "chai": "^4.2.0",
86 | "chai-fs": "^2.0.0",
87 | "commit-message-validator": "^0.1.6",
88 | "copy-node-modules": "^1.0.4",
89 | "eslint": "^6.7.1",
90 | "eslint-config-airbnb-base": "^14.0.0",
91 | "eslint-config-prettier": "^6.7.0",
92 | "eslint-import-resolver-typescript": "^2.0.0",
93 | "eslint-plugin-header": "^3.0.0",
94 | "eslint-plugin-import": "2.18.2",
95 | "eslint-plugin-json": "^2.0.1",
96 | "eslint-plugin-prettier": "^3.1.1",
97 | "istanbul": "^0.4.5",
98 | "mocha": "^6.0",
99 | "mocha-jenkins-reporter": "^0.4.8",
100 | "mocha-junit-reporter": "^1.18.0",
101 | "mocha-simple-html-reporter": "^1.1.0",
102 | "prettier": "^1.14.3",
103 | "remap-istanbul": "^0.13.0",
104 | "sinon": "^7.4.2",
105 | "ts-mocha": "^6.0",
106 | "ts-node": "^7.0.1",
107 | "typescript": "^4.0.2"
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/scripts/vsix-manifest-overrides.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "OpenShift Extension (dev)",
3 | "public": false
4 | }
5 |
--------------------------------------------------------------------------------
/src/config-map-task.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { RunnerHandler } from './oc-exec';
6 | import { InstallHandler } from './oc-install';
7 | import * as auth from './oc-auth';
8 | import { ConfigMap } from './config-map';
9 | import { BinaryVersion, convertStringToBinaryVersion, FindBinaryStatus, getAgentOsName, getReason } from './utils/exec_helper';
10 |
11 | import task = require('azure-pipelines-task-lib/task');
12 |
13 | async function run(): Promise {
14 | const version = task.getInput('version');
15 | const agentOS = getAgentOsName(task.getPlatform());;
16 | const useLocalOc: boolean = task.getBoolInput('useLocalOc');
17 | const proxy: string = task.getInput('proxy');
18 |
19 | const binaryVersion: BinaryVersion = convertStringToBinaryVersion(version);
20 | const ocBinary: FindBinaryStatus = await InstallHandler.installOc(binaryVersion, agentOS, useLocalOc, proxy);
21 | if (ocBinary.found === false) {
22 | return Promise.reject(new Error(getReason(ocBinary)));
23 | }
24 |
25 | const configMapName = task.getInput('configMapName');
26 | const namespace = task.getInput('namespace');
27 | const properties = task.getInput('properties');
28 | const configMap = new ConfigMap(configMapName, properties);
29 |
30 | await auth.createKubeConfig(ocBinary.path, agentOS);
31 | await RunnerHandler.execOc(ocBinary.path, configMap.patchCmd(namespace));
32 | }
33 |
34 | run()
35 | .then(() => {
36 | task.setResult(
37 | task.TaskResult.Succeeded,
38 | 'ConfigMap successfully updated.'
39 | );
40 | })
41 | .catch((err: Error) => {
42 | task.setResult(task.TaskResult.Failed, err.message);
43 | });
44 |
--------------------------------------------------------------------------------
/src/config-map.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import split = require('argv-split');
6 | import sub = require('substituter');
7 |
8 | class ConfigMap {
9 | readonly _name: string;
10 |
11 | readonly _properties: Map;
12 |
13 | constructor(name: string, properties: string) {
14 | this._name = name;
15 | this._properties = new Map();
16 | const keyValuePairs = split(properties);
17 | for (let i = 0; i < keyValuePairs.length; i += 2) {
18 | const key = keyValuePairs[i].replace(/^-/, '');
19 | let value = keyValuePairs[i + 1];
20 | value = sub(value, process.env);
21 | this._properties.set(key, value);
22 | }
23 | }
24 |
25 | get name(): string {
26 | return this._name;
27 | }
28 |
29 | patchCmd(namespace: string): string {
30 | let cmd = `patch configmap ${this.name} -p '{"data":{`;
31 | let i = 0;
32 | this._properties.forEach((value: string, key: string) => {
33 | cmd = `${cmd }"${key}": "${value}"`;
34 | if (i < this._properties.size - 1) {
35 | cmd = `${cmd }, `;
36 | }
37 | i++;
38 | });
39 | cmd = `${cmd }}}'`;
40 | if (namespace) {
41 | cmd = `${cmd } -n ${namespace}`;
42 | }
43 | return cmd;
44 | }
45 | }
46 |
47 | export { ConfigMap };
48 |
--------------------------------------------------------------------------------
/src/constants.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
// v4 oc client archive name prefixes, per OS
export const LINUXV4 = 'openshift-client-linux';
export const MACOSXV4 = 'openshift-client-mac';
export const WINV4 = 'openshift-client-windows';
// v3 oc client archive name fragments, per OS
export const LINUXV3 = 'linux';
export const MACOSXV3 = 'macosx';
export const WINV3 = 'windows';
// sentinel version string meaning "latest available release"
export const LATEST = 'latest';

// local file names used for the downloaded oc archive
export const OC_TAR_GZ = 'oc.tar.gz';
export const OC_ZIP = 'oc.zip';

// supported archive extensions
export const TAR_GZ = 'tar.gz';
export const ZIP = 'zip';

// connection/configuration option labels used by the task inputs
export const OPENSHIFT_SERVICE_OPTION = 'OpenShift Connection Service';
export const RUNTIME_CONFIGURATION_OPTION = 'Runtime Configuration';
// endpoint authorization scheme names (see oc-auth.ts)
export const BASIC_AUTHENTICATION = 'UsernamePassword';
export const TOKEN_AUTHENTICATION = 'Token';
export const NO_AUTHENTICATION = 'None';
24 |
--------------------------------------------------------------------------------
/src/oc-auth.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import * as fs from 'fs';
6 | import { RunnerHandler } from './oc-exec';
7 | import {
8 | BASIC_AUTHENTICATION,
9 | NO_AUTHENTICATION,
10 | TOKEN_AUTHENTICATION,
11 | RUNTIME_CONFIGURATION_OPTION,
12 | } from './constants';
13 |
14 | import task = require('azure-pipelines-task-lib/task');
15 | import tl = require('azure-pipelines-task-lib/task');
16 | import path = require('path');
17 |
/** Endpoint data of an OpenShift service connection (see getOpenShiftEndpoint). */
export interface OpenShiftEndpoint {
  /** URL to the OpenShift server */
  serverUrl: string;

  /** dictionary of auth data, e.g. username/password, apitoken or kubeconfig */
  parameters: {
    [key: string]: string;
  };

  /** auth scheme such as 'Token', 'UsernamePassword' or 'None' */
  scheme: string;
}
30 |
31 | /**
32 | * @return the OpenShift endpoint authorization as referenced by the task property 'openshiftService'.
33 | */
34 | export function getOpenShiftEndpoint(): OpenShiftEndpoint {
35 | const clusterConnection = task.getInput('openshiftService');
36 |
37 | const auth = task.getEndpointAuthorization(clusterConnection, false);
38 | const serverUrl = task.getEndpointUrl(clusterConnection, false);
39 |
40 | return {
41 | serverUrl,
42 | parameters: auth.parameters,
43 | scheme: auth.scheme
44 | };
45 | }
46 |
47 | /**
48 | * Determines whether certificate authority file should be used.
49 | *
50 | * @param endpoint the OpenShift endpoint.
51 | * @return oc option for using a certificate authority file.
52 | */
53 | export function getCertificateAuthorityFile(
54 | endpoint: OpenShiftEndpoint
55 | ): string {
56 | let certificateFile = '';
57 | if (endpoint.parameters.certificateAuthorityFile) {
58 | certificateFile = `--certificate-authority="${
59 | endpoint.parameters.certificateAuthorityFile
60 | }"`;
61 | }
62 | return certificateFile;
63 | }
64 |
65 | /**
66 | * Determines whether certificate verification should be skipped.
67 | *
68 | * @param endpoint the OpenShift endpoint.
69 | * @return oc option for skipping certificate verification.
70 | */
71 | export function skipTlsVerify(endpoint: OpenShiftEndpoint): string {
72 | let cmdSkipTlsVerify = '';
73 | if (endpoint.parameters.acceptUntrustedCerts === 'true') {
74 | cmdSkipTlsVerify = '--insecure-skip-tls-verify ';
75 | }
76 | return cmdSkipTlsVerify;
77 | }
78 |
79 | /**
80 | * Determines the default home directory of the user based on OS type
81 | *
82 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
83 | * @return the fully qualified path to the users home directory
84 | * @throws Error in case the environment variable to determine the users home
85 | * directory is not set.
86 | */
87 | export function userHome(osType: string): string {
88 | let workingDir;
89 |
90 | switch (osType) {
91 | case 'Windows_NT':
92 | workingDir = process.env.USERPROFILE;
93 | break;
94 | case 'Linux':
95 | case 'Darwin':
96 | workingDir = process.env.HOME;
97 | break;
98 | default:
99 | throw new Error('Unable to determine home directory');
100 | }
101 |
102 | if (workingDir === undefined) {
103 | throw new Error('Unable to determine home directory');
104 | }
105 |
106 | return workingDir;
107 | }
108 |
/**
 * Writes the cluster auth config to disk at `<home>/.kube/config`, creating
 * the `.kube` directory if needed. (The KUBECONFIG env variable itself is set
 * separately by exportKubeConfig.)
 *
 * @param inlineConfig the kubeconfig content to write to disk; must be non-empty.
 * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
 * @throws Error when inlineConfig is empty.
 */
export function writeKubeConfigToFile(inlineConfig: string, osType: string): void {
  if (!inlineConfig) {
    throw new Error('Empty kubeconfig is not allowed');
  }

  // ensure ~/.kube exists before writing the config file into it
  const kubeConfigDir = path.join(userHome(osType), '.kube');
  if (!tl.exist(kubeConfigDir)) {
    tl.mkdirP(kubeConfigDir);
  }

  const kubeConfig = path.join(kubeConfigDir, 'config');
  tl.writeFile(kubeConfig, inlineConfig);
}
128 |
129 | /**
130 | * Exports the KUBECONFIG environment variable.
131 | *
132 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
133 | */
134 | export function exportKubeConfig(osType: string): void {
135 | const kubeConfig = path.join(userHome(osType), '.kube', 'config');
136 | tl.setVariable('KUBECONFIG', kubeConfig);
137 | }
138 |
139 | /**
140 | * Creates the kubeconfig based on the endpoint authorization retrieved
141 | * from the OpenShift service connection.
142 | *
143 | * @param ocPath fully qualified path to the oc binary.
144 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
145 | */
146 | export async function createKubeConfigFromServiceConnection(ocPath: string, osType: string): Promise {
147 | const endpoint = getOpenShiftEndpoint();
148 |
149 | // potential values for EndpointAuthorization:
150 | //
151 | // parameters:{"apitoken":***}, scheme:'Token'
152 | // parameters:{"username":***,"password":***}, scheme:'UsernamePassword'
153 | // parameters:{"kubeconfig":***}, scheme:'None'
154 | const authType = endpoint.scheme;
155 | let useCertificateOrSkipTls = getCertificateAuthorityFile(endpoint);
156 | if (useCertificateOrSkipTls === '') {
157 | useCertificateOrSkipTls = skipTlsVerify(endpoint);
158 | }
159 | switch (authType) {
160 | case BASIC_AUTHENTICATION: {
161 | await RunnerHandler.execOc(
162 | ocPath,
163 | `login ${useCertificateOrSkipTls} -u ${endpoint.parameters.username} -p ${endpoint.parameters.password} ${endpoint.serverUrl}`
164 | );
165 | break;
166 | }
167 | case TOKEN_AUTHENTICATION: {
168 | await RunnerHandler.execOc(
169 | ocPath,
170 | `login ${useCertificateOrSkipTls} --token ${endpoint.parameters.apitoken} ${endpoint.serverUrl}`
171 | );
172 | break;
173 | }
174 | case NO_AUTHENTICATION: {
175 | writeKubeConfigToFile(endpoint.parameters.kubeconfig, osType);
176 | break;
177 | }
178 | default:
179 | throw new Error(`unknown authentication type '${authType}'`);
180 | }
181 | }
182 |
183 | /**
184 | * Verifies existence of the config file and sets KUBECONFIG environment variable.
185 | *
186 | * @param configPath The OpenShift endpoint.
187 | */
188 | export function exportKubeConfigToPath(configPath): void {
189 | try {
190 | if (fs.statSync(configPath).isFile()) {
191 | tl.setVariable('KUBECONFIG', configPath);
192 | } else {
193 | throw new Error(`Provided path ${configPath} is not a valid kubeconfig file.`);
194 | }
195 | } catch (ex) {
196 | throw new Error(`Provided kubeconfig path does not exist: ${configPath}`);
197 | }
198 | }
199 |
200 | export function setConfigurationRuntime(osType: string): void {
201 | const configurationType = tl.getInput("configurationType");
202 | if (configurationType === 'inline') {
203 | const inlineConfig = task.getInput('inlineConfig', false);
204 | writeKubeConfigToFile(inlineConfig, osType);
205 | exportKubeConfig(osType);
206 | } else {
207 | const configPath = task.getPathInput('configurationPath', false);
208 | exportKubeConfigToPath(configPath);
209 | }
210 | }
211 |
212 | /**
213 | * Creates the kubeconfig based on the connectionType selected by the user
214 | *
215 | * @param ocPath fully qualified path to the oc binary.
216 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
217 | */
218 | export async function createKubeConfig(ocPath: string, osType: string): Promise {
219 | const connectionType: string = task.getInput('connectionType');
220 |
221 | if (connectionType === RUNTIME_CONFIGURATION_OPTION) {
222 | setConfigurationRuntime(osType);
223 | } else {
224 | await createKubeConfigFromServiceConnection(ocPath, osType);
225 | exportKubeConfig(osType);
226 | }
227 | }
228 |
--------------------------------------------------------------------------------
/src/oc-condition.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { IExecSyncResult } from 'azure-pipelines-task-lib/toolrunner';
6 | import { ConditionStatus } from './utils/exec_helper';
7 | import { RunnerHandler } from './oc-exec';
8 |
9 | export class ConditionHandler {
10 |
11 | static defaultTimedOut: number = 5 * 60 * 1000;
12 |
13 | static async isConditionValid(path: string, condition: string, resource: string, inputTimedOut: string, noTimedOutError: boolean): Promise {
14 | const timedOut: number = this.getTimedOutFromInput(inputTimedOut);
15 | if (!timedOut) {
16 | return { valid: false, resultKind: 'condition-failed', reason: `TimedOut has not a valid value. Make sure to express its value in millisecond (e.g timedout: '10000').` };
17 | }
18 |
19 | console.log(`Start checking condition '${condition}' on resource '${resource}'. It could take a while... \n` +
20 | `This operation will run until the condition is met or the timed out elapses.\n` +
21 | `The timed out is currently set to ${timedOut / 1000} seconds.`);
22 |
23 | let status: ConditionStatus = this.checkCondition(path, condition, resource);
24 |
25 | if (status.resultKind === 'verification-in-progress') {
26 | const observer = RunnerHandler.spawnChild(path, ConditionHandler.getConditionCommandListener(resource));
27 |
28 | let timeoutRef: NodeJS.Timeout;
29 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
30 | const timeOutPromise: Promise = new Promise((resolve, _reject) => {
31 | timeoutRef = setTimeout(() => {
32 | if (noTimedOutError) {
33 | console.log(`Condition '${condition}' on resource '${resource}' has timed out but the 'noTimedOutError' option was enabled.`);
34 | resolve({ valid: true, resultKind: 'condition-skipped' });
35 | }
36 | resolve({ valid: false, resultKind: 'condition-timedout', reason: 'The timedout elapsed before the condition was met.'});
37 | }, timedOut);
38 | });
39 |
40 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
41 | const observerPromise: Promise = new Promise((resolve, _reject) => {
42 | // eslint-disable-next-line @typescript-eslint/no-unused-vars
43 | observer.stdout.on('data', _data => {
44 | // when something changes in the cluster we force another check to prevent false positive with resource deletion
45 | const result: ConditionStatus = ConditionHandler.checkCondition(path, condition, resource)
46 | if (result.resultKind !== 'verification-in-progress') {
47 | resolve(result);
48 | }
49 | });
50 |
51 | observer.stderr.on('data', (errorMsg) => {
52 | if (condition === 'not_exists' && ConditionHandler.isNotFoundError(errorMsg)) {
53 | resolve({ valid: true, resultKind: "condition-ok" });
54 | }
55 | resolve({ valid: false, resultKind: 'condition-failed', reason: `Failed to verify the condition. Error: ${errorMsg}` });
56 | });
57 | });
58 |
59 | status = await Promise.race([timeOutPromise, observerPromise]);
60 | // clear
61 | clearTimeout(timeoutRef);
62 | if (!observer.killed) observer.kill();
63 | }
64 | if (status.resultKind === 'condition-ok') {
65 | console.log(`Condition '${condition}' on resource '${resource}' has been met.`);
66 | }
67 |
68 | return status;
69 | }
70 |
71 | static async sleep(ms: number):Promise {
72 | return new Promise(resolve => setTimeout(resolve, ms));
73 | }
74 |
75 | static getConditionCommandListener(resource: string): string {
76 | return `get ${resource} -o name --watch=true`;
77 | }
78 |
79 | static checkCondition(ocBinary: string, condition: string, resource: string): ConditionStatus {
80 | switch (condition) {
81 | case 'exists':
82 | return this.resourceExists(ocBinary, resource, true);
83 | case 'not_exists':
84 | return this.resourceExists(ocBinary, resource, false);
85 | default:
86 | return { valid: false, resultKind: 'condition-failed', reason: 'Condition type is unknown.' }; // never called
87 | }
88 | }
89 |
90 | static resourceExists(ocBinary: string, resource: string, exists: boolean) : ConditionStatus {
91 | const command = `get ${resource} -o name`;
92 | const execResult: IExecSyncResult = RunnerHandler.execOcSync(ocBinary, command, true);
93 | if (ConditionHandler.isCommandErrored(execResult)) {
94 | const errorMsg = execResult ? execResult.stderr : execResult.error.message;
95 | return { valid: false, resultKind: 'condition-failed', reason: `Failed to verify the condition. Error: ${errorMsg}` };
96 | }
97 | if (exists) {
98 | return this.resourceDoExist(execResult);
99 | }
100 | return this.resourceNotExist(execResult);
101 | }
102 |
103 | static resourceDoExist(execResult: IExecSyncResult): ConditionStatus {
104 | if (execResult && execResult.stdout !== '') {
105 | // the command succeeded, the resource exists
106 | return { valid: true, resultKind: "condition-ok" };
107 | }
108 | return { valid: false, resultKind: "verification-in-progress" };
109 | }
110 |
111 | static resourceNotExist(execResult: IExecSyncResult): ConditionStatus {
112 | if (execResult && (execResult.stdout === '' || ConditionHandler.isNotFoundError(execResult.stderr))) {
113 | // the server returned an empty stdout and it means that the resource doen't exist
114 | return { valid: true, resultKind: "condition-ok" };
115 | }
116 | return { valid: false, resultKind: "verification-in-progress" };
117 | }
118 |
119 | static getTimedOutFromInput(inputTimedOut: string): number {
120 | if (!inputTimedOut) {
121 | return this.defaultTimedOut;
122 | }
123 | // eslint-disable-next-line no-restricted-globals
124 | if (isNaN(+inputTimedOut)) {
125 | return undefined;
126 | }
127 | return +inputTimedOut;
128 | }
129 |
130 | static isNotFoundError(error: string): boolean {
131 | return error && error.replace('\n', '').endsWith('not found');
132 | }
133 |
134 | static isCommandErrored(execResult: IExecSyncResult): boolean {
135 | return !execResult || !!execResult.error || (execResult.stderr && !ConditionHandler.isNotFoundError(execResult.stderr));
136 | }
137 |
138 | }
--------------------------------------------------------------------------------
/src/oc-conditional-exec-task.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { RunnerHandler } from './oc-exec';
6 | import { InstallHandler } from './oc-install';
7 | import * as auth from './oc-auth';
8 | import { BinaryVersion, convertStringToBinaryVersion, FindBinaryStatus, getReason, ConditionStatus, isFailed, isTimedOut, getAgentOsName } from './utils/exec_helper';
9 | import { ConditionHandler } from './oc-condition';
10 |
11 | import task = require('azure-pipelines-task-lib/task');
12 |
13 | async function run(): Promise {
14 | const version = task.getInput('version');
15 | const argLine = task.getInput('cmd');
16 |
17 | const condition = task.getInput('condition');
18 | const resource = task.getInput('resource');
19 | const timedout = task.getInput('timedout');
20 | const noTimedOutError = task.getBoolInput('noTimedOutError');
21 |
22 | const ignoreFlag: boolean = task.getBoolInput('ignoreFlag');
23 | const useLocalOc: boolean = task.getBoolInput('useLocalOc');
24 | const proxy: string = task.getInput('proxy');
25 | const agentOS = getAgentOsName(task.getPlatform());;
26 |
27 | const binaryVersion: BinaryVersion = convertStringToBinaryVersion(version);
28 | const ocBinary: FindBinaryStatus = await InstallHandler.installOc(binaryVersion, agentOS, useLocalOc, proxy);
29 | if (ocBinary.found === false) {
30 | return Promise.reject(new Error(getReason(ocBinary)));
31 | }
32 |
33 | await auth.createKubeConfig(ocBinary.path, agentOS);
34 |
35 | const conditionStatus: ConditionStatus = await ConditionHandler.isConditionValid(ocBinary.path, condition, resource, timedout, noTimedOutError);
36 | if (isFailed(conditionStatus) || isTimedOut(conditionStatus)) {
37 | return Promise.reject(new Error(conditionStatus.reason));
38 | }
39 | await RunnerHandler.execOc(ocBinary.path, argLine, ignoreFlag);
40 | }
41 |
42 | run()
43 | .then(() => {
44 | task.setResult(
45 | task.TaskResult.Succeeded,
46 | 'oc command successfully executed.'
47 | );
48 | })
49 | .catch((err: Error) => {
50 | task.setResult(task.TaskResult.Failed, err.message);
51 | });
52 |
--------------------------------------------------------------------------------
/src/oc-exec-task.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { RunnerHandler } from './oc-exec';
6 | import { InstallHandler } from './oc-install';
7 | import * as auth from './oc-auth';
8 | import { BinaryVersion, convertStringToBinaryVersion, FindBinaryStatus, getAgentOsName, getReason } from './utils/exec_helper';
9 |
10 | import task = require('azure-pipelines-task-lib/task');
11 |
12 | async function run(): Promise {
13 | const version = task.getInput('version');
14 | const argLine = task.getInput('cmd');
15 | const ignoreFlag: boolean = task.getBoolInput('ignoreFlag');
16 | const useLocalOc: boolean = task.getBoolInput('useLocalOc');
17 | const proxy: string = task.getInput('proxy');
18 | const agentOS = getAgentOsName(task.getPlatform());;
19 |
20 | const binaryVersion: BinaryVersion = convertStringToBinaryVersion(version);
21 | const ocBinary: FindBinaryStatus = await InstallHandler.installOc(binaryVersion, agentOS, useLocalOc, proxy);
22 | if (ocBinary.found === false) {
23 | return Promise.reject(new Error(getReason(ocBinary)));
24 | }
25 |
26 | await auth.createKubeConfig(ocBinary.path, agentOS);
27 | await RunnerHandler.execOc(ocBinary.path, argLine, ignoreFlag);
28 | }
29 |
30 | run()
31 | .then(() => {
32 | task.setResult(
33 | task.TaskResult.Succeeded,
34 | 'oc command successfully executed.'
35 | );
36 | })
37 | .catch((err: Error) => {
38 | task.setResult(task.TaskResult.Failed, err.message);
39 | });
40 |
--------------------------------------------------------------------------------
/src/oc-exec.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import {
6 | ToolRunner,
7 | IExecOptions,
8 | IExecSyncResult
9 | } from 'azure-pipelines-task-lib/toolrunner';
10 | import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
11 | import * as fs from 'fs';
12 |
13 | import split = require('argv-split');
14 | import tl = require('azure-pipelines-task-lib/task');
15 | import stream = require('stream');
16 | import sub = require('substituter');
17 |
18 | export class RunnerHandler {
19 | /**
20 | * Prepares oc for execution and runs the specified command.
21 | *
22 | * @param ocPath absolute path to the oc binary. If null is passed the binary is determined by running 'which oc'.
23 | * @param argLine the command to run
24 | */
25 | static async execOc(path: string | null, argLine: string, ignoreFlag?: boolean): Promise {
26 | const ocPath = path === null ? 'oc' : path;
27 | const options: IExecOptions | undefined = RunnerHandler.createExecOptions(
28 | undefined,
29 | ignoreFlag
30 | );
31 |
32 | // substitute internal commands
33 | argLine = RunnerHandler.interpolateCommands(ocPath, argLine);
34 | if (!argLine) {
35 | return Promise.reject(new Error(`Failed to interpolate internal commands in ${argLine}`));
36 | }
37 |
38 | // split cmd based on redirection operators
39 | const cmds: string[] = argLine.split(/(?=2(?=>))|(?=[>|])/);
40 | const trs: ToolRunner[] = RunnerHandler.initToolRunners(cmds, ocPath);
41 | if (trs.length === 0) {
42 | return Promise.reject(new Error(`Unable to create any ToolRunner by ${argLine}`));
43 | }
44 | const tr: ToolRunner = RunnerHandler.unifyToolRunners(cmds, trs, options);
45 | await tr.exec(options);
46 |
47 | }
48 |
49 | /**
50 | * Creating a resulting toolrunner to execute user command
51 | *
52 | * @param cmds list of commands
53 | * @param trs list of toolrunners
54 | */
55 | static unifyToolRunners(cmds: string[], trs: ToolRunner[], options?: IExecOptions): ToolRunner {
56 | let i = 0;
57 | let trResult: ToolRunner = trs[i];
58 | while (++i < cmds.length) {
59 | const fstCmd: string = cmds[i - 1];
60 | const sndCmd: string = cmds[i];
61 | if (!fstCmd.startsWith('|') && sndCmd.startsWith('|')) {
62 | trResult = RunnerHandler.buildPipeToolRunner(cmds, trs, i);
63 | } else if (sndCmd.startsWith('>') && sndCmd.trim().length > 1) {
64 | const event =
65 | fstCmd.startsWith('2')
66 | ? (RunnerHandler.createExecOptions(options, undefined, true),
67 | 'stderr')
68 | : 'stdout';
69 | trResult.on(
70 | event,
71 | RunnerHandler.writeAfterCommandExecution(sndCmd, fstCmd.startsWith('>'))
72 | );
73 | }
74 | }
75 |
76 | return trResult;
77 | }
78 |
79 | static createExecOptions(options?: IExecOptions, ignoreReturnCode?: boolean, failOnStdErr?: boolean, isSilent?: boolean): IExecOptions {
80 | if (ignoreReturnCode === undefined && failOnStdErr === undefined && isSilent === undefined) {
81 | return options;
82 | }
83 |
84 | if (!options) {
85 | options = {
86 | cwd: process.cwd(),
87 | env: ({ ...process.env}) as { [key: string]: string },
88 | silent: isSilent !== undefined ? isSilent : false,
89 | failOnStdErr: failOnStdErr !== undefined ? failOnStdErr : false,
90 | ignoreReturnCode:
91 | ignoreReturnCode !== undefined ? ignoreReturnCode : false,
92 | windowsVerbatimArguments: false,
93 | outStream: process.stdout as stream.Writable,
94 | errStream: process.stderr as stream.Writable
95 | };
96 | } else {
97 | options.silent = isSilent !== undefined ? isSilent : false;
98 | options.ignoreReturnCode =
99 | ignoreReturnCode !== undefined
100 | ? ignoreReturnCode
101 | : options.ignoreReturnCode;
102 | options.failOnStdErr =
103 | failOnStdErr !== undefined ? failOnStdErr : options.failOnStdErr;
104 | }
105 | return options;
106 | }
107 |
108 | static buildPipeToolRunner(cmds: string[], trs: ToolRunner[], index: number): ToolRunner {
109 | const nextPipes: number[] = RunnerHandler._getNextPipes(cmds, index);
110 | let trPipeResult: ToolRunner = trs[nextPipes[nextPipes.length - 1]];
111 | for (let c = nextPipes.length - 2; c >= 0; c--) {
112 | trPipeResult = trs[nextPipes[c]].pipeExecOutputToTool(trPipeResult);
113 | }
114 | return trs[index - 1].pipeExecOutputToTool(trPipeResult);
115 | }
116 |
117 | static writeAfterCommandExecution(cmd: string, append: boolean): (data: any) => void {
118 | const writeAfterCommandsExecution = data => {
119 | const path = cmd.substring(1).trim();
120 | if (append) {
121 | fs.appendFile(path, data, err => {
122 | if (err) throw err;
123 | console.log(`The file ${path} has been saved!`);
124 | });
125 | } else {
126 | fs.writeFile(path, data, err => {
127 | if (err) throw err;
128 | console.log(`The file ${path} has been saved!`);
129 | append = true;
130 | });
131 | }
132 | };
133 | return writeAfterCommandsExecution;
134 | }
135 |
136 | static _getNextPipes(cmds: string[], index: number): number[] {
137 | const cmdsWithPipe: number[] = [];
138 | for (let i = index; i < cmds.length; i++) {
139 | if (!cmds[i].startsWith('|')) {
140 | break;
141 | }
142 | cmdsWithPipe.push(i);
143 | }
144 | return cmdsWithPipe;
145 | }
146 |
147 | /**
148 | * Splits the specified argument line into tokens and interpolates potential environment variables.
149 | *
150 | * @param argLine The command line arguments as single string
151 | * @param removeOc Flag to check if oc command is present in args and remove it
152 | * @return array of arguments with potential environment variables interpolated
153 | */
154 | static prepareCmdArguments(argLine: string, removeOc?: boolean): string[] {
155 | const interpolatedArgs = sub(argLine, process.env);
156 | let args = split(interpolatedArgs);
157 | if (removeOc && (args[0] === 'oc' || args[0] === 'oc.exe')) {
158 | args = args.slice(1);
159 | }
160 | return args;
161 | }
162 |
163 | /**
164 | * Build up a toolrunner based on the command to be executed
165 | *
166 | * @param cmd command to be executed
167 | * @param ocPath path oc cli tool
168 | */
169 | static prepareToolRunner(cmd: string, ocPath: string): ToolRunner {
170 | // first element in each command, without considering redirection operator, has to be the tool needed to execute it (e.g. oc, grep, findstr, ...)
171 | let tr: ToolRunner;
172 | if (cmd.startsWith('>') || cmd.startsWith('2')) {
173 | return tr;
174 | }
175 |
176 | cmd = cmd.startsWith('|') ? cmd.substring(1).trim() : cmd.trim();
177 | const arg = RunnerHandler.prepareCmdArguments(cmd);
178 | // add tool to exec
179 | if (arg[0] === 'oc' || arg[0] === 'oc.exe') {
180 | tr = tl.tool(ocPath);
181 | } else {
182 | // if user wants to use a different tool (e.g grep) to work with previous oc command output
183 | tr = tl.tool(tl.which(arg[0], true));
184 | }
185 | // add args to toolrunner
186 | for (const argN of arg.slice(1)) {
187 | tr.arg(argN);
188 | }
189 | return tr;
190 | }
191 |
192 | /**
193 | * Initialize all toolrunners for the list of commands specified
194 | *
195 | * @param cmds list of commands to be executed
196 | * @param ocPath path oc cli tool
197 | */
198 | static initToolRunners(cmds: string[], ocPath: string): ToolRunner[] {
199 | if (!cmds[0]) {
200 | return [];
201 | }
202 | // first cmd in list has to be oc cmd and user can omit "oc"
203 | if (!cmds[0].startsWith('oc')) {
204 | cmds[0] = `oc ${cmds[0]}`;
205 | }
206 |
207 | const trs: ToolRunner[] = [];
208 | // loop through concatenated commands
209 | for (const cmd of cmds) {
210 | trs.push(RunnerHandler.prepareToolRunner(cmd, ocPath));
211 | }
212 | return trs;
213 | }
214 |
215 | /**
216 | * Discover if args contains any internal commands (a command inside a command, e.g oc log $(oc get pods -l name=test)) and replaces
217 | * their values with their results by executing them one by one.
218 | *
219 | * @param ocPath absolute path to the oc binary. If null is passed the binary is determined by running 'which oc'.
220 | * @param argLine the command to run
221 | */
222 | static interpolateCommands(ocPath: string, argLine: string): string {
223 | // check if there are internal commands to be sustituted with their result
224 | const cmdsToSubstitute: string[] = RunnerHandler.matchInternalCommands(argLine);
225 | if (cmdsToSubstitute.length === 0) {
226 | return argLine;
227 | }
228 |
229 | for (const cmd of cmdsToSubstitute) {
230 | const execCmdResult = RunnerHandler.execOcSync(ocPath, cmd);
231 | if (execCmdResult && execCmdResult.stdout) {
232 | argLine = argLine.replace(`$(${cmd})`, execCmdResult.stdout);
233 | } else {
234 | return undefined;
235 | }
236 | }
237 |
238 | return argLine;
239 | }
240 |
241 | /**
242 | * Manual match to avoid using lookbehind regex rule which is only supported from ecma2018+ and it will result in failures with older nodejs versions
243 | * More info https://node.green/
244 | *
245 | * @param argLine command to run
246 | */
247 | static matchInternalCommands(argLine: string): string[] {
248 | const internals: string[] = [];
249 | let currIndex = 0;
250 | while (currIndex !== -1) {
251 | const startIndex = argLine.indexOf('$(', currIndex);
252 | let endIndex = -1;
253 | if (startIndex !== -1) {
254 | endIndex = argLine.indexOf(')', startIndex);
255 | internals.push(argLine.substring(startIndex + 2, endIndex));
256 | }
257 | currIndex = endIndex;
258 | }
259 | return internals;
260 | }
261 |
262 | static execOcSync(
263 | ocPath: string | null,
264 | argLine: string,
265 | isSilent?: boolean
266 | ): IExecSyncResult {
267 | if (ocPath === null) {
268 | ocPath = 'oc';
269 | }
270 |
271 | const options: IExecOptions | undefined = RunnerHandler.createExecOptions(undefined, false, false, isSilent);
272 | const oc: ToolRunner = tl.tool(ocPath);
273 | for (const arg of RunnerHandler.prepareCmdArguments(argLine, true)) {
274 | oc.arg(arg);
275 | }
276 |
277 | let execResult: IExecSyncResult;
278 |
279 | try {
280 | execResult = oc.execSync(options);
281 | tl.debug(`stdout ${execResult && execResult.stdout ? execResult.stdout : ''}`);
282 | tl.debug(`stderr ${execResult && execResult.stderr ? execResult.stderr : ''}`);
283 | } catch (err) {
284 | execResult = {
285 | code: 1,
286 | stderr: '',
287 | stdout: '',
288 | error: new Error(`Failed when executing ${argLine}. Error: ${err}`)
289 | };
290 | tl.debug(`error ex ${err}`);
291 | }
292 |
293 | return execResult;
294 | }
295 |
296 | static spawnChild(ocPath: string, argLine: string): ChildProcessWithoutNullStreams {
297 | const args: string[] = RunnerHandler.prepareCmdArguments(argLine, true);
298 | return spawn(ocPath, args);
299 | }
300 | }
301 |
--------------------------------------------------------------------------------
/src/oc-install.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import * as fs from 'fs';
6 | import { ToolRunner, IExecSyncResult } from 'azure-pipelines-task-lib/toolrunner';
7 | import * as toolLib from 'azure-pipelines-tool-lib/tool';
8 | import * as semver from 'semver';
9 | import { RunnerHandler } from './oc-exec';
10 | import { LINUXV3, MACOSXV3, WINV3, LINUXV4, MACOSXV4, WINV4, OC_TAR_GZ, OC_ZIP, LATEST, ZIP, TAR_GZ } from './constants';
11 | import { unzipArchive } from './utils/zip_helper';
12 | import { BinaryVersion, FindBinaryStatus } from './utils/exec_helper';
13 |
14 | import tl = require('azure-pipelines-task-lib/task');
15 | import path = require('path');
16 | import fetch = require('node-fetch');
17 |
18 | export class InstallHandler {
19 | /**
20 | * Downloads the requested oc CLI and returns the full path to the executable.
21 | *
22 | * @param versionToUse the version of `oc` to install.
23 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'. See https://nodejs.org/api/os.html#os_os_type
24 | * @param useLocalOc to use the current oc cli already installed in machine
25 | * @param proxy proxy to use to download oc
26 | * @return the full path to the installed executable or null if the install failed.
27 | */
28 | static async installOc(versionToUse: BinaryVersion, osType: string, useLocalOc: boolean, proxy: string): Promise {
29 | if (useLocalOc) {
30 | const localOcBinary: FindBinaryStatus = InstallHandler.getLocalOcBinary(versionToUse);
31 | if (localOcBinary.found) {
32 | return localOcBinary;
33 | }
34 | }
35 |
36 | if (!versionToUse.valid) {
37 | versionToUse = InstallHandler.latestStable(osType);
38 | if (versionToUse.valid === false) {
39 | return { found: false, reason: versionToUse.reason };
40 | }
41 | }
42 |
43 | // check if oc version requested exists in cache
44 | let versionToCache: string;
45 | if (versionToUse.type === 'number') {
46 | versionToCache = InstallHandler.versionToCache(versionToUse.value);
47 | const toolCached: FindBinaryStatus = InstallHandler.versionInCache(versionToCache, osType);
48 | if (toolCached.found) {
49 | return toolCached;
50 | }
51 | }
52 |
53 | tl.debug('creating download directory');
54 | const downloadDir = `${process.env.SYSTEM_DEFAULTWORKINGDIRECTORY}/.download`;
55 | if (!fs.existsSync(downloadDir)) {
56 | tl.mkdirP(downloadDir);
57 | }
58 |
59 | const url: string = await InstallHandler.getOcURLToDownload(versionToUse, osType);
60 | if (!url) {
61 | return { found: false, reason: 'Unable to determine URL where to download oc executable.' };
62 | }
63 |
64 | tl.debug(`downloading: ${url}`);
65 | const ocBinary: FindBinaryStatus = await InstallHandler.downloadAndExtract(url, downloadDir, osType, versionToCache, proxy);
66 | if (ocBinary.found === false) {
67 | return { found: false, reason: ocBinary.reason};
68 | }
69 |
70 | return ocBinary;
71 | }
72 |
73 | /**
74 | * Determines the latest stable version of the OpenShift CLI on mirror.openshift.
75 | *
76 | * @return the version of the latest OpenShift CLI on mirror.openshift.
77 | */
78 | static latestStable(osType: string): BinaryVersion {
79 | tl.debug('determining latest oc version');
80 |
81 | const bundle = InstallHandler.getOcBundleByOSAndVersion(osType, 4);
82 | if (!bundle) {
83 | return { valid: false, reason: 'Unable to find Oc bundle url. OS Agent is not supported at this moment.' };
84 | }
85 | const ocUtils = InstallHandler.getOcUtils();
86 | const url = `${ocUtils.openshiftV4BaseUrl}/${LATEST}/${bundle}`;
87 | tl.debug(`latest stable oc version: ${url}`);
88 |
89 | return { valid: true, type: 'url', value: url };
90 | }
91 |
92 | /**
93 | * Returns the download URL for the oc CLI for a given version v(major).(minor).(patch) (e.g v3.11.0).
94 | * The binary type is determined by the agent's operating system.
95 | *
96 | * @param {string} version Oc version.
97 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
98 | * @returns {Promise} Promise string representing the URL to the tarball. undefined is returned
99 | * if no matching URL can be determined for the given tag.
100 | */
101 | static ocBundleURL(version: string, osType: string, latest?: boolean): string {
102 | tl.debug(`determining tarball URL for version ${version}`);
103 |
104 | if (!version) {
105 | return undefined;
106 | }
107 |
108 | // remove char v if present to ensure old pipelines keep working when the extension will be updated
109 | if (version.startsWith('v')) {
110 | version = version.substr(1);
111 | }
112 |
113 | let url = '';
114 | // determine the base_url based on version
115 | let reg = new RegExp('\\d+(?=\\.)');
116 | const vMajorRegEx: RegExpExecArray = reg.exec(version);
117 | if (!vMajorRegEx || vMajorRegEx.length === 0) {
118 | tl.debug('Error retrieving version major');
119 | return undefined;
120 | }
121 | const vMajor: number = +vMajorRegEx[0];
122 | const ocUtils = InstallHandler.getOcUtils();
123 |
124 | // if we need the latest correct release of this oc version we need to retrieve the (major).(minor) of the version
125 | if (latest) {
126 | reg = new RegExp('\\d+\\.\\d+(?=\\.)*');
127 | const versionRegEx: RegExpExecArray = reg.exec(version);
128 | if (!versionRegEx || versionRegEx.length === 0) {
129 | tl.debug(
130 | 'Error retrieving version release - unable to find latest version'
131 | );
132 | return undefined;
133 | }
134 | const baseVersion: string = versionRegEx[0]; // e.g 3.11
135 | if (!ocUtils[`oc${baseVersion}`]) {
136 | tl.debug(`Error retrieving latest patch for oc version ${baseVersion}`);
137 | return undefined;
138 | }
139 | version = ocUtils[`oc${baseVersion}`];
140 | }
141 |
142 | if (vMajor === 3) {
143 | url = `${ocUtils.openshiftV3BaseUrl}/${version}/`;
144 | } else if (vMajor === 4) {
145 | url = `${ocUtils.openshiftV4BaseUrl}/${version}/`;
146 | } else {
147 | tl.debug('Invalid version');
148 | return undefined;
149 | }
150 |
151 | version = version.includes('stable') ? undefined : version;
152 | const bundle = InstallHandler.getOcBundleByOSAndVersion(osType, vMajor, version);
153 | if (!bundle) {
154 | tl.debug('Unable to find bundle url');
155 | return undefined;
156 | }
157 |
158 | url += bundle;
159 |
160 | tl.debug(`archive URL: ${url}`);
161 | return url;
162 | }
163 |
164 | static getOcBundleByOSAndVersion(osType: string, vMajor: number, version?: string): string {
165 | version = !version ? '' : `-${version}`;
166 | // determine the bundle path based on the OS type
167 | switch (`${osType}V${vMajor.toString()}`) {
168 | case 'LinuxV4': {
169 | return `${LINUXV4}${version}.${TAR_GZ}`;
170 | }
171 | case 'LinuxV3': {
172 | return `${LINUXV3}/${OC_TAR_GZ}`;
173 | }
174 | case 'DarwinV4': {
175 | return `${MACOSXV4}${version}.${TAR_GZ}`;
176 | }
177 | case 'DarwinV3': {
178 | return `${MACOSXV3}/${OC_TAR_GZ}`;
179 | }
180 | case 'Windows_NTV4': {
181 | return `${WINV4}${version}.${ZIP}`;
182 | }
183 | case 'Windows_NTV3': {
184 | return `${WINV3}/${OC_ZIP}`;
185 | }
186 | default: {
187 | return undefined;
188 | }
189 | }
190 | }
191 |
192 | /**
193 | * Downloads and extract the oc release archive.
194 | *
195 | * @param url the oc release download URL.
196 | * @param downloadDir the directory into which to extract the archive.
197 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
198 | * @param version version of oc cli to download
199 | * @param proxy proxy to use to download oc
200 | * It is the responsibility of the caller to ensure that the directory exist.
201 | */
202 | static async downloadAndExtract(url: string, downloadDir: string, osType: string, versionToCache: string, proxy: string): Promise {
203 | if (!url) {
204 | return { found: false, reason: 'URL where to download oc is not valid.' };
205 | }
206 |
207 | downloadDir = path.normalize(downloadDir);
208 | if (!tl.exist(downloadDir)) {
209 | return { found: false, reason: `Unable to extract Oc executable from archive. Directory ${downloadDir} does not exist.` };
210 | }
211 |
212 | const parts = url.split('/');
213 | const archive = parts[parts.length - 1];
214 | const archivePath = path.join(downloadDir, archive);
215 |
216 | if (!tl.exist(archivePath)) {
217 | const curl: ToolRunner = tl.tool('curl');
218 | curl
219 | .arg('-s')
220 | .argIf(!!proxy, ['-x', proxy])
221 | .arg('-L')
222 | .arg('-o')
223 | .arg(archivePath)
224 | .arg(url);
225 | await curl.exec();
226 | }
227 |
228 | tl.debug(`expanding ${archivePath} into ${downloadDir}`);
229 |
230 | let archiveType = path.extname(archive);
231 | const expandDir = archive.replace(archiveType, '');
232 | // handle tar.gz explicitly
233 | if (path.extname(expandDir) === '.tar') {
234 | archiveType = '.tar.gz';
235 | }
236 |
237 | await unzipArchive(archiveType, archivePath, downloadDir);
238 |
239 | let ocBinary = InstallHandler.ocBinaryByOS(osType);
240 |
241 | ocBinary = path.join(downloadDir, ocBinary);
242 | if (!tl.exist(ocBinary)) {
243 | return { found: false, reason: `Oc binary path ${ocBinary} doesn't exist.` };
244 | }
245 |
246 | fs.chmodSync(ocBinary, '0755');
247 | if (versionToCache) {
248 | await toolLib.cacheFile(ocBinary, InstallHandler.ocBinaryByOS(osType), 'oc', versionToCache);
249 | }
250 | return { found: true, path: ocBinary };
251 | }
252 |
253 | /**
254 | * Returns the url to download oc binary
255 | *
256 | * @param version the version to download
257 | * @param osType The OS of the agent. One of 'Linux', 'Darwin' or 'Windows_NT'.
258 | */
259 | static async getOcURLToDownload(version: BinaryVersion, osType: string): Promise {
260 | if (!version.valid) {
261 | return undefined;
262 | }
263 |
264 | if (version.valid && version.type === 'url') {
265 | return version.value;
266 | }
267 |
268 | let url = InstallHandler.ocBundleURL(version.value, osType, false);
269 | let findURLofLatest = !url;
270 | if (url) {
271 | // check if url is valid otherwise take the latest stable oc cli for this version
272 | const response = await fetch(url, { method: 'HEAD' });
273 | findURLofLatest = !response.ok;
274 | }
275 | if (findURLofLatest) {
276 | url = InstallHandler.ocBundleURL(version.value, osType, true);
277 | }
278 | return url;
279 | }
280 |
281 | /**
282 | * Adds oc to the PATH environment variable.
283 | *
284 | * @param ocPath the full path to the oc binary. Must be a non null.
285 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
286 | */
287 | static addOcToPath(ocPath: string, osType: string): void {
288 | if (ocPath === null || ocPath === '') {
289 | throw new Error('path cannot be null or empty');
290 | }
291 |
292 | if (osType === 'Windows_NT') {
293 | const dir = ocPath.substr(0, ocPath.lastIndexOf('\\'));
294 | tl.setVariable('PATH', `${dir };${ tl.getVariable('PATH')}`);
295 | } else {
296 | const dir = ocPath.substr(0, ocPath.lastIndexOf('/'));
297 | tl.setVariable('PATH', `${dir }:${ tl.getVariable('PATH')}`);
298 | }
299 | }
300 |
301 | /**
302 | * Retrieve the oc CLI installed in the machine.
303 | *
304 | * @param version the version of `oc` to be used. If not specified any `oc` version, if found, will be used.
305 | * @return the installed executable
306 | */
307 | static getLocalOcBinary(version: BinaryVersion): FindBinaryStatus {
308 | let ocBinaryStatus: FindBinaryStatus;
309 | let ocPath: string | undefined;
310 | try {
311 | ocPath = tl.which('oc', true);
312 | ocBinaryStatus = { found: true, path: ocPath };
313 | tl.debug(`ocPath ${ocPath}`);
314 | } catch (ex) {
315 | ocBinaryStatus = { found: false };
316 | tl.debug(`Oc has not been found on this machine. Err ${ ex}`);
317 | }
318 |
319 | if (version.valid && version.type === 'number' && ocPath) {
320 | const localOcVersion: BinaryVersion = InstallHandler.getOcVersion(ocPath);
321 | tl.debug(`localOcVersion ${localOcVersion} vs ${version.value}`);
322 | if (!localOcVersion.valid || localOcVersion.value.toLowerCase() !== version.value.toLowerCase()) {
323 | ocBinaryStatus = { found: false };
324 | }
325 | }
326 |
327 | return ocBinaryStatus;
328 | }
329 |
330 | /**
331 | * Retrieve the version of the oc CLI found in path.
332 | *
333 | * @param ocPath the path of `oc` to be used
334 | * @return the version of oc
335 | */
336 | static getOcVersion(ocPath: string): BinaryVersion {
337 | let result: IExecSyncResult | undefined = RunnerHandler.execOcSync(ocPath, 'version --short=true --client=true', true);
338 |
339 | if (!result || result.stderr) {
340 | tl.debug(`error ${result && result.stderr ? result.stderr : ''}`);
341 | // if oc version failed we're dealing with oc < 4.1
342 | result = RunnerHandler.execOcSync(ocPath, 'version', true);
343 | }
344 |
345 | if (!result || !result.stdout) {
346 | return { valid: false, reason: `An error occured when retrieving version of oc CLI in ${ocPath}` };
347 | }
348 |
349 | tl.debug(`stdout ${result.stdout}`);
350 | const regexVersion = new RegExp('v[0-9]+.[0-9]+.[0-9]+');
351 | const versionObj = regexVersion.exec(result.stdout);
352 |
353 | if (versionObj && versionObj.length > 0) {
354 | return { valid: true, type: 'number', value: versionObj[0]};
355 | }
356 |
357 | return { valid: false, reason: `The version of oc CLI in ${ocPath} is in an unknown format.`};
358 | }
359 |
360 | /**
361 | * Retrieve the version of oc CLI in cache
362 | *
363 | * @param versionToCache version to search in cache
364 | * @param osType the OS type. One of 'Linux', 'Darwin' or 'Windows_NT'.
365 | */
366 | static versionInCache(version: string, osType: string): FindBinaryStatus {
367 | let cachePath: string;
368 | if (version) {
369 | cachePath = toolLib.findLocalTool('oc', version);
370 | if (cachePath) {
371 | return { found: true, path: path.join(cachePath, InstallHandler.ocBinaryByOS(osType)) };
372 | }
373 | }
374 | return { found: false };
375 | }
376 |
377 | static getOcUtils(): { [key: string]: string } {
378 | const rawData = fs.readFileSync(path.resolve(__dirname || '', 'oc-utils.json'), 'utf-8');
379 | return JSON.parse(rawData.toString());
380 | }
381 |
382 | private static versionToCache(version: string): string {
383 | const sanitizedVersion: semver.SemVer = semver.coerce(version);
384 | if (!sanitizedVersion) return undefined;
385 | return sanitizedVersion.version;
386 | }
387 |
388 | private static ocBinaryByOS(osType: string): string {
389 | if (osType === 'Windows_NT') return 'oc.exe';
390 | return 'oc';
391 | }
392 | }
393 |
--------------------------------------------------------------------------------
/src/oc-setup-task.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { InstallHandler } from './oc-install';
6 | import * as auth from './oc-auth';
7 | import { BinaryVersion, convertStringToBinaryVersion, FindBinaryStatus, getAgentOsName, getReason } from './utils/exec_helper';
8 |
9 | import task = require('azure-pipelines-task-lib/task');
10 |
11 | async function run(): Promise {
12 | const version: string = task.getInput('version');
13 | const agentOS: string = getAgentOsName(task.getPlatform());
14 | const proxy: string = task.getInput('proxy');
15 |
16 | const binaryVersion: BinaryVersion = convertStringToBinaryVersion(version);
17 | const ocBinary: FindBinaryStatus = await InstallHandler.installOc(binaryVersion, agentOS, false, proxy);
18 | if (ocBinary.found === false) {
19 | return Promise.reject(new Error(getReason(ocBinary)));
20 | }
21 |
22 | InstallHandler.addOcToPath(ocBinary.path, agentOS);
23 | await auth.createKubeConfig(ocBinary.path, agentOS);
24 | }
25 |
26 | run()
27 | .then(() => {
28 | task.setResult(
29 | task.TaskResult.Succeeded,
30 | 'oc successfully installed and configured'
31 | );
32 | })
33 | .catch((err: Error) => {
34 | task.setResult(task.TaskResult.Failed, err.message);
35 | });
36 |
--------------------------------------------------------------------------------
/src/oc-utils.json:
--------------------------------------------------------------------------------
1 | {
2 | "openshiftV3BaseUrl": "https://mirror.openshift.com/pub/openshift-v3/clients",
3 | "openshiftV4BaseUrl": "https://mirror.openshift.com/pub/openshift-v4/clients/ocp",
4 | "oc3.3": "3.3.1.46.45",
5 | "oc3.4": "3.4.1.44.57",
6 | "oc3.5": "3.5.5",
7 | "oc3.6": "3.6",
8 | "oc3.7": "3.7.126",
9 | "oc3.8": "3.8.46",
10 | "oc3.9": "3.9.103",
11 | "oc3.10": "3.10.183",
12 | "oc3.11": "3.11.215",
13 | "oc4.1": "stable-4.1",
14 | "oc4.2": "stable-4.2",
15 | "oc4.3": "stable-4.3",
16 | "oc4.4": "stable-4.4"
17 | }
--------------------------------------------------------------------------------
/src/utils/exec_helper.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import validUrl = require('valid-url');
6 | import task = require('azure-pipelines-task-lib/task');
7 |
// Variants of the outcome of a condition check, discriminated by `resultKind`.

// The watched condition was satisfied.
interface ConditionValid {
  readonly valid: true;
  readonly resultKind: 'condition-ok';
}

// No condition was requested, so the check was skipped.
interface ConditionSkipped {
  readonly valid: true;
  readonly resultKind: 'condition-skipped';
}

// The condition did not become true within the allotted time.
interface ConditionTimedOut {
  readonly valid: false,
  readonly resultKind: 'condition-timedout';
  readonly reason: string
}

// The condition check itself failed; `reason` carries the error text.
interface ConditionFailed {
  readonly valid: false,
  readonly resultKind: 'condition-failed';
  readonly reason: string
}

// The check is still running (intermediate state).
interface ConditionVerificationInProgress {
  readonly valid: false;
  readonly resultKind: 'verification-in-progress';
}

// Union of all possible condition-check outcomes; narrow via `resultKind`.
export type ConditionStatus = ConditionValid | ConditionSkipped | ConditionTimedOut | ConditionFailed | ConditionVerificationInProgress;
36 |
37 | export function isFailed(execResult: ConditionStatus): execResult is ConditionFailed {
38 | return execResult.resultKind === 'condition-failed';
39 | }
40 |
41 | export function isTimedOut(execResult: ConditionStatus): execResult is ConditionTimedOut {
42 | return execResult.resultKind === 'condition-timedout';
43 | }
44 |
// A usable oc version: either a direct download URL or a version number.
interface BinaryVersionValid {
  readonly valid: true;
  readonly type: 'url' | 'number';
  readonly value: string;
}

// A version that could not be determined; `reason` explains why.
interface BinaryVersionNotValid {
  readonly valid: false;
  readonly reason: string;
}

// Discriminated union for a requested/resolved oc version; narrow via `valid`.
export type BinaryVersion = BinaryVersionValid | BinaryVersionNotValid;

// An oc binary located on disk.
export interface BinaryFound {
  readonly found: true;
  readonly path: string;
}

// No oc binary could be located; `reason` (when present) explains why.
export interface BinaryNotFound {
  readonly found: false;
  readonly reason?: string;
}

// Discriminated union for the result of an oc binary search; narrow via `found`.
export type FindBinaryStatus = BinaryFound | BinaryNotFound;
69 |
70 | export function convertStringToBinaryVersion(version: string): BinaryVersion {
71 | if (!version) {
72 | return { valid: false, reason: 'User run extension without any version' };
73 | }
74 | if (validUrl.isWebUri(version)) {
75 | return { valid: true, type: 'url', value: version};
76 | }
77 | return { valid:true, type: 'number', value: version };
78 | }
79 |
80 | export function getReason(version: BinaryNotFound): string {
81 | return version.reason ? version.reason : 'error';
82 | }
83 |
84 | export function getAgentOsName(os: task.Platform): string {
85 | switch (os) {
86 | case task.Platform.Windows: return "Windows_NT";
87 | case task.Platform.Linux: return "Linux";
88 | case task.Platform.MacOS: return "Darwin";
89 | default: return "";
90 | }
91 | }
--------------------------------------------------------------------------------
/src/utils/zip_helper.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import decompress = require('decompress');
6 | import decompressTar = require('decompress-tar');
7 | import decompressTargz = require('decompress-targz');
8 | import Zip = require('adm-zip');
9 |
10 | export async function unzipArchive(
11 | archiveType: string,
12 | archivePath: string,
13 | downloadDir: string
14 | ): Promise {
15 | switch (archiveType) {
16 | case '.zip': {
17 | const zip = new Zip(archivePath);
18 | zip.extractAllTo(downloadDir);
19 | break;
20 | }
21 | case '.tar': {
22 | await decompress(archivePath, downloadDir, {
23 | filter: file => file.data.length > 0,
24 | plugins: [decompressTar()]
25 | });
26 | break;
27 | }
28 | case '.tgz':
29 | case '.tar.gz': {
30 | await decompress(archivePath, downloadDir, {
31 | filter: file => file.data.length > 0,
32 | plugins: [decompressTargz()]
33 | });
34 | break;
35 | }
36 | default: {
37 | return Promise.reject(new Error(`unknown archive format ${archivePath}`));
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/tasks/config-map/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/tasks/config-map/icon.png
--------------------------------------------------------------------------------
/tasks/config-map/task.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "69a85a38-f912-4fe6-98de-d97f8afa2f80",
3 | "name": "config-map",
4 | "friendlyName": "Update ConfigMap",
5 | "description": "Applies environment properties to specified OpenShift ConfigMap.",
6 | "author": "Red Hat",
7 | "helpMarkDown": "See [getting started](https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md)",
8 | "category": "Utility",
9 | "visibility": [
10 | "Build",
11 | "Release"
12 | ],
13 | "preview": true,
14 | "demands": [],
15 | "groups": [
16 | {
17 | "name": "openshiftCluster",
18 | "displayName": "OpenShift Cluster",
19 | "isExpanded": true
20 | },
21 | {
22 | "name": "commands",
23 | "displayName": "Commands",
24 | "isExpanded": true
25 | }
26 | ],
27 | "version": {
28 | "Major": 3,
29 | "Minor": 0,
30 | "Patch": 2
31 | },
32 | "minimumAgentVersion": "2.144.0",
33 | "instanceNameFormat": "config-map $(message)",
34 | "inputs": [
35 | {
36 | "name": "connectionType",
37 | "type": "pickList",
38 | "label": "Service connection type",
39 | "defaultValue": "OpenShift Connection Service",
40 | "required": true,
41 | "options": {
42 | "OpenShift Connection Service": "OpenShift Connection Service",
43 | "Runtime Configuration": "Set Up Configuration on Runtime"
44 | },
45 | "groupName": "openshiftCluster",
46 | "helpMarkDown": "Select a service connection type."
47 | },
48 | {
49 | "name": "openshiftService",
50 | "type": "connectedService:openshift",
51 | "label": "OpenShift service connection",
52 | "groupName": "openshiftCluster",
53 | "helpMarkDown": "Select OpenShift service connection to use.",
54 | "required": true,
55 | "visibleRule": "connectionType = OpenShift Connection Service"
56 | },
57 | {
58 | "name": "configurationType",
59 | "type": "radio",
60 | "label": "Configuration type",
61 | "defaultValue": "file",
62 | "groupName": "openshiftCluster",
63 | "helpMarkDown": "Type of configuration for oc command. It can be a file path or an inline script.",
64 | "options": {
65 | "file": "File Path",
66 | "inline": "Inline Configuration"
67 | },
68 | "visibleRule": "connectionType = Runtime Configuration"
69 | },
70 | {
71 | "name": "configurationPath",
72 | "type": "filePath",
73 | "label": "File Path",
74 | "defaultValue": "",
75 | "required": true,
76 | "helpMarkDown": "Filename, directory, or URL to the kubeconfig file that will be used with the commands.",
77 | "groupName": "openshiftCluster",
78 | "visibleRule": "configurationType = file"
79 | },
80 | {
81 | "name": "inlineConfig",
82 | "type": "multiLine",
83 | "properties": {
84 | "resizable": "true",
85 | "rows": "10",
86 | "maxLength": "5000"
87 | },
88 | "required": true,
89 | "defaultValue": "",
90 | "label": "Inline configuration",
91 | "helpMarkDown": "Inline kubeconfig for oc command",
92 | "groupName": "openshiftCluster",
93 | "visibleRule": "configurationType = inline"
94 | },
95 | {
96 | "name": "version",
97 | "type": "string",
98 | "label": "Version of oc",
99 | "defaultValue": "",
100 | "required": false,
101 | "groupName": "commands",
102 | "helpMarkDown": "Select the oc version to use e.g. 'v3.10.0' (leave blank for latest). You can also specify a direct URL to a oc release bundle."
103 | },
104 | {
105 | "name": "configMapName",
106 | "type": "string",
107 | "label": "Name of the ConfigMap",
108 | "defaultValue": "",
109 | "required": true,
110 | "groupName": "commands",
111 | "helpMarkDown": "Name of the ConfigMap to which to apply the properties."
112 | },
113 | {
114 | "name": "namespace",
115 | "type": "string",
116 | "label": "Namespace of ConfigMap",
117 | "defaultValue": "",
118 | "required": false,
119 | "groupName": "commands",
      "helpMarkDown": "Specify the namespace of the ConfigMap. If none is specified the current namespace is used."
121 | },
122 | {
123 | "name": "properties",
124 | "type": "multiLine",
125 | "label": "ConfigMap properties",
126 | "defaultValue": "",
127 | "required": false,
128 | "helpMarkDown": "To view the ConfigMap parameters in a grid, click on “…” next to the textbox.",
129 | "groupName": "commands",
130 | "properties": {
131 | "editorExtension": "ms.vss-services-azure.parameters-grid"
132 | }
133 | },
134 | {
135 | "name": "uselocalOc",
136 | "type": "boolean",
137 | "label": "Use local oc executable",
138 | "defaultValue": "false",
139 | "required": false,
140 | "groupName": "commands",
141 | "helpMarkDown": "Check if oc is already installed in the machine and use that if available"
142 | },
143 | {
144 | "name": "proxy",
145 | "type": "string",
146 | "label": "Use proxy to download oc executable",
147 | "defaultValue": "",
148 | "required": false,
149 | "groupName": "commands",
150 | "helpMarkDown": "make use of a proxy to download oc executable"
151 | }
152 | ],
153 | "execution": {
154 | "Node10": {
155 | "target": "lib/config-map-task.js",
156 | "workingDirectory": "$(currentDirectory)"
157 | }
158 | }
159 | }
160 |
--------------------------------------------------------------------------------
/tasks/oc-cmd/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/tasks/oc-cmd/icon.png
--------------------------------------------------------------------------------
/tasks/oc-cmd/task.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "ebaf1cdc-a60f-4e57-ba29-e3b8f80aeb1e",
3 | "name": "oc-cmd",
4 | "friendlyName": "Execute oc command",
5 | "description": "OpenShift execute CLI command.",
6 | "author": "Red Hat",
7 | "helpMarkDown": "See [getting started](https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md)",
8 | "category": "Utility",
9 | "visibility": [
10 | "Build",
11 | "Release"
12 | ],
13 | "preview": true,
14 | "demands": [],
15 | "groups": [{
16 | "name": "openshiftCluster",
17 | "displayName": "OpenShift Cluster",
18 | "isExpanded": true
19 | },
20 | {
21 | "name": "commands",
22 | "displayName": "Commands",
23 | "isExpanded": true
24 | }
25 | ],
26 | "version": {
27 | "Major": 3,
28 | "Minor": 0,
29 | "Patch": 2
30 | },
31 | "minimumAgentVersion": "2.144.0",
32 | "instanceNameFormat": "oc-cmd $(message)",
33 | "inputs": [
34 | {
35 | "name": "connectionType",
36 | "type": "pickList",
37 | "label": "Service connection type",
38 | "defaultValue": "OpenShift Connection Service",
39 | "required": true,
40 | "options": {
41 | "OpenShift Connection Service": "OpenShift Connection Service",
42 | "Runtime Configuration": "Set Up Configuration on Runtime"
43 | },
44 | "groupName": "openshiftCluster",
45 | "helpMarkDown": "Select a service connection type."
46 | },
47 | {
48 | "name": "openshiftService",
49 | "type": "connectedService:openshift",
50 | "label": "OpenShift service connection",
51 | "groupName": "openshiftCluster",
52 | "helpMarkDown": "Select OpenShift service connection to use.",
53 | "required": true,
54 | "visibleRule": "connectionType = OpenShift Connection Service"
55 | },
56 | {
57 | "name": "configurationType",
58 | "type": "radio",
59 | "label": "Configuration type",
60 | "defaultValue": "file",
61 | "groupName": "openshiftCluster",
62 | "helpMarkDown": "Type of configuration for oc command. It can be a file path or an inline script.",
63 | "options": {
64 | "file": "File Path",
65 | "inline": "Inline Configuration"
66 | },
67 | "visibleRule": "connectionType = Runtime Configuration"
68 | },
69 | {
70 | "name": "configurationPath",
71 | "type": "filePath",
72 | "label": "File Path",
73 | "defaultValue": "",
74 | "required": true,
75 | "helpMarkDown": "Filename, directory, or URL to the kubeconfig file that will be used with the commands.",
76 | "groupName": "openshiftCluster",
77 | "visibleRule": "configurationType = file"
78 | },
79 | {
80 | "name": "inlineConfig",
81 | "type": "multiLine",
82 | "properties": {
83 | "resizable": "true",
84 | "rows": "10",
85 | "maxLength": "5000"
86 | },
87 | "required": true,
88 | "defaultValue": "",
89 | "label": "Inline configuration",
90 | "helpMarkDown": "Inline kubeconfig for oc command",
91 | "groupName": "openshiftCluster",
92 | "visibleRule": "configurationType = inline"
93 | },
94 | {
95 | "name": "version",
96 | "type": "string",
97 | "label": "Version of oc to use",
98 | "defaultValue": "",
99 | "required": false,
100 | "groupName": "commands",
101 | "helpMarkDown": "Select the oc version to use e.g. 'v3.10.0' (leave blank for latest). You can also specify a direct URL to a oc release bundle."
102 | },
103 | {
104 | "name": "cmd",
105 | "type": "string",
106 | "label": "Command to run",
107 | "defaultValue": "",
108 | "required": true,
109 | "groupName": "commands",
110 | "helpMarkDown": "Specify the oc command to run."
111 | },
112 | {
113 | "name": "ignoreFlag",
114 | "type": "boolean",
115 | "label": "Ignore non success return value",
116 | "defaultValue": "false",
117 | "required": false,
118 | "groupName": "commands",
119 | "helpMarkDown": "Specify if the non success return value of the oc command run has to be ignored. E.g if the command oc create/delete/... fail because the resource has already been created/deleted/.. the pipeline will continue its execution"
120 | },
121 | {
122 | "name": "uselocalOc",
123 | "type": "boolean",
124 | "label": "Use local oc executable",
125 | "defaultValue": "false",
126 | "required": false,
127 | "groupName": "commands",
128 | "helpMarkDown": "Check if oc is already installed in the machine and use that if available"
129 | },
130 | {
131 | "name": "proxy",
132 | "type": "string",
133 | "label": "Use proxy to download oc executable",
134 | "defaultValue": "",
135 | "required": false,
136 | "groupName": "commands",
137 | "helpMarkDown": "make use of a proxy to download oc executable"
138 | }
139 | ],
140 | "execution": {
141 | "Node10": {
142 | "target": "lib/oc-exec-task.js",
143 | "workingDirectory": "$(currentDirectory)"
144 | }
145 | }
146 | }
147 |
--------------------------------------------------------------------------------
/tasks/oc-conditional-cmd/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/tasks/oc-conditional-cmd/icon.png
--------------------------------------------------------------------------------
/tasks/oc-conditional-cmd/task.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "88e0bab6-9798-47cb-a37d-ef65f41992ae",
3 | "name": "OcConditionalCmd",
4 | "friendlyName": "Execute conditional oc command",
5 | "description": "OpenShift execute CLI command if the condition is met.",
6 | "author": "Red Hat",
7 | "helpMarkDown": "See [getting started](https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md)",
8 | "category": "Utility",
9 | "visibility": [
10 | "Build",
11 | "Release"
12 | ],
13 | "preview": true,
14 | "demands": [],
15 | "groups": [{
16 | "name": "openshiftCluster",
17 | "displayName": "OpenShift Cluster",
18 | "isExpanded": true
19 | },
20 | {
21 | "name": "commands",
22 | "displayName": "Commands",
23 | "isExpanded": true
24 | }
25 | ],
26 | "version": {
27 | "Major": 3,
28 | "Minor": 0,
29 | "Patch": 2
30 | },
31 | "minimumAgentVersion": "2.144.0",
32 | "instanceNameFormat": "Oc Conditional Cmd",
33 | "inputs": [
34 | {
35 | "name": "connectionType",
36 | "type": "pickList",
37 | "label": "Service connection type",
38 | "defaultValue": "OpenShift Connection Service",
39 | "required": true,
40 | "options": {
41 | "OpenShift Connection Service": "OpenShift Connection Service",
42 | "Runtime Configuration": "Set Up Configuration on Runtime"
43 | },
44 | "groupName": "openshiftCluster",
45 | "helpMarkDown": "Select a service connection type."
46 | },
47 | {
48 | "name": "openshiftService",
49 | "type": "connectedService:openshift",
50 | "label": "OpenShift service connection",
51 | "groupName": "openshiftCluster",
52 | "helpMarkDown": "Select OpenShift service connection to use.",
53 | "required": true,
54 | "visibleRule": "connectionType = OpenShift Connection Service"
55 | },
56 | {
57 | "name": "configurationType",
58 | "type": "radio",
59 | "label": "Configuration type",
60 | "defaultValue": "file",
61 | "groupName": "openshiftCluster",
62 | "helpMarkDown": "Type of configuration for oc command. It can be a file path or an inline script.",
63 | "options": {
64 | "file": "File Path",
65 | "inline": "Inline Configuration"
66 | },
67 | "visibleRule": "connectionType = Runtime Configuration"
68 | },
69 | {
70 | "name": "configurationPath",
71 | "type": "filePath",
72 | "label": "File Path",
73 | "defaultValue": "",
74 | "required": true,
75 | "helpMarkDown": "Filename, directory, or URL to the kubeconfig file that will be used with the commands.",
76 | "groupName": "openshiftCluster",
77 | "visibleRule": "configurationType = file"
78 | },
79 | {
80 | "name": "inlineConfig",
81 | "type": "multiLine",
82 | "properties": {
83 | "resizable": "true",
84 | "rows": "10",
85 | "maxLength": "5000"
86 | },
87 | "required": true,
88 | "defaultValue": "",
89 | "label": "Inline configuration",
90 | "helpMarkDown": "Inline kubeconfig for oc command",
91 | "groupName": "openshiftCluster",
92 | "visibleRule": "configurationType = inline"
93 | },
94 | {
95 | "name": "version",
96 | "type": "string",
97 | "label": "Version of oc to use",
98 | "defaultValue": "",
99 | "required": false,
100 | "groupName": "commands",
101 | "helpMarkDown": "Select the oc version to use e.g. 'v3.10.0' (leave blank for latest). You can also specify a direct URL to a oc release bundle."
102 | },
103 | {
104 | "name": "cmd",
105 | "type": "string",
106 | "label": "Command to run",
107 | "defaultValue": "",
108 | "required": true,
109 | "groupName": "commands",
110 | "helpMarkDown": "Specify the oc command to run."
111 | },
112 | {
113 | "name": "ignoreFlag",
114 | "type": "boolean",
115 | "label": "Ignore non success return value",
116 | "defaultValue": "false",
117 | "required": false,
118 | "groupName": "commands",
119 | "helpMarkDown": "Specify if the non success return value of the oc command run has to be ignored. E.g if the command oc create/delete/... fail because the resource has already been created/deleted/.. the pipeline will continue its execution"
120 | },
121 | {
122 | "name": "condition",
123 | "type": "pickList",
124 | "label": "Condition type",
125 | "defaultValue": "exists",
126 | "required": true,
127 | "options": {
128 | "exists": "Exists",
129 | "not_exists": "Not Exists"
130 | },
131 | "groupName": "commands",
132 | "helpMarkDown": "Select the condition to be verified on the resource."
133 | },
134 | {
135 | "name": "resource",
136 | "type": "string",
137 | "label": "Resource on which to verify the condition",
138 | "defaultValue": "",
139 | "required": true,
140 | "groupName": "commands",
141 | "helpMarkDown": "Specify the resource on which to verify the condition."
142 | },
143 | {
144 | "name": "timedout",
145 | "type": "string",
146 | "label": "Time (in milliseconds) after which to stop the execution",
147 | "defaultValue": "",
148 | "required": false,
149 | "groupName": "commands",
150 | "helpMarkDown": "Specify the time (in milliseconds) after which the extension should stop the execution."
151 | },
152 | {
153 | "name": "noTimedOutError",
154 | "type": "boolean",
155 | "label": "Skip timed out error",
156 | "defaultValue": "false",
157 | "required": false,
158 | "groupName": "commands",
      "helpMarkDown": "If checked there will be no timeout error. If the timeout elapses the extension will try to execute the command anyway."
160 | },
161 | {
162 | "name": "uselocalOc",
163 | "type": "boolean",
164 | "label": "Use local oc executable",
165 | "defaultValue": "false",
166 | "required": false,
167 | "groupName": "commands",
168 | "helpMarkDown": "Check if oc is already installed in the machine and use that if available"
169 | },
170 | {
171 | "name": "proxy",
172 | "type": "string",
173 | "label": "Use proxy to download oc executable",
174 | "defaultValue": "",
175 | "required": false,
176 | "groupName": "commands",
177 | "helpMarkDown": "make use of a proxy to download oc executable"
178 | }
179 | ],
180 | "execution": {
181 | "Node10": {
182 | "target": "lib/oc-conditional-exec-task.js",
183 | "workingDirectory": "$(currentDirectory)"
184 | }
185 | }
186 | }
187 |
--------------------------------------------------------------------------------
/tasks/oc-setup/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/tasks/oc-setup/icon.png
--------------------------------------------------------------------------------
/tasks/oc-setup/task.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "44babac3-ab28-4f68-b843-bf2c295a4a2d",
3 | "name": "oc-setup",
4 | "friendlyName": "Install and setup oc",
5 | "description": "Installs oc and configures the PATH and KUBECONFIG environment variables.",
6 | "author": "Red Hat",
7 | "helpMarkDown": "See [getting started](https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md)",
8 | "category": "Utility",
9 | "visibility": [
10 | "Build",
11 | "Release"
12 | ],
13 | "preview": true,
14 | "demands": [],
15 | "groups": [
16 | {
17 | "name": "openshiftCluster",
18 | "displayName": "OpenShift Cluster",
19 | "isExpanded": true
20 | },
21 | {
22 | "name": "commands",
23 | "displayName": "Commands",
24 | "isExpanded": true
25 | }
26 | ],
27 | "version": {
28 | "Major": 3,
29 | "Minor": 0,
30 | "Patch": 2
31 | },
32 | "minimumAgentVersion": "2.144.0",
33 | "instanceNameFormat": "oc-setup $(message)",
34 | "inputs": [
35 | {
36 | "name": "connectionType",
37 | "type": "pickList",
38 | "label": "Service connection type",
39 | "defaultValue": "OpenShift Connection Service",
40 | "required": true,
41 | "options": {
42 | "OpenShift Connection Service": "OpenShift Connection Service",
43 | "Runtime Configuration": "Set Up Configuration on Runtime"
44 | },
45 | "groupName": "openshiftCluster",
46 | "helpMarkDown": "Select a service connection type."
47 | },
48 | {
49 | "name": "openshiftService",
50 | "type": "connectedService:openshift",
51 | "label": "OpenShift service connection",
52 | "groupName": "openshiftCluster",
53 | "helpMarkDown": "Select OpenShift service connection to use.",
54 | "required": true,
55 | "visibleRule": "connectionType = OpenShift Connection Service"
56 | },
57 | {
58 | "name": "configurationType",
59 | "type": "radio",
60 | "label": "Configuration type",
61 | "defaultValue": "file",
62 | "groupName": "openshiftCluster",
63 | "helpMarkDown": "Type of configuration for oc command. It can be a file path or an inline script.",
64 | "options": {
65 | "file": "File Path",
66 | "inline": "Inline Configuration"
67 | },
68 | "visibleRule": "connectionType = Runtime Configuration"
69 | },
70 | {
71 | "name": "configurationPath",
72 | "type": "filePath",
73 | "label": "File Path",
74 | "defaultValue": "",
75 | "required": true,
76 | "helpMarkDown": "Filename, directory, or URL to the kubeconfig file that will be used with the commands.",
77 | "groupName": "openshiftCluster",
78 | "visibleRule": "configurationType = file"
79 | },
80 | {
81 | "name": "inlineConfig",
82 | "type": "multiLine",
83 | "properties": {
84 | "resizable": "true",
85 | "rows": "10",
86 | "maxLength": "5000"
87 | },
88 | "required": true,
89 | "defaultValue": "",
90 | "label": "Inline configuration",
91 | "helpMarkDown": "Inline kubeconfig for oc command",
92 | "groupName": "openshiftCluster",
93 | "visibleRule": "configurationType = inline"
94 | },
95 | {
96 | "name": "version",
97 | "type": "string",
98 | "label": "Version of oc",
99 | "defaultValue": "",
100 | "required": false,
101 | "groupName": "commands",
102 | "helpMarkDown": "Select the oc version to use e.g. 'v3.10.0' (leave blank for latest). You can also specify a direct URL to a oc release bundle."
103 | },
104 | {
105 | "name": "proxy",
106 | "type": "string",
107 | "label": "Use proxy to download oc executable",
108 | "defaultValue": "",
109 | "required": false,
110 | "groupName": "commands",
111 | "helpMarkDown": "make use of a proxy to download oc executable"
112 | }
113 | ],
114 | "execution": {
115 | "Node10": {
116 | "target": "lib/oc-setup-task.js",
117 | "workingDirectory": "$(currentDirectory)"
118 | }
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/test/config-map.test.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import { ConfigMap } from '../src/config-map';
6 |
7 | const {expect} = require('chai');
8 |
// Unit tests for ConfigMap: construction and generation of the
// `oc patch` command string from CLI-style "-key value" property pairs.
describe('ConfigMap', () => {
  describe('#constructor', () => {
    it('creates named ConfigMap', () => {
      const configMap = new ConfigMap('foo', '');
      expect(configMap).to.be.instanceof(ConfigMap);
      expect(configMap.name).to.be.eq('foo');
    });
  });

  describe('#patchCmd', () => {
    // Set at suite definition time so the interpolation test below can
    // resolve ${MY_VAR} from the environment.
    process.env.MY_VAR = 'foo';

    // Remove the test variable once the whole #patchCmd suite is done.
    after(() => {
      delete process.env.MY_VAR;
    });

    it('creates oc patch command from properties', () => {
      const configMap = new ConfigMap(
        'foo',
        '-key1 value1 -key2 value2 -key3 value3'
      );
      expect(configMap.patchCmd('')).to.be.eq(
        'patch configmap foo -p \'{"data":{"key1": "value1", "key2": "value2", "key3": "value3"}}\''
      );
    });

    it('creates oc patch command with namespace', () => {
      // A non-empty namespace is appended as '-n <namespace>'.
      const configMap = new ConfigMap('foo', '-key1 value1');
      expect(configMap.patchCmd('my-space')).to.be.eq(
        'patch configmap foo -p \'{"data":{"key1": "value1"}}\' -n my-space'
      );
    });

    it('interpolates environment variables', () => {
      const configMap = new ConfigMap('foo', '-key1 ${MY_VAR}');
      expect(configMap.patchCmd('my-space')).to.be.eq(
        'patch configmap foo -p \'{"data":{"key1": "foo"}}\' -n my-space'
      );
    });

    it('no properties results in noop patch command', () => {
      const configMap = new ConfigMap('foo', '');
      expect(configMap.patchCmd('')).to.be.eq(
        'patch configmap foo -p \'{"data":{}}\''
      );
    });

    it('removes quotes around properties values', () => {
      const configMap = new ConfigMap('foo', '-key "what now"');
      expect(configMap.patchCmd('')).to.be.eq(
        'patch configmap foo -p \'{"data":{"key": "what now"}}\''
      );
    });
  });
});
64 |
--------------------------------------------------------------------------------
/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-linux-64bit.tar.gz
--------------------------------------------------------------------------------
/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-mac.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-mac.zip
--------------------------------------------------------------------------------
/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-windows.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redhat-developer/openshift-vsts/d225d1257c978f1be02a55ccd153ca962ceafc53/test/fixtures/openshift-origin-client-tools-v3.11.0-0cbc58b-windows.zip
--------------------------------------------------------------------------------
/test/index.coverage.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the EPL v2.0 License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 |
6 | 'use strict';
7 |
8 | declare var global: any;
9 |
10 | /* tslint:disable no-require-imports */
11 |
12 | import * as fs from 'fs';
13 | import * as glob from 'glob';
14 | import * as paths from 'path';
15 |
16 | const istanbul = require('istanbul');
17 | const Mocha = require('mocha');
18 | const remapIstanbul = require('remap-istanbul');
19 |
// Linux: prevent a weird NPE when mocha on Linux requires the window size from the TTY.
// Since we are not running in a tty environment, we just implement the method statically.
const tty = require('tty');
if (!tty.getWindowSize) {
  tty.getWindowSize = (): number[] => {
    return [80, 75];
  };
}
// Default mocha configuration for the coverage run.
const config = {
  reporter: 'mocha-jenkins-reporter',
  ui: 'bdd',
  useColors: true,
  timeout: 15000
};

// NOTE(review): this assignment is redundant — 'mocha-jenkins-reporter' is
// already the default above. Presumably the default was meant to be a plain
// reporter with the Jenkins one only under CI builds; confirm intent.
if (process.env.BUILD_ID && process.env.BUILD_NUMBER) {
  config.reporter = 'mocha-jenkins-reporter';
}

// Module-level Mocha instance; replaced by configure() when options are given.
let mocha = new Mocha(config);
40 |
41 | function configure(mochaOpts: any): void {
42 | mocha = new Mocha(mochaOpts);
43 | }
44 | exports.configure = configure;
45 |
46 | function _mkDirIfExists(dir: string): void {
47 | if (!fs.existsSync(dir)) {
48 | fs.mkdirSync(dir);
49 | }
50 | }
51 |
52 | function _readCoverOptions(testsRoot: string): ITestRunnerOptions | undefined {
53 | const coverConfigPath = paths.join(testsRoot, '..', '..', 'coverconfig.json');
54 | if (fs.existsSync(coverConfigPath)) {
55 | const configContent = fs.readFileSync(coverConfigPath, 'utf-8');
56 | return JSON.parse(configContent);
57 | }
58 | return undefined;
59 | }
60 |
// Entry point invoked by the test harness: optionally instruments the
// sources for coverage, then discovers and runs every compiled *.test.js
// file found directly under `testsRoot`.
//
// `clb` is an error-first callback: clb(error) on glob/setup failure, or
// clb(undefined, failureCount) once the mocha run ends.
function run(testsRoot: string, clb: any): any {
  // Read configuration for the coverage file
  const coverOptions = _readCoverOptions(testsRoot);
  if (coverOptions && coverOptions.enabled) {
    // Setup coverage pre-test, including post-test hook to report
    const coverageRunner = new CoverageRunner(coverOptions, testsRoot);
    coverageRunner.setupCoverage();
  }

  // Glob test files
  glob(
    '**.test.js',
    { cwd: testsRoot },
    (error, files): any => {
      if (error) {
        return clb(error);
      }
      try {
        // Fill into Mocha
        files.forEach((f): Mocha => mocha.addFile(paths.join(testsRoot, f)));
        // Run the tests
        let failureCount = 0;

        // Count individual test failures; report the total when the run ends.
        mocha
          .run()
          .on('fail', () => failureCount++)
          .on('end', () => clb(undefined, failureCount));
      } catch (error) {
        return clb(error);
      }
    }
  );
}
exports.run = run;
95 |
// Shape of coverconfig.json as loaded by _readCoverOptions.
interface ITestRunnerOptions {
  enabled?: boolean; // when true, run() sets up source instrumentation
  relativeCoverageDir: string; // presumably the report output dir — used by reporting code not fully visible here
  relativeSourcePath: string; // sources to instrument, relative to the tests root
  ignorePatterns: string[]; // glob patterns excluded from instrumentation
  includePid?: boolean; // presumably appends the pid to report file names — confirm in reportCoverage
  reports?: string[]; // presumably istanbul report formats to emit — confirm in reportCoverage
  verbose?: boolean; // verbosity flag (not read in the code visible here)
}
105 |
// Wraps istanbul instrumentation around Node's require() so that any source
// file loaded during the test run records hit counts into a process-global
// coverage object, then remaps and writes reports when the process exits.
class CoverageRunner {
  // Unique global variable name istanbul writes hit counts into; the timestamp
  // avoids collisions between concurrently instrumented processes.
  private coverageVar: string = `$$cov_${new Date().getTime()}$$`;
  private transformer: any = undefined; // instrumentSync bound to the instrumenter
  private matchFn: any = undefined; // predicate: should this file be instrumented?
  private instrumenter: any = undefined; // istanbul Instrumenter instance

  constructor(private options: ITestRunnerOptions, private testsRoot: string) {
    // Without a source path there is nothing to instrument; leave the runner inert.
    if (!options.relativeSourcePath) {
      return;
    }
  }

  public setupCoverage(): void {
    // Set up Code Coverage, hooking require so that instrumented code is returned
    const self = this;
    self.instrumenter = new istanbul.Instrumenter({
      coverageVariable: self.coverageVar
    });
    const sourceRoot = paths.join(
      self.testsRoot,
      self.options.relativeSourcePath
    );

    // Glob source files
    const srcFiles = glob.sync('**/**.js', {
      cwd: sourceRoot,
      ignore: self.options.ignorePatterns
    });

    // Create a match function - taken from the run-with-cover.js in istanbul.
    const decache = require('decache');
    const fileMap: any = {};
    srcFiles.forEach(file => {
      const fullPath = paths.join(sourceRoot, file);
      fileMap[fullPath] = true;

      // On Windows, extension is loaded pre-test hooks and this mean we lose
      // our chance to hook the Require call. In order to instrument the code
      // we have to decache the JS file so on next load it gets instrumented.
      // This doesn't impact tests, but is a concern if we had some integration
      // tests that relied on VSCode accessing our module since there could be
      // some shared global state that we lose.
      decache(fullPath);
    });

    self.matchFn = (file: string): boolean => fileMap[file];
    self.matchFn.files = Object.keys(fileMap);

    // Hook up to the Require function so that when this is called, if any of our source files
    // are required, the instrumented version is pulled in instead. These instrumented versions
    // write to a global coverage variable with hit counts whenever they are accessed
    self.transformer = self.instrumenter.instrumentSync.bind(self.instrumenter);
    const hookOpts = { verbose: false, extensions: ['.js'] };
    istanbul.hook.hookRequire(self.matchFn, self.transformer, hookOpts);

    // initialize the global variable to stop mocha from complaining about leaks
    global[self.coverageVar] = {};

    // Hook the process exit event to handle reporting.
    // NOTE(review): the report is written on every exit regardless of the exit
    // code (the original comment claimed success-only reporting, which the
    // code below does not implement).
    process.on('exit', (code: number) => {
      self.reportCoverage();
      process.exitCode = code;
    });
  }

  /**
   * Writes a coverage report.
   * Note that as this is called in the process exit callback, all calls must be synchronous.
   *
   * @returns {void}
   *
   * @memberOf CoverageRunner
   */
  public reportCoverage(): void {
    const self = this;
    // Stop intercepting require before reading the collected data.
    istanbul.hook.unhookRequire();
    let cov: any;
    if (
      typeof global[self.coverageVar] === 'undefined' ||
      Object.keys(global[self.coverageVar]).length === 0
    ) {
      console.error(
        'No coverage information was collected, exit without writing coverage information'
      );
      return;
    } else {
      cov = global[self.coverageVar];
    }

    // TODO consider putting this under a conditional flag
    // Files that are not touched by code ran by the test runner is manually instrumented, to
    // illustrate the missing coverage.
    self.matchFn.files.forEach((file: any) => {
      if (cov[file]) {
        return;
      }
      self.transformer(fs.readFileSync(file, 'utf-8'), file);

      // When instrumenting the code, istanbul will give each FunctionDeclaration a value of 1 in coverState.s,
      // presumably to compensate for function hoisting. We need to reset this, as the function was not hoisted,
      // as it was never loaded.
      Object.keys(self.instrumenter.coverState.s).forEach(key => {
        self.instrumenter.coverState.s[key] = 0;
      });

      cov[file] = self.instrumenter.coverState;
    });

    // TODO Allow config of reporting directory with
    const reportingDir = paths.join(
      self.testsRoot,
      self.options.relativeCoverageDir
    );
    const includePid = self.options.includePid;
    const pidExt = includePid ? '-' + process.pid : '';
    const coverageFile = paths.resolve(reportingDir, `coverage${pidExt}.json`);

    // yes, do this again since some test runners could clean the dir initially created
    _mkDirIfExists(reportingDir);

    // Persist the raw (JS-level) coverage before remapping to TypeScript.
    fs.writeFileSync(coverageFile, JSON.stringify(cov), 'utf8');

    const remappedCollector = remapIstanbul.remap(cov, {
      warn: (warning: any) => {
        // We expect some warnings as any JS file without a typescript mapping will cause this.
        // By default, we'll skip printing these to the console as it clutters it up
        if (self.options.verbose) {
          console.warn(warning);
        }
      }
    });

    const reporter = new istanbul.Reporter(undefined, reportingDir);
    const reportTypes =
      self.options.reports instanceof Array ? self.options.reports : ['lcov'];
    reporter.addAll(reportTypes);
    reporter.write(remappedCollector, true, () => {
      console.log(`reports written to ${reportingDir}`);
    });
  }
}
--------------------------------------------------------------------------------
/test/index.debug.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 |
6 | import * as glob from 'glob';
7 | import * as Mocha from 'mocha';
8 | import * as path from 'path';
9 |
10 | process.on('unhandledRejection', err => {
11 | console.log('Unhandled rejection:', err);
12 | });
13 |
14 | // See https://github.com/mochajs/mocha/wiki/Using-mocha-programmatically#set-options for more info
15 | export function run(): Promise {
16 | // Create the mocha test
17 | const mocha = new Mocha({
18 | ui: 'tdd',
19 | useColors: true,
20 | timeout: 50000,
21 | slow: 50000
22 | });
23 | mocha.useColors(true);
24 |
25 | const testsRoot = path.resolve(__dirname);
26 |
27 | return new Promise((c, e) => {
28 | glob('**/**.test.js', { cwd: testsRoot }, (err, files) => {
29 | if (err) {
30 | return e(err);
31 | }
32 |
33 | // Add files to the test suite
34 | files.forEach(f => mocha.addFile(path.resolve(testsRoot, f)));
35 |
36 | try {
37 | // Run the mocha test
38 | mocha.run(failures => {
39 | if (failures > 0) {
40 | e(new Error(`${failures} tests failed.`));
41 | } else {
42 | c();
43 | }
44 | });
45 | } catch (ex) {
46 | e(ex);
47 | }
48 | });
49 | });
50 | }
51 |
--------------------------------------------------------------------------------
/test/index.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 |
6 | const mode: string = process.env.VSCA_TEST_MODE || 'coverage';
7 |
8 | module.exports = require(`./index.${mode}`);
9 |
--------------------------------------------------------------------------------
/test/oc-auth.test.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import * as fs from 'fs';
6 | import * as sinon from 'sinon';
7 | import * as OcAuth from '../src/oc-auth';
8 | import { RunnerHandler } from '../src/oc-exec';
9 | import {
10 | BASIC_AUTHENTICATION,
11 | OPENSHIFT_SERVICE_OPTION,
12 | TOKEN_AUTHENTICATION,
13 | RUNTIME_CONFIGURATION_OPTION,
14 | } from '../src/constants';
15 | import * as Task from 'azure-pipelines-task-lib/task';
16 | import path = require('path');
17 |
18 | const chai = require('chai');
19 | chai.use(require('chai-fs'));
20 |
21 | const { expect } = chai;
22 |
23 | describe('oc-auth', () => {
24 | let sandbox: sinon.SinonSandbox;
25 |
26 | beforeEach(() => {
27 | sandbox = sinon.createSandbox();
28 | });
29 |
30 | afterEach(() => {
31 | sandbox.restore();
32 | });
33 |
34 | describe('#writeKubeConfig', () => {
35 | before(() => {
36 | const testOutDir = path.join(__dirname, '..', '..', 'out');
37 | if (!fs.existsSync(testOutDir)) {
38 | fs.mkdirSync(testOutDir);
39 | }
40 | });
41 |
42 | after(() => {
43 | try {
44 | fs.rmdirSync(path.join(__dirname, '..', '..', 'out', '.kube'));
45 | } catch (e) {
46 | console.error(e);
47 | }
48 | delete process.env.HOME;
49 | delete process.env.KUBECONFIG;
50 | });
51 |
52 | it('writes kubeconfig from service', () => {
53 | const testWorkingDir = path.join(__dirname, '..', '..', 'out');
54 | process.env.HOME = testWorkingDir;
55 |
56 | const endpointAuth: Task.EndpointAuthorization = {
57 | parameters: {
58 | kubeconfig: 'my dummy kube config',
59 | },
60 | scheme: 'None'
61 | };
62 |
63 | sandbox.stub(Task, 'getEndpointAuthorization').withArgs("OCP Connection", false).returns(endpointAuth);
64 | sandbox.stub(Task, 'getEndpointUrl').withArgs("OCP Connection", false).returns('https://openshift.example.com');
65 | sandbox.stub(Task, 'getInput')
66 | .withArgs('connectionType').returns(OPENSHIFT_SERVICE_OPTION)
67 | .withArgs('openshiftService').returns('OCP Connection');
68 |
69 | return OcAuth.createKubeConfig('oc', 'Linux').then(
70 | (result: undefined) => {
71 | expect(result).to.be.undefined;
72 | expect(fs.existsSync(path.join(testWorkingDir, '.kube', 'config'))).to
73 | .be.true;
74 |
75 | const kubeconfig = process.env.KUBECONFIG;
76 | if (kubeconfig === undefined) {
77 | expect.fail('PATH not set');
78 | } else {
79 | expect(
80 | kubeconfig.includes(path.join(testWorkingDir, '.kube', 'config')),
81 | ).to.be.true;
82 | }
83 | },
84 | );
85 | });
86 |
87 | it('writes kubeconfig from inline config', () => {
88 | const testWorkingDir = path.join(__dirname, '..', '..', 'out');
89 | process.env.HOME = testWorkingDir;
90 | const fileContents = "DUMMY_CONTENT";
91 |
92 | sandbox.stub(Task, 'getInput')
93 | .withArgs('connectionType').returns(RUNTIME_CONFIGURATION_OPTION)
94 | .withArgs('configurationType').returns('inline')
95 | .withArgs('inlineConfig').returns(fileContents);
96 |
97 | return OcAuth.createKubeConfig('oc', 'Linux').then(
98 | (result: undefined) => {
99 | expect(result).to.be.undefined;
100 | expect(fs.existsSync(path.join(testWorkingDir, '.kube', 'config'))).to
101 | .be.true;
102 |
103 | const kubeconfig = process.env.KUBECONFIG;
104 | if (kubeconfig === undefined) {
105 | expect.fail('PATH not set');
106 | } else {
107 | expect(fs.readFileSync(kubeconfig).toString()).to.equal(fileContents)
108 | }
109 | },
110 | );
111 | });
112 |
113 | it('sets kubeconfig env variable when config path provided', () => {
114 | const testWorkingDir = path.join(__dirname, '..', '..', 'out');
115 | process.env.HOME = testWorkingDir;
116 | const fileContents = "DUMMY_CONTENT";
117 |
118 | const testConfigPath = path.join(testWorkingDir, '.kube', 'config');
119 | fs.writeFileSync(testConfigPath, fileContents);
120 |
121 | sandbox.stub(Task, 'getInput').
122 | withArgs('connectionType').returns(RUNTIME_CONFIGURATION_OPTION).
123 | withArgs('configurationType').returns('configurationPath');
124 | sandbox.stub(Task, 'getPathInput').returns(testConfigPath);
125 |
126 | return OcAuth.createKubeConfig('oc', 'Linux').then(
127 | (result: undefined) => {
128 | expect(result).to.be.undefined;
129 |
130 | const kubeconfig = process.env.KUBECONFIG;
131 | if (kubeconfig === undefined) {
132 | expect.fail('PATH not set');
133 | } else {
134 | expect(fs.readFileSync(kubeconfig).toString()).to.equal(fileContents)
135 | }
136 | },
137 | );
138 | });
139 |
140 | it('throws exception when setting kubeconfig to empty path', () => {
141 | const testWorkingDir = path.join(__dirname, '..', '..', 'out', '.kube', 'config');
142 | process.env.HOME = testWorkingDir;
143 |
144 | sandbox.stub(Task, 'getInput').returns('configurationPath');
145 | sandbox.stub(Task, 'getPathInput').returns(testWorkingDir);
146 |
147 | return expect(() => OcAuth.createKubeConfig('oc', 'Linux')).to.throw;
148 | });
149 |
150 | });
151 |
152 |
153 | describe('#userHome', () => {
154 | it('returns the USERPROFILE directory for Windows', () => {
155 | process.env.USERPROFILE = 'C:\\Users\\john';
156 | process.env.HOME = '/Users/john';
157 | expect(OcAuth.userHome('Windows_NT')).eq('C:\\Users\\john');
158 | });
159 |
160 | it('returns the HOME directory for Linux and Darwin', () => {
161 | process.env.USERPROFILE = 'C:\\Users\\john';
162 | process.env.HOME = '/Users/john';
163 | expect(OcAuth.userHome('Linux')).eq('/Users/john');
164 | });
165 |
166 | it('returns the HOME directory for Darwin', () => {
167 | process.env.USERPROFILE = 'C:\\Users\\john';
168 | process.env.HOME = '/Users/john';
169 | expect(OcAuth.userHome('Darwin')).eq('/Users/john');
170 | });
171 |
172 | it('throws error for unknown OS type', () => {
173 | expect(() => OcAuth.userHome('')).to.throw();
174 | });
175 | });
176 |
177 | describe('#createKubeConfig', () => {
178 | const endpoint: OcAuth.OpenShiftEndpoint = {
179 | serverUrl: 'url',
180 | scheme: BASIC_AUTHENTICATION,
181 | parameters: JSON.parse('{"key": "value"}'),
182 | };
183 |
184 | it('check if getCertificateAuthority is called with correct endpoint', async () => {
185 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
186 | const certificateStub = sandbox
187 | .stub(OcAuth, 'getCertificateAuthorityFile')
188 | .returns('flag');
189 | sandbox.stub(RunnerHandler, 'execOc');
190 | try {
191 | await OcAuth.createKubeConfig('path', 'OS');
192 | sinon.assert.calledWith(certificateStub, endpoint);
193 | } catch (err) {
194 | }
195 | });
196 |
197 | it('check if skipTlsVerify is called with correct endpoint if getCertificateAuthorityFile returns no flag', async () => {
198 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
199 | sandbox.stub(OcAuth, 'getCertificateAuthorityFile').returns('');
200 | const skipTlsStub = sandbox.stub(OcAuth, 'skipTlsVerify').returns('');
201 | sandbox.stub(RunnerHandler, 'execOc');
202 | sandbox.stub(OcAuth, 'exportKubeConfig');
203 | try {
204 | await OcAuth.createKubeConfig('path', 'OS');
205 | sinon.assert.calledWith(skipTlsStub, endpoint);
206 | } catch (err) {}
207 | });
208 |
209 | it('check if skipTlsVerify is NOT called if getCertificateAuthorityFile returns valid flag', async () => {
210 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
211 | sandbox.stub(OcAuth, 'getCertificateAuthorityFile').returns('flag');
212 | const skipTlsStub = sandbox.stub(OcAuth, 'skipTlsVerify').returns('');
213 | sandbox.stub(RunnerHandler, 'execOc');
214 | sandbox.stub(OcAuth, 'exportKubeConfig');
215 | try {
216 | await OcAuth.createKubeConfig('path', 'OS');
217 | expect(skipTlsStub).not.called;
218 | } catch (err) {
219 | }
220 | });
221 |
222 | it('check if correct oc command is executed with basic authentication type', async () => {
223 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
224 | const params =
225 | '{"username": "username", "password": "password", "acceptUntrustedCerts": "true"}';
226 | endpoint.parameters = JSON.parse(params);
227 |
228 | sandbox.stub(OcAuth, 'getCertificateAuthorityFile').returns('');
229 | sandbox.stub(OcAuth, 'skipTlsVerify').returns('');
230 | const commandStub = sandbox.stub(RunnerHandler, 'execOc');
231 | sandbox.stub(OcAuth, 'exportKubeConfig');
232 | try {
233 | await OcAuth.createKubeConfig('path', 'OS');
234 | sinon.assert.calledWith(
235 | commandStub,
236 | 'path',
237 | 'login -u username -p password url',
238 | );
239 | } catch (err) {
240 | }
241 | });
242 |
243 | it('check if correct oc command is executed with token authentication type', async () => {
244 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
245 | const params = '{"apitoken": "token", "acceptUntrustedCerts": "true"}';
246 | endpoint.parameters = JSON.parse(params);
247 | endpoint.scheme = TOKEN_AUTHENTICATION;
248 |
249 | sandbox.stub(OcAuth, 'getCertificateAuthorityFile').returns('');
250 | sandbox.stub(OcAuth, 'skipTlsVerify').returns('');
251 | const commandStub = sandbox.stub(RunnerHandler, 'execOc');
252 | sandbox.stub(OcAuth, 'exportKubeConfig');
253 | try {
254 | await OcAuth.createKubeConfig('path', 'OS');
255 | sinon.assert.calledWith(
256 | commandStub,
257 | 'path',
258 | 'login --token token url',
259 | );
260 | } catch (err) {
261 | }
262 | });
263 |
264 | it('check if new error is thrown if no vail authentication type is found', async () => {
265 | sandbox.stub(OcAuth, 'getOpenShiftEndpoint').returns(endpoint);
266 | const params = '{"acceptUntrustedCerts": "true"}';
267 | endpoint.parameters = JSON.parse(params);
268 | endpoint.scheme = 'invalidscheme';
269 |
270 | sandbox.stub(OcAuth, 'getCertificateAuthorityFile').returns('');
271 | sandbox.stub(OcAuth, 'skipTlsVerify').returns('');
272 | const createKubeConfigSpy = sandbox.stub(OcAuth, 'createKubeConfig');
273 | try {
274 | await createKubeConfigSpy('path', 'OS');
275 | } catch (err) {
276 | }
277 | expect(createKubeConfigSpy).throw;
278 | });
279 | });
280 |
281 | describe('getCertificateAuthorityFile', () => {
282 | it('return empty string if param certificateAuthorityFile NOT exists', () => {
283 | const params = '{"acceptUntrustedCerts": "true"}';
284 | const endpoint: OcAuth.OpenShiftEndpoint = {
285 | serverUrl: 'server',
286 | parameters: JSON.parse(params),
287 | scheme: BASIC_AUTHENTICATION
288 | };
289 | const res = OcAuth.getCertificateAuthorityFile(endpoint);
290 | expect(res).equals('');
291 | });
292 |
293 | it('return correct value if param certificateAuthorityFile exists', () => {
294 | const params = '{"certificateAuthorityFile": "path"}';
295 | const endpoint: OcAuth.OpenShiftEndpoint = {
296 | serverUrl: 'server',
297 | parameters: JSON.parse(params),
298 | scheme: BASIC_AUTHENTICATION
299 | };
300 | const res = OcAuth.getCertificateAuthorityFile(endpoint);
301 | expect(res).equals('--certificate-authority="path"');
302 | });
303 | });
304 |
305 | describe('skipTlsVerify', () => {
306 | it('return empty string if param acceptUntrustedCerts NOT exists', () => {
307 | const params = '{"certificateAuthorityFile": "path"}';
308 | const endpoint: OcAuth.OpenShiftEndpoint = {
309 | serverUrl: 'server',
310 | parameters: JSON.parse(params),
311 | scheme: BASIC_AUTHENTICATION
312 | };
313 | const res = OcAuth.skipTlsVerify(endpoint);
314 | expect(res).equals('');
315 | });
316 |
317 | it('return empty string if param acceptUntrustedCerts exists and its value is false', () => {
318 | const params = '{"acceptUntrustedCerts": "false"}';
319 | const endpoint: OcAuth.OpenShiftEndpoint = {
320 | serverUrl: 'server',
321 | parameters: JSON.parse(params),
322 | scheme: BASIC_AUTHENTICATION
323 | };
324 | const res = OcAuth.skipTlsVerify(endpoint);
325 | expect(res).equals('');
326 | });
327 |
328 | it('return correct value if param acceptUntrustedCerts exists', () => {
329 | const params = '{"acceptUntrustedCerts": "true"}';
330 | const endpoint: OcAuth.OpenShiftEndpoint = {
331 | serverUrl: 'server',
332 | parameters: JSON.parse(params),
333 | scheme: BASIC_AUTHENTICATION
334 | };
335 | const res = OcAuth.skipTlsVerify(endpoint);
336 | expect(res).equals('--insecure-skip-tls-verify ');
337 | });
338 | });
339 | });
340 |
--------------------------------------------------------------------------------
/test/oc-exec.test.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import * as chai from 'chai';
6 | import * as sinon from 'sinon';
7 | import { IExecSyncResult } from 'azure-pipelines-task-lib/toolrunner';
8 | import { RunnerHandler } from '../src/oc-exec';
9 |
10 | import { ToolRunnerStub } from './toolrunnerStub';
11 |
12 | const {expect} = chai;
13 | // import sinon
14 | import stream = require('stream');
15 | import tl = require('azure-pipelines-task-lib/task');
16 |
17 | describe('oc-exec', () => {
18 | let sandbox: sinon.SinonSandbox;
19 | let stubs: ToolRunnerStub;
20 |
21 | beforeEach(() => {
22 | sandbox = sinon.createSandbox();
23 | stubs = new ToolRunnerStub(sandbox);
24 | });
25 |
26 | afterEach(() => {
27 | sandbox.restore();
28 | });
29 |
  describe('#execOc', () => {
    // These tests stub out the helper pipeline (createExecOptions,
    // initToolRunners, unifyToolRunners) and only verify how execOc
    // splits the raw command line and delegates to them.
    it('check createExecOptions is called with correct params', async () => {
      const optionsStub = sandbox.stub(RunnerHandler, 'createExecOptions');
      sandbox.stub(RunnerHandler, 'initToolRunners').returns([stubs.tr]);
      sandbox.stub(RunnerHandler, 'unifyToolRunners').returns(stubs.tr);
      await RunnerHandler.execOc(null, 'cmd1', false);
      sinon.assert.calledWith(optionsStub, undefined, false);
    });

    it('check initToolRunners is called with correct commands splitted and correct path if set', async () => {
      sandbox.stub(RunnerHandler, 'createExecOptions');
      const initStub = sandbox.stub(RunnerHandler, 'initToolRunners').returns([stubs.tr]);
      sandbox.stub(RunnerHandler, 'unifyToolRunners').returns(stubs.tr);
      await RunnerHandler.execOc('path', 'cmd1 | cmd2', false);
      // The command line is split at shell operators, with the operator
      // kept at the head of the trailing segment.
      sinon.assert.calledWith(initStub, ['cmd1 ', '| cmd2'], 'path');
    });

    it('check initToolRunners is called with correct commands splitted and path passed is null', async () => {
      sandbox.stub(RunnerHandler, 'createExecOptions');
      const initStub = sandbox.stub(RunnerHandler, 'initToolRunners').returns([stubs.tr]);
      sandbox.stub(RunnerHandler, 'unifyToolRunners').returns(stubs.tr);
      await RunnerHandler.execOc(null, 'cmd1 >> cmd2', false);
      // A null path falls back to plain 'oc'; '>>' is split into two tokens.
      sinon.assert.calledWith(initStub, ['cmd1 ', '>', '> cmd2'], 'oc');
    });

    it('check unifyToolRunners is called with correct params', async () => {
      sandbox.stub(RunnerHandler, 'createExecOptions');
      sandbox
        .stub(RunnerHandler, 'initToolRunners')
        .returns([stubs.tr, stubs.tr]);
      const unifyStub = sandbox
        .stub(RunnerHandler, 'unifyToolRunners')
        .returns(stubs.tr);
      await RunnerHandler.execOc(null, 'cmd1 > cmd2', false);
      sinon.assert.calledWith(
        unifyStub,
        ['cmd1 ', '> cmd2'],
        [stubs.tr, stubs.tr],
        undefined
      );
    });
  });
72 |
  describe('#unifyToolRunners', () => {
    // unifyToolRunners combines the per-segment tool runners: '|' segments are
    // merged via buildPipeToolRunner, while redirection tokens ('>', '2')
    // take a different path. The '2' token presumably marks stderr
    // redirection (2>) — confirm against src/oc-exec.ts.
    it('check if buildPipeToolRunner is called if there are pipes in commands', () => {
      const pipeStub = sandbox.stub(RunnerHandler, 'buildPipeToolRunner');
      RunnerHandler.unifyToolRunners(['cmd1', '| cmd2'], [stubs.tr, stubs.tr]);
      sinon.assert.calledOnce(pipeStub);
    });

    it('check if createExecOptions is called fstCmd is a 2', () => {
      const pipeStub = sandbox.stub(RunnerHandler, 'buildPipeToolRunner');
      const createExecStub = sandbox.stub(RunnerHandler, 'createExecOptions');
      RunnerHandler.unifyToolRunners(
        ['cmd1', '2', '> cmd2'],
        [stubs.tr, undefined, stubs.tr]
      );
      sinon.assert.notCalled(pipeStub);
      sinon.assert.calledOnce(createExecStub);
    });

    it('check if createExecOptions is not called if fstCmd is not a 2 but >', () => {
      const pipeStub = sandbox.stub(RunnerHandler, 'buildPipeToolRunner');
      const createExecStub = sandbox.stub(RunnerHandler, 'createExecOptions');
      RunnerHandler.unifyToolRunners(
        ['cmd1', '> cmd2'],
        [stubs.tr, undefined, stubs.tr]
      );
      sinon.assert.notCalled(pipeStub);
      sinon.assert.notCalled(createExecStub);
    });

    it('check first toolruner is returned if there is only one cmd', () => {
      // With a single command there is nothing to unify.
      const res = RunnerHandler.unifyToolRunners(['cmd1'], [stubs.tr]);
      expect(res).deep.equals(stubs.tr);
    });
  });
107 |
  describe('#createExecOptions', () => {
    // Reference fixture mirroring the defaults createExecOptions is expected
    // to synthesize; shared across the tests below.
    const options = {
      cwd: process.cwd(),
      env: ({ ...process.env}) as { [key: string]: string },
      silent: false,
      failOnStdErr: true,
      ignoreReturnCode: true,
      windowsVerbatimArguments: false,
      outStream: process.stdout as stream.Writable,
      errStream: process.stderr as stream.Writable
    };
    it('return same options sent if ignoreReturnCode and failOnStdErr are not defined', () => {
      // No flags requested and no base options: the input is passed through.
      const res = RunnerHandler.createExecOptions(undefined);
      expect(res).equals(undefined);
    });

    it('new options are created if nothing is passed as params', () => {
      const res = RunnerHandler.createExecOptions(undefined, true, true);
      expect(options.failOnStdErr).equals(res.failOnStdErr);
      expect(options.ignoreReturnCode).equals(res.ignoreReturnCode);
      expect(options.windowsVerbatimArguments).equals(
        res.windowsVerbatimArguments
      );
    });

    it('check if options are changed correctly when requested', () => {
      // NOTE(review): this passes the shared fixture in; if createExecOptions
      // mutates its argument the fixture is altered for later runs — confirm.
      const res = RunnerHandler.createExecOptions(options, false, false);
      expect(res.failOnStdErr).equals(false);
      expect(res.ignoreReturnCode).equals(false);
    });
  });
139 |
  describe('#prepareOcArguments', () => {
    // NOTE(review): despite the suite name, these tests exercise
    // RunnerHandler.prepareCmdArguments — presumably the method was renamed;
    // confirm and align the describe label.
    before(() => {
      // Guarantee FOO is unset and provide a known variable for interpolation.
      delete process.env.FOO;
      process.env.VSTS_TEST_VAR = 'nodes';
    });

    after(() => {
      delete process.env.VSTS_TEST_VAR;
    });

    it('should split arguments', () => {
      expect(RunnerHandler.prepareCmdArguments('get nodes'))
        .to.be.an('array')
        .that.include.ordered.members(['get', 'nodes']);
    });

    it('interpolate environment variables', () => {
      expect(RunnerHandler.prepareCmdArguments('get ${VSTS_TEST_VAR}'))
        .to.be.an('array')
        .that.include.ordered.members(['get', 'nodes']);
    });

    it('leave unknown environment variables intact', () => {
      // FOO was deleted in before(), so the placeholder must survive verbatim.
      expect(RunnerHandler.prepareCmdArguments('get ${FOO}'))
        .to.be.an('array')
        .that.include.ordered.members(['get', '${FOO}']);
    });

    it('remove leading oc', () => {
      // The second argument requests stripping of a leading oc/oc.exe token.
      expect(RunnerHandler.prepareCmdArguments('oc get nodes', true))
        .to.be.an('array')
        .that.include.ordered.members(['get', 'nodes']);
    });

    it('remove leading oc.exe', () => {
      expect(RunnerHandler.prepareCmdArguments('oc.exe get nodes', true))
        .to.be.an('array')
        .that.include.ordered.members(['get', 'nodes']);
    });
  });
180 |
  describe('#prepareToolRunner', () => {
    // Redirection tokens ('>', '2…') produce no tool runner of their own.
    it('return undefined runer if cmd starts with >', () => {
      const res = RunnerHandler.prepareToolRunner('> cmd1', 'path');
      expect(res).equal(undefined);
    });

    it('return undefined runer if cmd starts with 2', () => {
      const res = RunnerHandler.prepareToolRunner('2> cmd1', 'path');
      expect(res).equal(undefined);
    });

    it('check if pipe is trimmed if it is in first pos', () => {
      sandbox.stub(tl, 'tool');
      const prepareCmdArgumentsStub = sandbox
        .stub(RunnerHandler, 'prepareCmdArguments')
        .returns(['oc']);
      // A leading '| ' must be stripped before the command is tokenized.
      RunnerHandler.prepareToolRunner('| cmd1', 'path');
      sinon.assert.calledWith(prepareCmdArgumentsStub, 'cmd1');
    });

    it('check if tool is called with right params when dealing with oc cli', () => {
      const toolStub = sandbox.stub(tl, 'tool');
      const whichStub = sandbox.stub(tl, 'which');
      sandbox.stub(RunnerHandler, 'prepareCmdArguments').returns(['oc']);
      RunnerHandler.prepareToolRunner('| cmd1', 'path');
      // For 'oc' the supplied binary path is used directly, bypassing PATH lookup.
      sinon.assert.calledWith(toolStub, 'path');
      sinon.assert.notCalled(whichStub);
    });

    it('check if which method is called when dealing with tool different from oc', () => {
      const toolStub = sandbox.stub(tl, 'tool');
      const whichStub = sandbox.stub(tl, 'which').returns('whichpath');
      sandbox.stub(RunnerHandler, 'prepareCmdArguments').returns(['cmd']);
      RunnerHandler.prepareToolRunner('| cmd1', 'path');
      // Any other tool is resolved on the PATH via tl.which (check=true).
      sinon.assert.calledWith(whichStub, 'cmd', true);
      sinon.assert.calledWith(toolStub, 'whichpath');
    });
  });
219 |
220 | describe('#initToolRunners', () => {
221 | it('return empty array if there are no commands', () => {
222 | const res = RunnerHandler.initToolRunners([''], 'path');
223 | expect(res).deep.equal([]);
224 | });
225 |
226 | it('check prepareToolRunner is called n times where n are the commands', () => {
227 | const prepareToolRunnerStub = sandbox
228 | .stub(RunnerHandler, 'prepareToolRunner')
229 | .returns(stubs.tr);
230 | const res = RunnerHandler.initToolRunners(
231 | ['cmd1', 'cmd2', 'cmd3'],
232 | 'path'
233 | );
234 | sinon.assert.calledThrice(prepareToolRunnerStub);
235 | expect(res.length).equals(3);
236 | });
237 | });
238 |
  describe('#execOcSync', () => {
    // Canned result returned by the stubbed tool runner's execSync.
    const execRes: IExecSyncResult = {
      code: 1,
      error: undefined,
      stderr: undefined,
      stdout: 'xxxxxx xxxxxx xxxxxxx xxxxxx'
    };

    it('check tl.tool is called with right param if ocPath input is passed to execOcSync', () => {
      stubs.tr.execSync = () => execRes;
      const toolStub = sandbox.stub(tl, 'tool').returns(stubs.tr);
      RunnerHandler.execOcSync('path', 'args');
      sinon.assert.calledWith(toolStub, 'path');
    });

    it('check tl.tool is called with right param if ocPath input is null', () => {
      stubs.tr.execSync = () => execRes;
      const toolStub = sandbox.stub(tl, 'tool').returns(stubs.tr);
      RunnerHandler.execOcSync(null, 'args');
      // A null path falls back to invoking plain 'oc'.
      sinon.assert.calledWith(toolStub, 'oc');
    });

    it('return correct result if execSync successfully completes', () => {
      stubs.tr.execSync = () => execRes;
      sandbox.stub(tl, 'tool').returns(stubs.tr);
      const res = RunnerHandler.execOcSync('path', 'args');
      expect(res).equals(execRes);
    });

    it('return undefined if execSync throws', () => {
      // NOTE(review): despite the test name, the assertions below expect a
      // synthesized error result (code 1, wrapped Error), not undefined —
      // consider renaming the test.
      const execResult = {
        code: 1,
        stderr: '',
        stdout: '',
        error: new Error(`Failed when executing args. Error: text`)
      };
      stubs.tr.execSync = sinon.stub().throws('text');
      sandbox.stub(tl, 'tool').returns(stubs.tr);
      const res = RunnerHandler.execOcSync('path', 'args');
      expect(res.code).equals(execResult.code);
      expect(res.stderr).equals('');
      expect(res.stdout).equals('');
      expect(res.error.message.indexOf('Failed when executing args. Error: text')).equals(0);
    });
  });
284 | });
285 |
--------------------------------------------------------------------------------
/test/oc-install.test.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
5 | import * as chai from 'chai';
6 | import * as sinon from 'sinon';
7 | import * as fs from 'fs';
8 |
9 | import { IExecSyncResult } from 'azure-pipelines-task-lib/toolrunner';
10 | import { InstallHandler } from '../src/oc-install';
11 | import { RunnerHandler } from '../src/oc-exec';
12 | import { LINUXV3, MACOSXV3, WINV3, LINUXV4, MACOSXV4, WINV4, OC_TAR_GZ, OC_ZIP, LATEST, ZIP, TAR_GZ } from '../src/constants';
13 | import { ToolRunnerStub } from './toolrunnerStub';
14 | import * as utils from '../src/utils/zip_helper';
15 |
16 | const {expect} = chai;
17 | import path = require('path');
18 | import tl = require('azure-pipelines-task-lib/task');
19 |
20 | describe('InstallHandler', () => {
21 | let sandbox: sinon.SinonSandbox;
22 | let stubs: ToolRunnerStub;
23 | const testOutDir = `${__dirname}/../out/test/ocInstall`;
24 |
25 | beforeEach(() => {
26 | sandbox = sinon.createSandbox();
27 | stubs = new ToolRunnerStub(sandbox);
28 | process.env.SYSTEM_DEFAULTWORKINGDIRECTORY = testOutDir;
29 | });
30 |
31 | afterEach(() => {
32 | sandbox.restore();
33 | delete process.env.SYSTEM_DEFAULTWORKINGDIRECTORY;
34 | });
35 |
  describe('#ocInstall', () => {
    // installOc takes a pre-resolved bundle descriptor; when it is not valid,
    // the latest stable release must be looked up first.
    it('check if latestStable method is called if no ocVersion is passed', async () => {
      const latestStub = sandbox.stub(InstallHandler, 'latestStable').returns({ valid: true, type: 'url', value: 'http://url.com/ocbundle'});
      sandbox.stub(fs, 'existsSync').returns(true);
      sandbox.stub(InstallHandler, 'downloadAndExtract').resolves({ found: true, path: 'path' });
      await InstallHandler.installOc({ valid: false, reason: '' }, 'Darwin', false, '');
      expect(latestStub.calledOnce).to.be.true;
    });

    it('return correct error message if lastest version is not found', async () => {
      // A failed lookup is propagated verbatim as a not-found result.
      sandbox.stub(InstallHandler, 'latestStable').returns({ valid: false, reason: 'Unable to find Oc bundle url. OS Agent is not supported at this moment.' });
      const res = await InstallHandler.installOc({ valid: false, reason: '' }, 'Darwin', false, '');
      expect(res).deep.equals({ found: false, reason: 'Unable to find Oc bundle url. OS Agent is not supported at this moment.' });

    });

    it('return correct error message if task fails when downloadAndExtract doesnt return a valid ocBinary', async () => {
      sandbox.stub(fs, 'existsSync').returns(true);
      sandbox.stub(InstallHandler, 'downloadAndExtract').resolves({ found: false, reason: 'URL where to download oc is not valid.' });
      const res = await InstallHandler.installOc({ valid: true, type: 'url', value: 'path' }, 'Darwin', false, '');
      expect(res).deep.equals({ found: false, reason: 'URL where to download oc is not valid.' });
    });

    it('check if value returned by downloadAndExtract is returned when valid', async () => {
      sandbox.stub(fs, 'existsSync').returns(true);
      sandbox.stub(InstallHandler, 'downloadAndExtract').resolves({ found: true, path: 'path' });
      const result = await InstallHandler.installOc(
        { valid: true, type: 'url', value: 'path' },
        'Darwin',
        false,
        ''
      );
      expect(result).deep.equals({ found: true, path: 'path' });
    });
  });
71 |
72 | describe('#latestStable', () => {
73 | it('check if binary not found returned if osType input is not valid', () => {
74 | sandbox.stub(InstallHandler, 'getOcBundleByOSAndVersion').returns(undefined);
75 | const res = InstallHandler.latestStable('fakeOS');
76 | expect(res).deep.equals({ valid: false, reason: 'Unable to find Oc bundle url. OS Agent is not supported at this moment.' });
77 | });
78 |
79 | it('check if url returned is valid based on OSType input', () => {
80 | sandbox.stub(InstallHandler, 'getOcBundleByOSAndVersion').returns('linux/oc.tar.gz');
81 | const res = InstallHandler.latestStable('linux');
82 | const ocUtils = InstallHandler.getOcUtils();
83 | expect(res).deep.equals({
84 | valid: true,
85 | type: 'url',
86 | value: `${ocUtils.openshiftV4BaseUrl}/${LATEST}/linux/oc.tar.gz`
87 | });
88 | });
89 | });
90 |
91 | describe('#ocBundleURL', () => {
92 | it('should return null when no version is passed', () => {
93 | const result = InstallHandler.ocBundleURL('', 'Linux');
94 | expect(result).to.be.undefined;
95 | });
96 |
97 | it('should return null when the version passed is null', () => {
98 | const result = InstallHandler.ocBundleURL(undefined, 'Linux');
99 | expect(result).to.be.undefined;
100 | });
101 |
102 | it('should return null if no valid version is passed', () => {
103 | const result = InstallHandler.ocBundleURL('version', 'Linux');
104 | expect(result).to.be.undefined;
105 | });
106 |
107 | it('should return correct url if oc version (v = 3) is valid', () => {
108 | const bundle = 'linux/oc.tar.gz';
109 | const version = '3.11.0';
110 | const ocUtils = InstallHandler.getOcUtils();
111 | const url = `${ocUtils.openshiftV3BaseUrl}/${version}/${bundle}`;
112 | sandbox.stub(InstallHandler, 'getOcBundleByOSAndVersion').returns(bundle);
113 | const res = InstallHandler.ocBundleURL(version, 'Linux');
114 | expect(res).equals(url);
115 | });
116 |
117 | it('should return correct url if oc version (v = 3) is valid', () => {
118 | const bundle = 'linux/oc.tar.gz';
119 | const version = '4.11';
120 | const ocUtils = InstallHandler.getOcUtils();
121 | const url = `${ocUtils.openshiftV4BaseUrl}/${version}/${bundle}`;
122 | sandbox.stub(InstallHandler, 'getOcBundleByOSAndVersion').returns(bundle);
123 | const res = InstallHandler.ocBundleURL(version, 'Linux');
124 | expect(res).equals(url);
125 | });
126 |
127 | it('should return null if oc version requested is different from the versions supported (3 and 4)', () => {
128 | const version = '5.1.0';
129 | const res = InstallHandler.ocBundleURL(version, 'Linux');
130 | expect(res).to.be.undefined;
131 | });
132 |
133 | it('should return null if no oc bundle url is found', () => {
134 | const version = '4.11';
135 | sandbox.stub(InstallHandler, 'getOcBundleByOSAndVersion').returns(null);
136 | const res = InstallHandler.ocBundleURL(version, 'Linux');
137 | expect(res).to.be.undefined;
138 | });
139 |
140 | it('should return null if latest version is requestd but version passed as param is invalid', () => {
141 | const version = '3';
142 | const res = InstallHandler.ocBundleURL(version, 'Linux', true);
143 | expect(res).to.be.undefined;
144 | });
145 |
146 | it('should return null if latest version is requestd but version passed as param dont have a latest version', () => {
147 | const version = '3.17';
148 | const res = InstallHandler.ocBundleURL(version, 'Linux', true);
149 | expect(res).to.be.undefined;
150 | });
151 | });
152 |
  // Tests for InstallHandler.downloadAndExtract. The tl.exist stub is sequenced with
  // onFirstCall/onSecondCall/onThirdCall; per these tests the calls appear to check,
  // in order: the download directory, the downloaded archive, the extracted oc binary
  // — inferred from the test titles, confirm against src/oc-install.ts.
  describe('#downloadAndExtract', () => {
    it('return correct error message if url is not valid', async () => {
      const res = await InstallHandler.downloadAndExtract(
        '',
        'path',
        'Linux',
        '',
        'ip:port'
      );
      expect(res).deep.equals({ found: false, reason: 'URL where to download oc is not valid.' });
    });

    it('return correct error message if download dir no exists', async () => {
      const normalizeStub = sandbox.stub(path, 'normalize').returns('path');
      // Download directory missing -> immediate failure, no download attempted.
      sandbox.stub(tl, 'exist').returns(false);
      const ret = await InstallHandler.downloadAndExtract('url', 'path', 'Linux', '', 'ip:port');
      sinon.assert.calledOnce(normalizeStub)
      expect(ret).deep.equals({ found: false, reason: `Unable to extract Oc executable from archive. Directory path does not exist.` });
    });

    it('curl is called if archive path no exists', async () => {
      sandbox.stub(path, 'normalize').returns('path');
      // Dir exists, archive does not -> download via curl is triggered.
      sandbox
        .stub(tl, 'exist')
        .onFirstCall()
        .returns(true)
        .onSecondCall()
        .returns(false);
      const toolStub = sandbox.stub(tl, 'tool').returns(stubs.tr);
      // The stubbed toolrunner makes later steps fail; only the curl invocation matters here.
      try {
        await InstallHandler.downloadAndExtract('url', 'path', 'Linux', '', '');
      } catch (ex) {}
      sinon.assert.calledWith(toolStub, 'curl');
      // 5 args expected without a proxy (see next test for the proxied variant).
      expect(stubs.args.length).equals(5);
    });

    it('curl is called with -x arg if proxy is valid', async () => {
      sandbox.stub(path, 'normalize').returns('path');
      sandbox
        .stub(tl, 'exist')
        .onFirstCall()
        .returns(true)
        .onSecondCall()
        .returns(false);
      sandbox.stub(tl, 'tool').returns(stubs.tr);
      try {
        await InstallHandler.downloadAndExtract(
          'url',
          'path',
          'Linux',
          '',
          'ip:port'
        );
      } catch (ex) {}

      // Two extra args compared to the no-proxy case (presumably '-x' plus the proxy value).
      expect(stubs.args.length).equals(7);
    });

    it('null if oc path no exists', async () => {
      sandbox.stub(path, 'normalize').returns('path');
      // Dir and archive exist, but the extracted binary does not.
      sandbox
        .stub(tl, 'exist')
        .onFirstCall()
        .returns(true)
        .onSecondCall()
        .returns(true)
        .onThirdCall()
        .returns(false);
      sandbox.stub(utils, 'unzipArchive');
      const res = await InstallHandler.downloadAndExtract(
        'url',
        'path',
        'Linux',
        '',
        'ip:port'
      );
      expect(res).deep.equals({ found: false, reason: 'Oc binary path path doesn\'t exist.' });
    });

    it('check if correct oc path for Windows', async () => {
      sandbox.stub(path, 'normalize').returns('path');
      sandbox
        .stub(tl, 'exist')
        .onFirstCall()
        .returns(true)
        .onSecondCall()
        .returns(true)
        .onThirdCall()
        .returns(true);
      sandbox.stub(utils, 'unzipArchive');
      // On Windows the binary name carries the .exe suffix.
      sandbox.stub(path, 'join').returns('path/oc.exe');
      sandbox.stub(fs, 'chmodSync');
      const res = await InstallHandler.downloadAndExtract(
        'url',
        'path',
        'Windows_NT',
        '',
        'ip:port'
      );
      expect(res).deep.equals({ found: true, path: 'path/oc.exe' });
    });

    it('check if correct oc path for Linux/Mac', async () => {
      sandbox.stub(path, 'normalize').returns('path');
      sandbox
        .stub(tl, 'exist')
        .onFirstCall()
        .returns(true)
        .onSecondCall()
        .returns(true)
        .onThirdCall()
        .returns(true);
      sandbox.stub(utils, 'unzipArchive');
      sandbox.stub(path, 'join').returns('path/oc');
      const chmod = sandbox.stub(fs, 'chmodSync');
      const res = await InstallHandler.downloadAndExtract(
        'url',
        'path',
        'Linux',
        '',
        'ip:port'
      );
      expect(res).deep.equals({ found: true, path: 'path/oc' });
      // On POSIX systems the extracted binary must be made executable.
      sinon.assert.calledWith(chmod, 'path/oc', '0755');
    });
  });
279 |
280 | describe('#getOcBundleByOS', () => {
281 | it('return correct value if osType is linux, major 4 and no version', () => {
282 | const res = InstallHandler.getOcBundleByOSAndVersion('Linux', 4);
283 | expect(res).equals(`${LINUXV4}.${TAR_GZ}`);
284 | });
285 |
286 | it('return correct value if osType is linux, major 4 and version', () => {
287 | const res = InstallHandler.getOcBundleByOSAndVersion('Linux', 4, '4.1');
288 | expect(res).equals(`${LINUXV4}-4.1.${TAR_GZ}`);
289 | });
290 |
291 | it('return correct value if osType is linux, major 3', () => {
292 | const res = InstallHandler.getOcBundleByOSAndVersion('Linux', 3);
293 | expect(res).equals(`${LINUXV3}/${OC_TAR_GZ}`);
294 | });
295 |
296 | it('return correct value if osType is windows, major 4 and no version', () => {
297 | const res = InstallHandler.getOcBundleByOSAndVersion('Windows_NT', 4);
298 | expect(res).equals(`${WINV4}.${ZIP}`);
299 | });
300 |
301 | it('return correct value if osType is windows, major 4 and version', () => {
302 | const res = InstallHandler.getOcBundleByOSAndVersion('Windows_NT', 4, '4.1');
303 | expect(res).equals(`${WINV4}-4.1.${ZIP}`);
304 | });
305 |
306 | it('return correct value if osType is windows, major 3', () => {
307 | const res = InstallHandler.getOcBundleByOSAndVersion('Windows_NT', 3);
308 | expect(res).equals(`${WINV3}/${OC_ZIP}`);
309 | });
310 |
311 | it('return correct value if osType is MACOSX, major 4 and no version', () => {
312 | const res = InstallHandler.getOcBundleByOSAndVersion('Darwin', 4);
313 | expect(res).equals(`${MACOSXV4}.${TAR_GZ}`);
314 | });
315 |
316 | it('return correct value if osType is MACOSX, major 4 and version', () => {
317 | const res = InstallHandler.getOcBundleByOSAndVersion('Darwin', 4, '4.1');
318 | expect(res).equals(`${MACOSXV4}-4.1.${TAR_GZ}`);
319 | });
320 |
321 | it('return correct value if osType is MACOSX, major 3 and no version', () => {
322 | const res = InstallHandler.getOcBundleByOSAndVersion('Darwin', 3);
323 | expect(res).equals(`${MACOSXV3}/${OC_TAR_GZ}`);
324 | });
325 |
326 | it('return null if osType is neither linux nor macosx nor windows', () => {
327 | const res = InstallHandler.getOcBundleByOSAndVersion('fakeOS', 3);
328 | expect(res).to.be.undefined;
329 | });
330 | });
331 |
332 | describe('#addOcToPath', () => {
333 | it('adds oc to PATH under Windows', () => {
334 | const ocDir =
335 | 'D:\\a\\r1\\a\\.download\\openshift-origin-client-tools-v3.10.0-dd10d17-windows';
336 | expect(tl.getVariable('PATH')).to.not.contain(ocDir);
337 | InstallHandler.addOcToPath(`${ocDir}\\oc.exe`, 'Windows_NT');
338 | expect(tl.getVariable('PATH')).to.contain(ocDir);
339 | });
340 |
341 | it('adds oc to PATH under macOS', () => {
342 | const ocDir =
343 | '/a/r1/a/.download/openshift-origin-client-tools-v3.10.0-dd10d17-mac';
344 | expect(tl.getVariable('PATH')).to.not.contain(ocDir);
345 | InstallHandler.addOcToPath(`${ocDir}/oc`, 'Darwin');
346 | expect(tl.getVariable('PATH')).to.contain(ocDir);
347 | });
348 |
349 | it('adds oc to PATH under Linux', () => {
350 | const ocDir =
351 | '/a/r1/a/.download/openshift-origin-client-tools-v3.10.0-dd10d17-linux-64bit';
352 | expect(tl.getVariable('PATH')).to.not.contain(ocDir);
353 | InstallHandler.addOcToPath(`${ocDir}/oc`, 'Linux');
354 | expect(tl.getVariable('PATH')).to.contain(ocDir);
355 | });
356 |
357 | it('throws error with null path', () => {
358 | try {
359 | InstallHandler.addOcToPath(null, 'Linux');
360 | expect.fail();
361 | } catch (err) {
362 | expect(err.message).to.eq('path cannot be null or empty');
363 | }
364 | });
365 |
366 | it('throws error with empty path', () => {
367 | try {
368 | InstallHandler.addOcToPath('', 'Linux');
369 | expect.fail();
370 | } catch (err) {
371 | expect(err.message).to.eq('path cannot be null or empty');
372 | }
373 | });
374 | });
375 |
376 | describe('#getLocalOcBinary', () => {
377 | it('returns path found by which if no error occurs and there is no version as input', () => {
378 | const whichStub = sandbox.stub(tl, 'which').returns('path');
379 | const res = InstallHandler.getLocalOcBinary({ valid: false, reason: '' });
380 | sinon.assert.calledWith(whichStub, 'oc');
381 | expect(res).deep.equals({ found: true, path: 'path' });
382 | });
383 |
384 | it('returns undefined if which fails retrieving oc path', () => {
385 | sandbox.stub(tl, 'which').throws();
386 | const res = InstallHandler.getLocalOcBinary({ valid: true, type: 'number', value: '1.1' });
387 | expect(res).deep.equals({ found: false });
388 | });
389 |
390 | it('returns nothing if oc path exists but oc version cannot be retrieved', () => {
391 | sandbox.stub(tl, 'which').returns('path');
392 | const getOcStub = sandbox
393 | .stub(InstallHandler, 'getOcVersion')
394 | .returns({ valid: false, reason: '' });
395 | const res = InstallHandler.getLocalOcBinary({ valid: true, type: 'number', value: '1.1' });
396 | sinon.assert.calledWith(getOcStub, 'path');
397 | expect(res).deep.equals({ found: false });
398 | });
399 |
400 | it('returns nothing if version found locally is not the one user wants to use', () => {
401 | sandbox.stub(tl, 'which').returns('path');
402 | sandbox.stub(InstallHandler, 'getOcVersion').returns({ valid: true, type: 'number', value: '2.1' });
403 | const res = InstallHandler.getLocalOcBinary({ valid: true, type: 'number', value: '1.1' });
404 | expect(res).deep.equals({ found: false });
405 | });
406 | });
407 |
408 | describe('#getVersionFromExecutable', () => {
409 | const versionRes: IExecSyncResult = {
410 | code: 1,
411 | error: undefined,
412 | stderr: undefined,
413 | stdout: 'xxxxxx v4.1.0 xxxxxx xxxxxxx xxxxxx'
414 | };
415 | let execOcStub: sinon.SinonStub;
416 |
417 | beforeEach(() => {
418 | execOcStub = sandbox.stub(RunnerHandler, 'execOcSync');
419 | });
420 |
421 | it('check if correct version is returned if oc version > 4', () => {
422 | execOcStub.returns(versionRes);
423 | const res = InstallHandler.getOcVersion('path');
424 | expect(res).deep.equals({ valid: true, type: 'number', value: 'v4.1.0' });
425 | });
426 |
427 | it('check if execOcSync is called twice if first call returns nothing', () => {
428 | execOcStub
429 | .onFirstCall()
430 | .returns(undefined)
431 | .onSecondCall()
432 | .returns(undefined);
433 | InstallHandler.getOcVersion('path');
434 | sinon.assert.calledTwice(execOcStub);
435 | });
436 |
437 | it('check if correct version is returned if first execOcSync method fails = oc version < 4', () => {
438 | versionRes.stdout = 'xxxxxx v3.2.0 xxxxxx xxxxxxx xxxxxx';
439 | execOcStub
440 | .onFirstCall()
441 | .returns(undefined)
442 | .onSecondCall()
443 | .returns(versionRes);
444 | const res = InstallHandler.getOcVersion('path');
445 | expect(res).deep.equals({ valid: true, type: 'number', value: 'v3.2.0' });
446 | });
447 |
448 | it('returns correct message if both oc calls fail', () => {
449 | execOcStub
450 | .onFirstCall()
451 | .returns(undefined)
452 | .onSecondCall()
453 | .returns(undefined);
454 | const res = InstallHandler.getOcVersion('path');
455 | expect(res).deep.equals({ valid: false, reason: 'An error occured when retrieving version of oc CLI in path' });
456 | });
457 |
458 | it('returns undefined if second call stdout is empty', () => {
459 | versionRes.stdout = undefined;
460 | execOcStub
461 | .onFirstCall()
462 | .returns(undefined)
463 | .onSecondCall()
464 | .returns(versionRes);
465 | const res = InstallHandler.getOcVersion('path');
466 | expect(res).deep.equals({ valid: false, reason: 'An error occured when retrieving version of oc CLI in path' });
467 | });
468 |
469 | it('returns undefined if execOcSync returns a not empty stdout without a valid version in it', () => {
470 | versionRes.stdout = 'xxxxx xxxxx xxxxxx xxxxxx xxxxx';
471 | execOcStub.returns(versionRes);
472 | const res = InstallHandler.getOcVersion('path');
473 | expect(res).deep.equals({ valid: false, reason: 'The version of oc CLI in path is in an unknown format.' });
474 | });
475 | });
476 | });
477 |
--------------------------------------------------------------------------------
/test/toolrunnerStub.ts:
--------------------------------------------------------------------------------
1 | /*-----------------------------------------------------------------------------------------------
2 | * Copyright (c) Red Hat, Inc. All rights reserved.
3 | * Licensed under the MIT License. See LICENSE file in the project root for license information.
4 | *-----------------------------------------------------------------------------------------------*/
import * as sinon from 'sinon';

import { ToolRunner } from 'azure-pipelines-task-lib/toolrunner';
6 |
7 | export class ToolRunnerStub {
8 | public toolrunnerStub: sinon.SinonStubbedInstance;
9 |
10 | public get tr(): ToolRunner {
11 | return (this.toolrunnerStub as unknown) as ToolRunner;
12 | }
13 |
14 | public args: string[] = [];
15 |
16 | constructor(sandbox: sinon.SinonSandbox) {
17 | this.toolrunnerStub = sandbox.stub(ToolRunner.prototype);
18 | this.toolrunnerStub.arg.callsFake((val: string | string[]) => {
19 | Array.isArray(val) ? this.args.push(...val) : this.args.push(val);
20 | return this.tr;
21 | });
22 | this.toolrunnerStub.argIf.callsFake(
23 | (condition: any, val: string | string[]) => {
24 | if (condition) {
25 | Array.isArray(val) ? this.args.push(...val) : this.args.push(val);
26 | }
27 | return this.tr;
28 | }
29 | );
30 |
31 | this.toolrunnerStub.exec.resolves();
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "target": "es6",
5 | "module": "commonjs",
6 | "sourceMap": true,
7 | "declaration": true,
8 | "strict": false,
9 | "noUnusedLocals": true,
10 | "noUnusedParameters": true,
11 | "noImplicitReturns": true,
12 | "noFallthroughCasesInSwitch": true,
13 | "lib": [
14 | "es2015"
15 | ],
16 | "outDir": "out",
17 | "rootDir": "."
18 | }
19 | }
--------------------------------------------------------------------------------
/vss-extension.json:
--------------------------------------------------------------------------------
1 | {
2 | "manifestVersion": 1,
3 | "id": "openshift-vsts",
4 | "publisher": "redhat",
5 | "version": "2.0.1",
6 | "name": "OpenShift Extension",
7 | "description": "OpenShift tasks for Azure DevOps",
8 | "public": true,
9 | "galleryFlags": [
10 | "Preview"
11 | ],
12 | "categories": [
13 | "Azure Pipelines"
14 | ],
15 | "tags": [
16 | "utility",
17 | "tasks",
18 | "openshift",
19 | "oc"
20 | ],
21 | "links": {
22 | "home": {
23 | "uri": "https://github.com/redhat-developer/openshift-vsts"
24 | },
25 | "getstarted": {
26 | "uri": "https://github.com/redhat-developer/openshift-vsts/blob/master/docs/getting-started.md"
27 | },
28 | "support": {
29 | "uri": "https://github.com/redhat-developer/openshift-vsts/issues"
30 | },
31 | "issues": {
32 | "uri": "https://github.com/redhat-developer/openshift-vsts/issues"
33 | },
34 | "repository": {
35 | "uri": "https://github.com/redhat-developer/openshift-vsts"
36 | }
37 | },
38 | "repository": {
39 | "type": "git",
40 | "uri": "https://github.com/redhat-developer/openshift-vsts"
41 | },
42 | "content": {
43 | "details": {
44 | "path": "docs/details.md"
45 | },
46 | "license": {
47 | "path": "LICENSE"
48 | }
49 | },
50 | "targets": [
51 | {
52 | "id": "Microsoft.VisualStudio.Services"
53 | }
54 | ],
55 | "contributions": [
56 | {
57 | "id": "service-endpoint",
58 | "description": "Service endpoint for OpenShift cluster",
59 | "type": "ms.vss-endpoint.service-endpoint-type",
60 | "targets": [
61 | "ms.vss-endpoint.endpoint-types"
62 | ],
63 | "properties": {
64 | "name": "openshift",
65 | "displayName": "OpenShift",
66 | "url": {
67 | "displayName": "Server URL",
68 | "helpText": "URL for the OpenShift cluster to connect to."
69 | },
70 | "authenticationSchemes": [
71 | {
72 | "type": "ms.vss-endpoint.endpoint-auth-scheme-basic",
73 | "inputDescriptors": [
74 | {
75 | "id": "username",
76 | "name": "Username",
77 | "description": "Username",
78 | "inputMode": "textbox",
79 | "validation": {
80 | "isRequired": false,
81 | "dataType": "string"
82 | }
83 | },
84 | {
85 | "id": "password",
86 | "name": "Password",
87 | "description": "Password",
88 | "inputMode": "passwordbox",
89 | "isConfidential": true,
90 | "validation": {
91 | "isRequired": false,
92 | "dataType": "string"
93 | }
94 | },
95 | {
96 | "id": "acceptUntrustedCerts",
97 | "name": "Accept untrusted SSL certificates",
98 | "description": "If checked, self-signed certificates will be accepted. Please do not use this option in production environment.",
99 | "inputMode": "checkbox",
100 | "isConfidential": false,
101 | "validation": {
102 | "isRequired": false,
103 | "dataType": "boolean"
104 | }
105 | },
106 | {
107 | "id": "certificateAuthorityFile",
108 | "name": "Certificate Authority File",
              "description": "The path to a certificate authority file to correctly and securely authenticate with an OpenShift server that uses HTTPS",
110 | "inputMode": "textbox",
111 | "isConfidential": false,
112 | "validation": {
113 | "isRequired": false,
114 | "dataType": "string"
115 | }
116 | }
117 | ]
118 | },
119 | {
120 | "type": "ms.vss-endpoint.endpoint-auth-scheme-token",
121 | "inputDescriptors": [
122 | {
123 | "id": "acceptUntrustedCerts",
124 | "name": "Accept untrusted SSL certificates",
125 | "description": "If checked, self-signed certificates will be accepted. Please do not use this option in production environment.",
126 | "inputMode": "checkbox",
127 | "isConfidential": false,
128 | "validation": {
129 | "isRequired": false,
130 | "dataType": "boolean"
131 | }
132 | },
133 | {
134 | "id": "certificateAuthorityFile",
135 | "name": "Certificate Authority File",
              "description": "The path to a certificate authority file to correctly and securely authenticate with an OpenShift server that uses HTTPS",
137 | "inputMode": "textbox",
138 | "isConfidential": false,
139 | "validation": {
140 | "isRequired": false,
141 | "dataType": "string"
142 | }
143 | }
144 | ]
145 | },
146 | {
147 | "type": "ms.vss-endpoint.endpoint-auth-scheme-none",
148 | "inputDescriptors": [
149 | {
150 | "id": "kubeconfig",
151 | "name": "Kubeconfig",
152 | "description": "Copy your kubectl config",
153 | "inputMode": "textArea",
154 | "isConfidential": true,
155 | "validation": {
156 | "isRequired": true,
157 | "dataType": "string"
158 | }
159 | }
160 | ]
161 | }
162 | ],
163 | "helpMarkDown": "Learn More"
164 | }
165 | },
166 | {
167 | "id": "oc-cmd-task",
168 | "description": "Task to run user-defined oc command.",
169 | "type": "ms.vss-distributed-task.task",
170 | "targets": [
171 | "ms.vss-distributed-task.tasks"
172 | ],
173 | "properties": {
174 | "name": "tasks/oc-cmd"
175 | }
176 | },
177 | {
178 | "id": "oc-conditional-cmd-task",
179 | "description": "Task to run user-defined oc command when a condition is met.",
180 | "type": "ms.vss-distributed-task.task",
181 | "targets": [
182 | "ms.vss-distributed-task.tasks"
183 | ],
184 | "properties": {
185 | "name": "tasks/oc-conditional-cmd"
186 | }
187 | },
188 | {
189 | "id": "config-map-task",
190 | "description": "Task to apply ConfigMap properties.",
191 | "type": "ms.vss-distributed-task.task",
192 | "targets": [
193 | "ms.vss-distributed-task.tasks"
194 | ],
195 | "properties": {
196 | "name": "tasks/config-map"
197 | }
198 | },
199 | {
200 | "id": "oc-setup-task",
201 | "description": "Task to install oc and setup PATH.",
202 | "type": "ms.vss-distributed-task.task",
203 | "targets": [
204 | "ms.vss-distributed-task.tasks"
205 | ],
206 | "properties": {
207 | "name": "tasks/oc-setup"
208 | }
209 | }
210 | ],
211 | "icons": {
212 | "default": "images/logo.png"
213 | },
214 | "files": [
215 | {
216 | "path": "tasks/oc-cmd"
217 | },
218 | {
219 | "path": "tasks/oc-conditional-cmd"
220 | },
221 | {
222 | "path": "tasks/config-map"
223 | },
224 | {
225 | "path": "tasks/oc-setup"
226 | }
227 | ]
228 | }
--------------------------------------------------------------------------------