├── .eslintrc.json
├── .github
│   └── workflows
│       ├── pr.yaml
│       └── release.yaml
├── .gitignore
├── .npmrc
├── .vscode
│   ├── extensions.json
│   ├── launch.json
│   ├── settings.json
│   └── tasks.json
├── .vscodeignore
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── examples
│   ├── rpg-utils
│   │   ├── .gitignore
│   │   ├── Rules.mk
│   │   ├── iproj.json
│   │   ├── qrpgleref
│   │   │   ├── math.rpgleinc
│   │   │   └── string.rpgleinc
│   │   ├── qrpglesrc
│   │   │   ├── Rules.mk
│   │   │   ├── math.rpgle
│   │   │   └── string.rpgle
│   │   └── qtestsrc
│   │       ├── math.test.rpgle
│   │       ├── string.test.rpgle
│   │       └── testing.json
│   └── rpgunit-stmf
│       ├── .vscode
│       │   └── testing.json
│       ├── cbl.test.cblle
│       ├── rpgsqlstmf.test.sqlrpgle
│       └── rpgstmf.test.rpgle
├── icon.png
├── package-lock.json
├── package.json
├── schemas
│   └── testing.json
├── snippets
│   └── rpgle.json
├── src
│   ├── api
│   │   └── ibmi.ts
│   ├── codeCoverage.ts
│   ├── components
│   │   ├── codeCov.ts
│   │   └── rpgUnit.ts
│   ├── config.ts
│   ├── configuration.ts
│   ├── extension.ts
│   ├── fileCoverage.ts
│   ├── github.ts
│   ├── logger.ts
│   ├── manager.ts
│   ├── runner.ts
│   ├── storage.ts
│   ├── testCase.ts
│   ├── testDirectory.ts
│   ├── testFile.ts
│   ├── testLogger.ts
│   ├── testObject.ts
│   ├── types.ts
│   ├── utils.ts
│   └── xmlParser.ts
├── tsconfig.json
└── webpack.config.js
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "parser": "@typescript-eslint/parser",
4 | "parserOptions": {
5 | "ecmaVersion": 6,
6 | "sourceType": "module"
7 | },
8 | "plugins": [
9 | "@typescript-eslint"
10 | ],
11 | "rules": {
12 | "@typescript-eslint/naming-convention": [
13 | "warn",
14 | {
15 | "selector": "import",
16 | "format": [ "camelCase", "PascalCase" ]
17 | }
18 | ],
19 | "@typescript-eslint/semi": "warn",
20 | "curly": "warn",
21 | "eqeqeq": "warn",
22 | "no-throw-literal": "warn",
23 | "semi": "off"
24 | },
25 | "ignorePatterns": [
26 | "out",
27 | "dist",
28 | "**/*.d.ts"
29 | ]
30 | }
--------------------------------------------------------------------------------
/.github/workflows/pr.yaml:
--------------------------------------------------------------------------------
1 | name: Build VSIX
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | # TODO: Uncomment this when Code for IBM i 3.0.0 is released
7 | # pull_request:
8 |
9 | jobs:
10 | build:
11 | name: Validation
12 |
13 | runs-on: ubuntu-latest
14 |
15 | strategy:
16 | matrix:
17 | node-version: [20.x]
18 |
19 | steps:
20 | - name: Checkout
21 | uses: actions/checkout@v4
22 |
23 | - name: Use Node.js ${{ matrix.node-version }}
24 | uses: actions/setup-node@v4
25 | with:
26 | node-version: ${{ matrix.node-version }}
27 | registry-url: 'https://registry.npmjs.org'
28 |
29 | - name: Install NPM Dependencies
30 | run: |
31 | npm install
32 | npm install -g vsce
33 |
34 | - name: Package
35 | run: vsce package
36 |
37 | - name: Upload VSIX
38 | uses: actions/upload-artifact@v4
39 | with:
40 | name: vscode-ibmi-testing-pr-build
41 | path: ./*.vsix
42 | if-no-files-found: error
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: Release and Publish to the Marketplace and Open VSX
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | publish_openvsx:
7 | description: 'Publish to Open VSX'
8 | type: boolean
9 | required: true
10 | default: true
11 | publish_marketplace:
12 | description: 'Publish to Marketplace'
13 | type: boolean
14 | required: true
15 | default: true
16 |
17 | release:
18 | types: [created]
19 |
20 | jobs:
21 | release:
22 | name: Release and Publish
23 |
24 | runs-on: ubuntu-latest
25 |
26 | strategy:
27 | matrix:
28 | node-version: [20.x]
29 |
30 | steps:
31 | - name: Checkout
32 | uses: actions/checkout@v4
33 | with:
34 | ref: main
35 |
36 | - name: Use Node.js ${{ matrix.node-version }}
37 | uses: actions/setup-node@v4
38 | with:
39 | node-version: ${{ matrix.node-version }}
40 | registry-url: 'https://registry.npmjs.org'
41 |
42 | - name: Install NPM Dependencies
43 | run: |
44 | npm install
45 | npm install -g vsce ovsx
46 |
47 | - name: Publish to Open VSX
48 | if: github.event_name == 'release' || inputs.publish_openvsx == true
49 | run: npx ovsx publish -p $OPENVSX_TOKEN
50 | env:
51 | OPENVSX_TOKEN: ${{ secrets.OPENVSX_TOKEN }}
52 |
53 | - name: Publish to Marketplace
54 | if: github.event_name == 'release' || inputs.publish_marketplace == true
55 | run: vsce publish -p $PUBLISHER_TOKEN
56 | env:
57 | PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
58 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | dist
3 | out
4 | .DS_Store
5 | *.vsix
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | registry = "https://registry.npmjs.org/"
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | // See http://go.microsoft.com/fwlink/?LinkId=827846
3 | // for the documentation about the extensions.json format
4 | "recommendations": [
5 | "dbaeumer.vscode-eslint",
6 | "amodio.tsl-problem-matcher"
7 | ]
8 | }
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | // A launch configuration that compiles the extension and then opens it inside a new window
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | {
6 | "version": "0.2.0",
7 | "configurations": [
8 | {
9 | "name": "Run Extension",
10 | "type": "extensionHost",
11 | "request": "launch",
12 | "args": [
13 | "--extensionDevelopmentPath=${workspaceFolder}",
14 | "${workspaceFolder}/examples/rpg-utils",
15 | "${workspaceFolder}/examples/rpgunit-stmf"
16 | ],
17 | "outFiles": [
18 | "${workspaceFolder}/dist/**/*.js"
19 | ],
20 | "preLaunchTask": "${defaultBuildTask}"
21 | }
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | // Place your settings in this file to overwrite default and user settings.
2 | {
3 | "files.exclude": {
4 | "out": false, // set this to true to hide the "out" folder with the compiled JS files
5 | "dist": false // set this to true to hide the "dist" folder with the compiled JS files
6 | },
7 | "search.exclude": {
8 | "out": true, // set this to false to include "out" folder in search results
9 | "dist": true // set this to false to include "dist" folder in search results
10 | },
11 | // Turn off tsc task auto detection since we have the necessary tasks as npm scripts
12 | "typescript.tsc.autoDetect": "off"
13 | }
14 |
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | // See https://go.microsoft.com/fwlink/?LinkId=733558
2 | // for the documentation about the tasks.json format
3 | {
4 | "version": "2.0.0",
5 | "tasks": [
6 | {
7 | "type": "npm",
8 | "script": "watch",
9 | "problemMatcher": "$ts-webpack-watch",
10 | "isBackground": true,
11 | "presentation": {
12 | "reveal": "never",
13 | "group": "watchers"
14 | },
15 | "group": {
16 | "kind": "build",
17 | "isDefault": true
18 | }
19 | }
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/.vscodeignore:
--------------------------------------------------------------------------------
1 | .github/**
2 | .vscode/**
3 | .vscode-test/**
4 | out/**
5 | node_modules/**
6 | src/**
7 | examples/**
8 | .gitignore
9 | .yarnrc
10 | webpack.config.js
11 | vsc-extension-quickstart.md
12 | **/tsconfig.json
13 | **/.eslintrc.json
14 | **/*.map
15 | **/*.ts
16 | **/.vscode-test.*
17 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Change Log
2 |
3 | All notable changes to the "vscode-ibmi-testing" extension will be documented in this file.
4 |
5 | Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.
6 |
7 | ## [Unreleased]
8 |
9 | - Initial release
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # 🙏 Contributing to IBM i Testing
2 |
3 | Everyone is welcome to contribute to the **IBM i Testing** extension! We appreciate any contribution, whether to documentation or code.
4 |
5 | For ideas on where to help out, check out the [open issues](https://github.com/IBM/vscode-ibmi-testing/issues), especially those labeled as [good first issue](https://github.com/IBM/vscode-ibmi-testing/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22). Once you are ready to share your changes, please open a pull request and the maintainers of this project will review it.
6 |
7 | ## Getting Started
8 |
9 | 1. Install [VS Code](https://code.visualstudio.com/download) and [Node.js](https://nodejs.org/en/download/package-manager).
10 | 2. Create a [fork](https://github.com/IBM/vscode-ibmi-testing/fork) of this repository.
11 | 3. Clone your fork.
12 | ```sh
13 | git clone https://github.com/your-username/vscode-ibmi-testing.git
14 | cd vscode-ibmi-testing
15 | ```
16 | 4. Install all dependencies.
17 | ```sh
18 | npm install
19 | ```
20 | 5. Use `Run Extension` from VS Code's `Run and Debug` view.
21 |
22 | ## Contributors
23 |
24 | Thanks so much to everyone [who has contributed](https://github.com/IBM/vscode-ibmi-testing/graphs/contributors).
25 |
26 | * [@SanjulaGanepola](https://github.com/SanjulaGanepola)
27 | * [@worksofliam](https://github.com/worksofliam)
28 | * [@edmundreinhardt](https://github.com/edmundreinhardt)
29 | * [@tools400](https://github.com/tools400)
30 | * [@NicolasSchindler](https://github.com/NicolasSchindler)
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2025 IBM Corporation
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # IBM i Testing
2 |
3 |
4 |
5 | [](https://marketplace.visualstudio.com/items?itemName=IBM.vscode-ibmi-testing)
6 | [](https://marketplace.visualstudio.com/items?itemName=IBM.vscode-ibmi-testing)
7 |
8 | The [IBM i Testing](https://marketplace.visualstudio.com/items?itemName=IBM.vscode-ibmi-testing) extension allows developers to run unit tests and generate code coverage results for RPG and COBOL programs on IBM i. Under the covers, this extension leverages the [RPGUnit](https://irpgunit.sourceforge.io/help) testing framework.
9 |
10 | * 👨‍💻 **Run Tests**: Visualize and run test suites from local files or source members.
11 | * ⚙️ **Configure Tests**: Configure parameters to compile (`RUCRTRPG`/`RUCRTCBL`) and run (`RUCALLTST`) tests (see the sketch and example `testing.json` files below).
12 | * 📋 **View Test Results**: View detailed test results along with inline failure/error messages.
13 | * 🎯 **Generate Code Coverage**: View line and procedure level code coverage results as an overlay in the editor.
14 |
15 | ✨ Check out the full documentation [here](https://codefori.github.io/docs/developing/testing/)!
--------------------------------------------------------------------------------
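The **Configure Tests** bullet above refers to per-project `testing.json` files. As a minimal sketch assembled only from keys that appear in the `examples/` folders below (the `text` value is an illustrative placeholder, not a default), such a file might look like:

```json
{
  "rpgunit": {
    "rucrtrpg": {
      "text": "My RPG tests",
      "tgtCcsid": 37,
      "dbgView": "*SOURCE",
      "cOption": ["*EVENTF"]
    },
    "rucrtcbl": {
      "tgtCcsid": 37,
      "dbgView": "*SOURCE",
      "cOption": ["*EVENTF"]
    },
    "prefix": "T_"
  }
}
```

The authoritative set of keys is defined by `schemas/testing.json` in this repository, while run-time (`RUCALLTST`) options such as run order, library list, and report detail are exposed through the `IBM i Testing.*` settings contributed in `package.json`.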
/examples/rpg-utils/.gitignore:
--------------------------------------------------------------------------------
1 | .logs
2 | .evfevent
3 | .env
--------------------------------------------------------------------------------
/examples/rpg-utils/Rules.mk:
--------------------------------------------------------------------------------
1 | SUBDIRS = qrpglesrc
--------------------------------------------------------------------------------
/examples/rpg-utils/iproj.json:
--------------------------------------------------------------------------------
1 | {
2 | "description": "RPG Utilities",
3 | "repository": "https://github.com/IBM/vscode-ibmi-testing/tree/main/examples/rpgutils",
4 | "curlib": "&CURLIB",
5 | "preUsrlibl": [
6 | "RPGUNIT",
7 | "QDEVTOOLS"
8 | ],
9 | "includePath": [
10 | "qrpgleref"
11 | ],
12 | "buildCommand": "makei build",
13 | "compileCommand": "makei compile -f {filename}"
14 | }
--------------------------------------------------------------------------------
/examples/rpg-utils/qrpgleref/math.rpgleinc:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | dcl-pr factorial int(20) extproc('FACTORIAL');
4 | n int(3) const;
5 | end-pr;
6 |
7 | dcl-pr fibonacci int(20) extproc('FIBONACCI');
8 | n int(3) const;
9 | end-pr;
10 |
11 | dcl-pr oddOrEven varchar(4) extproc('ODDOREVEN');
12 | num int(10) const;
13 | end-pr;
--------------------------------------------------------------------------------
/examples/rpg-utils/qrpgleref/string.rpgleinc:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | dcl-pr isPalindrome ind extproc('ISPALINDROME');
4 | str varchar(30) const;
5 | end-pr;
--------------------------------------------------------------------------------
/examples/rpg-utils/qrpglesrc/Rules.mk:
--------------------------------------------------------------------------------
1 | MATH.MODULE: math.rpgle
2 | STRING.MODULE: string.rpgle
--------------------------------------------------------------------------------
/examples/rpg-utils/qrpglesrc/math.rpgle:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | ctl-opt nomain;
4 |
5 | dcl-proc factorial export;
6 | dcl-pi factorial int(20);
7 | n int(3) const;
8 | end-pi;
9 |
10 | if (n = 0);
11 | return 1;
12 | else;
13 | return n * factorial(n-1);
14 | endif;
15 | end-proc;
16 |
17 | dcl-proc fibonacci export;
18 | dcl-pi fibonacci int(20);
19 | n int(3) const;
20 | end-pi;
21 |
22 | if (n = 0) or (n = 1);
23 | return n;
24 | endif;
25 |
26 | return fibonacci(n - 1) + fibonacci(n - 2);
27 | end-proc;
28 |
29 | dcl-proc oddOrEven export;
30 | dcl-pi *n varchar(4);
31 | num int(10) const;
32 | end-pi;
33 |
34 | if %rem(num:2) = 0;
35 | return 'Even';
36 | else;
37 | return 'Odd';
38 | endif;
39 | end-proc;
--------------------------------------------------------------------------------
/examples/rpg-utils/qrpglesrc/string.rpgle:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | ctl-opt nomain;
4 |
5 | dcl-proc isPalindrome export;
6 | dcl-pi *n ind;
7 | str varchar(30) const;
8 | end-pi;
9 |
10 | dcl-s i int(5);
11 | dcl-s new_str varchar(30);
12 | dcl-s alphnum varchar(62) inz('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789');
13 | dcl-s char_to_check char(1);
14 | dcl-s new_len int(5);
15 | dcl-s ispal ind inz(*off);
16 |
17 | for i = 1 to %len(str);
18 | char_to_check = %subst(str:i:1);
19 | if %check(alphnum: char_to_check) = 0;
20 | new_str = new_str + %lower(char_to_check);
21 | endif;
22 | endfor;
23 |
24 | new_len = %len(new_str);
25 | i = 1;
26 | dow (i < new_len) and (%subst(new_str:i:1) = %subst(new_str:new_len:1));
27 | i += 1;
28 | new_len -= 1;
29 | enddo;
30 |
31 | if i >= new_len;
32 | ispal = *on;
33 | endif;
34 |
35 | return ispal;
36 | end-proc;
--------------------------------------------------------------------------------
/examples/rpg-utils/qtestsrc/math.test.rpgle:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | ctl-opt nomain;
4 |
5 | /include qrpgleref/math.rpgleinc
6 | /include qinclude,TESTCASE
7 |
8 | dcl-proc test_factorial export;
9 | dcl-pi *n extproc(*dclcase) end-pi;
10 |
11 | iEqual(1 : factorial(0));
12 | iEqual(1 : factorial(1));
13 | iEqual(120 : factorial(5));
14 | iEqual(2432902008176640000 : factorial(20));
15 | end-proc;
16 |
17 | dcl-proc test_fibonacci export;
18 | dcl-pi *n extproc(*dclcase) end-pi;
19 |
20 | iEqual(0 : fibonacci(0));
21 | iEqual(1 : fibonacci(1));
22 | iEqual(1 : fibonacci(2));
23 | iEqual(5 : fibonacci(5));
24 | iEqual(55 : fibonacci(10));
25 | iEqual(832040 : fibonacci(30));
26 | end-proc;
27 |
28 | dcl-proc test_oddOrEven export;
29 | dcl-pi *n extproc(*dclcase) end-pi;
30 |
31 | aEqual('Even' : oddOrEven(0));
32 | aEqual('Odd' : oddOrEven(1));
33 | aEqual('Even' : oddOrEven(2));
34 | aEqual('Odd' : oddOrEven(-3));
35 | aEqual('Even' : oddOrEven(-4));
36 | aEqual('Odd' : oddOrEven(99999));
37 | aEqual('Even' : oddOrEven(100000));
38 | end-proc;
--------------------------------------------------------------------------------
/examples/rpg-utils/qtestsrc/string.test.rpgle:
--------------------------------------------------------------------------------
1 | **free
2 |
3 | ctl-opt nomain;
4 |
5 | /include qrpgleref/string.rpgleinc
6 | /include qinclude,TESTCASE
7 |
8 | dcl-proc test_isPalindrome export;
9 | dcl-pi *n extproc(*dclcase) end-pi;
10 |
11 | nEqual(*on : isPalindrome(''));
12 | nEqual(*on : isPalindrome('AAA'));
13 | nEqual(*on : isPalindrome('123321'));
14 | nEqual(*off : isPalindrome('123123'));
15 | nEqual(*on : isPalindrome('^&**&^'));
16 | end-proc;
--------------------------------------------------------------------------------
/examples/rpg-utils/qtestsrc/testing.json:
--------------------------------------------------------------------------------
1 | {
2 | "rpgunit": {
3 | "rucrtrpg": {
4 | "tgtCcsid": 37,
5 | "dbgView": "*SOURCE",
6 | "cOption": [
7 | "*EVENTF"
8 | ],
9 | "module": [
10 | "MATH",
11 | "STRING"
12 | ]
13 | },
14 | "prefix": "T_"
15 | }
16 | }
--------------------------------------------------------------------------------
/examples/rpgunit-stmf/.vscode/testing.json:
--------------------------------------------------------------------------------
1 | {
2 | "rpgunit": {
3 | "rucrtrpg": {
4 | "text": "QTEMPLATE tests",
5 | "tgtCcsid": 37,
6 | "dbgView": "*SOURCE",
7 | "cOption": [
8 | "*EVENTF"
9 | ]
10 | },
11 | "rucrtcbl": {
12 | "text": "QTEMPLATE tests",
13 | "tgtCcsid": 37,
14 | "dbgView": "*SOURCE",
15 | "cOption": [
16 | "*EVENTF"
17 | ]
18 | },
19 | "prefix": "T_"
20 | }
21 | }
--------------------------------------------------------------------------------
/examples/rpgunit-stmf/cbl.test.cblle:
--------------------------------------------------------------------------------
1 | *=====================================================================*
2 | * Empty Unit Test Case (Cobol). *
3 | *=====================================================================*
4 | * Command to create the service program: *
5 | * RUCRTCBL TSTPGM(RPGUNIT/CBL) SRCFILE(RPGUNIT/QTEMPLATE) *
6 | *=====================================================================*
7 | * Tools/400 STRPREPRC instructions: *
8 | * >>PRE-COMPILER<< *
9 | * >>CRTCMD<< RUCRTCBL TSTPGM(&LI/&OB) + *
10 | * SRCFILE(&SL/&SF) SRCMBR(&SM); *
11 | * >>COMPILE<< *
12 | * >>PARM<< COPTION(*EVENTF); *
13 | * >>PARM<< DBGVIEW(*LIST); *
14 | * >>PARM<< BNDDIR(*N); *
15 | * >>END-COMPILE<< *
16 | * >>EXECUTE<< *
17 | * >>END-PRE-COMPILER<< *
18 | *=====================================================================*
19 | * Compile options: *
20 | * *SrcStmt - Assign SEU line numbers when compiling the *
21 | * source member. This option is required to *
22 | * position the LPEX editor to the line in error *
23 | * when the source member is opened from the *
24 | * RPGUnit view. *
25 | * *NoDebugIO - Do not generate breakpoints for input and *
26 | * output specifications. Optional but useful. *
27 | *=====================================================================*
28 |
29 | *=====================================================================*
30 | * setUpSte(), called once at the beginning of a test suite.
31 | *=====================================================================*
32 | PROCESS NOMONOPRC.
33 | IDENTIFICATION DIVISION.
34 | PROGRAM-ID. SETUPSTE.
35 | ENVIRONMENT DIVISION.
36 | CONFIGURATION SECTION.
37 | Special-Names.
38 | DATA DIVISION.
39 | WORKING-STORAGE SECTION.
40 | PROCEDURE DIVISION.
41 | SETUPSTE.
42 | GOBACK.
43 | END PROGRAM SETUPSTE.
44 |
45 | *=====================================================================*
46 | * setUp(), called at the beginning of each test case.
47 | *=====================================================================*
48 | PROCESS NOMONOPRC.
49 | IDENTIFICATION DIVISION.
50 | PROGRAM-ID. SETUP.
51 | ENVIRONMENT DIVISION.
52 | CONFIGURATION SECTION.
53 | Special-Names.
54 | DATA DIVISION.
55 | WORKING-STORAGE SECTION.
56 | PROCEDURE DIVISION.
57 | SETUP.
58 | GOBACK.
59 | END PROGRAM SETUP.
60 |
61 | *=====================================================================*
62 | * tearDwn(), called at the end of each test case.
63 | *=====================================================================*
64 | PROCESS NOMONOPRC.
65 | IDENTIFICATION DIVISION.
66 | PROGRAM-ID. TEARDWN.
67 | ENVIRONMENT DIVISION.
68 | CONFIGURATION SECTION.
69 | Special-Names.
70 | DATA DIVISION.
71 | WORKING-STORAGE SECTION.
72 | PROCEDURE DIVISION.
73 | TEARDWN.
74 | GOBACK.
75 | END PROGRAM TEARDWN.
76 |
77 | *=====================================================================*
78 | * tearDwnSte(), called once at the end of a test suite.
79 | *=====================================================================*
80 | PROCESS NOMONOPRC.
81 | IDENTIFICATION DIVISION.
82 | PROGRAM-ID. TEARDWNSTE.
83 | ENVIRONMENT DIVISION.
84 | CONFIGURATION SECTION.
85 | Special-Names.
86 | DATA DIVISION.
87 | WORKING-STORAGE SECTION.
88 | PROCEDURE DIVISION.
89 | TEARDWNSTE.
90 | GOBACK.
91 | END PROGRAM TEARDWNSTE.
92 |
93 | *=====================================================================*
94 | * Using assert() with 1 parameter.
95 | *=====================================================================*
96 | PROCESS NOMONOPRC.
97 | IDENTIFICATION DIVISION.
98 | PROGRAM-ID. TEST_TRUE.
99 | ENVIRONMENT DIVISION.
100 | CONFIGURATION SECTION.
101 | Special-Names.
102 | Copy CBLLINKAGE Of QINCLUDE.
103 | * Terminate Special-Names section , because CBLLINKAGE
104 | * does not do that.
105 | .
106 | DATA DIVISION.
107 | WORKING-STORAGE SECTION.
108 | 01 bResult Pic x(1).
109 | 88 bFalse Value "0".
110 | 88 bTrue Value "1".
111 | 01 x Pic x(1) Value "1".
112 | PROCEDURE DIVISION.
113 | TEST_TRUE.
114 |
115 | * -----------------------------------------
116 | * Compute a boolean result. Then ...
117 | *------------------------------------------
118 |
119 | If x = "1"
120 | Move "1" To bResult
121 | Else
122 | Move "0" To bResult.
123 |
124 | Call "assert" Using By Content bResult.
125 |
126 | * -----------------------------------------
127 | * ... or ...
128 | *------------------------------------------
129 |
130 | if x = "1"
131 | Call "assert" Using By Content B"1"
132 | else
133 | Call "assert" Using By Content B"0".
134 |
135 | GOBACK.
136 |
137 | END PROGRAM TEST_TRUE.
138 |
139 | *=====================================================================*
140 | * Using assert() with 2 parameters.
141 | *=====================================================================*
142 | PROCESS NOMONOPRC.
143 | IDENTIFICATION DIVISION.
144 | PROGRAM-ID. TEST_FALSE.
145 | ENVIRONMENT DIVISION.
146 | CONFIGURATION SECTION.
147 | Special-Names.
148 | Copy CBLLINKAGE Of QINCLUDE.
149 | * Terminate Special-Names section , because CBLLINKAGE
150 | * does not do that.
151 | .
152 | DATA DIVISION.
153 | WORKING-STORAGE SECTION.
154 | 01 bResult Pic x(1).
155 | 88 bFalse Value "0".
156 | 88 bTrue Value "1".
157 | 01 x Pic x(1) Value "2".
158 | PROCEDURE DIVISION.
159 | TEST_FALSE.
160 |
161 | * -----------------------------------------
162 | * Compute a boolean result. Then ...
163 | *------------------------------------------
164 |
165 | If x = "1"
166 | Move "1" To bResult
167 | Else
168 | Move "0" To bResult.
169 |
170 | Call "assert" Using By Content bResult
171 | By Content "x must be set to '1'.".
172 |
173 | * -----------------------------------------
174 | * ... or ...
175 | *------------------------------------------
176 |
177 | if x = "1"
178 | Call "assert" Using By Content B"1"
179 | By Content "x must be set to '1'."
180 | else
181 | Call "assert" Using By Content B"0"
182 | By Content "x must be set to '1'.".
183 |
184 | GOBACK.
185 |
186 | END PROGRAM TEST_FALSE.
187 |
188 | *=====================================================================*
189 | * Using fail() with 0 parameters.
190 | *=====================================================================*
191 | PROCESS NOMONOPRC.
192 | IDENTIFICATION DIVISION.
193 | PROGRAM-ID. TEST_FAIL0.
194 | ENVIRONMENT DIVISION.
195 | CONFIGURATION SECTION.
196 | Special-Names.
197 | Copy CBLLINKAGE Of QINCLUDE.
198 | * Terminate Special-Names section , because CBLLINKAGE
199 | * does not do that.
200 | .
201 | DATA DIVISION.
202 | WORKING-STORAGE SECTION.
203 | PROCEDURE DIVISION.
204 | TEST_FAIL0.
205 |
206 | * -----------------------------------------
207 | * Use fail() in case you want to make
208 | * the unit test fail.
209 | *------------------------------------------
210 |
211 | Call "fail".
212 |
213 | GOBACK.
214 |
215 | END PROGRAM TEST_FAIL0.
216 |
217 | *=====================================================================*
218 | * Using fail() with 1 parameter.
219 | *=====================================================================*
220 | PROCESS NOMONOPRC.
221 | IDENTIFICATION DIVISION.
222 | PROGRAM-ID. TEST_FAIL1.
223 | ENVIRONMENT DIVISION.
224 | CONFIGURATION SECTION.
225 | Special-Names.
226 | Copy CBLLINKAGE Of QINCLUDE.
227 | * Terminate Special-Names section , because CBLLINKAGE
228 | * does not do that.
229 | .
230 | DATA DIVISION.
231 | WORKING-STORAGE SECTION.
232 | PROCEDURE DIVISION.
233 | TEST_FAIL1.
234 |
235 | * -----------------------------------------
236 | * Use fail() in case you want to make
237 | * the unit test fail.
238 | *------------------------------------------
239 |
240 | Call "fail" Using By Content "Let the unit test fail.".
241 |
242 | GOBACK.
243 |
244 | END PROGRAM TEST_FAIL1.
--------------------------------------------------------------------------------
/examples/rpgunit-stmf/rpgsqlstmf.test.sqlrpgle:
--------------------------------------------------------------------------------
1 | **free
2 | //=====================================================================
3 | // Empty Unit Test Case. Prints a protocol of the execution flow.
4 | //=====================================================================
5 | // Command to create the service program:
6 | // cd /home/raddatz
7 | // git clone git@github.com:tools-400/irpgunit.git
8 | // CD DIR('/home/Raddatz/iRpgUnit/host')
9 | // RUCRTRPG TSTPGM(RPGUNIT/RPGSTMF) SRCFILE(RPGUNIT/QSRC)
10 | // SRCSTMF('./iRPGUnit/QTEMPLATE/rpgsqlstmf.sqlrpgle')
11 | // DEFINE('stmf') RPGPPOPT(*LVL2)
12 | // INCDIR('./iRPGUnit')
13 | // TGTCCSID(*JOB)
14 | //=====================================================================
15 | // Tools/400 STRPREPRC instructions:
16 | // >>PRE-COMPILER<<
17 | // >>CRTCMD<< RUCRTRPG TSTPGM(RPGUNIT/RPGSTMF) +
18 | // SRCSTMF('./iRPGUnit/QTEMPLATE/rpgsqlstmf.sqlrpgle');
19 | // >>COMPILE<<
20 | // >>PARM<< DEFINE('stmf');
21 | // >>PARM<< RPGPPOPT(*LVL2);
22 | // >>PARM<< INCDIR('./iRPGUnit');
23 | // >>PARM<< TGTCCSID(*JOB);
24 | // >>END-COMPILE<<
25 | // >>EXECUTE<<
26 | // >>END-PRE-COMPILER<<
27 | //=====================================================================
28 | // Compile options:
29 | // *SrcStmt - Assign SEU line numbers when compiling the
30 | // source member. This option is required to
31 | // position the LPEX editor to the line in error
32 | // when the source member is opened from the
33 | // RPGUnit view.
34 | // *NoDebugIO - Do not generate breakpoints for input and
35 | // output specifications. Optional but useful.
36 | //=====================================================================
37 | ctl-opt NoMain Option(*SrcStmt : *NoDebugIO);
38 | dcl-f QSYSPRT printer(80) oflind(*in70) usropn;
39 |
40 | /include qinclude,TESTCASE iRPGUnit Test Suite
41 | /include qinclude,SDS Program status data structure
42 |
43 | // ------------------------------------------------------------
44 | // SQL Options.
45 | // ------------------------------------------------------------
46 |
47 | exec sql
48 | set option commit = *none;
49 |
50 | // ------------------------------------------------------------
51 | // Global type templates.
52 | // ------------------------------------------------------------
53 |
54 | dcl-ds sql_status_t qualified template;
55 | ignSQLWarn ind inz(*off);
56 | end-ds;
57 |
58 | // ------------------------------------------------------------
59 | // Global Program Status.
60 | // ------------------------------------------------------------
61 |
62 | dcl-ds g_status qualified;
63 | srcSeq int(10);
64 | srcSeq2 int(10);
65 | sql likeds(sql_status_t) inz(*likeds);
66 | end-ds;
67 |
68 | // ============================================================
69 | // Opens the printer.
70 | // ============================================================
71 | dcl-proc openPrinter;
72 | dcl-pi *n extproc(*dclcase) end-pi;
73 |
74 | open QSYSPRT;
75 |
76 | end-proc;
77 |
78 | // ============================================================
79 | // Prints a message.
80 | // ============================================================
81 | dcl-proc print;
82 | dcl-pi *n extproc(*dclcase);
83 | text varchar(128) value options(*nopass);
84 | end-pi;
85 |
86 | dcl-ds lineOutput len(80);
87 | end-ds;
88 |
89 | if (%parms() >= 1);
90 | lineOutput = text;
91 | else;
92 | lineOutput = '';
93 | endif;
94 | write QSYSPRT lineOutput;
95 |
96 | end-proc;
97 |
98 | // ============================================================
99 | // Closes the printer.
100 | // ============================================================
101 | dcl-proc closePrinter;
102 | dcl-pi *n extproc(*dclcase) end-pi;
103 |
104 | if (%open(QSYSPRT));
105 | close QSYSPRT;
106 | endif;
107 |
108 | end-proc;
109 |
110 | // ------------------------------------------------------------
111 | // Specifies whether SQL warnings are ignored when
112 | // calling isSQLError().
113 | // ------------------------------------------------------------
114 | dcl-proc setIgnSQLWarn;
115 | dcl-pi *n extproc(*dclcase);
116 | i_ignore ind const;
117 | end-pi;
118 |
119 | g_status.sql.ignSQLWarn = i_ignore;
120 |
121 | end-proc;
122 |
123 | // ------------------------------------------------------------
124 | // Returns *on, when the last SQL statement ended with an
125 | // error, else *off;
126 | // ------------------------------------------------------------
127 | dcl-proc isSQLError;
128 | dcl-pi *n ind extproc(*dclcase);
129 | i_state char(5) const;
130 | end-pi;
131 |
132 | dcl-ds sqlState qualified;
133 | class char(2);
134 | qualifier char(3);
135 | end-ds;
136 |
137 | dcl-ds sql likeds(sql_status_t);
138 |
139 | sqlState = i_state;
140 | sql = g_status.sql;
141 |
142 | reset g_status.sql;
143 |
144 | select;
145 | // SQL code 00: Unqualified Successful Completion
146 | when (sqlState = '00000');
147 | // Execution of the operation was successful and did not
148 | // result in any type of warning or exception condition.
149 | return *off;
150 |
151 | // SQL code 01: Warning
152 | When (sqlState.class = '01');
153 | // Valid warning SQLSTATEs returned by an SQL routine.
154 | // Also used for RAISE_ERROR and SIGNAL.
155 | if (sql.ignSQLWarn);
156 | return *off;
157 | else;
158 | return *on;
159 | endif;
160 |
161 | // SQL code 02: No data
162 | When (sqlState = '02000');
163 | return *off;
164 |
165 | other;
166 | // Other problem or error
167 | return *on;
168 | endsl;
169 |
170 | end-proc;
171 |
172 | // ============================================================
173 | // Set up test suite. Executed once per RUCALLTST.
174 | // ============================================================
175 | dcl-proc setUpSuite export;
176 | dcl-pi *n extproc(*dclcase) end-pi;
177 |
178 | dcl-s rc char(1);
179 |
180 | runCmd('OVRPRTF FILE(QSYSPRT) TOFILE(*FILE) +
181 | SPLFNAME(PROC_FLOW) OVRSCOPE(*JOB)');
182 | monitor;
183 | openPrinter();
184 | print('Executing: setUpSuite()');
185 | on-error;
186 | // ignore errors ...
187 | endmon;
188 |
189 | // ... but try to remove the override.
190 | monitor;
191 | runCmd('DLTOVR FILE(QSYSPRT) LVL(*JOB)');
192 | on-error;
193 | dsply '*** Failed to delete QSYSPRT override! ***' rc;
194 | endmon;
195 |
196 | end-proc;
197 |
198 | // ============================================================
199 | // Tear down test suite.
200 | // ============================================================
201 | dcl-proc tearDownSuite export;
202 | dcl-pi *n extproc(*dclcase) end-pi;
203 |
204 | print('Executing: tearDownSuite()');
205 | closePrinter();
206 |
207 | end-proc;
208 |
209 | // ============================================================
210 | // Set up test case.
211 | // ============================================================
212 | dcl-proc setUp export;
213 | dcl-pi *n extproc(*dclcase) end-pi;
214 |
215 | print('Executing: - setUp()');
216 |
217 | end-proc;
218 |
219 | // ============================================================
220 | // Tear down test case.
221 | // ============================================================
222 | dcl-proc tearDown export;
223 | dcl-pi *n extproc(*dclcase) end-pi;
224 |
225 | print('Executing: - tearDown()');
226 |
227 | end-proc;
228 |
229 | // ============================================================
230 | // RPGUnit test case.
231 | // ============================================================
232 | dcl-proc testWhatever_1 export;
233 | dcl-pi *n extproc(*dclcase) end-pi;
234 |
235 | dcl-s count int(10);
236 |
237 | print('Executing: * testWhatever_1()');
238 |
239 | // Run
240 | exec sql
241 | SELECT COUNT(DAY_NUM) INTO :count
242 | FROM ( VALUES (1, 'Monday'),
243 | (2, 'Tuesday'),
244 | (3, 'Wednesday'),
245 | (4, 'Thursday'),
246 | (5, 'Friday'),
247 | (6, 'Saturday'),
248 | (7, 'Sunday') )
249 | AS week_days (DAY_NUM, DAY_NAME);
250 |
251 | // Place your assertions here.
252 | assert(not isSqlError(sqlState));
253 |
254 | assert(count = 7: 'A week must have 7 days');
255 |
256 | end-proc;
257 |
258 | // ============================================================
259 | // RPGUnit test case.
260 | // ============================================================
261 | dcl-proc testWhatever_2 export;
262 | dcl-pi *n extproc(*dclcase) end-pi;
263 |
264 | dcl-s count int(10);
265 |
266 | print('Executing: * testWhatever_2()');
267 |
268 | // Run
269 | exec sql
270 | SELECT COUNT(DAY_NUM) INTO :count
271 | FROM ( VALUES (1, 'Monday'),
272 | (2, 'Tuesday'),
273 | (3, 'Wednesday'),
274 | (4, 'Thursday'),
275 | (5, 'Friday'),
276 | (6, 'Saturday'),
277 | (7, 'Sunday') )
278 | AS week_days (DAY_NUM, DAY_NAME)
279 | WHERE DAY_NUM < 1 OR DAY_NUM > 7;
280 |
281 | // Place your assertions here.
282 | assert(not isSqlError(sqlState));
283 |
284 | assert(count = 0: 'Ordinal numbers of days must be between 1 and 7');
285 |
286 | end-proc;
--------------------------------------------------------------------------------
/examples/rpgunit-stmf/rpgstmf.test.rpgle:
--------------------------------------------------------------------------------
1 | **free
2 | //=====================================================================
3 | // Empty Unit Test Case. Prints a protocol of the execution flow.
4 | //=====================================================================
5 | // Command to create the service program:
6 | // cd /home/raddatz
7 | // git clone git@github.com:tools-400/irpgunit.git
8 | // CD DIR('/home/Raddatz/iRpgUnit/host')
9 | // RUCRTRPG TSTPGM(RPGUNIT/RPGSTMF) SRCFILE(RPGUNIT/QSRC)
10 | // SRCSTMF('./iRPGUnit/QTEMPLATE/rpgstmf.rpgle')
11 | // DEFINE('stmf') RPGPPOPT(*LVL2)
12 | // INCDIR('./iRPGUnit')
13 | // TGTCCSID(*JOB)
14 | //=====================================================================
15 | // Tools/400 STRPREPRC instructions:
16 | // >>PRE-COMPILER<<
17 | // >>CRTCMD<< RUCRTRPG TSTPGM(RPGUNIT/RPGSTMF) +
18 | // SRCSTMF('./iRPGUnit/QTEMPLATE/rpgstmf.rpgle');
19 | // >>COMPILE<<
20 | // >>PARM<< DEFINE('stmf');
21 | // >>PARM<< RPGPPOPT(*LVL2);
22 | // >>PARM<< INCDIR('./iRPGUnit');
23 | // >>PARM<< TGTCCSID(*JOB);
24 | // >>END-COMPILE<<
25 | // >>EXECUTE<<
26 | // >>END-PRE-COMPILER<<
27 | //=====================================================================
28 | // Compile options:
29 | // *SrcStmt - Assign SEU line numbers when compiling the
30 | // source member. This option is required to
31 | // position the LPEX editor to the line in error
32 | // when the source member is opened from the
33 | // RPGUnit view.
34 | // *NoDebugIO - Do not generate breakpoints for input and
35 | // output specifications. Optional but useful.
36 | //=====================================================================
37 | ctl-opt NoMain Option(*SrcStmt : *NoDebugIO);
38 | dcl-f QSYSPRT printer(80) oflind(*in70) usropn;
39 |
40 | /include qinclude,TESTCASE iRPGUnit Test Suite
41 | /include qinclude,SDS Program status data structure
42 |
43 | // ------------------------------------------------------------
44 | // Global type templates.
45 | // ------------------------------------------------------------
46 |
47 | dcl-ds sql_status_t qualified template;
48 | ignSQLWarn ind inz(*off);
49 | end-ds;
50 |
51 | // ------------------------------------------------------------
52 | // Global Program Status.
53 | // ------------------------------------------------------------
54 |
55 | dcl-ds g_status qualified;
56 | srcSeq int(10);
57 | srcSeq2 int(10);
58 | sql likeds(sql_status_t) inz(*likeds);
59 | end-ds;
60 |
61 | // ============================================================
62 | // Opens the printer.
63 | // ============================================================
64 | dcl-proc openPrinter;
65 | dcl-pi *n extproc(*dclcase) end-pi;
66 |
67 | open QSYSPRT;
68 |
69 | end-proc;
70 |
71 | // ============================================================
72 | // Prints a message.
73 | // ============================================================
74 | dcl-proc print;
75 | dcl-pi *n extproc(*dclcase);
76 | text varchar(128) value options(*nopass);
77 | end-pi;
78 |
79 | dcl-ds lineOutput len(80);
80 | end-ds;
81 |
82 | if (%parms() >= 1);
83 | lineOutput = text;
84 | else;
85 | lineOutput = '';
86 | endif;
87 | write QSYSPRT lineOutput;
88 |
89 | end-proc;
90 |
91 | // ============================================================
92 | // Closes the printer.
93 | // ============================================================
94 | dcl-proc closePrinter;
95 | dcl-pi *n extproc(*dclcase) end-pi;
96 |
97 | if (%open(QSYSPRT));
98 | close QSYSPRT;
99 | endif;
100 |
101 | end-proc;
102 |
103 | // ------------------------------------------------------------
104 | // Specifies whether SQL warnings are ignored when
105 | // calling isSQLError().
106 | // ------------------------------------------------------------
107 | dcl-proc setIgnSQLWarn;
108 | dcl-pi *n extproc(*dclcase);
109 | i_ignore ind const;
110 | end-pi;
111 |
112 | g_status.sql.ignSQLWarn = i_ignore;
113 |
114 | end-proc;
115 |
116 | // ------------------------------------------------------------
117 | // Returns *on, when the last SQL statement ended with an
118 | // error, else *off;
119 | // ------------------------------------------------------------
120 | dcl-proc isSQLError;
121 | dcl-pi *n ind extproc(*dclcase);
122 | i_state char(5) const;
123 | end-pi;
124 |
125 | dcl-ds sqlState qualified;
126 | class char(2);
127 | qualifier char(3);
128 | end-ds;
129 |
130 | dcl-ds sql likeds(sql_status_t);
131 |
132 | sqlState = i_state;
133 | sql = g_status.sql;
134 |
135 | reset g_status.sql;
136 |
137 | select;
138 | // SQL code 00: Unqualified Successful Completion
139 | when (sqlState = '00000');
140 | // Execution of the operation was successful and did not
141 | // result in any type of warning or exception condition.
142 | return *off;
143 |
144 | // SQL code 01: Warning
145 | When (sqlState.class = '01');
146 | // Valid warning SQLSTATEs returned by an SQL routine.
147 | // Also used for RAISE_ERROR and SIGNAL.
148 | if (sql.ignSQLWarn);
149 | return *off;
150 | else;
151 | return *on;
152 | endif;
153 |
154 | // SQL code 02: No data
155 | When (sqlState = '02000');
156 | return *off;
157 |
158 | other;
159 | // Other problem or error
160 | return *on;
161 | endsl;
162 |
163 | end-proc;
164 |
165 | // ============================================================
166 | // Set up test suite. Executed once per RUCALLTST.
167 | // ============================================================
168 | dcl-proc setUpSuite export;
169 | dcl-pi *n extproc(*dclcase) end-pi;
170 |
171 | dcl-s rc char(1);
172 |
173 | runCmd('OVRPRTF FILE(QSYSPRT) TOFILE(*FILE) +
174 | SPLFNAME(PROC_FLOW) OVRSCOPE(*JOB)');
175 | monitor;
176 | openPrinter();
177 | print('Executing: setUpSuite()');
178 | on-error;
179 | // ignore errors ...
180 | endmon;
181 |
182 | // ... but try to remove the override.
183 | monitor;
184 | runCmd('DLTOVR FILE(QSYSPRT) LVL(*JOB)');
185 | on-error;
186 | dsply '*** Failed to delete QSYSPRT override! ***' rc;
187 | endmon;
188 |
189 | end-proc;
190 |
191 | // ============================================================
192 | // Tear down test suite.
193 | // ============================================================
194 | dcl-proc tearDownSuite export;
195 | dcl-pi *n extproc(*dclcase) end-pi;
196 |
197 | print('Executing: tearDownSuite()');
198 | closePrinter();
199 |
200 | end-proc;
201 |
202 | // ============================================================
203 | // Set up test case.
204 | // ============================================================
205 | dcl-proc setUp export;
206 | dcl-pi *n extproc(*dclcase) end-pi;
207 |
208 | print('Executing: - setUp()');
209 |
210 | end-proc;
211 |
212 | // ============================================================
213 | // Tear down test case.
214 | // ============================================================
215 | dcl-proc tearDown export;
216 | dcl-pi *n extproc(*dclcase) end-pi;
217 |
218 | print('Executing: - tearDown()');
219 |
220 | end-proc;
221 |
222 | // ============================================================
223 | // RPGUnit test case.
224 | // ============================================================
225 | dcl-proc testWhatever_1 export;
226 | dcl-pi *n extproc(*dclcase) end-pi;
227 |
228 | print('Executing: * testWhatever_1()');
229 |
230 | // Run
231 | assert(sds.pgmName = 'TEMPLATE': 'Name of the test suite should be ''TEMPLATE''');
232 |
233 | // Place your assertions here.
234 |
235 | end-proc;
236 |
237 | // ============================================================
238 | // RPGUnit test case.
239 | // ============================================================
240 | dcl-proc testWhatever_2 export;
241 | dcl-pi *n extproc(*dclcase) end-pi;
242 |
243 | print('Executing: * testWhatever_2()');
244 |
245 | // Run
246 |
247 | // Place your assertions here.
248 | assert(sds.excData = '': 'There should be no exception data in SDS');
249 |
250 | end-proc;
--------------------------------------------------------------------------------
/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/vscode-ibmi-testing/dc73a2c6d29d545788595969df4e4b9adc154207/icon.png
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "vscode-ibmi-testing",
3 | "displayName": "IBM i Testing",
4 | "description": "IBM i Testing extension for VS Code",
5 | "icon": "icon.png",
6 | "author": {
7 | "name": "IBM",
8 | "url": "https://github.com/IBM"
9 | },
10 | "publisher": "IBM",
11 | "license": "Apache-2.0",
12 | "version": "1.0.0",
13 | "repository": {
14 | "url": "https://github.com/IBM/vscode-ibmi-testing"
15 | },
16 | "homepage": "https://github.com/IBM/vscode-ibmi-testing/blob/main/README.md",
17 | "bugs": {
18 | "url": "https://github.com/IBM/vscode-ibmi-testing/issues"
19 | },
20 | "keywords": [
21 | "ibmi",
22 | "iseries",
23 | "as400",
24 | "test",
25 | "irpgunit"
26 | ],
27 | "engines": {
28 | "vscode": "^1.96.0"
29 | },
30 | "categories": [
31 | "Testing"
32 | ],
33 | "activationEvents": [
34 | "onStartupFinished"
35 | ],
36 | "extensionDependencies": [
37 | "halcyontechltd.code-for-ibmi",
38 | "halcyontechltd.vscode-rpgle"
39 | ],
40 | "main": "./dist/extension.js",
41 | "contributes": {
42 | "jsonValidation": [
43 | {
44 | "fileMatch": [
45 | "testing.json",
46 | "TESTING.JSON"
47 | ],
48 | "url": "./schemas/testing.json"
49 | }
50 | ],
51 | "configuration": [
52 | {
53 | "title": "IBM i Testing",
54 | "properties": {
55 | "IBM i Testing.productLibrary": {
56 | "order": 0,
57 | "type": "string",
58 | "default": "RPGUNIT",
59 | "markdownDescription": "Specifies the name of the RPGUnit product library on the host."
60 | },
61 | "IBM i Testing.testSourceFiles": {
62 | "order": 1,
63 | "type": "array",
64 | "default": [
65 | "QTESTSRC"
66 | ],
67 | "items": {
68 | "type": "string",
69 | "description": "The name of a source file containing tests."
70 | },
71 | "markdownDescription": "A set of source files to search for tests in the library list."
72 | },
73 | "IBM i Testing.libraryListValidation": {
74 | "order": 2,
75 | "type": "object",
76 | "default": {
77 | "RPGUNIT": true,
78 | "QDEVTOOLS": true
79 | },
80 | "properties": {
81 | "RPGUNIT": {
82 | "type": "boolean",
83 | "default": true
84 | },
85 | "QDEVTOOLS": {
86 | "type": "boolean",
87 | "default": true
88 | }
89 | },
90 | "additionalProperties": false,
91 | "markdownDescription": "Controls whether checks are performed to ensure the required library is found on the library list before executing any tests. If not found, a notification is displayed. For RPGUNIT, note that the specific library which is checked for can be configured using the `IBM i Testing.productLibrary` setting."
92 | },
93 | "IBM i Testing.runOrder": {
94 | "order": 3,
95 | "type": "string",
96 | "default": "*API",
97 | "enum": [
98 | "*API",
99 | "*REVERSE"
100 | ],
101 | "enumDescriptions": [
102 | "Run test procedures in the order provided by the system API.",
103 | "Run test procedures in reverse order."
104 | ],
105 | "markdownDescription": "Specifies the order for running the test procedures. Useful to check that there is no dependencies between test procedures."
106 | },
107 | "IBM i Testing.libraryList": {
108 | "order": 4,
109 | "type": "string",
110 | "default": "*CURRENT",
111 | "enum": [
112 | "*CURRENT",
113 | "*JOBD"
114 | ],
115 | "enumDescriptions": [
116 | "The current library list is used to execute the unit test suite.",
117 | "The initial library list part of the specified job description is used to execute the unit test suite."
118 | ],
119 | "markdownDescription": "Specifies the library list for executing the specified unit test."
120 | },
121 | "IBM i Testing.jobDescription": {
122 | "order": 5,
123 | "type": "string",
124 | "default": "*DFT",
125 | "markdownDescription": "Specifies the name of the job description that is used to set the library list, when the `IBM i Testing: Library List` setting is set to `*JOBD`. `*DFT` can be used here to indicate the library of the unit test suite (service program) is searched for job description `RPGUNIT`."
126 | },
127 | "IBM i Testing.jobDescriptionLibrary": {
128 | "order": 6,
129 | "type": "string",
130 | "default": "",
131 | "markdownDescription": "Specifies the library that is searched for the job description. `*LIBL` can be used here to indicate all libraries in the user and system portions of the job's library list are searched until the first match is found."
132 | },
133 | "IBM i Testing.reportDetail": {
134 | "order": 7,
135 | "type": "string",
136 | "default": "*BASIC",
137 | "enum": [
138 | "*BASIC",
139 | "*ALL"
140 | ],
141 | "enumDescriptions": [
142 | "Full detail for failures and errors, no detail for successes.",
143 | "Full detail in all cases."
144 | ],
145 | "markdownDescription": "Specifies how detailed the test run report should be."
146 | },
147 | "IBM i Testing.createReport": {
148 | "order": 8,
149 | "type": "string",
150 | "default": "*ALLWAYS",
151 | "enum": [
152 | "*ALLWAYS",
153 | "*ERROR",
154 | "*NONE"
155 | ],
156 | "enumDescriptions": [
157 | "Creates a report regardless of the test result.",
158 | "Creates a report in case of failures and errors, only.",
159 | "Does not create any report."
160 | ],
161 | "markdownDescription": "Specifies whether a report is created."
162 | },
163 | "IBM i Testing.reclaimResources": {
164 | "order": 9,
165 | "type": "string",
166 | "default": "*NO",
167 | "enum": [
168 | "*NO",
169 | "*ALLWAYS",
170 | "*ONCE"
171 | ],
172 | "enumDescriptions": [
173 | "Resources are not reclaimed.",
174 | "Resources are reclaimed after each test case and at the end of the test suite.",
175 | "Resources are reclaimed at the end of the test suite."
176 | ],
177 | "markdownDescription": "Specifies when to reclaim resources. Resources, such as open files, can be reclaimed after each test case or at the end of the test suite. This option is useful if the test suite calls OPM programs, which do not set the `*INLR` indicator."
178 | }
179 | }
180 | }
181 | ],
182 | "snippets": [
183 | {
184 | "language": "rpgle",
185 | "path": "./snippets/rpgle.json"
186 | }
187 | ]
188 | },
189 | "scripts": {
190 | "vscode:prepublish": "npm run package",
191 | "compile": "webpack",
192 | "watch": "webpack --watch",
193 | "package": "webpack --mode production --devtool hidden-source-map",
194 | "pretest": "npm run compile && npm run lint",
195 | "lint": "eslint src --ext ts"
196 | },
197 | "dependencies": {
198 | "ansi-colors": "^4.1.3",
199 | "compare-versions": "^6.1.1",
200 | "lodash": "^4.17.21",
201 | "node-fetch": "^3.3.2",
202 | "octokit": "^4.1.2",
203 | "tmp": "^0.2.3",
204 | "unzipper": "^0.11.6",
205 | "xml2js": "^0.6.2"
206 | },
207 | "devDependencies": {
208 | "@halcyontech/vscode-ibmi-types": "file:C:\\Users\\SanjulaGanepola\\Git\\forks\\vscode-ibmi\\types",
209 | "@types/lodash": "^4.17.16",
210 | "@types/mocha": "^10.0.7",
211 | "@types/node": "20.x",
212 | "@types/tmp": "^0.2.6",
213 | "@types/unzipper": "^0.10.11",
214 | "@types/vscode": "^1.96.0",
215 | "@types/xml2js": "^0.4.14",
216 | "@typescript-eslint/eslint-plugin": "^7.14.1",
217 | "@typescript-eslint/parser": "^7.11.0",
218 | "eslint": "^8.57.0",
219 | "ts-loader": "^9.5.1",
220 | "typescript": "^5.4.5",
221 | "webpack": "^5.92.1",
222 | "webpack-cli": "^5.1.4"
223 | }
224 | }
--------------------------------------------------------------------------------
/schemas/testing.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://json-schema.org/draft/2020-12/schema",
3 | "type": "object",
4 | "properties": {
5 | "rpgunit": {
6 | "type": "object",
7 | "markdownDescription": "RPGUnit Test Configuration",
8 | "properties": {
9 | "rucrtrpg": {
10 | "type": "object",
11 | "markdownDescription": "The Create RPG Test Suite (`RUCRTRPG`) command compiles RPGLE and SQLRPGLE source members containing test procedures into a iRPGUnit test suite service program.",
12 | "properties": {
13 | "text": {
14 | "type": "string",
15 | "default": "",
16 | "markdownDescription": "Text description\n\nAllows you to enter text that briefly describes the RPGUnit test case and its function."
17 | },
18 | "cOption": {
19 | "type": "array",
20 | "default": [
21 | "*EVENTF"
22 | ],
23 | "items": {
24 | "type": "string",
25 | "default": "*EVENTF",
26 | "enum": [
27 | "*XREF",
28 | "*NOXREF",
29 | "*SECLVL",
30 | "*NOSECLVL",
31 | "*SHOWCPY",
32 | "*NOSHOWCPY",
33 | "*EXPDDS",
34 | "*NOEXPDDS",
35 | "*EXT",
36 | "*NOEXT",
37 | "*NOSHOWSKP",
38 | "*SHOWSKP",
39 | "*NOSRCSTMT",
40 | "*SRCSTMT",
41 | "*DEBUGIO",
42 | "*NODEBUGIO",
43 | "*UNREF",
44 | "*NOUNREF",
45 | "*NOEVENTF",
46 | "*EVENTF"
47 | ]
48 | },
49 | "markdownDescription": "Compile options\n\nRefer to the `OPTION` parameter in `CRTRPGMOD` command help."
50 | },
51 | "dbgView": {
52 | "type": "string",
53 | "default": "*SOURCE",
54 | "enum": [
55 | "*STMT",
56 | "*SOURCE",
57 | "*LIST",
58 | "*COPY",
59 | "*ALL",
60 | "*NONE"
61 | ],
62 | "markdownDescription": "Debugging views\n\nRefer to the `DBGVIEW` parameter in `CRTRPGMOD` command help."
63 | },
64 | "bndSrvPgm": {
65 | "type": "array",
66 | "default": [],
67 | "items": {
68 | "type": "string",
69 | "default": ""
70 | },
71 | "markdownDescription": "Bind service program\n\nRefer to the `BNDSRVPGM` parameter in `CRTSRVPGM` command help."
72 | },
73 | "bndDir": {
74 | "type": "array",
75 | "default": [],
76 | "items": {
77 | "type": "string",
78 | "default": ""
79 | },
80 | "markdownDescription": "Binding directory\n\nRefer to the `BNDDIR` parameter in `CRTSRVPGM` command help."
81 | },
82 | "bOption": {
83 | "type": "string",
84 | "default": "",
85 | "enum": [
86 | "*DUPPROC",
87 | "*DUPVAR"
88 | ],
89 | "markdownDescription": "Binding options\n\nRefer to the `OPTION` parameter in `CRTSRVPGM` command help."
90 | },
91 | "define": {
92 | "type": "array",
93 | "default": [],
94 | "items": {
95 | "type": "string",
96 | "default": ""
97 | },
98 | "markdownDescription": "Define condition names\n\nSpecifies condition names that are defined before the compilation begins. Using the parameter `DEFINE(condition-name)` is equivalent to coding the `/DEFINE` condition-name directive on the first line of the source file."
99 | },
100 | "dltSplf": {
101 | "type": "string",
102 | "default": "",
103 | "enum": [
104 | "*YES",
105 | "*NO"
106 | ],
107 | "markdownDescription": "Delete spooled files\n\nSpecifies if all spooled files (compile listing) created by the object creations commands are deleted on success."
108 | },
109 | "actGrp": {
110 | "type": "string",
111 | "default": "",
112 | "markdownDescription": "Activation group\n\nRefer to the `ACTGRP` parameter in `CRTSRVPGM` command help."
113 | },
114 | "module": {
115 | "type": "array",
116 | "default": [],
117 | "items": {
118 | "type": "string",
119 | "default": ""
120 | },
121 | "markdownDescription": "Module\n\nRefer to the `MODULE` parameter in `CRTSRVPGM` command help."
122 | },
123 | "rpgPpOpt": {
124 | "type": "string",
125 | "default": "",
126 | "enum": [
127 | "*DEFAULT",
128 | "*NONE",
129 | "*LVL1",
130 | "*LVL2"
131 | ],
132 | "markdownDescription": "Pre-compiler RPGPPOPT\n\nRefer to the `RPGPPOPT` parameter in `CRTSRVPGM` command."
133 | },
134 | "pOption": {
135 | "type": "array",
136 | "default": [],
137 | "items": {
138 | "type": "string",
139 | "default": "",
140 | "enum": [
141 | "*XREF",
142 | "*NOXREF",
143 | "*COMMA",
144 | "*PERIOD",
145 | "*JOB",
146 | "*SYSVAL",
147 | "*SECLVL",
148 | "*NOSECLVL",
149 | "*SEQSRC",
150 | "*NOSEQSRC",
151 | "*CVTDAT",
152 | "*NOCVTDAT",
153 | "*SQL",
154 | "*SYS",
155 | "*OPTLOB",
156 | "*NOOPTLOB",
157 | "*NOEXTIND",
158 | "*EXTIND",
159 | "*EVENTF",
160 | "*NOEVENTF"
161 | ]
162 | },
163 | "markdownDescription": "Pre-compiler OPTIONS\n\nRefer to the `OPTION` parameter in `CRTSQLRPGI` command help."
164 | },
165 | "compileOpt": {
166 | "type": "string",
167 | "default": "",
168 | "markdownDescription": "Pre-Compiler COMPILEOPT\n\nRefer to the `COMPILEOPT` parameter in `CRTSQLRPGI` command help."
169 | },
170 | "tgtRls": {
171 | "type": "string",
172 | "default": "",
173 | "markdownDescription": "Target release\n\nRefer to the `TGTRLS` parameter in `CRTSRVPGM` command help."
174 | },
175 | "incDir": {
176 | "type": "array",
177 | "default": [],
178 | "items": {
179 | "type": "string",
180 | "default": ""
181 | },
182 | "markdownDescription": "Include Directory\n\nSpecifies one or more directories to add to the search path used by the compiler to find copy files. The compiler will search the directories specified here if the copy files in the source program can not be resolved."
183 | },
184 | "tgtCcsid": {
185 | "type": "number",
186 | "default": 37,
187 | "markdownDescription": "Target CCSID\n\nSpecifies the CCSID that the compiler uses to read the source files."
188 | }
189 | }
190 | },
191 | "rucrtcbl": {
192 | "type": "object",
193 | "markdownDescription": "The Create COBOL Test Suite (`RUCRTCBL`) command compiles CBLLE and SQLCBLLE source members containing test procedures into a iRPGUnit test suite service program.",
194 | "properties": {
195 | "text": {
196 | "type": "string",
197 | "default": "",
198 | "markdownDescription": "Text description\n\nAllows you to enter text that briefly describes the RPGUnit test case and its function."
199 | },
200 | "cOption": {
201 | "type": "array",
202 | "default": [
203 | "*EVENTF"
204 | ],
205 | "items": {
206 | "type": "string",
207 | "default": "*EVENTF",
208 | "enum": [
209 | "*SOURCE",
210 | "*NOSOURCE",
211 | "*SRC",
212 | "*NOSRC",
213 | "*NOXREF",
214 | "*XREF",
215 | "*GEN",
216 | "*NOGEN",
217 | "*NOSEQUENCE",
218 | "*SEQUENCE",
219 | "*NOVBSUM",
220 | "*VBSUM",
221 | "*NONUMBER",
222 | "*NUMBER",
223 | "*LINENUMBER",
224 | "*NOMAP",
225 | "*MAP",
226 | "*NOOPTIONS",
227 | "*OPTIONS",
228 | "*QUOTE",
229 | "*APOST",
230 | "*NOSECLVL",
231 | "*SECLVL",
232 | "*PRTCORR",
233 | "*NOPRTCORR",
234 | "*MONOPRC",
235 | "*NOMONOPRC",
236 | "*RANGE",
237 | "*NORANGE",
238 | "*NOUNREF",
239 | "*UNREF",
240 | "*NOSYNC",
241 | "*SYNC",
242 | "*NOCRTF",
243 | "*CRTF",
244 | "*NODUPKEYCHK",
245 | "*DUPKEYCHK",
246 | "*NOINZDLT",
247 | "*INZDLT",
248 | "*NOBLK",
249 | "*BLK",
250 | "*STDINZ",
251 | "*NOSTDINZ",
252 | "*STDINZHEX00",
253 | "*NODDSFILLER",
254 | "*DDSFILLER",
255 | "*NOIMBEDERR",
256 | "*IMBEDERR",
257 | "*STDTRUNC",
258 | "*NOSTDTRUNC",
259 | "*NOCHGPOSSGN",
260 | "*CHGPOSSGN",
261 | "*NOEVENTF",
262 | "*EVENTF",
263 | "*MONOPIC",
264 | "*NOMONOPIC",
265 | "*NOCRTARKIDX",
266 | "*CRTARKIDX"
267 | ]
268 | },
269 | "markdownDescription": "Compile options\n\nRefer to the `OPTION` parameter in `CRTCBLMOD` command help."
270 | },
271 | "dbgView": {
272 | "type": "string",
273 | "default": "*SOURCE",
274 | "enum": [
275 | "*STMT",
276 | "*SOURCE",
277 | "*LIST",
278 | "*ALL",
279 | "*NONE"
280 | ],
281 | "markdownDescription": "Debugging views\n\nRefer to the `DBGVIEW` parameter in `CRTCBLMOD` command help."
282 | },
283 | "bndSrvPgm": {
284 | "type": "array",
285 | "default": [],
286 | "items": {
287 | "type": "string",
288 | "default": ""
289 | },
290 | "markdownDescription": "Bind service program\n\nRefer to the `BNDSRVPGM` parameter in `CRTSRVPGM` command help."
291 | },
292 | "bndDir": {
293 | "type": "array",
294 | "default": [],
295 | "items": {
296 | "type": "string",
297 | "default": ""
298 | },
299 | "markdownDescription": "Binding directory\n\nRefer to the `BNDDIR` parameter in `CRTSRVPGM` command help."
300 | },
301 | "bOption": {
302 | "type": "string",
303 | "default": "",
304 | "enum": [
305 | "*DUPPROC",
306 | "*DUPVAR"
307 | ],
308 | "markdownDescription": "Binding options\n\nRefer to the `OPTION` parameter in `CRTSRVPGM` command help."
309 | },
310 | "define": {
311 | "type": "array",
312 | "default": [],
313 | "items": {
314 | "type": "string",
315 | "default": ""
316 | },
317 | "markdownDescription": "Define condition names\n\nSpecifies condition names that are defined before the compilation begins. Using the parameter `DEFINE(condition-name)` is equivalent to coding the `/DEFINE` condition-name directive on the first line of the source file."
318 | },
319 | "dltSplf": {
320 | "type": "string",
321 | "default": "",
322 | "enum": [
323 | "*YES",
324 | "*NO"
325 | ],
326 | "markdownDescription": "Delete spooled files\n\nSpecifies if all spooled files (compile listing) created by the object creations commands are deleted on success."
327 | },
328 | "actGrp": {
329 | "type": "string",
330 | "default": "",
331 | "markdownDescription": "Activation group\n\nRefer to the `ACTGRP` parameter in `CRTSRVPGM` command help."
332 | },
333 | "module": {
334 | "type": "array",
335 | "default": [],
336 | "items": {
337 | "type": "string",
338 | "default": ""
339 | },
340 | "markdownDescription": "Module\n\nRefer to the `MODULE` parameter in `CRTSRVPGM` command help."
341 | },
342 | "pOption": {
343 | "type": "array",
344 | "default": [],
345 | "items": {
346 | "type": "string",
347 | "default": "",
348 | "enum": [
349 | "*XREF",
350 | "*NOXREF",
351 | "*GEN",
352 | "*NOGEN",
353 | "*COMMA",
354 | "*PERIOD",
355 | "*JOB",
356 | "*SYSVAL",
357 | "*QUOTESQL",
358 | "*APOSTSQL",
359 | "*QUOTE",
360 | "*APOST",
361 | "*SECLVL",
362 | "*NOSECLVL",
363 | "*EVENTF",
364 | "*NOEVENTF",
365 | "*CVTDT",
366 | "*NOCVTDT",
367 | "*SQL",
368 | "*SYS",
369 | "*OPTLOB",
370 | "*NOOPTLOB",
371 | "*NOEXTIND",
372 | "*EXTIND",
373 | "*SYSTIME",
374 | "*NOSYSTIME"
375 | ]
376 | },
377 | "markdownDescription": "Pre-compiler OPTIONS\n\nRefer to the `OPTION` parameter in `CRTSQLCBLI` command help."
378 | },
379 | "compileOpt": {
380 | "type": "string",
381 | "default": "",
382 | "markdownDescription": "Pre-Compiler COMPILEOPT\n\nRefer to the `COMPILEOPT` parameter in `CRTSQLCBLI` command help."
383 | },
384 | "tgtRls": {
385 | "type": "string",
386 | "default": "",
387 | "markdownDescription": "Target release\n\nRefer to the `TGTRLS` parameter in `CRTSRVPGM` command help."
388 | },
389 | "incDir": {
390 | "type": "array",
391 | "default": [],
392 | "items": {
393 | "type": "string",
394 | "default": ""
395 | },
396 | "markdownDescription": "Include Directory\n\nSpecifies one or more directories to add to the search path used by the compiler to find copy files. The compiler will search the directories specified here if the copy files in the source program can not be resolved."
397 | },
398 | "tgtCcsid": {
399 | "type": "number",
400 | "default": 37,
401 | "markdownDescription": "Target CCSID\n\nSpecifies the CCSID that the compiler uses to read the source files."
402 | }
403 | }
404 | },
405 | "prefix": {
406 | "type": "string",
407 | "default": "",
408 | "markdownDescription": "Prefix\n\nSpecifies a prefix (such as `T_`) to be used when compiling the service program for a test suite. This can be used to avoid naming conflicts with objects built as part of your application."
409 | }
410 | }
411 | }
412 | },
413 | "defaultSnippets": [
414 | {
415 | "label": "RPGUnit Test Configuration",
416 | "body": {
417 | "rpgunit": {
418 | "rucrtrpg": {
419 | "tgtCcsid": 37,
420 | "dbgView": "*SOURCE",
421 | "cOption": [
422 | "*EVENTF"
423 | ]
424 | },
425 | "rucrtcbl": {
426 | "tgtCcsid": 37,
427 | "dbgView": "*SOURCE",
428 | "cOption": [
429 | "*EVENTF"
430 | ]
431 | }
432 | }
433 | }
434 | }
435 | ]
436 | }
--------------------------------------------------------------------------------
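For reference, the defaultSnippets above correspond to a parsed configuration object roughly like the following. This is an illustrative sketch only, written as a TypeScript literal; it assumes the TestingConfig type in src/types.ts (not shown in this section) mirrors the schema.

    // Illustration only: values are the schema defaults used in the defaultSnippets above.
    const exampleTestingConfig = {
        rpgunit: {
            rucrtrpg: {
                tgtCcsid: 37,          // CCSID used by the compiler to read the source
                dbgView: "*SOURCE",    // Debugging view (DBGVIEW on CRTRPGMOD)
                cOption: ["*EVENTF"]   // Compile options (OPTION on CRTRPGMOD)
            },
            prefix: "T_"               // Optional prefix for the test service program (example value from the schema description)
        }
    };
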
/snippets/rpgle.json:
--------------------------------------------------------------------------------
1 | {
2 | "RPGUnit: Test Case": {
3 | "scope": "rpgle",
4 | "prefix": "test",
5 | "description": [
6 | "Create an RPGUnit test case.",
7 | "",
8 | "Each test case is an exported procedure that starts with the name 'test'."
9 | ],
10 | "body": [
11 | "dcl-proc test_$1 export;",
12 | "\tdcl-pi *n extproc(*dclcase) end-pi;",
13 | "",
14 | "\t$2",
15 | "end-proc;"
16 | ]
17 | },
18 | "RPGUnit: Setup Test Suite": {
19 | "scope": "rpgle",
20 | "prefix": "setUpSuite",
21 | "description": [
22 | "Used to set up a test suite before the first test case is started.",
23 | "",
24 | "This procedure can be used to set up test data or allocate resources before the first test case is started."
25 | ],
26 | "body": [
27 | "dcl-proc setUpSuite export;",
28 | "\t$0",
29 | "end-proc;"
30 | ]
31 | },
32 | "RPGUnit: Setup Test Case": {
33 | "scope": "rpgle",
34 | "prefix": "setUp",
35 | "description": [
36 | "Used to set up a test case before it is executed.",
37 | "",
38 | "This procedure is called for each test case."
39 | ],
40 | "body": [
41 | "dcl-proc setUp export;",
42 | "\t$0",
43 | "end-proc;"
44 | ]
45 | },
46 | "RPGUnit: Tear Down Test Suite": {
47 | "scope": "rpgle",
48 | "prefix": "tearDownSuite",
49 | "description": [
50 | "Used to tear down a test suite after the last test case has been executed.",
51 | "",
52 | "This procedure can be used to remove test data or deallocate resources after the last test case has been executed."
53 | ],
54 | "body": [
55 | "dcl-proc tearDownSuite export;",
56 | "\t$0",
57 | "end-proc;"
58 | ]
59 | },
60 | "RPGUnit: Tear Down Test Case": {
61 | "scope": "rpgle",
62 | "prefix": "tearDown",
63 | "description": [
64 | "Used to tear down a test case after it has been executed.",
65 | "",
66 | "This procedure is called for each test case."
67 | ],
68 | "body": [
69 | "dcl-proc tearDown export;",
70 | "\t$0",
71 | "end-proc;"
72 | ]
73 | },
74 | "RPGUnit: String Equal": {
75 | "scope": "rpgle",
76 | "prefix": "aEqual",
77 | "description": [
78 | "Compares the given String values 'expected' and 'actual'.",
79 | "",
80 | "The assertion fails, if both values are different.",
81 | "If the 'fieldName' parameter is specified, the message is prefixed with 'fieldName:'.",
82 | "",
83 | "// ✅ Pass",
84 | "aEqual( 'Hello' : 'Hello' );",
85 | "",
86 | "// ❌ Fail",
87 | "aEqual( 'HelloWorld' : 'Hello' );"
88 | ],
89 | "body": "aEqual(${1:expected} : ${2:actual} : ${3:fieldName});"
90 | },
91 | "RPGUnit: Integer Equal": {
92 | "scope": "rpgle",
93 | "prefix": "iEqual",
94 | "description": [
95 | "Compares the given Integer values expected and actual.",
96 | "",
97 | "The assertion fails, if both values are different.",
98 | "If the 'fieldName' parameter is specified, the message is prefixed with 'fieldName:'.",
99 | "",
100 | "// ✅ Pass",
101 | "iEqual( 123 : 123 );",
102 | "",
103 | "// ❌ Fail",
104 | "iEqual( 123 : 456 );"
105 | ],
106 | "body": "iEqual(${1:expected} : ${2:actual} : ${3:fieldName});"
107 | },
108 | "RPGUnit: Boolean Equal": {
109 | "scope": "rpgle",
110 | "prefix": "nEqual",
111 | "description": [
112 | "Compares the given Boolean values expected and actual.",
113 | "",
114 | "The assertion fails, if both values are different.",
115 | "If the 'fieldName' parameter is specified, the message is prefixed with 'fieldName:'.",
116 | "",
117 | "// ✅ Pass",
118 | "nEqual( *off : 1=2 );",
119 | "",
120 | "// ❌ Fail",
121 | "nEqual( *on : 'ABC'='DEF' );"
122 | ],
123 | "body": "nEqual(${1:expected} : ${2:actual} : ${3:fieldName});"
124 | },
125 | "RPGUnit: Assert": {
126 | "scope": "rpgle",
127 | "prefix": "assert",
128 | "description": [
129 | "Checks the specified Boolean expression for true.",
130 | "",
131 | "The assertion fails if the expression evaluates to false.",
132 | "When the assertion fails, the value of 'message' is added to the test report.",
133 | "",
134 | "// ✅ Pass",
135 | "assert( 1=1 : 'Impossible to fail' );",
136 | "",
137 | "// ❌ Fail",
138 | "rc = 1;",
139 | "assert( rc=0 : 'Unexpected return code. Expected: 0' );"
140 | ],
141 | "body": "assert(${1:booleanExpression} : ${2:message});"
142 | },
143 | "RPGUnit: Fail": {
144 | "scope": "rpgle",
145 | "prefix": "fail",
146 | "description": [
147 | "Produces an error and appends the specified 'message' to the test report.",
148 | "",
149 | "The test case is terminated.",
150 | "",
151 | "monitor;",
152 | " produceAnException(doFailure);",
153 | "",
154 | " // ❌ Fail: Only runs if exception was not caught",
155 | " fail('produceAnException(..) should have raised an error.');",
156 | "on-error;",
157 | " // ✅ Pass: Only runs if exception was caught",
158 | "endmon;"
159 | ],
160 | "body": "fail(${1:message});"
161 | },
162 | "RPGUnit: Get Monitored Message": {
163 | "scope": "rpgle",
164 | "prefix": "getMonitoredMessage",
165 | "description": [
166 | "Retrieves the latest *ESCAPE message from the job log. ",
167 | "",
168 | "Returns a data structure with the following information:",
169 | "• id - Message ID",
170 | "• text - First level text",
171 | "• pgm - Sender: program name",
172 | "• mod - Sender: module name",
173 | "• proc - Sender: procedure name",
174 | "• specNb - Sender: statement number",
175 | "",
176 | "Usually called within the 'on-error' section of a 'monitor' block.",
177 | "",
178 | "monitor;",
179 | " a = 10;",
180 | " b = 0;",
181 | " c = a / b;",
182 | "",
183 | " // ❌ Fail: Only runs if exception was not caught",
184 | " fail('Division by zero did not raise an error.');",
185 | "on-error;",
186 | " msgInfo = getMonitoredMessage(*ON);",
187 | "endmon;",
188 | "",
189 | "// ✅ Pass",
190 | "aEqual('MCH1211' : msgInfo.Id);"
191 | ],
192 | "body": "getMonitoredMessage(${1:*ON});"
193 | },
194 | "RPGUnit: Assert Job Log Contains": {
195 | "scope": "rpgle",
196 | "prefix": "assertJobLogContains",
197 | "description": [
198 | "Checks whether the job log contains the specified message ID between NOW and timeLimit.",
199 | "",
200 | "The value of timeLimit should be created with getFullTimeStamp().",
201 | "",
202 | "testStartTime = getFullTimeStamp();",
203 | "sndMCH1211Msg();",
204 | "",
205 | "// ✅ Pass",
206 | "assertJobLogContains('MCH1211' : testStartTime);"
207 | ],
208 | "body": "assertJobLogContains(${1:msgId} : ${2:timeLimit});"
209 | },
210 | "RPGUnit: Assert Message Queue Contains": {
211 | "scope": "rpgle",
212 | "prefix": "assertMessageQueueContains",
213 | "description": [
214 | "Checks whether a given message queue contains the specified message between NOW and timeLimit.",
215 | "",
216 | "The message is identified by any combination of ID, message text or message help where unused selection parameters must be omitted.",
217 | "• Message ID can be set to *EMPTY to test whether a message queue is empty (all other parameters must be omitted).",
218 | "• Message text and message help can be specified as *generic* strings values.",
219 | "• The value of timeLimit should be created with getFullTimeStamp().",
220 | "",
221 | "testStartTime = getFullTimeStamp();",
222 | "sndToMSGQ();",
223 | "",
224 | "// ✅ Pass",
225 | "assertMessageQueueContains('QMSGQT' : 'QTEMP' : *omit : 'MSGQT - iRPGUnit' : *omit : testStartTime);",
226 | "",
227 | "testStartTime = getFullTimeStamp();",
228 | "",
229 | "// ✅ Pass",
230 | "assertMessageQueueContains('QMSGQT' : 'QTEMP' : '*EMPTY' : *omit : *omit : testStartTime);"
231 | ],
232 | "body": "assertMessageQueueContains(${1:userOrQueue} : ${2:library} : ${3:msgId} : ${4:msgText} : ${5:msgHelp} : ${6:timeLimit});"
233 | }
234 | }
--------------------------------------------------------------------------------
/src/api/ibmi.ts:
--------------------------------------------------------------------------------
1 | import { CodeForIBMi } from "@halcyontech/vscode-ibmi-types";
2 | import Instance from "@halcyontech/vscode-ibmi-types/Instance";
3 | import { ComponentRegistry } from "@halcyontech/vscode-ibmi-types/api/components/manager";
4 | import { DeployTools } from "@halcyontech/vscode-ibmi-types/filesystems/local/deployTools";
5 | import { VscodeTools } from "@halcyontech/vscode-ibmi-types/ui/Tools";
6 | import { Extension, extensions } from "vscode";
7 |
8 | let baseExtension: Extension<CodeForIBMi> | undefined;
9 |
10 | export function loadBase(): CodeForIBMi | undefined {
11 | if (!baseExtension) {
12 | baseExtension = (extensions ? extensions.getExtension(`halcyontechltd.code-for-ibmi`) : undefined);
13 | }
14 |
15 | return (baseExtension && baseExtension.isActive && baseExtension.exports ? baseExtension.exports : undefined);
16 | }
17 |
18 | export function getInstance(): Instance | undefined {
19 | return (baseExtension && baseExtension.isActive && baseExtension.exports ? baseExtension.exports.instance : undefined);
20 | }
21 |
22 | export function getDeployTools(): typeof DeployTools | undefined {
23 | return (baseExtension && baseExtension.isActive && baseExtension.exports ? baseExtension.exports.deployTools : undefined);
24 | }
25 |
26 | export function getVSCodeTools(): typeof VscodeTools | undefined {
27 | return (baseExtension && baseExtension.isActive && baseExtension.exports ? baseExtension.exports.tools : undefined);
28 | }
29 |
30 | export function getComponentRegistry(): ComponentRegistry | undefined {
31 | return (baseExtension && baseExtension.isActive && baseExtension.exports ? baseExtension.exports.componentRegistry : undefined);
32 | }
--------------------------------------------------------------------------------
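A minimal usage sketch of the wrapper above; it mirrors the call pattern used by the other modules in src/ (for example codeCoverage.ts and extension.ts below) and assumes the Code for IBM i extension is installed and active.

    import { getInstance, loadBase } from "./api/ibmi";

    // Sketch only: resolve the base extension exports once, then reach its connection APIs.
    loadBase();
    const ibmi = getInstance();                 // undefined if Code for IBM i is not active
    const connection = ibmi?.getConnection();   // exposes runCommand, getContent, getConfig, ...
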
/src/codeCoverage.ts:
--------------------------------------------------------------------------------
1 | import { LogLevel, Uri, workspace } from "vscode";
2 | import * as tmp from "tmp";
3 | import * as path from "path";
4 | import * as unzipper from "unzipper";
5 | import * as xml2js from "xml2js";
6 | import { getInstance } from "./api/ibmi";
7 | import { Logger } from "./logger";
8 | import { CoverageData } from "./types";
9 |
10 | export namespace CodeCoverage {
11 | export async function getCoverage(outputZipPath: string): Promise<CoverageData[] | undefined> {
12 | // Get ccdata XML from cczip
13 | const tmpDir = tmp.dirSync({ unsafeCleanup: true });
14 | const xml = await downloadCczip(outputZipPath, tmpDir);
15 |
16 | if (xml) {
17 | // Parse XML to get coverage data
18 | const coverageData = await getCoverageData(xml, tmpDir);
19 | return coverageData;
20 | }
21 | }
22 |
23 | async function downloadCczip(outputZipPath: string, tmpDir: tmp.DirResult): Promise<any> {
24 | try {
25 | const ibmi = getInstance();
26 | const connection = ibmi!.getConnection();
27 | const content = connection.getContent();
28 |
29 | // Download remote cczip to local temp file
30 | const tmpFile = tmp.fileSync();
31 | await content.downloadStreamfileRaw(outputZipPath, tmpFile.name);
32 | Logger.log(LogLevel.Info, `Downloaded code coverage results to ${tmpFile.name}`);
33 |
34 | // Extract local temp file contents to temp directory
35 | const directory = await unzipper.Open.file(tmpFile.name);
36 | await directory.extract({ path: tmpDir.name });
37 | Logger.log(LogLevel.Info, `Extracted code coverage results to ${tmpDir.name}`);
38 |
39 | // Read and parse xml file from temp directory
40 | const ccdata = Uri.file(path.join(tmpDir.name, `ccdata`));
41 | const ccdataContent = await workspace.fs.readFile(ccdata);
42 | // TODO: Can we get an interface for the xml?
43 | const xml = await xml2js.parseStringPromise(ccdataContent);
44 |
45 | return xml;
46 | } catch (error: any) {
47 | Logger.logWithNotification(LogLevel.Error, `Failed to download code coverage results`, `${outputZipPath} - ${error}`);
48 | }
49 | }
50 |
51 | async function getCoverageData(xml: any, tmpdir: tmp.DirResult): Promise<CoverageData[] | undefined> {
52 | try {
53 | const items: CoverageData[] = [];
54 |
55 | for (const source of xml.LLC.lineLevelCoverageClass) {
56 | const data = source[`$`];
57 | const testCase = source.testcase === undefined ?
58 | { hits: `` } : // Indicates that no lines were run
59 | source.testcase[0][`$`];
60 |
61 | const sourcePath = path.join(tmpdir.name, `src`, data.sourceFile);
62 | const sourceUri = Uri.file(sourcePath);
63 | const rawSource = await workspace.fs.readFile(sourceUri);
64 | const sourceCode = rawSource.toString().split(`\n`);
65 |
66 | const realHits = testCase.v2fileHits || testCase.hits;
67 | const realLines = data.v2fileLines || data.lines;
68 | const realSigs = data.v2qualifiedSignatures || data.signatures;
69 |
70 | const indexesExecuted = getRunLines(sourceCode.length, realHits);
71 | const activeLines = getLines(realLines, indexesExecuted);
72 |
73 | const lineKeys = Object.keys(activeLines).map(Number);
74 | let countRan = 0;
75 | lineKeys.forEach(key => {
76 | if (activeLines[key] === true) {
77 | countRan++;
78 | }
79 | });
80 | const percentRan = ((countRan / lineKeys.length) * 100).toFixed(0);
81 |
82 | items.push({
83 | basename: path.basename(data.sourceFile),
84 | path: data.sourceFile,
85 | localPath: sourcePath,
86 | coverage: {
87 | signitures: realSigs.split(`+`),
88 | lineString: realLines,
89 | activeLines,
90 | percentRan
91 | },
92 | });
93 | }
94 |
95 | return items;
96 | } catch (error) {
97 | Logger.logWithNotification(LogLevel.Error, `Failed to parse code coverage results`, `${error}`);
98 | }
99 | }
100 |
101 | function getLines(string: string, indexesExecuted: number[]): { [key: number]: boolean } {
102 | const lineNumbers = [];
103 | let line = 0;
104 | let currentValue = ``;
105 | let concat = false;
106 |
107 | for (const char of string) {
108 | switch (char) {
109 | case `#`:
110 | if (currentValue !== ``) {
111 | line = Number(currentValue);
112 | lineNumbers.push(line);
113 | }
114 |
115 | concat = true;
116 | line = 0;
117 | currentValue = ``;
118 | break;
119 |
120 | case `,`:
121 | if (currentValue !== ``) {
122 | line = Number(currentValue);
123 | lineNumbers.push(line);
124 | }
125 | currentValue = ``;
126 | break;
127 |
128 | case `+`:
129 | line = Number(currentValue);
130 | lineNumbers.push(line);
131 | concat = false;
132 | break;
133 |
134 | default:
135 | if (concat) {
136 | currentValue += char;
137 | } else {
138 | currentValue = ``;
139 | line += Number(char);
140 | lineNumbers.push(line);
141 | }
142 | break;
143 | }
144 | }
145 |
146 | let lines: { [key: number]: boolean } = {};
147 |
148 | for (const i in lineNumbers) {
149 | lines[lineNumbers[i]] = indexesExecuted.includes(Number(i));
150 | }
151 |
152 | return lines;
153 | }
154 |
155 | function getRunLines(numLines: number, hits: string): number[] {
156 | const hitLines: number[] = [];
157 |
158 | let hitChar;
159 | for (let i = 0, lineIndex = 0; lineIndex < numLines && i < hits.length; i++) {
160 | hitChar = hits.charCodeAt(i);
161 |
162 | if (hitChar <= 80) {
163 | hitChar -= 65;
164 |
165 | if (hitChar === 0) {
166 | lineIndex += 4;
167 | } else {
168 | if ((hitChar & 8) !== 0) {
169 | hitLines.push(lineIndex);
170 | }
171 | lineIndex++;
172 |
173 | if ((hitChar & 4) !== 0 && lineIndex < numLines) {
174 | hitLines.push(lineIndex);
175 | }
176 | lineIndex++;
177 |
178 | if ((hitChar & 2) !== 0 && lineIndex < numLines) {
179 | hitLines.push(lineIndex);
180 | }
181 | lineIndex++;
182 |
183 | if ((hitChar & 1) !== 0 && lineIndex < numLines) {
184 | hitLines.push(lineIndex);
185 | }
186 | lineIndex++;
187 | }
188 | }
189 | }
190 |
191 | return hitLines;
192 | }
193 | }
--------------------------------------------------------------------------------
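A minimal sketch of how the namespace above might be consumed; it assumes an active connection and that a CODECOV result archive already exists on the IFS (the path below is hypothetical).

    import { CodeCoverage } from "./codeCoverage";

    async function printCoverageSummary() {
        // Hypothetical IFS path to a cczip produced by a coverage run.
        const coverageData = await CodeCoverage.getCoverage("/tmp/ibmi-testing/coverage.cczip");
        for (const file of coverageData ?? []) {
            // activeLines maps a line number to whether it was executed; percentRan summarizes it.
            console.log(`${file.basename}: ${file.coverage.percentRan}% of mapped lines ran`);
        }
    }
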
/src/components/codeCov.ts:
--------------------------------------------------------------------------------
1 | import { ComponentIdentification, ComponentState, IBMiComponent } from "@halcyontech/vscode-ibmi-types/api/components/component";
2 | import IBMi from "@halcyontech/vscode-ibmi-types/api/IBMi";
3 | import { LogLevel } from "vscode";
4 | import { Logger } from "../logger";
5 |
6 | export class CodeCov implements IBMiComponent {
7 | static ID: string = "CODECOV";
8 | static MINIMUM_VERSION = "1.0.0";
9 |
10 | getIdentification(): ComponentIdentification {
11 | return {
12 | name: CodeCov.ID,
13 | version: CodeCov.MINIMUM_VERSION
14 | };
15 | }
16 |
17 | async getRemoteState(connection: IBMi, installDirectory: string): Promise<ComponentState> {
18 | const content = connection.getContent();
19 |
20 | try {
21 | // Check if CODECOV command exists
22 | const library = 'QDEVTOOLS';
23 | const command = 'CODECOV';
24 | const commandExists = await content.checkObject({ library: library, name: command, type: '*CMD' });
25 | if (commandExists) {
26 | return "Installed";
27 | } else {
28 | Logger.log(LogLevel.Error, `${command} command not found in ${library}.LIB`);
29 | return 'NotInstalled';
30 | }
31 | } catch (error) {
32 | Logger.log(LogLevel.Error, `Failed to get remote state of CODECOV component. Error: ${error}`);
33 | return 'Error';
34 | }
35 | }
36 |
37 | update(connection: IBMi, installDirectory: string): Promise<ComponentState> {
38 | return this.getRemoteState(connection, installDirectory);
39 | }
40 | }
--------------------------------------------------------------------------------
/src/components/rpgUnit.ts:
--------------------------------------------------------------------------------
1 | import { ComponentIdentification, ComponentState, IBMiComponent } from "@halcyontech/vscode-ibmi-types/api/components/component";
2 | import IBMi from "@halcyontech/vscode-ibmi-types/api/IBMi";
3 | import { Configuration, Section } from "../configuration";
4 | import { compareVersions } from 'compare-versions';
5 | import { GitHub, Release } from "../github";
6 | import { LogLevel, QuickPickItem, window } from "vscode";
7 | import * as tmp from "tmp";
8 | import * as path from "path";
9 | import { Logger } from "../logger";
10 |
11 | export class RPGUnit implements IBMiComponent {
12 | static ID: string = "RPGUnit";
13 | static MINIMUM_VERSION: string = '5.1.0-beta.002';
14 | static VERSION_REGEX = 'v\\d+(\\.\\d+){2}(\\.b\\d{1,3}|\\.r)?';
15 |
16 | getIdentification(): ComponentIdentification {
17 | return {
18 | name: RPGUnit.ID,
19 | version: RPGUnit.MINIMUM_VERSION,
20 | userManaged: true
21 | };
22 | }
23 |
24 | async getRemoteState(connection: IBMi, installDirectory: string): Promise<ComponentState> {
25 | const content = connection.getContent();
26 |
27 | try {
28 | // Check if product library exists
29 | const productLibrary = Configuration.getOrFallback<string>(Section.productLibrary);
30 | const productLibraryExists = await content.checkObject({ library: 'QSYS', name: productLibrary, type: '*LIB' });
31 | if (productLibraryExists) {
32 | // Get installed version of RPGUnit
33 | const versionCommand = content.toCl(`DSPSRVPGM`, { 'SRVPGM': `${productLibrary}/RUTESTCASE`, 'DETAIL': '*COPYRIGHT' });
34 | const versionResult = await connection.runCommand({ command: versionCommand, environment: `ile`, noLibList: true });
35 |
36 | if (versionResult.code === 0) {
37 | const versionMatch = versionResult.stdout.match(RPGUnit.VERSION_REGEX);
38 | if (versionMatch && versionMatch[0]) {
39 | const installedVersion = versionMatch[0];
40 |
41 | // Compare installed version with minimum version
42 | if (this.compareVersions(installedVersion, RPGUnit.MINIMUM_VERSION) >= 0) {
43 | Logger.log(LogLevel.Info, `Installed version of RPGUnit is v${installedVersion}`);
44 | return 'Installed';
45 | } else {
46 | Logger.log(LogLevel.Error, `Installed version of RPGUnit (v${installedVersion}) is lower than minimum version (v${RPGUnit.MINIMUM_VERSION})`);
47 | return 'NeedsUpdate';
48 | }
49 | } else {
50 | Logger.log(LogLevel.Error, `Failed to parse installed version of RPGUnit`);
51 | return 'NeedsUpdate';
52 | }
53 | } else {
54 | Logger.log(LogLevel.Error, `Failed to get installed version of RPGUnit. Error: ${versionResult.stderr}`);
55 | return 'NeedsUpdate';
56 | }
57 | } else {
58 | Logger.log(LogLevel.Error, `Product library ${productLibrary}.LIB does not exist`);
59 | return 'NotInstalled';
60 | }
61 | } catch (error) {
62 | Logger.log(LogLevel.Error, `Failed to get remote state of RPGUnit component. Error: ${error}`);
63 | return 'Error';
64 | }
65 | }
66 |
67 | async update(connection: IBMi, installDirectory: string): Promise<ComponentState> {
68 | // Get current component state
69 | const state = await this.getRemoteState(connection, installDirectory);
70 |
71 | // Get releases from GitHub
72 | const releases = await GitHub.getReleases();
73 | if (releases.error) {
74 | Logger.logWithNotification(LogLevel.Error, `Failed to retrieve GitHub releases`, releases.error);
75 | return state;
76 | }
77 |
78 | // Filter releases (exclude releases which are drafts, do not have the required asset, or are below the minimum version)
79 | const filteredReleases = releases.data.filter(release => {
80 | const version = release.name || release.tag_name;
81 | return (release.draft === false) &&
82 | (release.assets.some(asset => asset.name === GitHub.ASSET_NAME)) &&
83 | this.compareVersions(version, RPGUnit.MINIMUM_VERSION) >= 0;
84 | });
85 | if (filteredReleases.length === 0) {
86 | Logger.logWithNotification(LogLevel.Error, `No GitHub releases found which are above the minimum version (${RPGUnit.MINIMUM_VERSION})`);
87 | return state;
88 | } else {
89 | Logger.log(LogLevel.Info, `Found ${filteredReleases.length} compatible GitHub release(s) in ${GitHub.OWNER}/${GitHub.REPO}`);
90 | }
91 |
92 | // Prompt user to select a release
93 | const items: (QuickPickItem & { release: Release })[] = filteredReleases.map(release => {
94 | const version = release.name || release.tag_name;
95 | const publishedAt = release.published_at ? new Date(release.published_at).toLocaleString() : undefined;
96 | const preRelease = release.prerelease ? ' (Pre-release)' : '';
97 | const description = (publishedAt ?
98 | (preRelease ? `${publishedAt} (Pre-release)` : publishedAt) :
99 | (preRelease ? `(Pre-release)` : ''));
100 |
101 | return {
102 | label: version,
103 | description: description,
104 | release: release
105 | };
106 | });
107 | const selectedRelease = await window.showQuickPick(items, {
108 | title: 'Select the GitHub release to install from',
109 | placeHolder: 'GitHub Release'
110 | });
111 | if (!selectedRelease) {
112 | Logger.logWithNotification(LogLevel.Error, `Installation aborted as GitHub release was not selected`);
113 | return state;
114 | }
115 |
116 | Logger.show();
117 | const content = connection.getContent();
118 | const config = connection.getConfig();
119 |
120 | // Check if product library exists
121 | const productLibrary = Configuration.getOrFallback<string>(Section.productLibrary);
122 | const productLibraryExists = await content.checkObject({ library: 'QSYS', name: productLibrary, type: '*LIB' });
123 | if (productLibraryExists) {
124 | const result = await window.showInformationMessage('Delete product library',
125 | {
126 | modal: true,
127 | detail: `The product library ${productLibrary}.LIB already exists. Can it be deleted?`
128 | },
129 | 'Yes', 'No'
130 | );
131 | if (result === 'Yes') {
132 | // Deleting product library
133 | const deleteLibCommand = content.toCl(`DLTOBJ`, { 'OBJ': `QSYS/${productLibrary}`, 'OBJTYPE': `*LIB` });
134 | Logger.log(LogLevel.Info, `Deleting product library ${productLibrary}.LIB: ${deleteLibCommand}`);
135 | const deleteLibResult = await connection.runCommand({ command: deleteLibCommand, environment: `ile`, noLibList: true });
136 | if (deleteLibResult.code !== 0) {
137 | Logger.logWithNotification(LogLevel.Error, `Failed to delete library`, deleteLibResult.stderr);
138 | return state;
139 | }
140 | } else {
141 | Logger.logWithNotification(LogLevel.Error, `Installation aborted as product library was not deleted`);
142 | return state;
143 | }
144 | }
145 |
146 | // Downloading save file locally
147 | const localTempDir = tmp.dirSync({ unsafeCleanup: true });
148 | Logger.log(LogLevel.Info, `Downloading ${GitHub.ASSET_NAME} GitHub release asset from ${selectedRelease.release.name} to ${localTempDir.name}`);
149 | const asset = selectedRelease.release.assets.find(asset => asset.name === GitHub.ASSET_NAME)!;
150 | const isDownloaded = await GitHub.downloadReleaseAsset(asset, localTempDir.name);
151 | if (!isDownloaded.data) {
152 | Logger.logWithNotification(LogLevel.Error, `Failed to download GitHub release asset`, isDownloaded.error);
153 | return state;
154 | }
155 |
156 | // Uploading save file to IFS
157 | const localPath = path.join(localTempDir.name, GitHub.ASSET_NAME);
158 | const remoteTempDir = config.tempDir;
159 | const remotePath = path.posix.join(remoteTempDir, GitHub.ASSET_NAME);
160 | try {
161 | Logger.log(LogLevel.Info, `Uploading RPGUNIT save file to ${remotePath}`);
162 | await content.uploadFiles([{ local: localPath, remote: remotePath }]);
163 | } catch (error: any) {
164 | Logger.logWithNotification(LogLevel.Error, `Failed to upload save file`, error);
165 | return state;
166 | }
167 |
168 | // Creating save file in temporary library
169 | const createSavfCommand = content.toCl(`CRTSAVF`, {
170 | 'FILE': `${config.tempLibrary}/RPGUNIT`
171 | });
172 | Logger.log(LogLevel.Info, `Creating RPGUNIT save file in ${config.tempLibrary}.LIB: ${createSavfCommand}`);
173 | const createSavfResult = await connection.runCommand({ command: createSavfCommand, environment: `ile`, noLibList: true });
174 | if (createSavfResult.code !== 0 && !createSavfResult.stderr.includes('CPF5813')) {
175 | Logger.logWithNotification(LogLevel.Error, `Failed to create save file`, createSavfResult.stderr);
176 | return state;
177 | }
178 |
179 | // Transfer save file to temporary library
180 | const transferCommand = content.toCl(`CPYFRMSTMF`, {
181 | 'FROMSTMF': remotePath,
182 | 'TOMBR': `\'/QSYS.LIB/${config.tempLibrary}.LIB/RPGUNIT.FILE\'`,
183 | 'STMFCCSID': 37,
184 | 'MBROPT': `*REPLACE`
185 | });
186 | Logger.log(LogLevel.Info, `Transferring RPGUNIT save file to ${config.tempLibrary}.LIB: ${transferCommand}`);
187 | const transferResult = await connection.runCommand({ command: transferCommand, environment: `ile`, noLibList: true });
188 | if (transferResult.code !== 0) {
189 | Logger.logWithNotification(LogLevel.Error, `Failed to transfer save file`, transferResult.stderr);
190 | return state;
191 | }
192 |
193 | // Restoring library
194 | const restoreCommand = content.toCl(`RSTLIB`, {
195 | 'SAVLIB': 'RPGUNIT',
196 | 'DEV': `*SAVF`,
197 | 'SAVF': `${config.tempLibrary}/RPGUNIT`,
198 | 'RSTLIB': productLibrary
199 | });
200 | Logger.log(LogLevel.Info, `Restoring RPGUNIT save file contents into ${productLibrary}.LIB: ${restoreCommand}`);
201 | const restoreResult = await connection.runCommand({ command: restoreCommand, environment: `ile`, noLibList: true });
202 | if (restoreResult.code !== 0) {
203 | Logger.logWithNotification(LogLevel.Error, `Failed to restore save file contents`, restoreResult.stderr);
204 | return state;
205 | }
206 |
207 | // Clean up
208 | Logger.log(LogLevel.Info, `Cleaning up temporary files`);
209 | localTempDir.removeCallback();
210 | await connection.runCommand({ command: `rm -rf ${remotePath}` });
211 |
212 | const newState = await this.getRemoteState(connection, installDirectory);
213 | if (newState === 'Installed') {
214 | Logger.logWithNotification(LogLevel.Info, `RPGUnit ${selectedRelease.release.name} installed successfully into ${productLibrary}.LIB`);
215 | } else {
216 | Logger.logWithNotification(LogLevel.Error, `RPGUnit ${selectedRelease.release.name} failed to install into ${productLibrary}.LIB`);
217 | }
218 | return newState;
219 | }
220 |
221 | compareVersions(v1: string, v2: string): number {
222 | function normalize(v: string) {
223 | // Remove prefix
224 | v = v.replace('v', '');
225 |
226 | // Remove production suffix
227 | v = v.replace('.r', '');
228 |
229 | // Convert beta suffix
230 | v = v.replace('.b', '-beta.');
231 |
232 | return v;
233 | }
234 |
235 | try {
236 | return compareVersions(normalize(v1), normalize(v2));
237 | } catch (error) {
238 | Logger.log(LogLevel.Error, `Failed to compare versions ${v1} and ${v2}. Error: ${error}`);
239 | return -1;
240 | }
241 | }
242 | }
--------------------------------------------------------------------------------
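The compareVersions helper above normalizes RPGUnit's tag format (for example v5.1.0.b002 or v5.1.0.r) before delegating to the compare-versions package. A small worked example, following the replace rules in normalize():

    import { compareVersions } from "compare-versions";

    // 'v5.1.0.b002' -> strip 'v', turn '.b' into '-beta.' -> '5.1.0-beta.002'
    // 'v5.1.0.r'    -> strip 'v', drop '.r'               -> '5.1.0'
    compareVersions("5.1.0-beta.002", "5.1.0-beta.002"); // 0: the beta build meets MINIMUM_VERSION exactly
    compareVersions("5.1.0", "5.1.0-beta.002");          // 1: a production ('.r') release sorts above the beta
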
/src/config.ts:
--------------------------------------------------------------------------------
1 | import { LogLevel, RelativePattern, Uri, workspace, WorkspaceFolder } from "vscode";
2 | import { TestingConfig } from "./types";
3 | import * as path from "path";
4 | import lodash from "lodash";
5 | import { Logger } from "./logger";
6 | import { getInstance } from "./api/ibmi";
7 |
8 | export class ConfigHandler {
9 | static TESTING_CONFIG_FILE = 'testing.json';
10 | static GLOBAL_CONFIG_DIRECTORY = '.vscode';
11 |
12 | async getLocalConfig(uri: Uri): Promise<TestingConfig | undefined> {
13 | const workspaceFolder = workspace.getWorkspaceFolder(uri);
14 | if (!workspaceFolder) {
15 | return;
16 | }
17 |
18 | try {
19 | const localConfigUri = await this.findTestingConfig(workspaceFolder, uri);
20 | const localConfig = localConfigUri ? await this.readTestingConfig(localConfigUri, 'local') : undefined;
21 | if (localConfigUri && localConfig) {
22 | Logger.log(LogLevel.Info, `Found local testing configuration at ${localConfigUri.toString()}:\n${JSON.stringify(localConfig, null, 2)}`);
23 | }
24 |
25 | const globalConfigUri = Uri.joinPath(workspaceFolder.uri, ConfigHandler.GLOBAL_CONFIG_DIRECTORY, ConfigHandler.TESTING_CONFIG_FILE);
26 | const globalConfig = await this.readTestingConfig(globalConfigUri, 'global');
27 | if (globalConfig) {
28 | Logger.log(LogLevel.Info, `Found global testing configuration at ${globalConfigUri.toString()}:\n${JSON.stringify(globalConfig, null, 2)}`);
29 | }
30 |
31 | const mergedConfig = lodash.merge({}, globalConfig, localConfig);
32 | Logger.log(LogLevel.Info, `Merged local testing configuration:\n${JSON.stringify(mergedConfig, null, 2)}`);
33 | return mergedConfig;
34 | } catch (error: any) {
35 | Logger.logWithNotification(LogLevel.Error, `Failed to retrieve local testing configuration`, error);
36 | return;
37 | }
38 | }
39 |
40 | async getRemoteConfig(uri: Uri): Promise<TestingConfig | undefined> {
41 | const ibmi = getInstance();
42 | const connection = ibmi!.getConnection();
43 |
44 | const parsedPath = connection.parserMemberPath(uri.path);
45 | const memberPath = parsedPath.asp ?
46 | path.posix.join(parsedPath.asp, parsedPath.library, parsedPath.file, ConfigHandler.TESTING_CONFIG_FILE) :
47 | path.posix.join(parsedPath.library, parsedPath.file, ConfigHandler.TESTING_CONFIG_FILE);
48 | const remoteConfigUri = Uri.from({ scheme: 'member', path: `/${memberPath}` });
49 |
50 | const remoteConfig = await this.readTestingConfig(remoteConfigUri, 'remote');
51 | if (remoteConfig) {
52 | Logger.log(LogLevel.Info, `Found remote testing configuration at ${remoteConfigUri.toString()}:\n${JSON.stringify(remoteConfig, null, 2)}`);
53 | }
54 |
55 | return remoteConfig;
56 | }
57 |
58 | private async findTestingConfig(workspaceFolder: WorkspaceFolder, uri: Uri): Promise<Uri | undefined> {
59 | const parentDirectory = path.parse(uri.fsPath).dir;
60 | if (parentDirectory.startsWith(workspaceFolder.uri.fsPath)) {
61 | const testingConfigUris = await workspace.findFiles(new RelativePattern(parentDirectory, ConfigHandler.TESTING_CONFIG_FILE));
62 |
63 | if (testingConfigUris.length > 0) {
64 | return testingConfigUris[0];
65 | } else {
66 | return this.findTestingConfig(workspaceFolder, Uri.parse(parentDirectory));
67 | }
68 | }
69 | };
70 |
71 | private async readTestingConfig(testingConfigUri: Uri, type: 'local' | 'remote' | 'global'): Promise<TestingConfig | undefined> {
72 | try {
73 | // Check if file exists
74 | await workspace.fs.stat(testingConfigUri);
75 | } catch (error: any) {
76 | Logger.log(LogLevel.Info, `No ${type} testing configuration found at ${testingConfigUri.toString()}`);
77 | return;
78 | }
79 |
80 | try {
81 | // Read and parse file
82 | let testingConfig;
83 | if (type === 'local' || type === 'global') {
84 | testingConfig = await workspace.fs.readFile(testingConfigUri);
85 | } else {
86 | const ibmi = getInstance();
87 | const connection = ibmi!.getConnection();
88 | const content = connection.getContent();
89 |
90 | const parsedPath = connection.parserMemberPath(testingConfigUri.path);
91 | testingConfig = await content.downloadMemberContent(parsedPath.library, parsedPath.file, parsedPath.name);
92 | }
93 |
94 | return JSON.parse(testingConfig.toString()) as TestingConfig;
95 | } catch (error: any) {
96 | Logger.logWithNotification(LogLevel.Error, `Failed to read testing configuration`, `${testingConfigUri} - ${error}`);
97 | return;
98 | }
99 | }
100 | }
--------------------------------------------------------------------------------
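Note the precedence implied by lodash.merge({}, globalConfig, localConfig) in getLocalConfig(): later sources win, so a testing.json next to the test source overrides the workspace-level .vscode/testing.json. A small sketch of that behaviour (the CCSID values are illustrative only):

    import lodash from "lodash";

    const globalConfig = { rpgunit: { rucrtrpg: { tgtCcsid: 37, dbgView: "*SOURCE" } } };
    const localConfig = { rpgunit: { rucrtrpg: { tgtCcsid: 1252 } } };

    // Deep merge: tgtCcsid comes from localConfig, dbgView is preserved from globalConfig.
    const merged = lodash.merge({}, globalConfig, localConfig);
    // merged.rpgunit.rucrtrpg => { tgtCcsid: 1252, dbgView: "*SOURCE" }
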
/src/configuration.ts:
--------------------------------------------------------------------------------
1 | import { ConfigurationTarget, LogLevel, workspace } from "vscode";
2 | import { Logger } from "./logger";
3 |
4 | export interface libraryListValidation {
5 | "RPGUNIT": boolean;
6 | "QDEVTOOLS": boolean;
7 | }
8 |
9 | type ValueType = string | string[] | libraryListValidation;
10 |
11 | export enum Section {
12 | productLibrary = 'productLibrary',
13 | testSourceFiles = 'testSourceFiles',
14 | libraryListValidation = 'libraryListValidation',
15 | runOrder = 'runOrder',
16 | libraryList = 'libraryList',
17 | jobDescription = 'jobDescription',
18 | jobDescriptionLibrary = 'jobDescriptionLibrary',
19 | reportDetail = 'reportDetail',
20 | createReport = 'createReport',
21 | reclaimResources = 'reclaimResources'
22 | }
23 |
24 | export const defaultConfigurations: { [T in Section]: ValueType } = {
25 | [Section.productLibrary]: 'RPGUNIT',
26 | [Section.testSourceFiles]: ['QTESTSRC'],
27 | [Section.libraryListValidation]: {
28 | "RPGUNIT": true,
29 | "QDEVTOOLS": true
30 | },
31 | [Section.runOrder]: '*API',
32 | [Section.libraryList]: '*CURRENT',
33 | [Section.jobDescription]: '*DFT',
34 | [Section.jobDescriptionLibrary]: '',
35 | [Section.reportDetail]: '*BASIC',
36 | [Section.createReport]: '*ALLWAYS',
37 | [Section.reclaimResources]: '*NO'
38 | };
39 |
40 | export namespace Configuration {
41 | export const group: string = 'IBM i Testing';
42 |
43 | export async function initialize(): Promise<void> {
44 | const configurations: { [key: string]: ValueType } = {};
45 |
46 | for (const section of Object.values(Section)) {
47 | let value = Configuration.get(section);
48 | if (value === undefined || (Array.isArray(value) && value.length === 0)) {
49 | value = defaultConfigurations[section];
50 | await Configuration.set(section, value);
51 | }
52 |
53 | configurations[section] = value;
54 | }
55 |
56 | Logger.log(LogLevel.Info, `Detected configurations:\n${JSON.stringify(configurations, null, 2)}`);
57 | }
58 |
59 | export function get<T extends ValueType>(section: Section): T | undefined {
60 | return workspace.getConfiguration(Configuration.group).get(section) as T;
61 | }
62 |
63 | export function getOrFallback<T extends ValueType>(section: Section): T {
64 | const value = get(section);
65 | if (value === undefined) {
66 | return defaultConfigurations[section] as T;
67 | }
68 |
69 | return value;
70 | }
71 |
72 | export async function set(section: Section, value: any): Promise<void> {
73 | return await workspace.getConfiguration(Configuration.group).update(section, value, ConfigurationTarget.Global);
74 | }
75 | }
--------------------------------------------------------------------------------
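A usage sketch of the accessors above; this mirrors how components/rpgUnit.ts reads the product library and falls back to the packaged default when the setting is unset.

    import { Configuration, Section } from "./configuration";

    // Returns 'RPGUNIT' (the defaultConfigurations entry) when the user has not set the option.
    const productLibrary = Configuration.getOrFallback<string>(Section.productLibrary);

    // Returns undefined when the setting is absent, leaving fallback handling to the caller.
    const runOrder = Configuration.get<string>(Section.runOrder);
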
/src/extension.ts:
--------------------------------------------------------------------------------
1 | import { ExtensionContext, LogLevel, workspace } from "vscode";
2 | import { IBMiTestManager } from "./manager";
3 | import { getComponentRegistry, getInstance, loadBase } from "./api/ibmi";
4 | import { Configuration, Section } from "./configuration";
5 | import { Logger } from "./logger";
6 | import IBMi from "@halcyontech/vscode-ibmi-types/api/IBMi";
7 | import { RPGUnit } from "./components/rpgUnit";
8 | import { CodeCov } from "./components/codeCov";
9 | import { Utils } from "./utils";
10 | import * as tmp from "tmp";
11 |
12 | export let manager: IBMiTestManager | undefined;
13 | let userLibraryList: string[] | undefined;
14 |
15 | export function activate(context: ExtensionContext) {
16 | console.log('Congratulations, your extension "vscode-ibmi-testing" is now active!');
17 | const installedVersion = context.extension.packageJSON.version;
18 | Logger.log(LogLevel.Info, `IBM i Testing (v${installedVersion}) extension activated!`);
19 |
20 | // Load Code4i API
21 | loadBase();
22 | const ibmi = getInstance();
23 |
24 | // Initialize configurations
25 | Configuration.initialize();
26 | workspace.onDidChangeConfiguration(async event => {
27 | if (event.affectsConfiguration(Configuration.group)) {
28 | Logger.log(LogLevel.Info, `Configurations changed`);
29 | await Configuration.initialize();
30 | }
31 |
32 | if (event.affectsConfiguration(`${Configuration.group}.${Section.productLibrary}`)) {
33 | const connection = ibmi!.getConnection();
34 | const componentManager = connection?.getComponentManager();
35 | await componentManager?.getRemoteState(RPGUnit.ID);
36 | }
37 |
38 | if (event.affectsConfiguration(`${Configuration.group}.${Section.testSourceFiles}`)) {
39 | if (manager) {
40 | await manager.refreshTests();
41 | }
42 | }
43 | });
44 | Utils.onCodeForIBMiConfigurationChange('connectionSettings', async () => {
45 | const connection = ibmi!.getConnection();
46 | if (connection) {
47 | const config = connection.getConfig();
48 | const newLibraryList = config.libraryList;
49 |
50 | if (newLibraryList !== userLibraryList) {
51 | Logger.log(LogLevel.Info, `Library list changed: ${userLibraryList}`);
52 | userLibraryList = newLibraryList;
53 |
54 | if (manager) {
55 | await manager.refreshTests();
56 | }
57 | }
58 | }
59 | });
60 |
61 | // Register components
62 | const rpgUnit = new RPGUnit();
63 | const codeCov = new CodeCov();
64 | const componentRegistry = getComponentRegistry();
65 | componentRegistry?.registerComponent(context, rpgUnit);
66 | componentRegistry?.registerComponent(context, codeCov);
67 |
68 | // Subscribe to IBM i connect and disconnect events
69 | let connection: IBMi | undefined;
70 | ibmi!.subscribe(context, 'connected', 'Load IBM i Test Manager', async () => {
71 | connection = ibmi!.getConnection();
72 | Logger.log(LogLevel.Debug, `Connected to ${connection.currentUser}@${connection.currentHost}`);
73 |
74 | if (!manager) {
75 | manager = new IBMiTestManager(context);
76 | }
77 |
78 | const config = connection.getConfig();
79 | userLibraryList = config.libraryList;
80 | });
81 | ibmi!.subscribe(context, 'disconnected', 'Dispose IBM i Test Manager', async () => {
82 | if (connection) {
83 | Logger.log(LogLevel.Debug, `Disconnected from ${connection.currentUser}@${connection.currentHost}`);
84 | }
85 |
86 | // Clean up test manager
87 | if (manager) {
88 | manager.controller.dispose();
89 | manager = undefined;
90 | }
91 |
92 | // Clean up cache
93 | userLibraryList = undefined;
94 |
95 | // TODO: Handle disposing of tests mid execution
96 | });
97 |
98 | // Miscellaneous setup
99 | tmp.setGracefulCleanup();
100 | }
101 |
102 | export function deactivate() {
103 | Logger.log(LogLevel.Info, 'IBM i Testing extension deactivated!');
104 | }
105 |
--------------------------------------------------------------------------------
/src/fileCoverage.ts:
--------------------------------------------------------------------------------
1 | import { CancellationToken, DeclarationCoverage, FileCoverage, Position, StatementCoverage, TestCoverageCount, TestRun, Uri } from "vscode";
2 | import { CoverageData } from "./types";
3 |
4 | export class IBMiFileCoverage extends FileCoverage {
5 | public isStatementCoverage: boolean;
6 | public readonly lines: StatementCoverage[] = [];
7 | public readonly procedures: DeclarationCoverage[] = [];
8 |
9 | constructor(uri: Uri, coverageData: CoverageData, isStatementCoverage: boolean) {
10 | super(uri, new TestCoverageCount(0, 0));
11 | this.isStatementCoverage = isStatementCoverage;
12 | this.addCoverage(coverageData, isStatementCoverage);
13 | }
14 |
15 | addCoverage(coverageData: CoverageData, isStatementCoverage: boolean) {
16 | for (const [line, executed] of Object.entries(coverageData.coverage.activeLines)) {
17 | const linePosition = new Position(Number(line) - 1, 0);
18 |
19 | if (isStatementCoverage) {
20 | const existingLineIndex = this.lines.findIndex(line => (line.location as Position).isEqual(linePosition));
21 | if (existingLineIndex >= 0) {
22 | const isPreviouslyExecuted = (this.lines[existingLineIndex].executed as boolean);
23 | this.lines[existingLineIndex].executed = isPreviouslyExecuted || executed;
24 | if (!isPreviouslyExecuted) {
25 | this.statementCoverage.covered += executed ? 1 : 0;
26 | }
27 | } else {
28 | this.lines.push(new StatementCoverage(executed, linePosition));
29 | this.statementCoverage.covered += executed ? 1 : 0;
30 | this.statementCoverage.total++;
31 | }
32 | } else {
33 | if (!this.declarationCoverage) {
34 | this.declarationCoverage = new TestCoverageCount(0, 0);
35 | }
36 |
37 | // TODO: What to set for declaration coverage name - maybe use coverageData.coverage.signitures[Number(line) - 1]
38 | const existingProcedureIndex = this.procedures.findIndex(procedure => (procedure.location as Position).isEqual(linePosition));
39 | if (existingProcedureIndex >= 0) {
40 | const isPreviouslyExecuted = (this.procedures[existingProcedureIndex].executed as boolean);
41 | this.procedures[existingProcedureIndex].executed = isPreviouslyExecuted || executed;
42 | if (!isPreviouslyExecuted) {
43 | this.declarationCoverage.covered += executed ? 1 : 0;
44 | }
45 | } else {
46 | this.procedures.push(new DeclarationCoverage(line, executed, linePosition));
47 | this.declarationCoverage.covered += executed ? 1 : 0;
48 | this.declarationCoverage.total++;
49 | }
50 | }
51 | }
52 | }
53 |
54 | static async loadDetailedCoverage(testRun: TestRun, fileCoverage: FileCoverage, token: CancellationToken) {
55 | if (fileCoverage instanceof IBMiFileCoverage) {
56 | if (fileCoverage.isStatementCoverage) {
57 | return fileCoverage.lines;
58 | } else if (fileCoverage.procedures.length > 0) {
59 | return fileCoverage.procedures;
60 | }
61 | }
62 |
63 | return [];
64 | };
65 | }
--------------------------------------------------------------------------------
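The class above translates the runner's coverage data into VS Code's coverage model: every entry in activeLines becomes a StatementCoverage for line coverage, or a DeclarationCoverage for procedure coverage. A minimal sketch of how a CoverageData value (shape defined in src/types.ts; the values below are made up) feeds into it:

    // Illustrative only: hand-built CoverageData with two active lines, one executed.
    import { Uri } from "vscode";
    import { IBMiFileCoverage } from "./fileCoverage";
    import { CoverageData } from "./types";

    const sample: CoverageData = {
        basename: "math.rpgle",
        path: "/home/user/builds/rpg-utils/qrpglesrc/math.rpgle",
        localPath: "/projects/rpg-utils/qrpglesrc/math.rpgle",
        coverage: {
            signitures: [],
            lineString: "",
            activeLines: { 12: true, 15: false },   // line 12 executed, line 15 not
            percentRan: "50"
        }
    };

    // Statement (line) coverage: total = 2, covered = 1
    const fileCoverage = new IBMiFileCoverage(Uri.file(sample.localPath), sample, true);
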
/src/github.ts:
--------------------------------------------------------------------------------
1 | import { Octokit } from "octokit";
2 | import * as fs from 'fs/promises';
3 | import { components } from "@octokit/openapi-types";
4 | import fetch from "node-fetch";
5 | import * as path from "path";
6 |
7 | export interface Response<T> {
8 | data: T,
9 | error?: string
10 | }
11 | export type Release = components["schemas"]["release"];
12 | export type ReleaseAsset = components["schemas"]["release-asset"];
13 |
14 | export namespace GitHub {
15 | export const OWNER = 'tools-400';
16 | export const REPO = 'irpgunit';
17 | export const ASSET_NAME = 'RPGUNIT.SAVF';
18 |
19 | export async function getReleases(): Promise<Response<Release[]>> {
20 | const releases: Response<Release[]> = {
21 | data: []
22 | };
23 |
24 | try {
25 | const octokit = new Octokit();
26 | const response = await octokit.rest.repos.listReleases({
27 | owner: OWNER,
28 | repo: REPO
29 | });
30 |
31 | if (response.status === 200) {
32 | releases.data = response.data;
33 | } else {
34 | releases.error = String(response.status);
35 | }
36 | } catch (error: any) {
37 | releases.error = error.message ? error.message : error;
38 | }
39 |
40 | return releases;
41 | }
42 |
43 | export async function downloadReleaseAsset(asset: ReleaseAsset, downloadDirectory: string): Promise<Response<boolean>> {
44 | const isDownloaded: Response<boolean> = {
45 | data: false
46 | };
47 |
48 | try {
49 | // Fetch asset
50 | const response = await fetch(asset.browser_download_url);
51 | const buffer = await response.arrayBuffer();
52 |
53 | // Download asset to specified path
54 | if (response.status === 200) {
55 | const filePath = path.join(downloadDirectory, asset.name);
56 | await fs.writeFile(filePath, Buffer.from(buffer));
57 | isDownloaded.data = true;
58 | } else {
59 | isDownloaded.error = response.statusText;
60 | }
61 | } catch (error: any) {
62 | isDownloaded.error = error.message ? error.message : error;
63 | }
64 |
65 | return isDownloaded;
66 | }
67 | }
--------------------------------------------------------------------------------
/src/logger.ts:
--------------------------------------------------------------------------------
1 | import { LogLevel, LogOutputChannel, window } from "vscode";
2 |
3 | export class Logger {
4 | private static logOutputChannel: LogOutputChannel = window.createOutputChannel('IBM i Testing', { log: true });
5 |
6 | public static log(level: LogLevel, message: string): void {
7 | switch (level) {
8 | case LogLevel.Trace:
9 | this.logOutputChannel.trace(message);
10 | break;
11 | case LogLevel.Debug:
12 | this.logOutputChannel.debug(message);
13 | break;
14 | case LogLevel.Info:
15 | this.logOutputChannel.info(message);
16 | break;
17 | case LogLevel.Warning:
18 | this.logOutputChannel.warn(message);
19 | break;
20 | case LogLevel.Error:
21 | this.logOutputChannel.error(message);
22 | break;
23 | }
24 | }
25 |
26 | public static logWithNotification(level: LogLevel, message: string, details?: string, buttons?: { label: string, func: () => Promise<void> }[]): void {
27 | this.log(level, details ? `${message}: ${details}` : message);
28 |
29 | let showMessage;
30 | switch (level) {
31 | case LogLevel.Error:
32 | showMessage = window.showErrorMessage;
33 | break;
34 | case LogLevel.Warning:
35 | showMessage = window.showWarningMessage;
36 | break;
37 | default:
38 | showMessage = window.showInformationMessage;
39 | }
40 |
41 | const buttonLabels = (buttons ? buttons.map((button) => button.label) : []);
42 | const items = ['View Output', ...buttonLabels];
43 | showMessage(message, ...items).then((value) => {
44 | if (value === 'View Output') {
45 | this.logOutputChannel.show();
46 | } else if (value !== undefined && buttons) {
47 | const selectedButton = buttons.find(button => button.label === value);
48 | if (selectedButton) {
49 | selectedButton.func();
50 | }
51 | }
52 | });
53 | }
54 |
55 | public static show() {
56 | this.logOutputChannel.show();
57 | }
58 | }
--------------------------------------------------------------------------------
/src/manager.ts:
--------------------------------------------------------------------------------
1 | import { CancellationToken, ExtensionContext, LogLevel, RelativePattern, TestController, TestItem, TestRunProfileKind, TestRunRequest, tests, TestTag, TextDocument, TextDocumentChangeEvent, Uri, window, workspace, WorkspaceFolder } from "vscode";
2 | import { TestFile } from "./testFile";
3 | import * as path from "path";
4 | import { IBMiTestRunner } from "./runner";
5 | import { TestDirectory } from "./testDirectory";
6 | import { Logger } from "./logger";
7 | import { IBMiFileCoverage } from "./fileCoverage";
8 | import { TestObject } from "./testObject";
9 | import { getInstance } from "./api/ibmi";
10 | import { IBMiTestData } from "./types";
11 | import { Utils } from "./utils";
12 | import { Configuration, Section } from "./configuration";
13 |
14 | export class IBMiTestManager {
15 | public static CONTROLLER_ID = 'IBMi';
16 | public static CONTROLLER_LABEL = 'IBM i Testing';
17 | public static RUN_PROFILE_LABEL = 'Run Tests';
18 | public static COMPILE_AND_RUN_PROFILE_LABEL = 'Run Tests (Compile)';
19 | public static LINE_COVERAGE_PROFILE_LABEL = 'Run Tests with Line Coverage';
20 | public static COMPILE_AND_LINE_COVERAGE_PROFILE_LABEL = 'Run Tests with Line Coverage (Compile)';
21 | public static PROCEDURE_COVERAGE_PROFILE_LABEL = 'Run Tests with Procedure Coverage';
22 | public static COMPILE_AND_PROCEDURE_COVERAGE_PROFILE_LABEL = 'Run Tests with Procedure Coverage (Compile)';
23 | public context: ExtensionContext;
24 | public testData: WeakMap<TestItem, IBMiTestData>;
25 | public controller: TestController;
26 |
27 | constructor(context: ExtensionContext) {
28 | this.context = context;
29 | this.testData = new WeakMap();
30 | this.controller = tests.createTestController(IBMiTestManager.CONTROLLER_ID, IBMiTestManager.CONTROLLER_LABEL);
31 | this.controller.resolveHandler = async (item: TestItem | undefined) => {
32 | if (!item) {
33 | this.startWatchingWorkspace();
34 | return;
35 | }
36 |
37 | await this.loadFileOrMember(item.uri!, true);
38 | };
39 | this.controller.refreshHandler = async () => {
40 | await this.refreshTests();
41 | };
42 |
43 | // Profiles for running tests
44 | [IBMiTestManager.RUN_PROFILE_LABEL, IBMiTestManager.COMPILE_AND_RUN_PROFILE_LABEL].forEach((profile, index) => {
45 | const forceCompile = index === 1;
46 | this.controller.createRunProfile(profile, TestRunProfileKind.Run, async (request: TestRunRequest, token: CancellationToken) => {
47 | const runner = new IBMiTestRunner(this, request, forceCompile, token);
48 | await runner.runHandler();
49 | }, !forceCompile, undefined, false);
50 | });
51 |
52 | // Profiles for running tests with line coverage
53 | [IBMiTestManager.LINE_COVERAGE_PROFILE_LABEL, IBMiTestManager.COMPILE_AND_LINE_COVERAGE_PROFILE_LABEL].forEach((profile, index) => {
54 | const forceCompile = index === 1;
55 | const lineCoverageProfile = this.controller.createRunProfile(profile, TestRunProfileKind.Coverage, async (request: TestRunRequest, token: CancellationToken) => {
56 | const runner = new IBMiTestRunner(this, request, forceCompile, token);
57 | await runner.runHandler();
58 | }, !forceCompile, undefined, false);
59 | lineCoverageProfile.loadDetailedCoverage = IBMiFileCoverage.loadDetailedCoverage;
60 | });
61 |
62 | // Profiles for running tests with procedure coverage
63 | [IBMiTestManager.PROCEDURE_COVERAGE_PROFILE_LABEL, IBMiTestManager.COMPILE_AND_PROCEDURE_COVERAGE_PROFILE_LABEL].forEach((profile, index) => {
64 | const forceCompile = index === 1;
65 | const procedureCoverageProfile = this.controller.createRunProfile(profile, TestRunProfileKind.Coverage, async (request: TestRunRequest, token: CancellationToken) => {
66 | const runner = new IBMiTestRunner(this, request, forceCompile, token);
67 | await runner.runHandler();
68 | }, false, undefined, false);
69 | procedureCoverageProfile.loadDetailedCoverage = IBMiFileCoverage.loadDetailedCoverage;
70 | });
71 |
72 | context.subscriptions.push(
73 | this.controller,
74 | workspace.onDidOpenTextDocument(async (document: TextDocument) => {
75 | const uri = document.uri;
76 | await this.loadFileOrMember(uri, true);
77 | }),
78 | workspace.onDidChangeTextDocument(async (event: TextDocumentChangeEvent) => {
79 | const uri = event.document.uri;
80 | await this.loadFileOrMember(uri, true, true);
81 | })
82 | );
83 |
84 | this.loadInitialTests();
85 | }
86 |
87 | async refreshTests(): Promise<void> {
88 | // Remove all existing test items
89 | this.controller.items.forEach((item) => {
90 | this.controller.items.delete(item.id);
91 | });
92 | this.testData = new WeakMap();
93 |
94 | // Reload all test items
95 | await this.loadInitialTests();
96 | }
97 |
98 | async loadInitialTests(): Promise<void> {
99 | // Load local tests from workspace folders
100 | const workspaceTestPatterns = this.getWorkspaceTestPatterns();
101 | for await (const workspaceTestPattern of workspaceTestPatterns) {
102 | Logger.log(LogLevel.Info, `Searching for tests in workspace folder: ${workspaceTestPattern.workspaceFolder.name}`);
103 | const fileUris = await workspace.findFiles(workspaceTestPattern.pattern);
104 | for (const uri of fileUris) {
105 | await this.loadFileOrMember(uri, false);
106 | }
107 | }
108 |
109 | // Fully load test cases for opened documents
110 | const visibleTextEditors = window.visibleTextEditors;
111 | for await (const document of workspace.textDocuments) {
112 | const isVisible = visibleTextEditors.some((editor) => editor.document.uri.toString() === document.uri.toString());
113 | if (isVisible) {
114 | const uri = document.uri;
115 | await this.loadFileOrMember(uri, true);
116 | }
117 | }
118 |
119 | const testSuffixes = Utils.getTestSuffixes({ rpg: true, cobol: true });
120 |
121 | const ibmi = getInstance();
122 | const connection = ibmi!.getConnection();
123 | const content = connection.getContent();
124 |
125 | // Load tests from library list
126 | const workspaceFolders = workspace.workspaceFolders;
127 | const workspaceFolder = workspaceFolders && workspaceFolders.length > 0 ? workspaceFolders[0] : undefined;
128 | const libraryList = await ibmi!.getLibraryList(connection, workspaceFolder);
129 | const testSourceFiles = Configuration.getOrFallback(Section.testSourceFiles);
130 | const libraries: string[] = Array.from(new Set([libraryList.currentLibrary, ...libraryList.libraryList]));
131 | Logger.log(LogLevel.Info, `Searching for tests in library list: ${libraries.join('.LIB, ')}.LIB`);
132 | for await (const library of libraries) {
133 | for await (const testSourceFile of testSourceFiles) {
134 | const testMembers = await content.getMemberList({
135 | library: library,
136 | sourceFile: testSourceFile,
137 | extensions: testSuffixes.qsys.map((suffix) => suffix.slice(1)).join(','),
138 | filterType: 'simple',
139 | sort: { order: 'name' }
140 | });
141 |
142 | for (const testMember of testMembers) {
143 | const memberPath = testMember.asp ?
144 | path.posix.join(testMember.asp, testMember.library, testMember.file, `${testMember.name}.${testMember.extension}`) :
145 | path.posix.join(testMember.library, testMember.file, `${testMember.name}.${testMember.extension}`);
146 | const uri = Uri.from({ scheme: 'member', path: `/${memberPath}` });
147 | await this.loadFileOrMember(uri, false);
148 | }
149 | }
150 | }
151 | }
152 |
153 | private getWorkspaceTestPatterns(): { workspaceFolder: WorkspaceFolder; pattern: RelativePattern; }[] {
154 | const workspaceFolders = workspace.workspaceFolders;
155 | if (!workspaceFolders) {
156 | return [];
157 | }
158 |
159 | const testSuffixes = Utils.getTestSuffixes({ rpg: true, cobol: true });
160 | const pattern = testSuffixes.ifs.flatMap(suffix => [suffix, suffix.toLowerCase()]).join(',');
161 |
162 | return workspaceFolders.map((workspaceFolder: WorkspaceFolder) => {
163 | return {
164 | workspaceFolder,
165 | pattern: new RelativePattern(workspaceFolder, `**/*{${pattern}}`)
166 | };
167 | });
168 | }
169 |
170 | private startWatchingWorkspace(): void {
171 | const workspaceTestPatterns = this.getWorkspaceTestPatterns();
172 |
173 | for (const workspaceTestPattern of workspaceTestPatterns) {
174 | const watcher = workspace.createFileSystemWatcher(workspaceTestPattern.pattern);
175 | this.context.subscriptions.push(watcher);
176 |
177 | watcher.onDidCreate(async (uri: Uri) => {
178 | await this.loadFileOrMember(uri, false);
179 | });
180 | // TODO: Handle remote source member changes
181 | watcher.onDidChange(async (uri: Uri) => {
182 | await this.loadFileOrMember(uri, true, true);
183 | });
184 | watcher.onDidDelete((uri: Uri) => {
185 | this.deleteTestItem(uri);
186 | });
187 | }
188 | }
189 |
190 | private getOrCreateFile(uri: Uri): { item: TestItem; data: TestFile; } | undefined {
191 | // Check if test item already exists
192 | const allTestItems = this.getFlattenedTestItems();
193 | const existingItem = allTestItems.find((item) => item.uri!.toString() === uri.toString());
194 | if (existingItem) {
195 | return {
196 | item: existingItem,
197 | data: this.testData.get(existingItem) as TestFile
198 | };
199 | } else {
200 | if (uri.scheme === 'file') {
201 | // Get workspace folder for the file
202 | const workspaceFolder = workspace.getWorkspaceFolder(uri);
203 | if (!workspaceFolder) {
204 | return;
205 | }
206 |
207 | // Create workspace test item if it does not exist
208 | let workspaceItem = this.controller.items.get(workspaceFolder.uri.toString());
209 | if (!workspaceItem) {
210 | workspaceItem = this.createTestItem(workspaceFolder.uri, path.parse(workspaceFolder.uri.path).base, true);
211 | this.controller.items.add(workspaceItem);
212 | Logger.log(LogLevel.Info, `Created workspace test item for ${workspaceFolder.uri.toString()}`);
213 |
214 | const data = new TestDirectory(workspaceItem);
215 | this.testData.set(workspaceItem, data);
216 | }
217 |
218 | // Create directory test items if they do not exist
219 | let parentItem = workspaceItem;
220 | const relativePathToTest = path.relative(workspaceFolder.uri.fsPath, path.parse(uri.fsPath).dir);
221 | const directoryNames = relativePathToTest.split(path.sep).filter((directoryName) => directoryName !== '');
222 | for (const directoryName of directoryNames) {
223 | const directoryUri = Uri.joinPath(workspaceFolder.uri, directoryName);
224 | let directoryItem = parentItem.children.get(directoryUri.toString());
225 | if (!directoryItem) {
226 | directoryItem = this.createTestItem(directoryUri, directoryName, true);
227 | parentItem.children.add(directoryItem);
228 | Logger.log(LogLevel.Info, `Created directory test item for ${directoryUri.toString()}`);
229 |
230 | const data = new TestDirectory(directoryItem);
231 | this.testData.set(directoryItem, data);
232 | }
233 |
234 | parentItem = directoryItem;
235 | }
236 |
237 | // Create file test item
238 | const fileItem = this.createTestItem(uri, path.parse(uri.path).base, true);
239 | parentItem.children.add(fileItem);
240 | Logger.log(LogLevel.Info, `Created file test item for ${uri.toString()}`);
241 |
242 | const data = new TestFile(fileItem, { workspaceItem });
243 | this.testData.set(fileItem, data);
244 |
245 | return {
246 | item: fileItem,
247 | data: data
248 | };
249 | } else if (uri.scheme === 'member') {
250 | let partPath: string = '';
251 | let parentPartItem: TestItem | undefined;
252 | let libraryItem: TestItem | undefined;
253 | const parts = uri.path.split('/');
254 | for (let index = 0; index < parts.length; index++) {
255 | const part = parts[index];
256 | if (part !== '') {
257 | const isMember = (index === parts.length - 1);
258 |
259 | // Construct uri
260 | partPath += '/' + part;
261 | const partUri = isMember ? uri : Uri.from({ scheme: 'object', path: partPath });
262 |
263 | // Create test item for part
264 | let partItem = parentPartItem ?
265 | parentPartItem.children.get(partUri.toString()) :
266 | this.controller.items.get(partUri.toString());
267 | if (!partItem) {
268 | partItem = this.createTestItem(partUri, part, false);
269 | if (parentPartItem) {
270 | parentPartItem.children.add(partItem);
271 | } else {
272 | this.controller.items.add(partItem);
273 | }
274 | parentPartItem = partItem;
275 |
276 | if (isMember) {
277 | Logger.log(LogLevel.Info, `Created member test item for ${partUri.toString()}`);
278 |
279 | const data = new TestFile(partItem, { libraryItem: libraryItem });
280 | this.testData.set(partItem, data);
281 |
282 | return {
283 | item: partItem,
284 | data: data
285 | };
286 | } else {
287 | Logger.log(LogLevel.Info, `Created object test item for ${partUri.toString()}`);
288 | const data = new TestObject(partItem);
289 | this.testData.set(partItem, data);
290 |
291 | if (!libraryItem) {
292 | libraryItem = partItem;
293 | }
294 | }
295 | } else {
296 | parentPartItem = partItem;
297 | }
298 | }
299 | }
300 | }
301 | }
302 | }
303 |
304 | private createTestItem(uri: Uri, label: string, isLocal: boolean): TestItem {
305 | const testItem = this.controller.createTestItem(uri.toString(), label, uri);
306 | testItem.canResolveChildren = true;
307 |
308 | if (isLocal) {
309 | testItem.tags = [new TestTag('local')];
310 | } else {
311 | testItem.tags = [new TestTag('members')];
312 | }
313 |
314 | return testItem;
315 | }
316 |
317 | private deleteTestItem(uri: Uri) {
318 | const allTestItems = this.getFlattenedTestItems();
319 | const deletedItem = allTestItems.find((item) => item.uri?.toString() === uri.toString());
320 |
321 | if (!deletedItem) {
322 | // File not found in test collection
323 | return;
324 | }
325 |
326 | // Delete item associated with the file
327 | let parentItem = deletedItem.parent;
328 | parentItem?.children.delete(deletedItem.id);
329 | this.testData.delete(deletedItem);
330 | Logger.log(LogLevel.Info, `Deleted file test item for ${uri.toString()}`);
331 |
332 | // Recursively delete empty parents
333 | while (parentItem && parentItem.children.size === 0) {
334 |
335 | const grandParentItem = parentItem.parent;
336 | if (!grandParentItem) {
337 | // Delete workspace item when no grandparent
338 | this.controller.items.delete(parentItem.id);
339 | this.testData.delete(parentItem);
340 |
341 | const rootType = parentItem.uri?.scheme === 'file' ? 'workspace' : 'object';
342 | Logger.log(LogLevel.Info, `Deleted ${rootType} test item for ${parentItem.uri?.toString()}`);
343 | break;
344 | }
345 |
346 | grandParentItem.children.delete(parentItem.id);
347 | this.testData.delete(parentItem);
348 | parentItem = grandParentItem;
349 | const intermediateType = parentItem.uri?.scheme === 'file' ? 'directory' : 'object';
350 | Logger.log(LogLevel.Info, `Deleted ${intermediateType} test item for ${parentItem.uri?.toString()}`);
351 | }
352 | }
353 |
354 | public getFlattenedTestItems(): TestItem[] {
355 | const result: TestItem[] = [];
356 |
357 | function gatherChildren(item: TestItem) {
358 | result.push(item);
359 | for (const [, child] of item.children) {
360 | gatherChildren(child);
361 | }
362 | }
363 |
364 | for (const [, item] of this.controller.items) {
365 | gatherChildren(item);
366 | }
367 |
368 | return result;
369 | }
370 |
371 |
372 | private async loadFileOrMember(uri: Uri, loadTestCases: boolean, isChanged: boolean = false): Promise<void> {
373 | // Get test suffixes based on the URI scheme
374 | const testSuffixes = Utils.getTestSuffixes({ rpg: true, cobol: true });
375 | let uriSpecificSuffixes: string[];
376 | if (uri.scheme === 'file') {
377 | uriSpecificSuffixes = testSuffixes.ifs;
378 | } else if (uri.scheme === 'member') {
379 | uriSpecificSuffixes = testSuffixes.qsys;
380 | } else {
381 | return;
382 | }
383 |
384 | // Check if the URI ends with any of the URI-specific suffixes
385 | if (!uriSpecificSuffixes.some(suffix => uri.path.toLocaleUpperCase().endsWith(suffix))) {
386 | return;
387 | }
388 |
389 | const result = this.getOrCreateFile(uri);
390 | if (result) {
391 | if (isChanged) {
392 | result.data.isLoaded = false;
393 | result.data.isCompiled = false;
394 | }
395 |
396 | if (loadTestCases) {
397 | await result.data.load();
398 | }
399 | }
400 | }
401 | }
--------------------------------------------------------------------------------
/src/storage.ts:
--------------------------------------------------------------------------------
1 | import { getInstance } from "./api/ibmi";
2 | import { TestStorage } from "./types";
3 |
4 | export namespace IBMiTestStorage {
5 | const TEST_OUTPUT_DIRECTORY: string = 'vscode-ibmi-testing';
6 | const RPGUNIT_DIRECTORY: string = `RPGUNIT`;
7 | const CODECOV_DIRECTORY: string = `CODECOV`;
8 |
9 | export async function setupTestStorage(): Promise<void> {
10 | // Setup test output directory
11 | const ibmi = getInstance();
12 | const connection = ibmi!.getConnection();
13 | const config = connection.getConfig();
14 | const testStorage = [
15 | `${config.tempDir}/${TEST_OUTPUT_DIRECTORY}/${RPGUNIT_DIRECTORY}`,
16 | `${config.tempDir}/${TEST_OUTPUT_DIRECTORY}/${CODECOV_DIRECTORY}`
17 | ];
18 | for (const storage of testStorage) {
19 | await connection.sendCommand({ command: `mkdir -p ${storage}` });
20 | await connection.sendCommand({ command: `chmod -R 777 ${storage}` });
21 | }
22 | }
23 |
24 | export function getTestStorage(prefix: string): TestStorage {
25 | const ibmi = getInstance();
26 | const connection = ibmi!.getConnection();
27 | const config = connection.getConfig();
28 |
29 | const time = new Date().getTime();
30 |
31 | return {
32 | RPGUNIT: `${config.tempDir}/${TEST_OUTPUT_DIRECTORY}/${RPGUNIT_DIRECTORY}/${prefix}_${time}.xml`,
33 | CODECOV: `${config.tempDir}/${TEST_OUTPUT_DIRECTORY}/${CODECOV_DIRECTORY}/${prefix}_${time}.cczip`
34 | };
35 | }
36 | }
--------------------------------------------------------------------------------
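A worked example of the paths this module produces, assuming the connection's temporary directory is /tmp and a caller-supplied prefix of RPGUTILS:

    // After setupTestStorage(), these directories exist with 777 permissions:
    //   /tmp/vscode-ibmi-testing/RPGUNIT
    //   /tmp/vscode-ibmi-testing/CODECOV
    // getTestStorage('RPGUTILS') then returns timestamped result paths such as:
    //   {
    //     RPGUNIT: '/tmp/vscode-ibmi-testing/RPGUNIT/RPGUTILS_1717171717171.xml',
    //     CODECOV: '/tmp/vscode-ibmi-testing/CODECOV/RPGUTILS_1717171717171.cczip'
    //   }
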
/src/testCase.ts:
--------------------------------------------------------------------------------
1 | import { TestItem } from "vscode";
2 |
3 | export class TestCase {
4 | item: TestItem;
5 |
6 | constructor(item: TestItem) {
7 | this.item = item;
8 | }
9 | }
--------------------------------------------------------------------------------
/src/testDirectory.ts:
--------------------------------------------------------------------------------
1 | import { TestItem } from "vscode";
2 |
3 | export class TestDirectory {
4 | item: TestItem;
5 |
6 | constructor(item: TestItem) {
7 | this.item = item;
8 | }
9 | }
--------------------------------------------------------------------------------
/src/testFile.ts:
--------------------------------------------------------------------------------
1 | import { commands, DocumentSymbol, LogLevel, SymbolKind, TestItem, TestRun, workspace, WorkspaceFolder } from "vscode";
2 | import { TestCase } from "./testCase";
3 | import { manager } from "./extension";
4 | import { getDeployTools, getInstance } from "./api/ibmi";
5 | import { IBMiTestRunner } from "./runner";
6 | import { TestingConfig, RUCRTRPG, RUCRTCBL } from "./types";
7 | import * as path from "path";
8 | import { ConfigHandler } from "./config";
9 | import { Configuration, Section } from "./configuration";
10 | import { Logger } from "./logger";
11 | import { Utils } from "./utils";
12 | import { TestLogger } from "./testLogger";
13 |
14 | export class TestFile {
15 | static RPGLE_TEST_CASE_REGEX = /^TEST.*$/i;
16 | static COBOL_TEST_CASE_REGEX = /^PROGRAM-ID\. +(TEST.+)$/i;
17 | static textDecoder = new TextDecoder('utf-8');
18 | item: TestItem;
19 | workspaceItem?: TestItem;
20 | libraryItem?: TestItem;
21 | isLoaded: boolean;
22 | isCompiled: boolean;
23 | content: string;
24 | isRPGLE: boolean;
25 | testingConfig?: TestingConfig;
26 |
27 | constructor(item: TestItem, parent: { workspaceItem?: TestItem, libraryItem?: TestItem } = {}) {
28 | this.item = item;
29 | this.workspaceItem = parent.workspaceItem;
30 | this.libraryItem = parent.libraryItem;
31 | this.isLoaded = false;
32 | this.isCompiled = false;
33 | this.content = '';
34 |
35 | const rpgleTestSuffixes = Utils.getTestSuffixes({ rpg: true, cobol: false });
36 | this.isRPGLE = rpgleTestSuffixes.qsys.some(suffix => item.uri!.path.toLocaleUpperCase().endsWith(suffix));
37 | }
38 |
39 | async loadTestingConfig() {
40 | const configHandler = new ConfigHandler();
41 | if (this.item.uri!.scheme === 'file') {
42 | this.testingConfig = await configHandler.getLocalConfig(this.item.uri!);
43 | } else {
44 | this.testingConfig = await configHandler.getRemoteConfig(this.item.uri!);
45 | }
46 | }
47 |
48 | async load(): Promise<void> {
49 | if (!this.isLoaded) {
50 | this.isLoaded = true;
51 |
52 | // Load test file content
53 | try {
54 | const rawContent = await workspace.fs.readFile(this.item.uri!);
55 | this.content = TestFile.textDecoder.decode(rawContent);
56 | } catch (error: any) {
57 | Logger.log(LogLevel.Error, `Failed to read test file ${this.item.label}: ${error}`);
58 | }
59 |
60 | // Load test cases
61 | try {
62 | const childItems: TestItem[] = [];
63 | const documentSymbols = await commands.executeCommand<DocumentSymbol[]>(`vscode.executeDocumentSymbolProvider`, this.item.uri) || [];
64 | for (const documentSymbol of documentSymbols) {
65 | const isTestCase = this.isRPGLE ?
66 | documentSymbol.kind === SymbolKind.Function && TestFile.RPGLE_TEST_CASE_REGEX.test(documentSymbol.name) :
67 | documentSymbol.kind === SymbolKind.Class && documentSymbol.name.match(TestFile.COBOL_TEST_CASE_REGEX)?.[1];
68 |
69 | if (isTestCase) {
70 | const testCaseName = this.isRPGLE ?
71 | documentSymbol.name :
72 | documentSymbol.name.match(TestFile.COBOL_TEST_CASE_REGEX)![1];
73 | const childItem = manager!.controller.createTestItem(`${this.item.uri}/${testCaseName.toLocaleUpperCase()}`, testCaseName, this.item.uri);
74 | childItem.range = documentSymbol.range;
75 |
76 | const data = new TestCase(childItem);
77 | manager!.testData.set(childItem, data);
78 | childItems.push(childItem);
79 | }
80 | }
81 | this.item.children.replace(childItems);
82 | Logger.log(LogLevel.Info, `Loaded test file ${this.item.label} with ${childItems.length} test cases: ${childItems.map(item => item.label).join(', ')}`);
83 | } catch (error) {
84 | Logger.log(LogLevel.Error, `Failed to load test cases from ${this.item.label}: ${error}`);
85 | }
86 | }
87 | }
88 |
89 | async compileTest(runner: IBMiTestRunner, run: TestRun): Promise<void> {
90 | this.isCompiled = false;
91 |
92 | const ibmi = getInstance();
93 | const connection = ibmi!.getConnection();
94 | const content = connection.getContent();
95 | const config = connection.getConfig();
96 |
97 | let workspaceFolder: WorkspaceFolder | undefined;
98 | let deployDirectory: string | undefined;
99 | let tstPgm: { name: string, library: string };
100 | let srcFile: { name: string, library: string } | undefined;
101 | let srcMbr: string | undefined;
102 | let srcStmf: string | undefined;
103 |
104 | const testingConfig = this.testingConfig;
105 | const originalTstPgmBasename = this.item.label;
106 | const newTstPgmName = Utils.getTestName(this.item.uri!.scheme as 'file' | 'member', originalTstPgmBasename, testingConfig);
107 |
108 | if (this.item.uri!.scheme === 'file') {
109 | // Use current library as the test library
110 | workspaceFolder = workspace.getWorkspaceFolder(this.item.uri!)!;
111 | const libraryList = await ibmi!.getLibraryList(connection, workspaceFolder);
112 | const tstLibrary = libraryList?.currentLibrary || config.currentLibrary;
113 |
114 | // Get relative local path to test
115 | const relativePathToTest = path.relative(workspaceFolder.uri.fsPath, this.item.uri!.fsPath).replace(/\\/g, '/');
116 |
117 | // Construct remote path to test
118 | const deployTools = getDeployTools()!;
119 | deployDirectory = deployTools.getRemoteDeployDirectory(workspaceFolder)!;
120 | srcStmf = path.posix.join(deployDirectory, relativePathToTest);
121 |
122 | tstPgm = { name: newTstPgmName, library: tstLibrary };
123 | } else {
124 | const parsedPath = connection.parserMemberPath(this.item.uri!.path);
125 | const tstPgmName = parsedPath.name.toLocaleUpperCase();
126 | const tstLibrary = parsedPath.library;
127 | const srcFileName = parsedPath.file;
128 |
129 | tstPgm = { name: newTstPgmName, library: tstLibrary };
130 | srcFile = { name: srcFileName, library: tstLibrary };
131 | srcMbr = tstPgmName;
132 | }
133 |
134 | let compileParams: RUCRTRPG | RUCRTCBL = {
135 | tstPgm: `${tstPgm.library}/${tstPgm.name}`,
136 | srcFile: srcFile ? `${srcFile.library}/${srcFile.name}` : undefined,
137 | srcMbr: srcMbr,
138 | srcStmf: srcStmf
139 | };
140 |
141 | if (this.isRPGLE) {
142 | compileParams = {
143 | ...compileParams,
144 | ...testingConfig?.rpgunit?.rucrtrpg
145 | };
146 |
147 | if (!(compileParams as RUCRTRPG).rpgPpOpt) {
148 | (compileParams as RUCRTRPG).rpgPpOpt = "*LVL2";
149 | }
150 | } else {
151 | compileParams = {
152 | ...compileParams,
153 | ...testingConfig?.rpgunit?.rucrtcbl
154 | };
155 | }
156 |
157 | // Set TGTCCSID to 37 by default
158 | if (!compileParams.tgtCcsid) {
159 | compileParams.tgtCcsid = 37;
160 | }
161 |
162 | // Set COPTION to *EVENTF by default to be able to retrieve diagnostic messages later
163 | if (!compileParams.cOption || compileParams.cOption.length === 0) {
164 | compileParams.cOption = ["*EVENTF"];
165 | }
166 |
167 | // Set DBGVIEW to *SOURCE by default for code coverage to get proper line numbers
168 | if (!compileParams.dbgView) {
169 | compileParams.dbgView = "*SOURCE";
170 | }
171 |
172 | // Override DBGVIEW to *LIST for SQLRPGLE files
173 | // https://github.com/IBM/vscode-ibmi-testing/issues/95
174 | if (this.item.uri?.fsPath.toLocaleUpperCase().endsWith('.SQLRPGLE')) {
175 | compileParams.dbgView = "*LIST";
176 | }
177 |
178 | if (compileParams.incDir) {
179 | // Resolve relative include directories with the deploy directory for local files
180 | if (workspaceFolder && deployDirectory) {
181 | const resolvedIncDir: string[] = [];
182 | for (const incDir of compileParams.incDir) {
183 | if (!path.isAbsolute(incDir)) {
184 | resolvedIncDir.push(path.posix.join(deployDirectory, incDir));
185 | } else {
186 | resolvedIncDir.push(incDir);
187 | }
188 | }
189 |
190 | compileParams.incDir = resolvedIncDir;
191 | }
192 | } else {
193 | compileParams.incDir = [];
194 | }
195 |
196 | // Add the deploy directory to the include directories
197 | if (deployDirectory) {
198 | compileParams.incDir.push(deployDirectory);
199 | }
200 |
201 | // Wrap all include directories in quotes
202 | compileParams.incDir = compileParams.incDir.map((dir) => `'${dir}'`);
203 |
204 | // Flatten compile parameters and convert to strings
205 | const flattenedCompileParams: any = { ...compileParams };
206 | for (const key of Object.keys(compileParams) as (keyof typeof compileParams)[]) {
207 | const value = compileParams[key];
208 | if (Array.isArray(value)) {
209 | flattenedCompileParams[key] = value.join(' ');
210 | } else if (typeof value === 'number') {
211 | flattenedCompileParams[key] = value.toString();
212 | }
213 | }
214 |
215 | const productLibrary = Configuration.getOrFallback(Section.productLibrary);
216 | const languageSpecificCommand = this.isRPGLE ? 'RUCRTRPG' : 'RUCRTCBL';
217 | const compileCommand = content.toCl(`${productLibrary}/${languageSpecificCommand}`, flattenedCompileParams as any);
218 | Logger.log(LogLevel.Info, `Compiling ${this.item.label}: ${compileCommand}`);
219 |
220 | let compileResult: any;
221 | try {
222 | const env = workspaceFolder ? (await Utils.getEnvConfig(workspaceFolder)) : {};
223 | compileResult = await connection.runCommand({ command: compileCommand, environment: `ile`, env: env });
224 | } catch (error: any) {
225 | TestLogger.logCompilation(run, this.item, 'failed', runner.metrics, [error.message ? error.message : error]);
226 | return;
227 | }
228 |
229 | try {
230 | // Retrieve diagnostics messages
231 | if (compileParams.cOption.includes('*EVENTF')) {
232 | const ext = path.parse(this.item.uri!.path).ext;
233 | await commands.executeCommand('code-for-ibmi.openErrors', {
234 | qualifiedObject: `${compileParams.tstPgm}${ext}`,
235 | workspace: workspaceFolder,
236 | keepDiagnostics: true
237 | });
238 | }
239 | } catch (error: any) {
240 | Logger.log(LogLevel.Error, `Failed to retrieve diagnostics messages: ${error}`);
241 | }
242 |
243 | if (compileResult.stderr.length > 0) {
244 | Logger.log(LogLevel.Error, `${this.item.label} compile error(s):\n${compileResult.stderr}`);
245 | }
246 |
247 | if (compileResult.code === 0) {
248 | TestLogger.logCompilation(run, this.item, 'success', runner.metrics);
249 | this.isCompiled = true;
250 | } else {
251 | TestLogger.logCompilation(run, this.item, 'failed', runner.metrics, compileResult.stderr.split('\n'));
252 | }
253 | }
254 | }
--------------------------------------------------------------------------------
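To make the defaulting in compileTest concrete, here is a hand-traced sketch of the effective RUCRTRPG parameters for a hypothetical local test file; the workspace layout, deploy directory, and current library are assumptions:

    import { RUCRTRPG } from "./types";

    // qtestsrc/math.test.rpgle, deployed to /home/USER/builds/rpg-utils, current library MYLIB,
    // no testing.json overrides:
    const exampleParams: RUCRTRPG = {
        tstPgm: "MYLIB/MATH",
        srcStmf: "/home/USER/builds/rpg-utils/qtestsrc/math.test.rpgle",
        rpgPpOpt: "*LVL2",                               // default for RPG tests
        tgtCcsid: 37,                                    // default TGTCCSID
        cOption: ["*EVENTF"],                            // default so diagnostics can be retrieved
        dbgView: "*SOURCE",                              // *LIST would be used for .SQLRPGLE
        incDir: ["'/home/USER/builds/rpg-utils'"]        // deploy directory, quoted
    };
    // content.toCl() then builds a CL command roughly like (assuming the product library is RPGUNIT):
    //   RPGUNIT/RUCRTRPG TSTPGM(MYLIB/MATH) SRCSTMF('/home/USER/builds/rpg-utils/qtestsrc/math.test.rpgle') RPGPPOPT(*LVL2) ...
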
/src/testLogger.ts:
--------------------------------------------------------------------------------
1 | import { LogLevel, TestMessage, Position, Location, TestRun, TestItem } from "vscode";
2 | import { Logger } from "./logger";
3 | import { CompilationStatus, TestingConfig, TestMetrics } from "./types";
4 | import c from "ansi-colors";
5 | import { Utils } from "./utils";
6 |
7 | export namespace TestLogger {
8 | export function logComponent(run: TestRun, message: string) {
9 | run.appendOutput(c.red(message));
10 | }
11 |
12 | export function logWorkspace(run: TestRun, item: TestItem) {
13 | run.appendOutput(`${c.bgBlue(` WORKSPACE `)} ${item.label} ${c.grey(`(${item.children.size})`)}`);
14 | Logger.log(LogLevel.Info, `Deploying ${item.label}`);
15 | }
16 |
17 | export function logDeployment(run: TestRun, item: TestItem, success: boolean, metrics: TestMetrics) {
18 | if (success) {
19 | metrics.deployments.success++;
20 | run.appendOutput(` ${c.grey(`[ Deployment Successful ]`)}\r\n`);
21 | Logger.log(LogLevel.Info, `Successfully deployed ${item.label}`);
22 | } else {
23 | metrics.deployments.failed++;
24 | run.appendOutput(` ${c.red(`[ Deployment Failed ]`)}\r\n`);
25 | Logger.log(LogLevel.Error, `Failed to deploy ${item.label}`);
26 | }
27 | }
28 |
29 | export function logLibrary(run: TestRun, item: TestItem) {
30 | run.appendOutput(`${c.bgBlue(` LIBRARY `)} ${item.label} ${c.grey(`(${item.children.size})`)}\r\n`);
31 | Logger.log(LogLevel.Info, `Running tests in ${item.label}`);
32 | }
33 |
34 | export function logTestFile(run: TestRun, item: TestItem, error: boolean, testingConfig?: TestingConfig) {
35 | let testSrvPgm = '';
36 | if (!error) {
37 | const originalTstPgmName = item.label;
38 | const newTstPgmName = Utils.getTestName(item.uri!.scheme as 'file' | 'member', originalTstPgmName, testingConfig);
39 | if (newTstPgmName !== originalTstPgmName) {
40 | // TODO: Seems like this is always printed since the original name includes the extension
41 | Logger.log(LogLevel.Warning, `Test program name ${originalTstPgmName} was converted to ${newTstPgmName}`);
42 | }
43 | testSrvPgm = ` → ${newTstPgmName}.SRVPGM`;
44 | }
45 |
46 | run.appendOutput(`${c.blue(`❯`)} ${item.label}${testSrvPgm} ${c.grey(`(${item.children.size})`)}`);
47 | }
48 |
49 | export function logCompilation(run: TestRun, item: TestItem, status: CompilationStatus, metrics: TestMetrics, messages?: string[]) {
50 | if (status === 'success') {
51 | metrics.compilations.success++;
52 | run.appendOutput(` ${c.grey(`[ Compilation Successful ]`)}\r\n`);
53 | Logger.log(LogLevel.Info, `Successfully compiled ${item.label}`);
54 | } else if (status === 'failed') {
55 | metrics.compilations.failed++;
56 | run.appendOutput(` ${c.red(`[ Compilation Failed ]`)}\r\n`);
57 | Logger.log(LogLevel.Error, `Failed to compile ${item.label}`);
58 | } else if (status === 'skipped') {
59 | metrics.compilations.skipped++;
60 | run.appendOutput(` ${c.grey(`[ Compilation Skipped ]`)}\r\n`);
61 | Logger.log(LogLevel.Warning, `Skipped compilation for ${item.label}`);
62 | }
63 |
64 | if (messages) {
65 | for (const message of messages) {
66 | run.appendOutput(`\t${c.red(`${message}`)}\r\n`);
67 | }
68 | }
69 | }
70 |
71 | export function logTestCasePassed(run: TestRun, item: TestItem, metrics: TestMetrics, duration?: number, assertions?: number) {
72 | metrics.testCases.passed++;
73 | if (duration) {
74 | metrics.duration += duration;
75 | }
76 | if (assertions) {
77 | metrics.assertions += assertions;
78 | }
79 |
80 | run.appendOutput(`\t${c.green(`✔`)} ${item.label} ${c.grey(duration !== undefined ? `${duration}s` : ``)}\r\n`);
81 | run.passed(item, duration !== undefined ? duration * 1000 : undefined);
82 | Logger.log(LogLevel.Info, `Test case ${item.label} passed${duration !== undefined ? ` in ${duration}s` : ``}`);
83 | }
84 |
85 | export function logTestCaseFailed(run: TestRun, item: TestItem, metrics: TestMetrics, duration?: number, assertions?: number, messages?: { line?: number, message: string }[]) {
86 | metrics.testCases.failed++;
87 | if (duration) {
88 | metrics.duration += duration;
89 | }
90 | if (assertions) {
91 | metrics.assertions += assertions;
92 | }
93 |
94 | run.appendOutput(`\t${c.red(`✘`)} ${item.label} ${c.grey(duration !== undefined ? `${duration}s` : ``)}\r\n`);
95 |
96 | const testMessages: TestMessage[] = [];
97 | if (messages) {
98 | for (const message of messages) {
99 | const line = message.line ? message.line : undefined;
100 | run.appendOutput(`\t\t${c.red(`${c.bold(`Failure`)}${line ? ` (line ${line})` : ``}: ${message.message}`)}\r\n`);
101 |
102 | const testMessage = new TestMessage(message.message);
103 | const range = message.line ? new Position(message.line - 1, 0) : item.range;
104 | testMessage.location = range ? new Location(item.uri!, range) : undefined;
105 | testMessages.push(testMessage);
106 | }
107 | }
108 |
109 | run.failed(item, testMessages, duration !== undefined ? duration * 1000 : undefined);
110 | Logger.log(LogLevel.Error, `Test case ${item.label} failed${duration !== undefined ? ` in ${duration}s` : ``}`);
111 | }
112 |
113 | export function logArbitraryTestCaseFailed(run: TestRun, testCaseName: string, testFileItem: TestItem, metrics: TestMetrics, duration?: number, assertions?: number, messages?: { line?: number, message: string }[]) {
114 | if (duration) {
115 | metrics.duration += duration;
116 | }
117 | if (assertions) {
118 | metrics.assertions += assertions;
119 | }
120 |
121 | run.appendOutput(`\t${c.red(`✘`)} ${testCaseName} ${c.grey(duration !== undefined ? `${duration}s` : ``)}\r\n`);
122 |
123 | const testMessages: TestMessage[] = [];
124 | if (messages) {
125 | for (const message of messages) {
126 | const line = message.line ? message.line : undefined;
127 | run.appendOutput(`\t\t${c.red(`${c.bold(`Failure`)}${line ? ` (line ${line})` : ``}: ${message.message}`)}\r\n`);
128 |
129 | const testMessage = new TestMessage(message.message);
130 | const range = message.line ? new Position(message.line - 1, 0) : testFileItem.range;
131 | testMessage.location = range ? new Location(testFileItem.uri!, range) : undefined;
132 | testMessages.push(testMessage);
133 | }
134 | }
135 |
136 | run.failed(testFileItem, testMessages, duration !== undefined ? duration * 1000 : undefined);
137 | Logger.log(LogLevel.Error, `Test case ${testCaseName} failed${duration !== undefined ? ` in ${duration}s` : ``} but was not mapped to a test item`);
138 | }
139 |
140 | export function logTestCaseErrored(run: TestRun, item: TestItem, metrics: TestMetrics, duration?: number, assertions?: number, messages?: { line?: number, message: string }[]) {
141 | metrics.testCases.errored++;
142 | if (duration) {
143 | metrics.duration += duration;
144 | }
145 | if (assertions) {
146 | metrics.assertions += assertions;
147 | }
148 |
149 | run.appendOutput(`\t${c.yellow(`⚠`)} ${item.label} ${c.grey(duration !== undefined ? `${duration}s` : ``)}\r\n`);
150 |
151 | const testMessages: TestMessage[] = [];
152 | if (messages) {
153 | for (const message of messages) {
154 | const line = message.line ? message.line : undefined;
155 | run.appendOutput(`\t\t${c.yellow(`${c.bold(`Error`)}${line ? ` (line ${line})` : ``}: ${message.message}`)}\r\n`);
156 |
157 | const testMessage = new TestMessage(message.message);
158 | const range = message.line ? new Position(message.line - 1, 0) : item.range;
159 | testMessage.location = range ? new Location(item.uri!, range) : undefined;
160 | testMessages.push(testMessage);
161 | }
162 | }
163 |
164 | run.errored(item, testMessages, duration !== undefined ? duration * 1000 : undefined);
165 | Logger.log(LogLevel.Error, `Test case ${item.label} errored${duration !== undefined ? ` in ${duration}s` : ``}`);
166 | }
167 |
168 | export function logArbitraryTestCaseErrored(run: TestRun, testCaseName: string, testFileItem: TestItem, metrics: TestMetrics, duration?: number, assertions?: number, messages?: { line?: number, message: string }[]) {
169 | if (duration) {
170 | metrics.duration += duration;
171 | }
172 | if (assertions) {
173 | metrics.assertions += assertions;
174 | }
175 |
176 | run.appendOutput(`\t${c.yellow(`⚠`)} ${testCaseName} ${c.grey(duration !== undefined ? `${duration}s` : ``)}\r\n`);
177 |
178 | const testMessages: TestMessage[] = [];
179 | if (messages) {
180 | for (const message of messages) {
181 | const line = message.line ? message.line : undefined;
182 | run.appendOutput(`\t\t${c.yellow(`${c.bold(`Error`)}${line ? ` (line ${line})` : ``}: ${message.message}`)}\r\n`);
183 |
184 | const testMessage = new TestMessage(message.message);
185 | const range = message.line ? new Position(message.line - 1, 0) : testFileItem.range;
186 | testMessage.location = range ? new Location(testFileItem.uri!, range) : undefined;
187 | testMessages.push(testMessage);
188 | }
189 | }
190 |
191 | run.errored(testFileItem, testMessages, duration !== undefined ? duration * 1000 : undefined);
192 | Logger.log(LogLevel.Error, `Test case ${testCaseName} errored${duration !== undefined ? ` in ${duration}s` : ``} but was not mapped to a test item`);
193 | }
194 |
195 | export function logMetrics(run: TestRun, metrics: TestMetrics): void {
196 | const totalDeployments = metrics.deployments.success + metrics.deployments.failed;
197 | const totalCompilations = metrics.compilations.success + metrics.compilations.failed + metrics.compilations.skipped;
198 | const totalTestFiles = metrics.testFiles.passed + metrics.testFiles.failed + metrics.testFiles.errored;
199 | const totalTestCases = metrics.testCases.passed + metrics.testCases.failed + metrics.testCases.errored;
200 |
201 | // Format text with ansi colors
202 | const testExecutionHeading = `${c.bgBlue(` EXECUTION `)}`;
203 | const deploymentResult = `Deployments: ${c.green(`${metrics.deployments.success} successful`)} | ${c.red(`${metrics.deployments.failed} failed`)} ${c.grey(`(${totalDeployments})`)}`;
204 | const compilationResult = `Compilations: ${c.green(`${metrics.compilations.success} successful`)} | ${c.red(`${metrics.compilations.failed} failed`)} | ${metrics.compilations.skipped} skipped ${c.grey(`(${totalCompilations})`)}`;
205 | const testResultsHeading = `${c.bgBlue(` RESULTS `)}`;
206 | const testFileResult = `Test Files: ${c.green(`${metrics.testFiles.passed} passed`)} | ${c.red(`${metrics.testFiles.failed} failed`)} | ${c.yellow(`${metrics.testFiles.errored} errored`)} ${c.grey(`(${totalTestFiles})`)}`;
207 | const testCaseResult = `Test Cases: ${c.green(`${metrics.testCases.passed} passed`)} | ${c.red(`${metrics.testCases.failed} failed`)} | ${c.yellow(`${metrics.testCases.errored} errored`)} ${c.grey(`(${totalTestCases})`)}`;
208 | const assertionResult = `Assertions: ${metrics.assertions}`;
209 | const durationResult = `Duration: ${metrics.duration}s`;
210 | const finalResult = (metrics.testFiles.failed > 0 || metrics.testCases.failed > 0) ? c.bgRed(` FAIL `) : (metrics.testFiles.errored || metrics.testCases.errored) > 0 ? c.bgYellow(` ERROR `) : c.bgGreen(` PASS `);
211 |
212 | // Calculate box width
213 | const maxContentWidth = Math.max(
214 | c.stripColor(testExecutionHeading).length,
215 | c.stripColor(deploymentResult).length,
216 | c.stripColor(compilationResult).length,
217 | c.stripColor(testResultsHeading).length,
218 | c.stripColor(testFileResult).length,
219 | c.stripColor(testCaseResult).length,
220 | c.stripColor(assertionResult).length,
221 | c.stripColor(durationResult).length,
222 | c.stripColor(finalResult).length
223 | );
224 | const boxWidth = maxContentWidth + 2;
225 |
226 | // Generate dynamic border
227 | const borderTop = c.blue(`┌${'─'.repeat(boxWidth)}┐`);
228 | const borderBottom = c.blue(`└${'─'.repeat(boxWidth)}┘`);
229 |
230 | // Add padding to line
231 | function addPadding(content: string): string {
232 | const plainTextLength = c.stripColor(content).length;
233 | const padding = maxContentWidth - plainTextLength;
234 | return `${c.blue(`│`)} ${content}${' '.repeat(padding)} ${c.blue(`│`)}`;
235 | }
236 |
237 | // Output results
238 | run.appendOutput(`\r\n`);
239 | run.appendOutput(`${borderTop}\r\n`);
240 | run.appendOutput(`${addPadding(testExecutionHeading)}\r\n`);
241 | run.appendOutput(`${addPadding(deploymentResult)}\r\n`);
242 | run.appendOutput(`${addPadding(compilationResult)}\r\n`);
243 | run.appendOutput(`${addPadding('')}\r\n`);
244 | run.appendOutput(`${addPadding(testResultsHeading)}\r\n`);
245 | run.appendOutput(`${addPadding(testFileResult)}\r\n`);
246 | run.appendOutput(`${addPadding(testCaseResult)}\r\n`);
247 | run.appendOutput(`${addPadding(assertionResult)}\r\n`);
248 | run.appendOutput(`${addPadding(durationResult)}\r\n`);
249 | run.appendOutput(`${addPadding('')}\r\n`);
250 | run.appendOutput(`${addPadding(finalResult)}\r\n`);
251 | run.appendOutput(borderBottom);
252 | }
253 | }
--------------------------------------------------------------------------------
/src/testObject.ts:
--------------------------------------------------------------------------------
1 | import { TestItem } from "vscode";
2 |
3 | export class TestObject {
4 | item: TestItem;
5 |
6 | constructor(item: TestItem) {
7 | this.item = item;
8 | }
9 | }
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | import { TestItem } from "vscode";
2 | import { TestFile } from "./testFile";
3 | import { TestCase } from "./testCase";
4 | import { TestDirectory } from "./testDirectory";
5 | import { TestObject } from "./testObject";
6 |
7 | export type Env = Record<string, string>;
8 |
9 | export type IBMiTestData = TestDirectory | TestObject | TestFile | TestCase;
10 |
11 | export type TestQueue = { item: TestItem, data: TestFile | TestCase }[];
12 |
13 | export type CompilationStatus = 'success' | 'failed' | 'skipped';
14 |
15 | export type TestStatus = 'passed' | 'failed' | 'errored';
16 |
17 | export interface TestStorage {
18 | RPGUNIT: string,
19 | CODECOV: string
20 | };
21 |
22 | export interface TestMetrics {
23 | duration: number,
24 | assertions: number,
25 | deployments: {
26 | success: number,
27 | failed: number
28 | },
29 | compilations: {
30 | success: number,
31 | failed: number,
32 | skipped: number
33 | },
34 | testFiles: {
35 | passed: number,
36 | failed: number,
37 | errored: number
38 | },
39 | testCases: {
40 | passed: number,
41 | failed: number,
42 | errored: number
43 | }
44 | }
45 |
46 | export interface TestCaseResult {
47 | name: string,
48 | status: TestStatus,
49 | time?: number,
50 | assertions?: number,
51 | failure?: {
52 | line?: number,
53 | message: string
54 | }[],
55 | error?: {
56 | line?: number,
57 | message: string
58 | }[]
59 | }
60 |
61 | export interface TestingConfig {
62 | rpgunit?: {
63 | rucrtrpg?: RUCRTRPG,
64 | rucrtcbl?: RUCRTCBL,
65 | prefix?: string
66 | }
67 | }
68 |
69 | export interface RUCRTRPG {
70 | tstPgm: string,
71 | srcFile?: string,
72 | srcMbr?: string,
73 | srcStmf?: string,
74 | text?: string,
75 | cOption?: string[],
76 | dbgView?: string,
77 | bndSrvPgm?: string[],
78 | bndDir?: string[],
79 | bOption?: string,
80 | define?: string[],
81 | dltSplf?: string,
82 | actGrp?: string,
83 | module?: string[],
84 | rpgPpOpt?: string,
85 | pOption?: string[],
86 | compileOpt?: string,
87 | tgtRls?: string,
88 | incDir?: string[],
89 | tgtCcsid?: number
90 | }
91 |
92 | export interface RUCRTCBL {
93 | tstPgm: string,
94 | srcFile?: string,
95 | srcMbr?: string,
96 | srcStmf?: string,
97 | text?: string,
98 | cOption?: string[],
99 | dbgView?: string,
100 | bndSrvPgm?: string[],
101 | bndDir?: string[],
102 | bOption?: string,
103 | define?: string[],
104 | dltSplf?: string,
105 | actGrp?: string,
106 | module?: string[],
107 | pOption?: string[],
108 | compileOpt?: string,
109 | tgtRls?: string,
110 | incDir?: string[],
111 | tgtCcsid?: number
112 | }
113 |
114 | export interface RUCALLTST {
115 | tstPgm: string,
116 | tstPrc?: string,
117 | order?: string,
118 | detail?: string,
119 | output?: string,
120 | libl?: string,
121 | jobD?: string,
122 | rclRsc?: string,
123 | xmlStmf: string
124 | }
125 |
126 | export interface CODECOV {
127 | cmd: string,
128 | module: string,
129 | ccLvl: string,
130 | ccView?: string,
131 | outDir?: string,
132 | outStmf: string,
133 | testId?: string
134 | }
135 |
136 | export interface CoverageData {
137 | basename: string,
138 | path: string,
139 | localPath: string,
140 | coverage: {
141 | signitures: string[],
142 | lineString: string,
143 | activeLines: { [key: number]: boolean },
144 | percentRan: string
145 | }
146 | }
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import { ConfigurationChangeEvent, WorkspaceFolder, workspace } from "vscode";
3 | import { Env, TestingConfig } from "./types";
4 |
5 | export namespace Utils {
6 | /**
7 | * Get IFS and QSYS test suffixes. IFS test suffixes are identical to QSYS ones,
8 | * but include `.TEST` along with the file extension.
9 | */
10 | export function getTestSuffixes(options: { rpg: boolean, cobol: boolean }): { ifs: string[], qsys: string[] } {
11 | const localSuffix = '.TEST';
12 |
13 | // Supported extensions
14 | const rpgleExt = `.RPGLE`;
15 | const sqlrpgleExt = `.SQLRPGLE`;
16 | const cobolExt = `.CBLLE`;
17 | const sqlcobolExt = `.SQLCBLLE`;
18 |
19 | const testSuffixes: { ifs: string[], qsys: string[] } = {
20 | ifs: [],
21 | qsys: []
22 | };
23 |
24 | if (options.rpg) {
25 | testSuffixes.qsys.push(rpgleExt, sqlrpgleExt);
26 | }
27 |
28 | if (options.cobol) {
29 | testSuffixes.qsys.push(cobolExt, sqlcobolExt);
30 | }
31 |
32 | testSuffixes.ifs.push(...testSuffixes.qsys.map(suffix => localSuffix + suffix));
33 |
34 | return testSuffixes;
35 | }
36 |
37 | /**
38 | * Reuse logic used in Source Orbit to convert a given file name to a 10 character system name.
39 | *
40 | * Explanation: https://ibm.github.io/sourceorbit/#/./pages/general/rules?id=long-file-names
41 | * Original Source: https://github.com/IBM/sourceorbit/blob/main/cli/src/utils.ts#L12
42 | */
43 | function getSystemName(inputName: string) {
44 | let baseName = inputName.includes(`-`) ? inputName.split(`-`)[0] : inputName;
45 |
46 | // If the name is of valid length, return it
47 | if (baseName.length <= 10) {
48 | return baseName.toUpperCase();
49 | }
50 |
51 | // We also support prefixes to the name, such as UA_
52 | let prefix = ``;
53 | let name = baseName;
54 |
55 | if (baseName.includes(`_`)) {
56 | const parts = baseName.split(`_`);
57 | prefix = parts[0];
58 | name = parts[1];
59 | }
60 |
61 | // We start the system name with the supplied prefix
62 | let systemName = prefix;
63 |
64 | for (let i = 0; i < name.length && systemName.length < 10; i++) {
65 | const char = name[i];
66 | if (char === char.toUpperCase() || i === 0) {
67 | systemName += char;
68 | }
69 | }
70 |
71 | // If we only have one character, then no capitals were used in the name. Let's just use the first 10 characters
72 | if (systemName.length === 1) {
73 | systemName = name.substring(0, 10);
74 | }
75 |
76 | return systemName.toUpperCase();
77 | }
78 |
79 | export function getTestName(type: 'file' | 'member', originalTstPgmName: string, testingConfig: TestingConfig | undefined) {
80 | const testSuffixes = Utils.getTestSuffixes({ rpg: true, cobol: true });
81 | const relevantSuffixes = type === 'file' ? testSuffixes.ifs : testSuffixes.qsys;
82 | for (const suffix of relevantSuffixes) {
83 | if (originalTstPgmName.toLocaleUpperCase().endsWith(suffix)) {
84 | originalTstPgmName = originalTstPgmName.replace(new RegExp(suffix, 'i'), '');
85 | }
86 | }
87 | originalTstPgmName = originalTstPgmName.toLocaleUpperCase();
88 |
89 | const prefix = testingConfig?.rpgunit?.prefix || '';
90 | return getSystemName(`${prefix}${originalTstPgmName}`);
91 | }
92 |
93 | /**
94 | * Retrieve the environment variables defined in a workspace folder's `.env` file. This implementation
95 | * is a modified version of the original source to include `&` as a prefix for each key.
96 | *
97 | * Original Source: https://github.com/codefori/vscode-ibmi/blob/master/src/filesystems/local/env.ts#L20
98 | */
99 | export async function getEnvConfig(workspaceFolder: WorkspaceFolder) {
100 | const env: Env = {};
101 | const prefix = `&`;
102 |
103 | if (await envExists(workspaceFolder)) {
104 | const folderUri = workspaceFolder.uri;
105 | let readData, readStr;
106 |
107 | // Then we get the local .env file
108 | const envUri = folderUri.with({ path: path.join(folderUri.fsPath, `.env`) });
109 | readData = await workspace.fs.readFile(envUri);
110 | readStr = Buffer.from(readData).toString(`utf8`);
111 |
112 | const envLines = readStr.replace(new RegExp(`\\\r`, `g`), ``).split(`\n`);
113 |
114 | // Parse out the env lines
115 | envLines.forEach(line => {
116 | if (!line.startsWith(`#`)) {
117 | const [key, value] = line.split(`=`);
118 | if (key.length > 0 && value.length > 0) {
119 | env[`${prefix}${key.trim()}`] = value.trim();
120 | }
121 | }
122 | });
123 | }
124 |
125 | return env;
126 | }
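// Illustrative behaviour for a hypothetical .env file containing:
//   LIBRARY=MYLIB
//   # comment lines and blank lines are skipped
//   TARGET_RELEASE=V7R5M0
// getEnvConfig(workspaceFolder) resolves to { '&LIBRARY': 'MYLIB', '&TARGET_RELEASE': 'V7R5M0' },
// i.e. each key comes back with the '&' prefix described above.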
127 |
128 | /**
129 | * Check if a `.env` file exists in a workspace folder.
130 | *
131 | * Original Source: https://github.com/codefori/vscode-ibmi/blob/master/src/filesystems/local/env.ts#L8
132 | */
133 | async function envExists(workspaceFolder: WorkspaceFolder) {
134 | const folderUri = workspaceFolder.uri;
135 | const envUri = folderUri.with({ path: path.join(folderUri.fsPath, `.env`) });
136 |
137 | try {
138 | await workspace.fs.stat(envUri);
139 | return true;
140 | } catch (err) {
141 | return false;
142 | }
143 | }
144 |
145 | /**
146 | * Subscribe to Code for IBM i configuration changes.
147 | *
148 | * Original Source: https://github.com/codefori/vscode-ibmi/blob/master/src/config/Configuration.ts#L5
149 | */
150 | export function onCodeForIBMiConfigurationChange(props: string | string[], todo: (value: ConfigurationChangeEvent) => void) {
151 | const keys = (Array.isArray(props) ? props : Array.of(props)).map(key => `code-for-ibmi.${key}`);
152 | return workspace.onDidChangeConfiguration(async event => {
153 | if (keys.some(key => event.affectsConfiguration(key))) {
154 | todo(event);
155 | }
156 | });
157 | }
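// Illustrative usage (hypothetical setting name and handler):
//   context.subscriptions.push(
//     Utils.onCodeForIBMiConfigurationChange('connectionSettings', () => {
//       // Re-read any cached Code for IBM i configuration here
//     })
//   );
// The listener only fires for keys under the 'code-for-ibmi.' namespace built above.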
158 | }
--------------------------------------------------------------------------------
/src/xmlParser.ts:
--------------------------------------------------------------------------------
1 | import { TestCaseResult } from "./types";
2 |
3 | export namespace XMLParser {
4 | export function parseTestResults(xml: any, isStreamFile: boolean): TestCaseResult[] {
5 | const results: TestCaseResult[] = [];
6 |
7 | xml.testsuite.testcase.forEach((testcase: any) => {
8 | const testCaseName = testcase.$.name.toLocaleUpperCase();
9 | const duration: number = parseFloat(testcase.$.time);
10 | const assertions: number = parseInt(testcase.$.assertions);
11 |
12 | const result: TestCaseResult = {
13 | name: testCaseName,
14 | status: 'passed',
15 | time: duration,
16 | assertions: assertions
17 | };
18 |
19 | // Parse failure messages
20 | if (testcase.failure) {
21 | result.status = 'failed';
22 |
23 | testcase.failure.forEach((failure: any) => {
24 | const match = failure._.match(/:(\d+)\)/);
25 | let line = match ? parseInt(match[1]) : undefined;
26 | if (!isStreamFile && line) {
27 | // Stream files: Line numbers already match the source code line numbers
28 | // Source members: Line numbers must be divided by 100 because they are specified with 2 decimal positions
29 | // https://github.com/tools-400/irpgunit/issues/15#issuecomment-2871972032
30 | line = line / 100;
31 | }
32 |
33 | if (!result.failure) {
34 | result.failure = [];
35 | }
36 |
37 | result.failure.push({
38 | line: line,
39 | message: failure.$.type ? `${failure.$.type}: ${failure.$.message}` : failure.$.message
40 | });
41 | });
42 | }
43 |
44 | // Parse error messages
45 | if (testcase.error) {
46 | result.status = 'errored';
47 |
48 | testcase.error.forEach((error: any) => {
49 | const match = error._.match(/:(\d+)\)/);
50 | const line = match ? parseInt(match[1]) : undefined;
51 |
52 | if (!result.error) {
53 | result.error = [];
54 | }
55 |
56 | result.error.push({
57 | line: line,
58 | message: error.$.type ? `${error.$.type}: ${error.$.message}` : error.$.message
59 | });
60 | });
61 | }
62 |
63 | results.push(result);
64 | });
65 |
66 | return results;
67 | }
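// Illustrative input/output, assuming the XML was parsed with xml2js (attributes under '$',
// element text under '_') and an illustrative failure message ending in '(MYPGM:1200)':
//   parseTestResults({
//     testsuite: {
//       testcase: [{
//         $: { name: 'test_add', time: '0.012', assertions: '3' },
//         failure: [{ _: 'Assertion failed. (MYPGM:1200)', $: { message: 'Expected 2 but got 3' } }]
//       }]
//     }
//   }, false)
//   -> [{ name: 'TEST_ADD', status: 'failed', time: 0.012, assertions: 3,
//         failure: [{ line: 12, message: 'Expected 2 but got 3' }] }]
// With isStreamFile = true the line would stay 1200, since stream file line numbers already
// match the source.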
68 | }
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "ES2022",
4 | "target": "ES2022",
5 | "moduleResolution": "bundler",
6 | "lib": [
7 | "ES2022"
8 | ],
9 | "sourceMap": true,
10 | "rootDir": "src",
11 | "strict": true /* enable all strict type-checking options */
12 | /* Additional Checks */
13 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
14 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
15 | // "noUnusedParameters": true, /* Report errors on unused parameters. */
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | //@ts-check
2 |
3 | 'use strict';
4 |
5 | const path = require('path');
6 |
7 | //@ts-check
8 | /** @typedef {import('webpack').Configuration} WebpackConfig **/
9 |
10 | /** @type WebpackConfig */
11 | const extensionConfig = {
12 | target: 'node', // VS Code extensions run in a Node.js context 📖 -> https://webpack.js.org/configuration/node/
13 | mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')
14 |
15 | entry: './src/extension.ts', // the entry point of this extension, 📖 -> https://webpack.js.org/configuration/entry-context/
16 | output: {
17 | // the bundle is stored in the 'dist' folder (check package.json), 📖 -> https://webpack.js.org/configuration/output/
18 | path: path.resolve(__dirname, 'dist'),
19 | filename: 'extension.js',
20 | libraryTarget: 'commonjs2'
21 | },
22 | externals: {
23 | vscode: 'commonjs vscode' // the vscode-module is created on-the-fly and must be excluded. Add other modules that cannot be webpack'ed, 📖 -> https://webpack.js.org/configuration/externals/
24 | // modules added here also need to be added in the .vscodeignore file
25 | },
26 | resolve: {
27 | // support reading TypeScript and JavaScript files, 📖 -> https://github.com/TypeStrong/ts-loader
28 | extensions: ['.ts', '.js']
29 | },
30 | module: {
31 | rules: [
32 | {
33 | test: /\.ts$/,
34 | exclude: /node_modules/,
35 | use: [
36 | {
37 | loader: 'ts-loader'
38 | }
39 | ]
40 | }
41 | ]
42 | },
43 | devtool: 'nosources-source-map',
44 | infrastructureLogging: {
45 | level: "log", // enables logging required for problem matchers
46 | },
47 | };
48 | module.exports = [ extensionConfig ];
--------------------------------------------------------------------------------