├── .dockerignore ├── .editorconfig ├── .eslintignore ├── .eslintrc ├── .github ├── dependabot.yml └── workflows │ └── build.yml ├── .gitignore ├── .huskyrc.js ├── .prettierignore ├── CODEOWNERS ├── Dockerfile ├── Dockerfile-extras ├── LICENSE ├── README.md ├── babel.config.js ├── bin ├── devInit.js ├── docker-mkdocs ├── mkdocs ├── psp ├── psp-assess ├── psp-build └── psp-publish ├── ci_bin └── docker_push ├── cortex.yaml ├── jest.config.js ├── jest.setup.ts ├── lint-staged.config.js ├── package.json ├── prettier.config.js ├── src ├── assessment │ ├── hipaa │ │ └── questions.ts │ └── index.ts ├── assets.ts ├── commands │ ├── psp-assess.test.ts │ ├── psp-assess.ts │ ├── psp-build.ts │ ├── psp-publish.ts │ └── psp.ts ├── configure.ts ├── constants.ts ├── error.ts ├── index.ts ├── j1 │ ├── index.ts │ ├── j1GraphQL.ts │ └── types.ts ├── publishToConfluence.ts ├── questions │ ├── base.ts │ ├── helpers │ │ ├── colors.ts │ │ └── validate.ts │ ├── hipaa.ts │ └── scorecard.ts ├── render.ts ├── types.ts └── util │ └── pickAdopted.ts ├── static └── assets │ ├── _config.scss │ ├── css │ └── custom.css │ └── images │ ├── favicon.ico │ └── logo.svg ├── test ├── fixtures │ ├── downloads │ │ └── .gitkeep │ ├── empty_config.json │ ├── minimal_populated_config.json │ ├── populated_config.json │ └── templates │ │ ├── assessments │ │ └── hipaa.md.tmpl │ │ ├── mkdocs │ │ └── mkdocs.yml.tmpl │ │ ├── ref │ │ └── sanction-notice.pdf │ │ └── test.tmpl ├── integration │ └── cli.test.ts └── unit │ ├── assess.test.ts │ ├── assets.test.ts │ ├── colors.test.ts │ ├── configure.test.ts │ └── render.test.ts ├── tsconfig.dist.json ├── tsconfig.json ├── util └── add-web-links.ts └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | assessments 2 | coverage 3 | docs 4 | site 5 | node_modules 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # top-most EditorConfig file 2 | root = true 3 | 4 | # Unix-style newlines with a newline ending every file 5 | [*] 6 | end_of_line = lf 7 | insert_final_newline = true 8 | trim_trailing_whitespace = true 9 | 10 | [*.{js,ts,json,yml}] 11 | charset = utf-8 12 | indent_style = space 13 | indent_size = 2 14 | 15 | [Makefile] 16 | charset = utf-8 17 | indent_style = tab 18 | indent_size = 2 -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | coverage/ 2 | env/ 3 | dist/ 4 | bin/ -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "extends": [ 4 | "./node_modules/@jupiterone/typescript-tools/config/eslint-node.json" 5 | ], 6 | "parserOptions": { 7 | "project": "./tsconfig.json", 8 | "tsconfigRootDir": "." 
9 | } 10 | } -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Enable version updates for npm 4 | - package-ecosystem: "npm" 5 | # Look for `package.json` and `lock` files in the `root` directory 6 | directory: "/" 7 | # Check the npm registry for updates every day (weekdays) 8 | schedule: 9 | interval: "daily" 10 | 11 | # Enable version updates for Docker 12 | - package-ecosystem: "docker" 13 | # Look for a `Dockerfile` in the `root` directory 14 | directory: "/" 15 | # Check for updates once a week 16 | schedule: 17 | interval: "weekly" -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | on: [push, pull_request] 3 | 4 | jobs: 5 | test: 6 | runs-on: ubuntu-latest 7 | 8 | steps: 9 | - name: Setup Node 10 | uses: actions/setup-node@v1 11 | with: 12 | node-version: '14' 13 | 14 | - name: Check out source code 15 | uses: actions/checkout@v2 16 | 17 | - name: Install dependencies 18 | run: yarn 19 | 20 | - name: Execute Tests 21 | run: yarn test 22 | 23 | - name: Verify that NPM module builds 24 | run: yarn bundle 25 | 26 | - name: Verify that Docker image builds 27 | run: docker build . 28 | 29 | npm: 30 | if: github.ref == 'refs/heads/main' 31 | runs-on: ubuntu-latest 32 | needs: test 33 | 34 | steps: 35 | - name: Initialize Output 36 | run: echo "didpublishnpm=false" >> $GITHUB_ENV 37 | 38 | - name: Check out source code 39 | uses: actions/checkout@v2 40 | 41 | - name: Check if publish needed 42 | run: | 43 | name="$(jq -r .name package.json)" 44 | npmver="$(npm show $name version 2>/dev/null || echo v0.0.0)" 45 | pkgver="$(jq -r .version package.json)" 46 | if [ "$npmver" = "$pkgver" ] 47 | then 48 | echo "Package version ($pkgver) is the same as last published NPM version ($npmver), skipping publish." 49 | else 50 | echo "Package version ($pkgver) is different from latest NPM version ($npmver), publishing!" 51 | echo "shouldpublishnpm=true" >> $GITHUB_ENV 52 | fi 53 | 54 | - name: Setup Node 55 | if: env.shouldpublishnpm 56 | uses: actions/setup-node@v1 57 | with: 58 | node-version: '14' 59 | 60 | - name: Install dependencies 61 | if: env.shouldpublishnpm 62 | run: yarn 63 | 64 | - name: Bundle module (create ./dist dir) 65 | if: env.shouldpublishnpm 66 | run: yarn bundle 67 | 68 | - name: Publish 69 | if: env.shouldpublishnpm 70 | env: 71 | NPM_AUTH_TOKEN: ${{ secrets.NPM_AUTH_TOKEN }} 72 | run: | 73 | echo "//registry.npmjs.org/:_authToken=${NPM_AUTH_TOKEN}" > .npmrc 74 | npm publish --access public ./dist 75 | echo "didpublishnpm=true" >> $GITHUB_ENV 76 | 77 | docker: 78 | if: github.ref == 'refs/heads/main' 79 | runs-on: ubuntu-latest 80 | needs: [test, npm] 81 | 82 | steps: 83 | - name: Check out source code 84 | uses: actions/checkout@v2 85 | 86 | - name: Detect Dockerfile changes 87 | uses: dorny/paths-filter@v2 88 | id: filter 89 | with: 90 | filters: | 91 | dockerchanged: 92 | - 'Dockerfile' 93 | 94 | - name: Should Build? 95 | if: 96 | steps.filter.outputs.dockerchanged == 'true' || 97 | needs.npm.outputs.didpublishnpm == 'true' 98 | run: | 99 | echo "Dockerfile changed, and/or new NPM module published. Need to update Docker image." 
100 | echo "need_docker_build=true" >> $GITHUB_ENV 101 | 102 | - name: Login to DockerHub Registry 103 | if: env.need_docker_build 104 | run: 105 | echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ 106 | secrets.DOCKERHUB_USERNAME }} --password-stdin 107 | 108 | - name: Build the latest Docker image 109 | if: env.need_docker_build 110 | run: docker build . --file Dockerfile --tag jupiterone/pspbuilder:latest 111 | 112 | - name: Push the latest Docker image 113 | if: env.need_docker_build 114 | run: docker push jupiterone/pspbuilder:latest 115 | 116 | docker-extras: 117 | if: github.ref == 'refs/heads/main' 118 | runs-on: ubuntu-latest 119 | needs: [test, npm] 120 | 121 | steps: 122 | - name: Check out source code 123 | uses: actions/checkout@v2 124 | 125 | - name: Detect Dockerfile changes 126 | uses: dorny/paths-filter@v2 127 | id: filter 128 | with: 129 | filters: | 130 | dockerchanged: 131 | - 'Dockerfile-extras' 132 | 133 | - name: Should Build? 134 | if: 135 | steps.filter.outputs.dockerchanged == 'true' || 136 | needs.npm.outputs.didpublishnpm == 'true' 137 | run: | 138 | echo "Dockerfile changed, and/or new NPM module published. Need to update Docker-extras image." 139 | echo "need_docker_build=true" >> $GITHUB_ENV 140 | 141 | - name: Login to DockerHub Registry 142 | if: env.need_docker_build 143 | run: 144 | echo ${{ secrets.DOCKERHUB_TOKEN }} | docker login -u ${{ 145 | secrets.DOCKERHUB_USERNAME }} --password-stdin 146 | 147 | - name: Build the latest Docker image 148 | if: env.need_docker_build 149 | run: 150 | docker build . --file Dockerfile-extras --tag 151 | jupiterone/pspbuilder-extras:latest 152 | 153 | - name: Push the latest Docker image 154 | if: env.need_docker_build 155 | run: docker push jupiterone/pspbuilder-extras:latest 156 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | *.log 3 | coverage/ 4 | node_modules/ 5 | work/ 6 | package-lock.json 7 | data/ 8 | docs/ 9 | *.bak 10 | /publish-j1dev.sh -------------------------------------------------------------------------------- /.huskyrc.js: -------------------------------------------------------------------------------- 1 | module.exports = require('@jupiterone/typescript-tools/config/husky'); 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | coverage/ 2 | work/ 3 | dist/ 4 | .terraform/ 5 | data/ -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @jupiterone/apps 2 | 3 | CODEOWNERS @jupiterone/security -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16-bullseye-slim 2 | WORKDIR /opt 3 | 4 | # Install pandoc and other linting/helper tools 5 | RUN apt-get update && apt-get -y install \ 6 | python3-pip \ 7 | unzip 8 | 9 | # Install psp CLI and additional linting tool 10 | RUN npm install -g \ 11 | @jupiterone/security-policy-builder 12 | 13 | # Install Mkdocs 14 | 15 | RUN python3 -m pip install --no-cache-dir importlib_metadata mkdocs-material==5.5.12 16 | 17 | ### NOTE: The following workaround is not longer working and therefore no longer needed, 18 | ### but is left 
here for reference 19 | ######################################################################################## 20 | 21 | ### NOTE: there appears to be an undocumented edge-case preventing Debian9 from 22 | # succesfully installing mkdocs with python3. Here, we're explicitly copying the 23 | # dependencies from a pinned squidfunk/mkdocs-material image, which should 24 | # always 'just work' 25 | #RUN python3 -m pip install --no-cache-dir importlib_metadata 26 | #COPY --from=squidfunk/mkdocs-material:5.5.12 /usr/local/lib/python3.8/site-packages/ /usr/local/lib/python3.5/dist-packages/ 27 | 28 | # This makes the 'mkdocs' command work as a 'docker run' argument 29 | COPY bin/docker-mkdocs /usr/local/bin/mkdocs 30 | RUN chmod +x /usr/local/bin/mkdocs -------------------------------------------------------------------------------- /Dockerfile-extras: -------------------------------------------------------------------------------- 1 | FROM node:16-bullseye-slim 2 | # Debian stretch base-image, minimized 3 | WORKDIR /opt 4 | 5 | # Install pandoc and other linting/helper tools 6 | RUN apt-get update && apt-get install -y \ 7 | aspell \ 8 | jq \ 9 | pandoc \ 10 | python3-pip \ 11 | texlive-base \ 12 | texlive-fonts-extra \ 13 | texlive-fonts-recommended \ 14 | texlive-latex-extra \ 15 | texlive-xetex \ 16 | unzip 17 | 18 | # Install psp CLI and additional linting tool 19 | RUN npm install -g \ 20 | @jupiterone/security-policy-builder \ 21 | markdownlint-cli 22 | 23 | # Install Mkdocs 24 | 25 | ### NOTE: there appears to be an undocumented edge-case preventing Debian9 from 26 | # succesfully installing mkdocs with python3. Here, we're explicitly copying the 27 | # dependencies from a pinned squidfunk/mkdocs-material image, which should 28 | # always 'just work' 29 | RUN python3 -m pip install --no-cache-dir importlib_metadata 30 | COPY --from=squidfunk/mkdocs-material:5.5.12 /usr/local/lib/python3.8/site-packages/ /usr/local/lib/python3.5/dist-packages/ 31 | 32 | # This makes the 'mkdocs' command work as a 'docker run' argument 33 | COPY bin/docker-mkdocs /usr/local/bin/mkdocs 34 | RUN chmod +x /usr/local/bin/mkdocs 35 | 36 | # clean up unnecessary packages 37 | RUN apt-get remove --purge --assume-yes $(dpkg -l | grep '^ii.*texlive.*doc' | cut -d' ' -f3) 38 | RUN apt autoremove --purge --assume-yes gcc cpp gcc g++ gnome-icon-theme gtk-update-icon-cache make x11-utils xbitmaps xterm 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Mozilla Public License Version 2.0 2 | ================================== 3 | 4 | 1. Definitions 5 | -------------- 6 | 7 | 1.1. "Contributor" 8 | means each individual or legal entity that creates, contributes to 9 | the creation of, or owns Covered Software. 10 | 11 | 1.2. "Contributor Version" 12 | means the combination of the Contributions of others (if any) used 13 | by a Contributor and that particular Contributor's Contribution. 14 | 15 | 1.3. "Contribution" 16 | means Covered Software of a particular Contributor. 17 | 18 | 1.4. "Covered Software" 19 | means Source Code Form to which the initial Contributor has attached 20 | the notice in Exhibit A, the Executable Form of such Source Code 21 | Form, and Modifications of such Source Code Form, in each case 22 | including portions thereof. 23 | 24 | 1.5. 
"Incompatible With Secondary Licenses" 25 | means 26 | 27 | (a) that the initial Contributor has attached the notice described 28 | in Exhibit B to the Covered Software; or 29 | 30 | (b) that the Covered Software was made available under the terms of 31 | version 1.1 or earlier of the License, but not also under the 32 | terms of a Secondary License. 33 | 34 | 1.6. "Executable Form" 35 | means any form of the work other than Source Code Form. 36 | 37 | 1.7. "Larger Work" 38 | means a work that combines Covered Software with other material, in 39 | a separate file or files, that is not Covered Software. 40 | 41 | 1.8. "License" 42 | means this document. 43 | 44 | 1.9. "Licensable" 45 | means having the right to grant, to the maximum extent possible, 46 | whether at the time of the initial grant or subsequently, any and 47 | all of the rights conveyed by this License. 48 | 49 | 1.10. "Modifications" 50 | means any of the following: 51 | 52 | (a) any file in Source Code Form that results from an addition to, 53 | deletion from, or modification of the contents of Covered 54 | Software; or 55 | 56 | (b) any new file in Source Code Form that contains any Covered 57 | Software. 58 | 59 | 1.11. "Patent Claims" of a Contributor 60 | means any patent claim(s), including without limitation, method, 61 | process, and apparatus claims, in any patent Licensable by such 62 | Contributor that would be infringed, but for the grant of the 63 | License, by the making, using, selling, offering for sale, having 64 | made, import, or transfer of either its Contributions or its 65 | Contributor Version. 66 | 67 | 1.12. "Secondary License" 68 | means either the GNU General Public License, Version 2.0, the GNU 69 | Lesser General Public License, Version 2.1, the GNU Affero General 70 | Public License, Version 3.0, or any later versions of those 71 | licenses. 72 | 73 | 1.13. "Source Code Form" 74 | means the form of the work preferred for making modifications. 75 | 76 | 1.14. "You" (or "Your") 77 | means an individual or a legal entity exercising rights under this 78 | License. For legal entities, "You" includes any entity that 79 | controls, is controlled by, or is under common control with You. For 80 | purposes of this definition, "control" means (a) the power, direct 81 | or indirect, to cause the direction or management of such entity, 82 | whether by contract or otherwise, or (b) ownership of more than 83 | fifty percent (50%) of the outstanding shares or beneficial 84 | ownership of such entity. 85 | 86 | 2. License Grants and Conditions 87 | -------------------------------- 88 | 89 | 2.1. Grants 90 | 91 | Each Contributor hereby grants You a world-wide, royalty-free, 92 | non-exclusive license: 93 | 94 | (a) under intellectual property rights (other than patent or trademark) 95 | Licensable by such Contributor to use, reproduce, make available, 96 | modify, display, perform, distribute, and otherwise exploit its 97 | Contributions, either on an unmodified basis, with Modifications, or 98 | as part of a Larger Work; and 99 | 100 | (b) under Patent Claims of such Contributor to make, use, sell, offer 101 | for sale, have made, import, and otherwise transfer either its 102 | Contributions or its Contributor Version. 103 | 104 | 2.2. Effective Date 105 | 106 | The licenses granted in Section 2.1 with respect to any Contribution 107 | become effective for each Contribution on the date the Contributor first 108 | distributes such Contribution. 109 | 110 | 2.3. 
Limitations on Grant Scope 111 | 112 | The licenses granted in this Section 2 are the only rights granted under 113 | this License. No additional rights or licenses will be implied from the 114 | distribution or licensing of Covered Software under this License. 115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 116 | Contributor: 117 | 118 | (a) for any code that a Contributor has removed from Covered Software; 119 | or 120 | 121 | (b) for infringements caused by: (i) Your and any other third party's 122 | modifications of Covered Software, or (ii) the combination of its 123 | Contributions with other software (except as part of its Contributor 124 | Version); or 125 | 126 | (c) under Patent Claims infringed by Covered Software in the absence of 127 | its Contributions. 128 | 129 | This License does not grant any rights in the trademarks, service marks, 130 | or logos of any Contributor (except as may be necessary to comply with 131 | the notice requirements in Section 3.4). 132 | 133 | 2.4. Subsequent Licenses 134 | 135 | No Contributor makes additional grants as a result of Your choice to 136 | distribute the Covered Software under a subsequent version of this 137 | License (see Section 10.2) or under the terms of a Secondary License (if 138 | permitted under the terms of Section 3.3). 139 | 140 | 2.5. Representation 141 | 142 | Each Contributor represents that the Contributor believes its 143 | Contributions are its original creation(s) or it has sufficient rights 144 | to grant the rights to its Contributions conveyed by this License. 145 | 146 | 2.6. Fair Use 147 | 148 | This License is not intended to limit any rights You have under 149 | applicable copyright doctrines of fair use, fair dealing, or other 150 | equivalents. 151 | 152 | 2.7. Conditions 153 | 154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 155 | in Section 2.1. 156 | 157 | 3. Responsibilities 158 | ------------------- 159 | 160 | 3.1. Distribution of Source Form 161 | 162 | All distribution of Covered Software in Source Code Form, including any 163 | Modifications that You create or to which You contribute, must be under 164 | the terms of this License. You must inform recipients that the Source 165 | Code Form of the Covered Software is governed by the terms of this 166 | License, and how they can obtain a copy of this License. You may not 167 | attempt to alter or restrict the recipients' rights in the Source Code 168 | Form. 169 | 170 | 3.2. Distribution of Executable Form 171 | 172 | If You distribute Covered Software in Executable Form then: 173 | 174 | (a) such Covered Software must also be made available in Source Code 175 | Form, as described in Section 3.1, and You must inform recipients of 176 | the Executable Form how they can obtain a copy of such Source Code 177 | Form by reasonable means in a timely manner, at a charge no more 178 | than the cost of distribution to the recipient; and 179 | 180 | (b) You may distribute such Executable Form under the terms of this 181 | License, or sublicense it under different terms, provided that the 182 | license for the Executable Form does not attempt to limit or alter 183 | the recipients' rights in the Source Code Form under this License. 184 | 185 | 3.3. Distribution of a Larger Work 186 | 187 | You may create and distribute a Larger Work under terms of Your choice, 188 | provided that You also comply with the requirements of this License for 189 | the Covered Software. 
If the Larger Work is a combination of Covered 190 | Software with a work governed by one or more Secondary Licenses, and the 191 | Covered Software is not Incompatible With Secondary Licenses, this 192 | License permits You to additionally distribute such Covered Software 193 | under the terms of such Secondary License(s), so that the recipient of 194 | the Larger Work may, at their option, further distribute the Covered 195 | Software under the terms of either this License or such Secondary 196 | License(s). 197 | 198 | 3.4. Notices 199 | 200 | You may not remove or alter the substance of any license notices 201 | (including copyright notices, patent notices, disclaimers of warranty, 202 | or limitations of liability) contained within the Source Code Form of 203 | the Covered Software, except that You may alter any license notices to 204 | the extent required to remedy known factual inaccuracies. 205 | 206 | 3.5. Application of Additional Terms 207 | 208 | You may choose to offer, and to charge a fee for, warranty, support, 209 | indemnity or liability obligations to one or more recipients of Covered 210 | Software. However, You may do so only on Your own behalf, and not on 211 | behalf of any Contributor. You must make it absolutely clear that any 212 | such warranty, support, indemnity, or liability obligation is offered by 213 | You alone, and You hereby agree to indemnify every Contributor for any 214 | liability incurred by such Contributor as a result of warranty, support, 215 | indemnity or liability terms You offer. You may include additional 216 | disclaimers of warranty and limitations of liability specific to any 217 | jurisdiction. 218 | 219 | 4. Inability to Comply Due to Statute or Regulation 220 | --------------------------------------------------- 221 | 222 | If it is impossible for You to comply with any of the terms of this 223 | License with respect to some or all of the Covered Software due to 224 | statute, judicial order, or regulation then You must: (a) comply with 225 | the terms of this License to the maximum extent possible; and (b) 226 | describe the limitations and the code they affect. Such description must 227 | be placed in a text file included with all distributions of the Covered 228 | Software under this License. Except to the extent prohibited by statute 229 | or regulation, such description must be sufficiently detailed for a 230 | recipient of ordinary skill to be able to understand it. 231 | 232 | 5. Termination 233 | -------------- 234 | 235 | 5.1. The rights granted under this License will terminate automatically 236 | if You fail to comply with any of its terms. However, if You become 237 | compliant, then the rights granted under this License from a particular 238 | Contributor are reinstated (a) provisionally, unless and until such 239 | Contributor explicitly and finally terminates Your grants, and (b) on an 240 | ongoing basis, if such Contributor fails to notify You of the 241 | non-compliance by some reasonable means prior to 60 days after You have 242 | come back into compliance. Moreover, Your grants from a particular 243 | Contributor are reinstated on an ongoing basis if such Contributor 244 | notifies You of the non-compliance by some reasonable means, this is the 245 | first time You have received notice of non-compliance with this License 246 | from such Contributor, and You become compliant prior to 30 days after 247 | Your receipt of the notice. 248 | 249 | 5.2. 
If You initiate litigation against any entity by asserting a patent 250 | infringement claim (excluding declaratory judgment actions, 251 | counter-claims, and cross-claims) alleging that a Contributor Version 252 | directly or indirectly infringes any patent, then the rights granted to 253 | You by any and all Contributors for the Covered Software under Section 254 | 2.1 of this License shall terminate. 255 | 256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all 257 | end user license agreements (excluding distributors and resellers) which 258 | have been validly granted by You or Your distributors under this License 259 | prior to termination shall survive termination. 260 | 261 | ************************************************************************ 262 | * * 263 | * 6. Disclaimer of Warranty * 264 | * ------------------------- * 265 | * * 266 | * Covered Software is provided under this License on an "as is" * 267 | * basis, without warranty of any kind, either expressed, implied, or * 268 | * statutory, including, without limitation, warranties that the * 269 | * Covered Software is free of defects, merchantable, fit for a * 270 | * particular purpose or non-infringing. The entire risk as to the * 271 | * quality and performance of the Covered Software is with You. * 272 | * Should any Covered Software prove defective in any respect, You * 273 | * (not any Contributor) assume the cost of any necessary servicing, * 274 | * repair, or correction. This disclaimer of warranty constitutes an * 275 | * essential part of this License. No use of any Covered Software is * 276 | * authorized under this License except under this disclaimer. * 277 | * * 278 | ************************************************************************ 279 | 280 | ************************************************************************ 281 | * * 282 | * 7. Limitation of Liability * 283 | * -------------------------- * 284 | * * 285 | * Under no circumstances and under no legal theory, whether tort * 286 | * (including negligence), contract, or otherwise, shall any * 287 | * Contributor, or anyone who distributes Covered Software as * 288 | * permitted above, be liable to You for any direct, indirect, * 289 | * special, incidental, or consequential damages of any character * 290 | * including, without limitation, damages for lost profits, loss of * 291 | * goodwill, work stoppage, computer failure or malfunction, or any * 292 | * and all other commercial damages or losses, even if such party * 293 | * shall have been informed of the possibility of such damages. This * 294 | * limitation of liability shall not apply to liability for death or * 295 | * personal injury resulting from such party's negligence to the * 296 | * extent applicable law prohibits such limitation. Some * 297 | * jurisdictions do not allow the exclusion or limitation of * 298 | * incidental or consequential damages, so this exclusion and * 299 | * limitation may not apply to You. * 300 | * * 301 | ************************************************************************ 302 | 303 | 8. Litigation 304 | ------------- 305 | 306 | Any litigation relating to this License may be brought only in the 307 | courts of a jurisdiction where the defendant maintains its principal 308 | place of business and such litigation shall be governed by laws of that 309 | jurisdiction, without reference to its conflict-of-law provisions. 310 | Nothing in this Section shall prevent a party's ability to bring 311 | cross-claims or counter-claims. 
312 | 313 | 9. Miscellaneous 314 | ---------------- 315 | 316 | This License represents the complete agreement concerning the subject 317 | matter hereof. If any provision of this License is held to be 318 | unenforceable, such provision shall be reformed only to the extent 319 | necessary to make it enforceable. Any law or regulation which provides 320 | that the language of a contract shall be construed against the drafter 321 | shall not be used to construe this License against a Contributor. 322 | 323 | 10. Versions of the License 324 | --------------------------- 325 | 326 | 10.1. New Versions 327 | 328 | Mozilla Foundation is the license steward. Except as provided in Section 329 | 10.3, no one other than the license steward has the right to modify or 330 | publish new versions of this License. Each version will be given a 331 | distinguishing version number. 332 | 333 | 10.2. Effect of New Versions 334 | 335 | You may distribute the Covered Software under the terms of the version 336 | of the License under which You originally received the Covered Software, 337 | or under the terms of any subsequent version published by the license 338 | steward. 339 | 340 | 10.3. Modified Versions 341 | 342 | If you create software not governed by this License, and you want to 343 | create a new license for such software, you may create and use a 344 | modified version of this License if you rename the license and remove 345 | any references to the name of the license steward (except to note that 346 | such modified license differs from this License). 347 | 348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary 349 | Licenses 350 | 351 | If You choose to distribute Source Code Form that is Incompatible With 352 | Secondary Licenses under the terms of this version of the License, the 353 | notice described in Exhibit B of this License must be attached. 354 | 355 | Exhibit A - Source Code Form License Notice 356 | ------------------------------------------- 357 | 358 | This Source Code Form is subject to the terms of the Mozilla Public 359 | License, v. 2.0. If a copy of the MPL was not distributed with this 360 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 361 | 362 | If it is not possible or desirable to put the notice in a particular 363 | file, then You may include the notice in a location (such as a LICENSE 364 | file in a relevant directory) where a recipient would be likely to look 365 | for such a notice. 366 | 367 | You may add additional accurate notices of copyright ownership. 368 | 369 | Exhibit B - "Incompatible With Secondary Licenses" Notice 370 | --------------------------------------------------------- 371 | 372 | This Source Code Form is "Incompatible With Secondary Licenses", as 373 | defined by the Mozilla Public License, v. 2.0. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # InfoSec Policies, Standards and Procedures (PSP) Builder 2 | 3 | A CLI tool for building and publishing an organization's full policies, 4 | standards, and procedures (PSPs) in support of modern security operations and 5 | compliance. 6 | 7 | The output format is Markdown. For instructions on converting markdown to other 8 | formats like HTML and PDF, see notes below. 9 | 10 | First-time users of the tool can choose one of the execution methods below and 11 | run `psp build` to be interactively prompted for configuration values. 
This will 12 | generate a `config.json` file that may be used to speed-up future invocations. 13 | 14 | JupiterOne users with existing content may begin using the CLI by downloading 15 | the PSP zip file in the Policies app. 16 | 17 | ## Installing the policybuilder 18 | 19 | ### Using NPM 20 | 21 | Run the following command to install the policy builder locally using NPM. 22 | 23 | ```bash 24 | npm install -g @jupiterone/security-policy-builder 25 | ``` 26 | 27 | If you do not have Node and/or NPM installed locally, you may do so via: 28 | 29 | #### Installing NVM and Node 30 | 31 | 1. Install NVM with: 32 | `curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.11/install.sh | bash` 33 | 1. Install Node with `nvm install stable` 34 | 35 | ### Using Docker 36 | 37 | If you are comfortable using Docker, you can also use our 38 | [Dockerhub pspbuilder image](https://hub.docker.com/r/jupiterone/pspbuilder), by 39 | issuing the command: 40 | 41 | ```bash 42 | docker pull jupiterone/pspbuilder 43 | ``` 44 | 45 | This will cache the docker image locally on your machine. 46 | 47 | ## Building your first set of policies 48 | 49 | The first time you run the `psp build` command, you will be prompted for several 50 | inputs, such as company name, to be included in your policy text. Save this to a 51 | file, say `config.json`, when prompted. This will use the default 52 | [policy templates](https://github.com/JupiterOne/security-policy-templates) 53 | maintained by JupiterOne to render a basic, but fairly complete set of 54 | information security policies and procedures. 55 | 56 | `cd` into a directory where you would like your PSP files to reside (we 57 | recommend keeping the generated `templates` directory--see below--under version 58 | control!) and perform one of the following commands: 59 | 60 | ### Building from NPM script 61 | 62 | If you installed from NPM above, issue: 63 | 64 | ```bash 65 | psp build 66 | ``` 67 | 68 | ### Building from docker image 69 | 70 | If you're using the provided docker image, issue: 71 | 72 | ```bash 73 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder psp build -o /mnt/docs -p /mnt/partials -s /mnt/templates 74 | ``` 75 | 76 | ### Output of `psp build` 77 | 78 | Remember to save your config to a file, which you can reference the next time 79 | you'd like to rebuild the policies and procedures with the `-c` or `--config` 80 | option flag. This JSON document stores your `organization` template variables 81 | referenced from documents in the `templates` folder, and also stores the 82 | information architecture for your documents (how the policies and procedures 83 | fragments should be stitched together). We recommended all policies available in 84 | the default `config.json` to be adopted for your security program. The 85 | `config.json` file includes the procedures/controls you choose to adopt, which 86 | will be included in the final rendered policy docs by the tool. 87 | 88 | The output of a successful first run will be the creation of three directories: 89 | 90 | - `templates` - raw markdown templates that represent the source of truth for 91 | your policies and procedures. 92 | - `partials` - partially rendered markdown fragments used to assemble the 93 | `docs`. This dir is intermediate output sometimes useful for debugging 94 | purposes, and may largely be ignored. 95 | - `docs` - The final Markdown produced by the tool, assembled from `partials` 96 | fragments. 
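At this point, a quick sanity check can confirm that the three directories and the saved configuration are in place. The following is a minimal sketch, assuming the default output locations shown above, a config saved to `./config.json`, and that `jq` is installed:

```bash
# Hypothetical spot-check after a first `psp build` run
# (./templates, ./partials, ./docs and ./config.json are the defaults assumed here)
ls -d templates partials docs
jq 'keys' config.json                    # e.g. organization, policies, procedures, standards
jq '.organization | keys' config.json    # the template variables you were prompted for
```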
97 | 98 | You will invariably want to edit these PSPs to reflect the specifics of your 99 | organization's information security program. See "PSP Best Practices" below for 100 | additional details on versioning and deployment. 101 | 102 | **IMPORTANT:** To edit the policies and procedures, use the template files in 103 | `./templates` and re-run the `psp build` command. Do _not_ edit the `./docs` and 104 | `./partials` files directly as they will be overwritten on the next build. 105 | 106 | ### Building from existing/edited templates 107 | 108 | Once you've edited your template files, you're ready to build again with: 109 | 110 | ```bash 111 | psp build -t ./templates -c ./config.json 112 | ``` 113 | or 114 | ```bash 115 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder psp build -t /mnt/templates -o /mnt/docs -p /mnt/partials 116 | ``` 117 | 118 | ## Publishing 119 | 120 | The policybuilder tool supports publishing to JupiterOne and Confluence via 121 | the `publish` subcommand. 122 | 123 | ### Publishing policies and procedures to JupiterOne 124 | 125 | If you have an account on the 126 | [JupiterOne security platform](https://jupiterone.io), you can run the following 127 | command to publish the contents of your policies and procedures to your 128 | JupiterOne account, so that you and others in your organization can access them 129 | online. 130 | 131 | ```bash 132 | psp publish -c ./templates/config.json -t ./templates -a $J1_ACCOUNT_ID -k $J1_API_TOKEN 133 | ``` 134 | 135 | or 136 | 137 | ```bash 138 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder psp publish -c /mnt/config.json -t /mnt/templates -a $J1_ACCOUNT_ID -k $J1_API_TOKEN 139 | ``` 140 | 141 | Your JupiterOne token must have the `Policies:Admin` privilege, or be issued by an 142 | account Administrator, in order to publish the contents. 143 | 144 | ### Publishing policies and procedures to Confluence 145 | 146 | You can also publish the policies to a Confluence wiki space. Simply run the 147 | `psp publish` command with the `--confluence` option. 148 | 149 | ```bash 150 | psp publish --confluence 151 | ``` 152 | 153 | You will be prompted to enter your Confluence domain and space key, and 154 | username/password: 155 | 156 | ```bash 157 | ? Confluence domain (the vanity subdomain before '.atlassian.net'): 158 | ? Confluence space key: 159 | ? Confluence username: 160 | ? Confluence password: [hidden] 161 | Published 35 docs to Confluence. 162 | ``` 163 | 164 | Or, provide the necessary configuration options for non-interactive publishing: 165 | 166 | ```bash 167 | psp publish --confluence --site <domain> --space <space-key> --docs <docs-dir> -u <username> -k <password> 168 | ``` 169 | 170 | The program will save the page ID for each published policy document locally to 171 | a file in the current directory: `confluence-pages.json`. Make sure this file is 172 | **retained** because the program will use the page ID for each policy to update 173 | the Confluence page the next time it is run. 174 | 175 | _We recommend creating a dedicated wiki space for these security policies._ 176 | 177 | ## PSP Best Practices 178 | 179 | The PSPs supported by the tool are meant to be automatically generated from 180 | source. We recommend the following practices: 181 | 182 | ### Versioning 183 | 184 | We highly recommend you practice policy-as-code and version your `templates` dir 185 | and `config.json` file.
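For example, a minimal first commit might look like the sketch below (directory and file names are the defaults produced by `psp build`):

```bash
# Hypothetical initial commit of the policy sources
git init
git add templates/ config.json
git commit -m "Add policy templates and builder configuration"
```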
If you use `git` for version control, we recommend 186 | putting the following in your project's `.gitignore`: 187 | 188 | ``` 189 | docs 190 | partials 191 | ``` 192 | 193 | Doing this makes it obvious what is to be edited in order to update your PSPs, 194 | and prevents confusion. 195 | 196 | If your versioning system supports it, we recommend limiting merge authority to 197 | authorized security staff only. 198 | 199 | ### CI/CD 200 | 201 | Building and publishing the PSPs upon an authorized merge to the main branch is 202 | supported via the `-n` or `--noninteractive` flag. Do something like: 203 | 204 | ```bash 205 | #!/bin/bash 206 | set -euo pipefail 207 | cd to/cloned/repo 208 | 209 | # build documentation 210 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder psp build -c /mnt/config.json -o /mnt/docs -t /mnt/templates --noninteractive 211 | 212 | # publish templates to JupiterOne graph 213 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder psp publish -c /mnt/config.json -t /mnt/templates -a $J1_ACCOUNT_ID -k $J1_API_TOKEN --noninteractive 214 | 215 | # generate static HTML in 'site' directory 216 | # mkdocs command expects the YAML file to be at the root of the project 217 | cp docs/mkdocs.yml . 218 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder mkdocs build -f /mnt/mkdocs.yml 219 | 220 | # copy to static site host (here, AWS S3 bucket) 221 | cd site 222 | aws s3 cp --recursive . s3://mybucket/location 223 | ``` 224 | 225 | ### Generating a static HTML site from Markdown 226 | 227 | We recommend the `mkdocs` tool for this. See the example above in "CI/CD", which does 228 | `mkdocs build`. 229 | 230 | ## Advanced Usage 231 | 232 | ### Advanced JSON configuration 233 | 234 | You may edit your `config.json` file directly to provide input to the 235 | configurable questions. The `config.json` file contains the following sections: 236 | 237 | `organization` 238 | 239 | - You can edit this section directly to provide answers to the questions that 240 | are prompted by the `psp-builder` CLI. 241 | - You can also add your own custom variables in this section -- e.g. 242 | `"variableName": "value"`. Make sure to add the same 243 | variable(s) to your templates in the format of `{{variableName}}`. 244 | - Note that if the variable is a URL/URI, you will need to add the `&` symbol to 245 | the variable in your templates -- e.g. `{{&variableURL}}`. 246 | 247 | `standards` 248 | 249 | - Contains references to various compliance standards and frameworks. 250 | - **DO NOT** edit this section. 251 | 252 | `policies` 253 | 254 | - Contains all available policies and the corresponding procedures that 255 | implement and enforce each policy. 256 | - It is recommended that all policies be included in your security program 257 | and documentation; therefore, you should **NOT** edit this section. 258 | 259 | `procedures` 260 | 261 | - Contains the individual procedure documentation. 262 | - Each procedure includes the following: 263 | 264 | - A `summary` to provide high-level guidance about that particular procedure 265 | and its implementation. This is for your reference. 266 | - A set of `resources` that will help with implementation and/or selecting a 267 | third-party vendor solution. 268 | - A `provider` property, pre-populated with the recommended solution, if 269 | applicable. You may update this to the solution you have actually selected. 270 | The policy builder will update the documentation text within that procedure 271 | accordingly.
272 | - If you choose to exclude a procedure from your final policy documentation, 273 | you may set the `adopted` flag to `false`. The policy builder will skip 274 | those when compiling the policies. 275 | 276 | ### Build Local Docker Image 277 | 278 | If you'd prefer not to use the image provided by DockerHub, you may build your 279 | own docker image by cloning this repository and running: 280 | 281 | ``` 282 | docker build -t pspbuilder . 283 | ``` 284 | 285 | ### Preview Mkdocs Output Locally 286 | 287 | The static HTML files generated by mkdocs (see "CI/CD" above for an example) may be 288 | viewed locally by doing: 289 | 290 | 1. `cd site` 291 | 1. `python3 -m http.server 8000` 292 | 1. `open http://localhost:8000` 293 | 294 | ### Install Mkdocs Locally 295 | 296 | Note: local mkdocs usage is not supported. 297 | 298 | ```bash 299 | pip install --upgrade pip 300 | pip install mkdocs mkdocs-material 301 | ``` 302 | 303 | See http://www.mkdocs.org for more info. Additionally, mkdocs is configured to 304 | use the `mkdocs-material` theme. Instructions 305 | [can be found here](https://squidfunk.github.io/mkdocs-material/getting-started/). 306 | 307 | ### Generating PDF and Word Documents 308 | 309 | `Pandoc` can be used to automatically convert the markdown files into PDF or 310 | Word documents. 311 | 312 | #### Pandoc Conversion Using Docker Image 313 | 314 | The supported `jupiterone/pspbuilder-extras` docker image has the necessary pandoc 315 | dependencies installed. You may issue commands like: 316 | 317 | ```bash 318 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder-extras pandoc /mnt/docs/filename.md -f markdown -t latex --pdf-engine=xelatex --variable monofont="Monaco" -o /mnt/pdf/filename.pdf 319 | ``` 320 | 321 | to convert a single markdown file into a PDF. 322 | 323 | #### Local Pandoc Installation Steps for macOS 324 | 325 | NOTE: Local pandoc usage is not supported. 326 | 327 | To install and configure `pandoc` locally on your system, follow the 328 | installation instructions here: 329 | [pandoc.org/installing.html](https://pandoc.org/installing.html) 330 | 331 | or issue the following commands: 332 | 333 | Install **Pandoc**: 334 | 335 | ```bash 336 | brew install pandoc 337 | ``` 338 | 339 | Install **pandoc-latex-admonition**, which is a pandoc filter for adding 340 | admonitions: 341 | 342 | ```bash 343 | pip install pandoc-latex-admonition 344 | ``` 345 | 346 | Download and install **LaTeX**, or 347 | [MacTeX](http://www.tug.org/mactex/morepackages.html). The smaller distribution, 348 | BasicTeX, is sufficient, but additional packages are required: 349 | 350 | ```bash 351 | sudo tlmgr install collection-fontsrecommended 352 | sudo tlmgr install mdframed 353 | sudo tlmgr install needspace 354 | sudo tlmgr install ucharcat 355 | sudo tlmgr install tcolorbox 356 | sudo tlmgr install environ 357 | sudo tlmgr install trimspaces 358 | ``` 359 | 360 | NOTE: on macOS systems, you will likely also need to install XeLaTeX from here: 361 | [http://www.texts.io/support/0001/](http://www.texts.io/support/0001/) 362 | 363 | Start a new terminal session to ensure `pandoc` runs. Note that some UTF-8 364 | characters 365 | [may not be supported out-of-the-box](https://stackoverflow.com/questions/18178084/pandoc-and-foreign-characters). 366 | The `--pdf-engine=xelatex --variable monofont="Monaco"` options help, but other 367 | fonts may be required if your content needs them.
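If pandoc reports missing characters or glyphs, one possible workaround is to point `monofont` at a typeface with broader Unicode coverage. This is only a sketch: the font name below is illustrative and must be installed on your system (or inside the container) for it to work:

```bash
# Hypothetical re-run of the single-file conversion with a different mono font
pandoc docs/filename.md -f markdown -t latex --pdf-engine=xelatex \
  --variable monofont="DejaVu Sans Mono" -o pdf/filename.pdf
```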
368 | 369 | **Example script for generating individual PDF policy documents:** 370 | 371 | ```bash 372 | #!/bin/bash 373 | cd ./docs 374 | mkdir pdf 375 | for filename in *.md; do 376 | echo $filename 377 | pandoc $filename -f markdown -t latex --pdf-engine=xelatex --variable monofont="Monaco" -o ./pdf/$filename.pdf 378 | done 379 | ``` 380 | 381 | **Example script for generating a combined PDF policy document, using Docker:** 382 | 383 | Create a small bash script, called `pdf.sh`: 384 | 385 | ```bash 386 | #!/bin/bash 387 | cd /mnt 388 | mkdir pdf 389 | cd /mnt/docs 390 | pandoc *.md -f markdown -t latex --latex-engine=xelatex --variable monofont="inconsolata" --toc -o /mnt/pdf/infosec-policies.pdf 391 | ``` 392 | 393 | Then, issue: 394 | 395 | ```bash 396 | docker run -it -v "$PWD":/mnt --rm jupiterone/pspbuilder-extras /mnt/pdf.sh 397 | ``` 398 | 399 | This should stitch together all of your markdown files (in alphabetical order 400 | returned by the bash glob, `*`). You could replace this with individual ordering 401 | of file arguments if you wanted more control of the sequencing. 402 | 403 | **Example script for generating Word documents:** 404 | 405 | ```bash 406 | mkdir docx 407 | pandoc model.md *.md -f markdown -t docx --toc -o ./docx/infosec-policies.docx 408 | ``` 409 | 410 | ### Generating Self Assessment Reports 411 | 412 | The current version of the policy builder supports generating a lightweight 413 | HIPAA self assessment report, based on a few key questions and the adoption of 414 | policies and procedures. 415 | 416 | ```bash 417 | ./bin/psp assess --standard hipaa --config [options] 418 | ``` 419 | 420 | The above command generates a HIPAA self assessment report in the 421 | `./assessments` directory. The report contains mapping of your adopted 422 | policies/procedures to each specific HIPAA regulation requirement. It also 423 | contains placeholders, where applicable, for you to provide additional details. 424 | Gaps identified will be called out in the command line output as well as in the 425 | report itself. 426 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = require('@jupiterone/typescript-tools/config/babel'); 2 | -------------------------------------------------------------------------------- /bin/devInit.js: -------------------------------------------------------------------------------- 1 | require('ts-node/register/transpile-only'); 2 | require('require-self-ref'); 3 | -------------------------------------------------------------------------------- /bin/docker-mkdocs: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export LC_ALL=C.UTF-8 3 | python3 -m mkdocs $* 4 | -------------------------------------------------------------------------------- /bin/mkdocs: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | dir="$(dirname "$0")" 4 | 5 | virtualenv="${dir}/../env/bin/activate" 6 | if [ ! -f "$virtualenv" ]; then 7 | echo "Cannot find virtualenv at $virtualenv. Have you run 'yarn mkdocs'?" 
8 | exit 1 9 | fi 10 | 11 | # shellcheck source=/dev/null 12 | source "$virtualenv" 13 | 14 | mkdocs "$@" 15 | -------------------------------------------------------------------------------- /bin/psp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | require('./devInit'); 3 | require('../src/commands/psp') 4 | -------------------------------------------------------------------------------- /bin/psp-assess: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | require('./devInit'); 3 | const {run} = require('../src/commands/psp-assess') 4 | 5 | run().catch((err) => { 6 | console.error('Unexpected error performing assessment. Error: ' + (err.stack || err.toString())); 7 | }); -------------------------------------------------------------------------------- /bin/psp-build: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | require('./devInit'); 3 | const {run} = require('../src/commands/psp-build') 4 | 5 | run().catch((err) => { 6 | console.error('Unexpected error building policies. ' + (err.stack || err.toString())); 7 | process.exitCode = 1; 8 | }); 9 | -------------------------------------------------------------------------------- /bin/psp-publish: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | require('./devInit'); 3 | const {run} = require('../src/commands/psp-publish') 4 | 5 | run().catch((err) => { 6 | console.error('Unexpected error publishing. ' + (err.stack || err.toString())); 7 | process.exitCode = 1; 8 | }); 9 | -------------------------------------------------------------------------------- /ci_bin/docker_push: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin 3 | docker push jupiterone/pspbuilder 4 | -------------------------------------------------------------------------------- /cortex.yaml: -------------------------------------------------------------------------------- 1 | openapi: 3.0.1 2 | info: 3 | title: '@jupiterone/security-policy-builder' 4 | description: >- 5 | Configurable compilation of policies, standards, and procedures 6 | documentation. 
7 | x-cortex-git: 8 | github: 9 | repository: JupiterOne/security-policy-builder 10 | x-cortex-owners: 11 | - type: group 12 | name: JupiterOne/Apps 13 | x-cortex-tag: '@jupiterone/security-policy-builder' 14 | x-cortex-service-groups: 15 | - tier-4 16 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | process.env.RUNNING_TESTS = 'true'; 2 | 3 | module.exports = { 4 | ...require('@jupiterone/typescript-tools/config/jest'), 5 | setupFilesAfterEnv: ['./jest.setup.ts'], 6 | collectCoverage: true, 7 | collectCoverageFrom: ['src/**/*.ts'], 8 | coverageThreshold: { 9 | global: { 10 | statements: 22, 11 | branches: 16, 12 | lines: 22, 13 | functions: 29, 14 | }, 15 | }, 16 | }; 17 | -------------------------------------------------------------------------------- /jest.setup.ts: -------------------------------------------------------------------------------- 1 | process.env.RUNNING_TESTS = 'true'; 2 | -------------------------------------------------------------------------------- /lint-staged.config.js: -------------------------------------------------------------------------------- 1 | module.exports = require('@jupiterone/typescript-tools/config/lint-staged'); 2 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@jupiterone/security-policy-builder", 3 | "version": "4.2.0", 4 | "description": "Configurable compilation of policies, standards, and procedures documentation.", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/JupiterOne/security-policy-builder" 8 | }, 9 | "license": "UNLICENSED", 10 | "author": "JupiterOne ", 11 | "keywords": [ 12 | "security", 13 | "documentation", 14 | "compiler" 15 | ], 16 | "main": "dist/index.js", 17 | "bin": { 18 | "psp": "bin/psp" 19 | }, 20 | "scripts": { 21 | "compile": "yarn ttsc --declaration -p tsconfig.dist.json && cp -R ./bin ./dist/", 22 | "build": "yarn compile && cp -R LICENSE *.md static yarn.lock package.json ./dist/ && (cd ./dist && yarn install --production)", 23 | "bundle": "yarn build && echo '' > ./dist/bin/devInit.js", 24 | "clean": "rm -rf -- *.log *.zip", 25 | "format-code": "yarn rewrite-imports --dir . 
&& prettier --write '**/*.{js,ts,md,json}'", 26 | "lint": "eslint --ext .ts,.js .", 27 | "test": "yarn jest .", 28 | "local-publish": "export $(grep -v '^#' .env | xargs); ./bin/psp publish -n -c ./config.json -t ./templates -a $J1_ACCOUNT_ID -k $J1_API_TOKEN", 29 | "publish:patch": "yarn version --patch", 30 | "publish:minor": "yarn version --minor", 31 | "publish:major": "yarn version --major" 32 | }, 33 | "dependencies": { 34 | "@jupiterone/security-policy-templates": "^2.9.0", 35 | "@lifeomic/attempt": "^3.0.0", 36 | "chalk": "^4.1.0", 37 | "commander": "^7.1.0", 38 | "file-set": "^4.0.1", 39 | "fs-extra": "^9.0.1", 40 | "graphql": "^15.3.0", 41 | "graphql-tag": "^2.11.0", 42 | "inquirer": "^8.2.0", 43 | "joi": "^17.2.1", 44 | "moment": "^2.27.0", 45 | "mustache": "^4.0.1", 46 | "node-fetch": "^2.6.1", 47 | "p-all": "^3.0.0", 48 | "p-map": "^4.0.0", 49 | "pluralize": "^8.0.0", 50 | "progress": "^2.0.3", 51 | "showdown": "^1.9.1", 52 | "strip-ansi": "^6.0.0" 53 | }, 54 | "devDependencies": { 55 | "@jupiterone/typescript-tools": "^15.0.0", 56 | "@types/fs-extra": "^9.0.1", 57 | "@types/inquirer": "^7.3.1", 58 | "@types/mustache": "^4.0.1", 59 | "@types/nock": "^11.1.0", 60 | "@types/node-fetch": "^2.5.7", 61 | "@types/pluralize": "^0.0.29", 62 | "@types/progress": "^2.0.3", 63 | "@types/showdown": "^1.9.3", 64 | "nock": "^13.0.4", 65 | "require-self-ref": "^2.0.1", 66 | "ts-node": "^9.0.0", 67 | "ttypescript": "^1.5.15", 68 | "type-fest": "^1.1.3" 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = require('@jupiterone/typescript-tools/config/prettier'); 2 | -------------------------------------------------------------------------------- /src/assessment/hipaa/questions.ts: -------------------------------------------------------------------------------- 1 | import { PolicyAssessmentQuestion } from '~/src/types'; 2 | 3 | export const hipaaAssessmentQuestions: PolicyAssessmentQuestion[] = [ 4 | { 5 | type: 'confirm', 6 | name: 'hasHIPAATrainingGap', 7 | message: 8 | 'Did everyone in your organization receive Annual HIPAA Awareness Training?', 9 | default: false, 10 | }, 11 | { 12 | type: 'confirm', 13 | name: 'hasInfoSecTrainingGap', 14 | message: 15 | 'Did everyone in your organization receive at least annual training on the information security policies?', 16 | default: false, 17 | }, 18 | { 19 | type: 'confirm', 20 | name: 'hasRiskAssessmentGap', 21 | message: 22 | 'Have you completed an annual Risk Assessment and documented the risks and action plans in a Risk Registry?', 23 | default: false, 24 | }, 25 | { 26 | type: 'confirm', 27 | name: 'hasPenTestGap', 28 | message: 29 | 'Have you completed at least one Penetration Test in the last year and documented the results and remediations?', 30 | default: false, 31 | }, 32 | { 33 | type: 'input', 34 | name: 'lastPenTestDate', 35 | message: 'What was the date of your last Penetration Test?', 36 | when: function (answers) { 37 | return answers.hasPenTestGap === true; 38 | }, 39 | }, 40 | { 41 | type: 'input', 42 | name: 'lastPenTestProvider', 43 | message: 'Who performed your last Penetration Test?', 44 | when: function (answers) { 45 | return answers.hasPenTestGap === true; 46 | }, 47 | }, 48 | { 49 | type: 'input', 50 | name: 'penTestFrequency', 51 | message: 'How often are Penetration Tests performed?', 52 | when: function (answers) { 53 | return answers.hasPenTestGap === 
true; 54 | }, 55 | default: 'quarterly', 56 | }, 57 | { 58 | type: 'input', 59 | name: 'nextPenTestDate', 60 | message: 'When is the date of your next scheduled Penetration Test?', 61 | when: function (answers) { 62 | return answers.hasPenTestGap === true; 63 | }, 64 | }, 65 | { 66 | type: 'confirm', 67 | name: 'hadDataBreach', 68 | message: 'Have you had a Data Breach in the last 12 months?', 69 | default: false, 70 | }, 71 | ]; 72 | -------------------------------------------------------------------------------- /src/assessment/index.ts: -------------------------------------------------------------------------------- 1 | import * as configure from '~/src/configure'; 2 | import moment from 'moment'; 3 | import * as render from '~/src/render'; 4 | import chalk from 'chalk'; 5 | import path from 'path'; 6 | import fs from 'fs-extra'; 7 | import { DEFAULT_TEMPLATES } from '~/src/constants'; 8 | import { 9 | AnnotatedRefs, 10 | AssessmentInput, 11 | ControlsMappings, 12 | Gap, 13 | Organization, 14 | PolicyBuilderConfig, 15 | PolicyBuilderElement, 16 | PolicyBuilderPaths, 17 | StandardConfig, 18 | StandardName, 19 | StandardRequirement, 20 | } from '~/src/types'; 21 | import { hipaaAssessmentQuestions } from './hipaa/questions'; 22 | import { Entity } from '~/src/j1/types'; 23 | 24 | const STANDARDS: Record = { 25 | hipaa: hipaaAssessmentQuestions, 26 | }; 27 | 28 | function bulletList(items: string[]) { 29 | let list = ''; 30 | items.forEach((item) => { 31 | list += ` * ${item}\n`; 32 | }); 33 | return list; 34 | } 35 | 36 | function print(msg: string) { 37 | if (process.env.NODE_ENV !== 'test') { 38 | console.log(msg); 39 | } 40 | } 41 | 42 | function printError(msg: string) { 43 | if (process.env.NODE_ENV !== 'test') { 44 | console.error(msg); 45 | } 46 | } 47 | 48 | function validateOrgValues(values: Organization) { 49 | const missing = configure.missingOrganizationValues(values); 50 | let valid = true; 51 | 52 | if (missing.length !== 0) { 53 | print( 54 | chalk.red( 55 | `Missing the following configuration value(s):\n${bulletList(missing)}` 56 | ) 57 | ); 58 | valid = false; 59 | } 60 | 61 | const emptyValues = configure 62 | .missingOrEmptyOrganizationValues(values) 63 | .map((x) => x.name); 64 | 65 | if (emptyValues.length !== 0) { 66 | checkPrivSecOfficers(values, emptyValues); 67 | print( 68 | chalk.yellow( 69 | `The following configuration value(s) are blank:\n${bulletList( 70 | emptyValues 71 | )}` 72 | ) 73 | ); 74 | valid = false; 75 | } 76 | 77 | return valid; 78 | } 79 | 80 | function checkPrivSecOfficers(values: Organization, emptyValues: string[]) { 81 | if ( 82 | emptyValues.filter((emptyValue) => 83 | /privacy|securityOfficerName/.test(emptyValue) 84 | ).length > 0 85 | ) { 86 | let note = 87 | 'A Security Officer and a Privacy Officer must both be assigned.\n'; 88 | note += 'NOTE: It can be the same person fulfilling both roles.\n'; 89 | note += `Security Officer: '${values.securityOfficerName}'\n`; 90 | note += `Privacy Officer: '${values.privacyOfficerName}'\n\n`; 91 | print(chalk.yellow(note)); 92 | } 93 | } 94 | 95 | function questions(standardName: string) { 96 | const normalizedStandardName = standardName 97 | .replace(/[^a-z]/gi, '') 98 | .toLowerCase(); 99 | const assessmentQuestions = STANDARDS[normalizedStandardName]; 100 | if (!assessmentQuestions) { 101 | throw new Error(`Invalid standard: ${standardName}`); 102 | } 103 | return assessmentQuestions; 104 | } 105 | 106 | async function generateReport( 107 | orgVars: AssessmentInput, 108 | standard: 
StandardName, 109 | paths: PolicyBuilderPaths 110 | ) { 111 | const standardBaseName = standard.toLowerCase().replace(/ /g, '-'); 112 | const standardTemplateName = standardBaseName + '.md.tmpl'; 113 | const templateFile = path.join( 114 | paths.templates!, 115 | 'assessments', 116 | standardTemplateName 117 | ); 118 | const reportFileName = 119 | standardBaseName + 120 | '-' + 121 | moment(orgVars.date).format('YYYYMMDD-HHmmss') + 122 | '.md'; 123 | const outputPath = path.join(paths.output, reportFileName); 124 | 125 | let renderedFile; 126 | try { 127 | await fs.mkdirs(paths.output); 128 | renderedFile = await render.renderTemplateFile( 129 | templateFile, 130 | orgVars, 131 | outputPath 132 | ); 133 | print(chalk.grey(`generated report: ${renderedFile}`)); 134 | } catch (err) { 135 | printError(chalk.yellow(err.message)); 136 | } 137 | } 138 | 139 | function generatePolicyTOC(config: PolicyBuilderConfig) { 140 | const toc = render.generateTOC(config); 141 | 142 | let genStr = ''; 143 | toc.forEach((policy) => { 144 | if (/Appendix/.test(policy.name)) { 145 | return; 146 | } 147 | genStr += `${policy.name}\n`; 148 | }); 149 | return genStr; 150 | } 151 | 152 | function generateStandardControlsLookup( 153 | standardName: string, 154 | controlsMapping: ControlsMappings 155 | ) { 156 | const lookup: Record = {}; 157 | controlsMapping.procedures.forEach((procedure) => { 158 | const implementsStandard = procedure.implements 159 | .filter((i) => i.standard.toUpperCase() === standardName.toUpperCase()) 160 | .pop(); 161 | if (!implementsStandard) { 162 | return; 163 | } 164 | if (implementsStandard.requirements) { 165 | implementsStandard.requirements.forEach((requirementId) => { 166 | lookup[requirementId] = lookup[requirementId] || []; 167 | lookup[requirementId].push(procedure.id); 168 | }); 169 | } 170 | if (implementsStandard.controls) { 171 | implementsStandard.controls.forEach((controlId) => { 172 | lookup[controlId] = lookup[controlId] || []; 173 | lookup[controlId].push(procedure.id); 174 | }); 175 | } 176 | }); 177 | return lookup; 178 | } 179 | 180 | async function calculateCPGaps( 181 | standardName: string, 182 | config: PolicyBuilderConfig, 183 | paths: PolicyBuilderPaths 184 | ) { 185 | const standardsDataDir = 186 | paths && paths.templates 187 | ? 
path.join(paths.templates, 'standards') 188 | : path.join(__dirname, '../../', DEFAULT_TEMPLATES, 'standards'); 189 | const standard = require(path.join( 190 | standardsDataDir, 191 | standardName.toLowerCase() 192 | )) as StandardConfig; 193 | const controlsMapping = require(path.join( 194 | standardsDataDir, 195 | 'controls-mapping' 196 | )) as ControlsMappings; 197 | const controlsLookup = generateStandardControlsLookup( 198 | standardName, 199 | controlsMapping 200 | ); 201 | const cpGaps: StandardRequirement[] = []; 202 | const annotatedRefs: AnnotatedRefs = {}; 203 | 204 | standard.sections.forEach((section) => { 205 | annotatedRefs[section.title] = []; 206 | 207 | section.requirements.forEach((requirement) => { 208 | // check if requirement has conditional applicability 209 | if (requirement.appliesIf) { 210 | const booleanFlag = requirement.appliesIf; 211 | if ((config.organization as any)[booleanFlag] === true) { 212 | requirement.hasGap = true; // annotate requirement object with gap 213 | cpGaps.push(requirement); 214 | } 215 | annotatedRefs[section.title].push(requirement); 216 | return; 217 | } 218 | 219 | const cps = controlsLookup[requirement.ref]; 220 | 221 | if (!cps || cps.length === 0) { 222 | const errmsg = `No ${standardName.toUpperCase()} controls or procedures mapping found for requirement ${ 223 | requirement.ref 224 | } (${requirement.title})`; 225 | printError(chalk.red(errmsg)); 226 | return; 227 | } 228 | 229 | // compare adopted configured-cps to standards-mapped cps 230 | const { unAdoptedCPs, adoptedCPs } = controlAdoptionFilter(cps, config); 231 | 232 | if (adoptedCPs.length === 0) { 233 | requirement.noadoption = true; 234 | requirement.hasGap = true; 235 | } 236 | 237 | if (unAdoptedCPs.length > 0) { 238 | requirement.hasGap = true; 239 | } 240 | 241 | if (requirement.hasGap) { 242 | cpGaps.push(requirement); 243 | } 244 | 245 | Object.assign(requirement, { unAdoptedCPs, adoptedCPs }); 246 | annotatedRefs[section.title].push(requirement); 247 | }); 248 | }); 249 | 250 | return { cpGaps, annotatedRefs }; 251 | } 252 | 253 | function generateStandardControlsMapping( 254 | annotatedRefs: AnnotatedRefs, 255 | config: PolicyBuilderConfig 256 | ) { 257 | let mapping = ''; 258 | const company = config.organization.companyShortName; 259 | Object.keys(annotatedRefs).forEach((section) => { 260 | mapping += `### ${section}\n\n`; 261 | annotatedRefs[section].forEach((reference) => { 262 | mapping += `${reference.hasGap ? '✘' : '✔'} **${reference.ref}** *${ 263 | reference.title 264 | }:* ${reference.summary}\n\n`; 265 | 266 | // handle special-case standard references with conditional applicability 267 | if (reference.appliesIf) { 268 | mapping += reference.hasGap 269 | ? 
`> *Potential Gap:* ${company} must meet requirement` 270 | : `> *Not Applicable:* ${company} is not subject to requirement `; 271 | mapping += `${reference.ref} (${reference.title})\n\n`; 272 | return; 273 | } 274 | 275 | if (reference.noadoption) { 276 | mapping += `> *Gap:* No applicable controls or procedures have been adopted\n\n`; 277 | return; 278 | } 279 | 280 | mapping += `> ${company} has adopted the following controls or procedures:\n>\n`; 281 | reference.adoptedCPs?.forEach((cp) => { 282 | mapping += `> - ${cp.id} (${cp.name})\n`; 283 | }); 284 | 285 | if (reference.unAdoptedCPs?.length) { 286 | mapping += `\n> ${company} has *NOT* adopted the following controls or procedures:\n>\n`; 287 | reference.unAdoptedCPs.forEach((cp) => { 288 | mapping += `> - *Potential Gap:* ${cp.id} (${cp.name})\n`; 289 | }); 290 | } 291 | 292 | mapping += '\n'; 293 | }); 294 | mapping += '----\n\n'; 295 | }); 296 | return mapping; 297 | } 298 | 299 | function controlAdoptionFilter(cpIds: string[], config: PolicyBuilderConfig) { 300 | const adoptedCPs: PolicyBuilderElement[] = []; 301 | const unAdoptedCPs: PolicyBuilderElement[] = []; 302 | 303 | cpIds.forEach((cpId) => { 304 | const configuredCP = config.procedures?.filter((cp) => cp.id === cpId)[0]; 305 | if (!configuredCP) { 306 | console.warn( 307 | `Control specified in controls-mapping not found in config.json: ${cpId}` 308 | ); 309 | } else { 310 | if (!configuredCP.adopted) { 311 | unAdoptedCPs.push(configuredCP); 312 | } else { 313 | adoptedCPs.push(configuredCP); 314 | } 315 | } 316 | }); 317 | return { unAdoptedCPs, adoptedCPs }; 318 | } 319 | 320 | // takes array of standard reference objects 321 | // returns markdown string 322 | function generateGapList(gaps: Gap[]) { 323 | const uniqueGaps: Record = {}; 324 | gaps.forEach((gap) => { 325 | uniqueGaps[gap.ref] = gap.title; 326 | }); 327 | 328 | let gapList = ''; 329 | Object.keys(uniqueGaps).forEach((gapRef) => { 330 | gapList += `* ${gapRef} ${uniqueGaps[gapRef]}\n`; 331 | }); 332 | return gapList; 333 | } 334 | 335 | function generateGapSummary( 336 | gaps: Gap[], 337 | config: PolicyBuilderConfig, 338 | standardName: StandardName 339 | ) { 340 | return gaps.length === 0 341 | ? `This assessment finds ${ 342 | config.organization.companyShortName 343 | } has met or exceeded all requirements as specified by ${standardName.toUpperCase()}. 
No gaps have been identified.` 344 | : `This assessment finds ${ 345 | config.organization.companyShortName 346 | } has compliance gaps, or has not yet adopted/implemented controls for all ${standardName.toUpperCase()} requirements.`; 347 | } 348 | 349 | // takes array of Risk Entities from JupiterOne 350 | // returns markdown string 351 | function generateRiskList(riskEntities: Entity[]) { 352 | const riskTableHeaders = 353 | '| Status | Priority | Impact | Probability | Risk Level |\n' + 354 | '| -- | -- | -- | -- | -- |\n'; 355 | 356 | let md = 357 | 'The follow risks were identified, reviewed, or updated during the Risk Assessment:\n\n'; 358 | riskEntities.forEach((riskEntity) => { 359 | const description = (riskEntity.description as string | undefined)?.replace( 360 | /(\r\n|\n|\r)/gm, 361 | '\n> ' 362 | ); 363 | md += `#### ${riskEntity.displayName}\n\n${riskEntity.webLink || ''}\n\n`; 364 | md += 365 | riskTableHeaders + 366 | `| ${riskEntity.status} ` + 367 | `| ${riskEntity.priority} ` + 368 | `| ${riskEntity.impact || riskEntity.riskImpact} ` + 369 | `| ${riskEntity.probability || riskEntity.riskProbability} ` + 370 | `| ${riskEntity.level || riskEntity.riskLevel} |\n\n`; 371 | md += `> **${riskEntity.summary || ''}**\n>\n`; 372 | md += `> ${description || 'No additional details.'}\n\n`; 373 | }); 374 | return md; 375 | } 376 | 377 | export { 378 | calculateCPGaps, 379 | generateGapList, 380 | generateGapSummary, 381 | generateReport, 382 | generateStandardControlsMapping, 383 | generatePolicyTOC, 384 | generateRiskList, 385 | questions, 386 | validateOrgValues, 387 | }; 388 | -------------------------------------------------------------------------------- /src/assets.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fs from 'fs-extra'; 3 | import fetch from 'node-fetch'; 4 | import { PolicyBuilderConfig, PolicyBuilderPaths } from '~/src/types'; 5 | 6 | const FileSet = require('file-set'); 7 | 8 | // download custom logo image if specified in config, and place it where mkdocs expects 9 | // such files to live. 
10 | async function downloadCustomLogo( 11 | config: PolicyBuilderConfig, 12 | paths: PolicyBuilderPaths 13 | ) { 14 | const org = config.organization || {}; 15 | if (!org.wantCustomMkdocsTemplate) { 16 | org.mkdocsLogoFile = 'assets/images/logo.svg'; 17 | return org; // early exit unless custom logo override is provided 18 | } 19 | 20 | const url = org.mkdocsLogoURL!; 21 | const logoFile = path.basename(url); 22 | const relLogoFile = `assets/images/${logoFile}`; 23 | 24 | const destDir = path.join(paths.output, 'assets/images'); 25 | const dest = path.join(destDir, logoFile); 26 | 27 | try { 28 | await fs.mkdirs(destDir); 29 | } catch (err) { 30 | throw new Error(`Unable to create output dir ${destDir}: ${err.message}`); 31 | } 32 | 33 | let inputStream: NodeJS.ReadableStream; 34 | try { 35 | const response = await fetch(url, { 36 | method: 'GET', 37 | }); 38 | if (response.status !== 200) { 39 | throw new Error(`Non-200 status code (status=${response.status})`); 40 | } 41 | inputStream = response.body; 42 | } catch (err) { 43 | throw new Error(`Unable to download ${url}: ${err.message}`); 44 | } 45 | 46 | const outputStream = fs.createWriteStream(dest); 47 | 48 | try { 49 | await new Promise((resolve, reject) => { 50 | inputStream.on('error', reject); 51 | outputStream.on('error', reject); 52 | outputStream.on('finish', resolve); 53 | inputStream.pipe(outputStream); 54 | }); 55 | } catch (err) { 56 | throw new Error(`Unable to save logo to ${dest}. Error: ${err.toString()}`); 57 | } 58 | 59 | org.mkdocsLogoFile = relLogoFile; 60 | return org; 61 | } 62 | 63 | async function copyStaticAssets(paths: PolicyBuilderPaths) { 64 | try { 65 | await fs.mkdirs(paths.output); 66 | } catch (err) { 67 | throw new Error( 68 | `Unable to create output directory, ${paths.output}: ${err.message}` 69 | ); 70 | } 71 | 72 | await fs.copy( 73 | path.join(__dirname, '../static/assets'), 74 | path.join(paths.output, 'assets') 75 | ); 76 | 77 | const pdf = new FileSet(path.join(paths.templates!, 'ref', '*.pdf')); 78 | const copyPromises: Promise[] = []; 79 | pdf.files.forEach((pdf: string) => { 80 | copyPromises.push( 81 | fs.copy(pdf, path.join(paths.output, 'ref', path.basename(pdf))) 82 | ); 83 | }); 84 | await Promise.all(copyPromises); 85 | } 86 | 87 | async function writeFileAsync(filePath: string, content: string) { 88 | await fs.mkdirs(path.dirname(filePath)); 89 | await fs.writeFile(filePath, content); 90 | } 91 | 92 | function fileExistsSync(filepath: string) { 93 | try { 94 | fs.accessSync(filepath); 95 | return true; 96 | } catch (e) { 97 | return false; 98 | } 99 | } 100 | 101 | export { copyStaticAssets, downloadCustomLogo, fileExistsSync, writeFileAsync }; 102 | -------------------------------------------------------------------------------- /src/commands/psp-assess.test.ts: -------------------------------------------------------------------------------- 1 | import { calculateInputGaps } from '~/src/commands/psp-assess'; 2 | 3 | test('#calculateInputGaps should return gaps as ref and title', () => { 4 | const gaps = calculateInputGaps({ 5 | hasHIPAATrainingGap: true, 6 | hasPenTestGap: true, 7 | date: new Date(), 8 | isHIPAACoveredEntityText: 'is', 9 | isHIPAABusinessAssociateText: 'is not', 10 | }); 11 | 12 | gaps.sort((v1, v2) => { 13 | return v1.ref.localeCompare(v2.ref); 14 | }); 15 | 16 | expect(gaps).toEqual([ 17 | { 18 | ref: 'HIPAA Training', 19 | title: '(see above)', 20 | }, 21 | { 22 | ref: 'Pen Test', 23 | title: '(see above)', 24 | }, 25 | ]); 26 | }); 27 | 
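
The test above pins down the convention that ties prompt answers to reported gaps: any answer key of the form has<Name>Gap marks a potential gap, and its ref is derived by splitting the camel-cased <Name> into words. A minimal standalone sketch of that derivation is shown below; the helper name gapsFromAnswers and the truthy-value check are assumptions added for illustration, while the actual exported implementation lives in src/commands/psp-assess.ts as calculateInputGaps.

// Illustrative sketch only (not part of the repo): mirrors the /has(.*)Gap/
// naming convention exercised by the test above.
type GapRef = { ref: string; title: string };

function gapsFromAnswers(answers: Record<string, unknown>): GapRef[] {
  const gaps: GapRef[] = [];
  for (const key of Object.keys(answers)) {
    const match = /has(.*)Gap/.exec(key);
    // Only truthy answers are treated as gaps here; that value check is a
    // simplification for this sketch, not taken from the repo code.
    if (match && answers[key] === true) {
      // Insert a space before each Upper+lower pair: 'PenTest' -> 'Pen Test',
      // 'HIPAATraining' -> 'HIPAA Training'.
      const ref = match[1].replace(/([A-Z])([a-z])/g, ' $1$2').trim();
      gaps.push({ ref, title: '(see above)' });
    }
  }
  return gaps;
}

// Example:
// gapsFromAnswers({ hasHIPAATrainingGap: true, date: new Date() })
//   -> [{ ref: 'HIPAA Training', title: '(see above)' }]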
-------------------------------------------------------------------------------- /src/commands/psp-assess.ts: -------------------------------------------------------------------------------- 1 | import * as configure from '~/src/configure'; 2 | import commander from 'commander'; 3 | import * as assessment from '~/src/assessment'; 4 | import chalk from 'chalk'; 5 | import * as error from '~/src/error'; 6 | import path from 'path'; 7 | import fs from 'fs-extra'; 8 | import { 9 | AssessmentAnswers, 10 | Gap, 11 | PolicyBuilderConfig, 12 | PolicyBuilderPaths, 13 | AssessmentInput, 14 | } from '~/src/types'; 15 | import { Entity } from '~/src/j1/types'; 16 | import { DEFAULT_TEMPLATES } from '~/src/constants'; 17 | import packageJson from '~/package.json'; 18 | import { createJupiterOneClient, JupiterOneEnvironment } from '~/src/j1'; 19 | 20 | const EUSAGEERROR = 126; 21 | 22 | type ProgramInput = { 23 | version?: string; 24 | standard?: string; 25 | config?: string; 26 | output?: string; 27 | templates?: string; 28 | includeRisks?: string; 29 | account?: string; 30 | apiToken?: string; 31 | }; 32 | async function getRisksFromRegistry(program: ProgramInput): Promise { 33 | if (!program.account) { 34 | throw error.fatal('Missing -a, --account input!', EUSAGEERROR); 35 | } 36 | 37 | if (!program.apiToken) { 38 | throw error.fatal( 39 | 'Missing -k, --api-token input!', 40 | EUSAGEERROR 41 | ); 42 | } 43 | 44 | const j1Client = createJupiterOneClient({ 45 | accountId: program.account, 46 | apiKey: program.apiToken, 47 | targetEnvironment: process.env.J1_TARGET_ENV as JupiterOneEnvironment, 48 | }); 49 | return j1Client.queryForEntityList( 50 | 'find Risk with _beginOn > date.now - 1year' 51 | ); 52 | } 53 | 54 | export async function run() { 55 | // establish root project directory so sane relative paths work 56 | let projectDir = process.env.PROJECT_DIR; 57 | if (!projectDir) { 58 | projectDir = path.normalize(path.join(__dirname, '../../')); 59 | const projectDirs = projectDir.split('/'); 60 | if (projectDirs[projectDirs.length - 1] === 'commands') { 61 | projectDir = path.dirname(projectDir); 62 | } 63 | } 64 | 65 | const program = commander 66 | .version(packageJson.version, '-v, --version') 67 | .usage('--standard --config [options]') 68 | .option( 69 | '-s, --standard ', 70 | 'compliance standard to assess against. currently supported: hipaa' 71 | ) 72 | .option('-c, --config ', 'JSON config file') 73 | .option('-o, --output [dir]', 'optional output directory', 'assessments') 74 | .option('-t, --templates [dir]', 'optional path to template files') 75 | .option( 76 | '-r, --include-risks', 77 | 'include items from JupiterOne Risk Register (requires -a and -u/-k to authenticate to JupiterOne account)' 78 | ) 79 | .option('-a, --account ', 'JupiterOne account id') 80 | .option('-k, --api-token ', 'JupiterOne API token') 81 | .parse(process.argv) 82 | .opts() as ProgramInput; 83 | 84 | if (!program.standard || !program.config) { 85 | commander.outputHelp(); 86 | process.exit(2); 87 | } 88 | 89 | if (!program.templates) { 90 | // if unspecified via the --templates flag, 91 | // prefer a local 'templates' dir (as it may contain modifications), 92 | // default to @jupiterone/security-policy-templates NPM package if not found. 93 | const localTemplates = path.join(projectDir, 'templates'); 94 | const npmTemplates = path.join(projectDir, DEFAULT_TEMPLATES); 95 | program.templates = fs.pathExistsSync(localTemplates) 96 | ? 
localTemplates 97 | : npmTemplates; 98 | } else { 99 | program.templates = path.resolve(program.templates); 100 | } 101 | 102 | const paths: PolicyBuilderPaths = { 103 | templates: program.templates, 104 | output: program.output!, 105 | partials: 'partials', 106 | }; 107 | 108 | const configFile = program.config; 109 | console.log('config file: %j', configFile); 110 | 111 | let config: PolicyBuilderConfig = { 112 | organization: {}, 113 | }; 114 | try { 115 | config = JSON.parse(fs.readFileSync(configFile, { encoding: 'utf8' })); 116 | } catch (err) { 117 | error.fatal( 118 | `Unable to load configuration from ${configFile} : ${err}`, 119 | EUSAGEERROR 120 | ); 121 | } 122 | let riskList; 123 | if (program.includeRisks) { 124 | const riskEntities = await getRisksFromRegistry(program); 125 | riskList = assessment.generateRiskList(riskEntities); 126 | } else { 127 | riskList = 'Detailed risk items omitted.'; 128 | } 129 | 130 | const inputs = await gatherInputs(program, config); 131 | const standard = program.standard.toLowerCase(); 132 | 133 | // tabulate gaps from input prompts 134 | const inputGaps = calculateInputGaps(inputs); 135 | 136 | // tabulate gaps in controls/procedures 137 | const { cpGaps, annotatedRefs } = await assessment.calculateCPGaps( 138 | standard, 139 | config, 140 | paths 141 | ); 142 | 143 | const allGaps = inputGaps.concat(cpGaps); 144 | 145 | const gapSummary = assessment.generateGapSummary(allGaps, config, standard); 146 | const gapList = assessment.generateGapList(allGaps); 147 | const hipaaControlsMapping = assessment.generateStandardControlsMapping( 148 | annotatedRefs, 149 | config 150 | ); 151 | Object.assign(inputs, { 152 | gapList, 153 | gapSummary, 154 | hipaaControlsMapping, 155 | riskList, 156 | }); 157 | 158 | console.log(`Generating ${standard.toUpperCase()} self-assessment report...`); 159 | await assessment.generateReport(inputs, standard, paths); 160 | if (allGaps.length === 0) { 161 | console.log(chalk.green('No gaps identified.')); 162 | } else { 163 | console.log( 164 | chalk.yellow( 165 | `Gaps identified: ${allGaps.length}. 
See "Gaps, Findings and Action Items" section in report.` 166 | ) 167 | ); 168 | } 169 | } 170 | 171 | export function calculateInputGaps(inputs: AssessmentInput) { 172 | const gaps: Gap[] = []; 173 | const orgKeys = Object.keys(inputs) as (keyof AssessmentInput)[]; 174 | for (const orgKey of orgKeys) { 175 | const match = /has(.*)Gap/.exec(orgKey); 176 | if (match !== null) { 177 | const name = match[1]; 178 | const ref = name 179 | .replace(/([A-Z])([a-z])/g, (match, upper, lower) => { 180 | return ` ${upper}${lower}`; 181 | }) 182 | .trim(); 183 | gaps.push({ 184 | ref, 185 | title: '(see above)', 186 | }); 187 | } 188 | } 189 | 190 | return gaps; 191 | } 192 | 193 | export type PenTestQuestionName = 194 | | 'lastPenTestDate' 195 | | 'lastPenTestProvider' 196 | | 'penTestFrequency' 197 | | 'nextPenTestDate'; 198 | 199 | const PEN_TEST_QUESTIONS: PenTestQuestionName[] = [ 200 | 'lastPenTestDate', 201 | 'lastPenTestProvider', 202 | 'penTestFrequency', 203 | 'nextPenTestDate', 204 | ]; 205 | 206 | async function gatherInputs( 207 | program: ProgramInput, 208 | config: PolicyBuilderConfig 209 | ) { 210 | const org = config.organization; 211 | if (!assessment.validateOrgValues(org)) { 212 | error.fatal( 213 | 'Please update your policy config file and re-run the policy builder before continuing with the assessment.', 214 | EUSAGEERROR 215 | ); 216 | } 217 | 218 | const standardName = program.standard!; 219 | const answers = (await configure.safeInquirerPrompt( 220 | assessment.questions(standardName) 221 | )) as AssessmentAnswers; 222 | 223 | // invert boolean values for gap questions 224 | (Object.keys(answers) as (keyof AssessmentAnswers)[]) 225 | .filter((questionName) => /has.*Gap/.test(questionName)) 226 | .forEach((gap) => { 227 | answers[gap] = !answers[gap]; 228 | }); 229 | 230 | if (answers.hasPenTestGap) { 231 | PEN_TEST_QUESTIONS.forEach((item) => { 232 | answers[item] = '*TBD*' as any; 233 | }); 234 | } 235 | 236 | return { 237 | ...org, 238 | ...answers, 239 | date: new Date(), 240 | policyTOC: assessment.generatePolicyTOC(config), 241 | isHIPAACoveredEntityText: org.isHIPAACoveredEntity ? 'is' : 'is not', 242 | isHIPAABusinessAssociateText: org.isHIPAABusinessAssociate 243 | ? 
'is' 244 | : 'is not', 245 | }; 246 | } 247 | -------------------------------------------------------------------------------- /src/commands/psp-build.ts: -------------------------------------------------------------------------------- 1 | import { PolicyBuilderConfig, PolicyBuilderPaths } from '~/src/types'; 2 | import { policybuilder } from '~/src'; 3 | import pluralize from 'pluralize'; 4 | import * as configure from '~/src/configure'; 5 | import commander from 'commander'; 6 | import chalk from 'chalk'; 7 | import * as error from '~/src/error'; 8 | import path from 'path'; 9 | import fs from 'fs-extra'; 10 | import { DEFAULT_TEMPLATES } from '~/src/constants'; 11 | import packageJson from '~/package.json'; 12 | 13 | const EUSAGEERROR = 126; 14 | 15 | type ProgramInput = { 16 | version?: string; 17 | templates?: string; 18 | savetemplates?: string; 19 | noninteractive?: boolean; 20 | config?: string; 21 | output?: string; 22 | partials?: string; 23 | }; 24 | 25 | export async function run() { 26 | // establish root project directory so sane relative paths work 27 | let projectDir = process.env.PROJECT_DIR; 28 | if (!projectDir) { 29 | projectDir = path.normalize(path.join(__dirname, '../../')); 30 | const projectDirs = projectDir.split('/'); 31 | if (projectDirs[projectDirs.length - 1] === 'commands') { 32 | projectDir = path.dirname(projectDir); 33 | } 34 | } 35 | 36 | const program = commander 37 | .version(packageJson.version, '-v, --version') 38 | .usage('[options]') 39 | .option( 40 | '-t, --templates [dir]', 41 | 'optional path to existing template files.' 42 | ) 43 | .option( 44 | '-s, --savetemplates [dir]', 45 | 'optional path to save template files to upon first run.' 46 | ) 47 | .option( 48 | '-n, --noninteractive', 49 | 'exit with error if any configuration data is missing (do not prompt)' 50 | ) 51 | .option('-c, --config [file]', 'optional JSON config file') 52 | .option('-o, --output [dir]', 'optional output directory', 'docs') 53 | .option( 54 | '-p, --partials [dir]', 55 | 'optional path to partial files.', 56 | 'partials' 57 | ) 58 | .parse(process.argv) 59 | .opts() as ProgramInput; 60 | 61 | if (!program.templates) { 62 | // if unspecified via the --templates flag, 63 | // prefer a local 'templates' dir (as it may contain modifications), 64 | // default to @jupiterone/security-policy-templates NPM package if not found. 65 | const localTemplates = path.join(projectDir, 'templates'); 66 | const npmTemplates = path.join(projectDir, DEFAULT_TEMPLATES); 67 | program.templates = fs.pathExistsSync(localTemplates) 68 | ? 
localTemplates 69 | : npmTemplates; 70 | } 71 | 72 | if (!program.config) { 73 | program.config = path.join(program.templates, 'config.json'); 74 | } 75 | 76 | const paths: PolicyBuilderPaths = { 77 | partials: program.partials!, 78 | templates: program.templates, 79 | output: program.output!, 80 | }; 81 | 82 | (Object.keys(paths) as (keyof PolicyBuilderPaths)[]).forEach((path) => { 83 | console.log(`${path} dir: ${paths[path]}`); 84 | }); 85 | 86 | const configFile = program.config; 87 | console.log('config file: %j', configFile); 88 | 89 | let config: PolicyBuilderConfig = { 90 | organization: {}, 91 | }; 92 | try { 93 | config = JSON.parse(fs.readFileSync(configFile, { encoding: 'utf8' })); 94 | } catch (err) { 95 | error.fatal( 96 | `Unable to load configuration from ${configFile} : ${err}`, 97 | EUSAGEERROR 98 | ); 99 | } 100 | 101 | const missing = configure.missingOrganizationValues(config.organization); 102 | 103 | if (missing.length !== 0 && program.noninteractive) { 104 | error.fatal( 105 | `missing the following configuration value(s): ${missing.toString()}`, 106 | EUSAGEERROR 107 | ); 108 | } 109 | 110 | // ensure we have the configuration values we need 111 | config = await configure.promptForValues({ 112 | config, 113 | noninteractive: program.noninteractive, 114 | }); 115 | 116 | const { renderedPartials, renderedPSPDocs } = await policybuilder( 117 | config, 118 | paths 119 | ); 120 | 121 | showStatus(renderedPartials); 122 | showStatus(renderedPSPDocs); 123 | 124 | await exposeTemplates(program); 125 | } 126 | 127 | function showStatus(items: { ok: string[]; errors: string[]; type: string }) { 128 | const { ok, errors, type } = items; 129 | const numOk = ok.length; 130 | const numErrors = errors.length; 131 | const numTotal = numOk + numErrors; 132 | const color = type === 'partials' ? chalk.grey : chalk.green; 133 | console.log( 134 | color(`${numOk}/${numTotal} ${pluralize(type, numOk)} processed OK`) 135 | ); 136 | if (numErrors > 0) { 137 | console.log(chalk.yellow(`${numErrors} ${pluralize('error', numTotal)}.`)); 138 | } 139 | } 140 | 141 | async function exposeTemplates(program: ProgramInput) { 142 | const targetDir = program.savetemplates 143 | ? 
program.savetemplates 144 | : path.join(process.cwd(), 'templates'); 145 | if (!(await fs.pathExists(targetDir))) { 146 | await fs.copy(program.templates!, targetDir); 147 | console.log(`copied templates into ${targetDir} for future modification.`); 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /src/commands/psp-publish.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | import commander from 'commander'; 3 | import fs, { promises as fsPromises } from 'fs'; 4 | import { prompt } from 'inquirer'; 5 | import pAll from 'p-all'; 6 | import pMap from 'p-map'; 7 | import path from 'path'; 8 | import packageJson from '~/package.json'; 9 | import * as error from '~/src/error'; 10 | import { 11 | createJupiterOneClient, 12 | J1Options, 13 | JupiterOneClient, 14 | JupiterOneEnvironment, 15 | } from '~/src/j1'; 16 | import publishToConfluence, { 17 | PublishToConfluenceOptions, 18 | } from '~/src/publishToConfluence'; 19 | import { 20 | PolicyBuilderConfig, 21 | PolicyBuilderElement, 22 | SectionName, 23 | TemplateData, 24 | } from '~/src/types'; 25 | 26 | const EUSAGEERROR = 126; 27 | const MAX_CONCURRENCY = 4; 28 | 29 | type ProgramInput = { 30 | account?: string; 31 | config?: string; 32 | templates?: string; 33 | user?: string; 34 | apiToken?: string; 35 | noninteractive?: boolean; 36 | confluence?: string; 37 | site?: string; 38 | space?: string; 39 | docs?: string; 40 | wait?: boolean; 41 | }; 42 | 43 | export async function run() { 44 | const program = commander 45 | .version(packageJson.version, '-v, --version') 46 | .usage('[options]') 47 | .option('-a, --account ', 'JupiterOne account id') 48 | .option('-c, --config ', 'path to config file') 49 | .option( 50 | '-t, --templates [dir]', 51 | 'optional path to templates directory', 52 | 'templates' 53 | ) 54 | .option('-u, --user ', 'Confluence user email') 55 | .option( 56 | '-k, --api-token ', 57 | 'JupiterOne API token or Confluence user access key' 58 | ) 59 | .option( 60 | '-n, --noninteractive', 61 | 'do not prompt for confirmation, expect password on stdin' 62 | ) 63 | .option('--wait', 'Wait for completion') 64 | .option('--no-wait', 'Do not wait for completion (default)') 65 | .option('--confluence', 'publish to a Confluence wiki space') 66 | .option( 67 | '--site ', 68 | "Confluence site/domain (the vanity subdomain before '.atlassian.net')" 69 | ) 70 | .option('--space ', 'Space key of the Confluence wiki space') 71 | .option( 72 | '-d, --docs [dir]', 73 | 'path to docs; used in conjunction with --confluence option', 74 | 'docs' 75 | ) 76 | .parse(process.argv) 77 | .opts() as ProgramInput; 78 | 79 | if (program.confluence && program.docs) { 80 | if (program.site && program.space && program.user && program.apiToken) { 81 | const options: PublishToConfluenceOptions = { 82 | domain: program.site, 83 | space: program.space, 84 | username: program.user, 85 | password: program.apiToken, 86 | }; 87 | await publishToConfluence(program.docs, options); 88 | process.exit(0); 89 | } else { 90 | console.log(chalk.red('Missing required arguments')); 91 | process.exit(1); 92 | } 93 | } 94 | 95 | await validateInputs(program); 96 | 97 | if (program.wait === undefined) { 98 | program.wait = false; 99 | } 100 | 101 | const config = await validatePSPDependencies(program); 102 | const templateData = await readTemplateData(program, config); 103 | const j1Client = initializeJ1Client(program); 104 | await 
storeConfigWithAccount(program, j1Client, config); 105 | 106 | try { 107 | await pMap( 108 | config.policies, 109 | (p: PolicyBuilderElement) => 110 | upsertPolicy({ 111 | program, 112 | j1Client, 113 | policy: p, 114 | templates: templateData, 115 | config, 116 | }), 117 | { concurrency: MAX_CONCURRENCY } 118 | ); 119 | console.log('Verifying the order of all policies and procedures...'); 120 | await j1Client.reorderItems({ 121 | mapping: { 122 | policies: config.policies.map((p: PolicyBuilderElement) => ({ 123 | id: p.id, 124 | procedures: p.procedures || [], 125 | })), 126 | }, 127 | }); 128 | } catch (err) { 129 | error.fatal( 130 | `Error publishing policies and procedures. Error: ${ 131 | err.stack || err.toString() 132 | }` 133 | ); 134 | } 135 | console.log('All items were successfully published'); 136 | } 137 | 138 | // ensure user supplied necessary params 139 | async function validateInputs(program: ProgramInput) { 140 | if (!program.account || program.account === '') { 141 | error.fatal('Missing -a|--account input!', EUSAGEERROR); 142 | } 143 | 144 | if (!program.apiToken) { 145 | error.fatal('Missing --api-token input!', EUSAGEERROR); 146 | } 147 | 148 | if (!program.config || program.config === '') { 149 | error.fatal('Missing -c|--config input!', EUSAGEERROR); 150 | } 151 | } 152 | 153 | // ensure docs are built and config.json is valid 154 | async function validatePSPDependencies(program: ProgramInput) { 155 | if (program.noninteractive) { 156 | process.stdout.write('Validating inputs... '); 157 | } else { 158 | console.log('Validating inputs...'); 159 | } 160 | if (!fs.existsSync(program.templates!)) { 161 | error.fatal( 162 | `Could not find templates directory (${program.templates}). Make sure you have built your PSP ` + 163 | "docs, and/or specify the correct path with '--templates'." 164 | ); 165 | } 166 | if (!fs.existsSync(program.config!)) { 167 | error.fatal( 168 | `Could not find config file (${program.config}). Specify the correct path with '--config'.` 169 | ); 170 | } 171 | const config = parseJsonFile(program.config!); 172 | if (!config) { 173 | error.fatal(`Could not parse config file (${program.config}).`); 174 | } 175 | const requiredKeys = ['organization', 'policies', 'procedures', 'references']; 176 | 177 | const configKeys = Object.keys(config); 178 | if (requiredKeys.some((k) => configKeys.indexOf(k) < 0)) { 179 | error.fatal( 180 | `Missing one or more required config sections: ${requiredKeys.join( 181 | ', ' 182 | )}.` 183 | ); 184 | } 185 | const tmplDirStats = fs.statSync(program.templates!); 186 | if (program.noninteractive) { 187 | console.log('OK!'); 188 | return config; 189 | } 190 | const { shouldPublish } = await prompt([ 191 | { 192 | type: 'confirm', 193 | name: 'shouldPublish', 194 | message: `Do you really want to publish the contents of '${program.templates}/', last modified on ${tmplDirStats.mtime}? 
This may overwrite content generated via the JupiterOne Policy Builder UI`, 195 | }, 196 | ]); 197 | if (!shouldPublish) { 198 | error.fatal('Canceled by user.'); 199 | } 200 | console.log('Inputs OK!'); 201 | return config; 202 | } 203 | 204 | function parseJsonFile(file: string) { 205 | let data; 206 | try { 207 | data = JSON.parse(fs.readFileSync(file, 'utf8')); 208 | } catch (err) { 209 | return null; 210 | } 211 | return data; 212 | } 213 | 214 | function initializeJ1Client(program: ProgramInput) { 215 | const j1Options: J1Options = { 216 | accountId: program.account!, 217 | targetEnvironment: process.env.J1_TARGET_ENV as JupiterOneEnvironment, 218 | apiKey: program.apiToken!, 219 | }; 220 | 221 | const j1Client = createJupiterOneClient(j1Options); 222 | return j1Client; 223 | } 224 | 225 | async function readTemplateData( 226 | program: ProgramInput, 227 | config: PolicyBuilderConfig 228 | ): Promise { 229 | const data: TemplateData = { 230 | policies: {}, 231 | procedures: {}, 232 | references: {}, 233 | }; 234 | 235 | const todos: (() => Promise)[] = []; 236 | const sections: SectionName[] = ['policies', 'procedures', 'references']; 237 | const templateCount = sections.reduce((acc, cv) => { 238 | return acc + (config[cv]?.length ?? 0); 239 | }, 0); 240 | process.stdout.write( 241 | `Scanning ${templateCount} template files for publishing... ` 242 | ); 243 | sections.forEach((section) => { 244 | const sectionData = config[section] ?? []; 245 | for (const element of sectionData) { 246 | const tmplPath = path.join(program.templates!, element.file + '.tmpl'); 247 | const work = async () => { 248 | const tmplData = await readFilePromise(tmplPath); 249 | data[section][element.id] = tmplData; 250 | }; 251 | todos.push(work); 252 | } 253 | }); 254 | await pAll(todos, { concurrency: MAX_CONCURRENCY }); 255 | console.log('OK!'); 256 | return data; 257 | } 258 | 259 | async function upsertPolicy({ 260 | j1Client, 261 | policy, 262 | templates, 263 | config, 264 | }: { 265 | program: ProgramInput; 266 | j1Client: JupiterOneClient; 267 | policy: PolicyBuilderElement; 268 | templates: TemplateData; 269 | config: any; 270 | }) { 271 | const template = templates.policies[policy.id]; 272 | const { uuid } = await j1Client.upsertPolicy({ 273 | data: { 274 | id: policy.id, 275 | file: policy.file, 276 | title: policy.name as string, 277 | template, 278 | }, 279 | }); 280 | console.log(`Upserted policy: ${policy.id}`); 281 | const isRef = policy.id === 'ref'; 282 | 283 | const upsertProcedureViaJ1Client = async (procedureId: string) => { 284 | const procedure = ((isRef 285 | ? config.references 286 | : config.procedures) as PolicyBuilderElement[]).find( 287 | (procedure) => procedure.id === procedureId 288 | ); 289 | if (!procedure) { 290 | throw error.fatal(`Unable to find procedure with id: ${procedureId}`); 291 | } 292 | const template = (isRef ? templates.references : templates.procedures)[ 293 | procedure.id 294 | ]; 295 | 296 | await j1Client.upsertProcedure({ 297 | data: { 298 | policyId: uuid, 299 | id: procedure.id, 300 | isRef, 301 | template, 302 | file: procedure.file, 303 | name: procedure.name as string, 304 | provider: procedure.provider, 305 | applicable: procedure.applicable, 306 | adopted: procedure.adopted, 307 | summary: (procedure.summary as string) || '', 308 | }, 309 | }); 310 | 311 | console.log( 312 | `Upserted ${isRef ? 
'reference' : 'procedure'}: ${procedureId}` 313 | ); 314 | }; 315 | 316 | await pMap(policy.procedures as string[], upsertProcedureViaJ1Client, { 317 | concurrency: MAX_CONCURRENCY, 318 | }); 319 | } 320 | 321 | async function storeConfigWithAccount( 322 | program: ProgramInput, 323 | j1Client: JupiterOneClient, 324 | configData: PolicyBuilderConfig 325 | ) { 326 | const accountId = program.account!; 327 | process.stdout.write('Storing config with JupiterOne account... '); 328 | try { 329 | const result = await j1Client.updateConfig({ 330 | values: configData.organization, 331 | }); 332 | console.log('OK'); 333 | return result; 334 | } catch (err) { 335 | throw error.fatal( 336 | `Error storing PSP configuration data with account (${accountId}). Error: ${ 337 | err.stack || err.toString() 338 | }` 339 | ); 340 | } 341 | } 342 | 343 | async function readFilePromise(filePath: string) { 344 | return fsPromises.readFile(filePath, { encoding: 'utf8' }); 345 | } 346 | -------------------------------------------------------------------------------- /src/commands/psp.ts: -------------------------------------------------------------------------------- 1 | import commander from 'commander'; 2 | import packageJson from '~/package.json'; 3 | 4 | commander 5 | .version(packageJson.version, '-V, --version') 6 | .command('build', 'build PSP markdown documentation from templates') 7 | .command('assess', 'generate compliance self-assessment markdown report') 8 | .command('publish', 'upload PSP assets to JupiterOne') 9 | .parse(process.argv); 10 | -------------------------------------------------------------------------------- /src/configure.ts: -------------------------------------------------------------------------------- 1 | // import {questions} from "./questions/base"); 2 | import stripAnsi from 'strip-ansi'; 3 | import inquirer, { 4 | ConfirmQuestion, 5 | InputQuestion, 6 | QuestionCollection, 7 | } from 'inquirer'; 8 | import chalk from 'chalk'; 9 | import fs from 'fs-extra'; 10 | import { baseQuestions } from './questions/base'; 11 | import { scorecardQuestions } from './questions/scorecard'; 12 | import { hipaaQuestions } from './questions/hipaa'; 13 | import { 14 | Organization, 15 | PolicyBuilderConfig, 16 | AdoptedPolicyBuilderElements, 17 | PolicyBuilderElement, 18 | PolicyBuilderPartial, 19 | PolicyBuilderPartialType, 20 | } from '~/src/types'; 21 | import pickAdopted from '~/src/util/pickAdopted'; 22 | 23 | // expects initial configuration object 24 | // interactively prompts for any unconfigured organization values 25 | // returns fully populated configuration object 26 | async function promptForValues(options: { 27 | config: PolicyBuilderConfig; 28 | noninteractive?: boolean; 29 | }) { 30 | const { config, noninteractive } = options; 31 | // prompt for any missing values 32 | const answers = await safeInquirerPrompt( 33 | missingOrEmptyOrganizationValues(config.organization) 34 | ); 35 | Object.assign(config.organization, answers); 36 | 37 | // conditionally prompt for additional details 38 | await promptForSecurityScorecardValues(config.organization); 39 | await promptForHIPAAValues(config.organization); 40 | 41 | // display config 42 | console.log(JSON.stringify(config.organization, null, 2)); 43 | 44 | // conditionally confirm and save configuration 45 | if (noninteractive !== true) { 46 | const save = await inquirer.prompt(promptForSave); 47 | if (save.selected) { 48 | try { 49 | await fs.writeFile(save.path, JSON.stringify(config, null, 2)); 50 | console.log(chalk.green(`saved 
configuration to ${save.path}`)); 51 | } catch (err) { 52 | console.log( 53 | chalk.yellow(`Unable to save configuration to ${save.path}: ${err}`) 54 | ); 55 | } 56 | } 57 | } 58 | 59 | mergeAutomaticPSPVars(config); 60 | return config; 61 | } 62 | 63 | async function promptForSecurityScorecardValues(config: Organization) { 64 | if (config.haveSecurityScorecard === true) { 65 | const toAsk = missingOrEmptyOrganizationValues(config, scorecardQuestions); 66 | const values = await safeInquirerPrompt(toAsk); 67 | Object.assign(config, values); 68 | } 69 | 70 | // ensure sane default values for any unanswered questions 71 | config.securityScorecardPeriod = config.securityScorecardPeriod || 'none'; 72 | config.securityScorecardURL = config.securityScorecardURL || 'N/A'; 73 | } 74 | 75 | async function promptForHIPAAValues(config: Organization) { 76 | if (config.needStandardHIPAA === true) { 77 | const toAsk = missingOrEmptyOrganizationValues(config, hipaaQuestions); 78 | const values = await safeInquirerPrompt(toAsk); 79 | Object.assign(config, values); 80 | } 81 | 82 | // ensure sane default values for any unanswered questions 83 | config.securityScorecardPeriod = config.securityScorecardPeriod || 'none'; 84 | config.securityScorecardURL = config.securityScorecardURL || 'N/A'; 85 | } 86 | 87 | // expects inquirer list object 88 | // interactively prompts for all inquirer questions 89 | // returns sanitized answers 90 | async function safeInquirerPrompt(list: QuestionCollection) { 91 | if (process.env.NODE_ENV === 'test') { 92 | return {}; // early exit to avoid inquirer, which kills AVA 93 | } 94 | 95 | // prompt for any missing values 96 | const answers = await inquirer.prompt(list); 97 | return sanitize(answers); 98 | } 99 | 100 | function sanitize(obj: Record) { 101 | const saneObj = { ...obj }; 102 | Object.keys(saneObj).forEach((key) => { 103 | saneObj[key] = stripAnsi(saneObj[key]); 104 | }); 105 | return saneObj; 106 | } 107 | 108 | const promptForSave: (ConfirmQuestion | InputQuestion)[] = [ 109 | { 110 | type: 'confirm', 111 | name: 'selected', 112 | message: 'Save this configuration to a file', 113 | default: false, 114 | }, 115 | { 116 | type: 'input', 117 | name: 'path', 118 | message: 'path to saved file', 119 | when: (answers) => { 120 | return answers.selected === true; 121 | }, 122 | }, 123 | ]; 124 | 125 | // expects configuration organization object 126 | // returns array of currently unconfigured values 127 | function missingOrganizationValues(org = {}, values = baseQuestions): string[] { 128 | const orgKeys = Object.keys(org); 129 | 130 | return values.map((x) => x.name).filter((x) => !orgKeys.includes(x)); 131 | } 132 | 133 | // expects initial configuration "organization" object 134 | // returns array of missing inquiries 135 | function missingOrEmptyOrganizationValues( 136 | org: Organization, 137 | values = baseQuestions 138 | ) { 139 | const missingValues = missingOrganizationValues(org, values); 140 | return values.filter((q) => { 141 | return missingValues.includes(q.name) || org[q.name] === ''; 142 | }); 143 | } 144 | 145 | function buildPartial( 146 | element: PolicyBuilderElement, 147 | partialType: PolicyBuilderPartialType 148 | ) { 149 | const { type, ...properties } = element; 150 | const partial: PolicyBuilderPartial = { 151 | ...properties, 152 | type: partialType, 153 | }; 154 | return partial; 155 | } 156 | // expects full config object 157 | // returns array of all adopted partial objects of the form: 158 | // { 159 | // id: 'cp-policy-training', 160 | // 
file: 'procedures/cp-policy-training.md', 161 | // ... 162 | // type: 'procedures' 163 | // } 164 | // 165 | // throws error if a partial makes an illegal id reference 166 | function getAdoptedPartials( 167 | config: PolicyBuilderConfig 168 | ): PolicyBuilderPartial[] { 169 | const partials: PolicyBuilderPartial[] = []; 170 | 171 | for (const policy of config.policies || []) { 172 | if (!policy.adopted) { 173 | continue; 174 | } 175 | 176 | partials.push({ 177 | ...policy, 178 | type: 'policies', 179 | }); 180 | 181 | for (const id of policy.procedures ?? []) { 182 | const byIdFilter = (p: PolicyBuilderElement) => { 183 | return p.id === id; 184 | }; 185 | 186 | let policyImplementer: 187 | | PolicyBuilderElement 188 | | undefined = config.procedures?.find(byIdFilter); 189 | if (policyImplementer) { 190 | partials.push(buildPartial(policyImplementer, 'procedures')); 191 | } else if ((policyImplementer = config.references?.find(byIdFilter))) { 192 | partials.push(buildPartial(policyImplementer, 'references')); 193 | } 194 | } 195 | } 196 | 197 | for (const ref of config.references ?? []) { 198 | if (!ref.adopted) { 199 | continue; 200 | } 201 | partials.push(buildPartial(ref, 'references')); 202 | } 203 | return partials; 204 | } 205 | 206 | // returns array of adopted procedure objects for a given policy id 207 | // throws error if policy references unknown procedure id 208 | function getAdoptedProceduresForPolicy( 209 | policyId: string, 210 | config: PolicyBuilderConfig 211 | ) { 212 | const procedures = []; 213 | const policy = config.policies?.find((p) => p.id === policyId); 214 | if (!policy) { 215 | throw new Error(`unknown policy id ${policyId}`); 216 | } 217 | let policyImplementor; 218 | for (const procedure of policy.procedures ?? []) { 219 | policyImplementor = 220 | config.procedures?.find((p) => p.id === procedure) ?? 
221 | config.references?.find((r) => r.id === procedure); 222 | if (!policyImplementor) { 223 | throw new Error( 224 | `reference to unknown procedure '${procedure}' in policy '${policy.id}'` 225 | ); 226 | } 227 | if (!policyImplementor.adopted) { 228 | continue; 229 | } 230 | procedures.push(policyImplementor); 231 | } 232 | return procedures; 233 | } 234 | 235 | // expects full config object 236 | // merges additional runtime-calculated values into organization 237 | function mergeAutomaticPSPVars(config: PolicyBuilderConfig) { 238 | const defaultRevision = `${new Date().getFullYear()}.1`; 239 | const mergeValues = { 240 | defaultRevision, 241 | }; 242 | Object.assign(config.organization, mergeValues); 243 | } 244 | 245 | // expects full config object 246 | // returns object containing configured and adopted elements 247 | function getAdoptedPSPElements(config: PolicyBuilderConfig) { 248 | const adopted: AdoptedPolicyBuilderElements = { 249 | standards: pickAdopted(config.standards), 250 | policies: pickAdopted(config.policies), 251 | procedures: pickAdopted(config.procedures), 252 | references: pickAdopted(config.references), 253 | }; 254 | return adopted; 255 | } 256 | 257 | export { 258 | getAdoptedPartials, 259 | getAdoptedProceduresForPolicy, 260 | getAdoptedPSPElements, 261 | mergeAutomaticPSPVars, 262 | missingOrEmptyOrganizationValues, 263 | missingOrganizationValues, 264 | promptForValues, 265 | safeInquirerPrompt, 266 | }; 267 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | export const DEFAULT_TEMPLATES = 2 | 'node_modules/@jupiterone/security-policy-templates/templates'; 3 | export const DEFAULT_MKDOCS_YML = 4 | 'node_modules/@jupiterone/security-policy-templates/templates/mkdocs/mkdocs.yml.tmpl'; 5 | 6 | export const PSP_SYNC_SCOPE = 'policies-procedures'; 7 | -------------------------------------------------------------------------------- /src/error.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | 3 | export function fatal(message: string, code: number = 1) { 4 | console.log('\n' + chalk.red.bold(message) + '\n'); 5 | process.exit(code); 6 | } 7 | 8 | export function warn(message: string) { 9 | console.log('\n' + chalk.yellow(message) + '\n'); 10 | } 11 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import * as assets from './assets'; 2 | import * as configure from './configure'; 3 | import * as render from './render'; 4 | import path from 'path'; 5 | import { PolicyBuilderConfig, PolicyBuilderPaths } from '~/src/types'; 6 | 7 | const defaultPaths: PolicyBuilderPaths = { 8 | partials: 'partials', 9 | templates: 'templates', 10 | output: 'docs', 11 | }; 12 | 13 | export async function policybuilder( 14 | configuration: PolicyBuilderConfig, 15 | paths = defaultPaths 16 | ) { 17 | configure.mergeAutomaticPSPVars(configuration); 18 | 19 | // marshal static assets for mkdocs 20 | await assets.copyStaticAssets(paths); 21 | await assets.downloadCustomLogo(configuration, paths); 22 | await render.renderMkdocsYAML( 23 | configuration, 24 | paths, 25 | path.join(paths.output, 'mkdocs.yml') 26 | ); 27 | await render.renderIndexPage( 28 | configuration, 29 | paths, 30 | path.join(paths.output, 'index.md') 31 | ); 32 | 33 | // render partials and 
docs 34 | const renderedPartials = await render.renderPartials(configuration, paths); 35 | const renderedPSPDocs = await render.renderPSPDocs(configuration, paths); 36 | return { renderedPartials, renderedPSPDocs }; 37 | } 38 | -------------------------------------------------------------------------------- /src/j1/index.ts: -------------------------------------------------------------------------------- 1 | import { Entity, EntityPropertyValue } from '~/src/j1/types'; 2 | import fetch, { RequestInit, Response as FetchResponse } from 'node-fetch'; 3 | import { print as graphqlAstToString } from 'graphql/language/printer'; 4 | import { DocumentNode } from 'graphql'; 5 | import * as j1GraphQL from './j1GraphQL'; 6 | import { EntityForSync, RelationshipForSync } from '~/src/types'; 7 | import { retry } from '@lifeomic/attempt'; 8 | 9 | export type JupiterOneEnvironment = 'localhost' | 'dev' | 'prod' | 'fedramp' | undefined; 10 | 11 | export type J1Options = { 12 | accountId: string; 13 | targetEnvironment: JupiterOneEnvironment; 14 | apiKey: string; 15 | }; 16 | 17 | export type GraphQLApiResponseError = { 18 | code?: string; 19 | message: string; 20 | }; 21 | 22 | export type GraphQLApiResponseBodyWithErrors = { 23 | errors?: GraphQLApiResponseError[]; 24 | }; 25 | 26 | export type GraphQLApiResponseBodyWithResult = { 27 | data: { 28 | result: T; 29 | }; 30 | }; 31 | 32 | class GraphQLResponseError extends Error { 33 | constructor(errors: GraphQLApiResponseError[]) { 34 | super( 35 | `Received JupiterOne API error response. Errors: ` + 36 | errors 37 | .map((error) => { 38 | return `${error.message} (code=${error.code || ''})`; 39 | }) 40 | .join(', ') 41 | ); 42 | } 43 | } 44 | 45 | export type JupiterOneQuery = { 46 | nameForLogging: string; 47 | ast: DocumentNode; 48 | }; 49 | 50 | function buildRequestHeaders( 51 | j1Client: JupiterOneClient, 52 | headers?: Record 53 | ) { 54 | return { 55 | Authorization: `Bearer ${j1Client.apiKey}`, 56 | 'LifeOmic-Account': j1Client.accountId, 57 | ...headers, 58 | }; 59 | } 60 | async function makeGraphQLRequest(options: { 61 | apiUrl: string; 62 | j1Client: JupiterOneClient; 63 | query: JupiterOneQuery; 64 | input: I; 65 | resultKey?: string; 66 | }) { 67 | const { apiUrl } = options; 68 | const headers = buildRequestHeaders(options.j1Client, { 69 | 'Content-Type': 'application/json', 70 | }); 71 | 72 | const body = { 73 | query: graphqlAstToString(options.query.ast), 74 | variables: options.input, 75 | }; 76 | 77 | const response = await makeFetchRequest( 78 | apiUrl, 79 | { 80 | method: 'POST', 81 | body: JSON.stringify(body), 82 | headers, 83 | }, 84 | options.query.nameForLogging 85 | ); 86 | 87 | const bodyObj = await response.json(); 88 | const errors = (bodyObj as GraphQLApiResponseBodyWithErrors).errors; 89 | if (errors) { 90 | throw new GraphQLResponseError(errors); 91 | } 92 | 93 | return bodyObj.data[options.resultKey || 'result'] as O; 94 | } 95 | 96 | export type DeferredJ1QLQueryState = { 97 | status: 'IN_PROGRESS' | 'COMPLETED' | 'FAILED'; 98 | error: string | null; 99 | url: string | null; 100 | }; 101 | 102 | async function sleep(ms: number) { 103 | return new Promise((resolve) => { 104 | setTimeout(resolve, 500); 105 | }); 106 | } 107 | 108 | async function makeDeferredJ1QLRequest(options: { 109 | j1Client: JupiterOneClient; 110 | j1ql: string; 111 | j1qlVariables: j1GraphQL.J1QLVariables; 112 | }) { 113 | const deferredData = await makeGraphQLRequest({ 114 | apiUrl: options.j1Client.queryGraphQLApiUrl, 115 | j1Client: 
options.j1Client, 116 | query: j1GraphQL.QUERY_DEFERRED_J1QL, 117 | input: { 118 | query: options.j1ql, 119 | }, 120 | }); 121 | 122 | // Poll for completion 123 | let j1qlState: DeferredJ1QLQueryState | undefined; 124 | let attemptNum = 0; 125 | 126 | do { 127 | if (attemptNum) { 128 | await sleep(500); 129 | } 130 | 131 | attemptNum++; 132 | 133 | const statusResponse = await makeFetchRequest( 134 | deferredData.url, 135 | { 136 | method: 'GET', 137 | }, 138 | 'Deferred J1QL Query Status' 139 | ); 140 | 141 | j1qlState = await statusResponse.json(); 142 | if (j1qlState!.status === 'FAILED') { 143 | throw new Error( 144 | `J1QL query failed. Error: ${j1qlState!.error || '(unknown'}` 145 | ); 146 | } 147 | } while (j1qlState!.status === 'IN_PROGRESS' && attemptNum < 100); 148 | 149 | const queryResponse = await fetch(j1qlState!.url!, { 150 | method: 'GET', 151 | }); 152 | 153 | const finalData = (await queryResponse.json()) as T; 154 | return finalData; 155 | } 156 | 157 | type EntityListItem = { 158 | id: string; 159 | entity: object; 160 | properties: object; 161 | }; 162 | 163 | async function validateSyncJobResponse(response: FetchResponse) { 164 | const rawBody = await response.json(); 165 | const body = rawBody as Partial; 166 | if (!body.job) { 167 | throw new Error( 168 | `JupiterOne API error. Sync job response did not return job. Response: ${JSON.stringify( 169 | rawBody, 170 | null, 171 | 2 172 | )}` 173 | ); 174 | } 175 | return body as SyncJobResonse; 176 | } 177 | 178 | export enum SyncJobStatus { 179 | AWAITING_UPLOADS = 'AWAITING_UPLOADS', 180 | FINALIZE_PENDING = 'FINALIZE_PENDING', 181 | FINALIZING_ENTITIES = 'FINALIZING_ENTITIES', 182 | FINALIZING_RELATIONSHIPS = 'FINALIZING_RELATIONSHIPS', 183 | ABORTED = 'ABORTED', 184 | FINISHED = 'FINISHED', 185 | UNKNOWN = 'UNKNOWN', 186 | ERROR_BAD_DATA = 'ERROR_BAD_DATA', 187 | ERROR_UNEXPECTED_FAILURE = 'ERROR_UNEXPECTED_FAILURE', 188 | } 189 | 190 | export type SyncJob = { 191 | source: string; 192 | scope: string; 193 | accountId: string; 194 | id: string; 195 | status: SyncJobStatus; 196 | done: boolean; 197 | startTimestamp: number; 198 | numEntitiesUploaded: number; 199 | numEntitiesCreated: number; 200 | numEntitiesUpdated: number; 201 | numEntitiesDeleted: number; 202 | numEntityCreateErrors: number; 203 | numEntityUpdateErrors: number; 204 | numEntityDeleteErrors: number; 205 | numEntityRawDataEntriesUploaded: number; 206 | numEntityRawDataEntriesCreated: number; 207 | numEntityRawDataEntriesUpdated: number; 208 | numEntityRawDataEntriesDeleted: number; 209 | numEntityRawDataEntryCreateErrors: number; 210 | numEntityRawDataEntryUpdateErrors: number; 211 | numEntityRawDataEntryDeleteErrors: number; 212 | numRelationshipsUploaded: number; 213 | numRelationshipsCreated: number; 214 | numRelationshipsUpdated: number; 215 | numRelationshipsDeleted: number; 216 | numRelationshipCreateErrors: number; 217 | numRelationshipUpdateErrors: number; 218 | numRelationshipDeleteErrors: number; 219 | numRelationshipRawDataEntriesUploaded: number; 220 | numRelationshipRawDataEntriesCreated: number; 221 | numRelationshipRawDataEntriesUpdated: number; 222 | numRelationshipRawDataEntriesDeleted: number; 223 | numRelationshipRawDataEntryCreateErrors: number; 224 | numRelationshipRawDataEntryUpdateErrors: number; 225 | numRelationshipRawDataEntryDeleteErrors: number; 226 | numMappedRelationshipsCreated: number; 227 | numMappedRelationshipsUpdated: number; 228 | numMappedRelationshipsDeleted: number; 229 | numMappedRelationshipCreateErrors: 
number; 230 | numMappedRelationshipUpdateErrors: number; 231 | numMappedRelationshipDeleteErrors: number; 232 | syncMode: 'DIFF' | 'CREATE_OR_UPDATE'; 233 | }; 234 | 235 | export type SyncJobResonse = { 236 | job: SyncJob; 237 | }; 238 | 239 | class FetchError extends Error { 240 | httpStatusCode: number; 241 | 242 | constructor(options: { 243 | responseBody: string; 244 | response: FetchResponse; 245 | method: string; 246 | url: string; 247 | nameForLogging?: string; 248 | }) { 249 | super( 250 | `JupiterOne API error. Response not OK (requestName=${ 251 | options.nameForLogging || '(none)' 252 | }, status=${options.response.status}, url=${options.url}, method=${ 253 | options.method 254 | }). Response: ${options.responseBody}` 255 | ); 256 | this.httpStatusCode = options.response.status; 257 | } 258 | } 259 | 260 | async function makeFetchRequest( 261 | url: string, 262 | options: RequestInit, 263 | nameForLogging?: string 264 | ) { 265 | return retry( 266 | async () => { 267 | const response = await fetch(url, options); 268 | const { status } = response; 269 | if (status < 200 || status >= 300) { 270 | const responseBody = await response.text(); 271 | throw new FetchError({ 272 | method: options.method!, 273 | response, 274 | responseBody, 275 | url, 276 | nameForLogging, 277 | }); 278 | } 279 | return response; 280 | }, 281 | { 282 | maxAttempts: 5, 283 | delay: 1000, 284 | handleError(err, context, options) { 285 | const possibleFetchError = err as Partial; 286 | const { httpStatusCode } = possibleFetchError; 287 | if (httpStatusCode !== undefined) { 288 | if (httpStatusCode < 500) { 289 | context.abort(); 290 | } 291 | } 292 | }, 293 | } 294 | ); 295 | } 296 | 297 | class JupiterOneClient { 298 | apiKey: string; 299 | persisterRestApiUrl: string; 300 | persisterGraphQLApiUrl: string; 301 | queryGraphQLApiUrl: string; 302 | accountId: string; 303 | 304 | constructor(options: J1Options) { 305 | this.apiKey = options.apiKey.trim(); 306 | this.accountId = options.accountId.trim(); 307 | 308 | let persisterRestApiUrl: string; 309 | let persisterGraphQLApiUrl: string; 310 | let queryGraphQLApiUrl: string; 311 | 312 | const targetEnvironment = options.targetEnvironment || 'prod'; 313 | 314 | if (targetEnvironment === 'localhost') { 315 | persisterRestApiUrl = 'http://localhost:8080'; 316 | persisterGraphQLApiUrl = 'http://localhost:8080/persister/graphql'; 317 | queryGraphQLApiUrl = 'https://api.dev.jupiterone.io/graphql'; 318 | } else if (targetEnvironment === 'prod') { 319 | persisterRestApiUrl = 'https://api.us.jupiterone.io'; 320 | persisterGraphQLApiUrl = 'https://api.us.jupiterone.io/graphql'; 321 | queryGraphQLApiUrl = 'https://api.us.jupiterone.io/graphql'; 322 | } else if (targetEnvironment === 'dev') { 323 | persisterRestApiUrl = 'https://api.dev.jupiterone.io'; 324 | persisterGraphQLApiUrl = 'https://api.dev.jupiterone.io/graphql'; 325 | queryGraphQLApiUrl = 'https://api.dev.jupiterone.io/graphql'; 326 | } else if (targetEnvironment === 'fedramp') { 327 | persisterRestApiUrl = 'https://api.fedramp.jupiterone.us'; 328 | persisterGraphQLApiUrl = 'https://api.fedramp.jupiterone.us/graphql'; 329 | queryGraphQLApiUrl = 'https://api.fedramp.jupiterone.us/graphql'; 330 | } else { 331 | throw new Error( 332 | 'Unrecognized target JupiterOne environment: ' + targetEnvironment 333 | ); 334 | } 335 | 336 | this.persisterRestApiUrl = persisterRestApiUrl; 337 | this.persisterGraphQLApiUrl = persisterGraphQLApiUrl; 338 | this.queryGraphQLApiUrl = queryGraphQLApiUrl; 339 | } 340 | 341 | async 
queryForEntityList(j1ql: string): Promise { 342 | const queryResponse: { 343 | type: 'list'; 344 | totalCount: number; 345 | data: EntityListItem[]; 346 | } = await makeDeferredJ1QLRequest({ 347 | j1Client: this, 348 | j1ql, 349 | j1qlVariables: {}, 350 | }); 351 | 352 | const entities = queryResponse.data.map((item) => { 353 | return { 354 | ...item.properties, 355 | ...item.entity, 356 | } as Entity; 357 | }); 358 | 359 | return entities; 360 | } 361 | 362 | async queryForEntityTableList( 363 | j1ql: string 364 | ): Promise[]> { 365 | const queryResponse: { 366 | type: 'table'; 367 | totalCount: number; 368 | data: Record[]; 369 | } = await makeDeferredJ1QLRequest({ 370 | j1Client: this, 371 | j1ql, 372 | j1qlVariables: {}, 373 | }); 374 | 375 | return queryResponse.data; 376 | } 377 | 378 | async uploadEntityRawData(options: { 379 | entityId: string; 380 | entryName: string; 381 | contentType: 'text/html' | 'application/json'; 382 | body: object | string; 383 | }) { 384 | const headers = buildRequestHeaders(this, { 385 | 'Content-Type': options.contentType, 386 | }); 387 | await makeFetchRequest( 388 | this.persisterRestApiUrl + 389 | `/entities/${options.entityId}/raw-data/${options.entryName}`, 390 | { 391 | method: 'PUT', 392 | headers, 393 | body: 394 | typeof options.body === 'string' 395 | ? options.body 396 | : JSON.stringify(options.body), 397 | } 398 | ); 399 | } 400 | 401 | async startSyncJob(options: { source: 'api'; scope: string }) { 402 | const headers = buildRequestHeaders(this, { 403 | 'Content-Type': 'application/json', 404 | }); 405 | const response = await makeFetchRequest( 406 | this.persisterRestApiUrl + `/persister/synchronization/jobs`, 407 | { 408 | method: 'POST', 409 | headers, 410 | body: JSON.stringify(options), 411 | } 412 | ); 413 | return validateSyncJobResponse(response); 414 | } 415 | 416 | async uploadGraphObjectsForSyncJob(options: { 417 | syncJobId: string; 418 | entities?: EntityForSync[]; 419 | relationships?: RelationshipForSync[]; 420 | }) { 421 | const { syncJobId, entities, relationships } = options; 422 | const headers = buildRequestHeaders(this, { 423 | 'Content-Type': 'application/json', 424 | }); 425 | const response = await makeFetchRequest( 426 | this.persisterRestApiUrl + 427 | `/persister/synchronization/jobs/${syncJobId}/upload`, 428 | { 429 | method: 'POST', 430 | headers, 431 | body: JSON.stringify({ 432 | entities, 433 | relationships, 434 | }), 435 | } 436 | ); 437 | return validateSyncJobResponse(response); 438 | } 439 | 440 | async finalizeSyncJob(options: { syncJobId: string }) { 441 | const { syncJobId } = options; 442 | const headers = buildRequestHeaders(this, { 443 | 'Content-Type': 'application/json', 444 | }); 445 | const response = await makeFetchRequest( 446 | this.persisterRestApiUrl + 447 | `/persister/synchronization/jobs/${syncJobId}/finalize`, 448 | { 449 | method: 'POST', 450 | headers, 451 | body: JSON.stringify({}), 452 | } 453 | ); 454 | return validateSyncJobResponse(response); 455 | } 456 | 457 | async fetchSyncJobStatus(options: { syncJobId: string }) { 458 | const { syncJobId } = options; 459 | const headers = buildRequestHeaders(this); 460 | const response = await makeFetchRequest( 461 | this.persisterRestApiUrl + `/persister/synchronization/jobs/${syncJobId}`, 462 | { 463 | method: 'GET', 464 | headers, 465 | } 466 | ); 467 | return validateSyncJobResponse(response); 468 | } 469 | 470 | async updateEntity(input: j1GraphQL.UpdateEntityInput) { 471 | return makeGraphQLRequest({ 472 | apiUrl: 
this.persisterGraphQLApiUrl, 473 | j1Client: this, 474 | input, 475 | query: j1GraphQL.MUTATION_UPDATE_ENTITY, 476 | }); 477 | } 478 | 479 | async updateRelationship(input: j1GraphQL.UpdateRelationshipInput) { 480 | return makeGraphQLRequest({ 481 | apiUrl: this.persisterGraphQLApiUrl, 482 | j1Client: this, 483 | input, 484 | query: j1GraphQL.MUTATION_UPDATE_RELATIONSHIP, 485 | }); 486 | } 487 | 488 | async updateConfig(input: j1GraphQL.UpdateConfigInput) { 489 | return makeGraphQLRequest< 490 | j1GraphQL.UpdateConfigInput, 491 | j1GraphQL.UpdateConfigOutput 492 | >({ 493 | apiUrl: this.queryGraphQLApiUrl, 494 | j1Client: this, 495 | input, 496 | query: j1GraphQL.MUTATION_UPDATE_CONFIG, 497 | }); 498 | } 499 | 500 | async upsertPolicy(input: j1GraphQL.UpsertPolicyInput) { 501 | return makeGraphQLRequest< 502 | j1GraphQL.UpsertPolicyInput, 503 | j1GraphQL.UpsertPolicyOutput 504 | >({ 505 | apiUrl: this.queryGraphQLApiUrl, 506 | j1Client: this, 507 | input, 508 | query: j1GraphQL.MUTATION_UPSERT_POLICY, 509 | resultKey: 'upsertPolicyById', 510 | }); 511 | } 512 | 513 | async upsertProcedure(input: j1GraphQL.UpsertProcedureInput) { 514 | return makeGraphQLRequest< 515 | j1GraphQL.UpsertProcedureInput, 516 | j1GraphQL.UpsertProcedureOutput 517 | >({ 518 | apiUrl: this.queryGraphQLApiUrl, 519 | j1Client: this, 520 | input, 521 | query: j1GraphQL.MUTATION_UPSERT_PROCEDURE, 522 | resultKey: 'upsertProcedureById', 523 | }); 524 | } 525 | 526 | async reorderItems(input: j1GraphQL.ReorderAllItemsByMappingInput) { 527 | return makeGraphQLRequest< 528 | j1GraphQL.ReorderAllItemsByMappingInput, 529 | j1GraphQL.ReorderAllItemsByMappingOutput 530 | >({ 531 | apiUrl: this.queryGraphQLApiUrl, 532 | j1Client: this, 533 | input, 534 | query: j1GraphQL.MUTATION_REORDER_ITEMS, 535 | resultKey: 'reorderAllItemsByMapping', 536 | }); 537 | } 538 | } 539 | 540 | export function createJupiterOneClient(options: J1Options) { 541 | return new JupiterOneClient(options); 542 | } 543 | 544 | export type { JupiterOneClient }; 545 | -------------------------------------------------------------------------------- /src/j1/j1GraphQL.ts: -------------------------------------------------------------------------------- 1 | import gql from 'graphql-tag'; 2 | import { EntityPropertyValue, Entity, Relationship } from '~/src/j1/types'; 3 | import { JupiterOneQuery as JupiterOneGraphQLQuery } from '~/src/j1'; 4 | import { PolicyBuilderConfig } from '../types'; 5 | 6 | export type J1QLVariables = Record; 7 | 8 | export type DeferredJ1QLInput = { 9 | query: string; 10 | variables?: J1QLVariables; 11 | }; 12 | 13 | export type DeferredJ1QLOutput = { 14 | url: string; 15 | }; 16 | 17 | export const QUERY_DEFERRED_J1QL: JupiterOneGraphQLQuery< 18 | DeferredJ1QLInput, 19 | DeferredJ1QLOutput 20 | > = { 21 | nameForLogging: 'Deferred J1QL Query', 22 | ast: gql` 23 | query J1QL($query: String!, $variables: JSON) { 24 | result: queryV1( 25 | query: $query 26 | variables: $variables 27 | remember: false 28 | includeDeleted: false 29 | deferredResponse: FORCE 30 | ) { 31 | url 32 | } 33 | } 34 | `, 35 | }; 36 | 37 | export type UpdateEntityInput = { 38 | timestamp: number; 39 | entity: Partial & { 40 | _id: string; 41 | }; 42 | }; 43 | 44 | export type UpdateEntityOutput = { 45 | entity: Entity; 46 | }; 47 | 48 | export const MUTATION_UPDATE_ENTITY: JupiterOneGraphQLQuery< 49 | UpdateEntityInput, 50 | UpdateEntityOutput 51 | > = { 52 | nameForLogging: 'Update Entity', 53 | ast: gql` 54 | mutation UpdateEntityV2($timestamp: Long, $entity: JSON!) 
{ 55 | result: updateEntityV2(timestamp: $timestamp, entity: $entity) { 56 | entity 57 | } 58 | } 59 | `, 60 | }; 61 | 62 | export type UpdateRelationshipInput = { 63 | timestamp: number; 64 | relationship: Partial & { 65 | _id: string; 66 | }; 67 | }; 68 | 69 | export type UpdateRelationshipOutput = { 70 | relationship: Relationship; 71 | }; 72 | 73 | export const MUTATION_UPDATE_RELATIONSHIP: JupiterOneGraphQLQuery< 74 | UpdateRelationshipInput, 75 | UpdateRelationshipOutput 76 | > = { 77 | nameForLogging: 'Update Relationship', 78 | ast: gql` 79 | mutation UpdateRelationshipV2($timestamp: Long, $relationship: JSON!) { 80 | result: updateRelationshipV2( 81 | timestamp: $timestamp 82 | relationship: $relationship 83 | ) { 84 | relationship 85 | } 86 | } 87 | `, 88 | }; 89 | 90 | export type UpdateConfigInput = { 91 | values: PolicyBuilderConfig['organization']; 92 | }; 93 | 94 | export type UpdateConfigOutput = { 95 | values: { 96 | config: PolicyBuilderConfig['organization']; 97 | }; 98 | }; 99 | 100 | export const MUTATION_UPDATE_CONFIG: JupiterOneGraphQLQuery< 101 | UpdateConfigInput, 102 | UpdateConfigOutput 103 | > = { 104 | nameForLogging: 'Update Config', 105 | ast: gql` 106 | mutation updateCompanyValues($values: JSON!) { 107 | updateCompanyValues(values: $values) { 108 | values 109 | } 110 | } 111 | `, 112 | }; 113 | 114 | export type UpsertPolicyInput = { 115 | data: { 116 | id: string; 117 | file: string; 118 | title: string; 119 | template: string; 120 | }; 121 | }; 122 | 123 | export type UpsertPolicyOutput = { 124 | uuid: string; 125 | }; 126 | 127 | export const MUTATION_UPSERT_POLICY: JupiterOneGraphQLQuery< 128 | UpsertPolicyInput, 129 | UpsertPolicyOutput 130 | > = { 131 | nameForLogging: 'Upsert Policy', 132 | ast: gql` 133 | mutation upsertPolicyById($data: CreatePolicyInput!) { 134 | upsertPolicyById(data: $data) { 135 | uuid 136 | } 137 | } 138 | `, 139 | }; 140 | 141 | export type UpsertProcedureInput = { 142 | data: { 143 | id: string; 144 | file: string; 145 | name: string; 146 | policyId: string; 147 | template: string; 148 | provider?: string; 149 | isRef?: boolean; 150 | applicable?: boolean; 151 | adopted?: boolean; 152 | summary: string; 153 | }; 154 | }; 155 | 156 | export type UpsertProcedureOutput = { 157 | uuid: string; 158 | }; 159 | 160 | export const MUTATION_UPSERT_PROCEDURE: JupiterOneGraphQLQuery< 161 | UpsertPolicyInput, 162 | UpsertPolicyOutput 163 | > = { 164 | nameForLogging: 'Upsert Policy', 165 | ast: gql` 166 | mutation upsertProcedureById($data: CreateProcedureInput!) { 167 | upsertProcedureById(data: $data) { 168 | uuid 169 | } 170 | } 171 | `, 172 | }; 173 | 174 | export type ReorderAllItemsByMappingInput = { 175 | mapping: { policies: { id: string; procedures: string[] }[] }; 176 | }; 177 | 178 | export type ReorderAllItemsByMappingOutput = { 179 | uuid: string; 180 | }; 181 | 182 | export const MUTATION_REORDER_ITEMS: JupiterOneGraphQLQuery< 183 | ReorderAllItemsByMappingInput, 184 | ReorderAllItemsByMappingOutput 185 | > = { 186 | nameForLogging: 'Upsert Policy', 187 | ast: gql` 188 | mutation reorderAllItemsByMapping($mapping: PolicyMappingInput!) 
{ 189 | reorderAllItemsByMapping(mapping: $mapping) { 190 | policies { 191 | id 192 | } 193 | } 194 | } 195 | `, 196 | }; 197 | -------------------------------------------------------------------------------- /src/j1/types.ts: -------------------------------------------------------------------------------- 1 | export type EntityPropertyValuePrimitive = string | number | boolean; 2 | 3 | export type EntityPropertyValue = 4 | | EntityPropertyValuePrimitive 5 | | EntityPropertyValuePrimitive[] 6 | | undefined 7 | | null; 8 | 9 | export type EntityAdditionalProperties = Record; 10 | 11 | export type Entity = EntityAdditionalProperties & { 12 | _id: string; 13 | _type: string; 14 | _class?: string | string[]; 15 | displayName: string; 16 | }; 17 | 18 | export type RelationshipPropertyValuePrimitive = string | number | boolean; 19 | 20 | export type RelationshipPropertyValue = 21 | | RelationshipPropertyValuePrimitive 22 | | undefined 23 | | null; 24 | 25 | export type RelationshipAdditionalProperties = Record< 26 | string, 27 | RelationshipPropertyValue 28 | >; 29 | 30 | export type Relationship = RelationshipAdditionalProperties & { 31 | _id: string; 32 | _type: string; 33 | _class?: string; 34 | displayName: string; 35 | }; 36 | 37 | export type GraphObject = Entity | Relationship; 38 | -------------------------------------------------------------------------------- /src/publishToConfluence.ts: -------------------------------------------------------------------------------- 1 | import { prompt } from 'inquirer'; 2 | import path from 'path'; 3 | import fs from 'fs-extra'; 4 | import fetch from 'node-fetch'; 5 | import showdown from 'showdown'; 6 | import * as error from '~/src/error'; 7 | 8 | const converter = new showdown.Converter({ 9 | parseImgDimensions: true, 10 | simplifiedAutoLink: true, 11 | tables: true, 12 | }); 13 | 14 | const CONFLUENCE_PAGES = './confluence-pages.json'; 15 | 16 | const CONFLUENCE_DOMAIN = process.env.CONFLUENCE_DOMAIN; 17 | const CONFLUENCE_SPACE = process.env.CONFLUENCE_SPACE; 18 | const CONFLUENCE_USER = process.env.CONFLUENCE_USER; 19 | const CONFLUENCE_PASS = process.env.CONFLUENCE_PASS; 20 | 21 | export type PublishToConfluenceOptions = { 22 | domain: string; 23 | space: string; 24 | username: string; 25 | password: string; 26 | }; 27 | 28 | async function gatherCreds() { 29 | const answer = await prompt([ 30 | { 31 | type: 'input', 32 | name: 'domain', 33 | message: 34 | "Confluence domain (the vanity subdomain before '.atlassian.net'):", 35 | }, 36 | { 37 | type: 'input', 38 | name: 'space', 39 | message: 'Confluence space key:', 40 | }, 41 | { 42 | type: 'input', 43 | name: 'username', 44 | message: 'Confluence username:', 45 | }, 46 | { 47 | type: 'password', 48 | name: 'password', 49 | message: 'Confluence password:', 50 | }, 51 | ]); 52 | return { 53 | domain: answer.domain, 54 | space: answer.space, 55 | username: answer.username, 56 | password: answer.password, 57 | }; 58 | } 59 | 60 | function parseLinks( 61 | pageUrl: string, 62 | html: string, 63 | confluencePages: Record 64 | ) { 65 | const linkRegex = /href=['"]([\w-]+\.md)(#.*)?['"]/gm; 66 | const match = linkRegex.exec(html); 67 | 68 | return match 69 | ? 
html.replace(linkRegex, `href="${pageUrl}/${confluencePages[match[1]]}"`) 70 | : html; 71 | } 72 | 73 | async function getVersion(headers: Record, page: string) { 74 | const response = await fetch(page, { 75 | method: 'get', 76 | headers, 77 | }); 78 | const result = await response.json(); 79 | return result.version.number; 80 | } 81 | 82 | export default async function publishToConfluence( 83 | source: string, 84 | options: PublishToConfluenceOptions 85 | ) { 86 | const docsPath = source || path.join(__dirname, '../docs'); 87 | if (!fs.existsSync(docsPath)) { 88 | error.fatal('Please run `psp build` first to generate the policy docs.'); 89 | } 90 | 91 | const { domain, space, username, password } = 92 | options || (await gatherCreds()); 93 | 94 | const site = `https://${domain || CONFLUENCE_DOMAIN}.atlassian.net`; 95 | const baseUrl = `${site}/wiki/rest/api/content`; 96 | const pageUrl = `${site}/wiki/spaces/${space || CONFLUENCE_SPACE}/pages`; 97 | 98 | const headers = { 99 | 'Content-Type': 'application/json', 100 | Accept: 'application/json', 101 | Authorization: `Basic ${Buffer.from( 102 | (username || CONFLUENCE_USER) + ':' + (password || CONFLUENCE_PASS) 103 | ).toString('base64')}`, 104 | }; 105 | 106 | const confluencePages = fs.existsSync(CONFLUENCE_PAGES) 107 | ? JSON.parse(fs.readFileSync(CONFLUENCE_PAGES, { encoding: 'utf8' })) 108 | : {}; 109 | 110 | const worked = []; 111 | const failed = []; 112 | 113 | const docs = fs.readdirSync(docsPath); 114 | 115 | for (const doc of docs) { 116 | const pageId = confluencePages[doc]; 117 | const currentVersion = 118 | pageId && (await getVersion(headers, `${baseUrl}/${pageId}`)); 119 | const version = currentVersion && { number: currentVersion + 1 }; 120 | 121 | if (doc.endsWith('.md')) { 122 | const data = fs.readFileSync(path.join(docsPath, doc), 'utf8'); 123 | const parsedData = data 124 | .replace(/^#(.*)$/m, '') // removes title 125 | .replace(/^ {2}(-|\*)/gm, ' -') // fixes sublist indentation 126 | .replace(/&/gm, '&') 127 | .replace(/[‘’]/gm, `'`) // fixes quote character 128 | .replace(/[“”]/gm, `"`); 129 | const html = converter 130 | .makeHtml(parsedData) 131 | .replace(/
<\/pre>/g, '
') 133 | .replace(/<\/table>/g, '
') 134 | .replace(/
/g, '
') 135 | .replace(/<#>/g, '<#>'); 136 | const parsedHtml = parseLinks(pageUrl, html, confluencePages); 137 | 138 | const match = /^#{1,2}(.*)$/m.exec(data); // Title 139 | if (!match) { 140 | failed.push(doc); 141 | console.error(`error parsing title for ${doc}`); 142 | continue; 143 | } 144 | const title = match[1].trim(); 145 | 146 | const body = { 147 | version, 148 | type: 'page', 149 | title, 150 | space: { 151 | key: space || CONFLUENCE_SPACE, 152 | }, 153 | body: { 154 | storage: { 155 | value: parsedHtml, 156 | representation: 'storage', 157 | }, 158 | }, 159 | }; 160 | 161 | const options = { 162 | method: pageId ? 'put' : 'post', 163 | headers, 164 | body: JSON.stringify(body), 165 | }; 166 | 167 | const uri = pageId ? `${baseUrl}/${pageId}` : baseUrl; 168 | const response = await fetch(uri, options); 169 | if (response.ok) { 170 | const result = await response.json(); 171 | confluencePages[doc] = pageId || result.id; 172 | worked.push(doc); 173 | } else { 174 | failed.push(doc); 175 | fs.writeFileSync(`./failed-${doc}.html`, parsedHtml); 176 | console.error(`publish to confluence failed for ${doc}`); 177 | console.error({ response: await response.json() }); 178 | continue; 179 | } 180 | } 181 | } 182 | 183 | fs.writeFileSync( 184 | './confluence-pages.json', 185 | JSON.stringify(confluencePages, null, 2) 186 | ); 187 | 188 | console.log(`Published ${worked.length} docs to Confluence.`); 189 | console.log( 190 | 'Please retain `confluence-pages.json` in the current directory. The file contains Confluence page id mappings used for subsequent updates.' 191 | ); 192 | if (failed.length > 0) { 193 | console.log(`${failed.length} failed:`); 194 | console.log(failed.join('\n')); 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /src/questions/base.ts: -------------------------------------------------------------------------------- 1 | import { PolicyBuilderQuestion } from '~/src/types'; 2 | import * as colors from './helpers/colors'; 3 | import { validateEmail, validateWebURL } from './helpers/validate'; 4 | 5 | export const baseQuestions: PolicyBuilderQuestion[] = [ 6 | { 7 | type: 'input', 8 | name: 'companyFullName', 9 | message: 'Company Full Name (e.g. ACME, Inc.)', 10 | }, 11 | { 12 | type: 'input', 13 | name: 'companyShortName', 14 | message: 'Company Short Name (e.g. 
ACME)', 15 | }, 16 | { 17 | type: 'input', 18 | name: 'companyOverview', 19 | message: 20 | 'Describe the company, its products and operations (to be used in report generation)', 21 | }, 22 | { 23 | type: 'input', 24 | name: 'companyEmailDomain', 25 | message: 'Company email domain (without the @ sign)', 26 | }, 27 | { 28 | type: 'input', 29 | name: 'companyMailingAddress', 30 | message: 'Company mailing address', 31 | }, 32 | { 33 | type: 'input', 34 | name: 'contactPhoneNumber', 35 | message: 'Company contact phone number', 36 | }, 37 | { 38 | type: 'input', 39 | name: 'ctoName', 40 | message: "Your Head of Engineering's full name", 41 | }, 42 | { 43 | type: 'input', 44 | name: 'ctoEmail', 45 | message: "Your Head of Engineering's email address", 46 | validate: validateEmail, 47 | }, 48 | { 49 | type: 'input', 50 | name: 'cooName', 51 | message: "Your Chief Operating Officer's full name", 52 | }, 53 | { 54 | type: 'input', 55 | name: 'cooEmail', 56 | message: "Your Chief Operating Officer's email address", 57 | validate: validateEmail, 58 | }, 59 | { 60 | type: 'input', 61 | name: 'ceoName', 62 | message: "Your Chief Executive Officer's full name", 63 | }, 64 | { 65 | type: 'input', 66 | name: 'ceoEmail', 67 | message: "Your Chief Executive Officer's email address", 68 | validate: validateEmail, 69 | }, 70 | { 71 | type: 'input', 72 | name: 'securityOfficerName', 73 | message: "Your Security Officer's full name", 74 | }, 75 | { 76 | type: 'input', 77 | name: 'securityOfficerEmail', 78 | message: "Your Security Officer's email address", 79 | validate: validateEmail, 80 | }, 81 | { 82 | type: 'input', 83 | name: 'privacyOfficerName', 84 | message: "Your Privacy Officer's full name", 85 | }, 86 | { 87 | type: 'input', 88 | name: 'privacyOfficerEmail', 89 | message: "Your Privacy Officer's email address", 90 | }, 91 | { 92 | type: 'input', 93 | name: 'securityCommitteeMembers', 94 | message: 95 | 'List members of the Security Committee (e.g. Security Officer, Privacy Officer, CTO, COO, etc.)', 96 | }, 97 | { 98 | type: 'confirm', 99 | name: 'needStandardHIPAA', 100 | message: 'Do you need to be HIPAA compliant', 101 | default: false, 102 | }, 103 | { 104 | type: 'confirm', 105 | name: 'needStandardHITRUST', 106 | message: 'Are you targeting HITRUST certification', 107 | default: false, 108 | }, 109 | { 110 | type: 'confirm', 111 | name: 'wantCustomMkdocsTemplate', 112 | message: 113 | 'Do you want to customize the look and feel of your policy HTML output files (i.e. MkDocs styling)', 114 | default: false, 115 | }, 116 | { 117 | type: 'input', 118 | name: 'mkdocsLogoURL', 119 | message: 'Link to company logo', 120 | validate: validateWebURL, 121 | when: function (answers) { 122 | return answers.wantCustomMkdocsTemplate === true; 123 | }, 124 | }, 125 | { 126 | type: 'list', 127 | name: 'mkdocsThemeColorPrimary', 128 | message: `Primary theme color`, 129 | choices: colors.primaryColorChoices, 130 | when: function (answers) { 131 | return answers.wantCustomMkdocsTemplate === true; 132 | }, 133 | }, 134 | { 135 | type: 'list', 136 | name: 'mkdocsThemeColorAccent', 137 | message: `Accent theme color`, 138 | choices: colors.accentColorChoices, 139 | when: function (answers) { 140 | return answers.wantCustomMkdocsTemplate === true; 141 | }, 142 | }, 143 | { 144 | type: 'input', 145 | name: 'securityPolicyURL', 146 | message: 147 | 'Where will publish your Security Policies (e.g. 
https://yourcompany.com/security)', 148 | validate: validateWebURL, 149 | }, 150 | { 151 | type: 'input', 152 | name: 'privacyPolicyURL', 153 | message: 154 | 'Where will publish your Privacy Policies (e.g. https://yourcompany.com/privacy)', 155 | validate: validateWebURL, 156 | }, 157 | { 158 | type: 'input', 159 | name: 'privacyPolicyURL', 160 | message: 161 | 'Where will publish your Cookie Policy (e.g. https://yourcompany.com/cookie-policy)', 162 | validate: validateWebURL, 163 | }, 164 | { 165 | type: 'input', 166 | name: 'ticketingSystem', 167 | message: 'Which source control system do you use (e.g. Github, Bitbucket)', 168 | }, 169 | { 170 | type: 'input', 171 | name: 'ticketingSystem', 172 | message: 173 | 'Name of ticketing system in use for issue/change management (e.g. Jira)', 174 | }, 175 | { 176 | type: 'input', 177 | name: 'cmPortal', 178 | message: 179 | 'Link to system/portal that implements the production change management ticketing and workflow', 180 | validate: validateWebURL, 181 | }, 182 | { 183 | type: 'input', 184 | name: 'ciSystem', 185 | message: 186 | 'Name of Continuous Integration/Build system in use (e.g. Jenkins)', 187 | }, 188 | { 189 | type: 'input', 190 | name: 'ciSystem', 191 | message: 192 | 'Name of the HR system your organization uses (e.g. Bamboo HR, EaseCentral)', 193 | }, 194 | { 195 | type: 'confirm', 196 | name: 'haveSecurityScorecard', 197 | message: 198 | 'Do you produce a periodic security metrics scorecard / executive report', 199 | default: false, 200 | }, 201 | ]; 202 | -------------------------------------------------------------------------------- /src/questions/helpers/colors.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | 3 | export function accentColorChoices() { 4 | return materialColors 5 | .filter((c) => c.primary_only !== true) 6 | .map((c) => { 7 | return chalk.hex(c.hex).bold(c.name); 8 | }); 9 | } 10 | 11 | export function primaryColorChoices() { 12 | return materialColors.map((c) => { 13 | return chalk.hex(c.hex).bold(c.name); 14 | }); 15 | } 16 | 17 | export const materialColors = [ 18 | { 19 | name: 'red', 20 | hex: 'f44336', 21 | primary_only: false, 22 | }, 23 | { 24 | name: 'pink', 25 | hex: 'E91E63', 26 | primary_only: false, 27 | }, 28 | { 29 | name: 'purple', 30 | hex: '9C27B0', 31 | primary_only: false, 32 | }, 33 | { 34 | name: 'deep purple', 35 | hex: '673AB7', 36 | primary_only: false, 37 | }, 38 | { 39 | name: 'indigo', 40 | hex: '3F51B5', 41 | primary_only: false, 42 | }, 43 | { 44 | name: 'blue', 45 | hex: '2196F3', 46 | primary_only: false, 47 | }, 48 | { 49 | name: 'light blue', 50 | hex: '03A9F4', 51 | primary_only: false, 52 | }, 53 | { 54 | name: 'cyan', 55 | hex: '00BCD4', 56 | primary_only: false, 57 | }, 58 | { 59 | name: 'teal', 60 | hex: '009688', 61 | primary_only: false, 62 | }, 63 | { 64 | name: 'green', 65 | hex: '4CAF50', 66 | primary_only: false, 67 | }, 68 | { 69 | name: 'light green', 70 | hex: '8BC34A', 71 | primary_only: false, 72 | }, 73 | { 74 | name: 'lime', 75 | hex: 'CDDC39', 76 | primary_only: false, 77 | }, 78 | { 79 | name: 'yellow', 80 | hex: 'FFEB3B', 81 | primary_only: false, 82 | }, 83 | { 84 | name: 'amber', 85 | hex: 'FFC107', 86 | primary_only: false, 87 | }, 88 | { 89 | name: 'orange', 90 | hex: 'FF9800', 91 | primary_only: false, 92 | }, 93 | { 94 | name: 'deep orange', 95 | hex: 'FF5722', 96 | primary_only: false, 97 | }, 98 | { 99 | name: 'brown', 100 | hex: '795548', 101 | primary_only: true, 102 | }, 103 
| { 104 | name: 'grey', 105 | hex: '9E9E9E', 106 | primary_only: true, 107 | }, 108 | { 109 | name: 'blue grey', 110 | hex: '607D8B', 111 | primary_only: true, 112 | }, 113 | { 114 | name: 'white', 115 | hex: 'FFFFFF', 116 | primary_only: true, 117 | }, 118 | ]; 119 | -------------------------------------------------------------------------------- /src/questions/helpers/validate.ts: -------------------------------------------------------------------------------- 1 | import joi from 'joi'; 2 | 3 | const emailSchema = joi.string().email(); 4 | const urlSchema = joi.string().uri({ scheme: [/https?/] }); 5 | 6 | export function validateEmail(addr: string) { 7 | if (addr.trim() === '') { 8 | return true; 9 | } 10 | 11 | const result = emailSchema.validate(addr); 12 | return result.error == null ? true : 'Please enter a valid email address'; 13 | } 14 | 15 | export function validateWebURL(url: string) { 16 | if (url.trim() === '') { 17 | return true; 18 | } 19 | const result = urlSchema.validate(url); 20 | return result.error == null ? true : 'Please enter a valid Web URL'; 21 | } 22 | -------------------------------------------------------------------------------- /src/questions/hipaa.ts: -------------------------------------------------------------------------------- 1 | import { PolicyBuilderQuestion } from '~/src/types'; 2 | 3 | export const hipaaQuestions: PolicyBuilderQuestion[] = [ 4 | { 5 | type: 'confirm', 6 | name: 'isHIPAACoveredEntity', 7 | message: 'Is your organization a Covered Entity?', 8 | default: false, 9 | }, 10 | { 11 | type: 'confirm', 12 | name: 'isHIPAABusinessAssociate', 13 | message: 'Is your organization a Business Associate?', 14 | default: false, 15 | }, 16 | { 17 | type: 'confirm', 18 | name: 'isHIPAAGovernmentEntity', 19 | message: 'Is your organization a Government Entity?', 20 | default: false, 21 | }, 22 | { 23 | type: 'confirm', 24 | name: 'isHIPAAPlanSponsor', 25 | message: 'Is your organization a Plan Sponsor?', 26 | default: false, 27 | }, 28 | { 29 | type: 'confirm', 30 | name: 'isHIPAAHealthcareClearinghouse', 31 | message: 'Is your organization a Healthcare Clearinghouse?', 32 | default: false, 33 | }, 34 | ]; 35 | -------------------------------------------------------------------------------- /src/questions/scorecard.ts: -------------------------------------------------------------------------------- 1 | import { PolicyBuilderQuestion } from '~/src/types'; 2 | import { validateWebURL } from './helpers/validate'; 3 | 4 | export const scorecardQuestions: PolicyBuilderQuestion[] = [ 5 | { 6 | type: 'input', 7 | name: 'securityScorecardPeriod', 8 | message: 'How often do you publish updates to the scorecard? 
Every:', 9 | }, 10 | { 11 | type: 'input', 12 | name: 'securityScorecardURL', 13 | message: 'Link to the published scorecard', 14 | validate: validateWebURL, 15 | }, 16 | ]; 17 | -------------------------------------------------------------------------------- /src/render.ts: -------------------------------------------------------------------------------- 1 | import * as configure from './configure'; 2 | import mustache from 'mustache'; 3 | import * as assets from './assets'; 4 | import chalk from 'chalk'; 5 | import path from 'path'; 6 | import pMap from 'p-map'; 7 | import fs from 'fs-extra'; 8 | import { DEFAULT_MKDOCS_YML } from '~/src/constants'; 9 | import { 10 | Organization, 11 | PolicyBuilderConfig, 12 | PolicyBuilderPartial, 13 | PolicyBuilderPaths, 14 | PolicyBuilderStatus, 15 | } from '~/src/types'; 16 | 17 | function fillTemplate(templateFile: string, orgConfig: Organization) { 18 | return mustache.render(fs.readFileSync(templateFile, 'utf8'), orgConfig); 19 | } 20 | 21 | async function renderMkdocsYAML( 22 | config: PolicyBuilderConfig, 23 | paths: PolicyBuilderPaths, 24 | outputPath: string 25 | ) { 26 | const ymlTemplate = path.join(paths.templates!, 'mkdocs/mkdocs.yml.tmpl'); 27 | if (!fs.pathExistsSync(ymlTemplate)) { 28 | const mkdocsTemplateDir = path.join(paths.templates!, 'mkdocs'); 29 | if (!fs.pathExistsSync(mkdocsTemplateDir)) { 30 | fs.mkdirSync(mkdocsTemplateDir); 31 | } 32 | fs.copyFileSync( 33 | path.join(__dirname, '..', DEFAULT_MKDOCS_YML), 34 | ymlTemplate 35 | ); 36 | } 37 | 38 | let rendered; 39 | try { 40 | rendered = fillTemplate(ymlTemplate, config.organization); 41 | } catch (err) { 42 | throw new Error(`Unable to render ${ymlTemplate}: ${err}`); 43 | } 44 | 45 | // append pages list to rendered yml body 46 | rendered += generateMkdocsPages(config); 47 | 48 | try { 49 | await assets.writeFileAsync(outputPath, rendered); 50 | console.log(chalk.green(`saved ${outputPath}`)); 51 | } catch (err) { 52 | console.log(chalk.yellow(err.message)); 53 | } 54 | } 55 | 56 | async function renderTemplateFile( 57 | templateFile: string, 58 | orgConfig: Organization, 59 | outputPath: string 60 | ) { 61 | let rendered; 62 | try { 63 | rendered = fillTemplate(templateFile, orgConfig); 64 | } catch (err) { 65 | throw new Error(`Unable to render ${templateFile}: ${err}`); 66 | } 67 | try { 68 | await assets.writeFileAsync(outputPath, rendered); 69 | } catch (err) { 70 | throw new Error( 71 | `Unable to save rendered template to ${outputPath}: ${err}` 72 | ); 73 | } 74 | return outputPath; 75 | } 76 | 77 | function mergeAutomaticPSPVars(config: PolicyBuilderConfig) { 78 | const defaultRevision = `${new Date().getFullYear()}.1`; 79 | 80 | const merged: PolicyBuilderConfig = { 81 | organization: { 82 | defaultRevision, 83 | }, 84 | }; 85 | 86 | Object.assign(merged, config); 87 | return merged; 88 | } 89 | 90 | async function renderPSPDocs( 91 | config: PolicyBuilderConfig, 92 | paths: PolicyBuilderPaths 93 | ) { 94 | const status: PolicyBuilderStatus = { ok: [], errors: [], type: 'PSP Docs' }; 95 | const adoptedElements = configure.getAdoptedPSPElements(config); 96 | 97 | let sectionView, partialFile, outputPath, adoptedProcedures, viewPartials; 98 | 99 | // assemble final .md documents from policy and procedure partials 100 | // TODO: add standards 101 | for (const policy of adoptedElements.policies ?? 
[]) { 102 | try { 103 | // append policy partial 104 | partialFile = path.join(paths.partials, policy.file); 105 | sectionView = (await fs.readFile(partialFile, 'utf8')) + '\n'; 106 | 107 | // gather procedure partials 108 | adoptedProcedures = configure.getAdoptedProceduresForPolicy( 109 | policy.id, 110 | config 111 | ); 112 | viewPartials = await pMap(adoptedProcedures, async (procedure) => { 113 | partialFile = path.join(paths.partials, procedure.file); 114 | return '\n' + (await fs.readFile(partialFile, 'utf8')) + '\n'; 115 | }); 116 | 117 | if (viewPartials.length > 0) { 118 | sectionView += '\n\n## Controls and Procedures\n\n'; 119 | } 120 | 121 | // append procedure partials 122 | for (const view of viewPartials) { 123 | sectionView += view; 124 | } 125 | 126 | // write out assembled view 127 | outputPath = path.join(paths.output, path.basename(policy.file)); 128 | await assets.writeFileAsync(outputPath, sectionView); 129 | 130 | console.log(chalk.green(`assembled ${outputPath}`)); 131 | status.ok.push(outputPath); 132 | } catch (err) { 133 | if (outputPath) { 134 | status.errors.push(outputPath); 135 | } 136 | console.error( 137 | chalk.yellow( 138 | `unable to assemble section view for policy '${policy.id}': ${err}` 139 | ) 140 | ); 141 | } 142 | } 143 | 144 | // generate reference docs 145 | let partialConfig; 146 | for (const ref of adoptedElements.references ?? []) { 147 | partialFile = path.join(paths.partials, ref.file); 148 | outputPath = path.join(paths.output, path.basename(ref.file)); 149 | 150 | // merge partial metadata with config 151 | partialConfig = Object.assign({}, config.organization, ref); 152 | 153 | try { 154 | await renderTemplateFile(partialFile, partialConfig, outputPath); 155 | console.log(chalk.green(`generated ${outputPath}`)); 156 | status.ok.push(outputPath); 157 | } catch (err) { 158 | status.errors.push(outputPath); 159 | console.error(chalk.yellow(err.message)); 160 | } 161 | } 162 | return status; 163 | } 164 | 165 | async function renderPartials( 166 | config: PolicyBuilderConfig, 167 | paths: PolicyBuilderPaths 168 | ) { 169 | const status: PolicyBuilderStatus = { ok: [], errors: [], type: 'partials' }; 170 | 171 | config = mergeAutomaticPSPVars(config); 172 | 173 | // TODO: we're ignoring standards for now... 
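  // The map below resolves each partial type ('policies' | 'procedures' |
  // 'references') to the directory under paths.partials that receives its
  // rendered output. Each adopted partial's `<file>.tmpl` template is then
  // looked up under paths.templates, verified to exist on disk, and rendered
  // with the merged organization + partial config via pMap.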
174 | const partialDirs = { 175 | policies: path.join(paths.partials, 'policies'), 176 | procedures: path.join(paths.partials, 'procedures'), 177 | references: path.join(paths.partials, 'ref'), 178 | }; 179 | 180 | const partials = configure.getAdoptedPartials(config); 181 | // generate paths to partial template files 182 | for (const partial of partials) { 183 | partial.tFile = path.join(paths.templates!, `${partial.file}.tmpl`); 184 | if (!assets.fileExistsSync(partial.tFile)) { 185 | throw new Error( 186 | `configured partial template ${partial.tFile} does not exist on disk` 187 | ); 188 | } 189 | } 190 | 191 | await pMap(partials, async (partial: PolicyBuilderPartial) => { 192 | const relPartialDir = partialDirs[partial.type]; 193 | if (relPartialDir === undefined) { 194 | throw new Error( 195 | `unsupported partial path '${partial.type}' for ${partial.id}` 196 | ); 197 | } 198 | 199 | // merge partial metadata with config 200 | const partialConfig = Object.assign({}, config.organization, partial); 201 | 202 | // render partial template to partials outputPath 203 | const outputPath = path.join(relPartialDir, path.basename(partial.file)); 204 | try { 205 | await renderTemplateFile(partial.tFile!, partialConfig, outputPath); 206 | console.log(chalk.grey(`generated ${outputPath}`)); 207 | status.ok.push(outputPath); 208 | } catch (err) { 209 | console.error(chalk.yellow(err.message)); 210 | status.errors.push(outputPath); 211 | } 212 | }); 213 | 214 | return status; 215 | } 216 | 217 | function generateMkdocsPages(config: PolicyBuilderConfig) { 218 | const toc = generateTOC(config); 219 | 220 | let pages = 'pages:\n'; 221 | 222 | pages += `- 'Home': 'index.md'\n`; 223 | 224 | toc.forEach((page) => { 225 | pages += `- '${page.name}': '${page.file}'\n`; 226 | }); 227 | 228 | return pages; 229 | } 230 | 231 | // returns array of { name: '', file: '' } objects 232 | function generateTOC(config: PolicyBuilderConfig) { 233 | const toc: { 234 | name: string; 235 | file: string; 236 | }[] = []; 237 | const adoptedElements = configure.getAdoptedPSPElements(config); 238 | adoptedElements.policies.forEach((policy, idx) => { 239 | toc.push({ 240 | name: `${idx}. ${policy.name}`, 241 | file: path.basename(policy.file), 242 | }); 243 | }); 244 | 245 | adoptedElements.references.forEach((ref, idx) => { 246 | const al = String.fromCharCode(idx + 64 + 1); // map 0 -> A, 1 -> B, etc... 247 | toc.push({ 248 | name: `Appendix ${al}. 
${ref.name}`, 249 | file: path.basename(ref.file), 250 | }); 251 | }); 252 | 253 | return toc; 254 | } 255 | 256 | function generateIndexTemplate(config: PolicyBuilderConfig) { 257 | const toc = generateTOC(config); 258 | 259 | let indexTemplate = 260 | '# {{companyShortName}} Security Policies, Standards, and Procedures\n\n'; 261 | 262 | toc.forEach((page) => { 263 | indexTemplate += `* [${page.name}](${page.file})\n`; 264 | }); 265 | 266 | return indexTemplate; 267 | } 268 | 269 | async function renderIndexPage( 270 | config: PolicyBuilderConfig, 271 | paths: PolicyBuilderPaths, 272 | outputPath: string 273 | ) { 274 | const indexTemplate = generateIndexTemplate(config); 275 | 276 | // save generated template 277 | const indexTemplateFile = path.join(paths.partials, 'index.md.tmpl'); 278 | try { 279 | await assets.writeFileAsync(indexTemplateFile, indexTemplate); 280 | } catch (err) { 281 | throw new Error( 282 | `Unable to save index template to ${indexTemplateFile}: ${err}` 283 | ); 284 | } 285 | 286 | let renderedFile; 287 | 288 | try { 289 | renderedFile = await renderTemplateFile( 290 | indexTemplateFile, 291 | config.organization, 292 | outputPath 293 | ); 294 | console.log(chalk.grey(`generated ${renderedFile}`)); 295 | } catch (err) { 296 | console.error(chalk.yellow(err.message)); 297 | } 298 | 299 | return renderedFile; 300 | } 301 | 302 | const test = { 303 | fillTemplate, 304 | generateIndexTemplate, 305 | generateMkdocsPages, 306 | mergeAutomaticPSPVars, 307 | }; 308 | 309 | export { 310 | renderIndexPage, 311 | renderMkdocsYAML, 312 | renderPSPDocs, 313 | renderPartials, 314 | renderTemplateFile, 315 | generateTOC, 316 | test, 317 | }; 318 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { Opaque } from 'type-fest'; 2 | import { 3 | EntityAdditionalProperties, 4 | RelationshipAdditionalProperties, 5 | } from '~/src/j1/types'; 6 | 7 | export interface PolicyBuilderElement { 8 | id: string; 9 | file: string; 10 | name?: string; 11 | type?: ProcedureType; 12 | provider?: string; 13 | summary?: string; 14 | guidance?: string; 15 | applicable?: boolean; 16 | resources?: Resource[]; 17 | adopted?: boolean; 18 | procedures?: string[]; 19 | webLink?: string; 20 | } 21 | 22 | export interface PolicyBuilderConfig { 23 | organization: Organization; 24 | standards?: PolicyBuilderElement[]; 25 | policies?: PolicyBuilderElement[]; 26 | procedures?: PolicyBuilderElement[]; 27 | references?: PolicyBuilderElement[]; 28 | } 29 | 30 | export type PolicyBuilderCollectionName = 31 | | 'standards' 32 | | 'policies' 33 | | 'procedures' 34 | | 'references'; 35 | 36 | export type PolicyBuilderPaths = { 37 | output: string; 38 | templates?: string; 39 | partials: string; 40 | }; 41 | 42 | export interface HipaaConfig { 43 | isHIPAACoveredEntity: boolean; 44 | isHIPAABusinessAssociate: boolean; 45 | isHIPAAGovernmentEntity: boolean; 46 | isHIPAAPlanSponsor: boolean; 47 | isHIPAAHealthcareClearinghouse: boolean; 48 | } 49 | 50 | export interface HipaaAssessmentConfig { 51 | hasHIPAATrainingGap: boolean; 52 | hasInfoSecTrainingGap: boolean; 53 | hasRiskAssessmentGap: boolean; 54 | hasPenTestGap: boolean; 55 | lastPenTestDate: boolean; 56 | lastPenTestProvider: boolean; 57 | penTestFrequency: boolean; 58 | nextPenTestDate: boolean; 59 | hadDataBreach: boolean; 60 | } 61 | 62 | export interface Organization extends Partial { 63 | companyFullName?: string; 64 | 
companyShortName?: string; 65 | companyEmailDomain?: string; 66 | companyWebsiteURL?: string; 67 | companyMailingAddress?: string; 68 | companyOverview?: string; 69 | contactPhoneNumber?: string; 70 | ceoName?: string; 71 | ceoEmail?: string; 72 | cooName?: string; 73 | cooEmail?: string; 74 | ctoName?: string; 75 | ctoEmail?: string; 76 | securityOfficerName?: string; 77 | securityOfficerEmail?: string; 78 | privacyOfficerName?: string; 79 | privacyOfficerEmail?: string; 80 | securityCommitteeMembers?: string; 81 | wantCustomMkdocsTemplate?: boolean; 82 | mkdocsLogoURL?: string; 83 | mkdocsThemeColorPrimary?: string; 84 | mkdocsThemeColorAccent?: string; 85 | securityPolicyURL?: string; 86 | privacyPolicyURL?: string; 87 | cookiePolicyURL?: string; 88 | sourceControl?: string; 89 | ticketingSystem?: string; 90 | internalHelpdeskURL?: string; 91 | cmPortal?: string; 92 | ciSystem?: string; 93 | hrSystem?: string; 94 | supportBYODandMDM?: boolean; 95 | haveSecurityScorecard?: boolean; 96 | securityScorecardPeriod?: string; 97 | securityScorecardURL?: string; 98 | expenseReporting?: string; 99 | devWikiURL?: string; 100 | hipaaTrainingURL?: string; 101 | statusPageURL?: string; 102 | securityAwarenessTrainingProvider?: string; 103 | IdP?: string; 104 | CPA?: string; 105 | needStandardHIPAA?: boolean; 106 | needStandardHITRUST?: boolean; 107 | needStandardGDPR?: boolean; 108 | needStandardNIST?: boolean; 109 | needStandardPCI?: boolean; 110 | isServiceProvider?: boolean; 111 | mkdocsLogoFile?: string; 112 | defaultRevision?: string; 113 | } 114 | 115 | export type ProcedureId = Opaque; 116 | 117 | export type AdoptedPolicyBuilderElements = { 118 | standards: PolicyBuilderElement[]; 119 | policies: PolicyBuilderElement[]; 120 | procedures: PolicyBuilderElement[]; 121 | references: PolicyBuilderElement[]; 122 | }; 123 | 124 | export interface Resource { 125 | name?: string; 126 | link?: string; 127 | } 128 | 129 | export enum ProcedureType { 130 | Administrative = 'administrative', 131 | Informational = 'informational', 132 | Operational = 'operational', 133 | Physical = 'physical', 134 | Technical = 'technical', 135 | } 136 | 137 | export type PolicyBuilderQuestion = { 138 | type: 'input' | 'confirm' | 'list'; 139 | name: keyof Organization; 140 | message: string; 141 | choices?: () => string[]; 142 | validate?: (value: string) => boolean | string; 143 | default?: boolean | string; 144 | when?: (answers: Record) => boolean; 145 | }; 146 | 147 | export type PolicyAssessmentQuestion = { 148 | type: 'input' | 'confirm' | 'list'; 149 | name: keyof HipaaAssessmentConfig; 150 | message: string; 151 | choices?: () => string[]; 152 | validate?: (value: string) => boolean | string; 153 | default?: boolean | string; 154 | when?: (answers: Record) => boolean; 155 | }; 156 | 157 | export type Gap = { 158 | ref: string; 159 | title: string; 160 | }; 161 | 162 | export type PolicyBuilderStatus = { 163 | ok: string[]; 164 | errors: string[]; 165 | type: string; 166 | }; 167 | 168 | export type PolicyBuilderPartialType = 'policies' | 'procedures' | 'references'; 169 | export type PolicyBuilderPartial = Omit & { 170 | tFile?: string; 171 | type: PolicyBuilderPartialType; 172 | }; 173 | 174 | export type StandardName = string; 175 | 176 | export type AssessmentAnswers = Partial; 177 | export type AssessmentInput = Organization & 178 | AssessmentAnswers & { 179 | date: Date; 180 | isHIPAACoveredEntityText: string; 181 | isHIPAABusinessAssociateText: string; 182 | }; 183 | 184 | export type ControlsMappings = { 
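  // Each entry associates a procedure (by id) with the standards it
  // implements, down to the specific requirement and control references.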
185 | procedures: { 186 | id: string; 187 | implements: { 188 | standard: string; 189 | requirements: string[]; 190 | controls: string[]; 191 | }[]; 192 | }[]; 193 | }; 194 | 195 | export type AnnotatedRefs = Record; 196 | 197 | export type StandardRequirement = { 198 | ref: string; 199 | title: string; 200 | summary: string; 201 | appliesIf?: string; 202 | hasGap?: boolean; 203 | noadoption?: boolean; 204 | adoptedCPs?: PolicyBuilderElement[]; 205 | unAdoptedCPs?: PolicyBuilderElement[]; 206 | }; 207 | 208 | export type StandardConfig = { 209 | standard: string; 210 | version: string; 211 | webLink: string; 212 | sections: { 213 | title: string; 214 | requirements: StandardRequirement[]; 215 | }[]; 216 | }; 217 | 218 | export type SecurityEntityType = 219 | | 'security_policy' 220 | | 'security_procedure' 221 | | 'security_document'; 222 | 223 | export type SecurityRelationshipType = 'procedure|implements|policy'; 224 | export type SecurityEntityClass = 'Document' | 'Policy' | 'Procedure'; 225 | export type SecurityRelationshipClass = 'IMPLEMENTS'; 226 | 227 | export type EntityForSync = EntityAdditionalProperties & { 228 | _key: string; 229 | _class: SecurityEntityClass[]; 230 | _type: SecurityEntityType; 231 | _rawData?: Record< 232 | string, 233 | { 234 | body: string; 235 | contentType: 'application/json'; 236 | } 237 | >; 238 | }; 239 | 240 | export type RelationshipForSync = RelationshipAdditionalProperties & { 241 | _key: string; 242 | _class: SecurityRelationshipClass; 243 | _type: SecurityRelationshipType; 244 | _fromEntityKey: string; 245 | _toEntityKey: string; 246 | }; 247 | 248 | export type SectionName = 'policies' | 'procedures' | 'references'; 249 | 250 | export type TemplateData = Record>; 251 | -------------------------------------------------------------------------------- /src/util/pickAdopted.ts: -------------------------------------------------------------------------------- 1 | import { PolicyBuilderElement } from '~/src/types'; 2 | 3 | function adoptedFilter(item: { adopted?: boolean }) { 4 | return item.adopted !== false; 5 | } 6 | 7 | export default function pickAdopted( 8 | collection: T[] | undefined 9 | ): T[] { 10 | return collection ? 
collection.filter(adoptedFilter) : []; 11 | } 12 | -------------------------------------------------------------------------------- /static/assets/_config.scss: -------------------------------------------------------------------------------- 1 | $md-color-primary: $clr-red-400; 2 | -------------------------------------------------------------------------------- /static/assets/css/custom.css: -------------------------------------------------------------------------------- 1 | .md-sidebar--primary { 2 | background-color: #EFEFEF; 3 | } 4 | 5 | .md-sidebar--secondary { 6 | /*Hide the TOC sidebar*/ 7 | /*display: none;*/ 8 | } 9 | 10 | .md-content { 11 | background-color: white; 12 | } 13 | 14 | .md-nav--secondary * { 15 | font-size: 14px; 16 | } 17 | 18 | a.md-icon { 19 | /* Hide the repo edit icon/link on each page */ 20 | /*display: none;*/ 21 | } 22 | 23 | .md-logo img { 24 | /* width: 100px; */ 25 | -webkit-filter: brightness(0) invert(1); 26 | filter: brightness(0) invert(1); 27 | } 28 | -------------------------------------------------------------------------------- /static/assets/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JupiterOne/security-policy-builder/b3844cdb7b435928e713aaa509ac59232a8fe1f3/static/assets/images/favicon.ico -------------------------------------------------------------------------------- /static/assets/images/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | logo 5 | Created with Sketch. 6 | 7 | 8 | 19 | 20 | -------------------------------------------------------------------------------- /test/fixtures/downloads/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JupiterOne/security-policy-builder/b3844cdb7b435928e713aaa509ac59232a8fe1f3/test/fixtures/downloads/.gitkeep -------------------------------------------------------------------------------- /test/fixtures/empty_config.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /test/fixtures/minimal_populated_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "organization": { 3 | "companyFullName": "FooCorp, LLC.", 4 | "companyShortName": "FooCorp", 5 | "companyEmailDomain": "foocorp.com", 6 | "companyWebsiteURL": "https://www.foocorp.com", 7 | "companyMailingAddress": "555 Timer Ct., Providence, RI", 8 | "contactPhoneNumber": "1-866-555-1212", 9 | "ceoName": "Fred Farthings", 10 | "ceoEmail": "fred@foocorp.com", 11 | "cooName": "Silvia Rousey", 12 | "cooEmail": "silvia@foocorp.com", 13 | "ctoName": "Bob Svast", 14 | "ctoEmail": "bob@foocorp.com", 15 | "securityOfficerName": "Jane Rhodes", 16 | "securityOfficerEmail": "jane@foocorp.com", 17 | "privacyOfficerName": "Peter Collings", 18 | "privacyOfficerEmail": "pete@foocorp.com", 19 | "securityCommitteeMembers": "Bob, Jane, Peter", 20 | "needStandardHIPAA": true, 21 | "needStandardHITRUST": false, 22 | "wantCustomMkdocsTemplate": false, 23 | "mkdocsLogoURL": "http://foocorp.com/logo.png", 24 | "mkdocsThemeColorPrimary": "DarkOrange", 25 | "mkdocsThemeColorAccent": "SaddleBrown", 26 | "securityPolicyURL": "https://security.foocorp.com", 27 | "privacyPolicyURL": "https://foocorp.com/privacy", 28 | "ticketingSystem": "JIRA", 29 | "cmPortal": "JIRA", 30 | "ciSystem": 
"Jenkins", 31 | "haveSecurityScorecard": true, 32 | "securityScorecardPeriod": "month", 33 | "securityScorecardURL": "https://security.foocorp.com/scorecard" 34 | }, 35 | "standards": [ 36 | { 37 | "id": "hipaa", 38 | "file": "st-hipaa.md", 39 | "name": "HIPAA", 40 | "type": "compliance", 41 | "supported": true, 42 | "adopted": true 43 | }, 44 | { 45 | "id": "hitrust-csf", 46 | "file": "st-hitrust.md", 47 | "name": "HITRUST Common Security Framework", 48 | "type": "certification", 49 | "supported": true, 50 | "adopted": false 51 | }, 52 | { 53 | "id": "nist-csf", 54 | "file": "st-nist.md", 55 | "name": "NIST Cybersecurity Framework", 56 | "type": "standard", 57 | "supported": false, 58 | "adopted": false 59 | }, 60 | { 61 | "id": "iso2700x", 62 | "file": "st-iso2700x.md", 63 | "name": "ISO 27001/27002", 64 | "type": "standard", 65 | "supported": false, 66 | "adopted": false 67 | }, 68 | { 69 | "id": "cis", 70 | "file": "st-cis.md", 71 | "name": "CIS Critical Security Controls", 72 | "type": "standard", 73 | "supported": false, 74 | "adopted": false 75 | }, 76 | { 77 | "id": "owasp", 78 | "file": "st-owasp.md", 79 | "name": "OWASP Top Ten", 80 | "type": "best-practice", 81 | "supported": false, 82 | "adopted": false 83 | }, 84 | { 85 | "id": "pci", 86 | "file": "st-pci.md", 87 | "name": "PCI-DSS", 88 | "type": "compliance", 89 | "supported": false, 90 | "adopted": false 91 | }, 92 | { 93 | "id": "gdpr", 94 | "file": "st-gdpr.md", 95 | "name": "EU General Data Protection Regulation (GDPR)", 96 | "type": "compliance", 97 | "supported": false, 98 | "adopted": false 99 | } 100 | ], 101 | "policies": [ 102 | { 103 | "id": "intro", 104 | "file": "test", 105 | "name": "Security Program Overview", 106 | "adopted": true, 107 | "procedures": [] 108 | } 109 | ], 110 | "procedures": [], 111 | "references": [] 112 | } 113 | -------------------------------------------------------------------------------- /test/fixtures/templates/assessments/hipaa.md.tmpl: -------------------------------------------------------------------------------- 1 | {{ date }} 2 | -------------------------------------------------------------------------------- /test/fixtures/templates/mkdocs/mkdocs.yml.tmpl: -------------------------------------------------------------------------------- 1 | site_name: '{{ companyShortName }} Security PSP' 2 | site_description: '{{ companyShortName }} Security Policies, Standards, and Procedures' 3 | site_url: '{{& securityPolicyURL }}' 4 | 5 | theme: 6 | name: material 7 | theme_dir: 'custom_theme' 8 | include_sidebar: false 9 | palette: 10 | primary: '{{ mkdocsThemeColorPrimary }}' 11 | accent: '{{ mkdocsThemeColorAccent }}' 12 | favicon: 'assets/images/favicon.ico' 13 | logo: '{{& mkdocsLogoFile }}' 14 | 15 | # Enable custom.css 16 | extra_css: 17 | - 'assets/css/custom.css' 18 | 19 | markdown_extensions: 20 | - smarty 21 | - toc: 22 | permalink: true 23 | - sane_lists 24 | - admonition 25 | - codehilite: 26 | guess_lang: false 27 | -------------------------------------------------------------------------------- /test/fixtures/templates/ref/sanction-notice.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JupiterOne/security-policy-builder/b3844cdb7b435928e713aaa509ac59232a8fe1f3/test/fixtures/templates/ref/sanction-notice.pdf -------------------------------------------------------------------------------- /test/fixtures/templates/test.tmpl: -------------------------------------------------------------------------------- 1 | 
the quick {{ color }} fox jumps over the {{ adjective }} dog. -------------------------------------------------------------------------------- /test/integration/cli.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/prefer-regexp-exec */ 2 | import path from 'path'; 3 | import { execFile } from 'child_process'; 4 | import fs from 'fs-extra'; 5 | import pkg from '~/package.json'; 6 | import { DEFAULT_TEMPLATES } from '~/src/constants'; 7 | 8 | const moduleVersion = pkg.version; 9 | const cli = path.join(__dirname, '../..', pkg.bin.psp); 10 | 11 | const fixturesDir = path.join(__dirname, '../fixtures'); 12 | const templatesDir = path.join(fixturesDir, 'templates'); 13 | const defaultTemplatesDir = path.join(__dirname, '../../', DEFAULT_TEMPLATES); 14 | 15 | beforeAll(() => { 16 | const failedTestDirs = fs.readdirSync(fixturesDir).filter((dir) => { 17 | return dir.match(/test_[a-z0-9]+/); 18 | }); 19 | failedTestDirs.forEach((dir) => { 20 | fs.rmdirSync(path.join(fixturesDir, dir), { 21 | recursive: true, 22 | }); 23 | }); 24 | }); 25 | 26 | let workDir: string; 27 | beforeEach(() => { 28 | workDir = path.join( 29 | fixturesDir, 30 | 'test_' + Math.random().toString(36).substring(2, 5) 31 | ); 32 | fs.mkdirpSync(workDir); 33 | }); 34 | 35 | afterEach(() => { 36 | fs.rmdirSync(workDir, { 37 | recursive: true, 38 | }); 39 | }); 40 | 41 | test('psp-builder shows module version when run with --version', async () => { 42 | await new Promise((resolve, reject) => { 43 | execFile(cli, ['build', '--version'], { cwd: workDir }, function ( 44 | err, 45 | stdout, 46 | stderr 47 | ) { 48 | if (err) { 49 | reject(err); 50 | } else { 51 | expect(stdout.trim()).toBe(moduleVersion); 52 | resolve(); 53 | } 54 | }); 55 | }); 56 | }); 57 | 58 | test('psp-builder errors if optional config file is not valid JSON', async () => { 59 | await expect( 60 | new Promise((resolve, reject) => { 61 | execFile( 62 | cli, 63 | ['build', '--config', '/not/a/valid/file'], 64 | { cwd: workDir }, 65 | function (err, stdout, stderr) { 66 | if (err) { 67 | reject(err); 68 | } else { 69 | resolve(); 70 | } 71 | } 72 | ); 73 | }) 74 | ).rejects.toThrowError(); 75 | }); 76 | 77 | test('psp-builder errors if --noninteractive is given with insufficient --config JSON', async () => { 78 | await expect( 79 | new Promise((resolve, reject) => { 80 | const jsonFile = path.join(__dirname, '../fixtures/empty_config.json'); 81 | execFile( 82 | cli, 83 | ['build', '--noninteractive', '-c', jsonFile], 84 | { cwd: workDir }, 85 | function (err, stdout, stderr) { 86 | if (err) { 87 | reject(err); 88 | } else { 89 | resolve(); 90 | } 91 | } 92 | ); 93 | }) 94 | ).rejects.toThrowError(); 95 | }); 96 | 97 | test('psp-builder fails when unable to write to output dir', async () => { 98 | await expect( 99 | new Promise((resolve, reject) => { 100 | const jsonFile = path.join(fixturesDir, 'populated_config.json'); 101 | execFile( 102 | cli, 103 | ['build', '-n', '-c', jsonFile, '-o', '/dev/null', '-t', templatesDir], 104 | { cwd: workDir }, 105 | function (err, stdout, stderr) { 106 | if (err) { 107 | reject(err); 108 | } else { 109 | resolve(); 110 | } 111 | } 112 | ); 113 | }) 114 | ).rejects.toThrowError(); 115 | }); 116 | 117 | test('psp-builder exposes templates dir on first use', async () => { 118 | const exposedDir = path.join(workDir, 'templates'); 119 | expect(fs.existsSync(exposedDir)).toBe(false); 120 | const jsonFile = path.join(fixturesDir, 'populated_config.json'); 
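  // First-use behaviour under test: running `psp build` non-interactively with
  // the default templates should expose them as a ./templates directory inside
  // the working directory, which did not exist before the run (asserted above)
  // and must exist afterwards (asserted in the execFile callback below).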
121 | 122 | await new Promise((resolve, reject) => { 123 | execFile( 124 | cli, 125 | ['build', '-n', '-c', jsonFile, '-t', defaultTemplatesDir], 126 | { cwd: workDir }, 127 | function (err, stdout, stderr) { 128 | if (err) { 129 | return reject(err); 130 | } 131 | expect(fs.statSync(exposedDir).isDirectory()).toBe(true); 132 | resolve(); 133 | } 134 | ); 135 | }); 136 | }); 137 | -------------------------------------------------------------------------------- /test/unit/assess.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/prefer-regexp-exec */ 2 | import * as assessment from '~/src/assessment'; 3 | import path from 'path'; 4 | import { promises as fsPromises } from 'fs'; 5 | import { AssessmentInput, PolicyBuilderPaths } from '~/src/types'; 6 | 7 | test('assessment.validateOrgValues returns true when org values are populated', () => { 8 | const org = require('../fixtures/populated_config.json').organization; 9 | expect(assessment.validateOrgValues(org)).toBe(true); 10 | }); 11 | 12 | test('assessment.validateOrgValues returns false when org values are missing', () => { 13 | expect(assessment.validateOrgValues({})).toBe(false); 14 | }); 15 | 16 | test('assessment.validateOrgValues returns false when org values are blank', () => { 17 | const org = require('../fixtures/populated_config.json').organization; 18 | org.securityOfficerName = ''; 19 | expect(assessment.validateOrgValues(org)).toBe(false); 20 | }); 21 | 22 | test('assessment.questions returns parsed inquirer structure when valid standard is given', () => { 23 | expect((assessment.questions('HIPAA')[0] as any).message).toBeTruthy(); 24 | }); 25 | 26 | test('assessment.questions throws when invalid standard is given', () => { 27 | expect(() => { 28 | assessment.questions('INVALID'); 29 | }).toThrowError(); 30 | }); 31 | 32 | test('assessment.generateReport creates a reportfile', async () => { 33 | const outputDir = path.join( 34 | __dirname, 35 | '../fixtures', 36 | 'test_' + Math.random().toString(36).substring(2, 5) 37 | ); 38 | const expectedOutputFile = path.join(outputDir, 'hipaa-20180704-000000.md'); 39 | const org = { 40 | date: new Date('2018/07/04'), 41 | } as AssessmentInput; 42 | const paths = { 43 | output: outputDir, 44 | templates: path.join(__dirname, '../fixtures/templates'), 45 | } as PolicyBuilderPaths; 46 | await assessment.generateReport(org, 'HIPAA', paths); 47 | expect((await fsPromises.stat(expectedOutputFile)).isFile()).toBe(true); 48 | await fsPromises.rmdir(outputDir, { 49 | recursive: true, 50 | }); 51 | }); 52 | 53 | test('assessment.generateGapSummary yields positive report when there are no gaps', () => { 54 | const config = require('../fixtures/populated_config.json'); 55 | expect( 56 | assessment 57 | .generateGapSummary([], config, 'HIPAA') 58 | .match(/^.*met or exceeded all requirements.*$/) 59 | ).toBeTruthy(); 60 | expect( 61 | assessment 62 | .generateGapSummary( 63 | [ 64 | { 65 | ref: 'x', 66 | title: 'y', 67 | }, 68 | ], 69 | config, 70 | 'HIPAA' 71 | ) 72 | .match(/^.*has compliance gaps.*$/) 73 | ).toBeTruthy(); 74 | }); 75 | 76 | test('assessment.generateGapList yields one line of markdown per gap', () => { 77 | const gaps = [ 78 | { ref: 'test', title: 'title' }, 79 | { ref: 'test2', title: 'title2' }, 80 | ]; 81 | expect(assessment.generateGapList(gaps).match(/\n/g)!.length).toBe(2); 82 | }); 83 | 84 | test('assessment.calculateCPGaps yields no gaps if all mapped standard requirements are adopted per 
config', async () => { 85 | const config = require('../fixtures/populated_config.json'); 86 | const { cpGaps } = await assessment.calculateCPGaps('HIPAA', config, { 87 | output: 'output', 88 | partials: 'partials', 89 | templates: undefined, 90 | }); 91 | expect(cpGaps.length).toBe(0); 92 | }); 93 | 94 | test('assessment.calculateCPGaps yields gaps if not all mapped standard requirements are adopted per config', async () => { 95 | const config = require('../fixtures/populated_config.json'); 96 | config.procedures.filter( 97 | (p: any) => p.id === 'cp-policy-mgmt' 98 | )[0].adopted = false; 99 | const { cpGaps } = await assessment.calculateCPGaps('HIPAA', config, { 100 | output: 'output', 101 | partials: 'partials', 102 | templates: undefined, 103 | }); 104 | expect(cpGaps.length).toBeGreaterThan(0); 105 | }); 106 | 107 | test('assessment.generateStandardControlsMapping shows Gaps', async () => { 108 | const config = require('../fixtures/populated_config.json'); 109 | config.procedures.filter( 110 | (p: any) => p.id === 'cp-policy-mgmt' 111 | )[0].adopted = false; 112 | const { annotatedRefs } = await assessment.calculateCPGaps('HIPAA', config, { 113 | output: 'output', 114 | partials: 'partials', 115 | templates: undefined, 116 | }); 117 | const mapping = assessment.generateStandardControlsMapping( 118 | annotatedRefs, 119 | config 120 | ); 121 | expect( 122 | mapping.match(/No applicable controls or procedures have been adopted/) 123 | ).toBeTruthy(); 124 | }); 125 | -------------------------------------------------------------------------------- /test/unit/assets.test.ts: -------------------------------------------------------------------------------- 1 | import * as assets from '~/src/assets'; 2 | import { promises as fsPromises } from 'fs'; 3 | import path from 'path'; 4 | import nock from 'nock'; 5 | import { PolicyBuilderConfig, PolicyBuilderPaths } from '~/src/types'; 6 | 7 | let context: { 8 | logoFile: string; 9 | scope: any; 10 | outDir: string; 11 | logoURL: string; 12 | badLogoURL: string; 13 | outFile: string; 14 | }; 15 | beforeEach(() => { 16 | const server = 'https://stopmocking.me'; 17 | const logoFile = 'logo.png'; 18 | const outDir = 19 | 'test/fixtures/downloads/' + Math.random().toString(36).substring(7); 20 | 21 | context = { 22 | logoFile, 23 | scope: nock(server).get('/notafile.png').reply(404), 24 | outDir, 25 | logoURL: `${server}/${logoFile}`, 26 | badLogoURL: `${server}/notafile.png`, 27 | outFile: path.join(outDir, 'assets/images', logoFile), 28 | }; 29 | 30 | nock(server) 31 | .get('/' + context.logoFile) 32 | .reply(200, 'logodata'); 33 | }); 34 | 35 | afterEach(async () => { 36 | context.scope.remove(); 37 | await fsPromises.rmdir(context.outDir, { 38 | recursive: true, 39 | }); 40 | }); 41 | 42 | test('assets.downloadCustomLogo downloads and saves file', async () => { 43 | const config = { 44 | organization: { 45 | wantCustomMkdocsTemplate: true, 46 | mkdocsLogoURL: context.logoURL, 47 | }, 48 | }; 49 | 50 | const paths = { 51 | output: context.outDir, 52 | } as PolicyBuilderPaths; 53 | 54 | const newConfig = await assets.downloadCustomLogo(config, paths); 55 | 56 | expect(Object.keys(config).length < Object.keys(newConfig).length).toBe(true); 57 | expect((await fsPromises.stat(context.outFile)).isFile()).toBe(true); 58 | }); 59 | 60 | test('assets.downloadCustomLogo sets default logo template var if no custom logo configured', async () => { 61 | const config = await assets.downloadCustomLogo( 62 | {} as PolicyBuilderConfig, 63 | { 64 | output: 
context.outDir, 65 | } as PolicyBuilderPaths 66 | ); 67 | expect(config.mkdocsLogoFile).toBe('assets/images/logo.svg'); 68 | }); 69 | 70 | test('assets.downloadCustomLogo throws error when unable to download url', async () => { 71 | const config = { 72 | organization: { 73 | wantCustomMkdocsTemplate: true, 74 | mkdocsLogoURL: context.badLogoURL, 75 | }, 76 | } as PolicyBuilderConfig; 77 | 78 | await expect( 79 | assets.downloadCustomLogo(config, { 80 | output: context.outDir, 81 | } as PolicyBuilderPaths) 82 | ).rejects.toThrowError(); 83 | }); 84 | 85 | test('assets.downloadCustomLogo throws error when unable to save file', async () => { 86 | const config = { 87 | organization: { 88 | wantCustomMkdocsTemplate: true, 89 | mkdocsLogoURL: context.logoURL, 90 | }, 91 | } as PolicyBuilderConfig; 92 | 93 | const notADirectory = '/dev/null'; 94 | 95 | await expect( 96 | assets.downloadCustomLogo(config, { 97 | output: notADirectory, 98 | } as PolicyBuilderPaths) 99 | ).rejects.toThrowError(); 100 | }); 101 | 102 | test('assets.copyStaticAssets throws error when unable to create dir', async () => { 103 | const notADirectory = '/dev/null/dir'; 104 | await expect( 105 | assets.copyStaticAssets({ 106 | output: notADirectory, 107 | templates: notADirectory, 108 | } as PolicyBuilderPaths) 109 | ).rejects.toThrowError(); 110 | }); 111 | -------------------------------------------------------------------------------- /test/unit/colors.test.ts: -------------------------------------------------------------------------------- 1 | import * as colors from '~/src/questions/helpers/colors'; 2 | 3 | test('colors.primary has more options than colors.accent', () => { 4 | expect( 5 | colors.primaryColorChoices().length > colors.accentColorChoices().length 6 | ).toBe(true); 7 | }); 8 | -------------------------------------------------------------------------------- /test/unit/configure.test.ts: -------------------------------------------------------------------------------- 1 | import * as configure from '~/src/configure'; 2 | import { baseQuestions as questions } from '~/src/questions/base'; 3 | import * as validate from '~/src/questions/helpers/validate'; 4 | 5 | test('configure.missingOrganizationValues shows all values when {} is passed', () => { 6 | expect(questions.length).toBe(configure.missingOrganizationValues({}).length); 7 | }); 8 | 9 | test('configure.missingOrEmptyOrganizationValues filters inquirer questions array for missing values', () => { 10 | expect( 11 | configure.missingOrEmptyOrganizationValues({ companyShortName: 'FooCorp' }) 12 | .length 13 | ).toBe(questions.length - 1); 14 | }); 15 | 16 | test('configure.missingOrEmptyOrganizationValues inquires on empty values', () => { 17 | expect( 18 | configure.missingOrEmptyOrganizationValues({ companyShortName: '' }).length 19 | ).toBe(questions.length); 20 | }); 21 | 22 | test('inquiries validates email addresses', () => { 23 | expect(validate.validateEmail('not-an-address')).toBe( 24 | 'Please enter a valid email address' 25 | ); 26 | expect(validate.validateEmail('me@privacy.net')).toBe(true); 27 | }); 28 | 29 | test('inquiries validates web urls', () => { 30 | expect(validate.validateWebURL('git://foo/**/../@')).toBe( 31 | 'Please enter a valid Web URL' 32 | ); 33 | expect(validate.validateWebURL('http://bit.ly')).toBe(true); 34 | }); 35 | -------------------------------------------------------------------------------- /test/unit/render.test.ts: -------------------------------------------------------------------------------- 1 | import * as 
render from '~/src/render'; 2 | import { promises as fsPromises } from 'fs'; 3 | import { Organization, PolicyBuilderConfig } from '~/src/types'; 4 | 5 | const tfile = 'test/fixtures/templates/test.tmpl'; 6 | 7 | test('fillTemplate fills a template file with config data', async () => { 8 | const config = { 9 | color: 'orange', 10 | adjective: 'mighty', 11 | } as Organization; 12 | const rendered = render.test.fillTemplate(tfile, config); 13 | expect(rendered).toBe('the quick orange fox jumps over the mighty dog.'); 14 | }); 15 | 16 | test('renderTemplateFile writes the filled data to an output path', async () => { 17 | const config = {}; 18 | await render.renderTemplateFile(tfile, config, './foo/test'); 19 | expect((await fsPromises.stat('./foo/test')).isFile()).toBe(true); 20 | await fsPromises.rmdir('./foo', { 21 | recursive: true, 22 | }); 23 | }); 24 | 25 | test('mergeAutomaticPSPVars sets a defaultRevision', async () => { 26 | const merged = render.test.mergeAutomaticPSPVars({} as PolicyBuilderConfig); 27 | const year = new Date().getFullYear(); 28 | expect(merged.organization.defaultRevision).toBe(`${year}.1`); 29 | }); 30 | -------------------------------------------------------------------------------- /tsconfig.dist.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "dist" 5 | }, 6 | "include": ["src/**/*"] 7 | } 8 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/@jupiterone/typescript-tools/config/typescript.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": ".tsc/dist", 6 | "baseUrl": "." 7 | }, 8 | "include": [ 9 | "src/**/*.ts", 10 | "util/**/*.ts", 11 | "test/**/*.ts", 12 | "jest.*.ts", 13 | "jest.*.js", 14 | "*.config.js" 15 | ], 16 | "exclude": ["**/*.bak/**/*", "**/dist/**/*"] 17 | } 18 | -------------------------------------------------------------------------------- /util/add-web-links.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Example utility to add `webLink` property to procedures in the `config.json` 3 | * file. When published to your JupiterOne account, the URL in `webLink` will be 4 | * used in the Compliance app for each mapped requirement/control, instead of a 5 | * URL linking to the JupiterOne Policies app. 6 | * 7 | * The program is pre-configured with URL patterns for SharePoint and Confluence 8 | * using the name of each policy and procedure. Update the URL pattern as needed. 9 | * 10 | * Prerequisite: Node.js version 10 or later. 
11 | */ 12 | import * as error from '../src/error'; 13 | import commander from 'commander'; 14 | import { PolicyBuilderConfig, PolicyBuilderElement } from '~/src/types'; 15 | 16 | type ProgramInput = { 17 | version?: string; 18 | site?: string; 19 | domain?: string; 20 | key?: string; 21 | replaceSpace?: string; 22 | config?: string; 23 | param?: string; 24 | policyParam?: string; 25 | procedureParam?: string; 26 | lowerCase?: boolean; 27 | }; 28 | 29 | const program = commander 30 | .version(require('../package').version, '-v, --version') 31 | .usage('[options]') 32 | .option( 33 | '-s, --site [SharePoint|Confluence]', 34 | 'the site where policies and procedures are hosted (only SharePoint and Confluence are supported by default)' 35 | ) 36 | .option( 37 | '-d, --domain [name]', 38 | 'company domain name or vanity subdomain name as part of the site URL' 39 | ) 40 | .option( 41 | '-k, --key [directoryNameOrSpaceKey]', 42 | 'subdirectory name or key for the site, such as SPACEKEY for a Confluence site' 43 | ) 44 | .option( 45 | '-r, --replace-space [char]', 46 | 'replace space in URL with this specified character' 47 | ) 48 | .option('-c, --config [file]', 'JSON config file') 49 | .option( 50 | '-p, --param [name|id]', 51 | 'use either "name" or "id" from each policy/procedure to build the URL' 52 | ) 53 | .option( 54 | '--policy-param [name|id]', 55 | 'use either "name" or "id" from each policy to build the URL' 56 | ) 57 | .option( 58 | '--procedure-param [name|id]', 59 | 'use either "name" or "id" from each procedure to build the URL' 60 | ) 61 | .option('-l, --lower-case', 'use all lowercase URL') 62 | .parse(process.argv) 63 | .opts() as ProgramInput; 64 | 65 | const fs = require('fs'); 66 | const configFile = program.config || 'templates/config.json'; 67 | 68 | const domain = program.domain || 'company'; 69 | const site = program.site || 'default'; 70 | const key = program.key; 71 | const spaceChar = program.replaceSpace || ''; 72 | const param = program.param || (site === 'default' ? 'id' : 'name'); 73 | const policyParam = 74 | program.policyParam || (site === 'mkdocs' ? 'id' : undefined); 75 | const procedureParam = program.procedureParam || (site === 'mkdocs' && 'name'); 76 | const forceLowerCase = program.lowerCase || site === 'mkdocs'; 77 | const sectionPrefix = ''; 78 | 79 | type SiteName = 'sharepoint' | 'confluence' | 'mkdocs' | 'custom' | 'default'; 80 | 81 | const sites: Record< 82 | SiteName, 83 | | { 84 | baseUrl: string; 85 | spaceChar: string; 86 | sectionPrefix: string; 87 | } 88 | | undefined 89 | > = { 90 | sharepoint: { 91 | baseUrl: `https://${domain}.sharepoint.com/SitePages${ 92 | key ?
'/' + key : '' 93 | }`, 94 | spaceChar: '%20', 95 | sectionPrefix, 96 | }, 97 | confluence: { 98 | baseUrl: `https://${domain}.atlassian.net/wiki/display/${key}`, 99 | spaceChar: '+', 100 | sectionPrefix, 101 | }, 102 | mkdocs: { 103 | baseUrl: `https://${domain}/${key}`, 104 | spaceChar: '-', 105 | sectionPrefix: '#', 106 | }, 107 | custom: { 108 | baseUrl: `https://${domain}/${key}`, 109 | spaceChar, 110 | sectionPrefix, 111 | }, 112 | default: { 113 | baseUrl: `https://apps.us.jupiterone.io/policies`, 114 | spaceChar: '', 115 | sectionPrefix, 116 | }, 117 | }; 118 | 119 | const config = JSON.parse(fs.readFileSync(configFile)) as PolicyBuilderConfig; 120 | const siteName = site.toLocaleLowerCase() as SiteName; 121 | const siteConfig = sites[siteName]; 122 | 123 | if (!siteConfig) { 124 | throw error.fatal(`Unsupported site: ${site}`); 125 | } 126 | 127 | const mapping: Record<string, PolicyBuilderElement> = {}; 128 | 129 | for (const policy of config.policies || []) { 130 | for (const procedureId of policy.procedures || []) { 131 | mapping[procedureId] = policy; 132 | } 133 | } 134 | 135 | for (const procedure of config.procedures || []) { 136 | const id = procedure.id; 137 | const policy = mapping[id]; 138 | 139 | const part1 = (policy as any)[policyParam || param] as string; 140 | const part2 = (procedure as any)[procedureParam || param] as string; 141 | 142 | const webLink = `${siteConfig.baseUrl}/${part1.replace( 143 | /\s/g, 144 | siteConfig.spaceChar 145 | )}/${siteConfig.sectionPrefix}${part2.replace(/\s/g, siteConfig.spaceChar)}`; 146 | 147 | procedure.webLink = forceLowerCase ? webLink.toLowerCase() : webLink; 148 | } 149 | 150 | fs.writeFileSync(configFile, JSON.stringify(config, null, 2)); 151 | --------------------------------------------------------------------------------
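A worked illustration of the webLink shape that util/add-web-links.ts produces, as a minimal standalone TypeScript sketch. The domain, space key, and policy/procedure names below are hypothetical, and running the script with ts-node (e.g. `npx ts-node util/add-web-links.ts --site Confluence --domain mycompany --key SEC`) is an assumption not stated in the repository; only the space-replacement rule and URL layout mirror the script above.

// Minimal sketch of the webLink construction for a Confluence site.
// All concrete values (domain, space key, policy/procedure names) are hypothetical;
// only the replacement logic follows util/add-web-links.ts.
const baseUrl = 'https://mycompany.atlassian.net/wiki/display/SEC';
const confluenceSpaceChar = '+'; // matches the 'confluence' entry in the sites table
const policyName = 'Access Control Policy'; // hypothetical policy name
const procedureName = 'Password Management'; // hypothetical procedure name

// Confluence uses an empty sectionPrefix, so the link is baseUrl/<policy>/<procedure>
// with whitespace replaced by '+'.
const webLink = `${baseUrl}/${policyName.replace(/\s/g, confluenceSpaceChar)}/${procedureName.replace(/\s/g, confluenceSpaceChar)}`;

console.log(webLink);
// => https://mycompany.atlassian.net/wiki/display/SEC/Access+Control+Policy/Password+Management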