├── .editorconfig ├── .eslintrc.js ├── .github └── workflows │ ├── devScripts.yml │ └── dxb-pipeline.yml ├── .gitignore ├── .husky ├── commit-msg ├── pre-commit └── pre-push ├── .lintstagedrc.js ├── .mocharc.json ├── .nycrc ├── .prettierignore ├── .prettierrc.json ├── .sfdevrc.json ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── Dockerfile ├── LICENSE ├── README.md ├── _config.yml ├── appveyor.yml ├── bin ├── dev ├── dev.cmd ├── run └── run.cmd ├── commitlint.config.js ├── config ├── backup-def.json ├── core-data-def.json ├── cpq-data-def.json └── data-masking-def.json ├── docs ├── dxb_icon.png └── dxb_icon2.png ├── dxbpackagecalc.js ├── messages ├── apex.coverage.check.md ├── apex.coverage.cleanup.md ├── apex.scan.query.md ├── apex.trigger.create.md ├── api.align.md ├── community.publish.md ├── data.backup.md ├── data.bulk.query.md ├── data.export.md ├── data.file.export.md ├── data.import.md ├── data.masking.md ├── data.query.explain.md ├── data.restore.md ├── data.transform.md ├── graphql.convert.md ├── install.md ├── junit.check.md ├── lwc.test.run.md ├── mdapi.convert.md ├── object.create.md ├── object.fields.list.md ├── object.fieldset.md ├── object.prefix.md ├── object.relationships.list.md ├── object.vr.create.md ├── org.create.md ├── org.data.md ├── org.setdefault.md ├── org.setdefaults.md ├── package.dependencies.install.md ├── permissionset.clean.md ├── permissionset.create.md ├── profile.build.md ├── profile.convert.md ├── profile.passwordpolicies.fix.md ├── schema.doc.generate.md ├── source.delta.md ├── source.fetchtest.md ├── source.scanner.md ├── static.create.md └── user.access.why.md ├── package.json ├── sfdx-project.json ├── src ├── commands │ └── dxb │ │ ├── apex │ │ ├── coverage │ │ │ ├── check.ts │ │ │ └── cleanup.ts │ │ ├── scan │ │ │ └── query.ts │ │ └── trigger │ │ │ └── create.ts │ │ ├── api │ │ └── align.ts │ │ ├── community │ │ └── publish.ts │ │ ├── data │ │ ├── backup.ts │ │ ├── bulk │ │ │ └── query.ts │ │ ├── export.ts │ │ ├── file │ │ │ └── export.ts │ │ ├── import.ts │ │ ├── masking.ts │ │ ├── query │ │ │ └── explain.ts │ │ ├── restore.ts │ │ └── transform.ts │ │ ├── graphql │ │ └── convert.ts │ │ ├── install.ts │ │ ├── junit │ │ └── check.ts │ │ ├── lwc │ │ └── test │ │ │ └── run.ts │ │ ├── mdapi │ │ └── convert.ts │ │ ├── object │ │ ├── create.ts │ │ ├── fields │ │ │ └── list.ts │ │ ├── fieldset.ts │ │ ├── prefix.ts │ │ ├── relationships │ │ │ └── list.ts │ │ └── vr │ │ │ └── create.ts │ │ ├── org │ │ ├── create.ts │ │ ├── data.ts │ │ └── setdefault.ts │ │ ├── package │ │ └── dependencies │ │ │ └── install.ts │ │ ├── permissionset │ │ ├── clean.ts │ │ └── create.ts │ │ ├── profile │ │ ├── build.ts │ │ ├── convert.ts │ │ └── passwordpolicies │ │ │ └── fix.ts │ │ ├── schema │ │ └── doc │ │ │ └── generate.ts │ │ ├── source │ │ ├── delta.ts │ │ ├── fetchtest.ts │ │ └── scanner.ts │ │ ├── static │ │ └── create.ts │ │ └── user │ │ └── access │ │ └── why.ts ├── index.ts └── utils │ ├── bootstrap.min.css │ ├── deloitteforce_logo.txt │ ├── documentinfo.json │ ├── metadata-def.json │ ├── puppeteerSFConnector.ts │ ├── sales-cloud-overview-data-model.png │ ├── schema-manual-steps.json │ ├── schema-template-html.html │ ├── schema-template.html │ ├── service-cloud-support-overview-data-model.png │ ├── templates │ ├── apex │ │ ├── ApexClass.cls │ │ ├── ApexClassNoConstructor.cls │ │ ├── BDDUnitTestApexClass.cls │ │ ├── BatchApexClass.cls │ │ ├── ControllerExtension.cls │ │ ├── ExceptionApexClass.cls │ │ ├── HttpCalloutMock.cls │ │ ├── 
SchedulableApexClass.cls │ │ ├── SelectorClass.cls │ │ ├── ServiceClass.cls │ │ ├── UnitTestApexClass.cls │ │ ├── UrlRewriterApexClass.cls │ │ ├── WebServiceMock.cls │ │ ├── WebserviceClass.cls │ │ ├── apex.cls-meta.xml │ │ └── def.json │ └── trigger │ │ ├── apex.cls │ │ ├── apex.cls-meta.xml │ │ ├── def.json │ │ ├── trigger.trigger │ │ └── trigger.trigger-meta.xml │ └── utils.ts ├── test ├── .eslintrc.js ├── commands │ ├── apex │ │ └── trigger │ │ │ └── create.nut.ts │ ├── api │ │ └── align.nut.ts │ ├── install.nut.ts │ └── static │ │ └── create.nut.ts └── tsconfig.json ├── tsconfig.json ├── tslint.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | 10 | [*.md] 11 | trim_trailing_whitespace = false 12 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ['eslint-config-salesforce-typescript', 'plugin:sf-plugin/recommended'], 3 | root: true, 4 | rules: { 5 | '@typescript-eslint/no-unsafe-assignment': 'off', 6 | '@typescript-eslint/no-explicit-any': 'off', 7 | '@typescript-eslint/no-unsafe-member-access': 'off', 8 | '@typescript-eslint/no-unsafe-argument': 'off', 9 | '@typescript-eslint/no-unsafe-call': 'off', 10 | 'jsdoc/check-indentation': 'off', 11 | }, 12 | }; 13 | -------------------------------------------------------------------------------- /.github/workflows/devScripts.yml: -------------------------------------------------------------------------------- 1 | name: devScripts 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | - cron: '50 6 * * 0' 6 | 7 | jobs: 8 | update: 9 | uses: salesforcecli/github-workflows/.github/workflows/devScriptsUpdate.yml@main 10 | secrets: inherit 11 | -------------------------------------------------------------------------------- /.github/workflows/dxb-pipeline.yml: -------------------------------------------------------------------------------- 1 | name: Publish to NPM 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - 'release/**' 8 | paths: 9 | - 'src/**' 10 | - 'package.json' 11 | pull_request: 12 | types: [assigned, opened, synchronize, reopened, edited] 13 | branches: 14 | - master 15 | - 'release/**' 16 | paths: 17 | - 'src/**' 18 | - 'package.json' 19 | 20 | jobs: 21 | validate: 22 | if: github.event_name == 'pull_request' 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout 26 | uses: actions/checkout@v3 27 | 28 | - name: Setup Node.js 29 | uses: actions/setup-node@v3 30 | with: 31 | node-version: 20.x 32 | 33 | - name: Setup Yarn and install dependencies 34 | uses: bahmutov/npm-install@v1.6.0 35 | with: 36 | useLockFile: false 37 | - run: yarn install --ignore-scripts 38 | 39 | - name: Build 40 | run: yarn build 41 | 42 | # - name: Audit packages 43 | # run: npm audit --audit-level=low 44 | # publish_beta: 45 | # if: github.event_name != 'pull_request' && startsWith(github.ref, 'refs/heads/release/') 46 | # runs-on: ubuntu-latest 47 | # steps: 48 | # - uses: actions/checkout@v2 49 | # - uses: actions/setup-node@v3 50 | # with: 51 | # node-version: '16.x' 52 | # - name: Authenticate with private NPM package 53 | # run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.yarnrc 54 | # - name: Yarn Install 55 | # uses: 
bahmutov/npm-install@v1.6.0 56 | # - run: yarn install --ignore-scripts 57 | # - name: Upgrade Package version" 58 | # run: node dxbpackagecalc.js ${{ github.ref }} beta 59 | # - name: Yarn Publish DXB 60 | # run: yarn publish --tag beta 61 | # - name: Commit report 62 | # run: | 63 | # cat package.json 64 | # publish_prod: 65 | # if: github.event_name != 'pull_request' && github.ref == 'refs/heads/master' 66 | # runs-on: ubuntu-latest 67 | # steps: 68 | # - uses: actions/checkout@v2 69 | # - uses: actions/setup-node@v3 70 | # with: 71 | # node-version: '16.x' 72 | # - name: Authenticate with private NPM package 73 | # run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.yarnrc 74 | # - name: Yarn Install 75 | # uses: bahmutov/npm-install@v1.6.0 76 | # - run: yarn install --ignore-scripts 77 | # - name: Upgrade Package version" 78 | # run: node dxbpackagecalc.js master 79 | # - name: Yarn Publish DXB 80 | # run: yarn publish 81 | # - name: Commit report 82 | # run: | 83 | # git config --global user.name 'David Browaeys' 84 | # git config --global user.email 'david.browaeys@gmail.com' 85 | # git add 'package.json' 86 | # git commit -am "DXB Auto Update package version" 87 | # git push 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # -- CLEAN 2 | tmp/ 3 | # use yarn by default, so ignore npm 4 | package-lock.json 5 | 6 | # never checkin npm config 7 | .npmrc 8 | 9 | # debug logs 10 | npm-error.log 11 | yarn-error.log 12 | 13 | 14 | # compile source 15 | lib 16 | 17 | # test artifacts 18 | *xunit.xml 19 | *checkstyle.xml 20 | *unitcoverage 21 | .nyc_output 22 | test_session* 23 | 24 | # generated docs 25 | docs 26 | 27 | # ignore sfdx-trust files 28 | *.tgz 29 | *.sig 30 | package.json.bak. 31 | 32 | # -- CLEAN ALL 33 | *.tsbuildinfo 34 | .eslintcache 35 | .wireit 36 | node_modules 37 | 38 | # -- 39 | # put files here you don't want cleaned with sf-clean 40 | 41 | # os specific files 42 | .DS_Store 43 | .idea 44 | 45 | oclif.manifest.json 46 | .sfdx/ 47 | force-app/ 48 | force-ui/ 49 | manifest/ 50 | .sf/ 51 | oclif.lock 52 | .forceignore 53 | -------------------------------------------------------------------------------- /.husky/commit-msg: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . "$(dirname "$0")/_/husky.sh" 3 | 4 | yarn commitlint --edit 5 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . "$(dirname "$0")/_/husky.sh" 3 | 4 | yarn lint && yarn pretty-quick --staged 5 | -------------------------------------------------------------------------------- /.husky/pre-push: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
"$(dirname "$0")/_/husky.sh" 3 | 4 | yarn build 5 | -------------------------------------------------------------------------------- /.lintstagedrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | '**/*.{js,json,md}?(x)': () => 'npm run reformat', 3 | }; 4 | -------------------------------------------------------------------------------- /.mocharc.json: -------------------------------------------------------------------------------- 1 | { 2 | "require": "ts-node/register,source-map-support/register", 3 | "watch-extensions": "ts", 4 | "recursive": true, 5 | "reporter": "spec", 6 | "timeout": 600000 7 | } 8 | -------------------------------------------------------------------------------- /.nycrc: -------------------------------------------------------------------------------- 1 | { 2 | "check-coverage": false, 3 | "lines": 90, 4 | "statements": 90, 5 | "functions": 90, 6 | "branches": 90 7 | } 8 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # List files or directories below to ignore them when running prettier 2 | # More information: https://prettier.io/docs/en/ignore.html 3 | # 4 | 5 | **/staticresources/** 6 | .localdevserver 7 | .sfdx 8 | .vscode 9 | 10 | coverage/ -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | "@salesforce/prettier-config" 2 | -------------------------------------------------------------------------------- /.sfdevrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "test": { 3 | "testsPath": "test/**/*.nut.ts" 4 | }, 5 | "wireit": { 6 | "test": { 7 | "dependencies": ["test:compile", "test:only", "lint"] 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "attach", 10 | "name": "Attach", 11 | "port": 9229, 12 | "skipFiles": ["/**"] 13 | }, 14 | { 15 | "name": "Run All Tests", 16 | "type": "node", 17 | "request": "launch", 18 | "program": "${workspaceFolder}/node_modules/mocha/bin/mocha", 19 | "args": ["--inspect", "--colors", "test/**/*.test.ts"], 20 | "env": { 21 | "NODE_ENV": "development", 22 | "SFDX_ENV": "development" 23 | }, 24 | "sourceMaps": true, 25 | "smartStep": true, 26 | "internalConsoleOptions": "openOnSessionStart", 27 | "preLaunchTask": "Compile tests" 28 | }, 29 | { 30 | "type": "node", 31 | "request": "launch", 32 | "name": "Run Current Test", 33 | "program": "${workspaceFolder}/node_modules/mocha/bin/mocha", 34 | "args": ["--inspect", "--colors", "${file}"], 35 | "env": { 36 | "NODE_ENV": "development", 37 | "SFDX_ENV": "development" 38 | }, 39 | "sourceMaps": true, 40 | "smartStep": true, 41 | "internalConsoleOptions": "openOnSessionStart", 42 | "preLaunchTask": "Compile tests" 43 | } 44 | ] 45 | } 46 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.git": true, 4 | "**/.svn": true, 5 | "**/.hg": true, 6 | "**/CVS": true, 7 | "**/.DS_Store": true 8 | }, 9 | "search.exclude": { 10 | "**/lib": true, 11 | "**/bin": true 12 | }, 13 | "editor.tabSize": 2, 14 | "editor.formatOnSave": false, 15 | "rewrap.wrappingColumn": 80 16 | } 17 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "problemMatcher": "$tsc", 4 | "tasks": [ 5 | { 6 | "label": "Compile tests", 7 | "group": { 8 | "kind": "build", 9 | "isDefault": true 10 | }, 11 | "command": "yarn", 12 | "type": "shell", 13 | "presentation": { 14 | "focus": false, 15 | "panel": "dedicated" 16 | }, 17 | "args": ["run", "pretest"], 18 | "isBackground": false 19 | } 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:20 2 | 3 | RUN npm install --global @salesforce/sfdx-scanner 4 | RUN npm install --global dxb 5 | RUN npm install --global @cparra/apexdocs 6 | RUN npm install --global sfdx-cli --force 7 | RUN sfdx plugins:link $(npm root -g)/dxb 8 | RUN sfdx plugins:link $(npm root -g)/@salesforce/sfdx-scanner 9 | RUN npm install --global vlocity 10 | RUN export PUPPETEER_SKIP_DOWNLOAD=true && npm install --global puppeteer --unsafe-perm 11 | 12 | #add usefull tools 13 | RUN apt-get update && apt-get install -y jq && apt-get -y install default-jdk 14 | 15 | RUN apt-get update -qq && \ 16 | apt-get -qqy install gnupg wget && \ 17 | wget --quiet --output-document=- https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor > /etc/apt/trusted.gpg.d/google-archive.gpg && \ 18 | sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' && \ 19 | apt-get -qqy --no-install-recommends install chromium && \ 20 | rm -f -r /var/lib/apt/lists/* 21 | 22 | RUN /usr/bin/chromium --no-sandbox --version > /etc/chromium-version 
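The Dockerfile above pre-installs the Salesforce CLI, the DXB and scanner plugins, apexdocs, vlocity, puppeteer and Chromium so CI jobs don't have to. As a minimal sketch of how the image might be consumed (the `dxb-ci` tag and the `/workspace` mount path are assumptions, not defined anywhere in this repo):

```shell
# Build the CI image from this Dockerfile (the tag name is an arbitrary example)
docker build -t dxb-ci .

# Run the pre-installed tooling against a checked-out Salesforce project;
# `sfdx plugins` simply lists the dxb and sfdx-scanner plugins linked in the image
docker run --rm -v "$PWD":/workspace -w /workspace dxb-ci sfdx plugins
```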
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 David Browaeys 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DXB-CLI 2 | 3 |
4 | DXB icon
5 |
6 | Welcome to the future of Salesforce development and automation! Meet DXB., the revolutionary Salesforce CLI plugin that simplifies your workflow. This powerful toolkit bridges the gap between development and deployment, turning tedious tasks into smooth processes.
7 |
8 |
9 | DXB. is your new partner in the Salesforce ecosystem. With its carefully designed commands, it helps developers easily navigate complex Salesforce projects, making build automation a breeze.
10 |
11 |
12 | Whether you're deploying configurations, managing metadata, or handling CI/CD pipelines, DXB. boosts your productivity. Its user-friendly features cater to both new developers and experienced pros.
13 |
14 |
15 | Step into the next level of Salesforce development with DXB. – where innovation meets efficiency, taking your projects to new heights. Welcome aboard, and get ready to transform Salesforce automation!
16 |
17 |
18 | 19 | # Pre-requisite 20 | 21 | 1. Install [node.js. + npm](https://nodejs.org/en/). 22 | Once installed, checkout proxy setting if you are behind corporate proxy. 23 | 24 | 2. Install [SDFX CLI](https://developer.salesforce.com/docs/atlas.en-us.sfdx_setup.meta/sfdx_setup/sfdx_setup_install_cli.htm) 25 | 26 | # Install DXB-CLI 27 | 28 | To get started, you can install it by using salesforce command, open your terminal and type: 29 | 30 | ```shell 31 | sf plugins install dxb@latest 32 | ``` 33 | 34 | # Commands 35 | 36 | - sf dxb apex scan 37 | - sf dxb apex trigger 38 | - sf dxb apex coverage check 39 | - sf dxb apex coverage cleanup 40 | - sf dxb apex scan query 41 | - sf dxb apex trigger create 42 | - sf dxb api align 43 | - sf dxb community publish 44 | - sf dxb data backup 45 | - sf dxb data bulk 46 | - sf dxb data export 47 | - sf dxb data file 48 | - sf dxb data import 49 | - sf dxb data masking 50 | - sf dxb data query 51 | - sf dxb data restore 52 | - sf dxb data transform 53 | - sf dxb data bulk query 54 | - sf dxb data file export 55 | - sf dxb data query explain 56 | - sf dxb graphql convert 57 | - sf dxb install 58 | - sf dxb junit check 59 | - sf dxb lwc test 60 | - sf dxb lwc test run 61 | - sf dxb mdapi convert 62 | - sf dxb object create 63 | - sf dxb object fields 64 | - sf dxb object fieldset 65 | - sf dxb object prefix 66 | - sf dxb object relationships 67 | - sf dxb object vr 68 | - sf dxb object fields list 69 | - sf dxb object relationships list 70 | - sf dxb object vr create 71 | - sf dxb org create 72 | - sf dxb org data 73 | - sf dxb org setdefault 74 | - sf dxb package dependencies install 75 | - sf dxb permissionset clean 76 | - sf dxb permissionset create 77 | - sf dxb profile build 78 | - sf dxb profile convert 79 | - sf dxb profile passwordpolicies 80 | - sf dxb profile passwordpolicies fix 81 | - sf dxb schema doc generate 82 | - sf dxb source delta 83 | - sf dxb source fetchtest 84 | - sf dxb source scanner 85 | - sf dxb static create 86 | - sf dxb user access 87 | - sf dxb user access why 88 | 89 | # Documentation 90 | 91 | Please see our [Wiki](https://github.com/davidbrowaeys/DXB/wiki) to get started. 
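For a quick feel of the plugin before heading to the Wiki, here is a small illustrative sequence. It is only a sketch: the file path and threshold below are placeholders, and the commands and flags are taken from the examples in this repo's `messages/` files.

```shell
# One-off project setup for DXB
sf dxb install

# Explain a query with the Salesforce Query Plan tool
sf dxb data query explain --query "select id from Account where BillingCountry = 'Australia' limit 10"

# Flag any Apex class whose coverage falls below the chosen threshold
sf dxb apex coverage check --file-path tests/coverage/cobertura.xml --min-coverage 90
```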
92 | 93 | # License 94 | 95 | MIT © David Browaeys 96 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | nodejs_version: "10" 3 | cache: 4 | - '%LOCALAPPDATA%\Yarn -> appveyor.yml' 5 | - node_modules -> yarn.lock 6 | 7 | install: 8 | - ps: Install-Product node $env:nodejs_version x64 9 | - yarn 10 | test_script: 11 | - yarn test 12 | 13 | after_test: 14 | - .\node_modules\.bin\nyc report --reporter text-lcov > coverage.lcov 15 | - ps: | 16 | $env:PATH = 'C:\msys64\usr\bin;' + $env:PATH 17 | Invoke-WebRequest -Uri 'https://codecov.io/bash' -OutFile codecov.sh 18 | bash codecov.sh 19 | 20 | 21 | build: off 22 | 23 | -------------------------------------------------------------------------------- /bin/dev: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const oclif = require('@oclif/core'); 4 | 5 | const path = require('path'); 6 | const project = path.join(__dirname, '..', 'tsconfig.json'); 7 | 8 | // In dev mode -> use ts-node and dev plugins 9 | process.env.NODE_ENV = 'development'; 10 | 11 | require('ts-node').register({ project, swc: true }); 12 | // oclif.settings.tsconfigPath = project; 13 | 14 | // In dev mode, always show stack traces 15 | const g = (global.oclif = global.oclif || {}); 16 | 17 | // In dev mode, always show stack traces 18 | global.oclif.debug = true; 19 | 20 | // Start the CLI 21 | oclif.run().then(require('@oclif/core/flush')).catch(require('@oclif/core/handle')); 22 | -------------------------------------------------------------------------------- /bin/dev.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | set NODE_ENV=development 3 | node "%~dp0\dev" %* 4 | -------------------------------------------------------------------------------- /bin/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require('@oclif/core').run().then(require('@oclif/core/flush')).catch(require('@oclif/core/handle')); 4 | -------------------------------------------------------------------------------- /bin/run.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | node "%~dp0\run" %* 4 | -------------------------------------------------------------------------------- /commitlint.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { extends: ['@commitlint/config-conventional'] }; 2 | -------------------------------------------------------------------------------- /config/backup-def.json: -------------------------------------------------------------------------------- 1 | { 2 | "david.browaeys@gmail.com.dev": { 3 | "full": { 4 | "cycle": 1, 5 | "objects": [ 6 | "Account", 7 | "Contact", 8 | "Opportunity" 9 | ], 10 | "backupTime": "2019-08-05T00:50:49.340Z" 11 | }, 12 | "delta": { 13 | "mode": "since_last_delta", 14 | "cycle": 1, 15 | "objects": [ 16 | "Account", 17 | "Contact", 18 | "Opportunity" 19 | ], 20 | "backupTime": "2019-08-05T00:52:12.823Z" 21 | } 22 | } 23 | } -------------------------------------------------------------------------------- 
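The backup definition above is keyed by the org username and describes, per mode, the backup cycle, the objects to extract and the last `backupTime`. A minimal sketch of how such a file is fed to the backup command, mirroring the examples in `messages/data.backup.md` (the `backup` directory name is simply the value used in those examples):

```shell
# Full backup driven by the definition file above (target org defaults to the project default)
sf dxb data backup --mode full --data-dir backup --definition-file config/backup-def.json

# Delta backup, which appears to key off the backupTime values recorded in the definition file
sf dxb data backup --mode delta --data-dir backup --definition-file config/backup-def.json
```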
/config/data-masking-def.json: -------------------------------------------------------------------------------- 1 | { 2 | "Account" : { 3 | "Name" : "name", 4 | "BillingStreet" : "street", 5 | "BillingPostalCode" : "num_str::4", 6 | "Phone" : "phone" 7 | }, 8 | "PersonAccount" : { 9 | "FirstName" : "name", 10 | "LastName" : "name", 11 | "BillingStreet" : "street", 12 | "BillingPostalCode" : "num_str::4" 13 | }, 14 | "Contact" : { 15 | "FirstName" : "name", 16 | "LastName" : "name", 17 | "Email" : "email", 18 | "Home_Email__c": "email", 19 | "MailingStreet": "street", 20 | "OtherStreet" : "street", 21 | "Phone" : null, 22 | "HomePhone" : null, 23 | "MobilePhone" : null, 24 | "OtherPhone" : null 25 | } 26 | } -------------------------------------------------------------------------------- /docs/dxb_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/davidbrowaeys/DXB/4eefeab7e2c6c01e755c1f3833a25a9bc4b4d672/docs/dxb_icon.png -------------------------------------------------------------------------------- /docs/dxb_icon2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/davidbrowaeys/DXB/4eefeab7e2c6c01e755c1f3833a25a9bc4b4d672/docs/dxb_icon2.png -------------------------------------------------------------------------------- /dxbpackagecalc.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | //Read package.json file 4 | fs.readFile("package.json", (err, data) => { 5 | if (err) throw err; 6 | 7 | const releaseBranch = process.argv.length >= 2 ? process.argv[2] : undefined; 8 | const releaseType = process.argv.length >= 2 ? process.argv[3] : undefined; 9 | 10 | //Parse the file contents 11 | let packageJSON = JSON.parse(data); 12 | 13 | console.log("DXB Current Version: ", packageJSON.version); 14 | 15 | //if release type is beta then increment and add beta other 16 | let releaseVersion = releaseBranch.split("release/")[1]; 17 | if (releaseType === 'beta'){ 18 | releaseVersion += '-beta'; 19 | packageJSON.version = releaseVersion; 20 | }else{ 21 | packageJSON.version = packageJSON.version.split('-beta')[0]; 22 | } 23 | console.log("DXB New Version: ", packageJSON.version); 24 | 25 | //Write the updated package.json file 26 | fs.writeFile("package.json", JSON.stringify(packageJSON, null, 4), (err) => { 27 | if (err) throw err; 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /messages/apex.coverage.check.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Read a cobertura xml file and check if any class coverage is below the minumum threshold. 4 | 5 | # description 6 | 7 | This method read cobertura xml file and check if any class coverage is below the minumum threshold. 8 | 9 | # flags.file-path.summary 10 | 11 | Path of xml file 12 | 13 | # flags.min-coverage.summary 14 | 15 | Minimum apex coverage in % 16 | 17 | # examples 18 | 19 | - Specify a cobertura file: 20 | 21 | <%= config.bin %> <%= command.id %> --file-path tests/coverage/cobertura.xml 22 | 23 | - Specify a cobertura file with minimum code coverage: 24 | 25 | <%= config.bin %> <%= command.id %> --file-path tests/coverage/cobertura.xml --min-coverage 99 26 | 27 | # coverageTooLow 28 | 29 | Ooops, coverage seems a bit low! Each apex class is expected at least a coverage of %d. 
30 | 31 | # coverageIsOk 32 | 33 | Code coverage is looking good! 34 | 35 | # insufficientCoverage 36 | 37 | Insufficient Code Coverage! 38 | -------------------------------------------------------------------------------- /messages/apex.coverage.cleanup.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Cleanup cobertura.xml 4 | 5 | # description 6 | 7 | This command cleans up class filenames with actual class file paths in cobertura.xml. This code coverage file is generated by <%= config.bin %> project deploy start when using coverageformatters option as cobertura 8 | 9 | # flags.file-path.summary 10 | 11 | Location of cobertura.xml 12 | 13 | # examples 14 | 15 | - Specify a cobertura file: 16 | 17 | <%= config.bin %> <%= command.id %> --file-path tests/coverage/cobertura.xml 18 | -------------------------------------------------------------------------------- /messages/apex.scan.query.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Scan Apex Classes for Queries and explain them 4 | 5 | # description 6 | 7 | This method goes through all Apex Classes stored in the project and checks if it they contain SOQL queries. 8 | For all queries, a call is made to Salesforce to retrieve the query plan for each query. 9 | 10 | # examples 11 | 12 | - <%= config.bin %> <%= command.id %> 13 | 14 | # class 15 | 16 | Class: %s 17 | 18 | # query 19 | 20 | Query: %s 21 | -------------------------------------------------------------------------------- /messages/apex.trigger.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create trigger and apex class from template. 4 | 5 | # description 6 | 7 | This command creates a trigger and accompanying apex class. Specify the object name according to it's domain layer. 8 | 9 | # examples 10 | 11 | - Specify an sobject: 12 | 13 | <%= config.bin %> <%= command.id %> --sobject Account 14 | 15 | - Specify an API version for the generated files: 16 | 17 | <%= config.bin %> <%= command.id %> -s Account --source-api-version 59.0 18 | 19 | # flags.sobject.summary 20 | 21 | API name of the SObject 22 | 23 | # flags.source-api-version.summary 24 | 25 | Set the API version of the specified class and trigger 26 | 27 | # error.templateNotExist 28 | 29 | Specified template 'trigger' doesn't exist 30 | 31 | # error.defJsonNotFound 32 | 33 | def.json not found 34 | 35 | # error.defJSONVars 36 | 37 | The following variables are required: %s. Specify them like: -v className=myclass,apiName=40.0 38 | -------------------------------------------------------------------------------- /messages/api.align.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Align component API versions 4 | 5 | # description 6 | 7 | Align the API version of components with the API version defined in sfdx-project.json. Add full file paths to plugins.dxb.apiAlignmentExclusion to exclude specific files. 
8 | 9 | # examples 10 | 11 | - Align all components in the project with the API defined in sfdx-project.json: 12 | 13 | <%= config.bin %> <%= command.id %> 14 | 15 | - Specify one or multiple metadata types to align: 16 | 17 | <%= config.bin %> <%= command.id %> --metadata-type ApexClass --metadata-type ApexTrigger 18 | 19 | - Specify directories and files to align: 20 | 21 | <%= config.bin %> <%= command.id %> --directory src/main --directory force-app/main/default/classes/ClassName.meta-xml 22 | 23 | # flags.metadata-type.summary 24 | 25 | The types defined as the 'root' of the XML you want to target 26 | 27 | # flags.metadata-type.description 28 | 29 | Select specific metadata type to align, value is the name of the root tag of the XML file holding the apiVersion tag i.e. <%= command.id %> 10 | 11 | - Publish specific communities 12 | 13 | <%= config.bin %> <%= command.id %> --name portal1 --name partner1 14 | 15 | # flags.name.summary 16 | 17 | Contains the name of a community. If not specified, then will fetch all "Live" communities from target env 18 | -------------------------------------------------------------------------------- /messages/data.backup.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Extract certificates from partner community. You must have access to parner community in order to use this command. 4 | 5 | # examples 6 | 7 | - Use a specific data definition file and directory to do a full backup from Salesforce 8 | 9 | <%= config.bin %> <%= command.id %> --mode full --data-dir backup --definition-file config/backup-def.json 10 | 11 | - Load into a specific org 12 | 13 | <%= config.bin %> <%= command.id %> -m full -d backup -f config/backup-def.json -o myOrg 14 | 15 | - Use delta mode 16 | 17 | <%= config.bin %> <%= command.id %> -m delta -d backup -f config/backup-def.json 18 | 19 | # flags.definition-file.summary 20 | 21 | Path to a data backup definition file 22 | 23 | # flags.data-dir.summary 24 | 25 | Path to main data backup directory 26 | 27 | # flags.mode.summary 28 | 29 | Data backup mode, accepted values are delta or full 30 | 31 | # flags.output-dir.summary 32 | 33 | Path to main data backup directory 34 | 35 | # log.importData 36 | 37 | Import data to org... 38 | 39 | # log.preparing 40 | 41 | Preparing file... 42 | 43 | # log.registerImport 44 | 45 | Register import for %s 46 | 47 | # log.loadResult 48 | 49 | Imported: %s succeeded - %s failed 50 | 51 | # log.deltaFor 52 | 53 | Retrieve delta for %s 54 | 55 | # error.invalidMode 56 | 57 | Invalid backup mode specified. You can only select delta or full backup. 
58 | 59 | # error.noBackupFile 60 | 61 | No backup configuration found in %s for this user 62 | 63 | # error.unexpected 64 | 65 | Unexpected issue occurred: %s 66 | -------------------------------------------------------------------------------- /messages/data.bulk.query.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Export salesforce data using bulk api 4 | 5 | # examples 6 | 7 | - Use a specific Query 8 | 9 | <%= config.bin %> <%= command.id %> --query "select id from Account" 10 | 11 | - Query a specific object 12 | 13 | <%= config.bin %> <%= command.id %> --object-name Contact 14 | 15 | - Specify a specific org to run the query against: 16 | 17 | <%= config.bin %> <%= command.id %> -q "select id from Account" --target-org dev2 18 | 19 | - Specify the output directory 20 | 21 | <%= config.bin %> <%= command.id %> -q "select id from Account" -o dev2 --output-dir ./dataoutputdir, 22 | 23 | - Specify the output directory and output file name 24 | 25 | <%= config.bin %> <%= command.id %> -q "select id from Account" -o dev2 --output-dir ./dataoutputdir --file-name result.csv, 26 | 27 | - Query all fields 28 | 29 | <%= config.bin %> <%= command.id %> -q "select id from Account" -o dev2 -d ./dataoutputdir -f result.csv --all-fields, 30 | 31 | # flags.query.summary 32 | 33 | SOQL query 34 | 35 | # flags.object-name.summary 36 | 37 | Object name 38 | 39 | # flags.output-dir.summary 40 | 41 | Bulk data output directory, default "output_dir" 42 | 43 | # flags.file-name.summary 44 | 45 | Name of the CSV file generated. If not specified, it will default to "\_.csv" 46 | 47 | # flags.all-fields.summary 48 | 49 | Retrieve all fields from the object. 50 | 51 | # error.name.invalidConnection 52 | 53 | Invalid Connection 54 | 55 | # error.name.invalidFlags 56 | 57 | Invalid Flags 58 | 59 | # error.name.invalidSOQL 60 | 61 | Invalid SOQL 62 | 63 | # error.message.noConfiguration 64 | 65 | No configuration found for this org. 66 | 67 | # error.message.queryOrObject 68 | 69 | You must use either --query or --object-name. 70 | 71 | # error.message.invalidSOQL 72 | 73 | No sobject type found in query, maybe caused by invalid SOQL. 74 | -------------------------------------------------------------------------------- /messages/data.export.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Export data from an org base on dxb data plan definition file. 4 | 5 | # examples 6 | 7 | - Use a specific data definition file and directory to load into Salesforce 8 | 9 | <%= config.bin %> <%= command.id %> --definition-file data/data-def.json --data-dir data/sit 10 | 11 | - Load into a specific org 12 | 13 | <%= config.bin %> <%= command.id %> -f data/data-def.json -d data/sit --target-org devorg 14 | 15 | - Specify a specific amount of time to poll (in ms) 16 | 17 | <%= config.bin %> <%= command.id %> -f data/data-def.json -d data/sit -o devorg --polling-time-out 10000 18 | 19 | # flags.definition-file.summary 20 | 21 | Path to a dxb data definition file 22 | 23 | # flags.output-dir.summary 24 | 25 | Path of export directory 26 | 27 | # flags.query-limit.summary 28 | 29 | Maximum number of records to fetch 30 | 31 | # log.importData 32 | 33 | Import data to org... 34 | 35 | # log.preparing 36 | 37 | Preparing file... 
38 | 39 | # log.registerExport 40 | 41 | Register export for %s 42 | 43 | # log.exportResult 44 | 45 | Total records exported: %s record(s) 46 | -------------------------------------------------------------------------------- /messages/data.file.export.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Export data from an org based on dxb data plan definition file. 4 | 5 | # examples 6 | 7 | - Query with a CSV file 8 | 9 | <%= config.bin %> <%= command.id %> --file-path ./input/inputFile.csv 10 | 11 | - Query against a specific org 12 | 13 | <%= config.bin %> <%= command.id %> -f ./input/inputFile.csv --target-org myOrg 14 | 15 | - Only query a specific set of the input file 16 | 17 | <%= config.bin %> <%= command.id %> -f ./input/inputFile.csv -o myOrg --min 1 --max 2 18 | 19 | # flags.file-path.summary 20 | 21 | Path to file containing all content document ids in CSV format 22 | 23 | # flags.min.summary 24 | 25 | The lowest place of the input that must be included 26 | 27 | # flags.max.summary 28 | 29 | The highest value of the input that must be included 30 | 31 | # log.successful 32 | 33 | All files downloaded successfully. 34 | 35 | # log.numberToExtract 36 | 37 | Number of files to extract: %d 38 | 39 | # error.failedDownloads 40 | 41 | Failed to download files: %s 42 | 43 | # error.failedToDownload 44 | 45 | Failed to download file. Status code: %d 46 | 47 | # error.contentVersionError 48 | 49 | Content Version Error for %s! 50 | -------------------------------------------------------------------------------- /messages/data.import.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Import data to an org base on dxb data plan definition file. 4 | 5 | # examples 6 | 7 | - Use a specific data definition file and directory to load into Salesforce 8 | 9 | <%= config.bin %> <%= command.id %> --definition-file data/data-def.json --data-dir data/sit 10 | 11 | - Load into a specific org 12 | 13 | <%= config.bin %> <%= command.id %> -f data/data-def.json -d data/sit --target-org devorg 14 | 15 | - Specify a specific amount of time to poll (in ms) 16 | 17 | <%= config.bin %> <%= command.id %> -f data/data-def.json -d data/sit -o devorg --polling-time-out 10000 18 | 19 | # flags.definition-file.summary 20 | 21 | Path to a dxb data definition file 22 | 23 | # flags.data-dir.summary 24 | 25 | Path of data to import (in CSV format) 26 | 27 | # flags.polling-time-out.summary 28 | 29 | Bulk polling timeout in milliseconds 30 | 31 | # log.importData 32 | 33 | Import data to org... 34 | 35 | # log.preparing 36 | 37 | Preparing file... 38 | 39 | # log.registerImport 40 | 41 | Register import for %s 42 | 43 | # log.loadResult 44 | 45 | Imported: %s succeeded - %s failed 46 | 47 | # log.batch 48 | 49 | Batch %d out of %d... 50 | 51 | # error.dataDirNotExist 52 | 53 | This folder does not exist. 54 | 55 | # error.splitStream 56 | 57 | csvSplitStream failed! %s 58 | 59 | # error.unexpected 60 | 61 | Unexpected issue occurred: %s 62 | -------------------------------------------------------------------------------- /messages/data.masking.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create fieldset for specified object and push to scratch org. 
4 | 5 | # examples 6 | 7 | - Use a specific data masking file, Object Name and Input File to mask 8 | 9 | <%= config.bin %> <%= command.id %> --definition-file config/data-masking-def.json --object-name Account --source-data bulk_output/ACCOUNT.csv 10 | 11 | # flags.definition-file.summary 12 | 13 | Path to a data masking definition file 14 | 15 | # flags.object-name.summary 16 | 17 | Object Name 18 | 19 | # flags.source-data.summary 20 | 21 | Path to a data source file 22 | 23 | # log.initializingProcess 24 | 25 | Initializing process... 26 | 27 | # log.preparing 28 | 29 | Preparing file... 30 | 31 | # log.registerExport 32 | 33 | Register export for %s 34 | 35 | # log.exportResult 36 | 37 | Total records exported: %s record(s) 38 | 39 | # error.objectNotFound 40 | 41 | Data masking definition not found for this object. 42 | -------------------------------------------------------------------------------- /messages/data.query.explain.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | CLI version of the salesforce query plan tool to optimize and speed up queries. 4 | 5 | # examples 6 | 7 | - Use a specific query 8 | 9 | <%= config.bin %> <%= command.id %> --query "select id from Account where BillingCountry = \'Australia\' limit 10" 10 | 11 | - Query against a specific org 12 | 13 | <%= config.bin %> <%= command.id %> -q "select id from Account where BillingCountry = \'Australia\' limit 10" --target-org myOrg 14 | 15 | # flags.query.summary 16 | 17 | A valid SOQL query 18 | 19 | # log.successful 20 | 21 | All files downloaded successfully. 22 | 23 | # log.connecting 24 | 25 | Connecting to org... 26 | 27 | # log.overview 28 | How Query Plan works ? 29 | 30 | Cardinality: The estimated number of records that the leading operation type would return. For example, the number of records returned if using an index table. 31 | Fields: The indexed field(s) used by the Query Optimizer. If the leading operation type is Index, the fields value is Index. Otherwise, the fields value is null. 32 | Leading Operation Type: The primary operation type that Salesforce will use to optimize the query. 33 | Relative Cost: The cost of the query compared to the Force.com Query Optimizer’s selectivity threshold. Values above 1 mean that the query won’t be selective. 34 | Object Cardinality: The approximate record count for the queried object. 35 | 36 | # log.noExplanation 37 | 38 | No Query explanation available 39 | 40 | # log.connected 41 | 42 | Connected to %s... 43 | Access Token: %s 44 | 45 | # error.queryNotValid 46 | 47 | Connection not valid. 48 | 49 | # error.unexpected 50 | 51 | Unexpected issue occurred 52 | 53 | # error.missingQueryFlag 54 | 55 | Must specify query in order to use this command. 
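The overview above lists the Query Plan attributes this command reports (cardinality, fields, leading operation type, relative cost, object cardinality). For context, Salesforce exposes query plans through the REST Query resource's `explain` parameter; the raw call is sketched below with placeholder instance URL, API version, token and SOQL, and no claim is made that this is the exact request the plugin issues.

```shell
# Raw Query Plan feedback from the REST API (all values here are placeholders)
curl "https://yourInstance.my.salesforce.com/services/data/v59.0/query/?explain=SELECT+Id+FROM+Account+LIMIT+10" \
  -H "Authorization: Bearer $ACCESS_TOKEN"
```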
56 | -------------------------------------------------------------------------------- /messages/data.restore.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Restore records to Salesforce 4 | 5 | # examples 6 | 7 | - Use the path to a data backup folder 8 | 9 | <%= config.bin %> <%= command.id %> --backup-dir backup/cycle-1 10 | 11 | - Specify a target org 12 | 13 | <%= config.bin %> <%= command.id %> -d backup/cycle-1 --target-org myOrg 14 | 15 | # flags.backup-dir.summary 16 | 17 | Path to a data backup cycle root folder 18 | 19 | # flags.object-name.summary 20 | 21 | Object Name 22 | 23 | # flags.source-data.summary 24 | 25 | Path to a data source file 26 | 27 | # log.loadSuccessful 28 | 29 | Record nr. %d loaded successfully, id = %s 30 | 31 | # log.loadFailed 32 | 33 | Record nr. %d error occurred, message = %s 34 | 35 | # error.unexpected 36 | 37 | Unexpected issue occurred: %s 38 | -------------------------------------------------------------------------------- /messages/data.transform.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Transform field values from a given query 4 | 5 | # examples 6 | 7 | - Specify transform file, object name and SOQL query 8 | 9 | <%= config.bin %> <%= command.id %> --object-name Account --transform-file transform.json --query "select id from Account where Phone_Country\_\_c = \'Australia\' limit 10" 10 | 11 | - Specify a target org 12 | 13 | <%= config.bin %> <%= command.id %> -s Account -f transform.json -q "select id from Account where Phone_Country\_\_c = \'Australia\' limit 10" --target-org sit 14 | 15 | # flags.object-name.summary 16 | 17 | Salesforce Object Name 18 | 19 | # flags.query.summary 20 | 21 | A valid SOQL query 22 | 23 | # flags.transform-file.summary 24 | 25 | Specify a JSON file where key = source field and value = mapping value 26 | 27 | # log.success 28 | 29 | CSV file successfully processed 30 | -------------------------------------------------------------------------------- /messages/graphql.convert.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Transform a SOQL Query to a GraphQL Query 4 | 5 | # examples 6 | 7 | - Specify any valid query 8 | 9 | <%= config.bin %> <%= command.id %> --query "SELECT Id, Account.Name, Contact.FirstName, Contact.LastName FROM Case WHERE Origin = 'Phone'" 10 | 11 | # flags.query.summary 12 | 13 | A valid SOQL query 14 | 15 | # error.invalidSOQL 16 | 17 | Invalid SOQL query format 18 | -------------------------------------------------------------------------------- /messages/install.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command initializes your sfdx project to support DXB cli. 4 | 5 | # examples 6 | 7 | - Basic installation 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | # log.schemagen 12 | 13 | File added: config/dxb-schemagen-def.json 14 | 15 | # log.welcome 16 | 17 | Welcome to DXB CLI! Happy coding! 18 | 19 | # log.dxbAdded 20 | 21 | DXB added to sfdx-project.json 22 | 23 | # error.connection 24 | 25 | Connectons not established! 
26 | 27 | # spinner.start.setup 28 | 29 | Setting up DXB in your project 30 | 31 | # spinner.stop.done 32 | 33 | Done 34 | -------------------------------------------------------------------------------- /messages/junit.check.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Check the quality of a junit test result and flags anything slower than defined threshold 4 | 5 | # examples 6 | 7 | - Specify an existing JUnit XML file 8 | 9 | <%= config.bin %> <%= command.id %> --junit-path tests/junit.xml 10 | 11 | # flags.junit-path.summary 12 | 13 | Path of junit xml file 14 | 15 | # flags.time-threshold.summary 16 | 17 | Maximum amount of time that a test method should take to execute (in second). 18 | 19 | # flags.flag-as-error.summary 20 | 21 | if set, the command will update add failure tags to junit file and throw an error 22 | 23 | # log.slowUnitTest 24 | 25 | Some unit test have been identified below the standard %ss 26 | 27 | # error.performance.tooSlow 28 | 29 | DXB.PerformanceException: Test method is too slow 30 | 31 | # error.performance 32 | 33 | DXB.PerformanceException 34 | 35 | # error.invalidSOQL 36 | 37 | Invalid SOQL query format 38 | -------------------------------------------------------------------------------- /messages/lwc.test.run.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Run LWC Tests with Jest with additional options. 4 | 5 | # description 6 | 7 | The current Salesforce CLI only runs all LWC tests, which can cause pipelines to last longer than needed if not all tests should be run. 8 | Developers can run the test they want through VS Code test execution or using the Jest CLI. 9 | This command provides a wrapper to run all tests or specific ones. 10 | 11 | # examples 12 | 13 | - Run all LWC tests: 14 | <%= config.bin %> <%= command.id %> 15 | 16 | - Run specific LWC tests: 17 | <%= config.bin %> <%= command.id %> --test lwcComponentOne --test lwcComponentTwo 18 | 19 | - Produce error when tests fail: 20 | <%= config.bin %> <%= command.id %> --fail-on-error 21 | 22 | # flags.test.summary 23 | 24 | The location of the component to test. 25 | 26 | # flags.fail-on-error.summary 27 | 28 | When set, the command will raise an error if a test fails. 29 | 30 | # flags.manifest.summary 31 | 32 | Location of a package.xml file. 33 | 34 | # flags.root-dir.summary 35 | 36 | Specify the location of where the lwc directory is located. 37 | 38 | # success 39 | 40 | All tests ran without issues. 41 | 42 | # error.issues 43 | 44 | Not all tests are successful: 45 | %s 46 | 47 | # error.processManifest 48 | 49 | Unable to process content from manifest 50 | 51 | # error.invalidComponents 52 | 53 | None of the specified tests were found. Please check if the input is correct 54 | 55 | # warning.issues 56 | 57 | Not all tests are successful: 58 | %s 59 | -------------------------------------------------------------------------------- /messages/mdapi.convert.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Enhances mdapi convert. 4 | 5 | # description 6 | 7 | Override mdapi convert standard behavior that create a dup file if file exist already. 8 | Instead it delete old file and remame .dup by actual file. 
9 | 10 | # examples 11 | 12 | - Specify a directory containing the MetaData API-formatted metadata 13 | 14 | <%= config.bin %> <%= command.id %> --root-dir tmp 15 | 16 | - Specify a directory to store the output 17 | 18 | <%= config.bin %> <%= command.id %> -r tmp --output-dir out 19 | 20 | # flags.root-dir.summary 21 | 22 | The root directory containing the Metadata API–formatted metadata 23 | 24 | # flags.output-dir.summary 25 | 26 | The output directory to store the source–formatted files 27 | -------------------------------------------------------------------------------- /messages/object.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create a new custom object. 4 | 5 | # description 6 | 7 | Create a new custom object, prompting the user for specific object settings 8 | 9 | # examples 10 | 11 | - Specify the SObject to create 12 | 13 | <%= config.bin %> <%= command.id %> --object-name Invoice 14 | 15 | - Choose if you want to push the new Object to the connected org 16 | 17 | <%= config.bin %> <%= command.id %> -s Invoice --push 18 | 19 | - Specify a target org to deploy to 20 | 21 | <%= config.bin %> <%= command.id %> -s Invoice -p --target-org myOrg 22 | 23 | # flags.object-name.summary 24 | 25 | Name of custom object 26 | 27 | # flags.push.summary 28 | 29 | Push the changes to the target org 30 | 31 | # log.objectCreated 32 | 33 | === Custom Object created successfully: %s 34 | 35 | # log.sharingControlledByParent 36 | 37 | When sharing model is set as controlled by parent, you must define master details : 38 | 39 | # error.pushFailed 40 | 41 | Unable to push source to org! 42 | 43 | # error.exists 44 | 45 | This object already exists 46 | 47 | # data.object 48 | 49 | 50 | 51 | 52 | Accept 53 | Default 54 | 55 | 56 | CancelEdit 57 | Default 58 | 59 | 60 | Clone 61 | Default 62 | 63 | 64 | Delete 65 | Default 66 | 67 | 68 | Edit 69 | Default 70 | 71 | 72 | List 73 | Default 74 | 75 | 76 | New 77 | Default 78 | 79 | 80 | SaveEdit 81 | Default 82 | 83 | 84 | Tab 85 | Default 86 | 87 | 88 | View 89 | Default 90 | 91 | false 92 | SYSTEM 93 | Deployed 94 | {{description}} 95 | true 96 | true 97 | false 98 | false 99 | true 100 | true 101 | true 102 | true 103 | true 104 | 105 | 106 | 107 | Text 108 | 109 | {{label}}s 110 | 111 | {{sharingmodel}} 112 | 113 | 114 | # data.master 115 | 116 | 117 | 118 | {{fieldname}} 119 | false 120 | 121 | {{masterobject}} 122 | {{relationshipLabel}} 123 | {{relationshipName}} 124 | 0 125 | true 126 | false 127 | false 128 | MasterDetail 129 | false 130 | 131 | 132 | # prompt.message.masterObject 133 | 134 | Master object(API name), i.e.: Account, Invoice\_\_c: 135 | 136 | # prompt.message.masterLabel 137 | 138 | Master field label: 139 | 140 | # prompt.message.relationshipName 141 | 142 | Relationship name(i.e.:"Drawdowns", "Invoice Lines") 143 | 144 | # prompt.message.sharingModel 145 | 146 | Sharing Model (Private|Public|ControlledByParent): 147 | 148 | # prompt.message.description 149 | 150 | Description: 151 | -------------------------------------------------------------------------------- /messages/object.fields.list.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Retrieve list of fields of specified object. 4 | 5 | # description 6 | 7 | Override mdapi convert standard behavior that create a dup file if file exist already. 8 | Instead it delete old file and remame .dup by actual file. 
9 | 10 | # examples 11 | 12 | - Specify an object name to fetch the fields for 13 | 14 | <%= config.bin %> <%= command.id %> --object-name Account 15 | 16 | - Specify a filter to only include fields that include the filter in the Name 17 | 18 | <%= config.bin %> <%= command.id %> -s Account --filter mail 19 | 20 | - Specify a target org to retrieve from 21 | 22 | <%= config.bin %> <%= command.id %> -s Account -f mail --target-org myOrg 23 | 24 | # flags.object-name.summary 25 | 26 | Name of custom object 27 | 28 | # flags.filter.summary 29 | 30 | Search filter 31 | 32 | # log.retrieveSchema 33 | 34 | Retrieving %s fields from schema... 35 | -------------------------------------------------------------------------------- /messages/object.fieldset.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create a new fieldset for an object. Push to org if needed. 4 | 5 | # examples 6 | 7 | - Specify the SObject to create 8 | 9 | <%= config.bin %> <%= command.id %> --object-name Account --fieldset-name FS_1 10 | 11 | - Choose if you want to push the new Object to the connected org 12 | 13 | <%= config.bin %> <%= command.id %> -s Account -n FS_1 --push 14 | 15 | - Specify a target org to deploy to 16 | 17 | <%= config.bin %> <%= command.id %> -s Account -n FS_1 -p --target-org myOrg 18 | 19 | - Display all fields on the Object before creating the field set 20 | 21 | <%= config.bin %> <%= command.id %> -s Account -n FS_1 -p --retrieve-fields 22 | 23 | # flags.object-name.summary 24 | 25 | Name of custom object 26 | 27 | # flags.fieldset-name.summary 28 | 29 | Name of fieldset 30 | 31 | # flags.retrieve-fields.summary 32 | 33 | Retrieve and display sobject fields in terminal 34 | 35 | # flags.push.summary 36 | 37 | Push the changes to the target org 38 | 39 | # log.fieldsetCreated 40 | 41 | === Fieldset created successfully: %s 42 | 43 | # log.availableFields 44 | 45 | === Available Object Fields: 46 | %s 47 | 48 | # log.getFields 49 | 50 | Retrieve schema for %s from %s 51 | 52 | # error.pushFailed 53 | 54 | Unable to push source to org! 55 | 56 | # error.exists 57 | 58 | This object already exists 59 | 60 | # data.fieldset 61 | 62 | 63 | 64 | {{fullname}} 65 | {{description}} 66 | {{fieldlist}} 67 | 68 | 69 | 70 | # prompt.message.fields 71 | 72 | Fields (APIName with comma separated): 73 | 74 | # prompt.message.description 75 | 76 | Description: 77 | -------------------------------------------------------------------------------- /messages/object.prefix.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Retrieve key prefix of specified sobject or retrieve sobject from specified key 4 | 5 | # examples 6 | 7 | - Specify the SObject to get the prefix from 8 | 9 | <%= config.bin %> <%= command.id %> --object-name Account 10 | 11 | - Specify the prefix for which you want the object name for 12 | 13 | <%= config.bin %> <%= command.id %> --prefix 001 14 | 15 | # flags.object-name.summary 16 | 17 | Name of custom object 18 | 19 | # flags.prefix.summary 20 | 21 | Prefix of the object 22 | 23 | # log.fieldsetCreated 24 | 25 | === Fieldset created successfully: %s 26 | 27 | # log.availableFields 28 | 29 | === Available Object Fields: 30 | %s 31 | 32 | # log.getFields 33 | 34 | Retrieve schema for %s from %s 35 | 36 | # log.globalSchema 37 | 38 | Retrieving global schema... 
39 | 40 | # log.result.prefix 41 | 42 | ==== Object Prefix: %s 43 | 44 | # log.result.objectname 45 | 46 | ==== Object Name: %s 47 | 48 | # error.pushFailed 49 | 50 | Unable to push source to org! 51 | 52 | # error.prefixNotFound 53 | 54 | Prefix not found. 55 | 56 | # error.invalidArguments 57 | 58 | You must specify either objectname or prefix. 59 | 60 | # error.invalidConnection 61 | 62 | Connection not valid. 63 | 64 | # error.unexpected 65 | 66 | Unexpected error 67 | 68 | # data.fieldset 69 | 70 | 71 | 72 | {{fullname}} 73 | {{description}} 74 | {{fieldlist}} 75 | 76 | 77 | 78 | # prompt.message.fields 79 | 80 | Fields (APIName with comma separated): 81 | 82 | # prompt.message.description 83 | 84 | Description: 85 | -------------------------------------------------------------------------------- /messages/object.relationships.list.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Retrieve list of child relationships of a specified object. 4 | 5 | # description 6 | 7 | Override mdapi convert standard behavior that create a dup file if file exist already. 8 | Instead it delete old file and remame .dup by actual file. 9 | 10 | # examples 11 | 12 | - Specify an object name to fetch the relationships for 13 | 14 | <%= config.bin %> <%= command.id %> --object-name Account 15 | 16 | - Specify a filter to only include relationships that include the filter in the Name 17 | 18 | <%= config.bin %> <%= command.id %> -s Account --filter Contact 19 | 20 | - Specify a target org to retrieve from 21 | 22 | <%= config.bin %> <%= command.id %> -s Account -f Contact --target-org myOrg 23 | 24 | # flags.object-name.summary 25 | 26 | Name of custom object 27 | 28 | # flags.filter.summary 29 | 30 | Search filter 31 | 32 | # log.retrieveSchema 33 | 34 | Retrieving %s child relationships from schema... 35 | -------------------------------------------------------------------------------- /messages/object.vr.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command create a validation rule against specified object. 4 | 5 | # description 6 | 7 | Override mdapi convert standard behavior that create a dup file if file exist already. 8 | Instead it delete old file and remame .dup by actual file. 9 | 10 | # examples 11 | 12 | - Specify the SObject and name for the Validation Rule 13 | 14 | <%= config.bin %> <%= command.id %> --name BlockNameChange --object-name Account 15 | 16 | - Choose if you want to push the new Validation Rule to the connected org 17 | 18 | <%= config.bin %> <%= command.id %> -s Account -n BlockNameChange --push 19 | 20 | - Specify a target org to deploy to 21 | 22 | <%= config.bin %> <%= command.id %> -s Account -n BlockNameChange -p --target-org myOrg 23 | 24 | # flags.object-name.summary 25 | 26 | Name of custom object 27 | 28 | # flags.name.summary 29 | 30 | Name of the validation rule 31 | 32 | # flags.push.summary 33 | 34 | Push the changes to the target org 35 | 36 | # log.vrCreated 37 | 38 | Validation Rule created successfully at %s 39 | 40 | # error.pushFailed 41 | 42 | Unable to push source to org! 
43 | -------------------------------------------------------------------------------- /messages/org.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create scratch org 4 | 5 | # examples 6 | 7 | - Include packages, mark the new org as default for your project and give it an alias 8 | 9 | <%= config.bin %> <%= command.id %> --include-packages --default-org --set-alias myscratchorg 10 | 11 | - Assign a custom number of duration to the scratch org, mark it as default for your project and give it an alias 12 | 13 | <%= config.bin %> <%= command.id %> --duration-days 10 --default-org --set-alias myscratchorg 14 | 15 | # flags.set-alias.summary 16 | 17 | Alias of scratch org 18 | 19 | # flags.include-packages.summary 20 | 21 | Include packages from cli config file 22 | 23 | # flags.default-org.summary 24 | 25 | Mark as default org 26 | 27 | # flags.duration-days.summary 28 | 29 | Duration of the scratch org (in days) (default:30, min:1, max:30) 30 | 31 | # flags.include-tracking-history.summary 32 | 33 | Remove field tracking history tag from object 34 | 35 | # log.welcome 36 | 37 | Welcome to dxb CLI! We are now creating your scratch org %s... 38 | 39 | # log.installing 40 | 41 | Installing your (un)managed packages... 42 | 43 | # log.installed 44 | 45 | Successfully installed package %s 46 | 47 | # log.fieldsetCreated 48 | 49 | === Fieldset created successfully: %s 50 | 51 | # log.availableFields 52 | 53 | === Available Object Fields: 54 | %s 55 | 56 | # log.getFields 57 | 58 | Retrieve schema for %s from %s 59 | 60 | # log.createUser 61 | 62 | Creating testing user... 63 | 64 | # log.importData 65 | 66 | Importing data from data plan... 67 | 68 | # log.trackingHistory 69 | 70 | Disabling Feed Tracking History for %s 71 | 72 | # log.manualConfig 73 | 74 | Due to some limitations with DX scratch org, you must enable manually the following feature(s) before to proceed: 75 | 76 | # log.packages 77 | 78 | Installing your %s legacy packages... 79 | 80 | # log.closing 81 | 82 | Thank you for your patience! You can now enjoy your scrath org. Happy coding! 83 | 84 | # log.userCreated 85 | 86 | User has been created: 87 | 88 | # log.roleNotFound 89 | 90 | Default role not found 91 | 92 | # log.noSetupData 93 | 94 | No setup data... 95 | 96 | # log.globalSchema 97 | 98 | Retrieving global schema... 99 | 100 | # log.result.prefix 101 | 102 | ==== Object Prefix: %s 103 | 104 | # log.result.objectname 105 | 106 | ==== Object Name: %s 107 | 108 | # error.installingPackage 109 | 110 | Error while installing package %s 111 | 112 | # error.installing 113 | 114 | Unable to install (un)managed packages! 115 | 116 | # error.createUser 117 | 118 | Unable to create user on scratch org! 119 | 120 | # error.importData 121 | 122 | Unable to import data on scratch org! 123 | 124 | # error.pushFailed 125 | 126 | Unable to push source to org! 127 | 128 | # error.packages 129 | 130 | Unable to install your %s legacy packages! 131 | 132 | # error.definitionFile 133 | 134 | Scratch definition file does not exist! 135 | 136 | # error.badConfig 137 | 138 | Plugin definition dxb is missing in sfdx-project.json, make sure to setup plugin. 139 | 140 | # error.prefixNotFound 141 | 142 | Prefix not found. 143 | 144 | # error.invalidArguments 145 | 146 | You must specify either objectname or prefix. 147 | 148 | # error.invalidConnection 149 | 150 | Connection not valid. 
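The org.create messages above outline the scratch-org workflow: create the org, install packages, create a test user, and import data. Using the same shell-out pattern the plugin applies elsewhere (for example in community publish), the creation step could be sketched roughly as follows; the definition-file path is an assumption, not a value read from the plugin:

```typescript
import { execSync } from 'child_process';

// Sketch only: create a scratch org with an alias and duration, optionally as the project default.
// Assumes config/project-scratch-def.json exists ("Scratch definition file does not exist!" otherwise).
export function createScratchOrg(alias: string, durationDays = 30, setDefault = false): void {
  const flags = [
    '--definition-file config/project-scratch-def.json',
    `--alias ${alias}`,
    `--duration-days ${durationDays}`, // default:30, min:1, max:30
    setDefault ? '--set-default' : '',
  ].join(' ');
  execSync(`sf org create scratch ${flags}`, { stdio: 'inherit' });
}
```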
151 | 152 | # error.unexpected 153 | 154 | Unexpected error 155 | 156 | # data.fieldset 157 | 158 | 159 | 160 | {{fullname}} 161 | {{description}} 162 | {{fieldlist}} 163 | 164 | 165 | 166 | # prompt.message.continue 167 | 168 | Would you like to continue? (Y/N) 169 | 170 | # prompt.message.description 171 | 172 | Description: 173 | -------------------------------------------------------------------------------- /messages/org.data.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Replace variables in specific files 4 | 5 | # description 6 | 7 | Different orgs can require different settings i.e. hardcoded URLs. This command uses an environment mapping JSON file to replace variables placed in the metadata files with actual values depending on the environment that is passed to the command. 8 | 9 | # examples 10 | 11 | - Basic usage 12 | 13 | <%= config.bin %> <%= command.id %> --config config/cty-env-mapping.json --environment SIT 14 | 15 | # flags.config.summary 16 | 17 | Path to config file 18 | 19 | # flags.environment.summary 20 | 21 | Set the environment for which rules in config file should apply to. 22 | 23 | # log.welcome 24 | 25 | Welcome to dxb CLI! We are now creating your scratch org %s... 26 | 27 | # log.installing 28 | 29 | Installing your (un)managed packages... 30 | 31 | # log.processing 32 | 33 | Processing: %s 34 | -------------------------------------------------------------------------------- /messages/org.setdefault.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Set default username and org wide email in metadata such as workflow based on target scratch org 4 | 5 | # examples 6 | 7 | - Use the default package directory from project.json on your default org 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Specify a directory that contains metadata to replace with default values for your default org 12 | 13 | <%= config.bin %> <%= command.id %> --base-dir src 14 | 15 | - Specify a target org when modifying metadata with default values 16 | 17 | <%= config.bin %> <%= command.id %> --d src --target-org myOrg 18 | 19 | # flags.base-dir.summary 20 | 21 | Path of base directory 22 | 23 | # log.welcome 24 | 25 | Replacing unspported metadata for scratch org i.e.: field update on specific user, send email from org wide email... 26 | 27 | # error.badConfig 28 | 29 | Plugin definition dxb is missing in sfdx-project.json, make sure to setup plugin. 30 | 31 | # error.prefixNotFound 32 | 33 | Prefix not found. 34 | 35 | # error.invalidArguments 36 | 37 | You must specify either objectname or prefix. 38 | 39 | # error.invalidConnection 40 | 41 | Connection not valid. 42 | 43 | # error.unexpected 44 | 45 | Unexpected error 46 | 47 | # data.fieldset 48 | 49 | 50 | 51 | {{fullname}} 52 | {{description}} 53 | {{fieldlist}} 54 | 55 | 56 | 57 | # prompt.message.continue 58 | 59 | Would you like to continue? 
(Y/N) 60 | 61 | # prompt.message.description 62 | 63 | Description: 64 | -------------------------------------------------------------------------------- /messages/org.setdefaults.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Set default username and org wide email in metadata such as workflow based on target scratch org 4 | 5 | # examples 6 | 7 | - Use the default package directory from project.json on your default org 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Specify a directory that contains metadata to replace with default values for your default org 12 | 13 | <%= config.bin %> <%= command.id %> --base-dir src 14 | 15 | - Specify a target org when modifying metadata with default values 16 | 17 | <%= config.bin %> <%= command.id %> --d src --target-org myOrg 18 | 19 | # flags.base-dir.summary 20 | 21 | Path of base directory 22 | 23 | # log.welcome 24 | 25 | Replacing unspported metadata for scratch org i.e.: field update on specific user, send email from org wide email... 26 | 27 | # error.badConfig 28 | 29 | Plugin definition dxb is missing in sfdx-project.json, make sure to setup plugin. 30 | 31 | # error.prefixNotFound 32 | 33 | Prefix not found. 34 | 35 | # error.invalidArguments 36 | 37 | You must specify either objectname or prefix. 38 | 39 | # error.invalidConnection 40 | 41 | Connection not valid. 42 | 43 | # error.unexpected 44 | 45 | Unexpected error 46 | 47 | # data.fieldset 48 | 49 | 50 | 51 | {{fullname}} 52 | {{description}} 53 | {{fieldlist}} 54 | 55 | 56 | 57 | # prompt.message.continue 58 | 59 | Would you like to continue? (Y/N) 60 | 61 | # prompt.message.description 62 | 63 | Description: -------------------------------------------------------------------------------- /messages/package.dependencies.install.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Install package dependencies from sfdx-project.json 4 | 5 | # examples 6 | 7 | - Install the package dependencies inside sfdx-project.json to the connected org. 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Install the package dependencies inside sfdx-project.json to the target org. 12 | 13 | <%= config.bin %> <%= command.id %> --target-org myOrg 14 | 15 | # log.welcome 16 | 17 | Replacing unspported metadata for scratch org i.e.: field update on specific user, send email from org wide email... 18 | 19 | # log.installPackage 20 | 21 | Successfully installed package %s 22 | 23 | # log.packageVersion 24 | 25 | Package version %s 26 | 27 | # log.alias 28 | 29 | Alias %s 30 | 31 | # error.cannotInstall 32 | 33 | Unable to install packages dependencies! %s 34 | 35 | # error.errorInstall 36 | 37 | Error while installing package %s 38 | -------------------------------------------------------------------------------- /messages/permissionset.clean.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command removes fls and object permissions where all access are set to "false" 4 | 5 | # examples 6 | 7 | - Clean the permissions inside a specific permission set file. 
8 | 9 | <%= config.bin %> <%= command.id %> --file force-app/main/default/permissionsets/Social_Customer_Service_Permission_Set.permissionset-meta.xml 10 | 11 | - Clean the permissions inside all permissionse files 12 | 13 | <%= config.bin %> <%= command.id %> --root-dir src/permissionsets 14 | 15 | - Clean the permissions inside a specific permissionset 16 | 17 | <%= config.bin %> <%= command.id %> --permissionset-name RW_All 18 | 19 | # flags.file.summary 20 | 21 | File path of permissionset to clean 22 | 23 | # flags.root-dir.summary 24 | 25 | Source path to permissionsets metadata directory, i.e.: src/permissionsets or force-app/main/default/permissionsets 26 | 27 | # flags.permissionset-name.summary 28 | 29 | Permissionset name to clean 30 | 31 | # log.couldNotFind 32 | 33 | Could not find %s 34 | 35 | # log.couldNotCleanup 36 | 37 | Could not clean up %s: %s 38 | 39 | # log.packageVersion 40 | 41 | Package version %s 42 | 43 | # log.alias 44 | 45 | Alias 46 | 47 | # error.cannotInstall 48 | 49 | Unable to install packages dependencies! %s 50 | 51 | # error.errorInstall 52 | 53 | Error while installing package %s 54 | -------------------------------------------------------------------------------- /messages/permissionset.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create a permission set from a profile without layout and category group visiblity 4 | 5 | # examples 6 | 7 | - Create a named permissionset from an existing profile 8 | 9 | <%= config.bin %> <%= command.id %> --profile force-app/main/default/profile/Admin.profile-meta.xml --permissionset-name AdminPermissionSet 10 | 11 | # flags.profile.summary 12 | 13 | File path of profile to create the permission set from 14 | 15 | # flags.permissionset-name.summary 16 | 17 | Name of the permision set to create 18 | 19 | # flags.output-dir.summary 20 | 21 | Output path of the permision set to create 22 | 23 | # flags.has-activation-required.summary 24 | 25 | Indicates whether the permission set requires an associated active session or not. This field is available in API version 53.0 and later. 
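The permissionset.create messages above describe deriving a permission set from a profile while dropping layout assignments and category group visibilities. A rough sketch using the XML libraries the plugin already depends on (xml2js and js2xmlparser); the exact list of skipped sections is an assumption, not the command's actual behavior:

```typescript
import * as fs from 'fs-extra';
import * as xml2js from 'xml2js';
import * as js2xmlparser from 'js2xmlparser';

// Sketch: convert a Profile XML into a PermissionSet XML, skipping sections that
// only exist on profiles (layoutAssignments, categoryGroupVisibilities, login settings, ...).
export async function profileToPermissionSet(profilePath: string, label: string, outputPath: string): Promise<void> {
  const profile: any = (await xml2js.parseStringPromise(fs.readFileSync(profilePath, 'utf-8'))).Profile;
  const skipped = ['$', 'layoutAssignments', 'categoryGroupVisibilities', 'loginHours', 'loginIpRanges'];
  const permissionSet: any = { '@': { xmlns: 'http://soap.sforce.com/2006/04/metadata' }, label };
  for (const [key, value] of Object.entries(profile)) {
    if (!skipped.includes(key)) permissionSet[key] = value;
  }
  fs.writeFileSync(outputPath, js2xmlparser.parse('PermissionSet', permissionSet, { declaration: { encoding: 'UTF-8' } }));
}
```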
26 | 27 | # flags.license.summary 28 | 29 | License name of the permision set to create 30 | 31 | # flags.description.summary 32 | 33 | Description of the permission set to create 34 | 35 | # log.description 36 | 37 | Permission Set created from %s 38 | 39 | # log.success 40 | 41 | Permissionset created: %s 42 | -------------------------------------------------------------------------------- /messages/profile.build.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Convert Profile in JSON format to one XML 4 | 5 | # examples 6 | 7 | - Use the default path to build the profile from every Profile JSON 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Specify a path that contains profile JSON 12 | 13 | <%= config.bin %> <%= command.id %> --source-dir src/profiles 14 | 15 | - Specify a specific Profile name to be converted to XML 16 | 17 | <%= config.bin %> <%= command.id %> --profile-name Admin 18 | 19 | # flags.profile-name.summary 20 | 21 | Profile name to be converted 22 | 23 | # flags.source-dir.summary 24 | 25 | Path to profile files 26 | 27 | # warning.noFiles 28 | 29 | No source files were found in %s 30 | -------------------------------------------------------------------------------- /messages/profile.convert.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Convert profile xml into small chunks of json files 4 | 5 | # examples 6 | 7 | - Use the default path to split the profile XML into separate JSON files 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Specify a path that contains profile XML 12 | 13 | <%= config.bin %> <%= command.id %> --source-dir src/profiles 14 | 15 | - Specify a specific Profile name to be converted to JSON files 16 | 17 | <%= config.bin %> <%= command.id %> --profile-name Admin 18 | 19 | # flags.profile-name.summary 20 | 21 | Profile name to be converted 22 | 23 | # flags.source-dir.summary 24 | 25 | Path to profile files 26 | 27 | # log.readFile 28 | 29 | Read file %s 30 | 31 | # log.createdFolder 32 | 33 | Created profile folder: %s.profile-meta.xml 34 | 35 | # log.converted 36 | 37 | Converted: %s 38 | 39 | # warning.noFiles 40 | 41 | No source files were found in %s 42 | 43 | # error.couldNotConvert 44 | 45 | Could not convert %s 46 | 47 | # error.profileNotExist 48 | 49 | Profile does not exist. 50 | 51 | # error.couldNotSplit 52 | 53 | Could not split %s 54 | -------------------------------------------------------------------------------- /messages/profile.passwordpolicies.fix.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command allows password policies deployment to ignore timestamp in file name 4 | 5 | # examples 6 | 7 | - Use the default path to fix all profile password policies 8 | 9 | <%= config.bin %> <%= command.id %> 10 | 11 | - Use a specific path to fix all profile password policies 12 | 13 | <%= config.bin %> <%= command.id %> --source-dir src/passwordPolicies 14 | 15 | # flags.source-path.summary 16 | 17 | Path to profile password policy files 18 | 19 | # warning.noFiles 20 | 21 | No source files were found in %s 22 | -------------------------------------------------------------------------------- /messages/schema.doc.generate.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command-line can generate technical design documentation for a Salesforce org. 
The tool retrieves metadata information about standard and custom objects, Apex classes, triggers, REST resources, named credentials, and connected apps from the org and then creates a PDF document containing the collected information. The tool uses the pdfmake library to generate the PDF document based on an HTML template and a CSS stylesheet. To start using this command, run sf dxb install or copy schema gen def json file from Github: https://github.com/davidbrowaeys/DXB/blob/master/src/lib/documentinfo.json. 4 | 5 | # examples 6 | 7 | - Specify a path to the pdf configuration file 8 | 9 | <%= config.bin %> <%= command.id %> --target-org myenv --pdf-config config/documentinfo.json', 10 | 11 | - Specify a package.xml file 12 | 13 | <%= config.bin %> <%= command.id %> -o myenv -c config/docmentinfo.json -x manifest/package.xml 14 | 15 | # flags.pdf-config.summary 16 | 17 | A required string parameter that represents the file path of a JSON configuration file for the PDF document generation. 18 | 19 | # flags.stylesheet.summary 20 | 21 | An optional string parameter that represents the file path of a stylesheet for the generated HTML document. If not specified, the default Bootstrap stylesheet will be used 22 | 23 | # flags.html-template.summary 24 | 25 | An optional string parameter that represents the file path of an HTML template for the PDF document generation. If not specified, the default DXB template will be used. 26 | 27 | # flags.manifest.summary 28 | 29 | File path of manifest(package.xml) to generate the PDF document for. If not specified, DXB will consider all custom objects (except managed packages). 30 | 31 | # flags.format.summary 32 | 33 | Format of the generated doc, options : pdf, html, docx. 34 | 35 | # log.readFile 36 | 37 | Read file %s 38 | 39 | # log.createdFolder 40 | 41 | Created profile folder: %s.profile-meta.xml 42 | 43 | # log.converted 44 | 45 | Converted: %s 46 | 47 | # warning.ignored 48 | 49 | %s ignored! 50 | 51 | # error.invalidFormat 52 | 53 | Invalid format: %s. We support only html, pdf and docx. 54 | 55 | # error.noHTMLTemplate 56 | 57 | HTML Template not found: %s 58 | 59 | # error.noStylesheetTemplate 60 | 61 | Stylesheet file not found: %s 62 | 63 | # error.noPDFConfig 64 | 65 | PDF Metadata Config Json file not found: %s 66 | 67 | # error.metadata.incorrectStandard 68 | 69 | You must define list of standard objects as follow "metadata": { stdobjects: ["Account","Contact"]} in your pdf document: %s 70 | 71 | # spinner.start.createDoc 72 | 73 | Create %s document 74 | 75 | # spinner.start.retrieveApex 76 | 77 | Retrieve Apex classes and triggers 78 | 79 | # spinner.start.retrieveAura 80 | 81 | Retrieve Aura Component info 82 | 83 | # spinner.start.retrieveConnected 84 | 85 | Retrieve connected apps 86 | 87 | # spinner.start.retrieveCustom 88 | 89 | Retrieve custom object list 90 | 91 | # spinner.start.retrieveCustomMetadata 92 | 93 | Retrieve custom object metadata 94 | 95 | # spinner.start.retrieveFlow 96 | 97 | Retrieve flow and process builders 98 | 99 | # spinner.start.retrieveInfo 100 | 101 | Retrieve organization info 102 | 103 | # spinner.start.retrieveNameCredentials 104 | 105 | Retrieve name credentials 106 | 107 | # spinner.start.retrieveStandard 108 | 109 | Retrieve standard object list 110 | 111 | # spinner.start.retrieveStandardMetadata 112 | 113 | Retrieve standard object metadata 114 | 115 | # spinner.stop.done 116 | 117 | Done 118 | 119 | # spinner.stop.found 120 | 121 | %d found! 
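Only one part of the --pdf-config shape is pinned down by the messages above: error.metadata.incorrectStandard requires a metadata.stdobjects array. A small sketch of loading the config and validating that requirement; any other keys of documentinfo.json are deliberately left untyped here because they are not described in this section:

```typescript
import * as fs from 'fs-extra';

// Sketch: load the --pdf-config JSON and enforce the shape implied by error.metadata.incorrectStandard.
type DocumentInfo = { metadata?: { stdobjects?: string[] } } & Record<string, unknown>;

export function loadPdfConfig(configPath: string): DocumentInfo {
  if (!fs.existsSync(configPath)) {
    throw new Error(`PDF Metadata Config Json file not found: ${configPath}`);
  }
  const config: DocumentInfo = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
  if (!Array.isArray(config.metadata?.stdobjects)) {
    throw new Error(`You must define "metadata": { "stdobjects": ["Account","Contact"] } in your pdf config: ${configPath}`);
  }
  return config;
}
```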
122 | -------------------------------------------------------------------------------- /messages/source.delta.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | This command generates a delta package by running a git diff. 4 | 5 | # examples 6 | 7 | - Specify a specific tag 8 | 9 | <%= config.bin %> <%= command.id %> --mode tags --delta-key mytag 10 | 11 | - Specify a branch to compare with 12 | 13 | <%= config.bin %> <%= command.id %> --mode branch --delta-key origin/master 14 | 15 | - Specify an output directory 16 | 17 | <%= config.bin %> <%= command.id %> -m branch -k origin/master --output-dir deltamanifest 18 | 19 | - Specify a commit to compare with 20 | 21 | <%= config.bin %> <%= command.id %> --mode commitid --delta-key 123456 22 | 23 | # flags.mode.summary 24 | 25 | The mode to base the delta on. Three options are available: commitid|tags|branch 26 | 27 | # flags.delta-key.summary 28 | 29 | Value to compare against, depending on the mode: a commit id, a tag prefix or name, or a branch name 30 | 31 | # flags.base-dir.summary 32 | 33 | Path of base directory, i.e.: force-app/main/default 34 | 35 | # flags.output-dir.summary 36 | 37 | Output directory path of the delta package.xml to generate, i.e.: ./manifest 38 | 39 | # flags.granular.summary 40 | 41 | If true, the delta will be granular for Custom Objects; otherwise the whole object will be deployed 42 | 43 | # flags.destructive-changes.summary 44 | 45 | Indicates whether destructivePackage.xml should be generated (experimental, not working yet) 46 | 47 | # flags.rollback.summary 48 | 49 | Indicates whether a rollback of previous changes is required 50 | 51 | # error.invalidMode 52 | 53 | Invalid mode: %s. We support only tags, branch and commitid as values. 54 | -------------------------------------------------------------------------------- /messages/source.fetchtest.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Calculates the relevant test classes based on a source path. This command is intended to be used after source:delta. 4 | 5 | # examples 6 | 7 | - Specify a file to retrieve the related test classes for 8 | 9 | <%= config.bin %> <%= command.id %> --source-path force-app/main/default/classes/GenericApex.cls 10 | 11 | - Specify a manifest file that contains metadata types to search through 12 | 13 | <%= config.bin %> <%= command.id %> --manifest manifest/package.xml 14 | 15 | - Filter metadata types to use 16 | 17 | <%= config.bin %> <%= command.id %> -x manifest/package.xml --metadata-type classes 18 | 19 | - Include a regex to specify test class naming convention 20 | 21 | <%= config.bin %> <%= command.id %> -x manifest/package.xml -t classes -n "\*.T" 22 | 23 | # flags.source-path.summary 24 | 25 | Path of source content that contains reference(s) to Apex tests.
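The source.delta messages above describe generating a delta package.xml from a git diff. A simplified sketch of that idea is shown below; it maps only Apex classes, and the exact git invocation is an assumption rather than the command's implementation:

```typescript
import { execSync } from 'child_process';
import * as fs from 'fs-extra';

// Sketch: list files changed since a branch/tag/commit and emit a minimal package.xml.
// Only ApexClass is mapped here; a real implementation would map every metadata type.
export function buildDeltaManifest(deltaKey: string, outputDir = 'manifest'): void {
  const changed = execSync(`git diff --name-only ${deltaKey} HEAD`).toString().split('\n').filter(Boolean);
  const classNames = changed
    .filter((f) => f.endsWith('.cls'))
    .map((f) => f.replace(/^.*\//, '').replace('.cls', ''));
  const members = classNames.map((name) => `        <members>${name}</members>`).join('\n');
  const packageXml = `<?xml version="1.0" encoding="UTF-8"?>
<Package xmlns="http://soap.sforce.com/2006/04/metadata">
    <types>
${members}
        <name>ApexClass</name>
    </types>
    <version>59.0</version>
</Package>`;
  fs.ensureDirSync(outputDir);
  fs.writeFileSync(`${outputDir}/package.xml`, packageXml);
}
```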
26 | 27 | # flags.manifest.summary 28 | 29 | File path for manifest (package.xml) of components to retrieve 30 | 31 | # flags.base-dir.summary 32 | 33 | Path of base directory, i.e.: force-app/main/default 34 | 35 | # flags.metadata-type.summary 36 | 37 | Directory name of the metadata to be used in the search 38 | 39 | # flags.test-class-name-regex.summary 40 | 41 | Regex for test classes naming convention 42 | 43 | # error.requiredFlags 44 | 45 | Sourcepath or manifest is required 46 | 47 | # error.processManifest 48 | 49 | Unable to process content from manifest 50 | -------------------------------------------------------------------------------- /messages/source.scanner.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Extends scanner-code plugin and throw error if severity 1 rule are met. 4 | 5 | # examples 6 | 7 | - Specify a file that contains code scanner results 8 | 9 | <%= config.bin %> <%= command.id %> --file apex_pmd_results.json 10 | 11 | - Specify a path to a JSON file that contains a list of Apex Classes to exclude. 12 | 13 | <%= config.bin %> <%= command.id %> -f apex_pmd_results.json --excluded-files exclude_apex.json 14 | 15 | # flags.file.summary 16 | 17 | File path of code scanner results 18 | 19 | # flags.excluded-files.summary 20 | 21 | File path of classes to exclude 22 | 23 | # flags.severity.summary 24 | 25 | Severity threshold, if set to 3 it will throw an error for all violations where severity is 3 and lower 26 | 27 | # flags.high-severity-rules.summary 28 | 29 | Name of the rules you want to mark a high severity 30 | 31 | # log.calculating 32 | 33 | Calculating quality gate... 34 | 35 | # log.noExcludedFiles 36 | 37 | No excluded files found 38 | 39 | # error.violations 40 | 41 | We have detected some very bad violations in your code. Run sfdx scanner locally. 42 | -------------------------------------------------------------------------------- /messages/static.create.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Create static resource 4 | 5 | # examples 6 | 7 | - Specify information on the static resource files to be created 8 | 9 | <%= config.bin %> <%= command.id %> --name MyImage --target-dir "My Logo" --file "/img/logo.png" 10 | 11 | - Push the created source to the connected org 12 | 13 | <%= config.bin %> <%= command.id %> -n MyImage -d "My Logo" -f "/img/logo.png" --push 14 | 15 | - Specify a directory to place the created files in 16 | 17 | <%= config.bin %> <%= command.id %> -n MyImage -d "My Logo" -f "/img/logo.png" --target-dir src/staticresources 18 | 19 | # flags.name.summary 20 | 21 | Name of the static resource 22 | 23 | # flags.file.summary 24 | 25 | Local file path of the static resource 26 | 27 | # flags.push.summary 28 | 29 | If used, the created files will be pushed to the connected org 30 | 31 | # flags.target-dir.summary 32 | 33 | Path to directory to store created files 34 | 35 | # log.filesCreated 36 | 37 | The following files were created:%s 38 | 39 | # log.noExcludedFiles 40 | 41 | No excluded files found 42 | 43 | # error.contentType 44 | 45 | Content-type not supported. 46 | 47 | # error.pushFailed 48 | 49 | Unable to push source to org! 
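The static.create messages above describe copying a local file into the static resources folder and writing the matching -meta.xml. A hedged sketch of that step; the content-type map is intentionally minimal, and the file naming and meta template follow the standard StaticResource metadata format rather than the plugin's own template:

```typescript
import * as path from 'path';
import * as fse from 'fs-extra';

// Sketch: copy a local file next to a standard StaticResource -meta.xml.
// Supported content types are deliberately limited ("Content-type not supported." otherwise).
const CONTENT_TYPES: Record<string, string> = { '.png': 'image/png', '.js': 'application/javascript', '.css': 'text/css', '.zip': 'application/zip' };

export function createStaticResource(name: string, filePath: string, targetDir = 'force-app/main/default/staticresources'): string[] {
  const contentType = CONTENT_TYPES[path.extname(filePath).toLowerCase()];
  if (!contentType) throw new Error('Content-type not supported.');
  fse.ensureDirSync(targetDir);
  const resourceFile = path.join(targetDir, `${name}.resource`);
  fse.copyFileSync(filePath, resourceFile);
  const metaFile = `${resourceFile}-meta.xml`;
  fse.writeFileSync(metaFile, `<?xml version="1.0" encoding="UTF-8"?>
<StaticResource xmlns="http://soap.sforce.com/2006/04/metadata">
    <cacheControl>Public</cacheControl>
    <contentType>${contentType}</contentType>
    <description>${name}</description>
</StaticResource>`);
  return [resourceFile, metaFile]; // "The following files were created:%s"
}
```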
50 | 51 | # content 52 | 53 | 54 | 55 | {{content_type}} 56 | {{description}} 57 | 58 | -------------------------------------------------------------------------------- /messages/user.access.why.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Find why a specified user has access to a field or object 4 | 5 | # examples 6 | 7 | - Request access explanation for a certain object 8 | 9 | <%= config.bin %> <%= command.id %> --object-name Product2 10 | 11 | - Request access explanation for a certain field on an object 12 | 13 | <%= config.bin %> <%= command.id %> -s Product2 --field-name ExternalId\_\_c 14 | 15 | - Request access explanation for a certain user 16 | 17 | <%= config.bin %> <%= command.id %> -s Product2 --username johndoe@salesforceuser.com 18 | 19 | # flags.object-name.summary 20 | 21 | Salesforce API name of object, i.e.: Account, Invoice\_\_c 22 | 23 | # flags.field-name.summary 24 | 25 | Salesforce API name of object, i.e.: AccountId, Name 26 | 27 | # flags.username.summary 28 | 29 | Username of salesforce user. If not specified, will use the user connected to the org 30 | 31 | # log.why 32 | 33 | Why does %s have access to %s %s? 34 | 35 | # error.connection 36 | 37 | Connectons not established! 38 | 39 | # spinner.start.scanning 40 | 41 | Scanning org for user access 42 | 43 | # spinner.stop.done 44 | 45 | Done! 46 | -------------------------------------------------------------------------------- /sfdx-project.json: -------------------------------------------------------------------------------- 1 | { 2 | "packageDirectories": [ 3 | { 4 | "path": "force-app", 5 | "default": true 6 | } 7 | ], 8 | "namespace": "", 9 | "sourceApiVersion": "59.0" 10 | } 11 | -------------------------------------------------------------------------------- /src/commands/dxb/apex/coverage/check.ts: -------------------------------------------------------------------------------- 1 | import * as xml2js from 'xml2js'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages, SfError } from '@salesforce/core'; 4 | import { existsSync, readFileSync } from 'fs-extra'; 5 | 6 | export type ApexCoverageCheckResult = { 7 | success: boolean; 8 | }; 9 | type Coverage = { 10 | name: string; 11 | path: string; 12 | Time: string; 13 | Diff: string; 14 | }; 15 | 16 | Messages.importMessagesDirectory(__dirname); 17 | const messages = Messages.loadMessages('dxb', 'apex.coverage.check'); 18 | 19 | export default class ApexCoverageCheck extends SfCommand { 20 | public static readonly summary = messages.getMessage('summary'); 21 | public static readonly description = messages.getMessage('description'); 22 | 23 | public static readonly examples = messages.getMessages('examples'); 24 | 25 | public static readonly flags = { 26 | 'file-path': Flags.string({ char: 'f', summary: messages.getMessage('flags.file-path.summary'), required: true, aliases: ['filepath'], deprecateAliases: true }), 27 | 'min-coverage': Flags.integer({ 28 | char: 'c', 29 | summary: messages.getMessage('flags.min-coverage.summary'), 30 | default: 95, 31 | aliases: ['mincoverage'], 32 | deprecateAliases: true 33 | }), 34 | }; 35 | 36 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 37 | public static readonly requiresProject = true; 38 | 39 | public async run(): Promise { 40 | const { flags } = await this.parse(ApexCoverageCheck); 41 | const filePath: string = flags['file-path']; 42 | const threshold: number 
= flags['min-coverage'] / 100; 43 | const data = readFileSync(filePath, { encoding: 'utf-8' }); 44 | if (!existsSync(filePath)) { 45 | throw new SfError('Coverage file not found: ' + filePath); 46 | } 47 | try { 48 | const result: any = await xml2js.parseStringPromise(data); 49 | 50 | const badClasses: any[] = []; 51 | result.coverage.packages[0].package[0].classes[0].class.forEach((apex: any) => { 52 | if (parseFloat(apex.$['line-rate']) < threshold) { 53 | badClasses.push(apex); 54 | } 55 | }); 56 | if (badClasses && badClasses.length > 0) { 57 | this.log(messages.getMessage('coverageTooLow', [threshold * 100])); 58 | const tableArray: Coverage[] = []; 59 | badClasses.forEach((item) => { 60 | const coverage: number = parseFloat(item.$['line-rate']); 61 | tableArray.push({ 62 | name: item.$.name, 63 | path: item.$.filename, 64 | Time: `${item.$['line-rate'] * 100}%`, 65 | Diff: `-${((threshold - coverage) * 100).toFixed(2)}%`, 66 | }); 67 | }); 68 | this.table(tableArray, { 69 | name: { header: 'NAME' }, 70 | path: { header: 'PATH' }, 71 | Time: { header: 'TIME' }, 72 | Diff: { header: 'DIFF' }, 73 | }); 74 | throw new SfError(messages.getMessage('insufficientCoverage')); 75 | } else { 76 | this.log(messages.getMessage('coverageIsOk')); 77 | } 78 | return { success: true }; 79 | } catch (e: unknown) { 80 | const err = e as Error; 81 | throw new SfError(err.message); 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/commands/dxb/apex/coverage/cleanup.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable class-methods-use-this */ 2 | /* eslint-disable @typescript-eslint/restrict-template-expressions */ 3 | import * as path from 'path'; 4 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 5 | import { Messages, PackageDir, SfProject } from '@salesforce/core'; 6 | import { JsonMap } from '@salesforce/ts-types'; 7 | import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs-extra'; 8 | 9 | type ApexCoverageCleanupResult = { 10 | success: boolean; 11 | }; 12 | 13 | Messages.importMessagesDirectory(__dirname); 14 | const messages = Messages.loadMessages('dxb', 'apex.coverage.cleanup'); 15 | 16 | export default class ApexCoverageCleanup extends SfCommand { 17 | public static readonly summary = messages.getMessage('summary'); 18 | 19 | public static readonly description = messages.getMessage('description'); 20 | 21 | public static readonly examples = messages.getMessages('examples'); 22 | 23 | public static readonly flags = { 24 | 'file-path': Flags.string({ char: 'f', summary: messages.getMessage('flags.file-path.summary'), required: true, aliases: ['file'], deprecateAliases: true }), 25 | }; 26 | protected packageDirectories: string[] = []; 27 | protected allClasses: string[] = []; 28 | public async run(): Promise { 29 | // flags 30 | const { flags } = await this.parse(ApexCoverageCleanup); 31 | const originFile = flags['file-path']; 32 | // project config 33 | const project: SfProject = await SfProject.resolve(); 34 | const projectConfig: JsonMap = await project.resolveProjectConfig(); 35 | this.packageDirectories = (projectConfig.packageDirectories as PackageDir[]).map( pkg => pkg.path); 36 | 37 | let fileContent: string = readFileSync(originFile).toString(); 38 | const results = [...fileContent.matchAll(/filename=".*?"/g)]; 39 | this.packageDirectories.forEach( (pkg) => { 40 | this.getAllClasses(pkg); 41 | }); 42 | results.forEach((elem) => { 43 | const 
classnameString: string = elem[0]; 44 | const classname = classnameString.split('filename="no-map/').join('').slice(0, -1); 45 | if(classname){ 46 | const classpath = this.allClasses.find((e: string) => this.isPathEndingWith(e,classname)); 47 | fileContent = fileContent.split(`no-map/${classname}"`).join(`${classpath}"`); 48 | } 49 | }); 50 | writeFileSync(originFile, fileContent); 51 | return { success: true }; 52 | } 53 | 54 | public isPathEndingWith(filename: string, classname: string): boolean { 55 | const expectedPath = path.join('classes', `${classname}.cls`); 56 | const fullPath = path.resolve(filename); 57 | 58 | return fullPath.endsWith(expectedPath); 59 | } 60 | 61 | public getAllClasses(directory: string): void { 62 | const currentDirectorypath = path.join(directory); 63 | 64 | const currentDirectory: string[] = readdirSync(currentDirectorypath, 'utf8'); 65 | 66 | currentDirectory.forEach((file: string) => { 67 | const pathOfCurrentItem: string = path.join(directory + '/' + file); 68 | if (statSync(pathOfCurrentItem).isFile() && file.endsWith('.cls')) { 69 | this.allClasses.push(pathOfCurrentItem); 70 | } else if (!statSync(pathOfCurrentItem).isFile()) { 71 | const directorypath = path.join(directory + '/' + file); 72 | this.getAllClasses(directorypath); 73 | } 74 | }); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/commands/dxb/apex/scan/query.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import * as fs from 'fs'; 3 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 4 | import { Messages, PackageDir, SfProject } from '@salesforce/core'; 5 | import { ux } from '@oclif/core'; 6 | import { readdirSync, statSync } from 'fs-extra'; 7 | export type ApexScanQueryResult = { 8 | success: boolean; 9 | }; 10 | const SOQL_REGEX = /\[SELECT\s.*?\]|\('(SELECT|select).*'\)/g; 11 | 12 | Messages.importMessagesDirectory(__dirname); 13 | const messages = Messages.loadMessages('dxb', 'apex.scan.query'); 14 | 15 | export default class ApexScanQuery extends SfCommand { 16 | public static readonly summary = messages.getMessage('summary'); 17 | 18 | public static readonly description = messages.getMessage('description'); 19 | 20 | public static readonly examples = messages.getMessages('examples'); 21 | 22 | public static readonly flags = { 'target-org': Flags.requiredOrg() }; 23 | 24 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 25 | public static readonly requiresProject = true; 26 | 27 | protected packageDirectories: PackageDir[] = []; 28 | protected projectConfig: any; 29 | protected instanceUrl: string | undefined = ''; 30 | protected accessToken = ''; 31 | protected allClasses: string[] = []; 32 | 33 | public async run(): Promise { 34 | const { flags } = await this.parse(ApexScanQuery); 35 | this.accessToken = flags['target-org']?.getConnection().accessToken?.toString() ?? 
''; 36 | this.instanceUrl = flags['target-org']?.getConnection().instanceUrl; 37 | this.projectConfig = await (await SfProject.resolve()).resolveProjectConfig(); 38 | 39 | this.packageDirectories = this.projectConfig.packageDirectories; 40 | this.packageDirectories.forEach((directory: PackageDir) => { 41 | this.getAllClasses(directory.path); 42 | }); 43 | await Promise.all( 44 | this.allClasses.map(async (file) => { 45 | const fileContent = fs.readFileSync(file, 'utf8'); 46 | let match; 47 | const queries = []; 48 | while ((match = SOQL_REGEX.exec(fileContent)) !== null) { 49 | const query = match[0].substring(1, match[0].length - 1); 50 | queries.push(query); 51 | } 52 | if (queries.length === 1) { 53 | await this.scanQuery(file, queries[0]); 54 | } else if (queries.length > 1) { 55 | await Promise.all( 56 | queries.map(async (query) => { 57 | try { 58 | await this.scanQuery(file, query); 59 | } catch (err) { 60 | ux.error(`${file}-${query}-${(err as Error).message}`); 61 | } 62 | }) 63 | ); 64 | } 65 | }) 66 | ); 67 | return { success: true }; 68 | } 69 | public async scanQuery(file: string, query: string): Promise { 70 | const url = `${this.instanceUrl}/services/data/v57.0/query/?explain=${encodeURIComponent(query)}`; 71 | const headers: Headers = new Headers(); 72 | headers.append('Authorization', 'Bearer ' + this.accessToken); 73 | headers.append('X-SFDC-Session', 'Bearer ' + this.accessToken); 74 | headers.append('Content-Type', 'application/json; charset=UTF-8'); 75 | headers.append('Accept', 'application/json'); 76 | const options: RequestInit = { 77 | method: 'GET', 78 | headers, 79 | }; 80 | try { 81 | const response: Response = await fetch(url, options); 82 | const body: any = await response.json(); 83 | if (!body.plans || body.plans.length === 0) { 84 | this.log(body); 85 | return; 86 | } 87 | this.log(messages.getMessage('class', [file])); 88 | this.log(messages.getMessage('query', [query])); 89 | for (const queryPlan of body) { 90 | this.log(queryPlan); 91 | for (const queryPlanNote of queryPlan.notes) { 92 | this.log(queryPlanNote); 93 | } 94 | } 95 | this.log('----------------------------------------------------'); 96 | } catch (err) { 97 | this.error(err as Error); 98 | } 99 | } 100 | 101 | public getAllClasses(directory: string): void { 102 | const currentDirectorypath = path.join(directory); 103 | 104 | const currentDirectory = readdirSync(currentDirectorypath, 'utf8'); 105 | 106 | currentDirectory.forEach((file: string) => { 107 | const pathOfCurrentItem: string = path.join(directory + '/' + file); 108 | if (statSync(pathOfCurrentItem).isFile() && file.endsWith('.cls')) { 109 | this.allClasses.push(pathOfCurrentItem); 110 | } else if (!statSync(pathOfCurrentItem).isFile()) { 111 | const directorypath = path.join(directory + '/' + file); 112 | this.getAllClasses(directorypath); 113 | } 114 | }); 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/commands/dxb/apex/trigger/create.ts: -------------------------------------------------------------------------------- 1 | import path = require('path'); 2 | import fse = require('fs-extra'); 3 | 4 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 5 | import { Messages, SfProject } from '@salesforce/core'; 6 | 7 | export type ApexTriggerCreateResult = { 8 | success: boolean; 9 | }; 10 | 11 | Messages.importMessagesDirectory(__dirname); 12 | const messages = Messages.loadMessages('dxb', 'apex.trigger.create'); 13 | 14 | export default class ApexTriggerCreate 
extends SfCommand { 15 | public static readonly summary = messages.getMessage('summary'); 16 | public static readonly description = messages.getMessage('description'); 17 | 18 | public static readonly examples = messages.getMessages('examples'); 19 | 20 | public static readonly flags = { 21 | sobject: Flags.string({ 22 | char: 's', 23 | required: true, 24 | summary: messages.getMessage('flags.sobject.summary'), 25 | }), 26 | 'source-api-version': Flags.integer({ 27 | char: 'v', 28 | summary: messages.getMessage('flags.source-api-version.summary'), 29 | aliases: ['sourceapiversion'], 30 | deprecateAliases: true, 31 | }), 32 | }; 33 | 34 | public async run(): Promise { 35 | const { flags } = await this.parse(ApexTriggerCreate); 36 | const project = await SfProject.resolve(); 37 | const config: any = await project.resolveProjectConfig(); 38 | const sobject = flags.sobject; 39 | const apiversion: number = flags['source-api-version'] ?? parseInt(config.sourceApiVersion, 10); 40 | 41 | const template = 'trigger'; 42 | const vars = 43 | 'className=' + 44 | sobject.replace('__c', '').replace('_', '') + 45 | 'TriggerHandler,' + 46 | 'triggerName=' + 47 | sobject.replace('__c', '').replace('_', '') + 48 | 'Trigger,' + 49 | 'apiVersion=' + 50 | apiversion.toPrecision(3) + 51 | ',' + 52 | 'sobject=' + 53 | sobject; 54 | 55 | let templateFolder = path.join('.sfdx-templates', template); 56 | if (!fse.existsSync(templateFolder)) { 57 | templateFolder = path.join(__dirname, '../../../../utils/templates/', template); 58 | } 59 | this.log(templateFolder); 60 | 61 | this.createFiles(templateFolder, sobject, vars); 62 | return { success: true }; 63 | } 64 | 65 | private createFiles(templateFolder: string, sobject: string, vars: string): void { 66 | const name = sobject.replace('__c', '').replace('_', '') + 'Trigger'; 67 | const outputdir = './force-app/main/default'; 68 | 69 | if (!fse.existsSync(templateFolder)) { 70 | this.error(messages.getMessage('error.templateNotExist')); 71 | } 72 | 73 | const defJsonPath = path.join(templateFolder, 'def.json'); 74 | 75 | if (!fse.existsSync(defJsonPath)) { 76 | this.error(messages.getMessage('error.defJsonNotFound')); 77 | } 78 | 79 | const defJson: { files: string[][]; vars: string } = JSON.parse(fse.readFileSync(defJsonPath).toString()); 80 | const defJsonVars = defJson.vars; 81 | 82 | if (!vars) { 83 | this.error(messages.getMessage('error.defJSONVars', [defJsonVars])); 84 | } 85 | 86 | const filesCreated: string[] = []; 87 | 88 | defJson.files.forEach((row) => { 89 | const fileName = row[0]; 90 | const fileExtension = row[1]; 91 | if (fileName !== 'def.json') { 92 | const templateFilePath = path.join(templateFolder, fileName); 93 | let content = fse.readFileSync(templateFilePath).toString(); 94 | 95 | const splitVars = vars.split(','); 96 | splitVars.forEach((value) => { 97 | content = updateContent(content, value); 98 | }); 99 | content = updateContent(content, 'sobject=' + sobject); 100 | 101 | let newFile = path.join(`${outputdir}/triggers`, `${name}.${fileExtension}`); 102 | if (fileExtension.toString().includes('cls')) { 103 | newFile = path.join(`${outputdir}/classes`, `${name}Handler.${fileExtension}`); 104 | } 105 | 106 | const newFilePath = path.dirname(newFile); 107 | 108 | fse.ensureDirSync(newFilePath); 109 | fse.writeFileSync(newFile, content); 110 | filesCreated.push(newFile); 111 | } 112 | }); 113 | 114 | let result = 'The following files were created:'; 115 | for (const createdFile of filesCreated) { 116 | result += `\n ${createdFile}`; 117 | } 118 
| 119 | this.log(result); 120 | } 121 | } 122 | 123 | function updateContent(content: string, values: string): string { 124 | const splitValues = values.split('='); 125 | 126 | const varName = splitValues[0]; 127 | const varValue = splitValues[1]; 128 | content = content.replace(new RegExp(`{{${varName}}}`, 'g'), varValue); 129 | return content; 130 | } 131 | -------------------------------------------------------------------------------- /src/commands/dxb/api/align.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages, PackageDir, SfProject } from '@salesforce/core'; 4 | 5 | import * as xml2js from 'xml2js'; 6 | import * as js2xmlparser from 'js2xmlparser'; 7 | import * as fs from 'fs-extra'; 8 | Messages.importMessagesDirectory(__dirname); 9 | const messages = Messages.loadMessages('dxb', 'api.align'); 10 | 11 | export type ApiAlignResult = { 12 | success: boolean; 13 | }; 14 | 15 | export default class ApiAlign extends SfCommand { 16 | public static readonly summary = messages.getMessage('summary'); 17 | public static readonly description = messages.getMessage('description'); 18 | 19 | public static readonly examples = messages.getMessages('examples'); 20 | 21 | public static readonly flags = { 22 | 'metadata-type': Flags.string({ 23 | char: 'm', 24 | summary: messages.getMessage('flags.metadata-type.summary'), 25 | description: messages.getMessage('flags.metadata-type.description'), 26 | multiple: true, 27 | }), 28 | directory: Flags.string({ 29 | char: 'd', 30 | summary: messages.getMessage('flags.directory.summary'), 31 | description: messages.getMessage('flags.directory.description'), 32 | multiple: true, 33 | }), 34 | }; 35 | 36 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 37 | public static readonly requiresProject = true; 38 | 39 | public async run(): Promise { 40 | const { flags } = await this.parse(ApiAlign); 41 | const projectConfig: any = await (await SfProject.resolve()).resolveProjectConfig(); 42 | const projectApi: string = projectConfig.sourceApiVersion!; 43 | const directories: string[] = 44 | flags.directory ?? projectConfig.packageDirectories.map((packageDir: PackageDir) => packageDir.path); 45 | const metadataTypes: string[] | undefined = flags['metadata-type']; 46 | this.log(metadataTypes?.toString()); 47 | const apiAlignmentExclusion: string[] = projectConfig.plugins.dxb.apiAlignmentExclusion || []; 48 | 49 | // for every package directory, find all XML files that have a tag and return the full path 50 | directories.forEach((rootPath: string) => { 51 | const filesWithApi: string[] = !fs.lstatSync(rootPath).isDirectory() 52 | ? 
[rootPath] 53 | : (this.findFilesWithTag(rootPath, 'apiVersion') as string[]).filter( 54 | (f: string) => !apiAlignmentExclusion.includes(f) 55 | ); 56 | // for every file with the required tag, read it and update the value of the tag to the project api 57 | filesWithApi.forEach((f: string) => { 58 | void this.processFile(f, projectApi, metadataTypes); 59 | }); 60 | }); 61 | return { success: true }; 62 | } 63 | 64 | /** 65 | * Recursive search for XML files with specific tag 66 | * 67 | * @param filePath The path to a file or directory 68 | * @param tag The tag that an xml file must contain to be selected 69 | * @returns string[]: an Array containing file paths to xml files that contain a specific tags 70 | */ 71 | public findFilesWithTag(filePath: string, tag: string): string | Array { 72 | if (fs.lstatSync(filePath).isDirectory()) { 73 | // eslint-disable-next-line @typescript-eslint/no-unsafe-return 74 | return fs 75 | .readdirSync(filePath) 76 | .flatMap((entry: string) => this.findFilesWithTag(`${filePath}/${entry}`, tag)) 77 | .filter((e) => e !== ''); 78 | } else if (path.extname(filePath) === '.xml' && fs.readFileSync(filePath, { encoding: 'utf-8' }).includes(tag)) { 79 | return filePath; 80 | } else { 81 | return ''; 82 | } 83 | } 84 | 85 | /** 86 | * Process one file to update the apiVersion tag 87 | * 88 | * @param filePath The path to a file 89 | * @param projectApi The API version of the sfdx-project.json 90 | * @param metadataTypes The list of Metadata Types that need to be aligned (optional) 91 | */ 92 | private async processFile(filePath: string, projectApi: string, metadataTypes?: string[]): Promise { 93 | const fileContent: string = fs.readFileSync(filePath, { encoding: 'utf-8' }); 94 | const parser = new xml2js.Parser({ explicitArray: false }); 95 | const result: any = await parser.parseStringPromise(fileContent); 96 | 97 | const root = Object.keys(result)[0]; 98 | this.log(root); 99 | if (metadataTypes?.includes(root) === false) { 100 | // in case specific metadata types are wanted, skip if this type is not one of them 101 | return; 102 | } 103 | delete result[root]['$']; 104 | result[root]['@'] = { 105 | xmlns: 'http://soap.sforce.com/2006/04/metadata', 106 | }; 107 | this.log(`Change API Version of ${filePath} from ${result[root].apiVersion as string} to ${projectApi}`); 108 | result[root].apiVersion = projectApi; 109 | const xml = js2xmlparser.parse(root, result[root], { 110 | declaration: { encoding: 'UTF-8' }, 111 | format: { 112 | doubleQuotes: true, 113 | }, 114 | }); 115 | fs.writeFileSync(filePath, xml); 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/commands/dxb/community/publish.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | 5 | Messages.importMessagesDirectory(__dirname); 6 | const messages = Messages.loadMessages('dxb', 'community.publish'); 7 | 8 | export type CommunityPublishResult = { 9 | success: boolean; 10 | }; 11 | 12 | export default class CommunityPublish extends SfCommand { 13 | public static readonly summary = messages.getMessage('summary'); 14 | 15 | public static readonly examples = messages.getMessages('examples'); 16 | 17 | public static readonly flags = { 18 | 'target-org': Flags.requiredOrg(), 19 | name: Flags.string({ char: 'n', summary: 
messages.getMessage('flags.name.summary'), multiple: true }), 20 | }; 21 | 22 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 23 | public static readonly requiresProject = true; 24 | 25 | public async run(): Promise { 26 | const { flags } = await this.parse(CommunityPublish); 27 | const name = flags.name; 28 | const username = flags['target-org']?.getUsername(); 29 | if (name) { 30 | name.forEach((elem) => { 31 | this.log(exec(`sf community publish --name ${elem} --target-org ${username}`).toString()); 32 | }); 33 | } else { 34 | const allcommunities = JSON.parse( 35 | exec( 36 | `sf data query --query "SELECT Name FROM Network WHERE Status = 'Live'" --result-format json --target-org ${username}` 37 | ).toString() 38 | ); 39 | if (allcommunities !== null) { 40 | allcommunities.result?.records?.forEach((elem: { Name: string }) => { 41 | this.log(exec(`sf community publish --name ${elem.Name} --target-org ${username}`).toString()); 42 | }); 43 | } 44 | } 45 | return { success: true }; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/commands/dxb/data/bulk/query.ts: -------------------------------------------------------------------------------- 1 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 2 | import { Messages, SfError } from '@salesforce/core'; 3 | import { Connection } from 'jsforce'; 4 | import * as fs from 'fs-extra'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'data.bulk.query'); 8 | 9 | export type DataBulkQueryResult = { 10 | outputFile: string; 11 | }; 12 | 13 | export default class DataBulkQuery extends SfCommand { 14 | public static readonly summary = messages.getMessage('summary'); 15 | 16 | public static readonly examples = messages.getMessages('examples'); 17 | 18 | public static readonly flags = { 19 | 'target-org': Flags.requiredOrg(), 20 | 'object-name': Flags.string({ 21 | char: 's', 22 | summary: messages.getMessage('flags.object-name.summary'), 23 | aliases: ['objectname'], 24 | deprecateAliases: true, 25 | }), 26 | query: Flags.string({ char: 'q', summary: messages.getMessage('flags.query.summary') }), 27 | 'all-fields': Flags.boolean({ 28 | default: false, 29 | summary: messages.getMessage('flags.all-fields.summary'), 30 | aliases: ['allfields'], 31 | deprecateAliases: true, 32 | }), 33 | 'output-dir': Flags.string({ 34 | char: 'd', 35 | summary: messages.getMessage('flags.output-dir.summary'), 36 | default: 'bulk_output', 37 | aliases: ['outputdir'], 38 | deprecateAliases: true, 39 | }), 40 | 'file-name': Flags.string({ 41 | char: 'f', 42 | summary: messages.getMessage('flags.file-name.summary'), 43 | aliases: ['filename'], 44 | deprecateAliases: true, 45 | }), 46 | }; 47 | 48 | protected fields: string[] = []; 49 | protected query: string | undefined; 50 | protected objectname = ''; 51 | protected connection: Connection | undefined; 52 | protected outputdir: string | undefined; 53 | 54 | public async run(): Promise { 55 | const { flags } = await this.parse(DataBulkQuery); 56 | this.outputdir = flags['output-dir']; 57 | this.query = flags.query; 58 | this.objectname = flags['object-name'] ?? ''; 59 | // do we have a proper connections ? 
60 | this.connection = flags['target-org']?.getConnection(); 61 | if (!this.connection?.accessToken || !this.connection.instanceUrl) { 62 | throw new SfError( 63 | messages.getMessage('error.message.noConfiguration'), 64 | messages.getMessage('error.name.invalidConnection') 65 | ); 66 | } 67 | // handle query 68 | if (!this.query && !this.objectname) { 69 | // invalid arguments 70 | throw new SfError( 71 | messages.getMessage('error.message.queryOrObject'), 72 | messages.getMessage('error.name.invalidFlags') 73 | ); 74 | } else if (this.query) { 75 | this.objectname = this.query 76 | .toUpperCase() 77 | .replace(/\([\s\S]+\)/g, '') 78 | .match(/FROM\s+(\w+)/i)![1]; 79 | if (!this.objectname) { 80 | throw new SfError( 81 | messages.getMessage('error.message.invalidSOQL'), 82 | messages.getMessage('error.name.invalidSOQL') 83 | ); 84 | } 85 | const fieldSelector = this.query 86 | .replace(/\([\s\S]+\)/g, '') 87 | .match(/SELECT(.*?)FROM/i)![1] 88 | .trim(); 89 | if (fieldSelector === '*') { 90 | this.fields = await this.getObjectFields(); 91 | this.query = this.query.replace('*', this.fields.join(',')); 92 | } 93 | } else if (this.objectname) { 94 | this.query = await this.generateQuery(flags.allfields); 95 | } 96 | 97 | const filename = flags['file-name'] ?? this.objectname + '.csv'; 98 | const outputFile = `${this.outputdir}/${filename}`; 99 | 100 | this.spinner.start('Processing...'); 101 | const result = await this.execute(outputFile); 102 | this.spinner.stop('Done'); 103 | this.log(result); 104 | return { outputFile }; 105 | } 106 | 107 | /** 108 | * @description Build soql query for selected object 109 | */ 110 | private async generateQuery(allfields: boolean): Promise { 111 | const soql = ['SELECT']; 112 | if (allfields) { 113 | this.fields = await this.getObjectFields(); 114 | } else { 115 | this.fields.push('Id'); 116 | } 117 | soql.push(this.fields.join(',')); 118 | soql.push('FROM'); 119 | soql.push(this.objectname); 120 | return soql.join(' '); 121 | } 122 | 123 | /** 124 | * @description Create bulk job 125 | */ 126 | private async execute(outputFile: string): Promise { 127 | return new Promise((resolve, reject) => { 128 | try { 129 | const csvFileOut = fs.createWriteStream(outputFile); 130 | this.connection?.bulk 131 | .query(this.query ?? '') 132 | .stream() // Convert to Node.js's usual readable stream. 133 | .pipe(csvFileOut) 134 | .on('end', () => { 135 | resolve('success'); 136 | }); 137 | } catch (err) { 138 | reject(err); 139 | } 140 | }); 141 | } 142 | 143 | /** 144 | * @description Retrieve Object fields 145 | */ 146 | private async getObjectFields(): Promise { 147 | return new Promise((resolve, reject) => { 148 | this.connection 149 | ?.sobject(this.objectname ?? 
'') 150 | .describe() 151 | .then((meta) => { 152 | const t: string[] = []; 153 | meta.fields.forEach((f) => { 154 | if (f.type !== 'address' && !f.calculated) { 155 | t.push(f.name); 156 | } 157 | }); 158 | resolve(t); 159 | }) 160 | .catch((err) => { 161 | reject(err); 162 | }); 163 | }); 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /src/commands/dxb/data/export.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | import { Connection } from 'jsforce'; 5 | import * as fs from 'fs-extra'; 6 | import { createObjectCsvWriter as createCsvWriter } from 'csv-writer'; 7 | Messages.importMessagesDirectory(__dirname); 8 | const messages = Messages.loadMessages('dxb', 'data.export'); 9 | 10 | type Header = { 11 | id: string; 12 | title: string; 13 | }; 14 | type GenericObject = { 15 | [key: string]: any; 16 | }; 17 | export type DataExportResult = { 18 | success: boolean; 19 | }; 20 | 21 | export default class DataExport extends SfCommand { 22 | public static readonly summary = messages.getMessage('summary'); 23 | 24 | public static readonly examples = messages.getMessages('examples'); 25 | 26 | public static readonly deprecateAliases = true; 27 | public static readonly aliases = ['dxb:data:transfer:export']; 28 | 29 | public static readonly flags = { 30 | 'target-org': Flags.requiredOrg(), 31 | 'definition-file': Flags.file({ 32 | char: 'f', 33 | summary: messages.getMessage('flags.definition-file.summary'), 34 | required: true, 35 | exists: true, 36 | aliases: ['definitionfile'], 37 | deprecateAliases: true, 38 | }), 39 | 'output-dir': Flags.directory({ 40 | char: 'd', 41 | summary: messages.getMessage('flags.output-dir.summary'), 42 | default: '.', 43 | exists: true, 44 | aliases: ['outputdir'], 45 | deprecateAliases: true, 46 | }), 47 | 'query-limit': Flags.integer({ 48 | char: 'l', 49 | summary: messages.getMessage('flags.query-limit.summary'), 50 | default: 500000, 51 | min: 1, 52 | aliases: ['querylimit'], 53 | deprecateAliases: true, 54 | }), 55 | }; 56 | 57 | protected connection: Connection | undefined; 58 | protected outputdir: string | undefined; 59 | protected csvWriter: any; 60 | protected querylimit: number | undefined; 61 | 62 | public async run(): Promise { 63 | const { flags } = await this.parse(DataExport); 64 | const definitionfile = flags['definition-file']; 65 | this.outputdir = flags['output-dir']; 66 | this.querylimit = flags['query-limit']; 67 | this.connection = flags['target-org']?.getConnection(); 68 | JSON.parse(fs.readFileSync(definitionfile).toString()).objects.reduce( 69 | (accumulatorPromise: Promise, elem: GenericObject) => 70 | accumulatorPromise.then(() => { 71 | if (elem.active) { 72 | return this.export(elem); 73 | } else { 74 | return; 75 | } 76 | }), 77 | Promise.resolve() 78 | ); 79 | return { success: true }; 80 | } 81 | 82 | private export(job: GenericObject): void { 83 | job.fields = job.fields.replace(/ /g, ''); 84 | const exportfile = path.join(this.outputdir ?? 
'', job.filename); 85 | this.log(messages.getMessage('log.registerExport', [`[\x1b[33m${job.objectName as string},${exportfile}\x1b[0m]`])); 86 | let query = `select ${job.fields.replace(/ /g, '') as string} from ${job.objectName as string}`; 87 | if (job.where) { 88 | query += ` where ${job.where as string}`; 89 | } 90 | job.fields = job.fields.split(','); 91 | job['exportfile'] = exportfile; 92 | job['query'] = query; 93 | this.startQuery(job); 94 | } 95 | 96 | private startQuery(job: GenericObject): void { 97 | const records: GenericObject[] = []; 98 | const query = this.connection 99 | ?.query(job.query) 100 | .on('record', (record) => { 101 | records.push(record); 102 | }) 103 | .on('end', () => { 104 | this.log(messages.getMessage('log.exportResult', [`\x1b[32m${query?.totalFetched}\x1b[0m`])); 105 | const headers: Header[] = []; 106 | job.fields.forEach((key: string) => { 107 | const k = key.trim().toLowerCase(); 108 | headers.push({ id: k, title: k }); 109 | }); 110 | const csvWriter = createCsvWriter({ 111 | path: job.exportfile, 112 | header: headers, 113 | encoding: 'utf-8', 114 | }); 115 | records.forEach((element) => { 116 | job.fields.forEach((key: string) => { 117 | const k = key.trim().toLowerCase(); 118 | if (k.includes('.')) { 119 | // handle cross reference fields 120 | const f = key.split('.'); 121 | element[k] = element[f[0]] ? element[f[0]][f[1]] : undefined; 122 | delete element[f[0]]; 123 | } else { 124 | // just a normal field 125 | element[k] = element[key]; 126 | delete element[key]; 127 | } 128 | }); 129 | delete element.attributes; 130 | }); 131 | void csvWriter.writeRecords(records); // returns a promise 132 | return job; 133 | }) 134 | .on('error', (err) => { 135 | this.error(err); 136 | }) 137 | .run({ autoFetch: true, maxFetch: this.querylimit }); // synonym of Query#execute(); 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /src/commands/dxb/data/file/export.ts: -------------------------------------------------------------------------------- 1 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 2 | import { Messages } from '@salesforce/core'; 3 | import * as fs from 'fs-extra'; 4 | import { Record, SObject, SObjectFieldType, Schema, Connection } from 'jsforce'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'data.file.export'); 8 | 9 | export type DataFileExportResult = { 10 | failedDownloads: string[]; 11 | }; 12 | 13 | type SObjectResult = { 14 | [name: string]: SObjectFieldType | null; 15 | } & Record; 16 | 17 | function sanitizeFileName(fileName: string): string { 18 | // List of invalid characters in Windows file names 19 | const invalidCharsRegex = /[<>:"/\\|?*\x20-\u{20}/u]/g; 20 | 21 | // Replace invalid characters with an empty string 22 | const sanitizedFileName = fileName.replaceAll(invalidCharsRegex, ''); 23 | 24 | return sanitizedFileName; 25 | } 26 | 27 | function delay(ms: number): Promise { 28 | return new Promise((resolve) => setTimeout(resolve, ms)); 29 | } 30 | 31 | export default class DataFileExport extends SfCommand { 32 | public static readonly summary = messages.getMessage('summary'); 33 | 34 | public static readonly examples = messages.getMessages('examples'); 35 | 36 | public static readonly flags = { 37 | 'target-org': Flags.requiredOrg(), 38 | 'file-path': Flags.file({ 39 | exists: true, 40 | char: 'f', 41 | summary: messages.getMessage('flags.file-path.summary'), 42 | default: './input.csv', 43 | aliases: 
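/*
 * Input sketch for this command (assumption: the file is the plain Id list split in run() below):
 * a CSV or newline-separated list of ContentDocument Ids, for example
 *
 *   0691x00000AbCdEAAV
 *   0691x00000AbCdFAAV
 *
 * Each Id is resolved to its latest published ContentVersion and the binary is written under ./files.
 * Example run (command id assumed from the plugin layout):
 *   sf dxb data file export --file-path ./input.csv --target-org myOrg
 */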
['filepath'], 44 | deprecateAliases: true, 45 | }), 46 | min: Flags.integer({ 47 | char: 'm', 48 | summary: messages.getMessage('flags.min.summary'), 49 | }), 50 | max: Flags.integer({ 51 | char: 't', 52 | summary: messages.getMessage('flags.max.summary'), 53 | }), 54 | }; 55 | 56 | public failedDownloads: string[] = []; 57 | protected connection: Connection | undefined; 58 | 59 | public async run(): Promise { 60 | const { flags } = await this.parse(DataFileExport); 61 | const min = flags.min; 62 | const max = flags.max; 63 | const file = fs.readFileSync(flags['file-path']).toString(); 64 | const regex = /[,\n\r]+/; 65 | const contentDocumentIds = file.split(regex); 66 | this.log(messages.getMessage('log.numberToExtract', [contentDocumentIds.length])); 67 | this.connection = flags['target-org']?.getConnection(); 68 | 69 | await this.downloadFiles(min && max ? contentDocumentIds.slice(min, max) : contentDocumentIds); 70 | return { failedDownloads: this.failedDownloads }; 71 | } 72 | 73 | public async downloadFiles(fileBodyIds: string[]): Promise { 74 | const downloadPromises = fileBodyIds.map((fileBodyId) => this.downloadFile(fileBodyId, 3)); 75 | 76 | try { 77 | await Promise.all(downloadPromises); 78 | this.log(messages.getMessage('log.successful')); 79 | } catch (error) { 80 | this.log(messages.getMessage('error.failedDownloads', [(error as Error).message])); 81 | } 82 | } 83 | 84 | public async downloadFile(contentDocumentId: string, retry: number): Promise { 85 | try { 86 | await delay(500); 87 | const contentDocument: SObject | undefined = 88 | this.connection?.sobject('ContentDocument'); 89 | const docResult: SObjectResult | undefined = await contentDocument?.retrieve(contentDocumentId, { 90 | fields: ['LatestPublishedVersionId', 'Title'], 91 | }); 92 | this.log(docResult?.Title); 93 | const contentVersion: SObject | undefined = this.connection?.sobject('ContentVersion'); 94 | const contentVersionId: string = docResult?.LatestPublishedVersionId; 95 | const result: SObjectResult | undefined = await contentVersion?.retrieve(contentVersionId, { 96 | fields: ['PathOnClient', 'VersionDataUrl'], 97 | }); 98 | this.log(result?.PathOnClient); 99 | this.log(result?.VersionData); 100 | const response: Response = await fetch( 101 | (this.connection?.instanceUrl ?? '') + '/' + (result?.VersionData as string), 102 | { 103 | headers: { 104 | 'Content-Type': 'application/json', 105 | Authorization: `Bearer ${this.connection?.accessToken}`, 106 | }, 107 | } 108 | ); 109 | 110 | if (!response.ok) { 111 | messages.createError('error.failedToDownload', [response.status]); 112 | } 113 | const contentType = response.headers.get('content-type'); 114 | const fileBlob = await response.blob(); 115 | 116 | const blob = new Blob([fileBlob], { type: contentType ?? 
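/*
 * Download flow implemented here: ContentDocument -> LatestPublishedVersionId -> ContentVersion,
 * then the version body is fetched over REST with the org access token and written to
 * ./files/<ContentDocumentId>_<sanitized PathOnClient>. Ids that fail are retried a few times and
 * finally collected in failedDownloads for the JSON result.
 */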
'' }); 117 | const fileStream: fs.WriteStream = fs.createWriteStream( 118 | `./files/${contentDocumentId}_${sanitizeFileName(result?.PathOnClient)}` 119 | ); 120 | fileStream.write(Buffer.from(await new Response(blob).arrayBuffer())); 121 | 122 | return await new Promise((resolve, reject) => { 123 | fileStream.on('finish', resolve); 124 | fileStream.on('error', reject); 125 | }); 126 | } catch (err) { 127 | this.log(messages.getMessage('error.contentVersionError', [contentDocumentId])); 128 | if (retry >= 0) { 129 | return await this.downloadFile(contentDocumentId, retry - 1); 130 | } 131 | this.failedDownloads.push(contentDocumentId); // Store the failed contentDocumentId; 132 | } 133 | return; 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/commands/dxb/data/query/explain.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/restrict-template-expressions */ 2 | /* eslint-disable no-console */ 3 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 4 | import { Messages } from '@salesforce/core'; 5 | import { QueryExplainResult } from 'jsforce'; 6 | import * as TableModule from 'cli-table3'; 7 | const Table = TableModule.default; 8 | 9 | Messages.importMessagesDirectory(__dirname); 10 | const messages = Messages.loadMessages('dxb', 'data.query.explain'); 11 | 12 | export type DataQueryExplainResult = { 13 | result: string | QueryExplainResult; 14 | }; 15 | 16 | export default class DataQueryExplain extends SfCommand { 17 | public static readonly summary = messages.getMessage('summary'); 18 | 19 | public static readonly examples = messages.getMessages('examples'); 20 | 21 | public static readonly flags = { 22 | 'target-org': Flags.requiredOrg(), 23 | query: Flags.string({ 24 | char: 'q', 25 | summary: messages.getMessage('flags.query.summary'), 26 | }), 27 | }; 28 | 29 | protected isJsonOnly: boolean | undefined; 30 | 31 | public async run(): Promise { 32 | const { flags } = await this.parse(DataQueryExplain); 33 | const query = flags.query; 34 | this.isJsonOnly = flags.json; 35 | 36 | if (!query) { 37 | throw messages.createError('error.missingQueryFlag'); 38 | } 39 | 40 | if (!this.isJsonOnly) { 41 | this.log(messages.getMessage('log.connecting')); 42 | } 43 | const accessToken = flags['target-org']?.getConnection().accessToken; 44 | const instanceUrl = flags['target-org']?.getConnection().instanceUrl; 45 | 46 | if (!accessToken || !instanceUrl) { 47 | throw messages.createError('error.queryNotValid'); 48 | } 49 | this.log(messages.getMessage('log.overview')); 50 | const result = await this.queryPlan(query, accessToken, instanceUrl); 51 | return { result }; 52 | } 53 | 54 | public displayAsTable(body: QueryExplainResult): void{ 55 | const table = new Table({ 56 | head: ['Cardinality', 'Fields', 'Leading \nOperation Type', 'Relative Cost', 'Object Cardinality', 'Object Type'], 57 | colWidths: [20, 50, 20, 20, 20, 20], 58 | }); 59 | const noteTable = new Table({ 60 | head: ['Description', 'Fields', 'TableEnumOrId'], 61 | colWidths: [70, 30, 30], 62 | }); 63 | for (const plan of body.plans) { 64 | table.push([ 65 | plan.cardinality, 66 | plan.fields.toString(), 67 | plan.leadingOperationType, 68 | plan.relativeCost, 69 | plan.sobjectCardinality, 70 | plan.sobjectType, 71 | ]); 72 | 73 | for (const note of plan.notes) { 74 | noteTable.push([note.description, note.fields.toString(), note.tableEnumOrId]); 75 | } 76 | } 77 | 
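/*
 * The two tables built above summarise the REST query-plan response: one row per candidate plan
 * (cardinality, fields, leading operation type, relative cost, object type) plus any optimizer notes.
 * Example run (command id assumed from the plugin layout; the SOQL is illustrative):
 *   sf dxb data query explain --query "SELECT Id FROM Account WHERE Name = 'Acme'" --target-org myOrg
 */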
this.log(`${table.toString()}\n=== Notes\n${noteTable.toString()}`); 78 | } 79 | 80 | public async queryPlan(query: string, accessToken: string, instanceUrl: string): Promise { 81 | const url = `${instanceUrl}/services/data/v57.0/query/?explain=${encodeURIComponent(query)}`; 82 | const headers = new Headers(); 83 | headers.append('Authorization', 'Bearer ' + accessToken); 84 | headers.append('X-SFDC-Session', 'Bearer ' + accessToken); 85 | headers.append('Content-Type', 'application/json; charset=UTF-8'); 86 | headers.append('Accept', 'application/json'); 87 | const options: RequestInit = { 88 | method: 'GET', 89 | headers, 90 | }; 91 | try { 92 | const response = await fetch(url, options); 93 | const body: QueryExplainResult = (await response.json()) as QueryExplainResult; 94 | if (!body.plans || body.plans.length === 0) { 95 | this.log(messages.getMessage('log.noExplanation')); 96 | this.log(JSON.stringify(body)); 97 | }else{ 98 | this.displayAsTable(body); 99 | } 100 | return body; 101 | } catch (error) { 102 | const e = error as Error; 103 | throw messages.createError('error.unexpected', undefined, undefined, e, e); 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/commands/dxb/data/transform.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { JsonMap } from '@salesforce/ts-types'; 4 | import * as fs from 'fs-extra'; 5 | import * as csvpModule from 'csv-parser'; 6 | const csvp = csvpModule.default; 7 | import { createObjectCsvWriter as createCsvWriter } from 'csv-writer'; 8 | import { Messages } from '@salesforce/core'; 9 | 10 | type Header = { 11 | id: string; 12 | title: string; 13 | }; 14 | type GenericObject = { 15 | [key: string]: any; 16 | }; 17 | export type DataTransformResult = { 18 | success: boolean; 19 | }; 20 | Messages.importMessagesDirectory(__dirname); 21 | const messages = Messages.loadMessages('dxb', 'data.transform'); 22 | export default class DataTransform extends SfCommand { 23 | public static readonly summary = messages.getMessage('summary'); 24 | 25 | public static readonly examples = messages.getMessages('examples'); 26 | 27 | public static readonly flags = { 28 | 'target-org': Flags.requiredOrg(), 29 | query: Flags.string({ char: 'q', summary: messages.getMessage('flags.query.summary'), required: true }), 30 | 'object-name': Flags.string({ 31 | char: 's', 32 | summary: messages.getMessage('flags.object-name.summary'), 33 | required: true, 34 | aliases: ['objectname'], 35 | deprecateAliases: true, 36 | }), 37 | 'transform-file': Flags.file({ 38 | char: 'f', 39 | summary: messages.getMessage('flags.transform-file.summary'), 40 | exists: true, 41 | required: true, 42 | aliases: ['transform'], 43 | deprecateAliases: true, 44 | }), 45 | }; 46 | 47 | public async run(): Promise { 48 | const { flags } = await this.parse(DataTransform); 49 | const orgname = flags['target-org']?.getUsername(); 50 | const sobject = flags['object-name']; 51 | const transform: JsonMap = JSON.parse(fs.readFileSync(flags['transform-file']).toString()); 52 | const query = flags.query; 53 | 54 | this.log(`sf data query --query "${query}" --json --target-org ${orgname}`); 55 | exec(`sf data query --query "${query}" --result-format csv --target-org ${orgname} > ${sobject}_in.csv `); 56 | 57 | // const record = [ 58 | // {id: "Id", phone_country__c: 
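// Transform-file sketch (keys are field API names, values are the constant to write; the example
// value mirrors the commented snippet around this block):
//   { "Phone_Country__c": "Australia_61" }
// The command exports the query result to <object>_in.csv and writes <object>_out.csv containing
// Id plus the transformed columns.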
"Phone_Country__c"} 59 | // ]; 60 | 61 | const record: GenericObject[] = [{ id: 'Id' }]; 62 | const headers: Header[] = [{ id: 'id', title: 'Id' }]; 63 | Object.keys(transform).forEach((key) => { 64 | record[0][key] = key; 65 | headers.push({ id: key, title: key }); 66 | }); 67 | this.log(record.toString()); 68 | 69 | const csvWriter = createCsvWriter({ 70 | path: `${sobject}_out.csv`, 71 | header: headers, 72 | append: true, 73 | }); 74 | await csvWriter.writeRecords(record); 75 | 76 | fs.createReadStream(`${sobject}_in.csv`) 77 | .pipe(csvp()) 78 | .on('data', (row) => { 79 | // console.log(row["Id"]); 80 | // var record = [ 81 | // {id: row["Id"], phone_country__c: "Australia_61"} 82 | // ]; 83 | const r: GenericObject[] = [{ id: row['Id'] }]; 84 | Object.keys(transform).forEach((key) => { 85 | r[0][key] = transform[key]; 86 | }); 87 | void csvWriter.writeRecords(r); 88 | }) 89 | .on('end', () => { 90 | this.log(messages.getMessage('log.success')); 91 | }); 92 | return { success: true }; 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/commands/dxb/install.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable camelcase */ 2 | import * as path from 'path'; 3 | import * as fs from 'fs-extra'; 4 | import { SfCommand } from '@salesforce/sf-plugins-core'; 5 | import { Messages } from '@salesforce/core'; 6 | type MetadataRule = { 7 | regex: string; 8 | replaceby: string; 9 | mergefield?: string; 10 | }; 11 | type MetadataConfig = { 12 | folder: string; 13 | rules: MetadataRule[]; 14 | }; 15 | export type ProjectSetupResult = { 16 | apextemplatepath?: string; 17 | apiAlignmentExclusion?: string[]; 18 | data_plan_path?: string; 19 | defaultdurationdays: number; 20 | default_user_role?: string; 21 | deferPermissionSet?: string; 22 | deferSharingUser?: string; 23 | disableFeedTrackingHistory?: string[]; 24 | manual_config_required?: boolean; 25 | manual_config_start_url?: string; 26 | manual_steps?: string[]; 27 | orgdefault_config?: MetadataConfig[]; 28 | packages?: string[]; 29 | post_legacy_packages?: string[]; 30 | pre_legacy_packages?: string[]; 31 | userPermissionsKnowledgeUser?: boolean; 32 | user_alias_prefix?: string; 33 | user_def_file?: string; 34 | }; 35 | Messages.importMessagesDirectory(__dirname); 36 | const messages = Messages.loadMessages('dxb', 'install'); 37 | export default class ProjectSetup extends SfCommand { 38 | public static readonly summary = messages.getMessage('summary'); 39 | 40 | public static readonly examples = messages.getMessages('examples'); 41 | 42 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 43 | public static readonly requiresProject = true; 44 | 45 | public async run(): Promise { 46 | await this.parse(ProjectSetup); 47 | this.spinner.start(messages.getMessage('spinner.start.setup')); 48 | const init: ProjectSetupResult = { 49 | apiAlignmentExclusion: [], 50 | defaultdurationdays: 30, 51 | packages: [], 52 | pre_legacy_packages: [], 53 | userPermissionsKnowledgeUser: false, 54 | deferPermissionSet: undefined, 55 | deferSharingUser: undefined, 56 | disableFeedTrackingHistory: [], 57 | manual_config_required: false, 58 | manual_config_start_url: '/ltng/switcher?destination=classic&referrer=%2Flightning%2Fsetup%2FSetupOneHome%2Fhome', 59 | manual_steps: ['- Sample: Chatter Settings > Enable Unlisted Groups'], 60 | data_plan_path: './data/sample/data-plan.json', 61 | apextemplatepath: undefined, 
62 | orgdefault_config: [ 63 | { 64 | folder: 'workflows', 65 | rules: [ 66 | { 67 | regex: '.+', 68 | replaceby: '{{mergevalue}}', 69 | mergefield: 'username', 70 | }, 71 | { 72 | regex: '.+', 73 | replaceby: 'CurrentUser', 74 | }, 75 | ], 76 | }, 77 | { 78 | folder: 'emailservices', 79 | rules: [ 80 | { 81 | regex: '.+', 82 | replaceby: '{{mergevalue}}', 83 | mergefield: 'username', 84 | }, 85 | ], 86 | }, 87 | { 88 | folder: 'autoResponseRules', 89 | rules: [ 90 | { 91 | regex: '.+', 92 | replaceby: '{{mergevalue}}', 93 | mergefield: 'username', 94 | }, 95 | { 96 | regex: '.+', 97 | replaceby: '{{mergevalue}}', 98 | mergefield: 'username', 99 | }, 100 | ], 101 | }, 102 | { 103 | folder: 'dashboards', 104 | rules: [ 105 | { 106 | regex: 'LoggedInUser', 107 | replaceby: 'SpecifiedUser', 108 | }, 109 | ], 110 | }, 111 | { 112 | folder: 'approvalProcesses', 113 | rules: [ 114 | { 115 | regex: '.+', 116 | replaceby: '{{mergevalue}}', 117 | mergefield: 'username', 118 | }, 119 | ], 120 | }, 121 | ], 122 | }; 123 | 124 | const config: any = JSON.parse(fs.readFileSync('sfdx-project.json').toString()); 125 | if (!config.plugins) { 126 | config['plugins'] = {}; 127 | } 128 | config['plugins']['dxb'] = init; 129 | fs.writeFileSync('sfdx-project.json', JSON.stringify(config, null, 2)); 130 | this.log(messages.getMessage('log.dxbAdded')); 131 | const dxbSchemaGen = JSON.parse(fs.readFileSync(path.join(__dirname, '../../utils/documentinfo.json')).toString()); 132 | fs.writeFileSync('config/dxb-schemagen-def.json', JSON.stringify(dxbSchemaGen, null, 2)); 133 | this.log(messages.getMessage('log.schemagen')); 134 | this.spinner.stop(messages.getMessage('spinner.stop.done')); 135 | this.log('\x1b[34m\x1b[45m\x1b[5m%s', '\n\n' + messages.getMessage('log.welcome') + '\n\n'); 136 | return init; 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/commands/dxb/junit/check.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | import * as xml2js from 'xml2js'; 5 | 6 | const CONSOLE_COLORS = { 7 | blue: '\x1b[34m', 8 | green: '\x1b[32m', 9 | red: '\x1b[31m', 10 | white: '\x1b[37m', 11 | yellow: '\x1b[33m', 12 | }; 13 | type Test = { 14 | ClassName: string; 15 | TestName: string; 16 | time: number; 17 | Diff: string; 18 | }; 19 | 20 | Messages.importMessagesDirectory(__dirname); 21 | const messages = Messages.loadMessages('dxb', 'junit.check'); 22 | 23 | export type JunitCheckResult = { 24 | success: boolean; 25 | }; 26 | 27 | export default class JunitCheck extends SfCommand { 28 | public static readonly summary = messages.getMessage('summary'); 29 | 30 | public static readonly examples = ['$ sfdx dxb:junit:check ']; 31 | 32 | public static readonly flags = { 33 | 'junit-path': Flags.file({ 34 | char: 'p', 35 | summary: messages.getMessage('flags.junit-path.summary'), 36 | required: true, 37 | exists: true, 38 | aliases: ['junitpath'], 39 | deprecateAliases: true, 40 | }), 41 | 'time-threshold': Flags.string({ 42 | char: 't', 43 | summary: messages.getMessage('flags.time-threshold.summary'), 44 | default: '1', 45 | aliases: ['timetreshold'], 46 | deprecateAliases: true, 47 | }), 48 | 'flag-as-error': Flags.boolean({ 49 | char: 'e', 50 | summary: messages.getMessage('flags.flag-as-error.summary'), 51 | default: false, 52 | aliases: ['flagaserror'], 53 | deprecateAliases: 
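/*
 * Typical use (invocation style taken from the examples above): scan a JUnit XML report for test
 * methods whose time attribute meets or exceeds --time-threshold and, with --flag-as-error, rewrite
 * them as failures so CI breaks on slow tests.
 *   sfdx dxb:junit:check --junit-path test-results/junit.xml --time-threshold 2 --flag-as-error
 */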
true, 54 | }), 55 | }; 56 | 57 | public async run(): Promise { 58 | const { flags } = await this.parse(JunitCheck); 59 | const junitpath = flags['junit-path']; 60 | const threshold = Number(flags['time-threshold']); 61 | const flagaserror = flags['flag-as-error']; 62 | await this.readJunit(junitpath, threshold, flagaserror); 63 | return { success: true }; 64 | } 65 | /** 66 | * Parses a JUnit XML file and modifies it to include failure information for slow tests. 67 | * 68 | * @param junitPath - The file path of the JUnit XML file to be parsed. 69 | * @param threshold - The time threshold (in milliseconds) to consider a test method "slow". 70 | * @throws SfError - If the JUnit file is not found. 71 | */ 72 | // eslint-disable-next-line class-methods-use-this 73 | private async readJunit(junitPath: string, threshold: number, flagaserror: boolean): Promise { 74 | const data = fs.readFileSync(junitPath, { encoding: 'utf-8' }); 75 | const result: any = await xml2js.parseStringPromise(data); 76 | 77 | let numSlowTests = 0; 78 | const slowTests: Test[] = []; 79 | result.testsuites.testsuite.forEach((ts: any) => { 80 | ts.testcase.forEach((testcase: any) => { 81 | const time = parseFloat(testcase.$.time); 82 | 83 | if (!testcase.failure && time >= threshold) { 84 | numSlowTests++; 85 | if (numSlowTests === 1) { 86 | this.log(`${CONSOLE_COLORS.yellow}%s\x1b[0m`, messages.getMessage('log.slowUnitTest', [threshold])); 87 | } 88 | slowTests.push({ 89 | ClassName: testcase.$.classname, 90 | TestName: testcase.$.name, 91 | time, 92 | Diff: `+${time - threshold}s`, 93 | }); 94 | if (flagaserror) { 95 | testcase.failure = { 96 | _: `Class.${testcase.$.classname as string}.${testcase.$.name as string}: line 0, column 0`, 97 | $: { 98 | message: messages.getMessage('error.performance.tooSlow'), 99 | }, 100 | }; 101 | } 102 | } 103 | }); 104 | }); 105 | 106 | if (slowTests.length > 0) { 107 | this.table(slowTests, { 108 | ClassName: { header: 'CLASSNAME' }, 109 | TestName: { header: 'TESTNAME' }, 110 | time: { header: 'TIME' }, 111 | Diff: { header: 'DIFF' }, 112 | }); 113 | } 114 | 115 | if (numSlowTests > 0 && flagaserror) { 116 | const builder = new xml2js.Builder({ cdata: true }); 117 | const xml = builder.buildObject(result); 118 | fs.writeFileSync(junitPath, xml); 119 | throw messages.createError('error.performance'); 120 | } 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/commands/dxb/lwc/test/run.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import * as path from 'path'; 3 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 4 | import { Messages, PackageDir, SfProject } from '@salesforce/core'; 5 | import * as fs from 'fs-extra'; 6 | import { getComponentsFromManifest } from '../../../../utils/utils'; 7 | 8 | Messages.importMessagesDirectory(__dirname); 9 | const messages = Messages.loadMessages('dxb', 'lwc.test.run'); 10 | 11 | export type LwcTestRunResult = { 12 | result: string; 13 | }; 14 | 15 | export default class LwcTestRun extends SfCommand { 16 | public static readonly summary = messages.getMessage('summary'); 17 | public static readonly description = messages.getMessage('description'); 18 | public static readonly examples = messages.getMessages('examples'); 19 | 20 | public static readonly flags = { 21 | test: Flags.string({ 22 | summary: messages.getMessage('flags.test.summary'), 23 | char: 't', 24 | multiple: true, 25 | 
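/*
 * Usage sketch (command id assumed from the plugin layout): run Jest tests for specific LWC bundles,
 * or derive the bundle list from a package.xml manifest instead of naming components by hand.
 *   sf dxb lwc test run --test myComponent --fail-on-error
 *   sf dxb lwc test run --manifest manifest/package.xml --root-dir force-app
 * --test and --manifest are mutually exclusive, as declared by the flag definitions here.
 */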
exclusive: ['manifest'], 26 | }), 27 | 'fail-on-error': Flags.boolean({ 28 | summary: messages.getMessage('flags.fail-on-error.summary'), 29 | char: 'e', 30 | }), 31 | 'root-dir': Flags.directory({ 32 | summary: messages.getMessage('flags.root-dir.summary'), 33 | char: 'd', 34 | multiple: true, 35 | exists: true, 36 | }), 37 | manifest: Flags.file({ 38 | summary: messages.getMessage('flags.manifest.summary'), 39 | char: 'x', 40 | exists: true, 41 | exclusive: ['test'], 42 | }), 43 | }; 44 | 45 | protected packageDirectories: PackageDir[] = []; 46 | protected projectConfig: any; 47 | 48 | public async run(): Promise { 49 | const { flags } = await this.parse(LwcTestRun); 50 | let components = flags.test; 51 | if (flags.manifest) { 52 | components = await this.processFromPackageXmlContent(flags.manifest); 53 | if (!components) { 54 | return { 55 | result: messages.getMessage('error.invalidComponents'), 56 | }; 57 | } 58 | } 59 | 60 | this.projectConfig = await (await SfProject.resolve()).resolveProjectConfig(); 61 | this.packageDirectories = this.projectConfig.packageDirectories; 62 | // either use specified root directory or all directories in the sfdx-project.json 63 | const roots = flags['root-dir'] ?? this.packageDirectories.map((packageDir) => packageDir.path); 64 | 65 | // sanitize input, to ensure specified components to test actually exist 66 | const validTestNames = roots.reduce((acc: string[], cur: string): string[] => { 67 | acc.push(...this.sanitize(components, cur)); 68 | return acc; 69 | }, []); 70 | if (components && components.length > 0 && validTestNames.length === 0) { 71 | throw messages.createError('error.invalidComponents'); 72 | } 73 | 74 | const issues: string | undefined = this.testComponents(validTestNames, roots); 75 | if (flags['fail-on-error'] && issues) { 76 | throw messages.createError('error.issues', [issues]); 77 | } else if (issues) { 78 | return { 79 | result: messages.createWarning('warning.issues', [issues]).message, 80 | }; 81 | } else { 82 | return { 83 | result: messages.getMessage('success'), 84 | }; 85 | } 86 | } 87 | 88 | // eslint-disable-next-line class-methods-use-this 89 | private async processFromPackageXmlContent(manifest: string): Promise { 90 | try { 91 | const componentsFromManifest = await getComponentsFromManifest(manifest, 'LightningComponentBundle'); 92 | if (componentsFromManifest.length === 1 && componentsFromManifest[0] === '*') { 93 | return []; 94 | } 95 | if (componentsFromManifest.length === 0) { 96 | return undefined; 97 | } 98 | return componentsFromManifest; 99 | } catch (err) { 100 | throw messages.createError('error.processManifest'); 101 | } 102 | } 103 | 104 | /** 105 | * Sanitize input to ensure no malignent calls are made 106 | * 107 | * @param specificComponents Contains the names of specific components to test 108 | * @param root The root directory of the location of the tests 109 | * @returns the names of test that actually exist or an empty list of none exist 110 | */ 111 | // eslint-disable-next-line class-methods-use-this 112 | private sanitize(specificComponents: string[] | undefined, root: string): string[] { 113 | if (specificComponents) { 114 | if (!fs.pathExistsSync(path.join(root, 'lwc'))) { 115 | return fs 116 | .readdirSync(`${root}`) 117 | .map((dir) => this.sanitize(specificComponents, path.join(root, dir))) 118 | .flat(); 119 | } 120 | const result: string[] = []; 121 | specificComponents.forEach((compName) => { 122 | if (fs.pathExistsSync(path.join(root, 'lwc', compName))) { 123 | result.push(compName); 
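/*
 * Note on the check above: a component name is kept only if a matching bundle folder exists under
 * <root>/lwc; when a package directory has no top-level lwc folder, the method recurses into its
 * subdirectories first. Anything that does not resolve to a real bundle is dropped before the names
 * are passed to sfdx-lwc-jest.
 */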
124 | } 125 | }); 126 | 127 | return result; 128 | } else { 129 | return []; 130 | } 131 | } 132 | 133 | /** 134 | * Runs LWC tests. 135 | * 136 | * @param components contains paths to LWC tests to run 137 | * @returns the errors produced (if any) 138 | */ 139 | // eslint-disable-next-line class-methods-use-this 140 | private testComponents(components: string[] | undefined, roots: string[]): string | undefined { 141 | try { 142 | // prettier-ignore 143 | exec(`sfdx-lwc-jest -- ${components ? components.join(' ') : ''} --silent --ci --bail=false --roots=${roots.join(' ')}`); 144 | } catch (error) { 145 | return (error as Error).message.split('\n').slice(1).join('\n'); // remove first line to hide implementation details 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /src/commands/dxb/mdapi/convert.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { SfCommand, Flags } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | import * as fs from 'fs-extra'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'mdapi.convert'); 8 | 9 | export type MdapiConvertResult = { 10 | success: boolean; 11 | }; 12 | 13 | export default class MdapiConvert extends SfCommand { 14 | public static readonly summary = messages.getMessage('summary'); 15 | public static readonly description = messages.getMessage('description'); 16 | 17 | public static readonly examples = messages.getMessages('examples'); 18 | 19 | public static readonly flags = { 20 | 'output-dir': Flags.directory({ 21 | char: 'd', 22 | summary: messages.getMessage('flags.output-dir.summary'), 23 | exists: true, 24 | default: '.', 25 | aliases: ['outputdir'], 26 | deprecateAliases: true, 27 | }), 28 | 'root-dir': Flags.directory({ 29 | char: 'r', 30 | required: true, 31 | summary: messages.getMessage('flags.root-dir.summary'), 32 | exists: true, 33 | aliases: ['rootdir'], 34 | deprecateAliases: true, 35 | }), 36 | }; 37 | 38 | public async run(): Promise { 39 | const { flags } = await this.parse(MdapiConvert); 40 | const rootdir = flags['root-dir']; 41 | const outputdir = flags['output-dir']; 42 | 43 | this.log(`sf project convert mdapi --root-dir ${rootdir} --output-dir ${outputdir} --json`); 44 | const output: any = JSON.parse( 45 | exec(`sf project convert mdapi --root-dir ${rootdir} --output-dir ${outputdir} --json`).toString() 46 | ); 47 | const result: any[] = output.result; 48 | result.forEach((elem) => { 49 | if (elem.filePath.includes('.dup')) { 50 | const oldPath = elem.filePath.substring(0, elem.filePath.length - 4); 51 | fs.unlinkSync(oldPath); 52 | fs.renameSync(elem.filePath, oldPath); 53 | this.log(oldPath); 54 | } else { 55 | this.log(elem.filePath); 56 | } 57 | }); 58 | return { success: true }; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/commands/dxb/object/create.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages, NamedPackageDir, SfProject } from '@salesforce/core'; 4 | import * as fs from 'fs-extra'; 5 | 6 | type ObjectCreateResult = { 7 | success: boolean; 8 | }; 9 | 10 | Messages.importMessagesDirectory(__dirname); 11 | const messages = 
Messages.loadMessages('dxb', 'object.create'); 12 | export default class ObjectCreate extends SfCommand { 13 | public static readonly summary = messages.getMessage('summary'); 14 | 15 | public static readonly examples = messages.getMessages('examples'); 16 | 17 | public static readonly flags = { 18 | 'target-org': Flags.requiredOrg(), 19 | 'object-name': Flags.string({ 20 | char: 's', 21 | summary: messages.getMessage('flags.object-name.summary'), 22 | required: true, 23 | aliases: ['objectname'], 24 | deprecateAliases: true, 25 | }), 26 | push: Flags.boolean({ char: 'p', default: false, summary: messages.getMessage('flags.push.summary') }), 27 | }; 28 | 29 | private content: string = messages.getMessage('data.object'); 30 | 31 | public async run(): Promise { 32 | const { flags } = await this.parse(ObjectCreate); 33 | const orgname = flags['target-org']?.getUsername(); 34 | const pushToOrg: boolean = flags.push; 35 | const defaultPackageDir: NamedPackageDir = (await SfProject.resolve()).getDefaultPackage(); 36 | const name = flags['object-name']; 37 | 38 | const apiname = name.replace(new RegExp('[^A-Z0-9]', 'gi'), '_') + '__c'; 39 | const objectpath = `${defaultPackageDir.fullPath}/objects/${apiname}`; 40 | if (fs.existsSync(objectpath)) { 41 | throw messages.createError('error.exists'); 42 | } 43 | fs.mkdirSync(objectpath); 44 | this.content = this.content.replace(new RegExp('{{label}}', 'g'), name); 45 | 46 | await this.updateContent('description', messages.getMessage('prompt.message.description')); 47 | const sharingmodel = await this.updateContent('sharingmodel', messages.getMessage('prompt.message.sharingModel')); 48 | if (sharingmodel === 'ControlledByParent') { 49 | this.log(messages.getMessage('log.sharingControlledByParent')); 50 | let masterfield = messages.getMessage('data.master'); 51 | 52 | let prompt = await this.prompt<{ response: string }>({ 53 | type: 'input', 54 | name: 'response', 55 | message: messages.getMessage('prompt.message.masterObject'), 56 | }); 57 | const masterobject = prompt.response; 58 | 59 | prompt = await this.prompt<{ response: string }>({ 60 | type: 'input', 61 | name: 'response', 62 | message: messages.getMessage('prompt.message.masterLabel'), 63 | }); 64 | const masterlabel = prompt.response; 65 | 66 | prompt = await this.prompt<{ response: string }>({ 67 | type: 'input', 68 | name: 'response', 69 | message: messages.getMessage('prompt.message.relationshipName'), 70 | }); 71 | const relationshipLabel = prompt.response; 72 | 73 | const fieldname = masterlabel.replace(new RegExp('[^A-Z0-9]', 'gi'), '_') + '__c'; 74 | masterfield = masterfield.replace(new RegExp('{{fieldname}}', 'g'), fieldname); 75 | masterfield = masterfield.replace(new RegExp('{{fieldlabel}}', 'g'), masterlabel); 76 | masterfield = masterfield.replace(new RegExp('{{masterobject}}', 'g'), masterobject); 77 | const relationshipName = relationshipLabel.replace(new RegExp('[^A-Z0-9]', 'gi'), '_'); 78 | masterfield = masterfield.replace(new RegExp('{{relationshipLabel}}', 'g'), relationshipLabel); 79 | masterfield = masterfield.replace(new RegExp('{{relationshipName}}', 'g'), relationshipName); 80 | 81 | fs.mkdirSync(`${objectpath}/fields`); 82 | fs.writeFileSync(`${objectpath}/fields/${fieldname}.field-meta.xml`, masterfield); 83 | } 84 | 85 | // update content file 86 | const fullpath = objectpath + '/' + apiname + '.object-meta.xml'; 87 | fs.writeFileSync(fullpath, this.content); 88 | this.log(messages.getMessage('log.objectCreated', [fullpath])); 89 | 90 | if (pushToOrg) { 91 | const 
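/*
 * Deployment behaviour of the --push flag (see pushSource below): scratch orgs get a full
 * "sf project deploy start", while non-scratch orgs deploy only the newly created object file via
 * --source-directory, leaving the rest of the source untouched.
 */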
usesScratchOrg = (await flags['target-org']?.determineIfScratch()) ?? false; 92 | const output = await this.pushSource(orgname!, usesScratchOrg, fullpath); 93 | this.log(output); 94 | } 95 | 96 | return { success: true }; 97 | } 98 | 99 | private async updateContent(varName: string, question: string): Promise { 100 | const answer = await this.prompt<{ res: string }>({ 101 | type: 'input', 102 | name: 'res', 103 | message: question, 104 | }); 105 | 106 | this.content = this.content.replace(new RegExp(`{{${varName}}}`, 'g'), answer.res); 107 | 108 | return answer.res; 109 | } 110 | 111 | private async pushSource(orgname: string, usesScratchOrg: boolean, path: string): Promise { 112 | this.log('Push source to org...'); 113 | const command = usesScratchOrg 114 | ? `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname}` 115 | : `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname} --source-directory ${path}`; 116 | try { 117 | return await new Promise((resolve) => resolve(exec(command).toString())); 118 | } catch (err) { 119 | throw messages.createError('error.pushFailed'); 120 | } 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/commands/dxb/object/fields/list.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as TableModule from 'cli-table3'; 4 | const Table = TableModule.default; 5 | import { Messages } from '@salesforce/core'; 6 | 7 | Messages.importMessagesDirectory(__dirname); 8 | const messages = Messages.loadMessages('dxb', 'object.fields.list'); 9 | 10 | export type ObjectFieldsListResult = { 11 | table: string; 12 | }; 13 | 14 | export default class ObjectFieldsList extends SfCommand { 15 | public static readonly summary = messages.getMessage('summary'); 16 | 17 | public static readonly examples = messages.getMessages('examples'); 18 | 19 | public static readonly flags = { 20 | 'target-org': Flags.requiredOrg(), 21 | 'object-name': Flags.string({ 22 | char: 's', 23 | summary: messages.getMessage('flags.object-name.summary'), 24 | required: true, 25 | aliases: ['objectname'], 26 | deprecateAliases: true, 27 | }), 28 | filter: Flags.string({ char: 'f', summary: messages.getMessage('flags.filter.summary') }), 29 | }; 30 | 31 | public async run(): Promise { 32 | const { flags } = await this.parse(ObjectFieldsList); 33 | const orgname = flags['target-org']?.getUsername(); 34 | const sobject = flags['object-name']; 35 | const filter = flags.filter; 36 | 37 | try { 38 | const objectschema = this.retrievesobjectfields(orgname, sobject); 39 | let fields: Array<{ name: string; type: string }> = JSON.parse(objectschema).result.fields; 40 | if (filter) { 41 | const tmp = []; 42 | for (const field of fields) { 43 | if (field.name.toLowerCase().includes(filter.toLowerCase())) { 44 | tmp.push(field); 45 | } 46 | } 47 | fields = tmp; 48 | } 49 | const table = new Table(); 50 | for (let i = 0; i < fields.length; i = i + 4) { 51 | table.push([ 52 | fields[i] ? fields[i].name + '(' + fields[i].type + ')' : '', 53 | fields[i + 1] ? fields[i + 1].name + '(' + fields[i + 1].type + ')' : '', 54 | fields[i + 2] ? fields[i + 2].name + '(' + fields[i + 2].type + ')' : '', 55 | fields[i + 3] ? 
fields[i + 3].name + '(' + fields[i + 3].type + ')' : '', 56 | ]); 57 | } 58 | return { table: table.toString() }; 59 | } catch (err: any) { 60 | this.error(err); 61 | } 62 | } 63 | 64 | private retrievesobjectfields(orgname: string | undefined, sobject: string): string { 65 | this.log(messages.getMessage('log.retrieveSchema', [sobject])); 66 | orgname = orgname ? '-u ' + orgname : ''; 67 | return exec(`sfdx force:schema:sobject:describe -s ${sobject} ${orgname} --json`).toString(); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/commands/dxb/object/fieldset.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | import * as fs from 'fs-extra'; 5 | import { Connection, DescribeSObjectResult } from 'jsforce'; 6 | 7 | Messages.importMessagesDirectory(__dirname); 8 | const messages = Messages.loadMessages('dxb', 'object.fieldset'); 9 | 10 | export type ObjectFieldsetResult = { 11 | output: string; 12 | }; 13 | 14 | export default class ObjectFieldset extends SfCommand { 15 | public static readonly summary = messages.getMessage('summary'); 16 | 17 | public static readonly examples = messages.getMessages('examples'); 18 | 19 | public static readonly flags = { 20 | 'target-org': Flags.requiredOrg(), 21 | 'object-name': Flags.string({ 22 | char: 's', 23 | summary: messages.getMessage('flags.object-name.summary'), 24 | required: true, 25 | aliases: ['objectname'], 26 | deprecateAliases: true, 27 | }), 28 | 'fieldset-name': Flags.string({ 29 | char: 'n', 30 | summary: messages.getMessage('flags.fieldset-name.summary'), 31 | required: true, 32 | aliases: ['fieldsetname'], 33 | deprecateAliases: true, 34 | }), 35 | 'retrieve-fields': Flags.boolean({ 36 | char: 'f', 37 | summary: messages.getMessage('flags.retrieve-fields.summary'), 38 | aliases: ['retrievefields'], 39 | deprecateAliases: true, 40 | }), 41 | push: Flags.boolean({ char: 'p', summary: messages.getMessage('flags.push.summary'), default: false }), 42 | }; 43 | 44 | protected connection: Connection | undefined; 45 | 46 | private content: string = messages.getMessage('data.fieldset'); 47 | 48 | public async run(): Promise { 49 | const { flags } = await this.parse(ObjectFieldset); 50 | const orgname = flags['target-org']?.getUsername(); 51 | this.connection = flags['target-org']?.getConnection(); 52 | const sobject = flags['object-name']; 53 | const name = flags['fieldset-name']; 54 | 55 | const fsPath = `./force-app/main/default/objects/${sobject}/fieldSets`; 56 | fs.ensureDirSync(fsPath); 57 | 58 | const apiname = name.replace(new RegExp('[^A-Z0-9]', 'gi'), '_'); 59 | this.content = this.content.replace(new RegExp('{{label}}', 'g'), name); 60 | this.content = this.content.replace(new RegExp('{{fullname}}', 'g'), apiname); 61 | 62 | await this.updateContent('description', messages.getMessage('prompt.message.description')); 63 | 64 | if (flags.retrievefields) { 65 | const objectschema = await this.retrievesobjectfields(orgname!, sobject); 66 | 67 | if (objectschema?.queryable) { 68 | let fields = 'Name'; 69 | objectschema.fields.forEach((f) => { 70 | if (!f.deprecatedAndHidden && f.name !== 'Name') { 71 | fields = fields + ',' + f.name; 72 | } 73 | }); 74 | this.log(messages.getMessage('log.availableFields', [fields])); 75 | } 76 | } 77 | const prompt = await this.prompt<{ response: 
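/*
 * Flow of this command: optionally list the object's queryable fields (--retrieve-fields), prompt for
 * a comma-separated field list, then write <Name>.fieldSet-meta.xml under
 * force-app/main/default/objects/<object>/fieldSets and deploy it. Example (command id assumed):
 *   sf dxb object fieldset --object-name Account --fieldset-name "Support Fields" --retrieve-fields --target-org myOrg
 */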
string }>({ 78 | type: 'input', 79 | name: 'response', 80 | message: messages.getMessage('prompt.message.fields'), 81 | }); 82 | let fieldList = ''; 83 | prompt.response.split(',').forEach((elem) => { 84 | const displayFieldTemplate = 85 | ' \n' + ' {{fieldname}}\n' + ' \n'; 86 | fieldList += displayFieldTemplate.replace(new RegExp('{{fieldname}}', 'g'), elem.trim()); 87 | }); 88 | this.content = this.content.replace(new RegExp('{{fieldlist}}', 'g'), fieldList); 89 | 90 | // update content file 91 | const fullpath = fsPath + '/' + apiname + '.fieldSet-meta.xml'; 92 | fs.writeFileSync(fullpath, this.content); 93 | this.log(messages.getMessage('log.fieldsetCreated', [fullpath])); 94 | 95 | const output = await this.pushSource(orgname!, await flags['target-org']?.determineIfScratch(), fullpath); 96 | return { output }; 97 | } 98 | private async updateContent(varName: string, question: string): Promise { 99 | const prompt = await this.prompt<{ response: string }>({ 100 | type: 'input', 101 | name: 'response', 102 | message: question, 103 | }); 104 | 105 | this.content = this.content.replace(new RegExp(`{{${varName}}}`, 'g'), prompt.response); 106 | } 107 | 108 | private async pushSource(orgname: string, usesScratchOrg: boolean | undefined, path: string): Promise { 109 | this.log('Push source to org...'); 110 | const command = usesScratchOrg 111 | ? `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname}` 112 | : `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname} --source-directory ${path}`; 113 | try { 114 | return await new Promise((resolve) => resolve(exec(command).toString())); 115 | } catch (err) { 116 | throw messages.createError('error.pushFailed'); 117 | } 118 | } 119 | 120 | private async retrievesobjectfields(orgname: string, sobject: string): Promise { 121 | this.log(messages.getMessage('log.getFields', [sobject, orgname])); 122 | return this.connection?.describeSObject(sobject); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/commands/dxb/object/prefix.ts: -------------------------------------------------------------------------------- 1 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 2 | import { Messages } from '@salesforce/core'; 3 | import { Connection, DescribeSObjectResult } from 'jsforce'; 4 | import { DescribeGlobalSObjectResult } from 'jsforce/lib/api/soap/schema'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'object.prefix'); 8 | 9 | export type ObjectPrefixResult = { 10 | result: string; 11 | }; 12 | 13 | export default class ObjectPrefix extends SfCommand { 14 | public static readonly summary = messages.getMessage('summary'); 15 | 16 | public static readonly examples = messages.getMessages('examples'); 17 | 18 | public static readonly flags = { 19 | 'target-org': Flags.requiredOrg(), 20 | 'object-name': Flags.string({ 21 | char: 's', 22 | summary: messages.getMessage('flags.object-name.summary'), 23 | exclusive: ['prefix'], 24 | aliases: ['objectname'], 25 | deprecateAliases: true, 26 | }), 27 | prefix: Flags.string({ 28 | char: 'p', 29 | summary: messages.getMessage('flags.prefix.summary'), 30 | exclusive: ['object-name'], 31 | }), 32 | }; 33 | 34 | protected connection: Connection | undefined; 35 | 36 | public async run(): Promise { 37 | const { flags } = await this.parse(ObjectPrefix); 38 | this.connection = flags['target-org']?.getConnection(); 39 | const sobject = 
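/*
 * Two lookup directions are supported, mutually exclusive by flag definition: pass --object-name to
 * print the object's key prefix, or pass --prefix to resolve a three-character key prefix back to an
 * object name via a global describe. Example (command id assumed; '001' is only used as an
 * illustrative prefix):
 *   sf dxb object prefix --prefix 001 --target-org myOrg
 */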
flags.objectname; 40 | const prefix = flags.prefix; 41 | if (!sobject && !prefix) { 42 | throw messages.createError('error.invalidArguments'); 43 | } 44 | if (sobject) { 45 | const orgname = flags['target-org']?.getUsername(); 46 | const keyPrefix = (await this.retrievesobjectfields(orgname!, sobject))?.keyPrefix; 47 | return { result: messages.getMessage('log.result.prefix', [keyPrefix]) }; 48 | } else if (prefix) { 49 | const accessToken = flags['target-org']?.getConnection()?.accessToken; 50 | const instanceUrl = flags['target-org']?.getConnection()?.instanceUrl; 51 | 52 | if (!accessToken || !instanceUrl) { 53 | throw messages.createError('error.invalidConnection'); 54 | } 55 | 56 | let objectName; 57 | const globalschema: DescribeGlobalSObjectResult[] = (await this.retrieveGlobalSchema()) ?? []; 58 | for (const o of globalschema) { 59 | if (o.keyPrefix === prefix) { 60 | objectName = o.name; 61 | break; 62 | } 63 | } 64 | if (objectName) { 65 | return { result: messages.getMessage('log.result.objectname', [objectName]) }; 66 | } else { 67 | throw messages.createError('error.prefixNotFound'); 68 | } 69 | } else { 70 | throw messages.createError('error.unexpected'); 71 | } 72 | } 73 | 74 | private async retrievesobjectfields(orgname: string, sobject: string): Promise { 75 | this.log(messages.getMessage('log.getFields', [sobject, orgname])); 76 | return this.connection?.describeSObject(sobject); 77 | } 78 | 79 | private async retrieveGlobalSchema(): Promise { 80 | this.log(messages.getMessage('log.globalSchema')); 81 | return (await this.connection?.describeGlobal())?.sobjects; 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/commands/dxb/object/relationships/list.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as TableModule from 'cli-table3'; 4 | const Table = TableModule.default; 5 | import { Messages } from '@salesforce/core'; 6 | 7 | Messages.importMessagesDirectory(__dirname); 8 | const messages = Messages.loadMessages('dxb', 'object.relationships.list'); 9 | 10 | export type ObjectRelationshipsListResult = { 11 | table: string; 12 | }; 13 | 14 | export default class ObjectRelationshipsList extends SfCommand { 15 | public static readonly summary = messages.getMessage('summary'); 16 | 17 | public static readonly examples = messages.getMessages('examples'); 18 | 19 | public static readonly flags = { 20 | 'target-org': Flags.requiredOrg(), 21 | 'object-name': Flags.string({ 22 | char: 's', 23 | summary: messages.getMessage('flags.object-name.summary'), 24 | required: true, 25 | aliases: ['objectname'], 26 | deprecateAliases: true, 27 | }), 28 | filter: Flags.string({ char: 'f', summary: messages.getMessage('flags.filter.summary') }), 29 | }; 30 | 31 | public async run(): Promise { 32 | const { flags } = await this.parse(ObjectRelationshipsList); 33 | const orgname = flags['target-org']?.getUsername(); 34 | const sobject = flags.objectname; 35 | const filter = flags.filter; 36 | 37 | try { 38 | const objectschema = this.retrievesobjectchildrelationship(orgname, sobject); 39 | let relationShips: Array<{ relationshipName: string; childSObject: string }> = 40 | JSON.parse(objectschema).result.childRelationships; 41 | 42 | const tmp = []; 43 | for (const relationShip of relationShips) { 44 | if ( 45 | relationShip.relationshipName && 46 | (!filter || (filter && 
relationShip.relationshipName.toLowerCase().includes(filter.toLowerCase()))) 47 | ) { 48 | tmp.push(relationShip); 49 | } 50 | } 51 | relationShips = tmp; 52 | 53 | const table = new Table(); 54 | for (let i = 0; i < relationShips.length; i = i + 4) { 55 | table.push([ 56 | relationShips[i] ? relationShips[i].relationshipName + '(' + relationShips[i].childSObject + ')' : '', 57 | relationShips[i + 1] 58 | ? relationShips[i + 1].relationshipName + '(' + relationShips[i + 1].childSObject + ')' 59 | : '', 60 | relationShips[i + 2] 61 | ? relationShips[i + 2].relationshipName + '(' + relationShips[i + 2].childSObject + ')' 62 | : '', 63 | relationShips[i + 3] 64 | ? relationShips[i + 3].relationshipName + '(' + relationShips[i + 3].childSObject + ')' 65 | : '', 66 | ]); 67 | } 68 | return { table: table.toString() }; 69 | } catch (err: any) { 70 | this.error(err); 71 | } 72 | } 73 | 74 | private retrievesobjectchildrelationship(orgname: string | undefined, sobject: string): string { 75 | this.log(messages.getMessage('log.retrieveSchema', [sobject])); 76 | orgname = orgname ? '-u ' + orgname : ''; 77 | return exec(`sfdx force:schema:sobject:describe -s ${sobject} ${orgname} --json`).toString(); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/commands/dxb/object/vr/create.ts: -------------------------------------------------------------------------------- 1 | import { execSync } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as fs from 'fs-extra'; 4 | import { Messages, NamedPackageDir, SfProject } from '@salesforce/core'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'object.vr.create'); 8 | 9 | export type ObjectVrCreateResult = { 10 | success: boolean; 11 | }; 12 | 13 | export default class ObjectVrCreate extends SfCommand { 14 | public static readonly summary = messages.getMessage('summary'); 15 | 16 | public static readonly examples = messages.getMessages('examples'); 17 | 18 | public static readonly flags = { 19 | 'target-org': Flags.requiredOrg(), 20 | name: Flags.string({ char: 'n', summary: messages.getMessage('flags.name.summary'), required: true }), 21 | 'object-name': Flags.string({ 22 | char: 's', 23 | summary: messages.getMessage('flags.object-name.summary'), 24 | required: true, 25 | aliases: ['objectname'], 26 | deprecateAliases: true, 27 | }), 28 | push: Flags.boolean({ char: 'p', summary: messages.getMessage('flags.push.summary'), default: false }), 29 | }; 30 | 31 | private content: string = 32 | '\n' + 33 | '\n' + 34 | ' {{fullname}}\n' + 35 | ' true\n' + 36 | ' {{description}}\n' + 37 | ' {{formula}}\n' + 38 | ' {{errorMessage}}\n' + 39 | ''; 40 | 41 | public async run(): Promise { 42 | const { flags } = await this.parse(ObjectVrCreate); 43 | const orgname = flags['target-org']?.getUsername(); 44 | const sobject = flags['object-name']; 45 | const defaultPackageDir: NamedPackageDir = (await SfProject.resolve()).getDefaultPackage(); 46 | const name = flags.name; 47 | 48 | const vrpath = `${defaultPackageDir.fullPath}/objects/${sobject}/validationRules`; 49 | fs.ensureDirSync(vrpath); 50 | 51 | const apiname = name.replace(new RegExp('[^A-Z0-9]', 'gi'), '_'); 52 | this.content = this.content.replace(new RegExp('{{fullname}}', 'g'), apiname); 53 | 54 | await this.updateContent('description', 'Description: '); 55 | await this.updateContent('errorMessage', 'Error message: '); 56 | await 
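/*
 * Scaffold flow: prompt for description, error message and formula, then write
 * <Name>.validationRule-meta.xml under <default package>/objects/<object>/validationRules, with an
 * optional --push deploy. Example (command id assumed):
 *   sf dxb object vr create --object-name Account --name "Require Phone" --push --target-org myOrg
 */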
this.updateContent('formula', 'Formula:\n'); 57 | 58 | // update content file 59 | const fullpath = `${vrpath}/${apiname}.validationRule-meta.xml`; 60 | fs.writeFileSync(fullpath, this.content); 61 | this.log(messages.getMessage('log.vrCreated', [fullpath])); 62 | 63 | if (flags.push) { 64 | const isScratch: boolean = (await flags['target-org']?.determineIfScratch()) ?? false; 65 | await this.pushSource(orgname!, isScratch, fullpath); 66 | } 67 | return { success: true }; 68 | } 69 | 70 | private async updateContent(varName: string, question: string): Promise { 71 | const answer = await this.prompt<{ res: string }>({ 72 | type: 'input', 73 | name: 'res', 74 | message: question, 75 | }); 76 | this.content = this.content.replace(new RegExp(`{{${varName}}}`, 'g'), answer.res); 77 | } 78 | 79 | private async pushSource(orgname: string, usesScratchOrg: boolean, path: string): Promise { 80 | this.log('Push source to org...'); 81 | const command = usesScratchOrg 82 | ? `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname}` 83 | : `sf project deploy start --ignore-warnings --ignore-conflicts --target-org ${orgname} --source-directory ${path}`; 84 | try { 85 | return await new Promise((resolve) => resolve(execSync(command).toString())); 86 | } catch (err) { 87 | throw messages.createError('error.pushFailed'); 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/commands/dxb/org/data.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages } from '@salesforce/core'; 4 | 5 | type EnvRule = { 6 | source: string; 7 | target: string; 8 | }; 9 | type EnvFile = { 10 | pagePath: string; 11 | replacerules: EnvRule[]; 12 | }; 13 | type OrgDataResult = { 14 | success: boolean; 15 | }; 16 | Messages.importMessagesDirectory(__dirname); 17 | const messages = Messages.loadMessages('dxb', 'org.data'); 18 | export default class OrgData extends SfCommand { 19 | public static readonly summary = messages.getMessage('summary'); 20 | 21 | public static readonly description = messages.getMessage('description'); 22 | 23 | public static readonly examples = messages.getMessages('examples'); 24 | 25 | public static readonly flags = { 26 | config: Flags.file({ 27 | char: 'f', 28 | summary: messages.getMessage('flags.config.summary'), 29 | required: true, 30 | exists: true, 31 | }), 32 | environment: Flags.string({ char: 'e', summary: messages.getMessage('flags.environment.summary'), required: true }), 33 | }; 34 | 35 | public async run(): Promise { 36 | // flags 37 | const { flags } = await this.parse(OrgData); 38 | const config = flags.config; 39 | const environment = flags.environment; 40 | 41 | const envMapping = JSON.parse(fs.readFileSync(config).toString()); 42 | envMapping[environment].forEach((file: EnvFile) => { 43 | this.log(messages.getMessage('log.processing', [file.pagePath])); 44 | let fileContent = fs.readFileSync(file.pagePath).toString(); 45 | file.replacerules.forEach((rule) => { 46 | fileContent = fileContent.replace(new RegExp(rule.source, 'g'), rule.target); 47 | }); 48 | fs.writeFileSync(file.pagePath, fileContent); 49 | }); 50 | return { success: true }; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/commands/dxb/org/setdefault.ts: -------------------------------------------------------------------------------- 1 
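/*
 * Config sketch for the org data command defined just above (keys follow the EnvRule/EnvFile types;
 * paths and URLs are examples only): the file maps an environment name to pages whose content is
 * rewritten with regex replace rules.
 *
 *   {
 *     "uat": [
 *       { "pagePath": "force-app/main/default/pages/Home.page",
 *         "replacerules": [ { "source": "https://dev.example.com", "target": "https://uat.example.com" } ] }
 *     ]
 *   }
 */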
| import * as path from 'path'; 2 | import * as fs from 'fs'; 3 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 4 | import { SfProject, NamedPackageDir, Messages } from '@salesforce/core'; 5 | import { AnyJson, JsonArray, JsonMap, asJsonArray, hasJsonArray, hasJsonMap, hasString } from '@salesforce/ts-types'; 6 | 7 | Messages.importMessagesDirectory(__dirname); 8 | const messages = Messages.loadMessages('dxb', 'org.setdefault'); 9 | 10 | export type OrgSetdefaultResult = { 11 | success: boolean; 12 | }; 13 | 14 | export default class OrgSetdefault extends SfCommand { 15 | public static readonly summary = messages.getMessage('summary'); 16 | 17 | public static readonly examples = messages.getMessages('examples'); 18 | 19 | public static readonly flags = { 20 | 'target-org': Flags.requiredOrg(), 21 | 'base-dir': Flags.string({ 22 | char: 'd', 23 | summary: messages.getMessage('flags.base-dir.summary'), 24 | aliases: ['basedir'], 25 | deprecateAliases: true, 26 | }), 27 | }; 28 | 29 | public async run(): Promise { 30 | const { flags } = await this.parse(OrgSetdefault); 31 | const project = await SfProject.resolve(); 32 | const packageDir: NamedPackageDir = project.getDefaultPackage(); 33 | let config: JsonMap = await project.resolveProjectConfig(); 34 | if (!hasJsonMap(config.plugins, 'dxb')) { 35 | throw messages.createError('error.badConfig'); 36 | } 37 | config = config.plugins.dxb; 38 | 39 | const orgname = flags['target-org']?.getUsername(); 40 | const baseDir = flags['base-dir'] ?? packageDir.fullPath; 41 | this.log(messages.getMessage('log.welcome')); 42 | 43 | asJsonArray(config.orgdefault_config)?.forEach((element: AnyJson) => { 44 | if (hasString(element, 'folder') && hasJsonArray(element, 'rules')) { 45 | const dirpath = path.join(baseDir, element.folder); 46 | if (fs.existsSync(dirpath)) { 47 | this.log(`* Processing ${element.folder} :`); 48 | fs.readdirSync(dirpath).forEach((file) => { 49 | this.log(`> ${file}`); 50 | this.applyRules(element.rules, dirpath + '/' + file, orgname ?? 
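/*
 * Rule application (see applyRules below): for every folder listed under plugins.dxb.orgdefault_config
 * in sfdx-project.json, each file is rewritten by replacing the rule's regex with "replaceby"; when
 * mergefield is "username", the {{mergevalue}} token is substituted with the target org's username,
 * re-pointing user-specific metadata in folders such as workflows and dashboards at the current org.
 */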
''); 51 | }); 52 | } 53 | } 54 | }); 55 | return { success: true }; 56 | } 57 | // eslint-disable-next-line class-methods-use-this 58 | private applyRules(rules: JsonArray, dirfile: string, username: string): void { 59 | let content = fs.readFileSync(dirfile).toString(); 60 | rules.forEach((element) => { 61 | if (hasString(element, 'replaceby') && hasString(element, 'regex')) { 62 | let value = element.replaceby; 63 | if (hasString(element, 'mergefield') && element.mergefield === 'username') { 64 | value = value.split('{{mergevalue}}').join(username); 65 | } 66 | content = content.replace(new RegExp(element.regex, 'g'), value); 67 | } 68 | }); 69 | fs.writeFileSync(dirfile, content); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/commands/dxb/package/dependencies/install.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import { Messages, PackageDir, SfProject } from '@salesforce/core'; 4 | 5 | Messages.importMessagesDirectory(__dirname); 6 | const messages = Messages.loadMessages('dxb', 'package.dependencies.install'); 7 | 8 | export type PackageDependenciesInstallResult = { 9 | success: boolean; 10 | }; 11 | 12 | export default class PackageDependenciesInstall extends SfCommand { 13 | public static readonly summary = messages.getMessage('summary'); 14 | 15 | public static readonly examples = messages.getMessages('examples'); 16 | 17 | public static readonly flags = { 18 | 'target-org': Flags.requiredOrg(), 19 | }; 20 | 21 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 22 | public static readonly requiresProject = true; 23 | 24 | protected projectConfig: any; 25 | protected orgName: string | undefined; 26 | protected packageDirectories: PackageDir[] = []; 27 | public async run(): Promise { 28 | const { flags } = await this.parse(PackageDependenciesInstall); 29 | this.orgName = flags['target-org']?.getUsername(); 30 | // project config 31 | this.projectConfig = await (await SfProject.resolve()).resolveProjectConfig(); 32 | this.packageDirectories = this.projectConfig.packageDirectories; 33 | this.installPackages(); 34 | return { success: true }; 35 | } 36 | private installPackages(): void { 37 | this.log(exec(`sf package installed list --target-org ${this.orgName} --json`).toString()); 38 | this.log(messages.getMessage('log.alias', [this.projectConfig.packageAliases])); 39 | this.packageDirectories.forEach((pkg) => { 40 | if (pkg.dependencies) { 41 | pkg.dependencies.forEach((elem) => { 42 | try { 43 | const packageVersion = `${elem.package}@${elem.versionNumber?.replace('.LATEST', '-1')}`; 44 | this.log(messages.getMessage('log.packageVersion', [packageVersion])); 45 | const packageID: string = this.projectConfig.packageAliases[packageVersion]; 46 | this.log( 47 | `sf package install --package "${packageID}" --target-org ${this.orgName} --wait 600 --json --no-prompt` 48 | ); 49 | // console.log('Installing',packageID,installedPackage.find(p => packageID === '')); 50 | const output = JSON.parse( 51 | exec( 52 | `sf package install --package "${packageID}" --target-org ${this.orgName} --wait 600 --json --no-prompt` 53 | ).toString() 54 | ); 55 | if (output?.result && output.result.Status === 'SUCCESS') { 56 | this.log(messages.getMessage('log.installPackage', [packageID])); 57 | } else { 58 | throw 
messages.createError('error.errorInstall', [packageID]); 59 | } 60 | } catch (err) { 61 | throw messages.createError('error.cannotInstall', [JSON.stringify(err)]); 62 | } 63 | }); 64 | } 65 | }); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/commands/dxb/permissionset/create.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as xml2js from 'xml2js'; 4 | import * as js2xmlparser from 'js2xmlparser'; 5 | import { Messages } from '@salesforce/core'; 6 | 7 | type PermissionSetCreateResult = { 8 | path: string; 9 | }; 10 | Messages.importMessagesDirectory(__dirname); 11 | const messages = Messages.loadMessages('dxb', 'permissionset.create'); 12 | export default class PermissionSetCreate extends SfCommand { 13 | public static readonly summary = messages.getMessage('summary'); 14 | 15 | public static readonly examples = messages.getMessages('examples'); 16 | 17 | public static readonly flags = { 18 | profile: Flags.file({ 19 | char: 'p', 20 | summary: messages.getMessage('flags.profile.summary'), 21 | required: true, 22 | exists: true, 23 | }), 24 | 'permissionset-name': Flags.string({ 25 | char: 'n', 26 | summary: messages.getMessage('flags.permissionset-name.summary'), 27 | required: true, 28 | aliases: ['permissionsetname'], 29 | deprecateAliases: true, 30 | }), 31 | 'output-dir': Flags.directory({ 32 | char: 'r', 33 | summary: messages.getMessage('flags.output-dir.summary'), 34 | default: 'force-app/main/default/permissionsets', 35 | aliases: ['outputdir'], 36 | deprecateAliases: true, 37 | }), 38 | 'has-activation-required': Flags.boolean({ 39 | char: 'a', 40 | summary: messages.getMessage('flags.has-activation-required.summary'), 41 | default: false, 42 | aliases: ['hasactivationrequired'], 43 | deprecateAliases: true, 44 | }), 45 | license: Flags.string({ 46 | char: 'l', 47 | summary: messages.getMessage('flags.license.summary'), 48 | default: 'Salesforce', 49 | }), 50 | description: Flags.string({ 51 | char: 'd', 52 | summary: messages.getMessage('flags.description.summary'), 53 | }), 54 | }; 55 | 56 | public async run(): Promise { 57 | const { flags } = await this.parse(PermissionSetCreate); 58 | try { 59 | const permissionSetPath = `${flags['output-dir']}/${flags['permissionset-name']}.permissionset-meta.xml`; 60 | const data = await fs.readFile(flags.profile, { encoding: 'utf8' }); 61 | const profileName = flags.profile.split('/'); 62 | const result = ( 63 | await xml2js.parseStringPromise(data, { 64 | explicitArray: false, 65 | }) 66 | )?.Profile; 67 | const permissionSet: any = { 68 | label: flags['permissionset-name'], 69 | hasActivationRequired: flags['has-activation-required'], 70 | description: messages.getMessage('log.description', [profileName[profileName.length - 1]]), 71 | }; 72 | if (flags.license) permissionSet.license = flags.license; 73 | if (flags.description) permissionSet.description = flags.description; 74 | if (result.applicationVisibilities) permissionSet.applicationVisibilities = result.applicationVisibilities; 75 | if (result.classAccesses) permissionSet.classAccesses = result.classAccesses; 76 | if (result.customMetadataTypeAccesses) 77 | permissionSet.customMetadataTypeAccesses = result.customMetadataTypeAccesses; 78 | if (result.customPermissions) permissionSet.customPermissions = result.customPermissions; 79 | if (result.customSettingAccesses) 
permissionSet.customSettingAccesses = result.customSettingAccesses; 80 | if (result.externalDataSourceAccesses) 81 | permissionSet.externalDataSourceAccesses = result.externalDataSourceAccesses; 82 | if (result.flowAccesses) permissionSet.flowAccesses = result.flowAccesses; 83 | if (result.objectPermissions) permissionSet.objectPermissions = result.objectPermissions; 84 | if (result.fieldPermissions) permissionSet.fieldPermissions = result.fieldPermissions; 85 | if (result.recordTypeVisibilities) permissionSet.recordTypeVisibilities = result.recordTypeVisibilities; 86 | if (result.pageAccesses) permissionSet.pageAccesses = result.pageAccesses; 87 | if (result.tabVisibilities) permissionSet.tabVisibilities = result.tabVisibilities; 88 | if (result.userPermissions) permissionSet.userPermissions = result.userPermissions; 89 | 90 | const xml = js2xmlparser.parse('PermissionSet', permissionSet, { 91 | declaration: { 92 | encoding: 'UTF-8', 93 | }, 94 | }); 95 | 96 | await fs.writeFile(permissionSetPath, xml); 97 | this.log(messages.getMessage('log.success', [permissionSetPath])); 98 | return { path: permissionSetPath }; 99 | } catch (err: any) { 100 | this.error(err); 101 | } 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /src/commands/dxb/profile/passwordpolicies/fix.ts: -------------------------------------------------------------------------------- 1 | import { execSync as exec } from 'child_process'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as fs from 'fs-extra'; 4 | import { Messages } from '@salesforce/core'; 5 | 6 | const TARGET_MAIN_DIR = 'targetOrgPolicies'; 7 | const TARGET_POLICY_DIR = 'profilePasswordPolicies'; 8 | Messages.importMessagesDirectory(__dirname); 9 | const messages = Messages.loadMessages('dxb', 'profile.passwordpolicies.fix'); 10 | 11 | export type ProfilePasswordpoliciesFixResult = { 12 | success: boolean; 13 | }; 14 | 15 | export default class ProfilePasswordpoliciesFix extends SfCommand { 16 | public static readonly summary = messages.getMessage('summary'); 17 | 18 | public static readonly examples = ['$ sfdx dxb:profile:passwordpolicies:fix']; 19 | 20 | public static readonly flags = { 21 | 'target-org': Flags.requiredOrg(), 22 | 'source-path': Flags.directory({ 23 | char: 'd', 24 | summary: messages.getMessage('flags.source-path.summary'), 25 | default: 'force-app/main/default/profilePasswordPolicies', 26 | exists: true, 27 | aliases: ['sourcepath'], 28 | deprecateAliases: true, 29 | }), 30 | }; 31 | 32 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 33 | public static readonly requiresProject = true; 34 | 35 | private orgName: string | undefined; 36 | 37 | public async run(): Promise { 38 | const { flags } = await this.parse(ProfilePasswordpoliciesFix); 39 | const sourcepath = flags['source-path']; 40 | this.orgName = flags['target-org']?.getUsername(); 41 | try { 42 | let sourceFiles: string[] = this.getSourceFiles(sourcepath); 43 | if (sourceFiles.length === 0) { 44 | this.warn(messages.createWarning('warning.noFiles', [sourcepath])); 45 | return { success: false }; 46 | } 47 | 48 | const targetOrgPolicies = this.getTargetFiles(); 49 | if (targetOrgPolicies.length === 0) { 50 | // if there are no profile password policies on target, the dir targetOrgPolicies/profilePasswordPolicies needs to be created manually 51 | fs.ensureDirSync(`${TARGET_MAIN_DIR}/${TARGET_POLICY_DIR}`); 52 | } 53 | 54 | for (const file of 
sourceFiles) { 55 | // find the profile password policy from the target org that starts with the same profile name as the source file 56 | const targetOrgPolicyToReplace: string = ( 57 | targetOrgPolicies.find((targetOrgPolicy) => 58 | targetOrgPolicy.fullName.startsWith(file.split('profilePasswordPolicy')[0]) 59 | ) ?? { filePath: `${TARGET_MAIN_DIR}/${TARGET_POLICY_DIR}/${file}` } 60 | ).filePath; // if profile policy does not exist in target, ensure it is copied over 61 | 62 | // copy the content of the source file to the target org profile password policy. This will only copy content and not the file name from the target 63 | fs.copyFileSync(`${sourcepath}/${file}`, targetOrgPolicyToReplace); 64 | } 65 | 66 | // remove the source directory and it's content, recreate it after as an empty directory 67 | fs.emptyDirSync(sourcepath); 68 | sourceFiles = fs.readdirSync(`${TARGET_MAIN_DIR}/${TARGET_POLICY_DIR}`); 69 | 70 | // copy every file in the target org dir to the source dir, it will have the file name of the target org's policy but the content of the source org. 71 | for (const file of sourceFiles) { 72 | this.log(`copy ${TARGET_MAIN_DIR}/${TARGET_POLICY_DIR}/${file} to ${sourcepath}/${file}`); 73 | fs.copyFileSync(`${TARGET_MAIN_DIR}/${TARGET_POLICY_DIR}/${file}`, `${sourcepath}/${file}`); 74 | } 75 | return { success: true }; 76 | } catch (e: unknown) { 77 | const err = e as Error; 78 | this.error(err.message); 79 | } 80 | } 81 | 82 | /** 83 | * 84 | * @param sourcepath The path where the source profile password policies are stored 85 | * @returns string[] 86 | */ 87 | // eslint-disable-next-line class-methods-use-this 88 | public getSourceFiles(sourcepath: string): string[] { 89 | // check if sourcepath exists and continue 90 | fs.ensureDirSync(sourcepath); 91 | 92 | // get the file names for the source files from the source directory 93 | return fs.readdirSync(sourcepath); 94 | } 95 | 96 | /** 97 | * 98 | * @param sourcepath The path where the source profile password policies are stored 99 | * @returns { filePath: string, fullName: string }[] 100 | */ 101 | public getTargetFiles(): Array<{ filePath: string; fullName: string }> { 102 | // retrieve all password policies from target org, these have a different timestamp appended to the file name than the source files 103 | fs.ensureDirSync(`${TARGET_MAIN_DIR}/main/default`); 104 | return this.getAllPasswordPolicies(); 105 | } 106 | 107 | /** 108 | * Retrieves Profile Password Policies from Target Org 109 | * 110 | * @returns { filePath: string, fullName: string }[] 111 | */ 112 | public getAllPasswordPolicies(): Array<{ filePath: string; fullName: string }> { 113 | this.log( 114 | `sf project retrieve start --metadata ProfilePasswordPolicy --target-org ${this.orgName} --json --output-dir ${TARGET_MAIN_DIR}` 115 | ); 116 | const metadata: { result: { inboundFiles: Array<{ filePath: string; fullName: string }> } } = JSON.parse( 117 | exec( 118 | `sf project retrieve start --metadata ProfilePasswordPolicy --target-org ${this.orgName} --json --output-dir ${TARGET_MAIN_DIR}` 119 | ).toString() 120 | ); 121 | return metadata.result.inboundFiles; 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /src/commands/dxb/source/scanner.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 3 | import * as fs from 'fs-extra'; 4 | import { Messages } from 
'@salesforce/core'; 5 | 6 | Messages.importMessagesDirectory(__dirname); 7 | const messages = Messages.loadMessages('dxb', 'source.scanner'); 8 | type Violation = { 9 | category: string; 10 | column: string; 11 | line: string; 12 | message: string; 13 | ruleName: string; 14 | severity: string; 15 | }; 16 | export type SourceScannerResult = { 17 | result: boolean; 18 | }; 19 | 20 | export default class SourceScanner extends SfCommand<SourceScannerResult> { 21 | public static readonly summary = messages.getMessage('summary'); 22 | 23 | public static readonly examples = messages.getMessages('examples'); 24 | 25 | public static readonly flags = { 26 | 'target-org': Flags.requiredOrg(), 27 | file: Flags.file({ char: 'f', summary: messages.getMessage('flags.file.summary'), exists: true, required: true }), 28 | 'excluded-files': Flags.string({ 29 | char: 'e', 30 | summary: messages.getMessage('flags.excluded-files.summary'), 31 | aliases: ['excludedfiles'], 32 | deprecateAliases: true, 33 | }), 34 | severity: Flags.integer({ char: 's', summary: messages.getMessage('flags.severity.summary'), default: 1 }), 35 | 'high-severity-rules': Flags.string({ 36 | char: 'r', 37 | summary: messages.getMessage('flags.high-severity-rules.summary'), 38 | multiple: true, 39 | aliases: ['highseverityrules'], 40 | deprecateAliases: true, 41 | }), 42 | }; 43 | 44 | public static readonly requiresProject = true; 45 | 46 | public async run(): Promise<SourceScannerResult> { 47 | const { flags } = await this.parse(SourceScanner); 48 | const config: any = await this.project.resolveProjectConfig(); 49 | 50 | const filepath = flags.file; 51 | const excludedFilesPath = flags['excluded-files']; 52 | const highseverityrules = flags['high-severity-rules']; 53 | 54 | const severity = flags.severity; 55 | 56 | this.log(messages.getMessage('log.calculating')); 57 | const results = JSON.parse(fs.readFileSync(filepath).toString()); 58 | const excludedFiles = this.getExcludedFiles(excludedFilesPath); 59 | let throwError = false; 60 | results.forEach((elem: { fileName: string; violations: Violation[] }) => { 61 | let content = ''; 62 | const fileJson: path.ParsedPath = path.parse(elem.fileName); 63 | if (elem.violations && !excludedFiles.includes(fileJson.name)) { 64 | elem.violations.forEach((v) => { 65 | if ( 66 | parseInt(v.severity, 10) <= severity || 67 | (highseverityrules?.includes(v.ruleName) ??
68 | config?.dxb?.highseverityrules?.highseverityrules.includes(v.ruleName)) 69 | ) { 70 | content += `${fileJson.name}[${v.line} - ${v.column}] - ${v.ruleName}(${v.category}) - Severity(${v.severity}) ${v.message}\n`; 71 | throwError = true; 72 | } 73 | }); 74 | } 75 | if (content !== '') { 76 | this.log(content); 77 | } 78 | }); 79 | if (throwError) { 80 | throw messages.createError('error.violations'); 81 | } 82 | return { result: true }; 83 | } 84 | 85 | // eslint-disable-next-line class-methods-use-this 86 | private getExcludedFiles(excludedFilesPath: string | undefined): string[] { 87 | let excludedFiles: string[] = []; 88 | try { 89 | if (excludedFilesPath && fs.existsSync(excludedFilesPath)) { 90 | excludedFiles = JSON.parse(fs.readFileSync(excludedFilesPath).toString()); 91 | } else { 92 | throw new Error('No files found'); 93 | } 94 | } catch (err) { 95 | this.log(messages.getMessage('log.noExcludedFiles')); 96 | } 97 | return excludedFiles; 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/commands/dxb/user/access/why.ts: -------------------------------------------------------------------------------- 1 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core'; 2 | import { Messages } from '@salesforce/core'; 3 | import { QueryResult, Connection } from 'jsforce'; 4 | import * as TableModule from 'cli-table3'; 5 | const Table = TableModule.default; 6 | import { PermissionSet } from 'jsforce/lib/api/metadata'; 7 | import * as colors from '@colors/colors'; 8 | export type UserFindAccessResult = { 9 | success: boolean; 10 | }; 11 | Messages.importMessagesDirectory(__dirname); 12 | const messages = Messages.loadMessages('dxb', 'user.access.why'); 13 | export default class UserFindAccess extends SfCommand<UserFindAccessResult> { 14 | public static readonly summary = messages.getMessage('summary'); 15 | 16 | public static readonly examples = messages.getMessages('examples'); 17 | 18 | public static readonly flags = { 19 | 'target-org': Flags.requiredOrg(), 20 | username: Flags.string({ 21 | char: 'i', 22 | required: false, 23 | summary: messages.getMessage('flags.username.summary'), 24 | }), 25 | 'object-name': Flags.string({ 26 | char: 's', 27 | required: true, 28 | summary: messages.getMessage('flags.object-name.summary'), 29 | aliases: ['objectname'], 30 | deprecateAliases: true, 31 | }), 32 | 'field-name': Flags.string({ 33 | char: 'f', 34 | required: false, 35 | summary: messages.getMessage('flags.field-name.summary'), 36 | default: '', 37 | aliases: ['fieldname'], 38 | deprecateAliases: true, 39 | }), 40 | }; 41 | 42 | public async run(): Promise<UserFindAccessResult> { 43 | const { flags } = await this.parse(UserFindAccess); 44 | this.spinner.start('Scanning org for user access'); 45 | // Uses latest API version 46 | let username: string | undefined = flags.username; 47 | const objectname: string = flags['object-name']; 48 | const fieldname: string = flags['field-name']; 49 | if (!flags['target-org']) { 50 | throw messages.createError('error.connection'); 51 | } 52 | if (!username) { 53 | username = flags['target-org'].getUsername()!; 54 | } 55 | const connection: Connection = flags['target-org']!.getConnection()!; 56 | let fieldpermSql = ''; 57 | if (fieldname) { 58 | fieldpermSql = `,(SELECT ID, Field, Parent.Name, PermissionsEdit, PermissionsRead, SobjectType FROM FieldPerms WHERE SobjectType = '${objectname}' AND Field = '${objectname}.${fieldname}')`; 59 | } else { 60 | fieldpermSql = `,(SELECT SobjectType, Parent.Name, PermissionsViewAllRecords,
PermissionsRead, PermissionsModifyAllRecords, PermissionsEdit, PermissionsDelete, PermissionsCreate FROM ObjectPerms WHERE SobjectType = '${objectname}')`; 61 | } 62 | const soql = ` 63 | SELECT Id, Name ${fieldpermSql} 64 | FROM PermissionSet 65 | WHERE Id IN (SELECT PermissionSetId FROM PermissionSetAssignment WHERE Assignee.Username = '${username}')`; 66 | 67 | const result: QueryResult = await connection.query(soql); 68 | 69 | const headers: string[] = [ 70 | 'Permission Set Name', 71 | 'C(reate)', 72 | 'R(ead)', 73 | 'E(dit)', 74 | 'D(elete)', 75 | 'V(iew all)', 76 | 'M(odify all)', 77 | ]; 78 | const headerStyles = [ 79 | colors.green.toString(), 80 | colors.green.toString(), 81 | colors.green.toString(), 82 | colors.green.toString(), 83 | colors.green.toString(), 84 | colors.green.toString(), 85 | colors.green.toString(), 86 | ]; 87 | 88 | const t1 = new Table({ 89 | head: headers, 90 | style: { 91 | head: headerStyles, 92 | border: [colors.blue.toString()], 93 | }, 94 | colWidths: [40, 20, 20, 20, 20, 20, 30], 95 | colAligns: ['left', 'center', 'center', 'center', 'center', 'center', 'center'], 96 | truncate: '...', 97 | }); 98 | result.records.forEach((elem: PermissionSet) => { 99 | if (elem.objectPermissions && elem.objectPermissions !== null && elem.objectPermissions.length >= 1) { 100 | t1.push([ 101 | elem.fullName, 102 | elem.objectPermissions[0].allowCreate ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 103 | elem.objectPermissions[0].allowRead ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 104 | elem.objectPermissions[0].allowEdit ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 105 | elem.objectPermissions[0].allowDelete ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 106 | elem.objectPermissions[0].viewAllRecords ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 107 | elem.objectPermissions[0].modifyAllRecords ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 108 | ]); 109 | } 110 | if (elem.fieldPermissions && elem.fieldPermissions !== null && elem.fieldPermissions.length >= 1) { 111 | t1.push([ 112 | elem.fullName, 113 | '', 114 | elem.fieldPermissions[0].readable ? colors.black.bgGreen('V') : colors.white.bgRed('X'), 115 | elem.fieldPermissions[0].editable ? 
colors.black.bgGreen('V') : colors.white.bgRed('X'), 116 | '', 117 | '', 118 | '', 119 | ]); 120 | } 121 | }); 122 | this.spinner.stop(messages.getMessage('spinner.stop.done')); 123 | this.log(messages.getMessage('log.why', [username, objectname, fieldname])); 124 | this.log(t1.toString()); 125 | return { success: true }; 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export = {}; 2 | -------------------------------------------------------------------------------- /src/utils/deloitteforce_logo.txt: -------------------------------------------------------------------------------- 1 | ___ _ _ _ _ ___ 2 | / \___| | ___ (_) |_| |_ ___ / __\__ _ __ ___ ___ 3 | / /\ / _ \ |/ _ \| | __| __/ _ \/ _\/ _ \| '__/ __/ _ \ 4 | / /_// __/ | (_) | | |_| || __/ / | (_) | | | (_| __/ 5 | /___,' \___|_|\___/|_|\__|\__\___\/ \___/|_| \___\___| -------------------------------------------------------------------------------- /src/utils/documentinfo.json: -------------------------------------------------------------------------------- 1 | { 2 | "pdfOption": { 3 | "format": "A3", 4 | "orientation": "portrait", 5 | "border": "10mm", 6 | "header": { 7 | "height": "45mm", 8 | "contents": "" 9 | }, 10 | "footer": { 11 | "height": "28mm", 12 | "contents": { 13 | "default": "
DXB Solution Design
{{page}}/{{pages}}
Deloitte Confidential
" 14 | } 15 | } 16 | }, 17 | "metadata": { 18 | "stdobjects": ["Account", "Contact", "AccountContactRelation", "Opportunity", "Asset", "Event", "Task"], 19 | "diagrams": { 20 | "standard": { 21 | "title": "Sales Cloud Data Model", 22 | "path": "src/utils/sales-cloud-overview-data-model.png" 23 | }, 24 | "custom": { 25 | "title": "Custom Data Model", 26 | "path": "src/utils/service-cloud-support-overview-data-model.png" 27 | } 28 | } 29 | }, 30 | "documentInfo": { 31 | "title": "ABC Technical Design", 32 | "subtitle": "By David Browaeys", 33 | "documentOwner": "David Browaeys", 34 | "createdDate": "21/03/2023" 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/utils/sales-cloud-overview-data-model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/davidbrowaeys/DXB/4eefeab7e2c6c01e755c1f3833a25a9bc4b4d672/src/utils/sales-cloud-overview-data-model.png -------------------------------------------------------------------------------- /src/utils/schema-manual-steps.json: -------------------------------------------------------------------------------- 1 | { 2 | "deferSharingSettings": { 3 | "path": "/lightning/setup/DeferSharingCalculations/home", 4 | "options": { 5 | "suspend": { 6 | "type": "button", 7 | "name": "rule_suspend" 8 | }, 9 | "resume": { 10 | "type": "button", 11 | "name": "rule_resume" 12 | }, 13 | "recalculate": { 14 | "type": "button", 15 | "name": "rule_recalc" 16 | } 17 | }, 18 | "applyBtn": null 19 | }, 20 | "experienceBundleSettings": { 21 | "path": "/lightning/setup/NetworkSettings/home", 22 | "options": { 23 | "enableExperienceBundleMetadata": { 24 | "type": "checkbox", 25 | "name": "pEnableExperienceBundleMetadata" 26 | } 27 | }, 28 | "applyBtn": "save" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/utils/service-cloud-support-overview-data-model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/davidbrowaeys/DXB/4eefeab7e2c6c01e755c1f3833a25a9bc4b4d672/src/utils/service-cloud-support-overview-data-model.png -------------------------------------------------------------------------------- /src/utils/templates/apex/ApexClass.cls: -------------------------------------------------------------------------------- 1 | public with sharing class {{api_name}} { 2 | public {{api_name}}() { 3 | 4 | } 5 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/ApexClassNoConstructor.cls: -------------------------------------------------------------------------------- 1 | public class {{api_name}} { 2 | 3 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/BDDUnitTestApexClass.cls: -------------------------------------------------------------------------------- 1 | @isTest 2 | private class {{api_name}} 3 | { 4 | @isTest 5 | static void itShould() 6 | { 7 | // Given 8 | 9 | 10 | // When 11 | 12 | 13 | // Then 14 | 15 | } 16 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/BatchApexClass.cls: -------------------------------------------------------------------------------- 1 | global class {{api_name}} implements Database.Batchable { 2 | 3 | String query; 4 | 5 | global {{api_name}}() { 6 | 7 | } 8 | 9 | global Database.QueryLocator start(Database.BatchableContext BC) { 
10 | return Database.getQueryLocator(query); 11 | } 12 | 13 | global void execute(Database.BatchableContext BC, List scope) { 14 | 15 | } 16 | 17 | global void finish(Database.BatchableContext BC) { 18 | 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/ControllerExtension.cls: -------------------------------------------------------------------------------- 1 | public with sharing class {{api_name}} { 2 | 3 | private final sObject mysObject; 4 | 5 | // The extension constructor initializes the private member 6 | // variable mysObject by using the getRecord method from the standard 7 | // controller. 8 | public {{api_name}}(ApexPages.StandardController stdController) { 9 | this.mysObject = (sObject)stdController.getRecord(); 10 | } 11 | 12 | public String getRecordName() { 13 | return 'Hello ' + (String)mysObject.get('name') + ' (' + (Id)mysObject.get('Id') + ')'; 14 | } 15 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/ExceptionApexClass.cls: -------------------------------------------------------------------------------- 1 | public class {{api_name}} extends Exception { 2 | 3 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/HttpCalloutMock.cls: -------------------------------------------------------------------------------- 1 | @isTest 2 | public class {{api_name}} implements HttpCalloutMock { 3 | 4 | protected Integer code; 5 | protected String status; 6 | protected String body; 7 | protected Map responseHeaders; 8 | 9 | public {{api_name}}(Integer code, String status, String body, Map responseHeaders) { 10 | this.code = code; 11 | this.status = status; 12 | this.body = body; 13 | this.responseHeaders = responseHeaders; 14 | } 15 | 16 | public HTTPResponse respond(HTTPRequest req) { 17 | //System.assertEquals('http://api.salesforce.com/foo/bar', req.getEndpoint()); 18 | //System.assertEquals('GET', req.getMethod()); 19 | 20 | // Create a fake response 21 | HttpResponse res = new HttpResponse(); 22 | res.setHeader('Content-Type', 'application/json'); 23 | res.setBody('{"foo":"bar"}'); 24 | res.setStatusCode(200); 25 | return res; 26 | } 27 | 28 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/SchedulableApexClass.cls: -------------------------------------------------------------------------------- 1 | global class {{api_name}} implements Schedulable { 2 | global void execute(SchedulableContext sc) { 3 | //MyBatchClass b = new MyBatchClass(); 4 | //database.executebatch(b); 5 | } 6 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/SelectorClass.cls: -------------------------------------------------------------------------------- 1 | /** 2 | * See https://github.com/financialforcedev/fflib-apex-common for more info 3 | * 4 | * Install library via 5 | * https://githubsfdeploy.herokuapp.com/app/githubdeploy/financialforcedev/fflib-apex-common 6 | */ 7 | 8 | /** 9 | * Class encapsulates query logic for {{ object_name }} 10 | * 11 | * https://developer.salesforce.com/page/Apex_Enterprise_Patterns_-_Selector_Layer 12 | **/ 13 | public class {{ api_name }} extends fflib_SObjectSelector 14 | { 15 | public List getSObjectFieldList() 16 | { 17 | return new List { 18 | {{ object_name }}.Id 19 | }; 20 | } 21 | 22 | public Schema.SObjectType getSObjectType() 23 | { 24 | return {{ 
object_name }}.sObjectType; 25 | } 26 | 27 | public List<{{ object_name }}> selectById(Set idSet) 28 | { 29 | return (List<{{ object_name }}>) selectSObjectsById(idSet); 30 | } 31 | 32 | /* 33 | * For more examples see https://github.com/financialforcedev/fflib-apex-common-samplecode 34 | * 35 | public List<{{ object_name }}> selectBySomethingElse(List somethings) 36 | { 37 | assertIsAccessible(); 38 | return (List<{{ object_name }}>) Database.query( 39 | String.format( 40 | 'select {0}, ' + 41 | 'from {1} ' + 42 | 'where Something__c in :somethings ' + 43 | 'order by {2}', 44 | new List { 45 | getFieldListString(), 46 | getSObjectName(), 47 | getOrderBy() } ) ); 48 | } 49 | */ 50 | } 51 | -------------------------------------------------------------------------------- /src/utils/templates/apex/ServiceClass.cls: -------------------------------------------------------------------------------- 1 | /** 2 | * See https://github.com/financialforcedev/fflib-apex-common for more info 3 | * 4 | * Install library via 5 | * https://githubsfdeploy.herokuapp.com/app/githubdeploy/financialforcedev/fflib-apex-common 6 | */ 7 | 8 | /** 9 | * Encapsulates all service layer logic for a given function or module in the application 10 | * 11 | * For more guidelines and details see 12 | * https://developer.salesforce.com/page/Apex_Enterprise_Patterns_-_Service_Layer 13 | * 14 | **/ 15 | public class {{ api_name }} 16 | { 17 | public static void doSomething() 18 | { 19 | 20 | } 21 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/UnitTestApexClass.cls: -------------------------------------------------------------------------------- 1 | @isTest 2 | private class {{api_name}} { 3 | 4 | @testSetup static void setup(){ 5 | //Implement setup method 6 | } 7 | @isTest static void test_method_one() { 8 | // Implement test code 9 | } 10 | 11 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/UrlRewriterApexClass.cls: -------------------------------------------------------------------------------- 1 | global class {{api_name}} implements Site.UrlRewriter { 2 | 3 | global PageReference mapRequestUrl(PageReference yourFriendlyUrl) { 4 | 5 | return null; 6 | } 7 | 8 | global List generateUrlFor(List yourSalesforceUrls) { 9 | 10 | return null; 11 | } 12 | 13 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/WebServiceMock.cls: -------------------------------------------------------------------------------- 1 | @isTest 2 | public class {{api_name}} implements WebServiceMock { 3 | public void doInvoke( 4 | Object stub, 5 | Object request, 6 | Map response, 7 | String endpoint, 8 | String soapAction, 9 | String requestName, 10 | String responseNS, 11 | String responseName, 12 | String responseType) { 13 | 14 | // Create response element from the autogenerated class. 15 | // Populate response element. 
16 | // Add response element to the response parameter, as follows: 17 | //response.put('response_x', responseElement); 18 | } 19 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/WebserviceClass.cls: -------------------------------------------------------------------------------- 1 | global class {{api_name}} { 2 | 3 | webservice String parameter { get; set; } 4 | 5 | public {{api_name}}() { 6 | this.parameter = 'value'; 7 | } 8 | 9 | webservice static String serviceName(){ 10 | return ''; 11 | } 12 | } -------------------------------------------------------------------------------- /src/utils/templates/apex/apex.cls-meta.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | {{api_version}} 4 | Active 5 | -------------------------------------------------------------------------------- /src/utils/templates/apex/def.json: -------------------------------------------------------------------------------- 1 | { 2 | "Constructor": { 3 | "label": "Constructor", 4 | "vars": ["api_name", "api_version"], 5 | "files": [ 6 | ["ApexClass.cls", "cls"], 7 | ["apex.cls-meta.xml", "cls-meta.xml"] 8 | ], 9 | "bundle": false 10 | }, 11 | "NoConstructor": { 12 | "label": "NoConstructor", 13 | "vars": ["api_name", "api_version"], 14 | "files": [ 15 | ["ApexClassNoConstructor.cls", "cls"], 16 | ["apex.cls-meta.xml", "cls-meta.xml"] 17 | ], 18 | "bundle": false 19 | }, 20 | "Batch": { 21 | "label": "BatchApexClass", 22 | "vars": ["api_name", "api_version"], 23 | "files": [ 24 | ["BatchApexClass.cls", "cls"], 25 | ["apex.cls-meta.xml", "cls-meta.xml"] 26 | ], 27 | "bundle": false 28 | }, 29 | "Schedulable": { 30 | "label": "SchedulableApexClass", 31 | "vars": ["api_name", "api_version"], 32 | "files": [ 33 | ["SchedulableApexClass.cls", "cls"], 34 | ["apex.cls-meta.xml", "cls-meta.xml"] 35 | ], 36 | "bundle": false 37 | }, 38 | "ServiceClass": { 39 | "label": "ServiceClass", 40 | "vars": ["api_name", "api_version"], 41 | "files": [ 42 | ["BatchApexClass.cls", "cls"], 43 | ["apex.cls-meta.xml", "cls-meta.xml"] 44 | ], 45 | "bundle": false 46 | }, 47 | "Exception": { 48 | "label": "ExceptionApexClass", 49 | "vars": ["api_name", "api_version"], 50 | "files": [ 51 | ["ExceptionApexClass.cls", "cls"], 52 | ["apex.cls-meta.xml", "cls-meta.xml"] 53 | ], 54 | "bundle": false 55 | }, 56 | "TestClass": { 57 | "label": "UnitTestApexClass", 58 | "vars": ["api_name", "api_version"], 59 | "files": [ 60 | ["UnitTestApexClass.cls", "cls"], 61 | ["apex.cls-meta.xml", "cls-meta.xml"] 62 | ], 63 | "bundle": false 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/utils/templates/trigger/apex.cls: -------------------------------------------------------------------------------- 1 | public with sharing class {{className}} extends SObjectDomain{ 2 | public {{className}}() { 3 | super(); 4 | } 5 | public override void onBeforeInsert(){ 6 | 7 | } 8 | public override void onBeforeUpdate(Map oldMap) { 9 | 10 | } 11 | } -------------------------------------------------------------------------------- /src/utils/templates/trigger/apex.cls-meta.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | {{apiVersion}} 4 | Active 5 | -------------------------------------------------------------------------------- /src/utils/templates/trigger/def.json: -------------------------------------------------------------------------------- 1 | { 2 | "vars": 
["className", "triggerName", "apiVersion", "sobject"], 3 | "files": [ 4 | ["apex.cls", "cls"], 5 | ["apex.cls-meta.xml", "cls-meta.xml"], 6 | ["trigger.trigger", "trigger"], 7 | ["trigger.trigger-meta.xml", "trigger-meta.xml"] 8 | ], 9 | "bundle": false 10 | } 11 | -------------------------------------------------------------------------------- /src/utils/templates/trigger/trigger.trigger: -------------------------------------------------------------------------------- 1 | trigger {{triggerName}} on {{sobject}} (before insert, before update) { 2 | new {{className}}().execute(); 3 | } -------------------------------------------------------------------------------- /src/utils/templates/trigger/trigger.trigger-meta.xml: -------------------------------------------------------------------------------- 1 | 2 | {{apiVersion}} 3 | Active 4 | -------------------------------------------------------------------------------- /src/utils/utils.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | import * as xml2js from 'xml2js'; 3 | 4 | /** 5 | * Processes a manifest file and retrieves the component members 6 | * 7 | * @param manifestPath Path to a manifest file 8 | * @param componentName Name of the component defined in the package.xml in the tag 9 | * @returns The members of that component as mentioned under the tags 10 | */ 11 | export async function getComponentsFromManifest(manifestPath: string, componentName: string): Promise { 12 | const data = fs.readFileSync(manifestPath); 13 | const parser = new xml2js.Parser({ explicitArray: false }); 14 | const result = await parser.parseStringPromise(data); 15 | if (result.Package.types) { 16 | const metadataTypes: any[] = Array.isArray(result.Package.types) ? result.Package.types : [result.Package.types]; 17 | return metadataTypes 18 | .filter((elem) => elem.name === componentName) 19 | .map((elem: { name: string; members: string[] }) => elem.members) 20 | .flat(); 21 | } else { 22 | return []; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: '../.eslintrc.js', 3 | // Allow describe and it 4 | env: { mocha: true }, 5 | rules: { 6 | // Allow assert style expressions. i.e. expect(true).to.be.true 7 | 'no-unused-expressions': 'off', 8 | 9 | // It is common for tests to stub out method. 10 | 11 | // Return types are defined by the source code. Allows for quick overwrites. 12 | '@typescript-eslint/explicit-function-return-type': 'off', 13 | // Mocked out the methods that shouldn't do anything in the tests. 14 | '@typescript-eslint/no-empty-function': 'off', 15 | // Easily return a promise in a mocked method. 16 | '@typescript-eslint/require-await': 'off', 17 | }, 18 | }; 19 | -------------------------------------------------------------------------------- /test/commands/apex/trigger/create.nut.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | import { expect } from 'chai'; 3 | import * as fs from 'fs-extra'; 4 | import { TestSession, execCmd } from '@salesforce/cli-plugins-testkit'; 5 | import { ApexTriggerCreateResult } from '../../../../src/commands/dxb/apex/trigger/create'; 6 | 7 | /* 8 | * Copyright (c) 2023, salesforce.com, inc. 9 | * All rights reserved. 10 | * Licensed under the BSD 3-Clause license. 
11 | * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause 12 | */ 13 | 14 | describe('apex trigger create', () => { 15 | let testSession: TestSession; 16 | 17 | before(async () => { 18 | testSession = await TestSession.create({ 19 | project: { 20 | name: 'MyTestProject', 21 | apiVersion: '56.0', 22 | }, 23 | }); 24 | }); 25 | 26 | it('should create a trigger with sobject = Account and no source-api-version', async () => { 27 | const result = ( 28 | await execCmd('dxb apex trigger create --sobject Account --json', { 29 | ensureExitCode: 0, 30 | async: true, 31 | }) 32 | ).jsonOutput?.result; 33 | expect(result?.success).to.equal(true); 34 | const f = fs.readFileSync('force-app/main/default/triggers/AccountTrigger.trigger-meta.xml').toString(); 35 | expect(f).to.include('56.0'); 36 | }); 37 | 38 | it('runs apex trigger create with sobject = Account and source-api-version = 58', async () => { 39 | const result = ( 40 | await execCmd( 41 | 'dxb apex trigger create --sobject Account --source-api-version 58 --json', 42 | { 43 | ensureExitCode: 0, 44 | async: true, 45 | } 46 | ) 47 | ).jsonOutput?.result; 48 | 49 | expect(result?.success).to.equal(true); 50 | const f = fs.readFileSync('force-app/main/default/triggers/AccountTrigger.trigger-meta.xml').toString(); 51 | expect(f).to.include('58.0'); 52 | }); 53 | 54 | after(async () => { 55 | await testSession?.clean(); 56 | }); 57 | }); 58 | -------------------------------------------------------------------------------- /test/commands/api/align.nut.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; 3 | import { expect } from 'chai'; 4 | import * as fs from 'fs-extra'; 5 | import * as shelljs from 'shelljs'; 6 | import { ApiAlignResult } from '../../../src/commands/dxb/api/align'; 7 | 8 | describe('api align NUTs', () => { 9 | let session: TestSession; 10 | 11 | beforeEach(async () => { 12 | session = await TestSession.create({ 13 | project: { 14 | gitClone: 'https://github.com/trailheadapps/dreamhouse-lwc.git', 15 | }, 16 | }); 17 | const projectConfig = JSON.parse(fs.readFileSync('sfdx-project.json').toString()); 18 | projectConfig.sourceApiVersion = '57.0'; 19 | fs.writeFileSync('sfdx-project.json', JSON.stringify(projectConfig), { encoding: 'utf8' }); 20 | execCmd('dxb install', { ensureExitCode: 0 }); 21 | shelljs.rm('-rf', 'force-app/main/default/lwc'); 22 | }); 23 | 24 | afterEach(async () => { 25 | await session?.clean(); 26 | }); 27 | 28 | it('should align api versions of all metadata files and package directories', async () => { 29 | const command = 'dxb api align --json'; 30 | const output = await execCmd(command, { async: true }); 31 | expect(output.jsonOutput?.result.success).to.equal(true); 32 | fs.readdirSync('force-app/main/default/classes') 33 | .filter((f) => f.endsWith('meta.xml')) 34 | .forEach((f) => { 35 | const metadata = fs.readFileSync(path.join('force-app/main/default/classes', f)).toString(); 36 | expect(metadata?.includes('57.0')).to.equal(true); 37 | }); 38 | }); 39 | 40 | it('should exclude certain metadata for alignment', async () => { 41 | const command = 'dxb api align --metadata-type ApexClass --json'; 42 | const output = await execCmd(command, { async: true }); 43 | expect(output.jsonOutput?.result.success).to.equal(true); 44 | fs.readdirSync('force-app/main/default/classes') 45 | .filter((f) => 
f.endsWith('meta.xml')) 46 | .forEach((f) => { 47 | const metadata = fs.readFileSync(path.join('force-app/main/default/classes', f)).toString(); 48 | expect(metadata?.includes('57.0')).to.equal(true); 49 | }); 50 | 51 | fs.readdirSync('force-app/main/default/flows') 52 | .filter((f) => f.endsWith('meta.xml')) 53 | .forEach((f) => { 54 | const metadata = fs.readFileSync(path.join('force-app/main/default/flows', f)).toString(); 55 | expect(metadata?.includes('57.0')).to.equal(false); 56 | }); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /test/commands/install.nut.ts: -------------------------------------------------------------------------------- 1 | import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; 2 | import { expect } from 'chai'; 3 | import * as fs from 'fs-extra'; 4 | import { ProjectSetupResult } from '../../src/commands/dxb/install'; 5 | 6 | describe('install NUTs', () => { 7 | let session: TestSession; 8 | 9 | before(async () => { 10 | session = await TestSession.create({ devhubAuthStrategy: 'NONE', project: {} }); 11 | }); 12 | 13 | after(async () => { 14 | await session?.clean(); 15 | }); 16 | 17 | it('should update the sfdx-project.json file with --json', async () => { 18 | const command = 'dxb install --json'; 19 | const result = execCmd(command, { ensureExitCode: 0 }); 20 | const output: ProjectSetupResult = result.jsonOutput?.result ?? { defaultdurationdays: -1 }; 21 | const config = JSON.parse(fs.readFileSync('sfdx-project.json').toString()); 22 | expect(config).to.have.property('plugins'); 23 | expect(config.plugins).to.have.property('dxb'); 24 | expect(Object.keys(config.plugins.dxb).toString()).to.equal(Object.keys(output).toString()); 25 | }); 26 | it('should update the sfdx-project.json file without --json', async () => { 27 | const command = 'dxb install'; 28 | const result = execCmd(command, { ensureExitCode: 0 }); 29 | const consoleOutput = result.shellOutput; 30 | expect(consoleOutput).to.contain('Welcome to DXB CLI! 
Happy coding!'); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/commands/static/create.nut.ts: -------------------------------------------------------------------------------- 1 | import { TestSession } from '@salesforce/cli-plugins-testkit'; 2 | 3 | describe('static create NUTs', () => { 4 | let session: TestSession; 5 | 6 | beforeEach(async () => { 7 | session = await TestSession.create({ 8 | project: { 9 | gitClone: 'https://github.com/trailheadapps/dreamhouse-lwc.git', 10 | }, 11 | }); 12 | }); 13 | 14 | afterEach(async () => { 15 | await session?.clean(); 16 | }); 17 | 18 | it('should create a static resource', async () => { 19 | // const name = 'TestStatic'; 20 | // const fileName = 'teststatic.png'; 21 | // const command = `static create --name ${name} --file ${fileName} --target-dir src/staticresources`; 22 | // const output = await execInteractiveCmd( 23 | // command, 24 | // { 25 | // 'Description: ': ['teststatic', Interaction.ENTER], 26 | // 'Content Type: ': ['image/png', Interaction.ENTER], 27 | // 'Cache Control(Public|Private): ': ['Private', Interaction.ENTER], 28 | // }, 29 | // { ensureExitCode: 0 } 30 | // ); 31 | // console.log(output); 32 | // const createdFiles = fs.readdirSync('src/staticresources'); 33 | // expect(createdFiles.length).to.equal(2); 34 | // expect(createdFiles.toString()).to.contain('teststatic.png').and.to.contain('teststatic.resource-meta.xml'); 35 | // const metadatafile = fs.readFileSync('teststatic.resource-meta.xml').toString(); 36 | // expect(metadatafile.includes('Private')).to.equal(true); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@salesforce/dev-config/tsconfig-test-strict", 3 | "include": ["./**/*.ts"], 4 | "compilerOptions": { 5 | "skipLibCheck": true 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@salesforce/dev-config/tsconfig-strict", 3 | "compilerOptions": { 4 | "outDir": "lib", 5 | "rootDir": "src", 6 | "declaration": false, 7 | "skipLibCheck": true, 8 | "esModuleInterop": true 9 | }, 10 | "include": ["./src/**/*.ts", "/src/utils/*"] 11 | } 12 | -------------------------------------------------------------------------------- /tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@salesforce/dev-config/tslint" 3 | } 4 | --------------------------------------------------------------------------------