├── plugins ├── style.css └── index.js ├── .npmignore ├── src ├── index.ts ├── impl │ ├── project │ │ ├── metadata │ │ │ ├── diffgenerators │ │ │ │ ├── export.ts │ │ │ │ └── diffGenerator.ts │ │ │ └── DataModelSourceDiffImpl.ts │ │ └── diff │ │ │ └── permsetDiff.ts │ ├── source │ │ └── profiles │ │ │ ├── worker.js │ │ │ ├── reconcileWorker.ts │ │ │ └── profileSync.ts │ ├── parser │ │ ├── listeners │ │ │ └── ApexTypeListener.ts │ │ └── ApexTypeFetcher.ts │ ├── user │ │ └── passwordgenerateimpl.ts │ ├── pool │ │ └── scratchorg │ │ │ ├── poolListImpl.ts │ │ │ └── PoolDeleteImpl.ts │ ├── package │ │ └── version │ │ │ ├── packageInfo.ts │ │ │ └── packageVersionCoverage.ts │ └── metadata │ │ └── writer │ │ └── profileWriter.ts ├── utils │ ├── delay.ts │ ├── getSafe.ts │ ├── chunkArray.ts │ ├── extract.ts │ ├── zipDirectory.ts │ ├── getDefaults.ts │ ├── xmlUtil.ts │ ├── retrieveMetadata.ts │ ├── get18DigitSalesforceId.ts │ ├── checkDeploymentStatus.ts │ ├── checkRetrievalStatus.ts │ ├── getUserDetails.ts │ ├── outputGenerator.ts │ ├── dxProjectManifestUtils.ts │ ├── getPackageInfo.ts │ ├── searchFilesInDirectory.ts │ ├── sqlitekv.ts │ ├── metadataOperation.ts │ ├── queryExecutor.ts │ └── packageUtils.ts ├── ui │ └── progressBar.ts ├── commands │ └── sfpowerkit │ │ ├── user │ │ └── password │ │ │ └── generate.ts │ │ ├── org │ │ ├── orgwideemail │ │ │ ├── verify.ts │ │ │ └── create.ts │ │ ├── cleartestresult.ts │ │ ├── scratchorg │ │ │ ├── usage.ts │ │ │ └── delete.ts │ │ ├── relaxiprange.ts │ │ ├── profile │ │ │ └── diff.ts │ │ ├── sandbox │ │ │ ├── info.ts │ │ │ └── refresh.ts │ │ └── connectedapp │ │ │ └── retrieve.ts │ │ ├── pool │ │ ├── create.ts │ │ ├── delete.ts │ │ └── fetch.ts │ │ ├── package │ │ └── version │ │ │ ├── info.ts │ │ │ └── codecoverage.ts │ │ ├── source │ │ ├── apextest │ │ │ └── list.ts │ │ ├── apextestsuite │ │ │ └── convert.ts │ │ └── customlabel │ │ │ └── reconcile.ts │ │ ├── auth │ │ └── login.ts │ │ └── project │ │ └── orgdiff.ts ├── sfpowerkitCommand.ts └── sfpowerkit.ts ├── .gitattributes ├── bin ├── run.cmd └── run ├── resources ├── default-config.json └── so_pool_config.schema.json ├── .vscode ├── extensions.json ├── settings.json └── launch.json ├── messages ├── install.json ├── org_healthcheck.json ├── sandbox.json ├── scratchorg_usage.json ├── duplicaterule_activate.json ├── duplicaterule_deactivate.json ├── trigger_activate.json ├── trigger_deactivate.json ├── matchingrule_activate.json ├── matchingrule_deactivate.json ├── sandbox_info.json ├── package_info.json ├── orgwideemail_verify.json ├── package_applypatch.json ├── org_coverage.json ├── valid.json ├── orgwideemail_create.json ├── scratchorg_pool_create.json ├── source_apextest_list.json ├── connectedapp_retrieve.json ├── project_manifest_merge.json ├── auth_login.json ├── source_picklist_generatepatch.json ├── scratchorg_delete.json ├── org_relaxiprange.json ├── scratchorg_poolhydrate.json ├── org_destruct.json ├── apextestsuite_convert.json ├── scratchorg_poollist.json ├── source_customlabel_clean.json ├── project_datamodel_diff.json ├── connectedapp_create.json ├── project_manifest_diff.json ├── azpipelines_updatevariable.json ├── org_profile_diff.json ├── source_customlabel_buildmanifest.json ├── dependency_tree_package.json ├── package_codecoverage.json ├── scratchorg_poolFetch.json ├── project_orgdiff.json ├── sandbox_refresh.json ├── sandbox_create.json ├── dependency_versionlist.json ├── profile_merge.json ├── profile_retrieve.json ├── package_build.json ├── source_customlabel_create.json ├── 
profile_reconcile.json ├── project_diff.json └── source_pmd.json ├── docs ├── sfpowerkit-logo.png └── ScratchOrgInfoSharingRule.JPG ├── .eslintignore ├── .prettierignore ├── .prettierrc.json ├── .editorconfig ├── .gitignore ├── tslint.json ├── .github ├── pull_request_template.md ├── workflows │ ├── cherry-picker.yml │ ├── release-notes-generator.yml │ ├── buildPackage.yml │ ├── promotePackage.yml │ ├── release.yml │ └── review-smoke-test.yml └── ISSUE_TEMPLATE │ ├── feature_request.md │ └── bug_report.md ├── .eslintrc.js ├── schemas └── pool │ ├── examples │ ├── so_pool_config_tag_mode.json │ └── so-pool-config_user_mode.json │ └── scratchorg-poolconfig.json ├── decision records └── template.md ├── tsconfig.json ├── LICENSE ├── contributing.md └── Third Party Notices.md /plugins/style.css: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export default {}; 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.* linguist-language=TypeScript -------------------------------------------------------------------------------- /bin/run.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | node "%~dp0\run" %* 4 | -------------------------------------------------------------------------------- /resources/default-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiversion": "50.0" 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["aaron-bond.better-comments"] 3 | } 4 | -------------------------------------------------------------------------------- /messages/install.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Install dependencies of a package" 3 | } 4 | -------------------------------------------------------------------------------- /docs/sfpowerkit-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dxatscale/sfpowerkit/HEAD/docs/sfpowerkit-logo.png -------------------------------------------------------------------------------- /messages/org_healthcheck.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Gets the health details of an org" 3 | } 4 | -------------------------------------------------------------------------------- /bin/run: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require('@oclif/command').run() 4 | .catch(require('@oclif/errors/handle')) 5 | -------------------------------------------------------------------------------- /src/impl/project/metadata/diffgenerators/export.ts: -------------------------------------------------------------------------------- 1 | export { 
SourceDiffGenerator } from './sourceDiffGenerator'; 2 | -------------------------------------------------------------------------------- /messages/sandbox.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Command to Create, Refresh and get status of a Sandbox" 3 | } 4 | -------------------------------------------------------------------------------- /docs/ScratchOrgInfoSharingRule.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dxatscale/sfpowerkit/HEAD/docs/ScratchOrgInfoSharingRule.JPG -------------------------------------------------------------------------------- /messages/scratchorg_usage.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Gets the active count of scratch org by users in a devhub" 3 | } 4 | -------------------------------------------------------------------------------- /src/utils/delay.ts: -------------------------------------------------------------------------------- 1 | export async function delay(ms: number) { 2 | return new Promise((resolve) => setTimeout(resolve, ms)); 3 | } 4 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | # don't ever lint node_modules 2 | node_modules 3 | # don't lint build output (make sure it's set to your correct build folder name) 4 | dist 5 | 6 | .eslintrc.js -------------------------------------------------------------------------------- /messages/duplicaterule_activate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Activates a duplicate rule in the target org", 3 | "nameFlagDescription": "Name of the duplicate rule" 4 | } 5 | -------------------------------------------------------------------------------- /messages/duplicaterule_deactivate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Deactivates a duplicate rule in the target org", 3 | "nameFlagDescription": "Name of the duplicate rule" 4 | } 5 | -------------------------------------------------------------------------------- /messages/trigger_activate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Activates a trigger in the target org", 3 | "nameFlagDescription": "Name of the trigger that has to be activated" 4 | } 5 | -------------------------------------------------------------------------------- /messages/trigger_deactivate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Deactivates a trigger in the target org", 3 | "nameFlagDescription": "Name of the trigger that has to be deactivated" 4 | } 5 | -------------------------------------------------------------------------------- /messages/matchingrule_activate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Activates a matching rule in the target org", 3 | "nameFlagDescription": "Name of the object that has the matching rule" 4 | } 5 | -------------------------------------------------------------------------------- /messages/matchingrule_deactivate.json: -------------------------------------------------------------------------------- 1 
| { 2 | "commandDescription": "Deactivates a matching rule in the target org", 3 | "nameFlagDescription": "Name of the object that has the matching rule" 4 | } 5 | -------------------------------------------------------------------------------- /src/utils/getSafe.ts: -------------------------------------------------------------------------------- 1 | export function getSafe(fn, defaultVal) { 2 | try { 3 | return fn(); 4 | } catch (e) { 5 | // fall back to the default value if the accessor throws 6 | return defaultVal; 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | lib 2 | tsconfig.tsbuildinfo 3 | node_modules 4 | build 5 | dist 6 | .npmrc 7 | **/README.md 8 | Third Party Notices.md 9 | *.yml 10 | CONTRIBUTING.md 11 | src_saleforce_packages/** 12 | schemas/** -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 120, 3 | "trailingComma": "es5", 4 | "useTabs": false, 5 | "tabWidth": 4, 6 | "semi": true, 7 | "singleQuote": true, 8 | "bracketSpacing": true 9 | } 10 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | 10 | [*.md] 11 | trim_trailing_whitespace = false 12 | -------------------------------------------------------------------------------- /src/impl/project/metadata/diffgenerators/diffGenerator.ts: -------------------------------------------------------------------------------- 1 | // TODO: DiffGenerator is a base class 2 | export default interface DiffGenerator { 3 | revFrom: string; 4 | revTo: string; 5 | compareRevisions(): Promise<any>; 6 | } 7 | -------------------------------------------------------------------------------- /messages/sandbox_info.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Gets the status of a sandbox", 3 | "nameFlagDescription": "Name of the sandbox", 4 | "showOnlyLatestFlagDescription": "Shows only the latest info of the sandbox record" 5 | } 6 | -------------------------------------------------------------------------------- /src/impl/source/profiles/worker.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const { workerData } = require('worker_threads'); 3 | 4 | if (workerData.path.endsWith('.ts')) require('ts-node').register(); 5 | 6 | require(path.resolve(__dirname, workerData.path)); 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *-debug.log 2 | *-error.log 3 | /.nyc_output 4 | /dist 5 | /lib 6 | /tmp 7 | node_modules 8 | oclif.manifest.json 9 | .vs 10 | .sfdx 11 | test/commands/package/version/* 12 | test/* 13 | tsconfig.tsbuildinfo 14 | .DS_Store 15 | npm-shrinkwrap.json -------------------------------------------------------------------------------- /messages/package_info.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command lists all the 
installed (managed/unmanaged) packages in an org", 3 | "loglevel": "[default: info] logging level for this command invocation", 4 | "apiversion": "API version" 5 | } 6 | -------------------------------------------------------------------------------- /messages/orgwideemail_verify.json: -------------------------------------------------------------------------------- 1 | { 2 | "orgWideEmailVerifyCommandDescription": "This command is deprecated, Please update your workflows..Verify an already created orgWide Email Address", 3 | "orgWideEmailIdDescription": "Id of the Org Wide address email to verify" 4 | } 5 | -------------------------------------------------------------------------------- /tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@salesforce/dev-config/tslint", 3 | "rules": { 4 | "variable-name": false, 5 | "no-angle-bracket-type-assertion": false, 6 | "no-string-based-set-timeout": false, 7 | "member-ordering": false 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /messages/package_applypatch.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Retrieves and applies the patch, Useful after a package upgrade in a CD Environment", 3 | "nameFlagDescription": "Name of the static resource to be patched", 4 | "usernameFlagDescription": "Username for the org" 5 | } 6 | -------------------------------------------------------------------------------- /messages/org_coverage.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Gets the apex test coverage details of an org", 3 | "outputFolderDescription": " The output dir where the output will be created", 4 | "formatFlagDescription": " The format for the test result output, Possible values are json/csv" 5 | } 6 | -------------------------------------------------------------------------------- /src/utils/chunkArray.ts: -------------------------------------------------------------------------------- 1 | export function chunkArray(perChunk: number, inputArray: any[]): Array { 2 | let chunks = [], 3 | i = 0, 4 | n = inputArray.length; 5 | 6 | while (i < n) { 7 | chunks.push(inputArray.slice(i, (i += perChunk))); 8 | } 9 | 10 | return chunks; 11 | } 12 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Summary of changes 2 | 3 | 4 | 5 | ## Checklist 6 | - [ ] Adhere to [Contribution Guidelines](https://docs.dxatscale.io/about-us/contributing-to-dx-scale) 7 | - [ ] Updates to documentation at [DX@Scale Guide](https://github.com/dxatscale/dxatscale-guide) required? 8 | - [ ] Tested changes? 
9 | -------------------------------------------------------------------------------- /messages/valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Validates a package to check whether it only contains valid metadata as per metadata coverage", 3 | "packageFlagDescription": "the package to analyze", 4 | "itemsToBypassValidationDescription": "metadata types to skip during the package validation check", 5 | "apiversion": "metadata API version to use for validation" 6 | } 7 | -------------------------------------------------------------------------------- /messages/orgwideemail_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "orgWideEmailCreateCommandDescription": "This command is deprecated, please update your workflows. Create an orgWide Email Address", 3 | "orgWideEmailAddressDescription": "Org Wide address email", 4 | "orgWideEmailDisplaynameDescription": "Org Wide address display name", 5 | "orgWideEmailAllprofileDescription": "Allow for all profiles" 6 | } 7 | -------------------------------------------------------------------------------- /messages/scratchorg_pool_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Creates a pool of prebuilt scratch orgs, which can then be consumed by users or CI", 3 | "configFilePathDescription": "Relative path to the pool configuration json file. The schema of the file can be found in the Wiki", 4 | "batchSizeDescription": "Number of scratch orgs to be created in a single batch in parallel" 5 | } 6 | -------------------------------------------------------------------------------- /messages/source_apextest_list.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command helps to get a list of all apex test classes located in the source path", 3 | "pathFlagDescription": "Source path to get all the apex tests", 4 | "resultasstringDescription": "Use this flag to get a comma separated list of apex tests as a string", 5 | "loglevel": "[default: info] logging level for this command invocation" 6 | } 7 | -------------------------------------------------------------------------------- /messages/connectedapp_retrieve.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Retrieves the consumer key for a connected app. Useful after a sandbox refresh in a CD Environment", 3 | "nameFlagDescription": "Name of the connected app to be retrieved", 4 | "usernameFlagDescription": "Username for the org", 5 | "passwordFlagDescription": "Password for the org", 6 | "securityTokenFlagDescription": "Security Token for the org" 7 | } 8 | -------------------------------------------------------------------------------- /messages/project_manifest_merge.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Merge multiple package.xml files into a single collective package.xml", 3 | "pathFlagDescription": "Paths to the package.xml files", 4 | "manifestFlagDescription": "output path to create the collective package.xml", 5 | "apiversion": "The api version to be used to create the package.xml", 6 | "loglevel": "[default: info] logging level for this command invocation" 7 | } 8 | -------------------------------------------------------------------------------- /messages/auth_login.json:
-------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Login to an org using a username/password", 3 | "usernameFlagDescription": "Username for the org", 4 | "passwordFlagDescription": "Password for the org", 5 | "securityTokenFlagDescription": "Security Token for the org", 6 | "urlFlagDescription": "URL of the instance to be authenticated, Defaults to Test URL", 7 | "aliasFlagDescription": "Alias ofthe org" 8 | } 9 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: '@typescript-eslint/parser', 4 | plugins: ['@typescript-eslint'], 5 | extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'], 6 | rules: { 7 | '@typescript-eslint/no-explicit-any': 'off', 8 | 'prefer-const': 'off', 9 | '@typescript-eslint/no-var-requires': 'off', 10 | }, 11 | env: { 12 | node: true, 13 | }, 14 | }; 15 | -------------------------------------------------------------------------------- /schemas/pool/examples/so_pool_config_tag_mode.json: -------------------------------------------------------------------------------- 1 | { 2 | "pool": { 3 | "expiry": 1, 4 | "config_file_path": "config/project-scratch-def.json", 5 | "tag": "sfpowerkit", 6 | "relax_ip_ranges": [ 7 | { 8 | "start": "49.0.0.0", 9 | "end": "49.255.255.255" 10 | }, 11 | { 12 | "start": "42.0.0.0", 13 | "end": "42.255.255.255" 14 | } 15 | ], 16 | "max_allocation": 1 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /messages/source_picklist_generatepatch.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Search picklist fields inside project and create a static resource file with picklist fields, used to solve the picklist upgrade issue in dx unlock package https://trailblazer.salesforce.com/issues_view?id=a1p3A0000003Uk2QAE", 3 | "packageFlagDescription": "Name of the package to generate the picklist patch", 4 | "loglevel": "[default: info] logging level for this command invocation" 5 | } 6 | -------------------------------------------------------------------------------- /schemas/pool/examples/so-pool-config_user_mode.json: -------------------------------------------------------------------------------- 1 | { 2 | "pool": { 3 | "expiry": 1, 4 | "config_file_path": "config/project-scratch-def.json", 5 | "script_file_path": "scripts/so_script.sh", 6 | "tag": "sfpowerkit" 7 | }, 8 | "poolUsers": [ 9 | { 10 | "max_allocation": 5, 11 | "min_allocation": 2, 12 | "username": "manivasga.murugesan@accenture.com", 13 | "expiry": 1, 14 | "priority": 1 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /messages/scratchorg_delete.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Deletes the active count of scratch org by given usermame/email in a devhub", 3 | "emailFlagDescription": "Email of the user account that has created the scratch org", 4 | "usernameFlagDescription": "Username of the scratch org to be deleted", 5 | "ignorePoolFlagDescription": "Ignore scratch orgs which belong to a pool", 6 | "dryRunFlagDescription": "Perform a dry run, without deleting the scratch orgs" 7 | } 8 | -------------------------------------------------------------------------------- 
/messages/org_relaxiprange.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command sets or removes ip range in Network access to relax security setting for a particular salesforce environment", 3 | "rangeFlagDescription": "List of ip range with comma separated. eg, 122.0.0.0-122.255.255.255,49.0.0.0-49.255.255.255", 4 | "allDescription": "Relax full iprange 0.0.0.0-255.255.255.255 in the target org", 5 | "noneDescription": "Remove any existing iprange relaxations in the target org" 6 | } 7 | -------------------------------------------------------------------------------- /messages/scratchorg_poolhydrate.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Deletes the pooled scratch orgs from the Scratch Org Pool", 3 | "tagDescription": "tag used to identify the scratch org pool", 4 | "mypoolDescription": "Filter only Scratch orgs created by current user in the pool", 5 | "allscratchorgsDescription": "Deletes all used and unused Scratch orgs from pool by the tag", 6 | "inprogressonlyDescription": "Deletes all In Progress Scratch orgs from pool by the tag" 7 | } 8 | -------------------------------------------------------------------------------- /schemas/pool/scratchorg-poolconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "pool": { 3 | "expiry": 1, 4 | "tag":"sfpowerkit", 5 | "max_allocation": 2, 6 | "config_file_path": "config/project-scratch-def.json", 7 | "relax_all_ip_ranges": true 8 | }, 9 | "poolUsers": [ 10 | { 11 | "max_allocation": 2, 12 | "min_allocation": 2, 13 | "username": "build.bot@dxatscale.io", 14 | "expiry": 1, 15 | "priority": 1 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /src/utils/extract.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | const unzipper = require('unzip-stream'); 3 | 4 | export async function extract(path: string, location: string) { 5 | return new Promise((resolve, reject) => { 6 | fs.createReadStream(path) 7 | .pipe(unzipper.Extract({ path: `${location}` })) 8 | .on('close', () => { 9 | resolve(); 10 | }) 11 | .on('error', (error) => reject(error)); 12 | }); 13 | } 14 | -------------------------------------------------------------------------------- /messages/org_destruct.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This is a helper command to ease the deployment of destructiveChanges.xml. 
The command will create the empty package.xml and package the passed destructive manifest and deploy it to the org", 3 | "destructiveManifestFlagDescription": "The path to the xml containing the members that need to be destructed, follow the instructions here to create such a file https://developer.salesforce.com/docs/atlas.en-us.daas.meta/daas/daas_destructive_changes.htm" 4 | } 5 | -------------------------------------------------------------------------------- /messages/apextestsuite_convert.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Converts an apex test suite to its constituent apex classes as a single line separated by commas, so that it can be used for metadata api deployment", 3 | "nameFlagDescription": "The name of the apextestsuite (the file name minus the apex test suite)", 4 | "packageFlagDescription": "The package where the apex test suite exists", 5 | "pathFlagDescription": "The path to be overridden in case the test suites are in a different folder in the target package" 6 | } 7 | -------------------------------------------------------------------------------- /messages/scratchorg_poollist.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Retrieves a list of active scratch orgs and their details from any pool. If this command is run with -m|--mypool, the command will retrieve the passwords for the pool created by the user who is executing the command.", 3 | "tagDescription": "tag used to identify the scratch org pool", 4 | "mypoolDescription": "Filter the tag for any additions created by the executor of the command", 5 | "allscratchorgsDescription": "Gets all used and unused Scratch orgs from pool" 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.git": true, 4 | "**/.svn": true, 5 | "**/.hg": true, 6 | "**/CVS": true, 7 | "**/.DS_Store": true, 8 | "**/node_modules": true, 9 | "**/.sfdx": true, 10 | "**/.nyc_output": true, 11 | "**/lib": true, 12 | "**/bin": true 13 | }, 14 | 15 | "search.exclude": { 16 | "**/node_modules": true, 17 | "**/.sfdx": true, 18 | "**/.bin": true, 19 | "**/.lib": true 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /messages/source_customlabel_clean.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Custom Labels are org wide, hence when the metadata is pulled down from a scratch org, the entire custom label metadata file is updated in a package repo. This command reconciles the updated custom labels to include only the labels that have the API name starting with the package name or created using the custom label create command", 3 | "pathFlagDescription": "Path to the CustomLabels.labels-meta.xml file", 4 | "packageFlagDescription": "The name of the package that needs to be reconciled" 5 | } 6 | -------------------------------------------------------------------------------- /messages/project_datamodel_diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Provides an audit history of the metadata changes between two commit IDs for the data model (CustomFields, RecordTypes, BusinessProcess)", 3 | "revisionFromDescription": "Base revision from which to generate the 
diff", 4 | "revisionToDescription": "Target revision from which to generate the diff", 5 | "packageDirectoriesDescription": "Run diff only for specified package directories", 6 | "outputDirDescription": "Directory to output the results", 7 | "csvDescription": "Output to csv file" 8 | } 9 | -------------------------------------------------------------------------------- /src/utils/zipDirectory.ts: -------------------------------------------------------------------------------- 1 | const archiver = require('archiver'); 2 | import * as fs from 'fs-extra'; 3 | 4 | export async function zipDirectory(source, out) { 5 | const archive = archiver('zip', { zlib: { level: 9 } }); 6 | const stream = fs.createWriteStream(out); 7 | 8 | return new Promise((resolve, reject) => { 9 | archive 10 | .directory(source, false) 11 | .on('error', (err) => reject(err)) 12 | .pipe(stream); 13 | 14 | stream.on('close', () => resolve()); 15 | archive.finalize(); 16 | }); 17 | } 18 | -------------------------------------------------------------------------------- /messages/connectedapp_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": " Creates a connected app in the target org for JWT based authentication, Please note it only creates Connected App with All users may self authorize option, You would need to manually edit the policies to enable admin users are pre-approved and add your profile to this connected app", 3 | "nameFlagDescription": "Name of the connected app to be created", 4 | "certificateFlagDescription": "Filepath to the private certificate for the connected app to be created", 5 | "emailFlagDescription": "Email of the connected app to be created" 6 | } 7 | -------------------------------------------------------------------------------- /plugins/index.js: -------------------------------------------------------------------------------- 1 | // src/components/plugins/index.js 2 | import React, { PropTypes, Component } from 'react'; 3 | import classnames from 'classnames'; 4 | 5 | import './style.css'; 6 | 7 | export default class plugins extends Component { 8 | // static propTypes = {} 9 | // static defaultProps = {} 10 | // state = {} 11 | 12 | render() { 13 | const { className, ...props } = this.props; 14 | return ( 15 |
<div className={classnames('plugins', className)} {...props}> 16 | plugins 17 | </div>
18 | ); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /messages/project_manifest_diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Run a diff between two package.xml files and get the difference", 3 | "sourcepathFlagDescription": "Paths to the source package.xml file", 4 | "targetpathFlagDescription": "Paths to the target package.xml file", 5 | "outputFlagDescription": "path to the diff output package.xml", 6 | "formatFlagDescription": "[default: json] The format for the output. Possible values are json/csv/xml", 7 | "apiversion": "The api version to be used to create the package.xml", 8 | "loglevel": "[default: info] logging level for this command invocation" 9 | } 10 | -------------------------------------------------------------------------------- /decision records/template.md: -------------------------------------------------------------------------------- 1 | # Process for creating parallel development streams 2 | 3 | * Status: Rejected/Accepted/Pending 4 | 5 | 6 | ## Context and Problem Statement 7 | 8 | What is the problem and in what context does it exist? 9 | 10 | ## Options 11 | 1. **First option which could solve or improve the problem** 12 | - provide details of the proposed solution 13 | 2. **Second option which could solve or improve the problem** 14 | - provide details of the proposed solution 15 | 16 | ## Decision 17 | 18 | Has a solution been accepted? If so, which option and why? If not, why not and what are the next steps? 19 | -------------------------------------------------------------------------------- /messages/azpipelines_updatevariable.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Updates a shared variable in Azure Pipelines", 3 | "usernameFlagDescription": "Username of the azure pipelines account", 4 | "patFlagDescription": "Personal access token (PAT) of the Azure Pipelines user", 5 | "keyFlagDescription": "Key of the variable to be updated", 6 | "valueFlagDescription": "Value of the variable to be updated", 7 | "variablegroupIdFlagDescription": "Id of the variable group in which the variable has to be updated", 8 | "orgFlagDescription": "Azure Pipelines Org", 9 | "projectFlagDescription": "Project in Azure Pipelines" 10 | } 11 | -------------------------------------------------------------------------------- /messages/org_profile_diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Compare profiles from the project against the target org or between two orgs (source and target)", 3 | "profileListFlagDescription": "List of profiles to compare, comma separated profile names. If not provided and no sourceusername is provided, all profiles from the source folder will be processed.", 4 | "sourceUsernameDescription": "Source org. If no profile is provided in the profilelist parameter, all the profiles from this org will be fetched", 5 | "outputFolderDescription": "Output folder. Provide the output folder if comparing profiles from the source org."
6 | } 7 | -------------------------------------------------------------------------------- /.github/workflows/cherry-picker.yml: -------------------------------------------------------------------------------- 1 | name: PR for release branch 2 | on: 3 | push: 4 | branches: 5 | - main 6 | jobs: 7 | release_pull_request: 8 | runs-on: ubuntu-latest 9 | name: release_pull_request 10 | steps: 11 | - name: checkout 12 | uses: actions/checkout@v1 13 | - name: Create PR to branch 14 | uses: gorillio/github-action-cherry-pick@master 15 | with: 16 | pr_branch: 'develop' 17 | pr_labels: 'autocreated, main' 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | GITBOT_EMAIL: dxatscale@accenture.com 21 | DRY_RUN: false -------------------------------------------------------------------------------- /src/utils/getDefaults.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | import * as path from 'path'; 3 | 4 | export default class GetDefaults { 5 | public static defaultConfig: any; 6 | private static init() { 7 | let resourcePath = path.join(__dirname, '..', '..', 'resources', 'default-config.json'); 8 | let fileData = fs.readFileSync(resourcePath, 'utf8'); 9 | this.defaultConfig = JSON.parse(fileData); 10 | } 11 | public static getApiVersion() { 12 | if (!this.defaultConfig) { 13 | this.init(); 14 | } 15 | return this.defaultConfig.apiversion; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /messages/source_customlabel_buildmanifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This Command is used to build package.xml with all customlabels as members rather than wildcard *. 
sfdx force:source:convert creates a package.xml with a customlabels wildcard. This command helps to update the package.xml with the list of label names.", 3 | "pathFlagDescription": "Path to the CustomLabels.labels-meta.xml file", 4 | "manifestFlagDescription": "path to an existing package.xml file or create a new package.xml", 5 | "apiversion": "The api version to be used to create the package.xml", 6 | "loglevel": "[default: info] logging level for this command invocation" 7 | } 8 | -------------------------------------------------------------------------------- /messages/dependency_tree_package.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command is used to compute the dependency tree details of an unlocked package", 3 | "packageDescription": "package name, package version id, or subscriber id that is installed in the org", 4 | "packagefilterDescription": "output result will filter only dependent packages", 5 | "showallDescription": "Include all items with/without dependency in the result", 6 | "formatDescription": "format of the output file to create", 7 | "outputDescription": "path to create the output", 8 | "loglevelDescription": "[default: info] logging level for this command invocation" 9 | } 10 | -------------------------------------------------------------------------------- /messages/package_codecoverage.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command is used to get the apex test coverage details of an unlocked package", 3 | "packageName": "Name of the unlocked package to check the code coverage, packageVersionNumber is required when packageName is used", 4 | "packageVersionNumber": "The complete version number format is major.minor.patch (Beta build)—for example, 1.2.0 (Beta 5), packageName is required when packageVersionNumber is used", 5 | "packageVersionId": "Package version Id to check the code coverage", 6 | "apiversion": "API version", 7 | "loglevel": "[default: info] logging level for this command invocation" 8 | } 9 | -------------------------------------------------------------------------------- /messages/scratchorg_poolFetch.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Gets an active/unused scratch org from the scratch org pool", 3 | "tagDescription": "(required) tag used to identify the scratch org pool", 4 | "mypoolDescription": "Filter the tag for any additions created by the executor of the command", 5 | "aliasDescription": "Fetch and set an alias for the org", 6 | "setdefaultusernameDescription": "set the authenticated org as the default username that all commands run against", 7 | "sendToUserDescription": "Send the credentials of the fetched scratch org to another DevHub user. Useful for situations when the pool is limited to certain users" 8 | } 9 | -------------------------------------------------------------------------------- /messages/project_orgdiff.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Compare source files against the Salesforce org and display differences. Can optionally add diff conflict markers to each file to let the developer accept or reject changes manually using a git merge tool.", 3 | "filesOrFoldersFlagDescription": "List of files or folders to compare. 
Should be only Apex classes, trigger, Aura Components, Lightning Web Components or any unsplitted metadata.", 4 | "noConflictMarkersDescription": "If set to true, the command will not add diff conflict marker to each compared file.", 5 | "outputFormatFlagDescription": " [default: json]The format for the diff output, Possible values are json/csv." 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "attach", 10 | "name": "Attach to Remote", 11 | "address": "TCP/IP address of process to be debugged", 12 | "port": 80, 13 | "localRoot": "${workspaceFolder}", 14 | "remoteRoot": "Absolute path to the remote directory containing the program" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /messages/sandbox_refresh.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Refresh a sandbox using the tooling api, ensure the user has the required permissions before using this command", 3 | "nameFlagDescription": "Name of the sandbox", 4 | "descriptionFlagDescription": "Description of the sandbox", 5 | "licenseFlagDescription": "Type of the sandbox. Valid values are DEVELOPER,DEVELOPER_PRO,PARTIAL,FULL, Provide this if the sandbox is to be rereshed from Production", 6 | "apexClassFlagDescription": "A reference to the ID of an Apex class that runs after each copy of the sandbox", 7 | "cloneFromFlagDescripton": "Name of the SandboxInfo that serves as the source org for a cloned sandbox." 8 | } 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | --- 8 | 9 | **Is your feature request related to a problem? Please describe.** 10 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 11 | 12 | **Describe the solution you'd like** 13 | A clear and concise description of what you want to happen. 14 | 15 | **Describe alternatives you've considered** 16 | A clear and concise description of any alternative solutions or features you've considered. 17 | 18 | **Additional context** 19 | Add any other context or screenshots about the feature request here. 20 | -------------------------------------------------------------------------------- /messages/sandbox_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Creates a sandbox using the tooling api, ensure the user has the required permissions before using this command", 3 | "nameFlagDescription": "Name of the sandbox", 4 | "descriptionFlagDescription": "Description of the sandbox", 5 | "licenseFlagDescription": "Type of the sandbox. Valid values are DEVELOPER,DEVELOPER_PRO,PARTIAL,FULL. 
Provide this if the sandbox is to be created from Production", 6 | "apexClassFlagDescription": "A reference to the ID of an Apex class that runs after each copy of the sandbox", 7 | "cloneFromFlagDescripton": "A reference to the ID of a SandboxInfo that serves as the source org for a cloned sandbox." 8 | } 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": true, 3 | "compilerOptions": { 4 | "baseUrl": ".", 5 | "composite": true, 6 | "paths": { "*": ["types/*"] }, 7 | "outDir": "./lib", 8 | "rootDir": "./src", 9 | "inlineSourceMap": true, 10 | "declaration": true, 11 | "noImplicitAny": false, 12 | "esModuleInterop": true, 13 | "module": "commonjs", 14 | "moduleResolution": "node", 15 | "target": "es6", 16 | "allowJs": true, 17 | "lib": ["es6"], 18 | "resolveJsonModule": true, 19 | "skipLibCheck": true 20 | }, 21 | "exclude": ["node_modules", "lib"], 22 | "include": ["./src/**/*"] 23 | } 24 | -------------------------------------------------------------------------------- /.github/workflows/release-notes-generator.yml: -------------------------------------------------------------------------------- 1 | # Trigger the workflow on milestone events 2 | on: 3 | milestone: 4 | types: [closed] 5 | workflow_dispatch: 6 | inputs: 7 | milestoneId: 8 | description: 'Milestone ID' 9 | required: true 10 | default: '1' 11 | name: Generate Release Notes 12 | jobs: 13 | create-release-notes: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@master 17 | - name: Create Release Notes 18 | uses: docker://decathlon/release-notes-generator-action:3.1.4 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | OUTPUT_FOLDER: release_notes 22 | USE_MILESTONE_TITLE: "true" 23 | -------------------------------------------------------------------------------- /src/utils/xmlUtil.ts: -------------------------------------------------------------------------------- 1 | import * as xml2js from 'xml2js'; 2 | import * as util from 'util'; 3 | import * as fs from 'fs-extra'; 4 | import * as path from 'path'; 5 | import { AnyJson } from '@salesforce/ts-types'; 6 | 7 | export default class XmlUtil { 8 | public static async xmlToJSON(directory: string) { 9 | const parser = new xml2js.Parser({ explicitArray: false }); 10 | const parseString = util.promisify(parser.parseString); 11 | let obj = await parseString(fs.readFileSync(path.resolve(directory))); 12 | return obj; 13 | } 14 | public static jSONToXML(obj: AnyJson) { 15 | const builder = new xml2js.Builder(); 16 | let xml = builder.buildObject(obj); 17 | return xml; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /messages/dependency_versionlist.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "List the dependencies of each package. The command also resolves the .LATEST to the buildversion number that is available in DevHub, and has an additional option to only list validated dependencies of a given package. 
This is useful during a CI package build process, to list the exact version numbers the package was built on.", 3 | "usedependencyvalidatedpackagesDescription": "use dependency validated packages that matches the version number schema provide", 4 | "updateprojectDescription": "overwrite the sfdx-project.json with resolved dependencies (replace .LATEST)", 5 | "filterpathsDescription": "filter packageDirectories using path to get dependent packages details only for the specified path" 6 | } 7 | -------------------------------------------------------------------------------- /src/utils/retrieveMetadata.ts: -------------------------------------------------------------------------------- 1 | import { Connection } from '@salesforce/core'; 2 | 3 | export async function retrieveMetadata(types: any, connection: Connection): Promise { 4 | const apiversion = await connection.retrieveMaxApiVersion(); 5 | let toReturn: Promise = new Promise((resolve, reject) => { 6 | connection.metadata.list(types, apiversion, function (err, metadata) { 7 | if (err) { 8 | return reject(err); 9 | } 10 | let metadata_fullnames = []; 11 | for (let i = 0; i < metadata.length; i++) { 12 | metadata_fullnames.push(metadata[i].fullName); 13 | } 14 | resolve(metadata_fullnames); 15 | }); 16 | }); 17 | 18 | return toReturn; 19 | } 20 | -------------------------------------------------------------------------------- /messages/profile_merge.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command is used in the lower environments such as ScratchOrgs , Development / System Testing Sandboxes, inorder to apply the changes made in the environment to retrieved profile, so that it can be deployed to the higher environments", 3 | "folderFlagDescription": "comma separated list of folders to scan for profiles. If ommited, the folders in the packageDirectories configuration will be used.", 4 | "profileListFlagDescription": "comma separated list of profiles. If ommited, all the profiles found in the folder(s) will be merged", 5 | "metadataFlagDescription": "comma separated list of metadata for which the permissions will be retrieved.", 6 | "deleteFlagDescription": "set this flag to delete profile files that does not exist in the org." 
7 | } 8 | -------------------------------------------------------------------------------- /src/utils/get18DigitSalesforceId.ts: -------------------------------------------------------------------------------- 1 | export function get18DigitSalesforceId(recordId) { 2 | if (recordId && recordId.length === 18) { 3 | return recordId; 4 | } else if (recordId && recordId.length === 15) { 5 | let addon = ''; 6 | for (let block = 0; block < 3; block++) { 7 | let loop = 0; 8 | for (let position = 0; position < 5; position++) { 9 | let current = recordId.charAt(block * 5 + position); 10 | if (current >= 'A' && current <= 'Z') loop += 1 << position; 11 | } 12 | addon += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ012345'.charAt(loop); 13 | } 14 | let convertedId = recordId + addon; 15 | return convertedId; 16 | } else { 17 | throw new Error(`Invalid Salesforce Id ${recordId}`); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | --- 8 | 9 | **Describe the bug** 10 | A clear and concise description of what the bug is. 11 | 12 | **To Reproduce** 13 | Steps to reproduce the behavior: 14 | 15 | 1. sfdx sfpowerkit:: 16 | 17 | **Expected behavior** 18 | A clear and concise description of what you expected to happen. 19 | 20 | **Screenshots** 21 | If applicable, add screenshots to help explain your problem. 22 | 23 | **Desktop (please complete the following information):** 24 | 25 | - OS: [e.g. Windows/Linux] 26 | - Node Version 27 | - Version of sfpowerkit[e.g 1.45.0] 28 | - Salesforce Org Version [ In case of pre release window ] 29 | 30 | **Additional context** 31 | Add any other context about the problem here. 32 | -------------------------------------------------------------------------------- /messages/profile_retrieve.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Retrieve profiles from the salesforce org with all its associated permissions. Common use case for this command is to migrate profile changes from a integration environment to other higher environments [overcomes SFDX CLI Profile retrieve issue where it doesnt fetch the full profile unless the entire metadata is present in source], or retrieving profiles from production to lower environments for testing.", 3 | "folderFlagDescription": "retrieve only updated versions of profiles found in this directory, If ignored, all profiles will be retrieved.", 4 | "profileListFlagDescription": "comma separated list of profiles to be retrieved. 
Use it for selectively retrieving an existing profile or retrieving a new profile", 5 | "deleteFlagDescription": "set this flag to delete profile files that does not exist in the org, when retrieving in bulk" 6 | } 7 | -------------------------------------------------------------------------------- /src/utils/checkDeploymentStatus.ts: -------------------------------------------------------------------------------- 1 | import { Connection, DeployResult } from 'jsforce'; 2 | import { delay } from './delay'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | import { SfdxError } from '@salesforce/core'; 5 | 6 | export async function checkDeploymentStatus(conn: Connection, retrievedId: string): Promise { 7 | let metadata_result; 8 | 9 | while (true) { 10 | await conn.metadata.checkDeployStatus(retrievedId, true, function (error, result) { 11 | if (error) { 12 | throw new SfdxError(error.message); 13 | } 14 | metadata_result = result; 15 | }); 16 | 17 | if (!metadata_result.done) { 18 | SFPLogger.log('Polling for Deployment Status', LoggerLevel.INFO); 19 | await delay(5000); 20 | } else { 21 | break; 22 | } 23 | } 24 | return metadata_result; 25 | } 26 | -------------------------------------------------------------------------------- /messages/package_build.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Generate a complete manifest of all the metadata from the specified org. Once the manifest is generated use source:retrieve or mdapi:retrieve to retrieve the metadata.", 3 | "quickfilterFlagDescription": "[deprecated] comma separated values of metadata type, member or file names to be excluded while building the manifest", 4 | "excludefilterFlagDescription": "comma separated values of metadata type, member or file names to be excluded while building the manifest", 5 | "includefilterFlagDescription": "comma separated values of metadata type, member or file names to be included while building the manifest", 6 | "excludeManagedFlagDescription": "exclude managed packages components from the manifest", 7 | "outputFileFlagDescription": "The output path where the manifest file will be created", 8 | "includeChildsFlagDescription": "Set to true to include child Metadata in the generated package.xml." 9 | } 10 | -------------------------------------------------------------------------------- /messages/source_customlabel_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Custom Labels are org wide, hence when the metadata is pulled down from scratch org, the entire custom label metadata file is updated in a package repo. 
This command is a helper command to create customlabel with pacakage names prepended for easy reconcilation.", 3 | "fullnameFlagDescription": "Name of the custom label (API Name)", 4 | "valueFlagDescription": "Value of the custom label", 5 | "categoriesFlagDescription": "Comma Separated Category Values", 6 | "languageFlagDescription": "Language of the custom label (Default: en_US)", 7 | "protectedFlagDescription": "Protected State of the custom label (Default: false)", 8 | "shortdescriptionFlagDescription": "Short Description of the custom label", 9 | "packageFlagDescription": "The name of the package that needs to be appended", 10 | "ignorepackageFlagDescription": "Ignores the addition of the package into the fullname (API Name)" 11 | } 12 | -------------------------------------------------------------------------------- /src/utils/checkRetrievalStatus.ts: -------------------------------------------------------------------------------- 1 | import { Connection } from 'jsforce'; 2 | import { delay } from './delay'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | import { SfdxError } from '@salesforce/core'; 5 | 6 | export async function checkRetrievalStatus(conn: Connection, retrievedId: string, isToBeLoggedToConsole = true) { 7 | let metadata_result; 8 | 9 | while (true) { 10 | await conn.metadata.checkRetrieveStatus(retrievedId, function (error, result) { 11 | if (error) { 12 | return new SfdxError(error.message); 13 | } 14 | metadata_result = result; 15 | }); 16 | 17 | if (metadata_result.done === 'false') { 18 | if (isToBeLoggedToConsole) SFPLogger.log(`Polling for Retrieval Status`, LoggerLevel.INFO); 19 | await delay(5000); 20 | } else { 21 | //this.ux.logJson(metadata_result); 22 | break; 23 | } 24 | } 25 | return metadata_result; 26 | } 27 | -------------------------------------------------------------------------------- /src/impl/parser/listeners/ApexTypeListener.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ApexParserListener, 3 | AnnotationContext, 4 | InterfaceDeclarationContext, 5 | ClassDeclarationContext, 6 | } from 'apex-parser'; 7 | 8 | export default class ApexTypeListener implements ApexParserListener { 9 | private apexType: ApexType = { 10 | class: false, 11 | testClass: false, 12 | interface: false, 13 | }; 14 | 15 | enterAnnotation(ctx: AnnotationContext): void { 16 | if (ctx.text.toUpperCase().startsWith('@ISTEST')) { 17 | this.apexType['testClass'] = true; 18 | } 19 | } 20 | 21 | enterInterfaceDeclaration(ctx: InterfaceDeclarationContext): void { 22 | this.apexType['interface'] = true; 23 | } 24 | 25 | enterClassDeclaration(ctx: ClassDeclarationContext): void { 26 | this.apexType['class'] = true; 27 | } 28 | 29 | public getApexType(): ApexType { 30 | return this.apexType; 31 | } 32 | } 33 | 34 | interface ApexType { 35 | class: boolean; 36 | testClass: boolean; 37 | interface: boolean; 38 | } 39 | -------------------------------------------------------------------------------- /src/utils/getUserDetails.ts: -------------------------------------------------------------------------------- 1 | import { Org } from '@salesforce/core'; 2 | import { isNullOrUndefined } from 'util'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | let retry = require('async-retry'); 5 | 6 | export async function getUserEmail(username: string, hubOrg: Org) { 7 | let hubConn = hubOrg.getConnection(); 8 | 9 | return await retry( 10 | async (bail) => { 11 | if (isNullOrUndefined(username)) { 12 | 
bail(new Error('username cannot be null. provide a valid username')); 13 | return; 14 | } 15 | let query = `SELECT email FROM user WHERE username='${username}'`; 16 | 17 | SFPLogger.log('QUERY:' + query, LoggerLevel.TRACE); 18 | const results = (await hubConn.query(query)) as any; 19 | 20 | if (results.records.length < 1) { 21 | bail(new Error(`No user found with username ${username} in devhub.`)); 22 | return; 23 | } 24 | return results.records[0].Email; 25 | }, 26 | { retries: 3, minTimeout: 3000 } 27 | ); 28 | } 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Accenture 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /src/utils/outputGenerator.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 3 | import * as fs from 'fs-extra'; 4 | import FileUtils from './fileutils'; 5 | 6 | export default class OutputGenerator { 7 | public async generateJsonOutput(result: any, outputDir: string) { 8 | let outputJsonPath = `${outputDir}/output.json`; 9 | let dir = path.parse(outputJsonPath).dir; 10 | if (!fs.existsSync(dir)) { 11 | FileUtils.mkDirByPathSync(dir); 12 | } 13 | fs.writeFileSync(outputJsonPath, JSON.stringify(result)); 14 | SFPLogger.log(`Output ${outputDir}/output.json is generated successfully`, LoggerLevel.INFO); 15 | } 16 | 17 | public async generateCSVOutput(result: string, outputDir: string) { 18 | let outputcsvPath = `${outputDir}/output.csv`; 19 | let dir = path.parse(outputcsvPath).dir; 20 | 21 | if (!fs.existsSync(dir)) { 22 | FileUtils.mkDirByPathSync(dir); 23 | } 24 | 25 | fs.writeFileSync(outputcsvPath, result); 26 | SFPLogger.log(`Output ${outputDir}/output.csv is generated successfully`, LoggerLevel.INFO); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/ui/progressBar.ts: -------------------------------------------------------------------------------- 1 | import cli from 'cli-ux'; 2 | import SFPLogger from '@dxatscale/sfp-logger'; 3 | import { isNullOrUndefined } from 'util'; 4 | import { Sfpowerkit } from '../sfpowerkit'; 5 | 6 | export class ProgressBar { 7 | private progressBarImpl; 8 | 9 | public create(title: string, unit: string, displayTillLogLevel: number): ProgressBar { 10 | if (SFPLogger.logLevel <= displayTillLogLevel && !Sfpowerkit.isJsonFormatEnabled) { 11 | this.progressBarImpl = cli.progress({ 12 | format: `${title} - PROGRESS | {bar} | {value}/{total} ${unit}`, 13 | barCompleteChar: '\u2588', 14 | barIncompleteChar: '\u2591', 15 | linewrap: true, 16 | }); 17 | } 18 | return this; 19 | } 20 | 21 | public start(totalSize: number) { 22 | if (!isNullOrUndefined(this.progressBarImpl)) this.progressBarImpl.start(totalSize); 23 | } 24 | 25 | public stop() { 26 | if (!isNullOrUndefined(this.progressBarImpl)) this.progressBarImpl.stop(); 27 | } 28 | 29 | public increment(count: number) { 30 | if (!isNullOrUndefined(this.progressBarImpl)) this.progressBarImpl.increment(count); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /.github/workflows/buildPackage.yml: -------------------------------------------------------------------------------- 1 | #This workflow is used as a template to build the sfpowerkit plugin 2 | 3 | name: 'Build Packages' 4 | 5 | on: 6 | workflow_call: 7 | inputs: 8 | version: 9 | type: string 10 | default: '' 11 | publish: 12 | type: boolean 13 | default: false 14 | environment: 15 | type: string 16 | required: true 17 | secrets: 18 | npm-token: 19 | required: false 20 | 21 | jobs: 22 | build: 23 | name: 'build packages' 24 | runs-on: ubuntu-latest 25 | environment: ${{ inputs.environment }} 26 | steps: 27 | 28 | - uses: actions/checkout@v3 29 | 30 | - uses: actions/setup-node@v2 31 | with: 32 | node-version: '16' 33 | registry-url: https://registry.npmjs.org/ 34 | 35 | - name: 'Set Git Config' 36 | run: | 37 | git config --global user.email "ciuser@dxatscale.io" 38 | git config --global user.name "ciuser" 39 | 40 | - name: 'Build Package' 
41 | run: | 42 | npm install 43 | 44 | - run: npm publish --access public --tag ${{ inputs.version }} 45 | env: 46 | NODE_AUTH_TOKEN: ${{secrets.npm-token}} 47 | -------------------------------------------------------------------------------- /messages/profile_reconcile.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command is used in lower environments such as scratch orgs and development / system testing sandboxes, where a profile retrieved from production has to be cleaned up so that it contains only the metadata that is available in the environment, or only the metadata that is contained in the packaging directory.", 3 | "folderFlagDescription": "path to the folder which contains the profiles to be reconciled. If the project contains multiple package directories, please provide a comma separated list; if omitted, all the package directories will be checked for profiles", 4 | "nameFlagDescription": "list of profiles to be reconciled. If omitted, all the profile components will be reconciled.", 5 | "destFolderFlagDescription": "the destination folder for reconciled profiles; if omitted, existing profiles will be reconciled and rewritten in their current location", 6 | "sourceonlyFlagDescription": "set this flag to reconcile profiles only against components available in the project. Configure ignored permissions in the sfdx-project.json file in the array plugins->sfpowerkit->ignoredPermissions.", 7 | "targetorgFlagDescription": "org against which profiles will be reconciled. This parameter can be omitted if the sourceonly flag is set." 8 | } 9 | -------------------------------------------------------------------------------- /src/utils/dxProjectManifestUtils.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import * as fs from 'fs-extra'; 3 | 4 | export class DXProjectManifestUtils { 5 | private sfdxProjectManifestJSON: any; 6 | 7 | public constructor(private projectFolder: string) {} 8 | 9 | public removePackagesNotInDirectory(): void { 10 | //Validate projectJson Path 11 | let sfdxProjectManifestPath = path.join(this.projectFolder, 'sfdx-project.json'); 12 | 13 | if (!fs.existsSync(sfdxProjectManifestPath)) 14 | throw new Error(`sfdx-project.json doesn't exist at ${sfdxProjectManifestPath}`); 15 | 16 | // Read sfdx-project.json 17 | const sfdxProjectManifest = fs.readFileSync(sfdxProjectManifestPath, 'utf8'); 18 | this.sfdxProjectManifestJSON = JSON.parse(sfdxProjectManifest); 19 | 20 | //Filter sfdx-project.json of unwanted directories 21 | this.sfdxProjectManifestJSON.packageDirectories = this.sfdxProjectManifestJSON.packageDirectories.filter((el) => 22 | this.isElementExists(el) 23 | ); 24 | 25 | //Write the filtered sfdx-project.json back 26 | fs.writeJSONSync(sfdxProjectManifestPath, this.sfdxProjectManifestJSON); 27 | } 28 | 29 | private isElementExists(element) { 30 | return fs.existsSync(path.join(this.projectFolder, element.path)); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/utils/getPackageInfo.ts: -------------------------------------------------------------------------------- 1 | import { SfdxProjectJson, SfdxError } from '@salesforce/core'; 2 | import { JsonArray } from '@salesforce/ts-types'; 3 | 4 | //Returns the info about a requested package 5 | export function getPackageInfo(packageJson: SfdxProjectJson, packageName: string) { 6 | //Find the default package or passed
package as the parameter 7 | const packageDirectories = (packageJson.get('packageDirectories') as JsonArray) || []; 8 | let packageInfo; 9 | if (packageName) { 10 | packageInfo = packageDirectories.filter((it) => { 11 | return it['package'] === packageName; 12 | })[0]; 13 | 14 | if (packageInfo == undefined) { 15 | throw new SfdxError('Invalid Package'); 16 | } 17 | } else throw new SfdxError('Package Name is empty'); 18 | return packageInfo; 19 | } 20 | 21 | //Returns the info about the default package 22 | export function getDefaultPackageInfo(packageJson: SfdxProjectJson) { 23 | //Find the default package directory 24 | const packageDirectories = (packageJson.get('packageDirectories') as JsonArray) || []; 25 | let packageInfo; 26 | 27 | packageInfo = packageDirectories.filter((it) => { 28 | return it['default'] == true; 29 | })[0]; 30 | 31 | if (packageInfo == undefined) { 32 | throw new SfdxError('Default Package not found'); 33 | } 34 | 35 | return packageInfo; 36 | } 37 | -------------------------------------------------------------------------------- /messages/project_diff.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "Generate a delta 'changeset' between two commits so that the incremental changes can be deployed to the target org. To be used for an org based deployment where the size of the metadata is so large that the project cannot be deployed in a single attempt.\nThis command works with a source format based repository only. Utilize the command during a transition phase where an org is transformed to a modular architecture composed of multiple projects.", 3 | "outputFolderDescription": "The output directory where the incremental project will be created", 4 | "revisionFromDescription": "Base revision from where the diff is to be generated, required if the diff file is omitted", 5 | "revisionToDescription": "[default: HEAD] Target revision to generate the diff", 6 | "sameCommitErrorMessage": "Same commit: Please provide two different revisions", 7 | "generativeDestructiveManifestDescription": "If set to true, the command will also generate a destructiveChangePost.xml file in the output folder.", 8 | "itemsToBypass": "[EXPERIMENTAL] Ignore comma separated paths from the diff being generated; the diff command doesn't generate a diff on these paths", 9 | "packagedirectories": "[EXPERIMENTAL] Run diff only on specific paths, also generate an sfdx-project.json to support the corresponding package directory", 10 | "apiversion": "Override the API version used for API requests made by this command" 11 | } 12 | -------------------------------------------------------------------------------- /.github/workflows/promotePackage.yml: -------------------------------------------------------------------------------- 1 | name: 'Promote Package' 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | version: 7 | type: string 8 | required: true 9 | pathToPackageJson: 10 | type: string 11 | required: true 12 | environment: 13 | type: string 14 | required: true 15 | secrets: 16 | npm-token: 17 | required: true 18 | 19 | 20 | jobs: 21 | promotePackage: 22 | name: 'promote package' 23 | runs-on: ubuntu-latest 24 | environment: ${{ inputs.environment }} 25 | steps: 26 | - uses: actions/checkout@v2 27 | with: 28 | fetch-depth: 0 29 | 30 | - uses: actions/setup-node@v2 31 | with: 32 | node-version: '14' 33 | registry-url: https://registry.npmjs.org/ 34 | 35 | - name: 'Set Git Config' 36 | run: | 37 | git config
--global user.email "ciuser@dxatscale.io" 38 | git config --global user.name "ciuser" 39 | 40 | - name: 'Create .npmrc file in HOME directory' 41 | run: | 42 | echo "//registry.npmjs.org/:_authToken=${AUTH_TOKEN}" > ~/.npmrc 43 | env: 44 | AUTH_TOKEN: ${{ secrets.npm-token }} 45 | 46 | - name: 'Promote package' 47 | run: | 48 | PKG_NAME=$(jq -r ".name" ${{ inputs.pathToPackageJson }}) 49 | PKG_VERSION=$(jq -r ".version" ${{ inputs.pathToPackageJson }}) 50 | npm dist-tag add $PKG_NAME@$PKG_VERSION ${{ inputs.version }} 51 | env: 52 | NODE_AUTH_TOKEN: ${{ secrets.npm-token }} -------------------------------------------------------------------------------- /contributing.md: -------------------------------------------------------------------------------- 1 |

# Contributing to sfpowerkit
2 | First and foremost, thank you! We appreciate that you want to contribute to sfpowerkit, your time is valuable, and your contributions mean a lot to us.
3 | ## Important!
4 | By contributing to this project, you:
5 | 
6 | - Agree that you have authored 100% of the content
7 | - Agree that you have the necessary rights to the content
8 | - Agree that you have received the necessary permissions from your employer to make the contributions (if applicable)
9 | - Agree that the content you contribute may be provided under the Project license(s)
10 | - Agree that, if you did not author 100% of the content, the appropriate licenses and copyrights have been added along with any other necessary attribution.
11 | 
12 | ## Getting started
13 | ### What does "contributing" mean?
14 | Creating an issue is the simplest form of contributing to a project. But there are many ways to contribute, including the following:
15 | 
16 | - Updating or correcting documentation
17 | - Feature requests
18 | - Bug reports
19 | 
20 | ## Issues
21 | Please only create issues for bug reports or feature requests. Issues discussing any other topics may be closed by the project's maintainers without further explanation.
22 | Do not create issues about bumping dependencies unless a bug has been identified and you can demonstrate that it affects this repo.
23 | -------------------------------------------------------------------------------- /src/utils/searchFilesInDirectory.ts: -------------------------------------------------------------------------------- 1 | import SFPLogger from '@dxatscale/sfp-logger'; 2 | import * as fs from 'fs-extra'; 3 | import * as path from 'path'; 4 | 5 | export function searchFilesInDirectory(dir: string, filter: string, ext: string) { 6 | if (!fs.existsSync(dir)) { 7 | SFPLogger.log(`Specified directory: ${dir} does not exist`); 8 | return; 9 | } 10 | 11 | let filesFound = []; 12 | 13 | // const files = fs.readdirSync(dir); 14 | const found = getFilesInDirectory(dir, ext); 15 | 16 | found.forEach((file) => { 17 | const fileContent = fs.readFileSync(file); 18 | 19 | const regex = new RegExp(filter); 20 | 21 | if (regex.test(fileContent.toString())) { 22 | filesFound.push(file); 23 | } 24 | }); 25 | return filesFound; 26 | } 27 | 28 | // Using recursion, we find every file with the desired extention, even if its deeply nested in subfolders. 29 | export function getFilesInDirectory(dir: string, ext: string) { 30 | if (!fs.existsSync(dir)) { 31 | SFPLogger.log(`Specified directory: ${dir} does not exist`); 32 | return; 33 | } 34 | 35 | let files = []; 36 | fs.readdirSync(dir).forEach((file) => { 37 | const filePath = path.join(dir, file); 38 | const stat = fs.lstatSync(filePath); 39 | 40 | // If we hit a directory, apply our function to that dir. If we hit a file, add it to the array of files. 41 | if (stat.isDirectory()) { 42 | const nestedFiles = getFilesInDirectory(filePath, ext); 43 | files = files.concat(nestedFiles); 44 | } else { 45 | if (path.extname(file) === ext) { 46 | files.push(filePath); 47 | } 48 | } 49 | }); 50 | 51 | return files; 52 | } 53 | -------------------------------------------------------------------------------- /src/utils/sqlitekv.ts: -------------------------------------------------------------------------------- 1 | //Adapted from https://github.com/nickadam/kv 2 | //Original Author Nick Vissari 3 | 4 | 'use strict'; 5 | const better_sqlite3 = require('better-sqlite3'); 6 | 7 | export default class SQLITEKeyValue { 8 | private sqlite; 9 | 10 | constructor(private path: string) {} 11 | 12 | public init() { 13 | // connect to sqlite 14 | this.sqlite = new better_sqlite3(this.path); 15 | // initialize kv table 16 | this.sqlite.exec( 17 | 'CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP)' 18 | ); 19 | } 20 | 21 | public get(key: string): any { 22 | let q = 'SELECT * FROM kv WHERE k = ?'; 23 | 24 | let data = []; 25 | 26 | // eslint-disable-next-line no-constant-condition 27 | while (true) { 28 | try { 29 | data = this.sqlite.prepare(q).all(key); 30 | break; 31 | } catch (err) { 32 | continue; 33 | } 34 | } 35 | 36 | // parse the values 37 | data = data.map((x) => { 38 | x.v = JSON.parse(x.v); 39 | return x; 40 | }); 41 | 42 | if (data.length == 0) { 43 | return null; 44 | } 45 | 46 | return data[0].v; 47 | } 48 | 49 | public set(key: string, value: any) { 50 | let q = 'INSERT INTO kv (k,v) VALUES (@k, @v) ON CONFLICT(k) DO UPDATE SET v=@v,timestamp=CURRENT_TIMESTAMP'; 51 | const data = { 52 | k: key, 53 | v: JSON.stringify(value), 54 | }; 55 | 56 | // eslint-disable-next-line no-constant-condition 57 | while (true) { 58 | try { 59 | this.sqlite.prepare(q).run(data); 60 | break; 61 | } catch (err) { 62 | continue; 63 | } 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- 
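A minimal usage sketch of the SQLITEKeyValue store above. The database path and keys are illustrative assumptions, not part of the plugin; the only requirement is that better-sqlite3 (already a dependency of sqlitekv.ts) is installed and the path is writable.

import SQLITEKeyValue from './sqlitekv';

// Open (or create) a SQLite-backed key/value cache; better-sqlite3 creates the file if it does not exist.
const cache = new SQLITEKeyValue('/tmp/sfpowerkit-example.db');
cache.init(); // creates the kv table on first use

// Values are JSON-serialized on set() and parsed back on get().
cache.set('defaultApiVersion', '50.0');
cache.set('installedPackages', ['core', 'utils']);

const apiVersion: string = cache.get('defaultApiVersion'); // '50.0'
const missing = cache.get('no-such-key'); // null when the key is absent
console.log(apiVersion, missing);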
/src/commands/sfpowerkit/user/password/generate.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | import Passwordgenerateimpl from '../../../../impl/user/passwordgenerateimpl'; 5 | import { SfdxError } from '@salesforce/core'; 6 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 7 | 8 | export default class Generate extends SfpowerkitCommand { 9 | public static description = 'Generates password for a given user in a salesforce org.'; 10 | 11 | public static examples = [`$ sfdx sfpowerkit:user:password:generate -u sandbox1`]; 12 | 13 | protected static flagsConfig = { 14 | loglevel: flags.enum({ 15 | description: 'logging level for this command invocation', 16 | default: 'info', 17 | required: false, 18 | options: [ 19 | 'trace', 20 | 'debug', 21 | 'info', 22 | 'warn', 23 | 'error', 24 | 'fatal', 25 | 'TRACE', 26 | 'DEBUG', 27 | 'INFO', 28 | 'WARN', 29 | 'ERROR', 30 | 'FATAL', 31 | ], 32 | }), 33 | }; 34 | // Comment this out if your command does not require a hub org username 35 | protected static requiresUsername = true; 36 | 37 | public async execute(): Promise { 38 | //Connect to the org 39 | await this.org.refreshAuth(); 40 | const userName = this.org.getUsername(); 41 | 42 | let result = await Passwordgenerateimpl.run(userName); 43 | 44 | if (!result.password) { 45 | throw new SfdxError(`Error occured unable to set password at the moment, please try later.`); 46 | } 47 | 48 | SFPLogger.log(`Password successfully set for ${result.username} : ${result.password}`, LoggerLevel.INFO); 49 | 50 | return result; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/impl/user/passwordgenerateimpl.ts: -------------------------------------------------------------------------------- 1 | // tslint:disable-next-line:ordered-imports 2 | // eslint-disable-next-line no-useless-escape 3 | import { Connection, User, AuthInfo } from '@salesforce/core'; 4 | import queryApi from '../../utils/queryExecutor'; 5 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 6 | 7 | export default class Passwordgenerateimpl { 8 | public static async run(userName: string) { 9 | const query = `SELECT id FROM User WHERE username = '${userName}'`; 10 | 11 | const authInfo = await AuthInfo.create({ username: userName }); 12 | const userConnection = await Connection.create({ authInfo: authInfo }); 13 | let queryUtil = new queryApi(userConnection); 14 | let userRecord = await queryUtil.executeQuery(query, false); 15 | let passwordBuffer = User.generatePasswordUtf8(); 16 | let pwd; 17 | 18 | await passwordBuffer.value(async (buffer: Buffer) => { 19 | try { 20 | pwd = buffer.toString('utf8'); 21 | 22 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 23 | // @ts-ignore TODO: expose `soap` on Connection however appropriate 24 | const soap = userConnection.soap; 25 | await soap.setPassword(userRecord[0].Id, pwd); 26 | } catch (e) { 27 | pwd = undefined; 28 | if (e.message === 'INSUFFICIENT_ACCESS: Cannot set password for self') { 29 | SFPLogger.log( 30 | `${e.message}. 
Incase of scratch org, Add "features": ["EnableSetPasswordInApi"] in your project-scratch-def.json then create your scratch org.`, 31 | LoggerLevel.WARN 32 | ); 33 | } else { 34 | SFPLogger.log(`${e.message}`, LoggerLevel.WARN); 35 | } 36 | } 37 | }); 38 | 39 | return { 40 | username: userName, 41 | password: pwd, 42 | }; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/orgwideemail/verify.ts: -------------------------------------------------------------------------------- 1 | import { FlagsConfig, flags } from '@salesforce/command'; 2 | import { Messages } from '@salesforce/core'; 3 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 4 | 5 | // Initialize Messages with the current plugin directory 6 | Messages.importMessagesDirectory(__dirname); 7 | 8 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 9 | // or any library that is using the messages framework can also be loaded this way. 10 | const messages = Messages.loadMessages('sfpowerkit', 'orgwideemail_verify'); 11 | 12 | export default class OrgWideEmail extends SFPowerkitCommand { 13 | public static description = messages.getMessage('orgWideEmailVerifyCommandDescription'); 14 | 15 | public static examples = [ 16 | `$ sfdx sfpowerkit:org:orgwideemail:verify --username scratchOrg --emailid orgwideemailid 17 | `, 18 | ]; 19 | 20 | protected static flagsConfig: FlagsConfig = { 21 | emailid: flags.string({ 22 | char: 'i', 23 | description: messages.getMessage('orgWideEmailIdDescription'), 24 | required: true, 25 | }), 26 | }; 27 | 28 | protected static requiresUsername = true; 29 | 30 | public async execute(): Promise { 31 | this.ux.log( 32 | 'This command is deprecated, It is no longer guaranteed to work, Please update your workflow with alternate solution' 33 | ); 34 | 35 | const apiversion = await this.org.getConnection().retrieveMaxApiVersion(); 36 | const id: string = this.flags.emailid; 37 | 38 | let orgWideAddressObj = {}; 39 | 40 | this.ux.log('Verify email ' + id); 41 | 42 | let response = await this.org.getConnection().request({ 43 | method: 'PATCH', 44 | headers: { 45 | 'Content-Type': 'application/json', 46 | }, 47 | url: '/services/data/v' + apiversion + '/sobjects/OrgWideEmailAddress/' + id, 48 | body: JSON.stringify(orgWideAddressObj), 49 | }); 50 | if (response === undefined) { 51 | this.ux.log(`Org wide email address verified `); 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /messages/source_pmd.json: -------------------------------------------------------------------------------- 1 | { 2 | "commandDescription": "This command is a wrapper around PMD ( downloads PMD for the first time) with some predefined defaults, such as ruleset, output format, output file.", 3 | "javaHomeFlagDescription": "The command will try to locate the javahome path to execute PMD automatically, set this flag to override it to another javahome path", 4 | "directoryFlagDescription": "[default: Default project directory as mentioned in sfdx-project.json] Override this to set a different directory in the project folder", 5 | "rulesetsFlagDescription": "[default: [sfpowerkit](https://github.com/Accenture/sfpowerkit/blob/main/resources/pmd-ruleset.xml)] The comma separated pmd ruleset that will be utilzied for analyzing the apex classes, Checkout https://pmd.github.io/latest/pmd_userdocs_making_rulesets.html to create your own ruleset", 6 | 
"rulesetFlagDescription": "DEPRECATED: use --rulesets instead", 7 | "formatFlagDescription": "[default: text] The format for the pmd output, Possible values are available at https://pmd.github.io/latest/pmd_userdocs_cli_reference.html#available-report-formats", 8 | "filelistFlagDescription": "Path to file containing a comma delimited list of files to analyze.", 9 | "reportFlagDescription": "DEPRECATED: use --reportfile instead.", 10 | "reportfileFlagDescription": "The path to where the output of the analysis should be written", 11 | "versionFlagDescription": "[default: 6.39.0] The version of the pmd to be utilized for the analysis, this version will be downloaded to sfpowerkit's cache directory", 12 | "minimumpriorityFlagDescription": "Rule priority threshold; rules with lower priority than configured here won't be used.", 13 | "shortnamesFlagDescription": "Prints shortened filenames in the report.", 14 | "showsuppressedFlagDescription": "Causes the suppressed rule violations to be added to the report.", 15 | "suppressmarkerFlagDescription": "[default: NOPMD] Specifies the comment token that marks lines which PMD should ignore.", 16 | "failonviolationFlagDescription": "[default: true] By default PMD exits with status 4 if violations are found. Disable this feature with -failOnViolation false to exit with 0 instead and just output the report." 17 | } 18 | -------------------------------------------------------------------------------- /src/sfpowerkitCommand.ts: -------------------------------------------------------------------------------- 1 | import { SfdxCommand } from '@salesforce/command'; 2 | import { Sfpowerkit } from './sfpowerkit'; 3 | import SFPLogger, { COLOR_HEADER} from '@dxatscale/sfp-logger'; 4 | 5 | /** 6 | * A base class that provides common funtionality for sfpowerscripts commands 7 | * 8 | * @extends SfdxCommand 9 | */ 10 | export default abstract class SfpowerkitCommand extends SfdxCommand { 11 | public static isJsonFormatEnabled: boolean; 12 | public static logLevel; 13 | private sfpowerkitConfig; 14 | 15 | /** 16 | * Command run code goes here 17 | */ 18 | abstract execute(): Promise; 19 | 20 | /** 21 | * Entry point for the commands 22 | */ 23 | async run(): Promise { 24 | Sfpowerkit.setLogLevel(this.flags.loglevel, this.flags.json); 25 | if(this.flags.json) { 26 | SFPLogger.disableLogs(); 27 | } 28 | Sfpowerkit.resetCache(); 29 | 30 | // Always enable color by default 31 | if (process.env.SFPOWERKIT_NOCOLOR) Sfpowerkit.disableColor(); 32 | else Sfpowerkit.enableColor(); 33 | 34 | for (const plugin of this.config.plugins) { 35 | if (plugin.name === 'sfpowerkit') { 36 | this.sfpowerkitConfig = plugin; 37 | } 38 | } 39 | 40 | if (!this.flags.json) { 41 | this.sfpowerkitHeader(); 42 | } 43 | 44 | return this.execute(); 45 | } 46 | 47 | private sfpowerkitHeader() { 48 | if (!process.env.SFPOWERKIT_NOHEADER) { 49 | SFPLogger.log( 50 | COLOR_HEADER( 51 | `-------------------------------------------------------------------------------------------` 52 | ) 53 | ); 54 | SFPLogger.log( 55 | COLOR_HEADER( 56 | `sfpowerkit -- The DX@Scale Developer Toolkit - Version:${this.sfpowerkitConfig.version} - Release:${this.sfpowerkitConfig.pjson.release}` 57 | ) 58 | ); 59 | 60 | SFPLogger.log( 61 | COLOR_HEADER( 62 | `-------------------------------------------------------------------------------------------` 63 | ) 64 | ); 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/utils/metadataOperation.ts: 
-------------------------------------------------------------------------------- 1 | import { Connection } from 'jsforce/connection'; 2 | import { Sfpowerkit } from '../sfpowerkit'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | const retry = require('async-retry'); 5 | 6 | export default class MetadataOperation { 7 | constructor(private conn: Connection) {} 8 | 9 | public async getComponentsFromOrgUsingListMetadata(componentType: string) { 10 | const apiversion: string = await Sfpowerkit.getApiVersion(); 11 | 12 | return await retry( 13 | async () => { 14 | try { 15 | let items = await this.conn.metadata.list( 16 | { 17 | type: componentType, 18 | }, 19 | apiversion 20 | ); 21 | 22 | if (items === undefined || items === null) { 23 | items = []; 24 | } 25 | 26 | if (!Array.isArray(items)) { 27 | items = [items]; 28 | } 29 | 30 | return items; 31 | } catch (error) { 32 | throw new Error(`Unable to fetch list for ${componentType}`); 33 | } 34 | }, 35 | { 36 | retries: 5, 37 | minTimeout: 2000, 38 | onRetry: (error) => { 39 | SFPLogger.log(`Retrying Network call due to ${error.message}`, LoggerLevel.INFO); 40 | }, 41 | } 42 | ); 43 | } 44 | 45 | public async describeAnObject(componentType: string) { 46 | return await retry( 47 | async () => { 48 | try { 49 | return await this.conn.sobject(componentType).describe(); 50 | } catch (error) { 51 | throw new Error(`Unable to describe ${componentType}`); 52 | } 53 | }, 54 | { 55 | retries: 5, 56 | minTimeout: 2000, 57 | onRetry: (error) => { 58 | SFPLogger.log(`Retrying Network call due to ${error.message}`, LoggerLevel.INFO); 59 | }, 60 | } 61 | ); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/impl/pool/scratchorg/poolListImpl.ts: -------------------------------------------------------------------------------- 1 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 2 | import { Org } from '@salesforce/core'; 3 | import ScratchOrgUtils, { ScratchOrg } from '../../../utils/scratchOrgUtils'; 4 | 5 | export default class PoolListImpl { 6 | private hubOrg: Org; 7 | private apiversion: string; 8 | private tag: string; 9 | private mypool: boolean; 10 | private allScratchOrgs: boolean; 11 | 12 | public constructor(hubOrg: Org, apiversion: string, tag: string, mypool: boolean, allScratchOrgs: boolean) { 13 | this.hubOrg = hubOrg; 14 | this.apiversion = apiversion; 15 | this.tag = tag; 16 | this.mypool = mypool; 17 | this.allScratchOrgs = allScratchOrgs; 18 | } 19 | 20 | public async execute(): Promise { 21 | 22 | await ScratchOrgUtils.checkForPreRequisite(this.hubOrg); 23 | 24 | const results = (await ScratchOrgUtils.getScratchOrgsByTag( 25 | this.tag, 26 | this.hubOrg, 27 | this.mypool, 28 | !this.allScratchOrgs 29 | )) as any; 30 | 31 | let scratchOrgList: ScratchOrg[] = new Array(); 32 | if (results.records.length > 0) { 33 | SFPLogger.log(`${this.tag} pool has ${results.records.length} Scratch orgs available`, LoggerLevel.TRACE); 34 | 35 | for (let element of results.records) { 36 | let soDetail: ScratchOrg = {}; 37 | soDetail.tag = element.Pooltag__c; 38 | soDetail.orgId = element.ScratchOrg; 39 | soDetail.loginURL = element.LoginUrl; 40 | soDetail.username = element.SignupUsername; 41 | soDetail.password = element.Password__c; 42 | soDetail.expiryDate = element.ExpirationDate; 43 | if (element.Allocation_status__c === 'Assigned') { 44 | soDetail.status = 'In use'; 45 | } else if (element.Allocation_status__c === 'Available') { 46 | soDetail.status = 
'Available'; 47 | } else { 48 | soDetail.status = 'Provisioning in progress'; 49 | } 50 | 51 | scratchOrgList.push(soDetail); 52 | } 53 | } 54 | 55 | return scratchOrgList; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | #This pipeline builds sfpowerkit 2 | 3 | name: 'Release' 4 | 5 | on: 6 | push: 7 | branches: 8 | - main 9 | - develop 10 | paths-ignore: 11 | - 'docs/**' 12 | - '**.md' 13 | - 'decision records/**' 14 | 15 | workflow_dispatch: 16 | 17 | jobs: 18 | build: 19 | name: 'build packages' 20 | uses: ./.github/workflows/buildPackage.yml 21 | with: 22 | version: ${{ github.ref }} 23 | publish: true 24 | environment: build 25 | secrets: 26 | npm-token: ${{ secrets.NPM_TOKEN }} 27 | concurrency: 28 | group: build 29 | 30 | test: 31 | name: 'smoke test' 32 | uses: ./.github/workflows/review-smoke-test.yml 33 | with: 34 | environment: test 35 | secrets: inherit 36 | needs: [build] 37 | 38 | hotfix: 39 | name: 'sfpowerkit hotfix' 40 | uses: ./.github/workflows/promotePackage.yml 41 | with: 42 | version: 'hotfix' 43 | pathToPackageJson: 'package.json' 44 | environment: sfpowerkit-hotfix 45 | secrets: 46 | npm-token: ${{ secrets.NPM_TOKEN }} 47 | if: ${{ github.ref == 'refs/heads/main' }} 48 | needs: test 49 | 50 | alpha: 51 | name: 'sfpowerkit alpha' 52 | uses: ./.github/workflows/promotePackage.yml 53 | with: 54 | version: 'alpha' 55 | pathToPackageJson: 'package.json' 56 | environment: sfpowerkit-alpha 57 | secrets: 58 | npm-token: ${{ secrets.NPM_TOKEN }} 59 | if: ${{ github.ref == 'refs/heads/develop' }} 60 | needs: test 61 | 62 | beta: 63 | name: 'sfpowerkit beta' 64 | uses: ./.github/workflows/promotePackage.yml 65 | with: 66 | version: 'beta' 67 | pathToPackageJson: 'package.json' 68 | environment: sfpowerkit-beta 69 | secrets: 70 | npm-token: ${{ secrets.NPM_TOKEN }} 71 | if: ${{ github.ref == 'refs/heads/develop' }} 72 | needs: alpha 73 | 74 | prod: 75 | name: 'sfpowerkit prod' 76 | uses: ./.github/workflows/promotePackage.yml 77 | with: 78 | version: 'latest' 79 | pathToPackageJson: 'package.json' 80 | environment: sfpowerkit-prod 81 | secrets: 82 | npm-token: ${{ secrets.NPM_TOKEN }} 83 | if: ${{ always() && (github.ref == 'refs/heads/main' && needs.hotfix.result == 'success' || github.ref == 'refs/heads/develop' && needs.beta.result == 'success') }} 84 | needs: [hotfix, beta] 85 | -------------------------------------------------------------------------------- /src/impl/package/version/packageInfo.ts: -------------------------------------------------------------------------------- 1 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 2 | import { Connection } from 'jsforce'; 3 | import { getInstalledPackages, PackageDetail } from '../../../utils/packageUtils'; 4 | let retry = require('async-retry'); 5 | 6 | export default class PackageInfo { 7 | conn: Connection; 8 | apiversion: string; 9 | 10 | public constructor(conn: Connection, apiversion: string, jsonOutput: boolean) { 11 | this.conn = conn; 12 | this.apiversion = apiversion; 13 | } 14 | 15 | public async getPackages(): Promise { 16 | //await this.getInstalledPackageInfo(); 17 | let packageDetails = await getInstalledPackages(this.conn, true); 18 | 19 | SFPLogger.log('PackageDetails:' + JSON.stringify(packageDetails), LoggerLevel.TRACE); 20 | return packageDetails; 21 | } 22 | public async getPackagesDetailsfromDevHub( 23 | hubconn: 
Connection, 24 | pkgDetails: PackageDetail[] 25 | ): Promise { 26 | let pkgIds: string[] = []; 27 | pkgDetails.forEach((pkg) => { 28 | if (pkg.type === 'Unlocked') { 29 | pkgIds.push(pkg.packageVersionId); 30 | } 31 | }); 32 | 33 | let pkdIdsAsString = pkgIds.join(`','`); 34 | 35 | if (pkgIds.length > 0) { 36 | let installedPackagesQuery = `SELECT SubscriberPackageVersionId, HasPassedCodeCoverageCheck,CodeCoverage,ValidationSkipped FROM Package2Version WHERE SubscriberPackageVersionId IN('${pkdIdsAsString}')`; 37 | 38 | let response = await retry( 39 | async (bail) => { 40 | return await hubconn.tooling.query(installedPackagesQuery); 41 | }, 42 | { retries: 3, minTimeout: 3000 } 43 | ); 44 | 45 | if (response.records && response.records.length > 0) { 46 | response.records.forEach((record) => { 47 | for (let pkg of pkgDetails) { 48 | if (pkg.packageVersionId === record.SubscriberPackageVersionId) { 49 | pkg.codeCoverageCheckPassed = record.HasPassedCodeCoverageCheck; 50 | pkg.CodeCoverage = record.CodeCoverage ? record.CodeCoverage.apexCodeCoveragePercentage : 0; 51 | pkg.validationSkipped = record.ValidationSkipped; 52 | } 53 | } 54 | }); 55 | } 56 | } 57 | 58 | return pkgDetails; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/cleartestresult.ts: -------------------------------------------------------------------------------- 1 | import { Connection, SfdxError } from '@salesforce/core'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import queryApi from '../../../utils/queryExecutor'; 4 | import { Sfpowerkit} from '../../../sfpowerkit'; 5 | import SfpowerkitCommand from '../../../sfpowerkitCommand'; 6 | import { chunkArray } from '../../../utils/chunkArray'; 7 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 8 | 9 | const CODECOVAGG_QUERY = `SELECT Id FROM ApexCodeCoverageAggregate`; 10 | const APEXTESTRESULT_QUERY = `SELECT Id FROM ApexTestResult`; 11 | 12 | export default class Cleartestresult extends SfpowerkitCommand { 13 | public static description = `This command helps to clear any test results and code coverage in the org to get fresh and enhanced coverage everytime`; 14 | 15 | public static examples = [`$ sfdx sfpowerkit:org:cleartestresult -u myOrg@example.com`]; 16 | 17 | // Comment this out if your command does not require an org username 18 | protected static requiresUsername = true; 19 | 20 | public async execute(): Promise { 21 | Sfpowerkit.setLogLevel('Info', this.flags.json); 22 | await this.org.refreshAuth(); 23 | 24 | const conn = this.org.getConnection(); 25 | 26 | this.ux.startSpinner(`Clearing Test results`); 27 | 28 | let queryUtil = new queryApi(conn); 29 | let codeCovAgg = await queryUtil.executeQuery(CODECOVAGG_QUERY, true); 30 | await this.deleteRecords(conn, 'ApexCodeCoverageAggregate', codeCovAgg); 31 | 32 | let testResults = await queryUtil.executeQuery(APEXTESTRESULT_QUERY, true); 33 | await this.deleteRecords(conn, 'ApexTestResult', testResults); 34 | 35 | this.ux.stopSpinner(); 36 | 37 | SFPLogger.log(`Test results cleared in ${this.org.getUsername()} successfully.`, LoggerLevel.INFO); 38 | 39 | return true; 40 | } 41 | private async deleteRecords(conn: Connection, objectType: string, records: any[]) { 42 | if (records && records.length > 0) { 43 | let idsList: string[] = records.map((elem) => elem.Id); 44 | let errors = []; 45 | for (let idsTodelete of chunkArray(2000, idsList)) { 46 | const deleteResults: any = await conn.tooling.destroy(objectType, 
idsTodelete); 47 | deleteResults.forEach((elem) => { 48 | if (!elem.success) { 49 | errors = errors.concat(elem.errors); 50 | } 51 | }); 52 | } 53 | 54 | if (errors.length > 0) { 55 | throw new SfdxError(JSON.stringify(errors)); 56 | } 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/impl/pool/scratchorg/PoolDeleteImpl.ts: -------------------------------------------------------------------------------- 1 | import { Org } from '@salesforce/core'; 2 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 3 | import ScratchOrgUtils, { ScratchOrg } from '../../../utils/scratchOrgUtils'; 4 | export default class PoolDeleteImpl { 5 | private hubOrg: Org; 6 | private apiversion: string; 7 | private tag: string; 8 | private mypool: boolean; 9 | private allScratchOrgs: boolean; 10 | private inprogressonly: boolean; 11 | 12 | public constructor( 13 | hubOrg: Org, 14 | apiversion: string, 15 | tag: string, 16 | mypool: boolean, 17 | allScratchOrgs: boolean, 18 | inprogressonly: boolean 19 | ) { 20 | this.hubOrg = hubOrg; 21 | this.apiversion = apiversion; 22 | this.tag = tag; 23 | this.mypool = mypool; 24 | this.allScratchOrgs = allScratchOrgs; 25 | this.inprogressonly = inprogressonly; 26 | } 27 | 28 | public async execute(): Promise { 29 | const results = (await ScratchOrgUtils.getScratchOrgsByTag( 30 | this.tag, 31 | this.hubOrg, 32 | this.mypool, 33 | !this.allScratchOrgs 34 | )) as any; 35 | 36 | let scratchOrgToDelete: ScratchOrg[] = new Array(); 37 | if (results.records.length > 0) { 38 | SFPLogger.log(`${this.tag} pool has ${results.records.length} Scratch orgs.`, LoggerLevel.TRACE); 39 | 40 | let scrathOrgIds: string[] = []; 41 | 42 | for (let element of results.records) { 43 | if (!this.inprogressonly || element.Allocation_status__c === 'In Progress') { 44 | let soDetail: ScratchOrg = {}; 45 | soDetail.orgId = element.ScratchOrg; 46 | soDetail.loginURL = element.LoginUrl; 47 | soDetail.username = element.SignupUsername; 48 | soDetail.expiryDate = element.ExpirationDate; 49 | soDetail.status = 'Deleted'; 50 | 51 | scratchOrgToDelete.push(soDetail); 52 | scrathOrgIds.push(`'${element.Id}'`); 53 | } 54 | } 55 | 56 | if (scrathOrgIds.length > 0) { 57 | let activeScrathOrgs = await ScratchOrgUtils.getActiveScratchOrgsByInfoId( 58 | this.hubOrg, 59 | scrathOrgIds.join(',') 60 | ); 61 | 62 | if (activeScrathOrgs.records.length > 0) { 63 | let scratchOrgIds: string[] = activeScrathOrgs.records.map((elem) => elem.Id); 64 | await ScratchOrgUtils.deleteScratchOrg(this.hubOrg, scratchOrgIds); 65 | SFPLogger.log('Scratch Org(s) deleted successfully.', LoggerLevel.TRACE); 66 | } 67 | } 68 | } 69 | 70 | return scratchOrgToDelete; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/pool/create.ts: -------------------------------------------------------------------------------- 1 | import { flags, FlagsConfig } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import * as rimraf from 'rimraf'; 4 | import SFPowerkitCommand from '../../../sfpowerkitCommand'; 5 | import ScratchOrgImpl from '../../../impl/pool/scratchorg/poolCreateImpl'; 6 | import { Messages, SfdxError } from '@salesforce/core'; 7 | 8 | // Initialize Messages with the current plugin directory 9 | Messages.importMessagesDirectory(__dirname); 10 | 11 | // Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, 12 | // or any library that is using the messages framework can also be loaded this way. 13 | const messages = Messages.loadMessages('sfpowerkit', 'scratchorg_pool_create'); 14 | 15 | export default class Create extends SFPowerkitCommand { 16 | public static description = messages.getMessage('commandDescription'); 17 | protected static requiresDevhubUsername = true; 18 | 19 | public static examples = [ 20 | `$ sfdx sfpowerkit:pool:create -f config\\core_poolconfig.json`, 21 | `$ sfdx sfpowerkit:pool:create -f config\\core_poolconfig.json -v devhub`, 22 | ]; 23 | 24 | protected static flagsConfig: FlagsConfig = { 25 | configfilepath: flags.filepath({ 26 | char: 'f', 27 | description: messages.getMessage('configFilePathDescription'), 28 | required: true, 29 | }), 30 | batchsize: flags.number({ 31 | char: 'b', 32 | default: 10, 33 | description: messages.getMessage('batchSizeDescription'), 34 | required: false, 35 | }), 36 | loglevel: flags.enum({ 37 | description: 'logging level for this command invocation', 38 | default: 'info', 39 | required: false, 40 | options: [ 41 | 'trace', 42 | 'debug', 43 | 'info', 44 | 'warn', 45 | 'error', 46 | 'fatal', 47 | 'TRACE', 48 | 'DEBUG', 49 | 'INFO', 50 | 'WARN', 51 | 'ERROR', 52 | 'FATAL', 53 | ], 54 | }), 55 | }; 56 | 57 | public async execute(): Promise { 58 | rimraf.sync('temp_sfpowerkit'); 59 | 60 | await this.hubOrg.refreshAuth(); 61 | const hubConn = this.hubOrg.getConnection(); 62 | 63 | this.flags.apiversion = this.flags.apiversion || (await hubConn.retrieveMaxApiVersion()); 64 | 65 | let scratchOrgPoolImpl = new ScratchOrgImpl( 66 | this.flags.configfilepath, 67 | this.hubOrg, 68 | this.flags.apiversion, 69 | this.flags.batchsize 70 | ); 71 | 72 | try { 73 | return !(await scratchOrgPoolImpl.poolScratchOrgs()); 74 | } catch (err) { 75 | throw new SfdxError('Unable to execute command .. 
' + err); 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /.github/workflows/review-smoke-test.yml: -------------------------------------------------------------------------------- 1 | name: Review-Smoke-Test 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | - 'hotfix/*' 7 | - 'develop' 8 | 9 | workflow_dispatch: 10 | workflow_call: 11 | inputs: 12 | environment: 13 | type: string 14 | required: true 15 | 16 | jobs: 17 | SmokeTest: 18 | name: Smoke Test PR 19 | runs-on: 'ubuntu-latest' 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | - name: Setup Node.js environment 24 | uses: actions/setup-node@v3.3.0 25 | with: 26 | node-version: '14' 27 | registry-url: 'https://registry.npmjs.org' 28 | 29 | - name: Install SFDX 30 | run: npm install -g sfdx-cli 31 | 32 | - name: Run NPM Install 33 | run: npm install 34 | 35 | - name: Install sfpowerkit 36 | run: echo y | sfdx plugins:link 37 | 38 | - name: Store Auth File 39 | run: echo ${{ secrets.DEVHUB_SFDX_AUTH_URL }} > ./authfile 40 | 41 | - name: Authenitcate to DevHub 42 | run: sfdx auth:sfdxurl:store -f authfile -a HubOrg 43 | 44 | - name: Scaffold SFDX Project 45 | run: sfdx force:project:create --projectname testProject --template standard 46 | 47 | - name: Smoke Test Org Commands 48 | run: | 49 | set -euxo pipefail 50 | sfdx sfpowerkit:org:orgcoverage -u HubOrg 51 | sfdx sfpowerkit:org:orgcoverage -u HubOrg --json | jq -r .result.classCoverage[0].id 52 | sfdx sfpowerkit:org:healthcheck -u HubOrg 53 | 54 | - name: Smoke Test Profile Commands 55 | run: | 56 | set -euxo pipefail 57 | sfdx sfpowerkit:source:profile:retrieve -u HubOrg 58 | sfdx sfpowerkit:source:profile:reconcile -u HubOrg 59 | working-directory: testProject 60 | 61 | - name: Smoke Test Package Commands 62 | run: | 63 | set -euxo pipefail 64 | sfdx sfpowerkit:package:version:info -u HubOrg 65 | sfdx sfpowerkit:package:version:info -u HubOrg --json | jq -r .result[0].packageVersionId 66 | sfdx sfpowerkit:package:version:codecoverage -i 04t5f000000NrAJAA0 -v HubOrg 67 | sfdx sfpowerkit:package:version:codecoverage -i 04t5f000000NrAJAA0 -v HubOrg --json | jq -r .result[0].packageVersionId 68 | working-directory: testProject 69 | 70 | - name: Smoke Test Pool Commands 71 | run: | 72 | set -euxo pipefail 73 | sfdx sfpowerkit:pool:delete -t sfpowerkit -v HubOrg -a 74 | sfdx sfpowerkit:pool:create -f '../schemas/pool/scratchorg-poolconfig.json' -v HubOrg 75 | sfdx sfpowerkit:pool:list -a -m -v HubOrg 76 | sfdx sfpowerkit:pool:list -a -m -v HubOrg --json | jq -r .result 77 | sfdx sfpowerkit:pool:fetch -t sfpowerkit -v HubOrg 78 | sfdx sfpowerkit:pool:delete -t sfpowerkit -v HubOrg -a 79 | working-directory: testProject 80 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/package/version/info.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import { Sfpowerkit } from '../../../../sfpowerkit'; 4 | import PackageInfo from '../../../../impl/package/version/packageInfo'; 5 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 6 | import { Messages } from '@salesforce/core'; 7 | 8 | // Initialize Messages with the current plugin directory 9 | Messages.importMessagesDirectory(__dirname); 10 | 11 | // Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, 12 | // or any library that is using the messages framework can also be loaded this way. 13 | const messages = Messages.loadMessages('sfpowerkit', 'package_info'); 14 | 15 | export default class Info extends SfpowerkitCommand { 16 | public static description = messages.getMessage('commandDescription'); 17 | 18 | public static examples = [`$ sfdx sfpowerkit:package:version:info -u myOrg@example.com `]; 19 | 20 | protected static flagsConfig = { 21 | apiversion: flags.builtin({ 22 | description: messages.getMessage('apiversion'), 23 | }), 24 | loglevel: flags.enum({ 25 | description: messages.getMessage('loglevel'), 26 | default: 'info', 27 | required: false, 28 | options: [ 29 | 'trace', 30 | 'debug', 31 | 'info', 32 | 'warn', 33 | 'error', 34 | 'fatal', 35 | 'TRACE', 36 | 'DEBUG', 37 | 'INFO', 38 | 'WARN', 39 | 'ERROR', 40 | 'FATAL', 41 | ], 42 | }), 43 | }; 44 | 45 | // Comment this out if your command does not require an org username 46 | protected static requiresUsername = true; 47 | public static readonly supportsDevhubUsername = true; 48 | public async execute(): Promise { 49 | Sfpowerkit.setLogLevel(this.flags.loglevel, this.flags.json); 50 | 51 | await this.org.refreshAuth(); 52 | 53 | const conn = this.org.getConnection(); 54 | 55 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 56 | 57 | let packageInfoImpl: PackageInfo = new PackageInfo(conn, this.flags.apiversion, this.flags.json); 58 | 59 | let result = (await packageInfoImpl.getPackages()) as any; 60 | 61 | result.sort((a, b) => (a.packageName > b.packageName ? 1 : -1)); 62 | 63 | if (this.hubOrg) { 64 | result = (await packageInfoImpl.getPackagesDetailsfromDevHub(this.hubOrg.getConnection(), result)) as any; 65 | } 66 | 67 | this.ux.table(result, [ 68 | 'packageName', 69 | 'type', 70 | 'IsOrgDependent', 71 | 'packageNamespacePrefix', 72 | 'packageVersionNumber', 73 | 'packageVersionId', 74 | 'allowedLicenses', 75 | 'usedLicenses', 76 | 'expirationDate', 77 | 'status', 78 | 'CodeCoverage', 79 | 'codeCoverageCheckPassed', 80 | 'validationSkipped', 81 | ]); 82 | return result; 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/impl/project/metadata/DataModelSourceDiffImpl.ts: -------------------------------------------------------------------------------- 1 | import * as DiffGenerators from './diffgenerators/export'; 2 | import { isNullOrUndefined } from 'util'; 3 | 4 | // Gets the xml files and passes them into diff generators 5 | // Output : csv or json format 6 | export default class DataModelSourceDiffImpl { 7 | constructor(public git, public baseline: string, public target: string, public packageDirectories: string[]) {} 8 | 9 | private diffGenerators = { 10 | customfield: DiffGenerators.SourceDiffGenerator, 11 | recordtype: DiffGenerators.SourceDiffGenerator, 12 | businessprocess: DiffGenerators.SourceDiffGenerator, 13 | }; 14 | 15 | private filePattern = { 16 | customfield: 'field', 17 | recordtype: 'recordType', 18 | businessprocess: 'businessProcess', 19 | }; 20 | 21 | public async exec(): Promise { 22 | const sourceDiffResult = []; 23 | for (let metadataType in this.diffGenerators) { 24 | let changedFiles: string[] = await this.getNameOfChangedFiles( 25 | this.git, 26 | this.baseline, 27 | this.target, 28 | metadataType 29 | ); 30 | 31 | if (!isNullOrUndefined(this.packageDirectories)) { 32 | changedFiles = this.filterByPackageDirectory(changedFiles, 
this.packageDirectories); 33 | } 34 | 35 | let sourceDiffGenerator: DiffGenerators.SourceDiffGenerator = new this.diffGenerators[metadataType]( 36 | this.baseline, 37 | this.target 38 | ); 39 | 40 | for (let file of changedFiles) { 41 | let fileRevFrom: string | void = await this.git.show([`${this.baseline}:${file}`]).catch((err) => {}); 42 | 43 | let fileRevTo: string | void = await this.git.show([`${this.target}:${file}`]).catch((err) => {}); 44 | 45 | let diff = await sourceDiffGenerator.compareRevisions(fileRevFrom, fileRevTo, file); 46 | 47 | // Aggregate individual file diffs in the source diff result 48 | if (diff) { 49 | sourceDiffResult.push(diff); 50 | } 51 | } 52 | } 53 | return sourceDiffResult; 54 | } 55 | 56 | private async getNameOfChangedFiles( 57 | git, 58 | baseline: string, 59 | target: string, 60 | metadataType: string 61 | ): Promise { 62 | let gitDiffResult: string = await git.diff([ 63 | baseline, 64 | target, 65 | '--name-only', 66 | '--', 67 | `**/objects/**/*${this.filePattern[metadataType]}-meta.xml`, 68 | ]); 69 | 70 | let changedFiles: string[] = gitDiffResult.split('\n'); 71 | changedFiles.pop(); 72 | 73 | return changedFiles; 74 | } 75 | 76 | private filterByPackageDirectory(changedFiles: string[], packageDirectories: string[]): string[] { 77 | let filteredChangedFiles = changedFiles.filter((file) => { 78 | let isFileInPackageDir; 79 | packageDirectories.forEach((dir) => { 80 | if (file.includes(dir)) isFileInPackageDir = true; 81 | }); 82 | return isFileInPackageDir; 83 | }); 84 | 85 | return filteredChangedFiles; 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /resources/so_pool_config.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "type": "object", 4 | "properties": { 5 | "pool": { 6 | "type": "object", 7 | "required": ["config_file_path", "tag"], 8 | "additionalProperties": false, 9 | "properties": { 10 | "expiry": { 11 | "description": "Number of days after which the pooled scratch org will expire", 12 | "type": "number" 13 | }, 14 | "config_file_path": { 15 | "description": "(Required) Path to the scratch org definition file", 16 | "type": "string" 17 | }, 18 | "script_file_path": { 19 | "description": "Path to the Script file", 20 | "type": "string" 21 | }, 22 | "tag": { 23 | "description": "(Required) Identifier for the pool created", 24 | "type": "string" 25 | }, 26 | "relax_all_ip_ranges": { 27 | "description": "Relax all IP ranges", 28 | "type": "boolean" 29 | }, 30 | "alias_prefix": { 31 | "description": "Identifier for the pool of scratch orgs created", 32 | "type": "string" 33 | }, 34 | "relax_ip_ranges": { 35 | "type": "array", 36 | "items": [ 37 | { 38 | "type": "object", 39 | "properties": { 40 | "start": { 41 | "type": "string" 42 | }, 43 | "end": { 44 | "type": "string" 45 | } 46 | } 47 | } 48 | ] 49 | }, 50 | "max_allocation": { 51 | "description": "(Required) Size of the pool, ignored if pool users are specified", 52 | "type": "number" 53 | } 54 | } 55 | }, 56 | "poolUsers": { 57 | "type": "array", 58 | "items": [ 59 | { 60 | "type": "object", 61 | "required": ["max_allocation", "min_allocation", "username", "expiry", "priority"], 62 | "additionalProperties": false, 63 | "properties": { 64 | "max_allocation": { 65 | "type": "number" 66 | }, 67 | "min_allocation": { 68 | "type": "number" 69 | }, 70 | "username": { 71 | "type": "string" 72 | }, 73 | "expiry": { 74 | "type": 
"number" 75 | }, 76 | "priority": { 77 | "type": "number" 78 | } 79 | } 80 | } 81 | ] 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/source/apextest/list.ts: -------------------------------------------------------------------------------- 1 | import { AnyJson } from '@salesforce/ts-types'; 2 | import { existsSync } from 'fs'; 3 | import { flags } from '@salesforce/command'; 4 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 5 | import { Messages, SfdxError } from '@salesforce/core'; 6 | import ApexTypeFetcher, { ApexSortedByType } from '../../../../impl/parser/ApexTypeFetcher'; 7 | import SFPLogger, {LoggerLevel} from '@dxatscale/sfp-logger'; 8 | 9 | // Initialize Messages with the current plugin directory 10 | Messages.importMessagesDirectory(__dirname); 11 | 12 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 13 | // or any library that is using the messages framework can also be loaded this way. 14 | const messages = Messages.loadMessages('sfpowerkit', 'source_apextest_list'); 15 | 16 | export default class List extends SfpowerkitCommand { 17 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 18 | protected static requiresProject = true; 19 | public static description = messages.getMessage('commandDescription'); 20 | 21 | public static examples = [`$ sfdx sfpowerkit:source:apextest:list -p force-app`]; 22 | 23 | protected static flagsConfig = { 24 | path: flags.string({ 25 | required: true, 26 | char: 'p', 27 | description: messages.getMessage('pathFlagDescription'), 28 | }), 29 | resultasstring: flags.boolean({ 30 | description: messages.getMessage('resultasstringDescription'), 31 | required: false, 32 | }), 33 | loglevel: flags.enum({ 34 | description: messages.getMessage('loglevel'), 35 | default: 'info', 36 | required: false, 37 | options: [ 38 | 'trace', 39 | 'debug', 40 | 'info', 41 | 'warn', 42 | 'error', 43 | 'fatal', 44 | 'TRACE', 45 | 'DEBUG', 46 | 'INFO', 47 | 'WARN', 48 | 'ERROR', 49 | 'FATAL', 50 | ], 51 | }), 52 | }; 53 | 54 | public async execute(): Promise { 55 | //set apex class directory 56 | if (!existsSync(this.flags.path)) { 57 | throw new SfdxError(`path ${this.flags.path} does not exist. you must provide a valid path.`); 58 | } 59 | 60 | let apexTypeFetcher: ApexTypeFetcher = new ApexTypeFetcher(); 61 | let apexSortedByType: ApexSortedByType = apexTypeFetcher.getApexTypeOfClsFiles(this.flags.path); 62 | 63 | let testClasses = apexSortedByType['testClass']; 64 | let testClassesList = testClasses.map((cls) => cls.name); 65 | 66 | if (testClasses.length > 0) { 67 | SFPLogger.log(`Found ${testClasses.length} apex test classes in ${this.flags.path}`, LoggerLevel.INFO); 68 | if (this.flags.resultasstring) { 69 | this.ux.log(testClassesList.join(',')); 70 | } else { 71 | this.ux.table(testClasses, ['name', 'filepath']); 72 | } 73 | } else { 74 | SFPLogger.log(`No apex test classes found in ${this.flags.path}`, LoggerLevel.INFO); 75 | } 76 | 77 | return this.flags.resultasstring ? 
testClassesList.join(',') : testClassesList; 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/scratchorg/usage.ts: -------------------------------------------------------------------------------- 1 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | let request = require('request-promise-native'); 4 | import { Connection, Messages } from '@salesforce/core'; 5 | 6 | // Initialize Messages with the current plugin directory 7 | Messages.importMessagesDirectory(__dirname); 8 | 9 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 10 | // or any library that is using the messages framework can also be loaded this way. 11 | const messages = Messages.loadMessages('sfpowerkit', 'scratchorg_usage'); 12 | 13 | export default class Usage extends SFPowerkitCommand { 14 | public static description = messages.getMessage('commandDescription'); 15 | 16 | public static examples = [ 17 | `$ sfdx sfpowerkit:org:scratchorg:usage -v devhub 18 | Active Scratch Orgs Remaining: 42 out of 100 19 | Daily Scratch Orgs Remaining: 171 out of 200 20 | 21 | SCRATCH_ORGS_USED NAME 22 | ───────────────── ───────────────── 23 | 2 XYZ@KYZ.COM 24 | 2 JFK@KYZ.COM 25 | Total number of records retrieved: 4. 26 | `, 27 | ]; 28 | 29 | // Comment this out if your command does not require a hub org username 30 | protected static requiresDevhubUsername = true; 31 | 32 | public async execute(): Promise { 33 | await this.hubOrg.refreshAuth(); 34 | const conn = this.hubOrg.getConnection(); 35 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 36 | 37 | let limits = await this.getScratchOrgLimits(conn); 38 | 39 | this.ux.log( 40 | `Active Scratch Orgs Remaining: ${limits.ActiveScratchOrgs.Remaining} out of ${limits.ActiveScratchOrgs.Max}` 41 | ); 42 | this.ux.log( 43 | `Daily Scratch Orgs Remaining: ${limits.DailyScratchOrgs.Remaining} out of ${limits.DailyScratchOrgs.Max}` 44 | ); 45 | 46 | this.ux.log(''); 47 | 48 | if (limits.ActiveScratchOrgs.Remaining !== limits.ActiveScratchOrgs.Max) { 49 | let scratchOrgs = await this.getScratchOrgInfo(conn); 50 | //this.ux.log(scratchOrgs); 51 | const output = []; 52 | scratchOrgs.records.forEach((element) => { 53 | output.push({ 54 | In_Use: element.In_Use, 55 | SignupEmail: element.SignupEmail, 56 | }); 57 | }); 58 | this.ux.table(output, ['In_Use', 'SignupEmail']); 59 | } else { 60 | this.ux.log(`No Scratch org used currently.`); 61 | } 62 | 63 | return 1; 64 | } 65 | 66 | private async getScratchOrgLimits(conn: Connection) { 67 | let query_uri = `${conn.instanceUrl}/services/data/v${this.flags.apiversion}/limits`; 68 | 69 | //this.ux.log(`Query URI ${query_uri}`); 70 | 71 | const limits = await request({ 72 | method: 'get', 73 | url: query_uri, 74 | headers: { 75 | Authorization: `Bearer ${conn.accessToken}`, 76 | }, 77 | json: true, 78 | }); 79 | 80 | return limits; 81 | } 82 | private async getScratchOrgInfo(conn: Connection) { 83 | let query = 84 | 'SELECT count(id) In_Use, SignupEmail FROM ActiveScratchOrg GROUP BY SignupEmail ORDER BY count(id) DESC'; 85 | 86 | const results = (await conn.query(query)) as any; 87 | 88 | return results; 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/orgwideemail/create.ts: 
-------------------------------------------------------------------------------- 1 | import { FlagsConfig, flags } from '@salesforce/command'; 2 | import { Messages } from '@salesforce/core'; 3 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 4 | 5 | // Initialize Messages with the current plugin directory 6 | Messages.importMessagesDirectory(__dirname); 7 | 8 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 9 | // or any library that is using the messages framework can also be loaded this way. 10 | const messages = Messages.loadMessages('sfpowerkit', 'orgwideemail_create'); 11 | 12 | export default class OrgWideEmail extends SFPowerkitCommand { 13 | public static description = messages.getMessage('orgWideEmailCreateCommandDescription'); 14 | 15 | public static examples = [ 16 | `sfdx sfpowerkit:org:orgwideemail:create -e testuser@test.com -u scratch1 -n "Test Address" -p 17 | Creating email azlam.abdulsalam@accenture.com 18 | Org wide email created with Id 0D2210000004DidCAE 19 | Run the folowing command to verify it 20 | sfdx sfpowerkit:org:orgwideemail:verify -i 0D2210000004DidCAE -u test-jkomdylblorj@example.com `, 21 | ]; 22 | 23 | protected static flagsConfig: FlagsConfig = { 24 | address: flags.email({ 25 | char: 'e', 26 | description: messages.getMessage('orgWideEmailAddressDescription'), 27 | required: true, 28 | }), 29 | displayname: flags.string({ 30 | char: 'n', 31 | description: messages.getMessage('orgWideEmailDisplaynameDescription'), 32 | required: true, 33 | }), 34 | allprofile: flags.boolean({ 35 | char: 'p', 36 | description: messages.getMessage('orgWideEmailAllprofileDescription'), 37 | required: false, 38 | }), 39 | }; 40 | protected static requiresUsername = true; 41 | 42 | public async execute(): Promise { 43 | this.ux.log( 44 | 'This command is deprecated, It is no longer guaranteed to work, Please update your workflow with alternate solution' 45 | ); 46 | 47 | const apiversion = await this.org.getConnection().retrieveMaxApiVersion(); 48 | const address: string = this.flags.address; 49 | const displayname: string = this.flags.displayname; 50 | let allprofile = this.flags.allprofile ? 
true : false; 51 | 52 | const orgWideAddressObj = { 53 | Address: address, 54 | DisplayName: displayname, 55 | IsAllowAllProfiles: allprofile, 56 | }; 57 | 58 | this.ux.log('Creating email ' + orgWideAddressObj.Address); 59 | 60 | let response = await this.org.getConnection().request({ 61 | method: 'POST', 62 | headers: { 63 | 'Content-Type': 'application/json', 64 | }, 65 | url: '/services/data/v' + apiversion + '/sobjects/OrgWideEmailAddress', 66 | body: JSON.stringify(orgWideAddressObj), 67 | }); 68 | 69 | if (response['success']) { 70 | let username = this.org.getUsername(); 71 | this.ux.log(`Org wide email created with Id ${response['id']} `); 72 | this.ux.log(`Run the folowing command to verify it `); 73 | this.ux.log(`sfdx sfpowerkit:org:orgwideemail:verify -i ${response['id']} -u ${username}`); 74 | } else { 75 | this.ux.error('Errors occured during org wide email creation '); 76 | response['errors'].forEach((error) => { 77 | this.ux.error(error); 78 | }); 79 | } 80 | 81 | return response; 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/source/apextestsuite/convert.ts: -------------------------------------------------------------------------------- 1 | import { AnyJson } from '@salesforce/ts-types'; 2 | import * as fs from 'fs-extra'; 3 | import { flags } from '@salesforce/command'; 4 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 5 | import * as rimraf from 'rimraf'; 6 | import { SfdxError, Messages } from '@salesforce/core'; 7 | import * as xml2js from 'xml2js'; 8 | import * as util from 'util'; 9 | const fg = require('fast-glob'); 10 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 11 | const path = require('path'); 12 | 13 | // Initialize Messages with the current plugin directory 14 | Messages.importMessagesDirectory(__dirname); 15 | 16 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 17 | // or any library that is using the messages framework can also be loaded this way. 
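The `sfpowerkit:source:apextestsuite:convert` command defined below reduces to two steps: locate the `.testSuite-meta.xml` file, then flatten its `testClassName` entries into a quoted, comma-separated class list. The following sketch shows only that transformation, assuming a known suite path rather than the command's fast-glob lookup:

```typescript
// Illustrative sketch only — not part of convert.ts. It mirrors the parsing the
// command performs further below, with a hypothetical suite path instead of the
// command's fast-glob lookup.
import * as fs from 'fs-extra';
import * as util from 'util';
import * as xml2js from 'xml2js';

async function testSuiteToClassList(suitePath: string): Promise<string> {
    const parser = new xml2js.Parser({ explicitArray: false });
    const parseString = util.promisify(parser.parseString);

    // <ApexTestSuite><testClassName>Foo</testClassName>...</ApexTestSuite>
    const suite: any = await parseString(fs.readFileSync(suitePath, 'utf8'));
    const names = suite.ApexTestSuite.testClassName;

    // A suite holding a single class parses to a plain string, not an array.
    const classNames: string[] = Array.isArray(names) ? names : [names];
    return `"${classNames.join(',')}"`;
}

// e.g. testSuiteToClassList('force-app/main/default/testSuites/MySuite.testSuite-meta.xml')
//      -> '"ABC2,ABC1Test"'
```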
18 | const messages = Messages.loadMessages('sfpowerkit', 'apextestsuite_convert'); 19 | 20 | export default class Convert extends SfpowerkitCommand { 21 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 22 | protected static requiresProject = true; 23 | 24 | public static description = messages.getMessage('commandDescription'); 25 | 26 | public static examples = [ 27 | `$ sfdx sfpowerkit:source:apextestsuite:convert -n MyApexTestSuite 28 | "ABC2,ABC1Test" 29 | `, 30 | ]; 31 | 32 | protected static flagsConfig = { 33 | name: flags.string({ 34 | required: true, 35 | char: 'n', 36 | description: messages.getMessage('nameFlagDescription'), 37 | }), 38 | loglevel: flags.enum({ 39 | description: 'logging level for this command invocation', 40 | default: 'info', 41 | required: false, 42 | options: [ 43 | 'trace', 44 | 'debug', 45 | 'info', 46 | 'warn', 47 | 'error', 48 | 'fatal', 49 | 'TRACE', 50 | 'DEBUG', 51 | 'INFO', 52 | 'WARN', 53 | 'ERROR', 54 | 'FATAL', 55 | ], 56 | }), 57 | }; 58 | 59 | public async execute(): Promise { 60 | rimraf.sync('temp_sfpowerkit'); 61 | 62 | const entries = fg.sync(`**${this.flags.name}.testSuite-meta.xml`, { 63 | onlyFiles: true, 64 | absolute: true, 65 | baseNameMatch: true, 66 | }); 67 | 68 | if (!entries[0]) throw new SfdxError(`Apex Test Suite ${this.flags.name} not found`); 69 | 70 | SFPLogger.log(`Apex Test Suite File Path ${entries[0]}`, LoggerLevel.DEBUG); 71 | 72 | if (fs.existsSync(path.resolve(entries[0]))) { 73 | const parser = new xml2js.Parser({ explicitArray: false }); 74 | const parseString = util.promisify(parser.parseString); 75 | 76 | let apex_test_suite = await parseString(fs.readFileSync(path.resolve(entries[0]))); 77 | 78 | let testclasses; 79 | const doublequote = '"'; 80 | if (apex_test_suite.ApexTestSuite.testClassName.constructor === Array) { 81 | testclasses = doublequote + apex_test_suite.ApexTestSuite.testClassName.join() + doublequote; 82 | } else { 83 | testclasses = doublequote + apex_test_suite.ApexTestSuite.testClassName + doublequote; 84 | } 85 | 86 | this.ux.log(testclasses); 87 | 88 | return testclasses; 89 | } else { 90 | throw new SfdxError('Apex Test Suite not found'); 91 | } 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/relaxiprange.ts: -------------------------------------------------------------------------------- 1 | import { AnyJson } from '@salesforce/ts-types'; 2 | import { flags } from '@salesforce/command'; 3 | import { Messages, SfdxError } from '@salesforce/core'; 4 | import RelaxIPRangeImpl from '../../../impl/org/relaxIPRangeImpl'; 5 | import SFPowerkitCommand from '../../../sfpowerkitCommand'; 6 | 7 | // tslint:disable-next-line:ordered-imports 8 | 9 | // Initialize Messages with the current plugin directory 10 | Messages.importMessagesDirectory(__dirname); 11 | 12 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 13 | // or any library that is using the messages framework can also be loaded this way. 
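The `sfpowerkit:org:relaxiprange` command defined below accepts exactly one of `-r/--range`, `--all`, or `--none`, and normalises each range string into a `{ start, end }` pair before delegating to `RelaxIPRangeImpl.setIp`. A small illustrative sketch of that normalisation (not part of the plugin source):

```typescript
// Illustrative sketch only — not part of relaxiprange.ts.
interface IpRange {
    start: string;
    end: string;
}

// flags.array splits the -r value on commas, so each entry is a single
// "<start>-<end>" range that is turned into the shape setIp expects.
function parseRangeFlag(ranges: string[]): IpRange[] {
    return ranges.map((element) => {
        const [start, end] = element.split('-');
        return { start, end };
    });
}

console.log(parseRangeFlag(['122.0.0.0-122.255.255.255', '49.0.0.0-49.255.255.255']));
// -> [ { start: '122.0.0.0', end: '122.255.255.255' },
//      { start: '49.0.0.0', end: '49.255.255.255' } ]
```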
14 | const messages = Messages.loadMessages('sfpowerkit', 'org_relaxiprange'); 15 | 16 | export default class Relaxiprange extends SFPowerkitCommand { 17 | public connectedapp_consumerKey: string; 18 | public static description = messages.getMessage('commandDescription'); 19 | 20 | public static examples = [ 21 | `sfdx sfpowerkit:org:relaxiprange -u sandbox -r "122.0.0.0-122.255.255.255,49.0.0.0-49.255.255.255"`, 22 | `sfdx sfpowerkit:org:relaxiprange -u sandbox --all`, 23 | `sfdx sfpowerkit:org:relaxiprange -u sandbox --none`, 24 | ]; 25 | 26 | protected static flagsConfig = { 27 | range: flags.array({ 28 | required: false, 29 | char: 'r', 30 | description: messages.getMessage('rangeFlagDescription'), 31 | }), 32 | all: flags.boolean({ 33 | description: messages.getMessage('allDescription'), 34 | required: false, 35 | }), 36 | none: flags.boolean({ 37 | description: messages.getMessage('noneDescription'), 38 | required: false, 39 | }), 40 | loglevel: flags.enum({ 41 | description: 'logging level for this command invocation', 42 | default: 'info', 43 | required: false, 44 | options: [ 45 | 'trace', 46 | 'debug', 47 | 'info', 48 | 'warn', 49 | 'error', 50 | 'fatal', 51 | 'TRACE', 52 | 'DEBUG', 53 | 'INFO', 54 | 'WARN', 55 | 'ERROR', 56 | 'FATAL', 57 | ], 58 | }), 59 | }; 60 | 61 | // Comment this out if your command does not require an org username 62 | protected static requiresUsername = true; 63 | 64 | public async execute(): Promise { 65 | //validate only one param is passed 66 | if (!this.flags.range && !this.flags.all && !this.flags.none) { 67 | throw new SfdxError( 68 | `Required input is missing. you must pass anyone of the flag -r (or) --all (or) --none` 69 | ); 70 | } else if ( 71 | (this.flags.range && this.flags.all) || 72 | (this.flags.range && this.flags.none) || 73 | (this.flags.none && this.flags.all) 74 | ) { 75 | throw new SfdxError(`Too many inputs found, you must pass only one param -r (or) --all (or) --none`); 76 | } 77 | 78 | let ipRangeToSet = []; 79 | if (this.flags.range) { 80 | ipRangeToSet = this.flags.range.map(function (element: string) { 81 | let range = element.split('-'); 82 | return { start: range[0], end: range[1] }; 83 | }); 84 | } 85 | 86 | return await RelaxIPRangeImpl.setIp( 87 | this.org.getConnection(), 88 | this.org.getUsername(), 89 | ipRangeToSet, 90 | this.flags.all, 91 | this.flags.none 92 | ); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/pool/delete.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | import poolHydrateImpl from '../../../impl/pool/scratchorg/PoolDeleteImpl'; 5 | import SfpowerkitCommand from '../../../sfpowerkitCommand'; 6 | import { Messages } from '@salesforce/core'; 7 | 8 | // Initialize Messages with the current plugin directory 9 | Messages.importMessagesDirectory(__dirname); 10 | 11 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 12 | // or any library that is using the messages framework can also be loaded this way. 
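Like the other pool commands in this plugin, `sfpowerkit:pool:delete` (defined below) first refreshes the Dev Hub auth and then resolves the API version to use: the `--apiversion` flag if one was passed, otherwise the highest version the hub connection reports. A minimal sketch of that shared pattern — the `resolveApiVersion` helper name is hypothetical, but the calls match what the command does inline:

```typescript
// Illustrative sketch only — the resolveApiVersion helper is hypothetical; the
// pattern matches the inline logic of the pool commands below.
import { Org } from '@salesforce/core';

async function resolveApiVersion(hubOrg: Org, apiversionFlag?: string): Promise<string> {
    await hubOrg.refreshAuth();
    const conn = hubOrg.getConnection();
    // Prefer an explicit --apiversion; otherwise use the highest version the hub supports.
    return apiversionFlag || (await conn.retrieveMaxApiVersion());
}
```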
13 | const messages = Messages.loadMessages('sfpowerkit', 'scratchorg_poolhydrate'); 14 | 15 | export default class Delete extends SfpowerkitCommand { 16 | public static description = messages.getMessage('commandDescription'); 17 | 18 | protected static requiresDevhubUsername = true; 19 | 20 | public static examples = [ 21 | `$ sfdx sfpowerkit:pool:delete -t core `, 22 | `$ sfdx sfpowerkit:pool:delete -t core -v devhub`, 23 | `$ sfdx sfpowerkit:pool:delete -t core -v devhub -m`, 24 | `$ sfdx sfpowerkit:pool:delete -t core -v devhub -m -a`, 25 | ]; 26 | 27 | protected static flagsConfig = { 28 | tag: flags.string({ 29 | char: 't', 30 | description: messages.getMessage('tagDescription'), 31 | required: true, 32 | }), 33 | mypool: flags.boolean({ 34 | char: 'm', 35 | description: messages.getMessage('mypoolDescription'), 36 | required: false, 37 | }), 38 | allscratchorgs: flags.boolean({ 39 | char: 'a', 40 | description: messages.getMessage('allscratchorgsDescription'), 41 | required: false, 42 | }), 43 | inprogressonly: flags.boolean({ 44 | char: 'i', 45 | description: messages.getMessage('inprogressonlyDescription'), 46 | required: false, 47 | exclusive: ['allscratchorgs'], 48 | }), 49 | loglevel: flags.enum({ 50 | description: 'logging level for this command invocation', 51 | default: 'info', 52 | required: false, 53 | options: [ 54 | 'trace', 55 | 'debug', 56 | 'info', 57 | 'warn', 58 | 'error', 59 | 'fatal', 60 | 'TRACE', 61 | 'DEBUG', 62 | 'INFO', 63 | 'WARN', 64 | 'ERROR', 65 | 'FATAL', 66 | ], 67 | }), 68 | }; 69 | 70 | public async execute(): Promise { 71 | await this.hubOrg.refreshAuth(); 72 | const hubConn = this.hubOrg.getConnection(); 73 | 74 | this.flags.apiversion = this.flags.apiversion || (await hubConn.retrieveMaxApiVersion()); 75 | 76 | let hydrateImpl = new poolHydrateImpl( 77 | this.hubOrg, 78 | this.flags.apiversion, 79 | this.flags.tag, 80 | this.flags.mypool, 81 | this.flags.allscratchorgs, 82 | this.flags.inprogressonly 83 | ); 84 | 85 | let result = await hydrateImpl.execute(); 86 | 87 | if (!this.flags.json) { 88 | if (result.length > 0) { 89 | this.ux.log(`======== Scratch org Deleted ========`); 90 | this.ux.table(result, ['orgId', 'username']); 91 | } else { 92 | SFPLogger.log(`${this.flags.tag} pool has No Scratch orgs available to delete.`, LoggerLevel.INFO); 93 | } 94 | } 95 | 96 | return result as AnyJson; 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/profile/diff.ts: -------------------------------------------------------------------------------- 1 | import { FlagsConfig, flags, SfdxResult } from '@salesforce/command'; 2 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 3 | import ProfileDiffImpl from '../../../../impl/source/profiles/profileDiff'; 4 | import { Messages } from '@salesforce/core'; 5 | 6 | // Initialize Messages with the current plugin directory 7 | Messages.importMessagesDirectory(__dirname); 8 | 9 | const messages = Messages.loadMessages('sfpowerkit', 'org_profile_diff'); 10 | 11 | export default class Diff extends SFPowerkitCommand { 12 | public static description = messages.getMessage('commandDescription'); 13 | 14 | public static examples = [ 15 | `$ sfdx sfpowerkit:org:profile:diff --profilelist profilenames --targetusername username (Compare liste profiles path against target org)`, 16 | `$ sfdx sfpowerkit:org:profile:diff --targetusername username (compare all profile in the project against the target org)`, 17 | `$ sfdx 
sfpowerkit:org:profile:diff --sourceusername sourcealias --targetusername username (compare all profile in the source org against the target org)`, 18 | ]; 19 | 20 | protected static flagsConfig: FlagsConfig = { 21 | profilelist: flags.array({ 22 | char: 'p', 23 | description: messages.getMessage('profileListFlagDescription'), 24 | required: false, 25 | map: (n: string) => n.trim(), 26 | }), 27 | sourceusername: flags.string({ 28 | char: 's', 29 | description: messages.getMessage('sourceUsernameDescription'), 30 | required: false, 31 | }), 32 | output: flags.string({ 33 | char: 'd', 34 | description: messages.getMessage('outputFolderDescription'), 35 | required: false, 36 | }), 37 | apiversion: flags.builtin(), 38 | loglevel: flags.enum({ 39 | description: 'logging level for this command invocation', 40 | default: 'info', 41 | required: false, 42 | options: [ 43 | 'trace', 44 | 'debug', 45 | 'info', 46 | 'warn', 47 | 'error', 48 | 'fatal', 49 | 'TRACE', 50 | 'DEBUG', 51 | 'INFO', 52 | 'WARN', 53 | 'ERROR', 54 | 'FATAL', 55 | ], 56 | }), 57 | }; 58 | 59 | public static result: SfdxResult = { 60 | tableColumnData: { 61 | columns: [ 62 | { key: 'status', label: 'Status' }, 63 | { key: 'metadataType', label: 'Type' }, 64 | { key: 'componentName', label: 'Component Name' }, 65 | { key: 'path', label: 'Path' }, 66 | ], 67 | }, 68 | display() { 69 | if (Array.isArray(this.data) && this.data.length) { 70 | this.ux.table(this.data, this.tableColumnData); 71 | } 72 | }, 73 | }; 74 | 75 | protected static requiresUsername = true; 76 | 77 | public async execute(): Promise { 78 | const outputFolder: string = this.flags.output; 79 | const sourceusername: string = this.flags.sourceusername; 80 | let profileList: string[] = this.flags.profilelist; 81 | if (!profileList || profileList.length === 0) { 82 | if (sourceusername && !outputFolder) { 83 | throw new Error('Output folder is required'); 84 | } 85 | } 86 | let profileDiff = new ProfileDiffImpl(profileList, sourceusername, this.org, outputFolder); 87 | let output = profileDiff.diff().then(() => { 88 | return profileDiff.output; 89 | }); 90 | 91 | let outputData = await output; 92 | return outputData; 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/utils/queryExecutor.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-unused-vars */ 2 | import { Connection } from 'jsforce/connection'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | 5 | const retry = require('async-retry'); 6 | 7 | export default class QueryExecutor { 8 | constructor(private conn: Connection) {} 9 | 10 | public async executeQuery(query: string, tooling: boolean) { 11 | let results; 12 | 13 | if (tooling) { 14 | results = await retry( 15 | async () => { 16 | try { 17 | return (await this.conn.tooling.query(query)) as any; 18 | } catch (error) { 19 | throw new Error(`Unable to fetch ${query}`); 20 | } 21 | }, 22 | { 23 | retries: 5, 24 | minTimeout: 2000, 25 | onRetry: (error) => { 26 | SFPLogger.log(`Retrying Network call due to ${error.message}`, LoggerLevel.INFO); 27 | }, 28 | } 29 | ); 30 | } else { 31 | results = await retry( 32 | async () => { 33 | try { 34 | return (await this.conn.query(query)) as any; 35 | } catch (error) { 36 | throw new Error(`Unable to fetch ${query}`); 37 | } 38 | }, 39 | { 40 | retries: 5, 41 | minTimeout: 2000, 42 | onRetry: (error) => { 43 | SFPLogger.log(`Retrying Network call due to 
${error.message}`, LoggerLevel.INFO); 44 | }, 45 | } 46 | ); 47 | } 48 | 49 | if (!results.done) { 50 | let tempRecords = results.records; 51 | while (!results.done) { 52 | results = await this.queryMore(results.nextRecordsUrl, tooling); 53 | tempRecords = tempRecords.concat(results.records); 54 | } 55 | results.records = tempRecords; 56 | } 57 | 58 | return results.records; 59 | } 60 | public async queryMore(url: string, tooling: boolean) { 61 | let result; 62 | if (tooling) { 63 | result = await retry( 64 | async () => { 65 | try { 66 | return (await this.conn.tooling.queryMore(url)) as any; 67 | } catch (error) { 68 | throw new Error(`Unable to fetch ${url}`); 69 | } 70 | }, 71 | { 72 | retries: 5, 73 | minTimeout: 2000, 74 | onRetry: (error) => { 75 | SFPLogger.log(`Retrying Network call due to ${error.message}`, LoggerLevel.INFO); 76 | }, 77 | } 78 | ); 79 | } else { 80 | result = await retry( 81 | async () => { 82 | try { 83 | return (await this.conn.tooling.query(url)) as any; 84 | } catch (error) { 85 | throw new Error(`Unable to fetch ${url}`); 86 | } 87 | }, 88 | { 89 | retries: 5, 90 | minTimeout: 2000, 91 | onRetry: (error) => { 92 | SFPLogger.log(`Retrying Network call due to ${error.message}`, LoggerLevel.INFO); 93 | }, 94 | } 95 | ); 96 | } 97 | return result; 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/auth/login.ts: -------------------------------------------------------------------------------- 1 | import { AnyJson, getString } from '@salesforce/ts-types'; 2 | import { flags } from '@salesforce/command'; 3 | import SFPowerkitCommand from '../../../sfpowerkitCommand'; 4 | import * as rimraf from 'rimraf'; 5 | import { Connection } from 'jsforce'; 6 | 7 | import { SfdxError, AuthInfo, Aliases, ConfigGroup, Messages } from '@salesforce/core'; 8 | // tslint:disable-next-line:ordered-imports 9 | 10 | // Initialize Messages with the current plugin directory 11 | Messages.importMessagesDirectory(__dirname); 12 | 13 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 14 | // or any library that is using the messages framework can also be loaded this way. 
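The `sfpowerkit:auth:login` command defined below performs a username/password login through jsforce and then persists the resulting session as an sfdx `AuthInfo` so later commands can reuse it by username or alias. A condensed, illustrative sketch of that flow — the `loginAndSave` helper is hypothetical, and the `accessTokenOptions` shape follows the command body further below:

```typescript
// Illustrative sketch only — loginAndSave is a hypothetical helper; the
// accessTokenOptions shape mirrors the command below.
import { Connection } from 'jsforce';
import { AuthInfo } from '@salesforce/core';

async function loginAndSave(username: string, password: string, loginUrl = 'https://test.salesforce.com') {
    const conn = new Connection({ loginUrl });
    // If the org requires a security token, append it to the password first.
    const userInfo = await conn.login(username, password);

    const accessTokenOptions = {
        accessToken: conn.accessToken,
        instanceUrl: conn.instanceUrl,
        loginUrl,
        orgId: userInfo.organizationId,
    };

    // Persist the session so later sfdx commands can reuse it.
    const auth = await AuthInfo.create({ username, accessTokenOptions });
    await auth.save();

    return accessTokenOptions;
}
```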
15 | const messages = Messages.loadMessages('sfpowerkit', 'auth_login'); 16 | 17 | export default class Login extends SFPowerkitCommand { 18 | public static description = messages.getMessage('commandDescription'); 19 | 20 | public static examples = [ 21 | `$ sfdx sfpowerkit:auth:login -u azlam@sfdc.com -p Xasdax2w2 -a prod 22 | Authorized to azlam@sfdc.com 23 | `, 24 | ]; 25 | 26 | protected static flagsConfig = { 27 | username: flags.string({ 28 | required: true, 29 | char: 'u', 30 | description: messages.getMessage('usernameFlagDescription'), 31 | }), 32 | password: flags.string({ 33 | required: true, 34 | char: 'p', 35 | description: messages.getMessage('passwordFlagDescription'), 36 | }), 37 | securitytoken: flags.string({ 38 | required: false, 39 | char: 's', 40 | description: messages.getMessage('securityTokenFlagDescription'), 41 | }), 42 | url: flags.url({ 43 | required: false, 44 | char: 'r', 45 | description: messages.getMessage('urlFlagDescription'), 46 | }), 47 | alias: flags.string({ 48 | required: false, 49 | char: 'a', 50 | description: messages.getMessage('aliasFlagDescription'), 51 | }), 52 | }; 53 | 54 | loginUrl: string; 55 | password: string; 56 | 57 | public async execute(): Promise { 58 | rimraf.sync('temp_sfpowerkit'); 59 | 60 | if (this.flags.url) this.loginUrl = this.flags.url; 61 | else this.loginUrl = 'https://test.salesforce.com'; 62 | 63 | if (this.flags.securitytoken) this.password = this.flags.password.concat(this.flags.securitytoken); 64 | else this.password = this.flags.password; 65 | 66 | let conn = new Connection({ 67 | loginUrl: this.loginUrl, 68 | }); 69 | 70 | await conn.login(this.flags.username, this.password, function (err) { 71 | if (err) { 72 | throw new SfdxError('Unable to connect to the target org'); 73 | } 74 | }); 75 | 76 | const accessTokenOptions = { 77 | accessToken: conn.accessToken, 78 | instanceUrl: conn.instanceUrl, 79 | loginUrl: this.loginUrl, 80 | orgId: getString(conn, 'userInfo.organizationId'), 81 | }; 82 | 83 | const auth = await AuthInfo.create({ 84 | username: this.flags.username, 85 | accessTokenOptions, 86 | }); 87 | await auth.save(); 88 | 89 | if (this.flags.alias) { 90 | const aliases = await Aliases.create(ConfigGroup.getOptions('orgs', 'alias.json')); 91 | aliases.set(this.flags.alias, this.flags.username); 92 | await aliases.write(); 93 | } 94 | 95 | this.ux.log(`Authorized to ${this.flags.username}`); 96 | 97 | return { username: this.flags.username, accessTokenOptions }; 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/impl/parser/ApexTypeFetcher.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | const path = require('path'); 3 | const glob = require('glob'); 4 | import SFPLogger, {LoggerLevel} from '@dxatscale/sfp-logger'; 5 | 6 | import ApexTypeListener from './listeners/ApexTypeListener'; 7 | 8 | import { 9 | ApexLexer, 10 | ApexParser, 11 | ApexParserListener, 12 | ThrowingErrorListener, 13 | CaseInsensitiveInputStream, 14 | CommonTokenStream, 15 | ParseTreeWalker, 16 | } from 'apex-parser'; 17 | 18 | export default class ApexTypeFetcher { 19 | /** 20 | * Get Apex type of cls files in a search directory. 21 | * Sorts files into classes, test classes and interfaces. 
22 | * @param searchDir 23 | */ 24 | public getApexTypeOfClsFiles(searchDir: string): ApexSortedByType { 25 | const apexSortedByType: ApexSortedByType = { 26 | class: [], 27 | testClass: [], 28 | interface: [], 29 | parseError: [], 30 | }; 31 | 32 | let clsFiles: string[]; 33 | if (fs.existsSync(searchDir)) { 34 | clsFiles = glob.sync(`**/*.cls`, { 35 | cwd: searchDir, 36 | absolute: true, 37 | }); 38 | } else { 39 | throw new Error(`Search directory does not exist`); 40 | } 41 | 42 | for (let clsFile of clsFiles) { 43 | const clsPath = path.resolve(clsFile); 44 | let clsPayload: string = fs.readFileSync(clsPath, 'utf8'); 45 | let fileDescriptor: FileDescriptor = { 46 | name: path.basename(clsFile, '.cls'), 47 | filepath: clsFile, 48 | }; 49 | 50 | // Parse cls file 51 | let compilationUnitContext; 52 | try { 53 | let lexer = new ApexLexer(new CaseInsensitiveInputStream(clsPath, clsPayload)); 54 | let tokens: CommonTokenStream = new CommonTokenStream(lexer); 55 | 56 | let parser = new ApexParser(tokens); 57 | parser.removeErrorListeners(); 58 | parser.addErrorListener(new ThrowingErrorListener()); 59 | 60 | compilationUnitContext = parser.compilationUnit(); 61 | } catch (err) { 62 | SFPLogger.log(`Failed to parse ${clsFile}. Error occured ${JSON.stringify(err)} `, LoggerLevel.DEBUG); 63 | 64 | fileDescriptor['error'] = err; 65 | apexSortedByType['parseError'].push(fileDescriptor); 66 | continue; 67 | } 68 | 69 | let apexTypeListener: ApexTypeListener = new ApexTypeListener(); 70 | 71 | // Walk parse tree to determine Apex type 72 | ParseTreeWalker.DEFAULT.walk(apexTypeListener as ApexParserListener, compilationUnitContext); 73 | 74 | let apexType = apexTypeListener.getApexType(); 75 | 76 | if (apexType.class) { 77 | apexSortedByType['class'].push(fileDescriptor); 78 | if (apexType.testClass) { 79 | apexSortedByType['testClass'].push(fileDescriptor); 80 | } 81 | } else if (apexType.interface) { 82 | apexSortedByType['interface'].push(fileDescriptor); 83 | } else { 84 | fileDescriptor['error'] = { 85 | message: 'Unknown Apex Type', 86 | }; 87 | apexSortedByType['parseError'].push(fileDescriptor); 88 | } 89 | } 90 | 91 | return apexSortedByType; 92 | } 93 | } 94 | 95 | export interface ApexSortedByType { 96 | class: FileDescriptor[]; 97 | testClass: FileDescriptor[]; 98 | interface: FileDescriptor[]; 99 | parseError: FileDescriptor[]; 100 | } 101 | 102 | interface FileDescriptor { 103 | name: string; 104 | filepath: string; 105 | error?: any; 106 | } 107 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/pool/fetch.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import SFPowerkitCommand from '../../../sfpowerkitCommand'; 4 | import PoolFetchImpl from '../../../impl/pool/scratchorg/poolFetchImpl'; 5 | import { Messages } from '@salesforce/core'; 6 | 7 | // Initialize Messages with the current plugin directory 8 | Messages.importMessagesDirectory(__dirname); 9 | 10 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 11 | // or any library that is using the messages framework can also be loaded this way. 
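When `sfpowerkit:pool:fetch` (defined below) runs without `--json` or `--sendtouser`, it flattens the fetched scratch org record into key/value rows for the table output, skipping empty fields. A small illustrative sketch of that flattening step — the `toKeyValueRows` helper is hypothetical:

```typescript
// Illustrative sketch only — toKeyValueRows is a hypothetical helper.
function toKeyValueRows(result: Record<string, unknown>): { key: string; value: unknown }[] {
    const rows: { key: string; value: unknown }[] = [];
    for (const [key, value] of Object.entries(result)) {
        if (value) {
            // Empty or undefined fields are left out of the table.
            rows.push({ key, value });
        }
    }
    return rows;
}

// e.g. toKeyValueRows({ username: 'test-x@example.com', password: '', loginURL: 'https://test.salesforce.com' })
// keeps only the username and loginURL rows.
```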
12 | const messages = Messages.loadMessages('sfpowerkit', 'scratchorg_poolFetch'); 13 | 14 | export default class Fetch extends SFPowerkitCommand { 15 | public static description = messages.getMessage('commandDescription'); 16 | 17 | protected static requiresDevhubUsername = true; 18 | 19 | public static examples = [ 20 | `$ sfdx sfpowerkit:pool:fetch -t core `, 21 | `$ sfdx sfpowerkit:pool:fetch -t core -v devhub`, 22 | `$ sfdx sfpowerkit:pool:fetch -t core -v devhub -m`, 23 | `$ sfdx sfpowerkit:pool:fetch -t core -v devhub -s testuser@test.com`, 24 | ]; 25 | 26 | protected static flagsConfig = { 27 | tag: flags.string({ 28 | char: 't', 29 | description: messages.getMessage('tagDescription'), 30 | required: true, 31 | }), 32 | mypool: flags.boolean({ 33 | char: 'm', 34 | description: messages.getMessage('mypoolDescription'), 35 | required: false, 36 | }), 37 | alias: flags.string({ 38 | char: 'a', 39 | description: messages.getMessage('aliasDescription'), 40 | required: false, 41 | }), 42 | sendtouser: flags.string({ 43 | char: 's', 44 | description: messages.getMessage('sendToUserDescription'), 45 | required: false, 46 | }), 47 | setdefaultusername: flags.boolean({ 48 | char: 'd', 49 | description: messages.getMessage('setdefaultusernameDescription'), 50 | required: false, 51 | }), 52 | loglevel: flags.enum({ 53 | description: 'logging level for this command invocation', 54 | default: 'info', 55 | required: false, 56 | options: [ 57 | 'trace', 58 | 'debug', 59 | 'info', 60 | 'warn', 61 | 'error', 62 | 'fatal', 63 | 'TRACE', 64 | 'DEBUG', 65 | 'INFO', 66 | 'WARN', 67 | 'ERROR', 68 | 'FATAL', 69 | ], 70 | }), 71 | }; 72 | 73 | public async execute(): Promise { 74 | await this.hubOrg.refreshAuth(); 75 | const hubConn = this.hubOrg.getConnection(); 76 | 77 | this.flags.apiversion = this.flags.apiversion || (await hubConn.retrieveMaxApiVersion()); 78 | 79 | let fetchImpl = new PoolFetchImpl( 80 | this.hubOrg, 81 | this.flags.tag, 82 | this.flags.mypool, 83 | this.flags.sendtouser, 84 | this.flags.alias, 85 | this.flags.setdefaultusername 86 | ); 87 | 88 | let result = await fetchImpl.execute(); 89 | 90 | if (!this.flags.json && !this.flags.sendtouser) { 91 | this.ux.log(`======== Scratch org details ========`); 92 | let list = []; 93 | for (let [key, value] of Object.entries(result)) { 94 | if (value) { 95 | list.push({ key: key, value: value }); 96 | } 97 | } 98 | this.ux.table(list, ['key', 'value']); 99 | 100 | fetchImpl.loginToScratchOrgIfSfdxAuthURLExists(result); 101 | } 102 | 103 | if (!this.flags.sendtouser) return result as AnyJson; 104 | else return true; 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/sandbox/info.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | let request = require('request-promise-native'); 4 | import { Messages, SfdxError } from '@salesforce/core'; 5 | import { Sfpowerkit } from '../../../../sfpowerkit'; 6 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 7 | import { Connection } from 'jsforce'; 8 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 9 | 10 | // Initialize Messages with the current plugin directory 11 | Messages.importMessagesDirectory(__dirname); 12 | 13 | // Load the specific messages for this file. 
Messages from @salesforce/command, @salesforce/core, 14 | // or any library that is using the messages framework can also be loaded this way. 15 | const messages = Messages.loadMessages('sfpowerkit', 'sandbox_info'); 16 | 17 | export default class Info extends SfpowerkitCommand { 18 | public static description = messages.getMessage('commandDescription'); 19 | 20 | public static examples = [ 21 | `$ sfdx sfpowerkit:org:sandbox:info -n test2 -v produser@example.com 22 | Successfully Enqueued Refresh of Sandbox 23 | `, 24 | ]; 25 | 26 | protected static flagsConfig = { 27 | name: flags.string({ 28 | required: true, 29 | char: 'n', 30 | description: messages.getMessage('nameFlagDescription'), 31 | }), 32 | showonlylatest: flags.boolean({ 33 | required: false, 34 | char: 's', 35 | default: false, 36 | description: messages.getMessage('showOnlyLatestFlagDescription'), 37 | }), 38 | }; 39 | 40 | // Comment this out if your command does not require a hub org username 41 | protected static requiresDevhubUsername = true; 42 | 43 | public async execute(): Promise { 44 | Sfpowerkit.setLogLevel('INFO', false); 45 | await this.hubOrg.refreshAuth(); 46 | 47 | const conn = this.hubOrg.getConnection(); 48 | 49 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 50 | 51 | let result = await this.getSandboxInfo(conn, this.flags.name); 52 | 53 | SFPLogger.log(`Successfully Retrieved Sandbox Details`, LoggerLevel.INFO); 54 | 55 | if (!this.flags.json) this.ux.logJson(result); 56 | 57 | return result; 58 | } 59 | 60 | private async getSandboxInfo(conn: Connection, name: string) { 61 | let query_uri = `${conn.instanceUrl}/services/data/v${this.flags.apiversion}/tooling/query?q=SELECT+Id,SandboxName+FROM+SandboxProcess+WHERE+SandboxName+in+('${name}')+ORDER+BY+EndDate+DESC`; 62 | 63 | const sandbox_query_result = await request({ 64 | method: 'get', 65 | url: query_uri, 66 | headers: { 67 | Authorization: `Bearer ${conn.accessToken}`, 68 | }, 69 | json: true, 70 | }); 71 | 72 | if (sandbox_query_result.records[0] == undefined) 73 | throw new SfdxError(`Unable to find a sandbox with name: ${name}`); 74 | 75 | let result = await this.processSandboxInfo(sandbox_query_result.records, conn, this.flags.showonlylatest); 76 | 77 | return result; 78 | } 79 | 80 | private async processSandboxInfo(sandboxRecords, conn, isShowOnlyLatest) { 81 | let result = []; 82 | 83 | for (const item of sandboxRecords) { 84 | let output = await this.getDetailedSandboxInfo(item.attributes.url, conn); 85 | result.push(output); 86 | if (isShowOnlyLatest) break; 87 | } 88 | return result; 89 | } 90 | 91 | private async getDetailedSandboxInfo(sandboxInfoUl: string, conn: Connection) { 92 | const query_uri = `${conn.instanceUrl}${sandboxInfoUl}`; 93 | 94 | const sandbox_query_result = await request({ 95 | method: 'get', 96 | url: query_uri, 97 | headers: { 98 | Authorization: `Bearer ${conn.accessToken}`, 99 | }, 100 | json: true, 101 | }); 102 | 103 | return sandbox_query_result; 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/project/orgdiff.ts: -------------------------------------------------------------------------------- 1 | import { FlagsConfig, flags, SfdxResult } from '@salesforce/command'; 2 | import { Sfpowerkit } from '../../../sfpowerkit'; 3 | import SfpowerkitCommand from '../../../sfpowerkitCommand'; 4 | import OrgDiffImpl from '../../../impl/project/orgdiff/orgDiffImpl'; 5 | import { fs, Messages } from 
'@salesforce/core'; 6 | 7 | // Initialize Messages with the current plugin directory 8 | Messages.importMessagesDirectory(__dirname); 9 | 10 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 11 | // or any library that is using the messages framework can also be loaded this way. 12 | const messages = Messages.loadMessages('sfpowerkit', 'project_orgdiff'); 13 | 14 | export default class OrgDiff extends SfpowerkitCommand { 15 | public static description = messages.getMessage('commandDescription'); 16 | 17 | public static examples = [ 18 | `$ sfdx sfpowerkit:project:orgdiff --filesorfolders directory --noconflictmarkers --targetusername sandbox`, 19 | `$ sfdx sfpowerkit:project:orgdiff -f fileName --targetusername sandbox`, 20 | ]; 21 | 22 | protected static flagsConfig: FlagsConfig = { 23 | filesorfolders: flags.array({ 24 | char: 'f', 25 | description: messages.getMessage('filesOrFoldersFlagDescription'), 26 | required: true, 27 | map: (f: string) => f.trim(), 28 | }), 29 | noconflictmarkers: flags.boolean({ 30 | char: 'c', 31 | description: messages.getMessage('noConflictMarkersDescription'), 32 | required: false, 33 | }), 34 | loglevel: flags.enum({ 35 | description: 'logging level for this command invocation', 36 | default: 'info', 37 | required: false, 38 | options: [ 39 | 'trace', 40 | 'debug', 41 | 'info', 42 | 'warn', 43 | 'error', 44 | 'fatal', 45 | 'TRACE', 46 | 'DEBUG', 47 | 'INFO', 48 | 'WARN', 49 | 'ERROR', 50 | 'FATAL', 51 | ], 52 | }), 53 | outputformat: flags.enum({ 54 | required: false, 55 | char: 'o', 56 | description: messages.getMessage('outputFormatFlagDescription'), 57 | options: ['json', 'csv'], 58 | }), 59 | }; 60 | 61 | public static result: SfdxResult = { 62 | tableColumnData: { 63 | columns: [ 64 | { key: 'status', label: 'Status' }, 65 | { key: 'metadataType', label: 'Type' }, 66 | { key: 'componentName', label: 'Component Name' }, 67 | { key: 'path', label: 'Path' }, 68 | ], 69 | }, 70 | display() { 71 | if (Array.isArray(this.data) && this.data.length) { 72 | this.ux.table(this.data, this.tableColumnData); 73 | } 74 | }, 75 | }; 76 | 77 | protected static requiresUsername = true; 78 | protected static requiresProject = true; 79 | 80 | public async execute(): Promise { 81 | Sfpowerkit.setUx(this.ux); 82 | this.ux.startSpinner('Running...'); 83 | 84 | let filesOrFolders = this.flags.filesorfolders; 85 | 86 | let orgDiff = new OrgDiffImpl(filesOrFolders, this.org, !this.flags.noconflictmarkers); 87 | 88 | let output = await orgDiff.orgDiff(); 89 | this.ux.stopSpinner('Completed'); 90 | if (!this.flags.outputformat || this.flags.outputformat == 'json') { 91 | fs.writeJson('orgdiff.json', output); 92 | } else if (this.flags.outputformat == 'csv') { 93 | await this.generateCSVOutput(output); 94 | } 95 | return output; 96 | } 97 | public async generateCSVOutput(result: any[]) { 98 | let newLine = '\r\n'; 99 | let output = 'status,metadataType,componentName,path' + newLine; 100 | result.forEach((element) => { 101 | output = `${output}${element.status},${element.metadataType},${element.componentName},${element.path}${newLine}`; 102 | }); 103 | fs.writeFile('orgdiff.csv', output); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/package/version/codecoverage.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | 
import { Sfpowerkit } from '../../../../sfpowerkit'; 4 | import { Messages } from '@salesforce/core'; 5 | import PackageVersionCoverage from '../../../../impl/package/version/packageVersionCoverage'; 6 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 7 | // Initialize Messages with the current plugin directory 8 | Messages.importMessagesDirectory(__dirname); 9 | 10 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 11 | // or any library that is using the messages framework can also be loaded this way. 12 | const messages = Messages.loadMessages('sfpowerkit', 'package_codecoverage'); 13 | 14 | export default class CodeCoverage extends SfpowerkitCommand { 15 | public static description = messages.getMessage('commandDescription'); 16 | 17 | public static examples = [ 18 | `$ sfdx sfpowerkit:package:version:codecoverage -v myOrg@example.com -i 04tXXXXXXXXXXXXXXX \n`, 19 | `$ sfdx sfpowerkit:package:version:codecoverage -v myOrg@example.com -i 04tXXXXXXXXXXXXXXX,04tXXXXXXXXXXXXXXX,04tXXXXXXXXXXXXXXX \n`, 20 | `$ sfdx sfpowerkit:package:version:codecoverage -v myOrg@example.com -p core -n 1.2.0.45 \n`, 21 | `$ sfdx sfpowerkit:package:version:codecoverage -v myOrg@example.com -p 0HoXXXXXXXXXXXXXXX -n 1.2.0.45`, 22 | ]; 23 | 24 | protected static flagsConfig = { 25 | package: flags.string({ 26 | required: false, 27 | char: 'p', 28 | description: messages.getMessage('packageName'), 29 | }), 30 | versionnumber: flags.string({ 31 | required: false, 32 | char: 'n', 33 | description: messages.getMessage('packageVersionNumber'), 34 | }), 35 | versionid: flags.array({ 36 | required: false, 37 | char: 'i', 38 | description: messages.getMessage('packageVersionId'), 39 | }), 40 | apiversion: flags.builtin({ 41 | description: messages.getMessage('apiversion'), 42 | }), 43 | loglevel: flags.enum({ 44 | description: messages.getMessage('loglevel'), 45 | default: 'info', 46 | required: false, 47 | options: [ 48 | 'trace', 49 | 'debug', 50 | 'info', 51 | 'warn', 52 | 'error', 53 | 'fatal', 54 | 'TRACE', 55 | 'DEBUG', 56 | 'INFO', 57 | 'WARN', 58 | 'ERROR', 59 | 'FATAL', 60 | ], 61 | }), 62 | }; 63 | 64 | // Comment this out if your command does not require an org username 65 | protected static requiresDevhubUsername = true; 66 | public async execute(): Promise { 67 | Sfpowerkit.setLogLevel(this.flags.loglevel, this.flags.json); 68 | 69 | await this.hubOrg.refreshAuth(); 70 | 71 | const conn = this.hubOrg.getConnection(); 72 | 73 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 74 | 75 | let versionId = []; 76 | if (this.flags.versionid) { 77 | versionId = this.flags.versionid; 78 | } 79 | let versionNumber; 80 | if (this.flags.versionnumber) { 81 | versionNumber = this.flags.versionnumber; 82 | } 83 | let packageName; 84 | if (this.flags.package) { 85 | packageName = this.flags.package; 86 | } 87 | 88 | let packageVersionCoverageImpl: PackageVersionCoverage = new PackageVersionCoverage(); 89 | 90 | const result = (await packageVersionCoverageImpl.getCoverage( 91 | versionId, 92 | versionNumber, 93 | packageName, 94 | conn 95 | )) as any; 96 | 97 | this.ux.table(result, [ 98 | 'packageName', 99 | 'packageId', 100 | 'packageVersionNumber', 101 | 'packageVersionId', 102 | 'coverage', 103 | 'HasPassedCodeCoverageCheck', 104 | ]); 105 | return result; 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/source/customlabel/reconcile.ts: 
-------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | import * as xml2js from 'xml2js'; 4 | import * as util from 'util'; 5 | import * as fs from 'fs-extra'; 6 | import * as rimraf from 'rimraf'; 7 | import * as path from 'path'; 8 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 9 | import { Messages } from '@salesforce/core'; 10 | import SFPLogger from '@dxatscale/sfp-logger'; 11 | 12 | // Initialize Messages with the current plugin directory 13 | Messages.importMessagesDirectory(__dirname); 14 | 15 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 16 | // or any library that is using the messages framework can also be loaded this way. 17 | const messages = Messages.loadMessages('sfpowerkit', 'source_customlabel_clean'); 18 | 19 | export default class Reconcile extends SFPowerkitCommand { 20 | private customlabel_path: string; 21 | 22 | public static description = messages.getMessage('commandDescription'); 23 | 24 | public static examples = [ 25 | `$ sfdx sfpowerkit:source:customlabel:reconcile -d path/to/customlabelfile.xml -p core 26 | Cleaned The Custom Labels 27 | `, 28 | ]; 29 | 30 | protected static flagsConfig = { 31 | path: flags.string({ 32 | required: true, 33 | char: 'd', 34 | description: messages.getMessage('pathFlagDescription'), 35 | }), 36 | project: flags.string({ 37 | required: true, 38 | char: 'p', 39 | description: messages.getMessage('packageFlagDescription'), 40 | }), 41 | }; 42 | 43 | // Comment this out if your command does not require an org username 44 | //protected static requiresUsername = true; 45 | 46 | // Comment this out if your command does not support a hub org username 47 | // protected static supportsDevhubUsername = true; 48 | 49 | // Set this to true if your command requires a project workspace; 'requiresProject' is false by default 50 | protected static requiresProject = true; 51 | 52 | public async execute(): Promise { 53 | rimraf.sync('temp_sfpowerkit'); 54 | 55 | // Gives first value in url after https protocol 56 | const packageName = this.flags.project; 57 | 58 | this.customlabel_path = this.flags.path; 59 | 60 | if (fs.existsSync(path.resolve(this.customlabel_path)) && path.extname(this.customlabel_path) == '.xml') { 61 | const parser = new xml2js.Parser({ explicitArray: false }); 62 | const parseString = util.promisify(parser.parseString); 63 | 64 | let retrieved_customlabels = await parseString(fs.readFileSync(path.resolve(this.customlabel_path))); 65 | 66 | if (!Object.keys(retrieved_customlabels).includes('CustomLabels')) { 67 | this.ux.log(`Metadata Mismatch: Not A CustomLabels Metadata File`); 68 | 69 | rimraf.sync('temp_sfpowerkit'); 70 | 71 | return 1; 72 | } 73 | 74 | SFPLogger.log(`Package ::: ${packageName}`); 75 | 76 | if (this.isIterable(retrieved_customlabels.CustomLabels.labels)) { 77 | retrieved_customlabels.CustomLabels.labels = retrieved_customlabels.CustomLabels.labels.filter((item) => 78 | item.fullName.startsWith(`${packageName}_`) 79 | ); 80 | } else { 81 | if (!retrieved_customlabels.CustomLabels.labels.fullName.startsWith('${packageName}_`')) 82 | delete retrieved_customlabels.CustomLabels.labels; 83 | } 84 | 85 | let builder = new xml2js.Builder(); 86 | let xml = builder.buildObject(retrieved_customlabels); 87 | 88 | await fs.writeFileSync(path.resolve(this.customlabel_path), xml); 89 | 90 | this.ux.log( 91 | `Reconciled The 
Custom Labels only to have ${packageName} labels (labels with full name beginning with ${packageName}_)` 92 | ); 93 | } else { 94 | this.ux.log(`File is either not found, or not an xml file.`); 95 | } 96 | 97 | rimraf.sync('temp_sfpowerkit'); 98 | 99 | return 0; 100 | } 101 | 102 | isIterable(obj) { 103 | if (obj == null) { 104 | return false; 105 | } 106 | return typeof obj[Symbol.iterator] === 'function'; 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /Third Party Notices.md: -------------------------------------------------------------------------------- 1 | THIRD-PARTY SOFTWARE NOTICES AND INFORMATION 2 | 3 | 1.sfdx-node https://github.com/pony-ci/sfdx-node/blob/master/LICENSE 4 | 5 | ``` 6 | The MIT License 7 | 8 | Copyright (c) 2020 Ondřej Kratochvíl 9 | 10 | Permission is hereby granted, free of charge, to any person obtaining a copy 11 | of this software and associated documentation files (the "Software"), to deal 12 | in the Software without restriction, including without limitation the rights 13 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 14 | copies of the Software, and to permit persons to whom the Software is 15 | furnished to do so, subject to the following conditions: 16 | 17 | The above copyright notice and this permission notice shall be included in 18 | all copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 21 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 22 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 23 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 24 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 26 | THE SOFTWARE. 27 | ``` 28 | 29 | 2.texei-sfdx-plugin https://github.com/texei/texei-sfdx-plugin 30 | 31 | ``` 32 | BSD 3-Clause License 33 | 34 | Copyright (c) 2020, Texeï 35 | All rights reserved. 36 | 37 | Redistribution and use in source and binary forms, with or without 38 | modification, are permitted provided that the following conditions are met: 39 | 40 | 1. Redistributions of source code must retain the above copyright notice, this 41 | list of conditions and the following disclaimer. 42 | 43 | 2. Redistributions in binary form must reproduce the above copyright notice, 44 | this list of conditions and the following disclaimer in the documentation 45 | and/or other materials provided with the distribution. 46 | 47 | 3. Neither the name of the copyright holder nor the names of its 48 | contributors may be used to endorse or promote products derived from 49 | this software without specific prior written permission. 50 | 51 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 52 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 53 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 54 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 55 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 56 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 57 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 58 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 59 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 60 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 61 | ``` 62 | 63 | 3.apex-parser https://github.com/nawforce/apex-parser 64 | 65 | ``` 66 | [The "BSD licence"] 67 | Copyright (c) 2019 Kevin Jones 68 | All rights reserved. 69 | Redistribution and use in source and binary forms, with or without 70 | modification, are permitted provided that the following conditions 71 | are met: 72 | 1. Redistributions of source code must retain the above copyright 73 | notice, this list of conditions and the following disclaimer. 74 | 2. Redistributions in binary form must reproduce the above copyright 75 | notice, this list of conditions and the following disclaimer in the 76 | documentation and/or other materials provided with the distribution. 77 | 3. The name of the author may not be used to endorse or promote products 78 | derived from this software without specific prior written permission. 79 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 80 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 81 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 82 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 83 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 84 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 85 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 86 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 87 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 88 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
89 | ``` 90 | -------------------------------------------------------------------------------- /src/impl/package/version/packageVersionCoverage.ts: -------------------------------------------------------------------------------- 1 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 2 | import { Connection } from 'jsforce'; 3 | 4 | const QUERY_string = `SELECT SubscriberPackageVersionId,Package2Id, Package2.Name,MajorVersion,MinorVersion,PatchVersion,BuildNumber, CodeCoverage, HasPassedCodeCoverageCheck, Name FROM Package2Version WHERE `; 5 | const DEFAULT_ORDER_BY_FIELDS = 'Package2Id, MajorVersion, MinorVersion, PatchVersion, BuildNumber'; 6 | export default class PackageVersionCoverage { 7 | public constructor() {} 8 | 9 | public async getCoverage( 10 | versionId: string[], 11 | versionNumber: string, 12 | packageName: string, 13 | conn: Connection 14 | ): Promise { 15 | let whereClause = (await this.getWhereClause(versionId, versionNumber, packageName)) as string; 16 | 17 | if (!whereClause) { 18 | throw new Error('Either versionId or versionNumber and packageName is mandatory'); 19 | } 20 | 21 | let output = []; 22 | 23 | const result = (await conn.tooling.query( 24 | `${QUERY_string} ${whereClause} ORDER BY ${DEFAULT_ORDER_BY_FIELDS}` 25 | )) as any; 26 | if (result && result.size > 0) { 27 | result.records.forEach((record) => { 28 | let packageCoverage = {}; 29 | packageCoverage.HasPassedCodeCoverageCheck = record.HasPassedCodeCoverageCheck; 30 | packageCoverage.coverage = record.CodeCoverage ? record.CodeCoverage.apexCodeCoveragePercentage : 0; 31 | packageCoverage.packageId = record.Package2Id; 32 | packageCoverage.packageName = record.Package2.Name; 33 | packageCoverage.packageVersionId = record.SubscriberPackageVersionId; 34 | packageCoverage.packageVersionNumber = `${record.MajorVersion}.${record.MinorVersion}.${record.PatchVersion}.${record.BuildNumber}`; 35 | output.push(packageCoverage); 36 | }); 37 | 38 | SFPLogger.log(`Successfully Retrieved the Apex Test Coverage of the package version`, LoggerLevel.INFO); 39 | } else { 40 | throw new Error(`Package version doesnot exist, Please check the version details`); 41 | } 42 | return output; 43 | } 44 | private async getWhereClause(versionId: string[], versionNumber: string, packageName: string): Promise { 45 | let whereClause = ''; 46 | if (versionId && versionId.length > 0) { 47 | whereClause = this.buildWhereFilter('SubscriberPackageVersionId', versionId); 48 | } else if (versionNumber && packageName) { 49 | whereClause = 50 | this.buildWhereOnNameOrId('0Ho', 'Package2Id', 'Package2.Name', packageName) + 51 | ' AND ' + 52 | this.buildVersionNumberFilter(versionNumber); 53 | } 54 | return whereClause; 55 | } 56 | // buid the where clause IN or = based on length 57 | private buildWhereFilter(key: string, value: string[]) { 58 | let result = ''; 59 | if (value.length > 1) { 60 | result = `${key} IN ('${value.join("','")}')`; 61 | } else { 62 | result = `${key} = '${value[0]}'`; 63 | } 64 | return result; 65 | } 66 | //build where clause based of id or name 67 | private buildWhereOnNameOrId(idFilter: string, idKey: string, nameKey: string, value: string) { 68 | let result = ''; 69 | if (value.startsWith(idFilter)) { 70 | result = `${idKey} = '${value}' `; 71 | } else { 72 | result = `${nameKey} = '${value}' `; 73 | } 74 | return result; 75 | } 76 | private buildVersionNumberFilter(versionNumber: string) { 77 | let result = ''; 78 | let versionNumberList = versionNumber.split('.'); 79 | if (versionNumberList.length === 
4) { 80 | result = `MajorVersion = ${versionNumberList[0]} AND MinorVersion = ${versionNumberList[1]} AND PatchVersion = ${versionNumberList[2]} AND BuildNumber = ${versionNumberList[3]}`; 81 | } else { 82 | throw new Error( 83 | 'Provide complete version number format in major.minor.patch (Beta build)—for example, 1.2.0.5' 84 | ); 85 | } 86 | return result; 87 | } 88 | } 89 | interface PackageCoverage { 90 | coverage: number; 91 | packageName: string; 92 | packageId: string; 93 | packageVersionNumber: string; 94 | packageVersionId: string; 95 | HasPassedCodeCoverageCheck: boolean; 96 | } 97 | -------------------------------------------------------------------------------- /src/impl/metadata/writer/profileWriter.ts: -------------------------------------------------------------------------------- 1 | import Profile from '../schema'; 2 | import { Sfpowerkit } from '../../../sfpowerkit'; 3 | import * as fs from 'fs-extra'; 4 | import * as xml2js from 'xml2js'; 5 | const format = require('xml-formatter'); 6 | 7 | const nonArayProperties = ['custom', 'description', 'fullName', 'userLicense', '$']; 8 | const PROFILE_NAMESPACE = 'http://soap.sforce.com/2006/04/metadata'; 9 | 10 | export default class ProfileWriter { 11 | public writeProfile(profileObj: Profile, filePath: string) { 12 | //Delete eampty arrays 13 | for (let key in profileObj) { 14 | if (Array.isArray(profileObj[key])) { 15 | //All top element must be arays exept non arrayProperties 16 | if (!nonArayProperties.includes(key) && profileObj[key].length === 0) { 17 | delete profileObj[key]; 18 | } 19 | } 20 | } 21 | 22 | let builder = new xml2js.Builder({ 23 | rootName: 'Profile', 24 | xmldec: { version: '1.0', encoding: 'UTF-8' }, 25 | }); 26 | profileObj['$'] = { 27 | xmlns: PROFILE_NAMESPACE, 28 | }; 29 | let xml = builder.buildObject(profileObj); 30 | 31 | let formattedXml = format(xml, { 32 | indentation: ' ', 33 | filter: (node) => node.type !== 'Comment', 34 | collapseContent: true, 35 | lineSeparator: '\n', 36 | }); 37 | 38 | //console.log(formattedXml); 39 | 40 | fs.writeFileSync(filePath, formattedXml); 41 | } 42 | 43 | public toXml(profileObj: Profile) { 44 | //Delete eampty arrays 45 | for (let key in profileObj) { 46 | if (Array.isArray(profileObj[key])) { 47 | //All top element must be arays exept non arrayProperties 48 | if (!nonArayProperties.includes(key) && profileObj[key].length === 0) { 49 | delete profileObj[key]; 50 | } 51 | } 52 | } 53 | let builder = new xml2js.Builder({ 54 | rootName: 'Profile', 55 | xmldec: { version: '1.0', encoding: 'UTF-8' }, 56 | }); 57 | profileObj['$'] = { 58 | xmlns: PROFILE_NAMESPACE, 59 | }; 60 | let xml = builder.buildObject(profileObj); 61 | let formattedXml = format(xml, { 62 | indentation: ' ', 63 | filter: (node) => node.type !== 'Comment', 64 | collapseContent: true, 65 | lineSeparator: '\n', 66 | }); 67 | return formattedXml; 68 | } 69 | 70 | public toProfile(profileObj: any): Profile { 71 | let convertedObject: any = {}; 72 | for (let key in profileObj) { 73 | if (Array.isArray(profileObj[key])) { 74 | //All top element must be arays exept non arrayProperties 75 | if (nonArayProperties.includes(key)) { 76 | convertedObject[key] = 77 | profileObj[key][0] === 'true' 78 | ? true 79 | : profileObj[key][0] === 'false' 80 | ? 
false 81 | : profileObj[key][0]; 82 | } else { 83 | let data = []; 84 | for (let i = 0; i < profileObj[key].length; i++) { 85 | let element = this.removeArrayNatureOnValue(profileObj[key][i]); 86 | if (element !== '') { 87 | data.push(element); 88 | } 89 | } 90 | convertedObject[key] = data; 91 | } 92 | } else if (nonArayProperties.includes(key)) { 93 | convertedObject[key] = profileObj[key]; 94 | } else { 95 | convertedObject[key] = [profileObj[key]]; 96 | } 97 | } 98 | return convertedObject as Profile; 99 | } 100 | 101 | private removeArrayNatureOnValue(obj: any): any { 102 | let toReturn = {}; 103 | for (let key in obj) { 104 | if (Array.isArray(obj[key]) && obj[key].length > 0) { 105 | //All top element must be arays exept non arrayProperties 106 | toReturn[key] = obj[key][0] === 'true' ? true : obj[key][0] === 'false' ? false : obj[key][0]; 107 | } else { 108 | toReturn[key] = obj[key]; 109 | } 110 | } 111 | return toReturn; 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/connectedapp/retrieve.ts: -------------------------------------------------------------------------------- 1 | import { AnyJson } from '@salesforce/ts-types'; 2 | import * as fs from 'fs-extra'; 3 | import { flags } from '@salesforce/command'; 4 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 5 | import * as rimraf from 'rimraf'; 6 | import { AsyncResult } from 'jsforce'; 7 | import { Messages, SfdxError } from '@salesforce/core'; 8 | import * as xml2js from 'xml2js'; 9 | import * as util from 'util'; 10 | // tslint:disable-next-line:ordered-imports 11 | const path = require('path'); 12 | import { checkRetrievalStatus } from '../../../../utils/checkRetrievalStatus'; 13 | import { extract } from '../../../../utils/extract'; 14 | import getDefaults from '../../../../utils/getDefaults'; 15 | 16 | // Initialize Messages with the current plugin directory 17 | Messages.importMessagesDirectory(__dirname); 18 | 19 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 20 | // or any library that is using the messages framework can also be loaded this way. 
21 | const messages = Messages.loadMessages('sfpowerkit', 'connectedapp_retrieve'); 22 | 23 | export default class Retrieve extends SFPowerkitCommand { 24 | public connectedapp_consumerKey: string; 25 | public static description = messages.getMessage('commandDescription'); 26 | 27 | public static examples = [ 28 | `$ sfdx sfpowerkit:org:connectedapp:retrieve -n AzurePipelines -u azlam@sfdc.com 29 | Retrieved AzurePipelines Consumer Key : XSD21Sd23123w21321 30 | `, 31 | ]; 32 | 33 | // Comment this out if your command does not require an org username 34 | protected static requiresUsername = true; 35 | 36 | protected static flagsConfig = { 37 | name: flags.string({ 38 | required: true, 39 | char: 'n', 40 | description: messages.getMessage('nameFlagDescription'), 41 | }), 42 | }; 43 | 44 | public async execute(): Promise<AnyJson> { 45 | rimraf.sync('temp_sfpowerkit'); 46 | let retrieveRequest = { 47 | apiVersion: getDefaults.getApiVersion(), 48 | }; 49 | 50 | retrieveRequest['singlePackage'] = true; 51 | retrieveRequest['unpackaged'] = { 52 | types: { name: 'ConnectedApp', members: this.flags.name }, 53 | }; 54 | 55 | // if(!this.flags.json) 56 | // this.ux.logJson(retrieveRequest); 57 | 58 | await this.org.refreshAuth(); 59 | 60 | const conn = this.org.getConnection(); 61 | 62 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 63 | 64 | retrieveRequest.apiVersion = this.flags.apiversion; 65 | 66 | conn.metadata.pollTimeout = 60; 67 | 68 | let retrievedId; 69 | 70 | await conn.metadata.retrieve(retrieveRequest, function (error, result: AsyncResult) { 71 | if (error) { 72 | return console.error(error); 73 | } 74 | retrievedId = result.id; 75 | }); 76 | 77 | // if(!this.flags.json) 78 | // console.log(retrievedId); 79 | 80 | let metadata_retrieve_result = await checkRetrievalStatus(conn, retrievedId, !this.flags.json); 81 | if (!metadata_retrieve_result.zipFile) throw new SfdxError('Unable to find the requested ConnectedApp'); 82 | 83 | const zipFileName = 'temp_sfpowerkit/unpackaged.zip'; 84 | 85 | fs.mkdirSync('temp_sfpowerkit'); 86 | fs.writeFileSync(zipFileName, metadata_retrieve_result.zipFile, { 87 | encoding: 'base64', 88 | }); 89 | 90 | await extract(`./temp_sfpowerkit/unpackaged.zip`, 'temp_sfpowerkit'); 91 | 92 | let resultFile = `temp_sfpowerkit/connectedApps/${this.flags.name}.connectedApp`; 93 | // if(!this.flags.json) 94 | // this.ux.log(`Checking for file ${resultFile}`); 95 | 96 | // this.ux.log(path.resolve(resultFile)); 97 | let retrieved_connectedapp; 98 | 99 | if (fs.existsSync(path.resolve(resultFile))) { 100 | const parser = new xml2js.Parser({ explicitArray: false }); 101 | const parseString = util.promisify(parser.parseString); 102 | 103 | retrieved_connectedapp = await parseString(fs.readFileSync(path.resolve(resultFile))); 104 | // if(!this.flags.json) 105 | // this.ux.logJson(retrieved_connectedapp); 106 | this.ux.log( 107 | `Retrieved ConnectedApp successfully with Consumer Key : ${retrieved_connectedapp.ConnectedApp.oauthConfig.consumerKey}` 108 | ); 109 | return { connectedapp: retrieved_connectedapp.ConnectedApp }; 110 | } else { 111 | throw new SfdxError('Unable to process the retrieved ConnectedApp'); 112 | } 113 | } 114 | } 115 |
-------------------------------------------------------------------------------- /src/sfpowerkit.ts: -------------------------------------------------------------------------------- 1 | import { SfdxProject } from '@salesforce/core'; 2 | import { UX } from '@salesforce/command'; 3 | import chalk = require('chalk'); 4 | import * as fs 
from 'fs-extra'; 5 | import SQLITEKeyValue from './utils/sqlitekv'; 6 | import FileUtils from './utils/fileutils'; 7 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 8 | const NodeCache = require('node-cache'); 9 | 10 | 11 | export class Sfpowerkit { 12 | private static defaultFolder: string; 13 | private static projectDirectories: string[]; 14 | private static pluginConfig; 15 | public static isJsonFormatEnabled: boolean; 16 | private static ux: UX; 17 | private static sourceApiVersion: any; 18 | private static cache; 19 | 20 | 21 | static enableColor() { 22 | chalk.level = 2; 23 | } 24 | 25 | static disableColor() { 26 | chalk.level = 0; 27 | } 28 | 29 | public static resetCache() { 30 | let cachePath = FileUtils.getLocalCachePath('sfpowerkit-cache.db'); 31 | if (fs.existsSync(cachePath)) 32 | fs.unlinkSync(cachePath); 33 | } 34 | 35 | public static initCache() { 36 | try { 37 | //Set the cache path on init, 38 | //TODO: Move this to a temporary directory with randomization 39 | Sfpowerkit.cache = new SQLITEKeyValue(FileUtils.getLocalCachePath('sfpowerkit-cache.db')); 40 | Sfpowerkit.cache.init(); 41 | } catch (error) { 42 | //Fallback to NodeCache, as sqlite cache cant be lazily loaded 43 | //Retreive and Merge doesnt have workers so sqlite cant be loaded.. need further investigation 44 | Sfpowerkit.cache = new NodeCache(); 45 | } 46 | } 47 | 48 | public static getFromCache(key: string): any { 49 | return Sfpowerkit.cache.get(key); 50 | } 51 | 52 | public static addToCache(key: string, value: any) { 53 | return Sfpowerkit.cache.set(key, value); 54 | } 55 | 56 | public static setLogLevel(logLevel: string, isJsonFormatEnabled: boolean) { 57 | this.isJsonFormatEnabled = isJsonFormatEnabled ? true : false; 58 | } 59 | 60 | public static setProjectDirectories(packagedirectories: string[]) { 61 | Sfpowerkit.projectDirectories = packagedirectories; 62 | } 63 | 64 | public static async getProjectDirectories() { 65 | if (!Sfpowerkit.projectDirectories) { 66 | Sfpowerkit.projectDirectories = []; 67 | const dxProject = await SfdxProject.resolve(); 68 | const project = await dxProject.retrieveSfdxProjectJson(); 69 | let packages = (project.get('packageDirectories') as any[]) || []; 70 | packages.forEach((element) => { 71 | Sfpowerkit.projectDirectories.push(element.path); 72 | if (element.default) { 73 | Sfpowerkit.defaultFolder = element.path; 74 | } 75 | }); 76 | } 77 | return Sfpowerkit.projectDirectories; 78 | } 79 | 80 | public static async getDefaultFolder() { 81 | if (!Sfpowerkit.defaultFolder) { 82 | await Sfpowerkit.getProjectDirectories(); 83 | } 84 | return Sfpowerkit.defaultFolder; 85 | } 86 | public static setDefaultFolder(defaultFolder: string) { 87 | Sfpowerkit.defaultFolder = defaultFolder; 88 | } 89 | 90 | public static async getConfig() { 91 | if (!Sfpowerkit.pluginConfig) { 92 | const dxProject = await SfdxProject.resolve(); 93 | const project = await dxProject.retrieveSfdxProjectJson(); 94 | let plugins = project.get('plugins') || {}; 95 | let sfpowerkitConfig = plugins['sfpowerkit']; 96 | Sfpowerkit.pluginConfig = sfpowerkitConfig || {}; 97 | } 98 | return Sfpowerkit.pluginConfig; 99 | } 100 | public static setapiversion(apiversion: any) { 101 | Sfpowerkit.sourceApiVersion = apiversion; 102 | } 103 | 104 | public static async getApiVersion(): Promise { 105 | if (!Sfpowerkit.sourceApiVersion) { 106 | const dxProject = await SfdxProject.resolve(); 107 | const project = await dxProject.retrieveSfdxProjectJson(); 108 | Sfpowerkit.sourceApiVersion = 
project.get('sourceApiVersion'); 109 | } 110 | return Sfpowerkit.sourceApiVersion; 111 | } 112 | /** 113 | * Print log only if the log level for this commamnd matches the log level for the message 114 | * @param message Message to print 115 | * @param messageLoglevel Log level for the message 116 | */ 117 | public static log(message: any, logLevel: LoggerLevel) { 118 | if (this.isJsonFormatEnabled) return; 119 | SFPLogger.log(message, logLevel); 120 | } 121 | public static setUx(ux: UX) { 122 | this.ux = ux; 123 | } 124 | 125 | public static setStatus(status: string) { 126 | this.ux.setSpinnerStatus(status); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/commands/sfpowerkit/org/scratchorg/delete.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { Connection, Messages, SfdxError } from '@salesforce/core'; 3 | import { AnyJson } from '@salesforce/ts-types'; 4 | import ScratchOrgUtils from '../../../../utils/scratchOrgUtils'; 5 | import SFPowerkitCommand from '../../../../sfpowerkitCommand'; 6 | // Initialize Messages with the current plugin directory 7 | Messages.importMessagesDirectory(__dirname); 8 | 9 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 10 | // or any library that is using the messages framework can also be loaded this way. 11 | const messages = Messages.loadMessages('sfpowerkit', 'scratchorg_delete'); 12 | 13 | export default class Delete extends SFPowerkitCommand { 14 | public static description = messages.getMessage('commandDescription'); 15 | 16 | public static examples = [ 17 | `$ sfdx sfpowerkit:org:scratchorg:delete -e xyz@kyz.com -v devhub`, 18 | `$ sfdx sfpowerkit:org:scratchorg:delete -u xyz@kyz.com -v devhub`, 19 | `$ sfdx sfpowerkit:org:scratchorg:delete -e xyz@kyz.com -v devhub --ignorepool`, 20 | ]; 21 | 22 | // Comment this out if your command does not require a hub org username 23 | protected static requiresDevhubUsername = true; 24 | 25 | protected static flagsConfig = { 26 | email: flags.string({ 27 | required: false, 28 | char: 'e', 29 | exclusive: ['username'], 30 | description: messages.getMessage('emailFlagDescription'), 31 | }), 32 | username: flags.string({ 33 | required: false, 34 | char: 'u', 35 | exclusive: ['email'], 36 | description: messages.getMessage('usernameFlagDescription'), 37 | }), 38 | ignorepool: flags.boolean({ 39 | required: false, 40 | dependsOn: ['email'], 41 | description: messages.getMessage('ignorePoolFlagDescription'), 42 | }), 43 | dryrun: flags.boolean({ 44 | required: false, 45 | description: messages.getMessage('dryRunFlagDescription'), 46 | }), 47 | }; 48 | 49 | public async execute(): Promise { 50 | if (!this.flags.username && !this.flags.email) { 51 | throw new SfdxError('Required flags are missing, Please provide either username or email.'); 52 | } 53 | 54 | await this.hubOrg.refreshAuth(); 55 | const conn = this.hubOrg.getConnection(); 56 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 57 | 58 | let info = await this.getActiveScratchOrgsForUser(conn, this.flags.email, this.flags.username); 59 | 60 | if (info.totalSize > 0) { 61 | this.ux.log( 62 | `Found ${info.totalSize} Scratch Org(s) for the given ${ 63 | this.flags.username ? 
'Username: ' + this.flags.username : 'Email: ' + this.flags.email 64 | } in devhub ${this.hubOrg.getUsername()}.\n` 65 | ); 66 | this.ux.table(info.records, ['Id', 'ScratchOrg', 'SignupUsername', 'SignupEmail', 'ExpirationDate']); 67 | 68 | if (!this.flags.dryrun) { 69 | let scratchOrgIds: string[] = info.records.map((elem) => elem.Id); 70 | await ScratchOrgUtils.deleteScratchOrg(this.hubOrg, scratchOrgIds); 71 | this.ux.log('Scratch Org(s) deleted successfully.'); 72 | } 73 | } else { 74 | this.ux.log( 75 | `No Scratch Org(s) found for the given ${ 76 | this.flags.username ? 'Username: ' + this.flags.username : 'Email: ' + this.flags.email 77 | } in devhub ${this.hubOrg.getUsername()}.` 78 | ); 79 | } 80 | 81 | return 1; 82 | } 83 | 84 | private async getActiveScratchOrgsForUser(conn: Connection, email: string, username: string): Promise { 85 | let query = `SELECT Id, ScratchOrg, SignupUsername, SignupEmail, ExpirationDate FROM ActiveScratchOrg`; 86 | 87 | if (username) { 88 | query = `${query} WHERE SignupUsername = '${username}'`; 89 | } else { 90 | query = `${query} WHERE SignupEmail = '${email}'`; 91 | } 92 | 93 | if (this.flags.ignorepool && !username) { 94 | const orgIds = await this.getOrgIdOfPooledScratchOrgs(); 95 | const collection = orgIds.map((id) => `'${id}'`).toString(); 96 | query += ` AND ScratchOrg NOT IN (${collection})`; 97 | } 98 | 99 | const scratch_orgs = (await conn.query(query)) as any; 100 | 101 | return scratch_orgs; 102 | } 103 | 104 | private async getOrgIdOfPooledScratchOrgs(): Promise { 105 | const results = await ScratchOrgUtils.getScratchOrgsByTag(null, this.hubOrg, false, false); 106 | return results.records.map((record) => record.ScratchOrg); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/utils/packageUtils.ts: -------------------------------------------------------------------------------- 1 | import { Connection } from 'jsforce'; 2 | 3 | let retry = require('async-retry'); 4 | 5 | export async function getInstalledPackages(conn: Connection, fetchLicenses: boolean): Promise { 6 | let packageDetails = []; 7 | 8 | let installedPackagesQuery = 9 | 'SELECT Id, SubscriberPackageId, SubscriberPackage.NamespacePrefix, SubscriberPackage.Name, ' + 10 | 'SubscriberPackageVersion.Id, SubscriberPackageVersion.Name, SubscriberPackageVersion.MajorVersion, SubscriberPackageVersion.MinorVersion, ' + 11 | 'SubscriberPackageVersion.PatchVersion, SubscriberPackageVersion.BuildNumber, SubscriberPackageVersion.Package2ContainerOptions, SubscriberPackageVersion.IsOrgDependent FROM InstalledSubscriberPackage ' + 12 | 'ORDER BY SubscriberPackageId'; 13 | 14 | let packageNamespacePrefixList = []; 15 | 16 | return await retry( 17 | async (bail) => { 18 | let results = await conn.tooling.query(installedPackagesQuery); 19 | const records = results.records; 20 | 21 | if (records && records.length > 0) { 22 | records.forEach((record) => { 23 | const packageDetail = {} as PackageDetail; 24 | packageDetail.packageName = record['SubscriberPackage']['Name']; 25 | packageDetail.subcriberPackageId = record['SubscriberPackageId']; 26 | packageDetail.packageNamespacePrefix = record['SubscriberPackage']['NamespacePrefix']; 27 | packageDetail.packageVersionId = record['SubscriberPackageVersion']['Id']; 28 | packageDetail.packageVersionNumber = 
`${record['SubscriberPackageVersion']['MajorVersion']}.${record['SubscriberPackageVersion']['MinorVersion']}.${record['SubscriberPackageVersion']['PatchVersion']}.${record['SubscriberPackageVersion']['BuildNumber']}`; 29 | packageDetail.type = record['SubscriberPackageVersion']['Package2ContainerOptions']; 30 | packageDetail.IsOrgDependent = record['SubscriberPackageVersion']['IsOrgDependent']; 31 | packageDetails.push(packageDetail); 32 | if (packageDetail.packageNamespacePrefix) { 33 | packageNamespacePrefixList.push("'" + packageDetail.packageNamespacePrefix + "'"); 34 | } 35 | }); 36 | } 37 | 38 | if (fetchLicenses) { 39 | let licenseMap = new Map(); 40 | if (packageNamespacePrefixList.length > 0) { 41 | let packageLicensingQuery = `SELECT AllowedLicenses, UsedLicenses,ExpirationDate, NamespacePrefix, IsProvisioned, Status FROM PackageLicense WHERE NamespacePrefix IN (${packageNamespacePrefixList})`; 42 | await conn.query(packageLicensingQuery).then((queryResult) => { 43 | if (queryResult.records && queryResult.records.length > 0) { 44 | queryResult.records.forEach((record) => { 45 | let licenseDetailObj = {} as PackageDetail; 46 | licenseDetailObj.allowedLicenses = 47 | record['AllowedLicenses'] > 0 ? record['AllowedLicenses'] : 0; 48 | licenseDetailObj.usedLicenses = record['UsedLicenses']; 49 | licenseDetailObj.expirationDate = record['ExpirationDate']; 50 | licenseDetailObj.status = record['Status']; 51 | licenseMap.set(record['NamespacePrefix'], licenseDetailObj); 52 | }); 53 | } 54 | }); 55 | } 56 | 57 | if (packageDetails.length > 0 && licenseMap.size > 0) { 58 | packageDetails.forEach((detail) => { 59 | if (detail.packageNamespacePrefix && licenseMap.has(detail.packageNamespacePrefix)) { 60 | let licDetail = licenseMap.get(detail.packageNamespacePrefix); 61 | detail.allowedLicenses = licDetail.allowedLicenses; 62 | detail.usedLicenses = licDetail.usedLicenses; 63 | detail.expirationDate = licDetail.expirationDate; 64 | detail.status = licDetail.status; 65 | } 66 | }); 67 | } 68 | } 69 | 70 | return packageDetails; 71 | }, 72 | { retries: 3, minTimeout: 3000 } 73 | ); 74 | } 75 | 76 | export interface PackageDetail { 77 | packageName: string; 78 | subcriberPackageId: string; 79 | packageNamespacePrefix: string; 80 | packageVersionNumber: string; 81 | packageVersionId: string; 82 | allowedLicenses: number; 83 | usedLicenses: number; 84 | expirationDate: string; 85 | status: string; 86 | type: string; 87 | IsOrgDependent: boolean; 88 | CodeCoverage: number; 89 | codeCoverageCheckPassed: boolean; 90 | validationSkipped: boolean; 91 | } 92 | -------------------------------------------------------------------------------- /src/impl/project/diff/permsetDiff.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs-extra'; 2 | import * as xml2js from 'xml2js'; 3 | import * as util from 'util'; 4 | import * as _ from 'lodash'; 5 | import DiffUtil from './diffUtil'; 6 | 7 | const nonArayProperties = ['description', 'hasActivationRequired', 'label', 'license', 'userLicense', '$', 'fullName']; 8 | 9 | const parser = new xml2js.Parser({ 10 | explicitArray: false, 11 | valueProcessors: [ 12 | function (name) { 13 | if (name === 'true') name = true; 14 | if (name === 'false') name = false; 15 | return name; 16 | }, 17 | ], 18 | }); 19 | 20 | export default abstract class PermsetDiff { 21 | protected debugFlag: boolean; 22 | 23 | public constructor(debugFlag?: boolean) { 24 | this.debugFlag = debugFlag; 25 | } 26 | 27 | public static async 
generatePermissionsetXml( 28 | permissionsetXml1: string, 29 | permissionsetXml2: string, 30 | outputFilePath: string 31 | ) { 32 | const parseString = util.promisify(parser.parseString); 33 | 34 | let parseResult = await parseString(permissionsetXml1); 35 | let permsetObj1 = parseResult.PermissionSet; 36 | parseResult = await parseString(permissionsetXml2); 37 | let permsetObj2 = parseResult.PermissionSet; 38 | 39 | let newPermsetObj = {} as any; 40 | 41 | newPermsetObj.label = permsetObj2.label; 42 | 43 | if (!_.isNil(permsetObj2.description)) { 44 | newPermsetObj.description = permsetObj2.description; 45 | } 46 | if (!_.isNil(permsetObj2.license)) { 47 | newPermsetObj.license = permsetObj2.license; 48 | } 49 | if (permsetObj2.hasActivationRequired) { 50 | newPermsetObj.hasActivationRequired = permsetObj2.hasActivationRequired; 51 | } 52 | 53 | newPermsetObj.applicationVisibilities = DiffUtil.getChangedOrAdded( 54 | permsetObj1.applicationVisibilities, 55 | permsetObj2.applicationVisibilities, 56 | 'application' 57 | ).addedEdited; 58 | newPermsetObj.classAccesses = DiffUtil.getChangedOrAdded( 59 | permsetObj1.classAccesses, 60 | permsetObj2.classAccesses, 61 | 'apexClass' 62 | ).addedEdited; 63 | newPermsetObj.customPermissions = DiffUtil.getChangedOrAdded( 64 | permsetObj1.customPermissions, 65 | permsetObj2.customPermissions, 66 | 'name' 67 | ).addedEdited; 68 | newPermsetObj.externalDataSourceAccesses = DiffUtil.getChangedOrAdded( 69 | permsetObj1.externalDataSourceAccesses, 70 | permsetObj2.externalDataSourceAccesses, 71 | 'externalDataSource' 72 | ).addedEdited; 73 | 74 | newPermsetObj.fieldPermissions = DiffUtil.getChangedOrAdded( 75 | permsetObj1.fieldPermissions, 76 | permsetObj2.fieldPermissions, 77 | 'field' 78 | ).addedEdited; 79 | 80 | newPermsetObj.objectPermissions = DiffUtil.getChangedOrAdded( 81 | permsetObj1.objectPermissions, 82 | permsetObj2.objectPermissions, 83 | 'object' 84 | ).addedEdited; 85 | newPermsetObj.pageAccesses = DiffUtil.getChangedOrAdded( 86 | permsetObj1.pageAccesses, 87 | permsetObj2.pageAccesses, 88 | 'apexPage' 89 | ).addedEdited; 90 | 91 | newPermsetObj.recordTypeVisibilities = DiffUtil.getChangedOrAdded( 92 | permsetObj1.recordTypeVisibilities, 93 | permsetObj2.recordTypeVisibilities, 94 | 'recordType' 95 | ).addedEdited; 96 | newPermsetObj.tabSettings = DiffUtil.getChangedOrAdded( 97 | permsetObj1.tabSettings, 98 | permsetObj2.tabSettings, 99 | 'tab' 100 | ).addedEdited; 101 | newPermsetObj.userPermissions = DiffUtil.getChangedOrAdded( 102 | permsetObj1.userPermissions, 103 | permsetObj2.userPermissions, 104 | 'name' 105 | ).addedEdited; 106 | 107 | await PermsetDiff.writePermset(newPermsetObj, outputFilePath); 108 | } 109 | 110 | private static async writePermset(permsetObj: any, filePath: string) { 111 | //Delete eampty arrays 112 | for (let key in permsetObj) { 113 | if (Array.isArray(permsetObj[key])) { 114 | //All top element must be arays exept non arrayProperties 115 | if (!nonArayProperties.includes(key) && permsetObj[key].length === 0) { 116 | delete permsetObj[key]; 117 | } 118 | } 119 | } 120 | if (permsetObj.label != undefined) { 121 | let builder = new xml2js.Builder({ rootName: 'PermissionSet' }); 122 | permsetObj['$'] = { 123 | xmlns: 'http://soap.sforce.com/2006/04/metadata', 124 | }; 125 | let xml = builder.buildObject(permsetObj); 126 | 127 | fs.writeFileSync(filePath, xml); 128 | } 129 | } 130 | } 131 | -------------------------------------------------------------------------------- 
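A quick illustration of how the class above is meant to be driven (not part of the repository): the input and output paths below are hypothetical, and reading the two XML files with fs-extra is an assumption; only the static generatePermissionsetXml(xml1, xml2, outputFilePath) signature comes from permsetDiff.ts itself.

```typescript
import * as fs from 'fs-extra';
import PermsetDiff from './permsetDiff';

// Hypothetical driver: diff two revisions of the same permission set and emit
// only the added or changed sections as a new permission set file.
async function writePermsetDelta(): Promise<void> {
    // Illustrative paths -- substitute the two permission set files you want to compare.
    const baseXml = fs.readFileSync('baseline/Sales.permissionset-meta.xml', 'utf8');
    const updatedXml = fs.readFileSync('force-app/main/default/permissionsets/Sales.permissionset-meta.xml', 'utf8');

    // Writes the label/description plus the addedEdited sections computed via DiffUtil
    // to the output path; empty sections are dropped before the XML is built.
    await PermsetDiff.generatePermissionsetXml(baseXml, updatedXml, 'delta/Sales.permissionset-meta.xml');
}

writePermsetDelta().catch(console.error);
```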
/src/commands/sfpowerkit/org/sandbox/refresh.ts: -------------------------------------------------------------------------------- 1 | import { flags } from '@salesforce/command'; 2 | import { AnyJson } from '@salesforce/ts-types'; 3 | const request = require('request-promise-native'); 4 | import { Connection, Messages, SfdxError } from '@salesforce/core'; 5 | import { Sfpowerkit } from '../../../../sfpowerkit'; 6 | import SfpowerkitCommand from '../../../../sfpowerkitCommand'; 7 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 8 | 9 | // Initialize Messages with the current plugin directory 10 | Messages.importMessagesDirectory(__dirname); 11 | 12 | // Load the specific messages for this file. Messages from @salesforce/command, @salesforce/core, 13 | // or any library that is using the messages framework can also be loaded this way. 14 | const messages = Messages.loadMessages('sfpowerkit', 'sandbox_refresh'); 15 | 16 | export default class Refresh extends SfpowerkitCommand { 17 | public static description = messages.getMessage('commandDescription'); 18 | 19 | public static examples = [ 20 | `$ sfdx sfpowerkit:org:sandbox:refresh -n test2 -f sitSandbox -v myOrg@example.com`, 21 | `$ sfdx sfpowerkit:org:sandbox:refresh -n test2 -l DEVELOPER -v myOrg@example.com`, 22 | ]; 23 | 24 | protected static flagsConfig = { 25 | name: flags.string({ 26 | required: true, 27 | char: 'n', 28 | description: messages.getMessage('nameFlagDescription'), 29 | }), 30 | clonefrom: flags.string({ 31 | required: false, 32 | char: 'f', 33 | default: '', 34 | description: messages.getMessage('cloneFromFlagDescripton'), 35 | }), 36 | licensetype: flags.string({ 37 | required: false, 38 | char: 'l', 39 | options: ['DEVELOPER', 'DEVELOPER_PRO', 'PARTIAL', 'FULL'], 40 | description: messages.getMessage('licenseFlagDescription'), 41 | }), 42 | }; 43 | 44 | // Comment this out if your command does not require a hub org username 45 | protected static requiresDevhubUsername = true; 46 | 47 | public async execute(): Promise { 48 | Sfpowerkit.setLogLevel('INFO', false); 49 | 50 | await this.hubOrg.refreshAuth(); 51 | 52 | const conn = this.hubOrg.getConnection(); 53 | 54 | this.flags.apiversion = this.flags.apiversion || (await conn.retrieveMaxApiVersion()); 55 | 56 | let result; 57 | 58 | const sandboxId = await this.getSandboxId(conn, this.flags.name); 59 | const uri = `${conn.instanceUrl}/services/data/v${this.flags.apiversion}/tooling/sobjects/SandboxInfo/${sandboxId}/`; 60 | 61 | if (this.flags.clonefrom) { 62 | const sourceSandboxId = await this.getSandboxId(conn, this.flags.clonefrom); 63 | 64 | result = await request({ 65 | method: 'patch', 66 | url: uri, 67 | headers: { 68 | Authorization: `Bearer ${conn.accessToken}`, 69 | }, 70 | body: { 71 | AutoActivate: 'true', 72 | SourceId: `${sourceSandboxId}`, 73 | }, 74 | json: true, 75 | }); 76 | } else { 77 | if (!this.flags.licensetype) { 78 | throw new SfdxError( 79 | 'License type is required when clonefrom source org is not provided. 
you may need to provide -l | --licensetype' 80 | ); 81 | } 82 | 83 | result = await request({ 84 | method: 'patch', 85 | url: uri, 86 | headers: { 87 | Authorization: `Bearer ${conn.accessToken}`, 88 | }, 89 | body: { 90 | AutoActivate: 'true', 91 | LicenseType: `${this.flags.licensetype}`, 92 | }, 93 | json: true, 94 | }); 95 | } 96 | 97 | SFPLogger.log(`Successfully Enqueued Refresh of Sandbox`, LoggerLevel.INFO); 98 | 99 | return result; 100 | } 101 | 102 | public async getSandboxId(conn: Connection, name: string) { 103 | const query_uri = `${conn.instanceUrl}/services/data/v${this.flags.apiversion}/tooling/query?q=SELECT+Id,SandboxName+FROM+SandboxInfo+WHERE+SandboxName+in+('${name}')`; 104 | 105 | const sandbox_query_result = await request({ 106 | method: 'get', 107 | url: query_uri, 108 | headers: { 109 | Authorization: `Bearer ${conn.accessToken}`, 110 | }, 111 | json: true, 112 | }); 113 | 114 | if (sandbox_query_result.records[0] == undefined) 115 | throw new SfdxError(`Unable to continue, Please check your sandbox name: ${name}`); 116 | 117 | this.ux.log(); 118 | 119 | SFPLogger.log( 120 | `Fetched Sandbox Id for sandbox ${name} is ${sandbox_query_result.records[0].Id}`, 121 | LoggerLevel.INFO 122 | ); 123 | 124 | return sandbox_query_result.records[0].Id; 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/impl/source/profiles/reconcileWorker.ts: -------------------------------------------------------------------------------- 1 | import { Connection, Org, SfdxProject } from '@salesforce/core'; 2 | import { Sfpowerkit } from '../../../sfpowerkit'; 3 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 4 | import { parentPort, workerData } from 'worker_threads'; 5 | import * as fs from 'fs-extra'; 6 | import * as path from 'path'; 7 | import * as xml2js from 'xml2js'; 8 | import * as util from 'util'; 9 | import ProfileWriter from '../../metadata/writer/profileWriter'; 10 | import Profile from '../../metadata/schema'; 11 | 12 | import ProfileComponentReconciler from './profileComponentReconciler'; 13 | import { MetadataResolver, SourceComponent } from '@salesforce/source-deploy-retrieve'; 14 | import { ProfileSourceFile } from './profileActions'; 15 | 16 | export default class ReconcileWorker { 17 | private conn: Connection; 18 | public constructor(private targetOrg: string, private isSourceOnly: boolean) {} 19 | 20 | public async reconcile(profilesToReconcile: ProfileSourceFile[], destFolder) { 21 | //Init Cache for each worker thread from file system 22 | 23 | Sfpowerkit.initCache(); 24 | 25 | if (this.targetOrg) { 26 | let org = await Org.create({ aliasOrUsername: this.targetOrg }); 27 | this.conn = org.getConnection(); 28 | } else { 29 | //Load all local components from source to database 30 | await this.loadAllLocalComponents(); 31 | } 32 | 33 | let result: string[] = []; 34 | for (let count = 0; count < profilesToReconcile.length; count++) { 35 | let reconciledProfile = await this.reconcileProfileJob(profilesToReconcile[count], destFolder); 36 | result.push(reconciledProfile[0]); 37 | } 38 | return result; 39 | } 40 | 41 | private async loadAllLocalComponents() { 42 | const resolver = new MetadataResolver(); 43 | const project = await SfdxProject.resolve(); 44 | let packageDirectories = project.getPackageDirectories(); 45 | 46 | for (const packageDirectory of packageDirectories) { 47 | const components = resolver.getComponentsFromPath(packageDirectory.path); 48 | for (const component of components) { 49 | 
this.loadComponentsTocache(component); 50 | } 51 | } 52 | } 53 | 54 | private loadComponentsTocache(component:SourceComponent){ 55 | Sfpowerkit.addToCache(`SOURCE_${component.type.name}_${component.fullName}`, true); 56 | Sfpowerkit.addToCache(`${component.type.name}_SOURCE_CACHE_AVAILABLE`, true); 57 | let children:SourceComponent[] = component.getChildren(); 58 | for (const child of children) { 59 | this.loadComponentsTocache(child); 60 | } 61 | } 62 | 63 | public reconcileProfileJob(profileComponent: ProfileSourceFile, destFolder: string): Promise { 64 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 65 | let reconcilePromise = new Promise((resolve, reject) => { 66 | let result: string[] = []; // Handle result of command execution 67 | 68 | let profileXmlString = fs.readFileSync(profileComponent.path); 69 | const parser = new xml2js.Parser({ explicitArray: true }); 70 | const parseString = util.promisify(parser.parseString); 71 | parseString(profileXmlString) 72 | .then((parseResult) => { 73 | let profileWriter = new ProfileWriter(); 74 | let profileObj: Profile = profileWriter.toProfile(parseResult.Profile); // as Profile 75 | return profileObj; 76 | }) 77 | .then((profileObj) => { 78 | return new ProfileComponentReconciler(this.conn, this.isSourceOnly).reconcileProfileComponents( 79 | profileObj, 80 | profileComponent.name 81 | ); 82 | }) 83 | .then((profileObj) => { 84 | //write profile back 85 | let outputFile = profileComponent.path; 86 | if (destFolder != null) { 87 | outputFile = path.join(destFolder, path.basename(profileComponent.path)); 88 | } 89 | let profileWriter = new ProfileWriter(); 90 | profileWriter.writeProfile(profileObj, outputFile); 91 | result.push(outputFile); 92 | resolve(result); 93 | return result; 94 | }) 95 | .catch((error) => { 96 | SFPLogger.log( 97 | 'Error while processing file ' + profileComponent + '. 
ERROR Message: ' + error.message, 98 | LoggerLevel.ERROR 99 | ); 100 | }); 101 | }); 102 | return reconcilePromise; 103 | } 104 | } 105 | 106 | Sfpowerkit.setLogLevel(workerData.loglevel, workerData.isJsonFormatEnabled); 107 | 108 | let reconcileWorker = new ReconcileWorker(workerData.targetOrg, workerData.isSourceOnly); 109 | reconcileWorker.reconcile(workerData.profileChunk, workerData.destFolder).then((result) => { 110 | parentPort.postMessage(result); 111 | }); 112 | -------------------------------------------------------------------------------- /src/impl/source/profiles/profileSync.ts: -------------------------------------------------------------------------------- 1 | import SFPLogger, {LoggerLevel } from '@dxatscale/sfp-logger'; 2 | import * as fs from 'fs-extra'; 3 | import Profile from '../../../impl/metadata/schema'; 4 | import * as _ from 'lodash'; 5 | import ProfileActions, { ProfileStatus, ProfileSourceFile } from './profileActions'; 6 | import ProfileWriter from '../../../impl/metadata/writer/profileWriter'; 7 | import { ProgressBar } from '../../../ui/progressBar'; 8 | import MetadataRetriever from '../../metadata/retriever/metadataRetriever'; 9 | import { registry } from '@salesforce/source-deploy-retrieve'; 10 | import * as path from 'path'; 11 | 12 | export default class ProfileSync extends ProfileActions { 13 | public async sync(srcFolders: string[], profilesToSync?: string[], isdelete?: boolean): Promise { 14 | SFPLogger.log('Retrieving profiles', LoggerLevel.DEBUG); 15 | 16 | //Display provided profiles if any 17 | if (!_.isNil(profilesToSync) && profilesToSync.length !== 0) { 18 | SFPLogger.log('Requested profiles are..', LoggerLevel.DEBUG); 19 | profilesToSync.forEach((element) => { 20 | SFPLogger.log(element,LoggerLevel.DEBUG) 21 | }); 22 | } 23 | 24 | //Fetch all profiles if source folders if not provided 25 | let isToFetchNewProfiles = _.isNil(srcFolders) || srcFolders.length === 0; 26 | 27 | SFPLogger.log('Source Folders are', LoggerLevel.DEBUG); 28 | srcFolders.forEach((element) =>{ 29 | SFPLogger.log(element, LoggerLevel.DEBUG); 30 | }); 31 | 32 | //get local profiles when profile path is provided 33 | let profilesInProjectDir = await this.loadProfileFromPackageDirectories(srcFolders); 34 | 35 | //If dont fetch add those to profilesToSync 36 | if (!isToFetchNewProfiles && profilesToSync.length < 1) { 37 | profilesInProjectDir.forEach((element) => { 38 | profilesToSync.push(element.name); 39 | }); 40 | } 41 | 42 | //Grab status of the profiles (Add, Update or Delete) 43 | let profileStatus = await this.getRemoteProfilesWithLocalStatus(profilesToSync, srcFolders); 44 | 45 | let profilesToRetrieve: ProfileSourceFile[] = []; 46 | if (isToFetchNewProfiles) { 47 | //Retriving local profiles and anything extra found in the org 48 | profilesToRetrieve = _.union(profileStatus.added, profileStatus.updated); 49 | } else { 50 | //Retriving only local profiles 51 | profilesToRetrieve = profileStatus.updated; 52 | profileStatus.added = []; 53 | } 54 | profilesToRetrieve.sort((a, b) => a.name.localeCompare(b.name)); 55 | SFPLogger.log(`Number of profiles to retrieve ${profilesToRetrieve.length}`, LoggerLevel.INFO); 56 | 57 | if (profilesToRetrieve.length > 0) { 58 | let i: number, 59 | j: number, 60 | chunk = 10; 61 | let profilesToRetrieveChunked: any[] = []; 62 | 63 | let progressBar = new ProgressBar().create(`Loading profiles in batches `, ` Profiles`, LoggerLevel.INFO); 64 | progressBar.start(profilesToRetrieve.length); 65 | for (i = 0, j = profilesToRetrieve.length; i 
< j; i += chunk) { 66 | //slice profilesToRetrieve in chunk 67 | profilesToRetrieveChunked = profilesToRetrieve.slice(i, i + chunk); 68 | let remoteProfiles = await this.profileRetriever.loadProfiles( 69 | _.uniq( 70 | profilesToRetrieveChunked.map((elem) => { 71 | return elem.name; 72 | }) 73 | ) 74 | ); 75 | 76 | let profileWriter = new ProfileWriter(); 77 | for (let count = 0; count < remoteProfiles.length; count++) { 78 | let profileObj = remoteProfiles[count] as Profile; 79 | SFPLogger.log('Reconciling Tabs', LoggerLevel.DEBUG); 80 | await this.reconcileTabs(profileObj); 81 | //Find correct profile path, so that remote could be overlaid 82 | let indices = _.keys(_.pickBy(profilesToRetrieveChunked, { name: profileObj.fullName })); 83 | for (const index of indices) { 84 | let filePath = profilesToRetrieveChunked[index].path; 85 | if (filePath) { 86 | profileWriter.writeProfile( 87 | profileObj, 88 | path.join(process.cwd(), profilesToRetrieveChunked[index].path) 89 | ); 90 | } else { 91 | SFPLogger.log('File path not found...', LoggerLevel.DEBUG); 92 | } 93 | } 94 | } 95 | progressBar.increment(j - i > chunk ? chunk : j - i); 96 | } 97 | progressBar.stop(); 98 | } else { 99 | SFPLogger.log(`No Profiles found to retrieve`, LoggerLevel.INFO); 100 | } 101 | 102 | if (profileStatus.deleted && isdelete) { 103 | profileStatus.deleted.forEach((profile) => { 104 | if (fs.existsSync(path.join(process.cwd(), profile.path))) { 105 | fs.unlinkSync(path.join(process.cwd(), profile.path)); 106 | } 107 | }); 108 | } 109 | //Retun final status 110 | return profileStatus; 111 | } 112 | } 113 | --------------------------------------------------------------------------------
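To close out the profile sync walkthrough above: sync() retrieves profiles ten at a time using a hand-rolled index loop. The sketch below restates that batching pattern in a reusable form; the chunk helper and the loadProfiles callback are illustrative assumptions and are not claimed to match src/utils/chunkArray.ts or the ProfileRetriever API.

```typescript
// Minimal sketch of the batching pattern used in ProfileSync.sync, assuming a
// generic chunking helper. It mirrors the manual for-loop (chunk size 10); it is
// not necessarily the repository's own chunkArray.ts implementation.
function chunk<T>(items: T[], size: number): T[][] {
    const batches: T[][] = [];
    for (let i = 0; i < items.length; i += size) {
        batches.push(items.slice(i, i + size));
    }
    return batches;
}

// Hypothetical usage: retrieve profiles ten at a time through a caller-supplied loader.
async function retrieveInBatches(
    profileNames: string[],
    loadProfiles: (names: string[]) => Promise<unknown[]>
): Promise<void> {
    for (const batch of chunk(profileNames, 10)) {
        await loadProfiles(batch); // one metadata call per batch, as in sync()
    }
}
```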