├── .husky
    ├── .gitignore
    ├── commit-msg
    ├── post-merge
    ├── post-rewrite
    ├── post-checkout
    ├── pre-commit
    └── pre-push
├── .trivyignore
├── __mocks__
    ├── .forceinclude
    └── .forceignore
├── .github
    ├── FUNDING.yml
    ├── linters
    │   ├── .checkov.yml
    │   ├── .jscpd.json
    │   └── .cspell.json
    ├── CODEOWNERS
    ├── dependabot.yml
    ├── actions
    │   └── install
    │   │   └── action.yml
    ├── ISSUE_TEMPLATE
    │   ├── enhancement.md
    │   └── issue.md
    ├── PULL_REQUEST_TEMPLATE.md
    └── workflows
    │   ├── manual-deprecate-versions.yml
    │   ├── on-published-release.yml
    │   ├── manual-manage-versions.yml
    │   ├── reusable-build.yml
    │   ├── run-e2e-tests.yml
    │   ├── on-merged-pull-request.yml
    │   └── on-main-push.yml
├── bin
    ├── run.cmd
    ├── dev.cmd
    ├── run.js
    └── dev.js
├── __tests__
    ├── tsconfig.json
    ├── perf
    │   └── bench.mjs
    ├── __utils__
    │   └── globalTestHelper.ts
    ├── unit
    │   └── lib
    │   │   ├── post-processor
    │   │   ├── baseProcessor.test.ts
    │   │   ├── postProcessorManager.test.ts
    │   │   └── includeProcessor.test.ts
    │   │   ├── utils
    │   │   ├── MessageService.test.ts
    │   │   ├── gitLfsHelper.test.ts
    │   │   ├── fxpHelper.test.ts
    │   │   └── packageHelper.test.ts
    │   │   └── service
    │   │   ├── flowHandler.test.ts
    │   │   ├── diffLineInterpreterCompatibility.test.ts
    │   │   ├── decomposedHandler.test.ts
    │   │   ├── diffLineInterpreter.test.ts
    │   │   ├── inBundleHandler.test.ts
    │   │   ├── customObjectChildHandler.test.ts
    │   │   ├── customLabelHandler.test.ts
    │   │   ├── sharedFolderHandler.test.ts
    │   │   ├── customFieldHandler.test.ts
    │   │   ├── lwcHandler.test.ts
    │   │   ├── botHandler.test.ts
    │   │   ├── typeHandlerFactory.test.ts
    │   │   ├── inFolderHandler.test.ts
    │   │   ├── objectTranslationHandler.test.ts
    │   │   ├── customObjectHandler.test.ts
    │   │   └── reportingFolderHandler.test.ts
    └── functional
    │   ├── main.test.ts
    │   └── delta.nut.ts
├── src
    ├── constant
    │   ├── libConstant.ts
    │   ├── fsConstants.ts
    │   ├── cliConstants.ts
    │   ├── gitConstants.ts
    │   └── metadataConstants.ts
    ├── types
    │   ├── git.ts
    │   ├── sgdResult.ts
    │   ├── work.ts
    │   ├── config.ts
    │   ├── metadata.ts
    │   └── ignore.d.ts
    ├── metadata
    │   ├── MetadataRepository.ts
    │   ├── metadataManager.ts
    │   └── MetadataRepositoryImpl.ts
    ├── utils
    │   ├── asyncFilter.ts
    │   ├── gitLfsHelper.ts
    │   ├── MessageService.ts
    │   ├── fsUtils.ts
    │   ├── fxpHelper.ts
    │   ├── packageHelper.ts
    │   ├── LoggingDecorator.ts
    │   ├── fsHelper.ts
    │   ├── LoggingService.ts
    │   ├── repoGitDiff.ts
    │   ├── ignoreHelper.ts
    │   └── cliHelper.ts
    ├── service
    │   ├── lwcHandler.ts
    │   ├── customObjectChildHandler.ts
    │   ├── inBundleHandler.ts
    │   ├── flowHandler.ts
    │   ├── customFieldHandler.ts
    │   ├── customLabelHandler.ts
    │   ├── botHandler.ts
    │   ├── decomposedHandler.ts
    │   ├── diffLineInterpreter.ts
    │   ├── reportingFolderHandler.ts
    │   ├── sharedFolderHandler.ts
    │   ├── customObjectHandler.ts
    │   ├── objectTranslationHandler.ts
    │   ├── inFolderHandler.ts
    │   ├── containedDecomposedHandler.ts
    │   ├── inResourceHandler.ts
    │   ├── inFileHandler.ts
    │   └── typeHandlerFactory.ts
    ├── post-processor
    │   ├── baseProcessor.ts
    │   ├── postProcessorManager.ts
    │   ├── packageGenerator.ts
    │   └── includeProcessor.ts
    ├── main.ts
    └── commands
    │   └── sgd
    │   └── source
    │   └── delta.ts
├── commitlint.config.js
├── lychee.toml
├── .prettierignore
├── img
    ├── SGD_logo.png
    ├── delta_principles.png
    ├── example_commit.png
    ├── example_package.png
    ├── example_generateDelta.png
    └── example_destructiveChange.png
├── .lintstagedrc
├── .validate-branch-namerc.json
├── .nycrc
├── .prettierrc.json
├── .mocharc.json
├── stryker.conf.mjs
├── .gitignore
├── .codeclimate.yml
├── SECURITY.md
├── tooling
    └── incrementApiVersion.sh
├── knip.config.ts
├── .ls-lint.yml
├── .mega-linter.yml
├── tsconfig.json
├── LICENSE.md
├── PUBLISHING.md
├── messages
    └── delta.md
├── biome.json
└── CODE_OF_CONDUCT.md
/.husky/.gitignore:
--------------------------------------------------------------------------------
1 | _
2 | 
--------------------------------------------------------------------------------
/.trivyignore:
--------------------------------------------------------------------------------
1 | CVE-2023-0842
2 | 
--------------------------------------------------------------------------------
/__mocks__/.forceinclude:
--------------------------------------------------------------------------------
1 | **/jsconfig.json
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ---
2 | github: [scolladon]
3 | 
--------------------------------------------------------------------------------
/bin/run.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | 
3 | node "%~dp0\run" %*
4 | 
--------------------------------------------------------------------------------
/.husky/commit-msg:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | npx commitlint --edit
4 | 
--------------------------------------------------------------------------------
/.husky/post-merge:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | npm run dependencies:reinstall
--------------------------------------------------------------------------------
/.husky/post-rewrite:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | npm run dependencies:reinstall
--------------------------------------------------------------------------------
/.husky/post-checkout:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | npm run dependencies:reinstall
--------------------------------------------------------------------------------
/__tests__/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "../tsconfig"
3 | }
4 | 
5 | 
--------------------------------------------------------------------------------
/src/constant/libConstant.ts:
--------------------------------------------------------------------------------
1 | export const PLUGIN_NAME = 'sfdx-git-delta'
2 | 
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | npx validate-branch-name
4 | npx lint-staged
5 | 
--------------------------------------------------------------------------------
/commitlint.config.js:
--------------------------------------------------------------------------------
1 | export default { extends: ['@commitlint/config-conventional'] }
2 | 
--------------------------------------------------------------------------------
/lychee.toml:
--------------------------------------------------------------------------------
1 | exclude_mail = true
2 | exclude_path = ["CHANGELOG.md", "package-lock.json"]
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | .next
2 | node_modules
3 | output
4 | reports
5 | .github
6 | *.json
7 | *.md
-------------------------------------------------------------------------------- /img/SGD_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/SGD_logo.png -------------------------------------------------------------------------------- /src/types/git.ts: -------------------------------------------------------------------------------- 1 | export type FileGitRef = { 2 | path: string 3 | oid: string 4 | } 5 | -------------------------------------------------------------------------------- /.lintstagedrc: -------------------------------------------------------------------------------- 1 | { 2 | "*{.ts, .js}": ["npx @biomejs/biome check --error-on-warnings --write"] 3 | } 4 | -------------------------------------------------------------------------------- /img/delta_principles.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/delta_principles.png -------------------------------------------------------------------------------- /img/example_commit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/example_commit.png -------------------------------------------------------------------------------- /img/example_package.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/example_package.png -------------------------------------------------------------------------------- /src/types/sgdResult.ts: -------------------------------------------------------------------------------- 1 | export type SgdResult = { 2 | error?: string 3 | 'output-dir': string 4 | } 5 | -------------------------------------------------------------------------------- /bin/dev.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | node --loader ts-node/esm --no-warnings=ExperimentalWarning "%~dp0\dev" %* 4 | -------------------------------------------------------------------------------- /img/example_generateDelta.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/example_generateDelta.png -------------------------------------------------------------------------------- /.validate-branch-namerc.json: -------------------------------------------------------------------------------- 1 | { 2 | "pattern": "^(main){1}$|^(feat|fix|hotfix|release|build|chore|docs)/.+$" 3 | } 4 | -------------------------------------------------------------------------------- /img/example_destructiveChange.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scolladon/sfdx-git-delta/HEAD/img/example_destructiveChange.png -------------------------------------------------------------------------------- /.nycrc: -------------------------------------------------------------------------------- 1 | { 2 | "check-coverage": true, 3 | "lines": 100, 4 | "statements": 100, 5 | "functions": 100, 6 | "branches": 100 7 | } -------------------------------------------------------------------------------- /.husky/pre-push: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | npm run lint 4 | npm pack 
5 | npm run test 6 | npm outdated || true 7 | npm audit || true 8 | npm run lint:dependencies || true 9 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5", 3 | "tabWidth": 2, 4 | "semi": false, 5 | "arrowParens": "avoid", 6 | "singleQuote": true, 7 | "endOfLine":"auto" 8 | } -------------------------------------------------------------------------------- /bin/run.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | async function main() { 3 | const { execute } = await import('@oclif/core') 4 | await execute({ dir: import.meta.url }) 5 | } 6 | 7 | await main() 8 | -------------------------------------------------------------------------------- /.github/linters/.checkov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # You can see all available properties here: https://github.com/bridgecrewio/checkov#configuration-using-a-config-file 3 | quiet: true 4 | skip-check: 5 | - CKV_DOCKER_2 6 | - CKV2_GHA_1 7 | - CKV_GHA_7 8 | -------------------------------------------------------------------------------- /src/constant/fsConstants.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export const DOT = '.' 4 | export const EXTENSION_SUFFIX_REGEX = /\.[^/.]+$/ 5 | export const PATH_SEP = '/' 6 | export const PATH_SEPARATOR_REGEX = /[/\\]+/g 7 | export const UTF8_ENCODING = 'utf8' 8 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This is a comment. 2 | # Each line is a file pattern followed by one or more owners. 3 | 4 | # These owners will be the default owners for everything in 5 | # the repo. 
Unless a later match takes precedence
6 | * @scolladon @mehdicherf
--------------------------------------------------------------------------------
/.mocharc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "require": "ts-node/register,source-map-support/register",
3 |   "watch-extensions": "ts",
4 |   "node-option": ["experimental-specifier-resolution=node","loader=ts-node/esm"],
5 |   "recursive": true,
6 |   "reporter": "spec",
7 |   "timeout": 30000
8 | }
--------------------------------------------------------------------------------
/stryker.conf.mjs:
--------------------------------------------------------------------------------
1 | const config = {
2 |   coverageAnalysis: 'perTest',
3 |   ignorePatterns: ['lib/', 'reports/', 'bin/', 'e2e/'],
4 |   mutate: ['src/**/*.ts', '!src/metadata/v*.ts'],
5 |   reporters: ['html', 'progress'],
6 |   testRunner: 'jest',
7 | }
8 | export default config
9 | 
--------------------------------------------------------------------------------
/src/constant/cliConstants.ts:
--------------------------------------------------------------------------------
1 | import { HEAD } from './gitConstants.js'
2 | 
3 | export const TO_DEFAULT_VALUE = HEAD
4 | export const OUTPUT_DEFAULT_VALUE = './output'
5 | export const SOURCE_DEFAULT_VALUE = './'
6 | export const REPO_DEFAULT_VALUE = './'
7 | export const TAB = '\t'
8 | 
--------------------------------------------------------------------------------
/bin/dev.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S NODE_OPTIONS="--no-warnings=ExperimentalWarning" npx ts-node --project tsconfig.json --esm
2 | async function main() {
3 |   const { execute } = await import('@oclif/core')
4 |   await execute({ development: true, dir: import.meta.url })
5 | }
6 | 
7 | await main()
8 | 
--------------------------------------------------------------------------------
/.github/linters/.jscpd.json:
--------------------------------------------------------------------------------
1 | {
2 |   "threshold": 0,
3 |   "reporters": ["html", "markdown"],
4 |   "ignore": [
5 |     "**/node_modules/**",
6 |     "**/.git/**",
7 |     "**/*cache*/**",
8 |     "**/.github/**",
9 |     "**/report/**",
10 |     "**/img/**",
11 |     "**/__tests__/**"
12 |   ]
13 | }
14 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .stryker-tmp
2 | .wireit
3 | *-debug.log
4 | *-error.log
5 | /.nyc_output
6 | /e2e
7 | /lib
8 | /node_modules
9 | /reports
10 | install-state.gz
11 | megalinter-reports/
12 | package.tgz
13 | perf-result.txt
14 | sfdx-git-delta-*.tgz
15 | stderr*.txt
16 | stdout*.txt
17 | tsconfig.tsbuildinfo
18 | 
--------------------------------------------------------------------------------
/src/metadata/MetadataRepository.ts:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | 
3 | import type { Metadata } from '../types/metadata.js'
4 | 
5 | export interface MetadataRepository {
6 |   has(path: string): boolean
7 |   get(path: string): Metadata | undefined
8 |   getFullyQualifiedName(path: string): string
9 |   values(): Metadata[]
10 | }
11 | 
--------------------------------------------------------------------------------
/src/types/work.ts:
--------------------------------------------------------------------------------
1 | import type { Config } from './config.js'
2 | 
3 | export type Manifest = Map<string, Set<string>>
4 | 
5 | export type Manifests = {
6 |   package: Manifest
7 |   destructiveChanges: Manifest
8 | }
9 | 
10 | export type Work = {
11 |   config: Config
12 |   diffs: Manifests
13 |   warnings: Error[]
14 | }
15 | 
--------------------------------------------------------------------------------
/.codeclimate.yml:
--------------------------------------------------------------------------------
1 | ---
2 | plugins:
3 |   duplication:
4 |     enabled: true
5 |     config:
6 |       languages:
7 |         - javascript
8 |   fixme:
9 |     enabled: true
10 | exclude_patterns:
11 |   - node_modules/**/*
12 |   - __tests__/**/*
13 |   - __mocks__/**/*
14 |   - output/**/*
15 |   - reports/**/*
16 |   - .github/**/*
17 |   - lib/**/*
18 | 
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 | 
3 | ## Supported Versions
4 | 
5 | | Version | Supported          |
6 | |---------|--------------------|
7 | | 6.x.x   | :white_check_mark: |
8 | | < 6.0.0 | :x:                |
9 | 
10 | ## Reporting a Vulnerability
11 | 
12 | If you discover a security vulnerability, please open an issue with label `type: security`.
13 | 
--------------------------------------------------------------------------------
/tooling/incrementApiVersion.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | filename=$(find src/metadata -type f -name "v*.ts" | sort | tail -1)
4 | version=$(echo "$filename" | tr -d -c 0-9)
5 | ((version++))
6 | \sed -i "" "s/const latestVersion: number = $((version-1))/const latestVersion: number = $version/g" src/metadata/metadataManager.ts
7 | targetname="src/metadata/v${version}.ts"
8 | \cp "$filename" "$targetname"
9 | 
--------------------------------------------------------------------------------
/__mocks__/.forceignore:
--------------------------------------------------------------------------------
1 | # List files or directories below to ignore them when running force:source:push, force:source:pull, and force:source:status
2 | # More information: https://developer.salesforce.com/docs/atlas.en-us.sfdx_dev.meta/sfdx_dev/sfdx_dev_exclude_source.htm
3 | #
4 | 
5 | package.xml
6 | **/jsconfig.json
7 | force-app/main/default/pages/*
8 | 
9 | 
10 | # LWC Jest
11 | **/__tests__/**
--------------------------------------------------------------------------------
/src/types/config.ts:
--------------------------------------------------------------------------------
1 | export type Config = {
2 |   to: string
3 |   from: string
4 |   output: string
5 |   source: string[]
6 |   ignore?: string | undefined
7 |   ignoreDestructive?: string | undefined
8 |   apiVersion?: number | undefined
9 |   repo: string
10 |   ignoreWhitespace: boolean
11 |   generateDelta: boolean
12 |   include?: string | undefined
13 |   includeDestructive?: string | undefined
14 | }
15 | 
--------------------------------------------------------------------------------
/src/utils/asyncFilter.ts:
--------------------------------------------------------------------------------
1 | const asyncFilter = async (
2 |   list: string[],
3 |   predicate: (t: string) => Promise<boolean>
4 | ) => {
5 |   const resolvedPredicates: boolean[] = []
6 |   for (const elem of list) {
7 |     const predicateResult = await predicate(elem)
8 |     resolvedPredicates.push(predicateResult)
9 |   }
10 |   return list.filter((_, idx) => resolvedPredicates[idx])
11 | }
12 | export default asyncFilter
13 | 
--------------------------------------------------------------------------------
/.github/dependabot.yml:
-------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "monthly" 8 | commit-message: 9 | prefix: "build" 10 | 11 | - package-ecosystem: "npm" 12 | directory: "/" 13 | schedule: 14 | interval: "weekly" 15 | versioning-strategy: increase 16 | commit-message: 17 | prefix: "build" 18 | allow: 19 | - dependency-type: "all" 20 | -------------------------------------------------------------------------------- /src/service/lwcHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { parse } from 'node:path/posix' 3 | 4 | import { PATH_SEP } from '../constant/fsConstants.js' 5 | 6 | import InResourceHandler from './inResourceHandler.js' 7 | 8 | export default class LwcHandler extends InResourceHandler { 9 | protected override _isProcessable() { 10 | const parentFolder = parse(this.line).dir.split(PATH_SEP).pop() 11 | 12 | return parentFolder !== this.metadataDef.directoryName 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /knip.config.ts: -------------------------------------------------------------------------------- 1 | export default { 2 | entry: [ 3 | 'src/commands/sgd/source/delta.ts', 4 | 'bin/dev.js', 5 | 'bin/run.js', 6 | '**/*.{nut,test}.ts', 7 | '.github/**/*.yml', 8 | ], 9 | project: ['**/*.{ts,js,json,yml}', '!src/metadata/v*.ts'], 10 | ignoreDependencies: [ 11 | '@salesforce/ts-sinon', 12 | '@types/mocha', 13 | 'mocha', 14 | 'sinon', 15 | 'ts-jest-mock-import-meta', 16 | ], 17 | ignoreBinaries: ['npm-check-updates'], 18 | } 19 | -------------------------------------------------------------------------------- /src/service/customObjectChildHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import StandardHandler from './standardHandler.js' 3 | 4 | export default class CustomObjectChildHandler extends StandardHandler { 5 | protected override _getElementName() { 6 | const parentTypeSuffix = 7 | this.splittedLine[ 8 | this.splittedLine.indexOf(this.metadataDef.directoryName) - 1 9 | ] 10 | const elementName = super._getElementName() 11 | return `${parentTypeSuffix}.${elementName}` 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /.ls-lint.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ls: 3 | .config.ts: camelCase | PascalCase 4 | .d.ts: camelCase | PascalCase 5 | .dir: kebab-case | regex:__[a-z]+__ 6 | .md: lowercase | SCREAMING_SNAKE_CASE 7 | .nut.ts: camelCase | PascalCase 8 | .test.ts: camelCase | PascalCase 9 | .ts: camelCase | PascalCase 10 | 11 | ignore: 12 | - .git 13 | - .github 14 | - .husky 15 | - .nyc_output 16 | - .stryker-tmp 17 | - .vscode 18 | - .idea 19 | - .wireit 20 | - e2e 21 | - lib 22 | - megalinter-reports 23 | - node_modules 24 | - reports -------------------------------------------------------------------------------- /src/post-processor/baseProcessor.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 4 | import type { Config } from '../types/config.js' 5 | import type { Work } from '../types/work.js' 6 | 7 | export default class BaseProcessor { 8 | protected readonly config: Config 9 | 10 | constructor( 11 | 
protected readonly work: Work, 12 | protected readonly metadata: MetadataRepository 13 | ) { 14 | this.config = work.config 15 | } 16 | 17 | public async process() { 18 | throw new Error('this class should be derived') 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/constant/gitConstants.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | export const ADDITION = 'A' 3 | export const BLOB_TYPE = 'blob' 4 | export const DELETION = 'D' 5 | export const GIT_DIFF_TYPE_REGEX = /^.\s+/u 6 | export const GIT_FOLDER = '.git' 7 | export const HEAD = 'HEAD' 8 | export const IGNORE_WHITESPACE_PARAMS = [ 9 | '--ignore-all-space', 10 | '--ignore-blank-lines', 11 | '--ignore-cr-at-eol', 12 | '--word-diff-regex', 13 | '--word-diff-regex=|[^[:space:]]', 14 | ] 15 | export const MODIFICATION = 'M' 16 | export const NUM_STAT_CHANGE_INFORMATION = /^((\d+|\-)\t){2}/ 17 | export const TREE_TYPE = 'tree' 18 | -------------------------------------------------------------------------------- /src/types/metadata.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | export type BaseMetadata = { 4 | suffix?: string 5 | xmlName?: string 6 | } 7 | 8 | export type SharedFolderMetadata = BaseMetadata & { 9 | content?: BaseMetadata[] 10 | } 11 | 12 | export type SharedFileMetadata = BaseMetadata & { 13 | parentXmlName?: string 14 | xmlTag?: string 15 | key?: string 16 | excluded?: boolean 17 | pruneOnly?: boolean 18 | } 19 | 20 | export type Metadata = BaseMetadata & 21 | SharedFolderMetadata & 22 | SharedFileMetadata & { 23 | directoryName: string 24 | inFolder: boolean 25 | metaFile: boolean 26 | childXmlNames?: string[] 27 | } 28 | -------------------------------------------------------------------------------- /.github/actions/install/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Install 3 | description: Install dependencies 4 | 5 | runs: 6 | using: composite 7 | steps: 8 | - name: Get cache directory 9 | id: cache-dir 10 | run: echo "dir=$(npm config get cache)" >> "$GITHUB_OUTPUT" 11 | shell: bash 12 | 13 | - uses: actions/cache@v4 14 | with: 15 | path: ${{ steps.cache-dir.outputs.dir }} 16 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 17 | 18 | - name: Install dependencies 19 | run: npm ci 20 | shell: bash 21 | env: 22 | HUSKY: '0' # By default do not run HUSKY install 23 | -------------------------------------------------------------------------------- /src/service/inBundleHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { PATH_SEP } from '../constant/fsConstants.js' 3 | import { META_REGEX } from '../constant/metadataConstants.js' 4 | 5 | import InResourceHandler from './inResourceHandler.js' 6 | 7 | export default class BundleHandler extends InResourceHandler { 8 | protected override _getElementName() { 9 | const bundlePath: string[] = this.splittedLine 10 | .slice(this.splittedLine.indexOf(this.metadataDef.directoryName) + 1) 11 | .slice(0, 2) 12 | 13 | return bundlePath 14 | .join(PATH_SEP) 15 | .replace(META_REGEX, '') 16 | .replace(this.suffixRegex, '') 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/service/flowHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | 
import { log } from '../utils/LoggingDecorator.js' 4 | import { MessageService } from '../utils/MessageService.js' 5 | import StandardHandler from './standardHandler.js' 6 | 7 | export default class FlowHandler extends StandardHandler { 8 | @log 9 | public override async handleDeletion() { 10 | await super.handleDeletion() 11 | this.warnFlowDeleted() 12 | } 13 | 14 | private warnFlowDeleted() { 15 | const message = new MessageService() 16 | this.work.warnings.push( 17 | new Error( 18 | message.getMessage('warning.FlowDeleted', [this._getElementName()]) 19 | ) 20 | ) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/service/customFieldHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { MASTER_DETAIL_TAG } from '../constant/metadataConstants.js' 3 | import { readPathFromGit } from '../utils/fsHelper.js' 4 | 5 | import DecomposedHandler from './decomposedHandler.js' 6 | 7 | export default class CustomFieldHandler extends DecomposedHandler { 8 | // QUESTION: Why we need to add parent object for Master Detail field ? https://help.salesforce.com/s/articleView?id=000386883&type=1 9 | protected override async _copyParent() { 10 | const data = await readPathFromGit( 11 | { path: this.line, oid: this.config.to }, 12 | this.config 13 | ) 14 | if (!data.includes(MASTER_DETAIL_TAG)) return 15 | 16 | await super._copyParent() 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/utils/gitLfsHelper.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { sep } from 'node:path/posix' 3 | 4 | import { UTF8_ENCODING } from '../constant/fsConstants.js' 5 | import { GIT_FOLDER } from '../constant/gitConstants.js' 6 | 7 | const LFS_HEADER = Buffer.from('version https://git-lfs') 8 | 9 | export const isLFS = (content: Buffer): boolean => 10 | content.subarray(0, LFS_HEADER.length).equals(LFS_HEADER) 11 | 12 | export const getLFSObjectContentPath = (bufferContent: Buffer): string => { 13 | const content = bufferContent.toString(UTF8_ENCODING) 14 | const oid = content.split(/\n/)[1].split(':')[1] 15 | return [ 16 | GIT_FOLDER, 17 | 'lfs', 18 | 'objects', 19 | oid.slice(0, 2), 20 | oid.slice(2, 4), 21 | oid, 22 | ].join(sep) 23 | } 24 | -------------------------------------------------------------------------------- /__tests__/perf/bench.mjs: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { execCmd } from '@salesforce/cli-plugins-testkit' 3 | import benchmark from 'benchmark' 4 | 5 | const suite = new benchmark.Suite() 6 | 7 | suite 8 | .add('e2e-test', () => { 9 | execCmd( 10 | 'sgd:source:delta --from "origin/e2e/base" --to "origin/e2e/head" --output e2e/expected --generate-delta --repo e2e --include e2e/.sgdinclude --include-destructive e2e/.sgdincludeDestructive --ignore e2e/.sgdignore --ignore-destructive e2e/.sgdignoreDestructive', 11 | { 12 | ensureExitCode: 0, 13 | } 14 | ) 15 | }) 16 | .on('cycle', event => { 17 | // biome-ignore lint/suspicious/noConsole: necessary for metric aggregation 18 | console.info(String(event.target)) 19 | }) 20 | .run() 21 | -------------------------------------------------------------------------------- /src/utils/MessageService.ts: -------------------------------------------------------------------------------- 1 | import { Messages } from '@salesforce/core' 2 | import { PLUGIN_NAME } 
from '../constant/libConstant.js' 3 | import { log } from './LoggingDecorator.js' 4 | 5 | export class MessageService { 6 | private static instance: Messages 7 | 8 | constructor() { 9 | if (!MessageService.instance) { 10 | Messages.importMessagesDirectoryFromMetaUrl(import.meta.url) 11 | MessageService.instance = Messages.loadMessages(PLUGIN_NAME, 'delta') 12 | } 13 | } 14 | 15 | @log 16 | getMessage(key: string, tokens?: string[]): string { 17 | return MessageService.instance.getMessage(key, tokens) 18 | } 19 | 20 | @log 21 | getMessages(key: string, tokens?: string[]): string[] { 22 | return MessageService.instance.getMessages(key, tokens) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /.mega-linter.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Configuration file for MegaLinter 3 | # See all available variables at https://megalinter.io/latest/config-file/ and in linters documentation 4 | 5 | APPLY_FIXES: all # all, none, or list of linter keys 6 | # ENABLE: # If you use ENABLE variable, all other languages/formats/tooling-formats will be disabled by default 7 | # ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default 8 | # DISABLE: 9 | # - COPYPASTE # Uncomment to disable checks of excessive copy-pastes 10 | # - SPELL # Uncomment to disable checks of spelling mistakes 11 | DISABLE_LINTERS: 12 | - SPELL_MISSPELL 13 | - TYPESCRIPT_STANDARD 14 | - TYPESCRIPT_PRETTIER 15 | - MARKDOWN_MARKDOWN_LINK_CHECK 16 | SHOW_ELAPSED_TIME: true 17 | FILEIO_REPORTER: false 18 | # DISABLE_ERRORS: true # Uncomment if you want MegaLinter to detect errors but not block CI to pass 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/enhancement.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Enhancement request 3 | about: Use this template for tracking new features. 4 | title: '[FEATURE NAME]' 5 | labels: enhancement 6 | assignees: scolladon 7 | --- 8 | 9 | ### Is your proposal related to a problem? 10 | 11 | --- 12 | 13 | 17 | 18 | ### Describe a solution you propose 19 | 20 | --- 21 | 22 | 25 | 26 | ### Describe alternatives you've considered 27 | 28 | --- 29 | 30 | 33 | 34 | ### Additional context 35 | 36 | --- 37 | 38 | 42 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 4 | 5 | # Explain your changes 6 | 7 | --- 8 | 9 | 12 | 13 | # Does this close any currently open issues? 14 | 15 | --- 16 | 17 | 21 | 22 | closes # 23 | 24 | - [ ] Jest tests added to cover the fix. 25 | - [ ] NUT tests added to cover the fix. 26 | - [ ] E2E tests added to cover the fix. 27 | 28 | # Any particular element that can be tested locally 29 | 30 | --- 31 | 32 | 35 | 36 | # Any other comments 37 | 38 | --- 39 | 40 | 46 | -------------------------------------------------------------------------------- /.github/workflows/manual-deprecate-versions.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Deprecate versions 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | version-expression: 8 | description: version number (semver format) or range to deprecate 9 | required: true 10 | type: string 11 | rationale: 12 | description: explain why this version is deprecated. 
No message content will un-deprecate the version
13 |         type: string
14 | 
15 | 
16 | jobs:
17 |   deprecate:
18 |     runs-on: ubuntu-latest
19 |     steps:
20 |       - name: Checkout sources
21 |         uses: actions/checkout@v4
22 | 
23 |       - name: Setup node
24 |         uses: actions/setup-node@v4
25 |         with:
26 |           node-version: 20
27 |           registry-url: 'https://registry.npmjs.org'
28 | 
29 |       - name: Change version
30 |         run: npm deprecate sfdx-git-delta@$"${{ github.event.inputs.version-expression }}" "${{ github.event.inputs.rationale }}"
31 |         env:
32 |           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
33 | 
--------------------------------------------------------------------------------
/__tests__/__utils__/globalTestHelper.ts:
--------------------------------------------------------------------------------
1 | 'use strict'
2 | import { MetadataRepository } from '../../src/metadata/MetadataRepository'
3 | import {
4 |   getDefinition,
5 |   getLatestSupportedVersion,
6 | } from '../../src/metadata/metadataManager'
7 | import type { Work } from '../../src/types/work'
8 | 
9 | export const getGlobalMetadata = async (): Promise<MetadataRepository> => {
10 |   const apiVersion: number = getLatestSupportedVersion()
11 |   const metadata: MetadataRepository = await getDefinition(apiVersion)
12 |   return metadata
13 | }
14 | 
15 | export const getWork = (): Work => ({
16 |   diffs: {
17 |     package: new Map<string, Set<string>>(),
18 |     destructiveChanges: new Map<string, Set<string>>(),
19 |   },
20 |   config: {
21 |     source: ['./'],
22 |     output: 'output',
23 |     generateDelta: true,
24 |     to: '',
25 |     from: '',
26 |     ignore: '',
27 |     ignoreDestructive: '',
28 |     apiVersion: -1,
29 |     repo: '',
30 |     ignoreWhitespace: false,
31 |     include: '',
32 |     includeDestructive: '',
33 |   },
34 |   warnings: [],
35 | })
36 | 
--------------------------------------------------------------------------------
/.github/workflows/on-published-release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Published release communication
3 | 
4 | on:
5 |   release:
6 |     types:
7 |       - published
8 | 
9 | jobs:
10 |   release:
11 |     runs-on: ubuntu-latest
12 |     steps:
13 |       - uses: apexskier/github-release-commenter@v1
14 |         with:
15 |           GITHUB_TOKEN: ${{ github.token }}
16 |           comment-template: |
17 |             Shipped in [release `{release_tag}`]({release_link}).
18 |             Version `{release_tag}` will be assigned to the `latest` npm channel soon
19 |             Install it using either `{release_tag}` or the `latest-rc` npm channel
20 |             ```sh
21 |             $ sf plugins install sfdx-git-delta@latest-rc
22 |             # Or
23 |             $ sf plugins install sfdx-git-delta@{release_tag}
24 |             ```
25 |             💡 Enjoying sfdx-git-delta?
26 |             Your contribution helps us provide fast support 🚀 and high quality features 🔥
27 |             Become a [sponsor](https://github.com/sponsors/scolladon) 💙
28 |             Happy incremental deployment!
29 | -------------------------------------------------------------------------------- /__tests__/unit/lib/post-processor/baseProcessor.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { beforeAll, describe, expect, it } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import BaseProcessor from '../../../../src/post-processor/baseProcessor' 6 | import type { Work } from '../../../../src/types/work' 7 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 8 | 9 | describe('BaseProcessor', () => { 10 | let work: Work, metadata: MetadataRepository 11 | beforeAll(async () => { 12 | work = getWork() 13 | metadata = await getGlobalMetadata() 14 | }) 15 | describe('when process is called', () => { 16 | it('throws an error', async () => { 17 | // Arrange 18 | expect.assertions(1) 19 | const sut = new BaseProcessor(work, metadata) 20 | 21 | // Act 22 | try { 23 | await sut.process() 24 | } catch (error) { 25 | // Assert 26 | expect((error as Error).message).toEqual('this class should be derived') 27 | } 28 | }) 29 | }) 30 | }) 31 | -------------------------------------------------------------------------------- /.github/workflows/manual-manage-versions.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Manage versions 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | version-alias: 8 | description: version alias to map to a version number 9 | required: true 10 | type: choice 11 | options: 12 | - stable 13 | - latest 14 | - latest-rc 15 | version-number: 16 | description: version number (semver format) 17 | required: true 18 | default: vX.Y.Z 19 | type: string 20 | 21 | jobs: 22 | add-tag: 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout sources 26 | uses: actions/checkout@v4 27 | 28 | - name: Setup node 29 | uses: actions/setup-node@v4 30 | with: 31 | node-version: 20 32 | registry-url: 'https://registry.npmjs.org' 33 | 34 | - name: Change version 35 | run: npm dist-tag add sfdx-git-delta@${{ github.event.inputs.version-number }} ${{ github.event.inputs.version-alias }} 36 | env: 37 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 38 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@salesforce/dev-config/tsconfig-strict-esm", 3 | "compilerOptions": { 4 | "allowJs": false, 5 | "allowUnreachableCode": false, 6 | "allowUnusedLabels": false, 7 | "alwaysStrict": true, 8 | "declaration": true, 9 | "exactOptionalPropertyTypes": true, 10 | "experimentalDecorators": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "importHelpers": true, 13 | "noImplicitAny": true, 14 | "noImplicitOverride": true, 15 | "noImplicitReturns": true, 16 | "noImplicitThis": true, 17 | "noPropertyAccessFromIndexSignature": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "outDir": "./lib", 21 | "resolveJsonModule": true, 22 | "rootDir": "src", 23 | "skipLibCheck": true, 24 | "strictBindCallApply": true, 25 | "strictFunctionTypes": true, 26 | "strictNullChecks": true, 27 | "strictPropertyInitialization": true, 28 | "useUnknownInCatchVariables": true 29 | }, 30 | "include": [ 31 | "./src/**/*.json", 32 | "./src/**/*" 33 | ], 34 | "paths": { 35 | "ignore": ["src/types/ignore.d.ts"] 36 | } 37 | } 38 | 
-------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | 2 | MIT License 3 | 4 | Copyright (c) 2019 Sebastien Colladon 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. -------------------------------------------------------------------------------- /src/service/customLabelHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { LABEL_DECOMPOSED_SUFFIX } from '../constant/metadataConstants.js' 3 | import { log } from '../utils/LoggingDecorator.js' 4 | import InFileHandler from './inFileHandler.js' 5 | import StandardHandler from './standardHandler.js' 6 | 7 | export default class CustomLabelHandler extends InFileHandler { 8 | @log 9 | public override async handleAddition() { 10 | if (this._isDecomposed()) { 11 | await StandardHandler.prototype.handleAddition.apply(this) 12 | } else { 13 | await super.handleAddition() 14 | } 15 | } 16 | 17 | protected override _shouldTreatDeletionAsDeletion() { 18 | return this._isDecomposed() 19 | } 20 | 21 | protected override _getQualifiedName() { 22 | return '' 23 | } 24 | 25 | protected override _delegateFileCopy() { 26 | return this._isDecomposed() 27 | } 28 | 29 | protected override _isProcessable() { 30 | return true 31 | } 32 | 33 | protected override _shouldTreatContainerType() { 34 | // There is no container / parent type for Contained CustomLabels 35 | return false 36 | } 37 | 38 | protected _isDecomposed() { 39 | return this.ext === LABEL_DECOMPOSED_SUFFIX 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /__tests__/unit/lib/utils/MessageService.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { describe, expect, it } from '@jest/globals' 4 | import { MessageService } from '../../../../src/utils/MessageService.js' 5 | 6 | const mockedMessages = jest.fn() 7 | jest.mock('@salesforce/core', () => { 8 | return { 9 | Messages: { 10 | importMessagesDirectoryFromMetaUrl: jest.fn(), 11 | loadMessages: jest.fn(() => ({ 12 | getMessage: mockedMessages, 13 | getMessages: mockedMessages, 14 | })), 15 | }, 16 | } 17 | }) 18 | 19 | describe('MessageService', () => { 20 | describe('getMessage', () => { 21 | it('calls the @salesforce/core implementation', () => { 22 | // Arrange 23 | const sut = new MessageService() 24 
| 25 | // Act 26 | sut.getMessage('arg') 27 | 28 | // Assert 29 | expect(mockedMessages).toHaveBeenCalledWith('arg', undefined) 30 | }) 31 | }) 32 | 33 | describe('getMessages', () => { 34 | it('calls the @salesforce/core implementation', () => { 35 | // Arrange 36 | const sut = new MessageService() 37 | 38 | // Act 39 | sut.getMessages('arg') 40 | 41 | // Assert 42 | expect(mockedMessages).toHaveBeenCalledWith('arg', undefined) 43 | }) 44 | }) 45 | }) 46 | -------------------------------------------------------------------------------- /.github/workflows/reusable-build.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Build Checks 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | source: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout sources 11 | uses: actions/checkout@v4 12 | 13 | - name: Setup node 14 | uses: actions/setup-node@v4 15 | with: 16 | node-version: 20 17 | 18 | - uses: google/wireit@setup-github-actions-caching/v2 19 | 20 | - name: Setup dependencies, cache and install 21 | uses: ./.github/actions/install 22 | 23 | - name: Lint plugin 24 | run: npm run lint 25 | 26 | - name: Build plugin 27 | run: npm pack 28 | 29 | - name: Unit test 30 | run: npm run test:unit -- --runInBand 31 | 32 | - name: Upload coverage 33 | uses: codecov/codecov-action@v5 34 | with: 35 | token: ${{ secrets.CODECOV_TOKEN }} 36 | continue-on-error: true 37 | 38 | - name: Checkout e2e sources 39 | uses: actions/checkout@v4 40 | with: 41 | ref: 'e2e/head' 42 | fetch-depth: 0 43 | path: ./e2e 44 | 45 | - name: Functional test 46 | run: npm run test:nut 47 | 48 | - uses: actions/upload-artifact@v4 49 | with: 50 | name: coverage-test-report 51 | path: reports/coverage 52 | -------------------------------------------------------------------------------- /src/service/botHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { parse } from 'node:path/posix' 3 | 4 | import { DOT, PATH_SEP } from '../constant/fsConstants.js' 5 | import { log } from '../utils/LoggingDecorator.js' 6 | import { fillPackageWithParameter } from '../utils/packageHelper.js' 7 | import ShareFolderHandler from './sharedFolderHandler.js' 8 | 9 | const BOT_TYPE = 'Bot' 10 | const BOT_EXTENSION = 'bot' 11 | 12 | export default class BotHandler extends ShareFolderHandler { 13 | protected override _getElementName() { 14 | const parsedPath = this._getParsedPath() 15 | const elementName = new Set([ 16 | parsedPath.dir.split(PATH_SEP).pop(), 17 | parsedPath.name, 18 | ]) 19 | return [...elementName].join(DOT) 20 | } 21 | 22 | @log 23 | public override async handleAddition() { 24 | await super.handleAddition() 25 | await this._addParentBot() 26 | } 27 | 28 | protected async _addParentBot() { 29 | const botName = this.parentFolder.split(PATH_SEP).pop() as string 30 | fillPackageWithParameter({ 31 | store: this.diffs.package, 32 | type: BOT_TYPE, 33 | member: botName, 34 | }) 35 | 36 | if (!this.config.generateDelta) return 37 | 38 | const botPath = `${ 39 | parse(this.line).dir 40 | }${PATH_SEP}${botName}.${BOT_EXTENSION}` 41 | 42 | await this._copyWithMetaFile(botPath) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { MetadataRepository } from './metadata/MetadataRepository.js' 3 | import { getDefinition } from './metadata/metadataManager.js' 4 | 
import { getPostProcessors } from './post-processor/postProcessorManager.js' 5 | import DiffLineInterpreter from './service/diffLineInterpreter.js' 6 | import type { Config } from './types/config.js' 7 | import type { Work } from './types/work.js' 8 | import CLIHelper from './utils/cliHelper.js' 9 | import { Logger, lazy } from './utils/LoggingService.js' 10 | import RepoGitDiff from './utils/repoGitDiff.js' 11 | 12 | export default async (config: Config): Promise => { 13 | Logger.trace('main: entry') 14 | Logger.debug(lazy`main: arguments ${config}`) 15 | 16 | const work: Work = { 17 | config, 18 | diffs: { package: new Map(), destructiveChanges: new Map() }, 19 | warnings: [], 20 | } 21 | const cliHelper = new CLIHelper(work) 22 | await cliHelper.validateConfig() 23 | 24 | const metadata: MetadataRepository = await getDefinition(config.apiVersion) 25 | const repoGitDiffHelper = new RepoGitDiff(config, metadata) 26 | 27 | const lines = await repoGitDiffHelper.getLines() 28 | const lineProcessor = new DiffLineInterpreter(work, metadata) 29 | await lineProcessor.process(lines) 30 | await getPostProcessors(work, metadata).execute() 31 | 32 | Logger.debug(lazy`main: return ${work}`) 33 | Logger.trace('main: exit') 34 | return work 35 | } 36 | -------------------------------------------------------------------------------- /.github/workflows/run-e2e-tests.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: E2E Tests a published version 3 | on: 4 | workflow_call: 5 | inputs: 6 | channel: 7 | type: string 8 | default: latest-rc 9 | 10 | jobs: 11 | e2e-test: 12 | strategy: 13 | fail-fast: false 14 | matrix: 15 | os: [macos-latest, windows-latest, ubuntu-latest] 16 | node: [20, 22, 24] 17 | cli: ['@salesforce/cli'] 18 | runs-on: ${{ matrix.os }} 19 | steps: 20 | - name: Checkout sources 21 | uses: actions/checkout@v4 22 | with: 23 | ref: 'e2e/head' 24 | fetch-depth: 0 25 | 26 | - name: Setup node 27 | uses: actions/setup-node@v4 28 | with: 29 | node-version: ${{ matrix.node }} 30 | 31 | - name: Set environment variables 32 | run: | 33 | echo "SF_DISABLE_AUTOUPDATE=true" >> "$GITHUB_ENV" 34 | echo "SF_DISABLE_TELEMETRY=true" >> "$GITHUB_ENV" 35 | 36 | - name: Install cli 37 | run: npm install -g ${{ matrix.cli }} 38 | 39 | - name: Install new plugin version 40 | run: echo y | sf plugins install sfdx-git-delta@${{ inputs.channel }} 41 | 42 | - name: Test new plugin version 43 | run: sf sgd source delta --help 44 | 45 | - name: E2E Tests 46 | run: | 47 | npm install 48 | npm run test:e2e 49 | 50 | - name: Display diff 51 | run: git --no-pager diff 52 | -------------------------------------------------------------------------------- /src/service/decomposedHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join } from 'node:path/posix' 3 | 4 | import { PATH_SEP } from '../constant/fsConstants.js' 5 | import { METAFILE_SUFFIX } from '../constant/metadataConstants.js' 6 | import { log } from '../utils/LoggingDecorator.js' 7 | import StandardHandler from './standardHandler.js' 8 | 9 | export default class DecomposedHandler extends StandardHandler { 10 | @log 11 | public override async handleAddition() { 12 | await super.handleAddition() 13 | if (!this.config.generateDelta) return 14 | await this._copyParent() 15 | } 16 | 17 | protected async _copyParent() { 18 | const parentDirPath = this.splittedLine 19 | .slice(0, this.splittedLine.indexOf(this.metadataDef.directoryName)) 20 | 
.join(PATH_SEP) 21 | const parentTypeName = this.getParentName() 22 | 23 | const parentTypeSuffix = this.metadata.get( 24 | this.metadataDef.parentXmlName! 25 | )!.suffix 26 | 27 | const parentPath = join( 28 | parentDirPath, 29 | `${parentTypeName}.${parentTypeSuffix}${METAFILE_SUFFIX}` 30 | ) 31 | 32 | await this._copyWithMetaFile(parentPath) 33 | } 34 | 35 | protected override _getElementName() { 36 | const parentTypeSuffix = this.getParentName() 37 | const elementName = super._getElementName() 38 | return `${parentTypeSuffix}.${elementName}` 39 | } 40 | 41 | protected getParentName() { 42 | return this.splittedLine[ 43 | this.splittedLine.indexOf(this.metadataDef.directoryName) - 1 44 | ] 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/service/diffLineInterpreter.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { availableParallelism } from 'node:os' 3 | 4 | import { queue } from 'async' 5 | 6 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 7 | import type { Work } from '../types/work.js' 8 | import { log } from '../utils/LoggingDecorator.js' 9 | import StandardHandler from './standardHandler.js' 10 | import TypeHandlerFactory from './typeHandlerFactory.js' 11 | 12 | export default class DiffLineInterpreter { 13 | constructor( 14 | protected readonly work: Work, 15 | protected readonly metadata: MetadataRepository 16 | ) {} 17 | 18 | @log 19 | public async process(lines: string[]) { 20 | const typeHandlerFactory = new TypeHandlerFactory(this.work, this.metadata) 21 | const MAX_PARALLELISM = this.getConcurrencyThreshold() 22 | const processor = queue( 23 | async (handler: StandardHandler) => await handler.handle(), 24 | MAX_PARALLELISM 25 | ) 26 | 27 | for (const line of lines) { 28 | const handler: StandardHandler = typeHandlerFactory.getTypeHandler(line) 29 | processor.push(handler) 30 | } 31 | 32 | if (processor.length() > 0) { 33 | await processor.drain() 34 | } 35 | } 36 | 37 | protected getConcurrencyThreshold() { 38 | // This is because of this issue: https://github.com/scolladon/sfdx-git-delta/issues/762#issuecomment-1907609957 39 | const AVAILABLE_PARALLELISM = availableParallelism 40 | ? 
availableParallelism() 41 | : Infinity 42 | 43 | return Math.min(AVAILABLE_PARALLELISM, 6) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/service/reportingFolderHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { join } from 'node:path/posix' 4 | import { METAFILE_SUFFIX } from '../constant/metadataConstants.js' 5 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 6 | import { getSharedFolderMetadata } from '../metadata/metadataManager.js' 7 | import { Metadata } from '../types/metadata.js' 8 | import type { Manifest, Work } from '../types/work.js' 9 | import { fillPackageWithParameter } from '../utils/packageHelper.js' 10 | import InFolderHandler from './inFolderHandler.js' 11 | 12 | export default class ReportingFolderHandler extends InFolderHandler { 13 | /* jscpd:ignore-start */ 14 | protected readonly sharedFolderMetadata: Map 15 | 16 | constructor( 17 | line: string, 18 | metadataDef: Metadata, 19 | work: Work, 20 | metadata: MetadataRepository 21 | ) { 22 | super(line, metadataDef, work, metadata) 23 | this.sharedFolderMetadata = getSharedFolderMetadata(this.metadata) 24 | } 25 | /* jscpd:ignore-end */ 26 | 27 | protected override async _copyFolderMetaFile() { 28 | const [, folderPath, folderName] = this._parseLine()! 29 | const folderFileName = `${folderName}${METAFILE_SUFFIX}` 30 | await this._copyWithMetaFile(join(folderPath, folderFileName)) 31 | } 32 | 33 | protected override _fillPackage(store: Manifest) { 34 | const type = this.sharedFolderMetadata.get(this.ext!) as string 35 | fillPackageWithParameter({ 36 | store, 37 | type: type, 38 | member: this._getElementName(), 39 | }) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/utils/fsUtils.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { access, readFile as fsReadFile, stat } from 'node:fs/promises' 4 | import { isAbsolute, normalize, relative } from 'node:path/posix' 5 | 6 | import { 7 | PATH_SEP, 8 | PATH_SEPARATOR_REGEX, 9 | UTF8_ENCODING, 10 | } from '../constant/fsConstants.js' 11 | 12 | export const treatPathSep = (data: string) => 13 | data.replace(PATH_SEPARATOR_REGEX, PATH_SEP) 14 | 15 | export const sanitizePath = (data: string | undefined) => 16 | data ? 
normalize(treatPathSep(data)) : data 17 | 18 | export const isSubDir = (parent: string, dir: string) => { 19 | const rel = relative(parent, dir) 20 | return !!rel && !rel.startsWith('..') && !isAbsolute(rel) 21 | } 22 | 23 | export const isSamePath = (pathA: string, pathB: string) => 24 | !relative(pathA, pathB) 25 | 26 | export const dirExists = async (dir: string) => { 27 | try { 28 | const st = await stat(dir) 29 | return st.isDirectory() 30 | } catch { 31 | return false 32 | } 33 | } 34 | 35 | export const fileExists = async (file: string) => { 36 | try { 37 | const st = await stat(file) 38 | return st.isFile() 39 | } catch { 40 | return false 41 | } 42 | } 43 | 44 | export const pathExists = async (path: string) => { 45 | let pathIsAccessible = true 46 | try { 47 | await access(path) 48 | } catch { 49 | pathIsAccessible = false 50 | } 51 | return pathIsAccessible 52 | } 53 | 54 | export const readFile = async (path: string) => { 55 | const file = await fsReadFile(path, { 56 | encoding: UTF8_ENCODING, 57 | }) 58 | return file 59 | } 60 | -------------------------------------------------------------------------------- /src/post-processor/postProcessorManager.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 3 | import type { Work } from '../types/work.js' 4 | 5 | import BaseProcessor from './baseProcessor.js' 6 | import FlowTranslationProcessor from './flowTranslationProcessor.js' 7 | import IncludeProcessor from './includeProcessor.js' 8 | import PackageGenerator from './packageGenerator.js' 9 | 10 | const processors: Array = [ 11 | FlowTranslationProcessor, 12 | IncludeProcessor, 13 | ] 14 | 15 | // It must be done last 16 | processors.push(PackageGenerator) 17 | 18 | export default class PostProcessorManager { 19 | protected readonly postProcessors: BaseProcessor[] 20 | 21 | constructor(protected readonly work: Work) { 22 | this.postProcessors = [] 23 | } 24 | 25 | public use(postProcessor: BaseProcessor) { 26 | this.postProcessors.push(postProcessor) 27 | return this 28 | } 29 | 30 | public async execute() { 31 | for (const postProcessor of this.postProcessors) { 32 | try { 33 | await postProcessor.process() 34 | } catch (error) { 35 | if (error instanceof Error) { 36 | this.work.warnings.push(error) 37 | } 38 | } 39 | } 40 | } 41 | } 42 | 43 | export const getPostProcessors = (work: Work, metadata: MetadataRepository) => { 44 | const postProcessor = new PostProcessorManager(work) 45 | 46 | for (const processor of processors) { 47 | const instance = new processor(work, metadata) 48 | postProcessor.use(instance) 49 | } 50 | 51 | return postProcessor 52 | } 53 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/flowHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { DELETION } from '../../../../src/constant/gitConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import FlowHandler from '../../../../src/service/flowHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 9 | 10 | jest.mock('../../../../src/utils/fsHelper') 11 | jest.mock('../../../../src/utils/MessageService') 12 | 13 | const objectType = { 14 | 
directoryName: 'flows', 15 | inFolder: false, 16 | metaFile: false, 17 | suffix: 'flow', 18 | xmlName: 'Flow', 19 | } 20 | const basePath = `force-app/main/default/${objectType.directoryName}` 21 | let work: Work 22 | beforeEach(() => { 23 | jest.clearAllMocks() 24 | work = getWork() 25 | }) 26 | 27 | describe('flowHandler', () => { 28 | let globalMetadata: MetadataRepository 29 | beforeAll(async () => { 30 | globalMetadata = await getGlobalMetadata() 31 | }) 32 | describe('when a flow is deleted', () => { 33 | it('warns the user not to', async () => { 34 | // Arrange 35 | const sut = new FlowHandler( 36 | `${DELETION} ${basePath}/MyFlow.${objectType.suffix}-meta.xml`, 37 | objectType, 38 | work, 39 | globalMetadata 40 | ) 41 | expect(work.warnings.length).toBe(0) 42 | 43 | // Act 44 | await sut.handle() 45 | 46 | // Assert 47 | expect(work.warnings.length).toBe(1) 48 | }) 49 | }) 50 | }) 51 | -------------------------------------------------------------------------------- /src/utils/fxpHelper.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { XMLBuilder, XMLParser } from 'fast-xml-parser' 4 | 5 | import type { Config } from '../types/config.js' 6 | import type { FileGitRef } from '../types/git.js' 7 | 8 | import { readPathFromGit } from './fsHelper.js' 9 | 10 | const XML_PARSER_OPTION = { 11 | commentPropName: '#comment', 12 | ignoreAttributes: false, 13 | ignoreNameSpace: false, 14 | parseTagValue: false, 15 | parseNodeValue: false, 16 | parseAttributeValue: false, 17 | trimValues: true, 18 | processEntities: false, 19 | } 20 | const JSON_PARSER_OPTION = { 21 | ...XML_PARSER_OPTION, 22 | format: true, 23 | indentBy: ' ', 24 | suppressBooleanAttributes: false, 25 | suppressEmptyNode: false, 26 | } 27 | 28 | export const xml2Json = (xmlContent: string) => { 29 | // biome-ignore lint/suspicious/noExplicitAny: Any is expected here 30 | let jsonContent: any = {} 31 | if (xmlContent) { 32 | const xmlParser = new XMLParser(XML_PARSER_OPTION) 33 | jsonContent = xmlParser.parse(xmlContent) 34 | } 35 | return jsonContent 36 | } 37 | 38 | export const parseXmlFileToJson = async ( 39 | forRef: FileGitRef, 40 | config: Config 41 | ) => { 42 | const xmlContent = await readPathFromGit(forRef, config) 43 | return xml2Json(xmlContent) 44 | } 45 | 46 | // biome-ignore lint/suspicious/noExplicitAny: Any is expected here 47 | export const convertJsonToXml = (jsonContent: any) => { 48 | const xmlBuilder = new XMLBuilder(JSON_PARSER_OPTION) 49 | return xmlBuilder.build(jsonContent) 50 | } 51 | 52 | export const ATTRIBUTE_PREFIX = '@_' 53 | 54 | export const XML_HEADER_ATTRIBUTE_KEY = '?xml' 55 | -------------------------------------------------------------------------------- /src/service/sharedFolderHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join, parse } from 'node:path/posix' 3 | 4 | import { METAFILE_SUFFIX } from '../constant/metadataConstants.js' 5 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 6 | import { getSharedFolderMetadata } from '../metadata/metadataManager.js' 7 | import { Metadata } from '../types/metadata.js' 8 | import type { Manifest, Work } from '../types/work.js' 9 | import { fillPackageWithParameter } from '../utils/packageHelper.js' 10 | 11 | import StandardHandler from './standardHandler.js' 12 | 13 | export default class SharedFolderHandler extends StandardHandler { 14 | /* jscpd:ignore-start */ 15 | protected 
readonly sharedFolderMetadata: Map<string, string> 16 | 17 | constructor( 18 | line: string, 19 | metadataDef: Metadata, 20 | work: Work, 21 | metadata: MetadataRepository 22 | ) { 23 | super(line, metadataDef, work, metadata) 24 | this.sharedFolderMetadata = getSharedFolderMetadata(this.metadata) 25 | } 26 | /* jscpd:ignore-end */ 27 | 28 | protected override _fillPackage(store: Manifest) { 29 | const type = this.sharedFolderMetadata.get(this.ext!) as string 30 | fillPackageWithParameter({ 31 | store, 32 | type: type, 33 | member: this._getElementName(), 34 | }) 35 | } 36 | 37 | protected override _isProcessable() { 38 | return super._isProcessable() || this.sharedFolderMetadata.has(this.ext) 39 | } 40 | 41 | protected override _getMetaTypeFilePath(path: string) { 42 | const parsedPath = parse(path) 43 | return join( 44 | parsedPath.dir, 45 | `${parsedPath.name}${parsedPath.ext}${METAFILE_SUFFIX}` 46 | ) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/service/customObjectHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join, parse } from 'node:path/posix' 3 | 4 | import { 5 | FIELD_DIRECTORY_NAME, 6 | MASTER_DETAIL_TAG, 7 | OBJECT_TYPE, 8 | } from '../constant/metadataConstants.js' 9 | import asyncFilter from '../utils/asyncFilter.js' 10 | import { pathExists, readDirs, readPathFromGit } from '../utils/fsHelper.js' 11 | import { log } from '../utils/LoggingDecorator.js' 12 | import StandardHandler from './standardHandler.js' 13 | 14 | export default class CustomObjectHandler extends StandardHandler { 15 | @log 16 | public override async handleAddition() { 17 | await super.handleAddition() 18 | if (!this.config.generateDelta) return 19 | await this._handleMasterDetailException() 20 | } 21 | 22 | protected async _handleMasterDetailException() { 23 | if (this.metadataDef.xmlName !== OBJECT_TYPE) return 24 | 25 | const fieldsFolder = join(parse(this.line).dir, FIELD_DIRECTORY_NAME) 26 | const exists = await pathExists(fieldsFolder, this.config) 27 | if (!exists) return 28 | 29 | // QUESTION: Why do we need to add the parent object for a Master Detail field? 
https://help.salesforce.com/s/articleView?id=000386883&type=1 30 | const fields = await readDirs(fieldsFolder, this.config) 31 | const masterDetailsFields = await asyncFilter( 32 | fields, 33 | async (path: string) => { 34 | const content = await readPathFromGit( 35 | { path, oid: this.config.to }, 36 | this.config 37 | ) 38 | return content.includes(MASTER_DETAIL_TAG) 39 | } 40 | ) 41 | for (const masterDetailField of masterDetailsFields) { 42 | await this._copyWithMetaFile(masterDetailField) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/constant/metadataConstants.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | export const CUSTOM_APPLICATION_SUFFIX = 'app' 3 | export const CUSTOM_METADATA_SUFFIX = 'md' 4 | export const EMAIL_SERVICES_FUNCTION_SUFFIX = 'xml' 5 | export const FIELD_DIRECTORY_NAME = 'fields' 6 | export const FLOW_XML_NAME = 'Flow' 7 | export const INFOLDER_SUFFIX = `Folder` 8 | export const LABEL_DECOMPOSED_SUFFIX = 'label' 9 | export const MASTER_DETAIL_TAG = 'MasterDetail' 10 | export const METAFILE_SUFFIX = '-meta.xml' 11 | export const META_REGEX = new RegExp(`${METAFILE_SUFFIX}$`) 12 | export const OBJECT_TRANSLATION_META_XML_SUFFIX = `objectTranslation${METAFILE_SUFFIX}` 13 | export const OBJECT_TRANSLATION_TYPE = 'CustomObjectTranslation' 14 | export const OBJECT_TYPE = 'CustomObject' 15 | export const PERMISSIONSET_OBJECTSETTINGS_FOLDER = 'objectSettings' 16 | export const PERMISSIONSET_TYPE = 'PermissionSet' 17 | export const SHARING_RULE_TYPE = 'SharingRules' 18 | export const SUB_OBJECT_TYPES = [ 19 | 'BusinessProcess', 20 | 'CompactLayout', 21 | 'CustomField', 22 | 'FieldSet', 23 | 'Index', 24 | 'ListView', 25 | 'RecordType', 26 | 'SharingCriteriaRule', 27 | 'SharingGuestRule', 28 | 'SharingOwnerRule', 29 | 'SharingReason', 30 | 'Territory2', 31 | 'Territory2Rule', 32 | 'ValidationRule', 33 | 'WebLink', 34 | 'WorkflowAlert', 35 | 'WorkflowFieldUpdate', 36 | 'WorkflowFlowAction', 37 | 'WorkflowKnowledgePublish', 38 | 'WorkflowOutboundMessage', 39 | 'WorkflowRule', 40 | 'WorkflowSend', 41 | 'WorkflowTask', 42 | ] 43 | export const TERRITORY_MODEL_TYPE = 'Territory2Model' 44 | export const TRANSLATION_EXTENSION = 'translation' 45 | export const TRANSLATION_TYPE = 'Translations' 46 | export const WORKFLOW_TYPE = 'Workflow' 47 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/diffLineInterpreterCompatibility.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { describe, expect, it, jest } from '@jest/globals' 4 | 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import DiffLineInterpreter from '../../../../src/service/diffLineInterpreter' 7 | import type { Work } from '../../../../src/types/work' 8 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 9 | 10 | jest.mock('node:os', () => ({ 11 | availableParallelism: null, 12 | })) 13 | 14 | const mockHandle = jest.fn() 15 | jest.mock('../../../../src/service/typeHandlerFactory', () => { 16 | return { 17 | default: jest.fn().mockImplementation(() => { 18 | return { 19 | getTypeHandler: jest 20 | .fn() 21 | .mockImplementation(() => ({ handle: mockHandle })), 22 | } 23 | }), 24 | } 25 | }) 26 | 27 | let work: Work 28 | beforeEach(() => { 29 | jest.clearAllMocks() 30 | work = getWork() 31 | }) 32 | 33 
| describe('DiffLineInterpreter', () => { 34 | let sut: DiffLineInterpreter 35 | let globalMetadata: MetadataRepository 36 | beforeAll(async () => { 37 | globalMetadata = await getGlobalMetadata() 38 | }) 39 | 40 | describe('compatibility test', () => { 41 | beforeEach(() => { 42 | sut = new DiffLineInterpreter(work, globalMetadata) 43 | }) 44 | describe('when `availableParallelism` is not defined', () => { 45 | it('fallback gracefully', async () => { 46 | // Arrange 47 | const lines = ['test'] 48 | 49 | // Act 50 | await sut.process(lines) 51 | 52 | // Assert 53 | expect(mockHandle).toHaveBeenCalledTimes(lines.length) 54 | }) 55 | }) 56 | }) 57 | }) 58 | -------------------------------------------------------------------------------- /src/types/ignore.d.ts: -------------------------------------------------------------------------------- 1 | type Pathname = string 2 | 3 | interface TestResult { 4 | ignored: boolean 5 | unignored: boolean 6 | } 7 | 8 | declare module 'ignore' { 9 | export interface Ignore { 10 | /** 11 | * Adds one or several rules to the current manager. 12 | * @param {string[]} patterns 13 | * @returns IgnoreBase 14 | */ 15 | add(patterns: string | Ignore | readonly (string | Ignore)[]): this 16 | 17 | /** 18 | * Filters the given array of pathnames, and returns the filtered array. 19 | * NOTICE that each path here should be a relative path to the root of your repository. 20 | * @param paths the array of paths to be filtered. 21 | * @returns The filtered array of paths 22 | */ 23 | filter(pathnames: readonly Pathname[]): Pathname[] 24 | 25 | /** 26 | * Creates a filter function which could filter 27 | * an array of paths with Array.prototype.filter. 28 | */ 29 | createFilter(): (pathname: Pathname) => boolean 30 | 31 | /** 32 | * Returns Boolean whether pathname should be ignored. 33 | * @param {string} pathname a path to check 34 | * @returns boolean 35 | */ 36 | ignores(pathname: Pathname): boolean 37 | 38 | /** 39 | * Returns whether pathname should be ignored or unignored 40 | * @param {string} pathname a path to check 41 | * @returns TestResult 42 | */ 43 | test(pathname: Pathname): TestResult 44 | } 45 | 46 | export interface Options { 47 | ignorecase?: boolean 48 | // For compatibility 49 | ignoreCase?: boolean 50 | allowRelativePaths?: boolean 51 | } 52 | 53 | /** 54 | * Creates new ignore manager. 
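* @example Illustrative sketch only, based on the methods declared in this interface: `ignore().add(['node_modules/', '*.log'])` then `ignores('node_modules/pkg/index.js')` returns `true`, and `filter(['src/main.ts', 'debug.log'])` returns `['src/main.ts']`.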
55 | */ 56 | export default function ignore(options?: Options): Ignore 57 | } 58 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/decomposedHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import DecomposedHandler from '../../../../src/service/decomposedHandler' 6 | import type { Work } from '../../../../src/types/work' 7 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 8 | 9 | jest.mock('../../../../src/utils/fsHelper') 10 | 11 | const recordTypeType = { 12 | directoryName: 'recordTypes', 13 | inFolder: false, 14 | metaFile: false, 15 | suffix: 'recordType', 16 | xmlName: 'RecordType', 17 | } 18 | const line = 19 | 'A force-app/main/default/objects/Account/recordTypes/Test.recordType-meta.xml' 20 | 21 | let globalMetadata: MetadataRepository 22 | beforeAll(async () => { 23 | globalMetadata = await getGlobalMetadata() 24 | }) 25 | 26 | let work: Work 27 | beforeEach(() => { 28 | jest.clearAllMocks() 29 | work = getWork() 30 | work.config.generateDelta = false 31 | }) 32 | 33 | describe('DecomposedHandler', () => { 34 | describe.each([ 35 | 'handleAddition', 36 | 'handleDeletion', 37 | 'handleModification', 38 | ])('in %s case', method => { 39 | it('element name should have the parent metadata', async () => { 40 | // Arrange 41 | const sut = new DecomposedHandler( 42 | line, 43 | recordTypeType, 44 | work, 45 | globalMetadata 46 | ) 47 | const expectSubject = 48 | method === 'handleDeletion' 49 | ? work.diffs.destructiveChanges 50 | : work.diffs.package 51 | 52 | // Act 53 | await sut[method as keyof DecomposedHandler]() 54 | 55 | // Assert 56 | expect(expectSubject.get('RecordType')).toContain('Account.Test') 57 | }) 58 | }) 59 | }) 60 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/diffLineInterpreter.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import DiffLineInterpreter from '../../../../src/service/diffLineInterpreter' 6 | import type { Work } from '../../../../src/types/work' 7 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 8 | 9 | const mockHandle = jest.fn() 10 | jest.mock('../../../../src/service/typeHandlerFactory', () => { 11 | return { 12 | default: jest.fn().mockImplementation(() => { 13 | return { 14 | getTypeHandler: jest 15 | .fn() 16 | .mockImplementation(() => ({ handle: mockHandle })), 17 | } 18 | }), 19 | } 20 | }) 21 | 22 | let work: Work 23 | beforeEach(() => { 24 | jest.clearAllMocks() 25 | work = getWork() 26 | }) 27 | 28 | describe('DiffLineInterpreter', () => { 29 | let sut: DiffLineInterpreter 30 | let globalMetadata: MetadataRepository 31 | beforeAll(async () => { 32 | globalMetadata = await getGlobalMetadata() 33 | }) 34 | 35 | beforeEach(() => { 36 | sut = new DiffLineInterpreter(work, globalMetadata) 37 | }) 38 | 39 | describe('when called with lines', () => { 40 | it('process each lines', async () => { 41 | // Arrange 42 | const lines = ['test'] 43 | 44 | // Act 45 | await sut.process(lines) 46 | 47 | // Assert 48 | 
expect(mockHandle).toHaveBeenCalledTimes(lines.length) 49 | }) 50 | }) 51 | 52 | describe('when called without lines', () => { 53 | it('it does not process anything', async () => { 54 | // Arrange 55 | const lines: string[] = [] 56 | 57 | // Act 58 | await sut.process(lines) 59 | 60 | // Assert 61 | expect(mockHandle).not.toHaveBeenCalled() 62 | }) 63 | }) 64 | }) 65 | -------------------------------------------------------------------------------- /src/utils/packageHelper.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { create } from 'xmlbuilder2' 3 | 4 | import { OBJECT_TYPE } from '../constant/metadataConstants.js' 5 | import type { Config } from '../types/config.js' 6 | import type { Manifest } from '../types/work.js' 7 | import { log } from './LoggingDecorator.js' 8 | 9 | const xmlConf = { indent: ' ', newline: '\n', prettyPrint: true } 10 | const frLocale = 'fr' 11 | 12 | export default class PackageBuilder { 13 | constructor(protected readonly config: Config) {} 14 | 15 | @log 16 | public buildPackage(strucDiffPerType: Manifest) { 17 | const xml = create({ version: '1.0', encoding: 'UTF-8' }).ele('Package', { 18 | xmlns: 'http://soap.sforce.com/2006/04/metadata', 19 | }) 20 | Array.from(strucDiffPerType.keys()) 21 | .sort(this._sortTypesWithMetadata) 22 | .forEach(metadataType => 23 | [...strucDiffPerType.get(metadataType)!] 24 | .sort(Intl.Collator(frLocale).compare) 25 | .reduce((type, member) => { 26 | type.ele('members').txt(member) 27 | return type 28 | }, xml.ele('types')) 29 | .ele('name') 30 | .txt(metadataType) 31 | ) 32 | xml.ele('version').txt(`${this.config.apiVersion}.0`) 33 | return xml.end(xmlConf) 34 | } 35 | 36 | _sortTypesWithMetadata = (x: string, y: string) => { 37 | // QUESTION: Why Object needs to be ordered first in package.xml so it can be deployed ? 
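// (Descriptive note, added for clarity: the comparator below treats CustomObject as smaller than any other type when it is the left-hand argument, and otherwise falls back to a French-locale Intl.Collator comparison of the two type names.)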
38 | if (x === OBJECT_TYPE) return -1 // @deprecated To remove when the order will not impact the result of the deployment 39 | return new Intl.Collator(frLocale).compare(x, y) 40 | } 41 | } 42 | 43 | export const fillPackageWithParameter = ({ 44 | store, 45 | type, 46 | member, 47 | }: { 48 | store: Manifest 49 | type: string 50 | member: string 51 | }) => { 52 | if (!store.has(type)) { 53 | store.set(type, new Set()) 54 | } 55 | store.get(type)?.add(member) 56 | } 57 | -------------------------------------------------------------------------------- /__tests__/unit/lib/utils/gitLfsHelper.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it } from '@jest/globals' 3 | 4 | import { 5 | getLFSObjectContentPath, 6 | isLFS, 7 | } from '../../../../src/utils/gitLfsHelper' 8 | 9 | describe('isLFS', () => { 10 | it('returns true when called with LFS file', async () => { 11 | // Arrange 12 | const lfsFileContent = 13 | Buffer.from(`version https://git-lfs.github.com/spec/v1 14 | oid sha256:0a4ca7e5eca75024197fff96ef7e5de1b2ca35d6c058ce76e7e0d84bee1c8b14 15 | size 72`) 16 | 17 | // Act 18 | const result = isLFS(lfsFileContent) 19 | 20 | // Assert 21 | expect(result).toBe(true) 22 | }) 23 | it('returns false when called with normal file', async () => { 24 | // Arrange 25 | const lfsFileContent = Buffer.from(`not lfs file`) 26 | 27 | // Act 28 | const result = isLFS(lfsFileContent) 29 | 30 | // Assert 31 | expect(result).toBe(false) 32 | }) 33 | }) 34 | 35 | describe('getLFSObjectContentPath', () => { 36 | it('with LFS content, it creates LFS file path', async () => { 37 | // Arrange 38 | const lfsFileContent = 39 | Buffer.from(`version https://git-lfs.github.com/spec/v1 40 | oid sha256:0a4ca7e5eca75024197fff96ef7e5de1b2ca35d6c058ce76e7e0d84bee1c8b14 41 | size 72`) 42 | 43 | // Act 44 | const lfsFilePath = await getLFSObjectContentPath(lfsFileContent) 45 | 46 | // Assert 47 | expect(lfsFilePath).toBe( 48 | '.git/lfs/objects/0a/4c/0a4ca7e5eca75024197fff96ef7e5de1b2ca35d6c058ce76e7e0d84bee1c8b14' 49 | ) 50 | }) 51 | 52 | it('without LFS content, it creates LFS file path', async () => { 53 | // Arrange 54 | expect.assertions(1) 55 | const lfsFileContent = Buffer.from(`not lfs file`) 56 | 57 | // Act 58 | try { 59 | await getLFSObjectContentPath(lfsFileContent) 60 | } catch (e) { 61 | // Assert 62 | expect(e).toBeDefined() 63 | } 64 | }) 65 | }) 66 | -------------------------------------------------------------------------------- /src/service/objectTranslationHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { parse } from 'node:path/posix' 3 | 4 | import { PATH_SEP } from '../constant/fsConstants.js' 5 | import { OBJECT_TRANSLATION_META_XML_SUFFIX } from '../constant/metadataConstants.js' 6 | import { getInFileAttributes } from '../metadata/metadataManager.js' 7 | import { writeFile } from '../utils/fsHelper.js' 8 | import { log } from '../utils/LoggingDecorator.js' 9 | import MetadataDiff from '../utils/metadataDiff.js' 10 | import ResourceHandler from './inResourceHandler.js' 11 | import StandardHandler from './standardHandler.js' 12 | 13 | export default class ObjectTranslationHandler extends ResourceHandler { 14 | @log 15 | public override async handleAddition() { 16 | await StandardHandler.prototype.handleAddition.apply(this) 17 | if (!this.config.generateDelta) return 18 | 19 | const objectTranslationPath = this._getObjectTranslationPath() 20 
| await this._copyObjectTranslation(objectTranslationPath) 21 | } 22 | 23 | protected async _copyObjectTranslation(path: string) { 24 | const inFileMetadata = getInFileAttributes(this.metadata) 25 | const metadataDiff = new MetadataDiff(this.config, inFileMetadata) 26 | await metadataDiff.compare(path) 27 | const { xmlContent } = metadataDiff.prune() 28 | await writeFile(path, xmlContent, this.config) 29 | } 30 | 31 | protected _getObjectTranslationPath() { 32 | // Return Object Translation Path for both objectTranslation and fieldTranslation 33 | // QUESTION: Why fieldTranslation element are not deployable when objectTranslation element is not in the deployed sources (even if objectTranslation file is empty) ? 34 | return `${parse(this.line).dir}${PATH_SEP}${ 35 | this.splittedLine[this.splittedLine.length - 2] 36 | }.${OBJECT_TRANSLATION_META_XML_SUFFIX}` 37 | } 38 | 39 | protected override _delegateFileCopy() { 40 | return !this.line.endsWith(OBJECT_TRANSLATION_META_XML_SUFFIX) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/on-merged-pull-request.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Package dev version cleaner 3 | 4 | on: 5 | pull_request_target: 6 | branches: 7 | - main 8 | paths-ignore: 9 | - "**.md" 10 | - "img/**" 11 | types: 12 | - closed 13 | 14 | jobs: 15 | clean-npm-dev-version: 16 | if: ${{ github.event.pull_request.merged }} 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Checkout sources 20 | uses: actions/checkout@v4 21 | 22 | - name: Setup node 23 | uses: actions/setup-node@v4 24 | with: 25 | node-version: 20 26 | registry-url: 'https://registry.npmjs.org' 27 | 28 | - uses: jwalton/gh-find-current-pr@master 29 | id: pr-number 30 | with: 31 | state: closed 32 | 33 | - name: Set dev channel value 34 | run: | 35 | echo "CURRENT_VERSION=$(jq -r '.version' package.json)" >> "$GITHUB_ENV" 36 | echo "DEV_CHANNEL=dev-${{ steps.pr-number.outputs.pr }}" >> "$GITHUB_ENV" 37 | 38 | - name: Remove dist-tag 39 | run: npm dist-tag rm sfdx-git-delta ${{ env.DEV_CHANNEL }} 40 | env: 41 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 42 | 43 | - name: Deprecate related dev versions 44 | run: | 45 | DEV_VERSIONS=$(npm view sfdx-git-delta versions --json | jq -r '.[]' | grep -E "${{ env.CURRENT_VERSION}}-${{ env.DEV_CHANNEL }}") 46 | [ -n "$DEV_VERSIONS" ] && for DEV_VERSION in ${DEV_VERSIONS}; do npm deprecate "sfdx-git-delta@${DEV_VERSION}" "Deprecated dev version"; done 47 | env: 48 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 49 | 50 | - name: Delete package dev channel PR comment 51 | uses: thollander/actions-comment-pull-request@v3 52 | with: 53 | message: | 54 | Published under `${{ env.DEV_CHANNEL }}` npm channel. 55 | ```sh 56 | $ sf plugins install sfdx-git-delta@${{ env.DEV_CHANNEL }} 57 | ``` 58 | comment_tag: dev-publish 59 | mode: delete 60 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Log an issue 3 | about: Use this template for tracking new bugs. 4 | title: '[BUG NAME]' 5 | labels: bug 6 | assignees: scolladon 7 | --- 8 | 9 | Issue verification check: 10 | 11 | - [ ] is the current repository fully deployable at the commit SHA provided with the 'from' parameter of the command? 12 | 13 | ## What is the problem? 
14 | 15 | --- 16 | 17 | 20 | 21 | ### What is the parameter and the value you used with it? 22 | 23 | 27 | 28 | ### What is the expected result? 29 | 30 | 34 | 35 | ### What is the actual result? 36 | 37 | 41 | 42 | ## Steps to reproduce 43 | 44 | --- 45 | 46 | 56 | 57 | ## Execution context 58 | 59 | --- 60 | 61 | 64 | 65 | **Operating System:** … 66 | 67 | **npm version:** … 68 | 69 | **node version:** … 70 | 71 | **git version:** … 72 | 73 | **sf version:** … 74 | 75 | **sgd plugin version:** … 76 | 77 | ## More information (optional) 78 | 79 | --- 80 | 81 | 87 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/inBundleHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import InBundleHandler from '../../../../src/service/inBundleHandler' 6 | import type { Work } from '../../../../src/types/work' 7 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 8 | 9 | const objectType = { 10 | directoryName: 'digitalExperiences', 11 | inFolder: false, 12 | metaFile: true, 13 | suffix: 'digitalExperience', 14 | xmlName: 'DigitalExperienceBundle', 15 | } 16 | const entityPath = 17 | 'force-app/main/default/digitalExperiences/site/component.digitalExperience-meta.xml' 18 | const line = `A ${entityPath}` 19 | 20 | let work: Work 21 | beforeEach(() => { 22 | jest.clearAllMocks() 23 | work = getWork() 24 | }) 25 | 26 | describe('InBundleHandler', () => { 27 | let globalMetadata: MetadataRepository 28 | beforeAll(async () => { 29 | globalMetadata = await getGlobalMetadata() 30 | }) 31 | 32 | describe('_getElementName', () => { 33 | describe('when called with meta file', () => { 34 | it('returns /', () => { 35 | // Arrange 36 | const sut = new InBundleHandler(line, objectType, work, globalMetadata) 37 | 38 | // Act 39 | const result = sut['_getElementName']() 40 | 41 | // Assert 42 | expect(result).toEqual('site/component') 43 | }) 44 | }) 45 | 46 | describe('when called with sub workspace file', () => { 47 | it('returns /', () => { 48 | // Arrange 49 | const entityPath = 50 | 'force-app/main/default/digitalExperiences/site/component/workspace/file.json' 51 | const line = `A ${entityPath}` 52 | const sut = new InBundleHandler(line, objectType, work, globalMetadata) 53 | 54 | // Act 55 | const result = sut['_getElementName']() 56 | 57 | // Assert 58 | expect(result).toEqual('site/component') 59 | }) 60 | }) 61 | }) 62 | }) 63 | -------------------------------------------------------------------------------- /src/service/inFolderHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join, parse } from 'node:path/posix' 3 | 4 | import { EXTENSION_SUFFIX_REGEX, PATH_SEP } from '../constant/fsConstants.js' 5 | import { 6 | INFOLDER_SUFFIX, 7 | META_REGEX, 8 | METAFILE_SUFFIX, 9 | } from '../constant/metadataConstants.js' 10 | import { readDirs } from '../utils/fsHelper.js' 11 | import { log } from '../utils/LoggingDecorator.js' 12 | import StandardHandler from './standardHandler.js' 13 | 14 | const INFOLDER_SUFFIX_REGEX = new RegExp(`${INFOLDER_SUFFIX}$`) 15 | export default class InFolderHandler extends StandardHandler { 16 | @log 17 | public override async handleAddition() { 18 | await super.handleAddition() 19 | if 
(!this.config.generateDelta) return 20 | await this._copyFolderMetaFile() 21 | await this._copySpecialExtension() 22 | } 23 | 24 | protected async _copyFolderMetaFile() { 25 | const [, folderPath, folderName] = this._parseLine()! 26 | 27 | const suffix = folderName.endsWith(INFOLDER_SUFFIX) 28 | ? '' 29 | : `.${this.metadataDef.suffix!.toLowerCase()}` 30 | 31 | const folderFileName = `${folderName}${suffix}${METAFILE_SUFFIX}` 32 | 33 | await this._copyWithMetaFile(join(folderPath, folderFileName)) 34 | } 35 | 36 | protected async _copySpecialExtension() { 37 | const parsedLine = parse(this.line) 38 | const dirContent = await readDirs(parsedLine.dir, this.config) 39 | 40 | await Promise.all( 41 | dirContent 42 | .filter((file: string) => file.includes(parsedLine.name)) 43 | .map((file: string) => this._copyWithMetaFile(file)) 44 | ) 45 | } 46 | 47 | protected override _getElementName() { 48 | return this.splittedLine 49 | .slice(this.splittedLine.indexOf(this.metadataDef.directoryName) + 1) 50 | .join(PATH_SEP) 51 | .replace(META_REGEX, '') 52 | .replace(INFOLDER_SUFFIX_REGEX, '') 53 | .replace(EXTENSION_SUFFIX_REGEX, '') 54 | } 55 | 56 | protected override _isProcessable() { 57 | return ( 58 | super._isProcessable() || 59 | this._parentFolderIsNotTheType() || 60 | this.ext!.endsWith(INFOLDER_SUFFIX) 61 | ) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/utils/LoggingDecorator.ts: -------------------------------------------------------------------------------- 1 | /** biome-ignore-all lint/suspicious/noExplicitAny: it is dynamic by definition */ 2 | import { Logger, lazy } from './LoggingService.js' 3 | 4 | export function stringify(value: unknown): string { 5 | if (hasCustomToString(value)) { 6 | return value.toString() 7 | } 8 | return JSON.stringify(value, replacer) 9 | } 10 | 11 | function replacer(_key: string, value: unknown): unknown { 12 | if (value instanceof Map) { 13 | return Array.from(value.entries()) 14 | } 15 | if (value instanceof Set) { 16 | return Array.from(value) 17 | } 18 | return value 19 | } 20 | 21 | export function hasCustomToString( 22 | obj: unknown 23 | ): obj is { toString: () => string } { 24 | if (obj === null || typeof obj !== 'object') return false 25 | 26 | const toStringFn = (obj as any).toString 27 | if (typeof toStringFn !== 'function') return false 28 | 29 | if (Object.hasOwn(obj, 'toString')) { 30 | return toStringFn !== Object.prototype.toString 31 | } 32 | const proto = Object.getPrototypeOf(obj) 33 | const protoToString = proto.toString 34 | return ( 35 | typeof protoToString === 'function' && 36 | protoToString !== Object.prototype.toString 37 | ) 38 | } 39 | 40 | export function log( 41 | target: any, 42 | propertyKey: string, 43 | descriptor: PropertyDescriptor 44 | ): void { 45 | const original = descriptor.value 46 | 47 | descriptor.value = function (...args: any[]) { 48 | Logger.trace(lazy`${target.constructor.name}.${propertyKey}: entry`) 49 | Logger.debug( 50 | lazy`${target.constructor.name}.${propertyKey}: arguments : ${stringify(args)}` 51 | ) 52 | 53 | const call = () => original.call(this, ...args) 54 | 55 | const logResult = (result: any) => { 56 | Logger.debug( 57 | lazy`${target.constructor.name}.${propertyKey}: result : ${stringify(result)}` 58 | ) 59 | Logger.trace(lazy`${target.constructor.name}.${propertyKey}: exit`) 60 | return result 61 | } 62 | 63 | if (original.constructor.name === 'AsyncFunction') { 64 | return call().then(logResult) 65 | } else { 66 | return 
logResult(call()) 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/post-processor/packageGenerator.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { join } from 'node:path/posix' 4 | 5 | import { outputFile } from 'fs-extra' 6 | import { log } from '../utils/LoggingDecorator.js' 7 | import PackageBuilder from '../utils/packageHelper.js' 8 | import BaseProcessor from './baseProcessor.js' 9 | 10 | const DESTRUCTIVE_CHANGES_FILE_NAME = 'destructiveChanges' 11 | const PACKAGE_FILE_NAME = 'package' 12 | const XML_FILE_EXTENSION = 'xml' 13 | 14 | export default class PackageGenerator extends BaseProcessor { 15 | @log 16 | public override async process() { 17 | this._cleanPackages() 18 | await this._buildPackages() 19 | } 20 | 21 | protected _cleanPackages() { 22 | const additive = this.work.diffs[PACKAGE_FILE_NAME] 23 | const destructive = this.work.diffs[DESTRUCTIVE_CHANGES_FILE_NAME] 24 | for (const [type, members] of additive) { 25 | if (destructive.has(type)) { 26 | destructive.set( 27 | type, 28 | new Set( 29 | [...destructive.get(type)!].filter(element => !members.has(element)) 30 | ) 31 | ) 32 | if (destructive.get(type)!.size === 0) { 33 | destructive.delete(type) 34 | } 35 | } 36 | } 37 | } 38 | 39 | protected async _buildPackages() { 40 | const pc = new PackageBuilder(this.config) 41 | await Promise.all( 42 | [ 43 | { 44 | filename: `${DESTRUCTIVE_CHANGES_FILE_NAME}.${XML_FILE_EXTENSION}`, 45 | folder: DESTRUCTIVE_CHANGES_FILE_NAME, 46 | manifest: this.work.diffs[DESTRUCTIVE_CHANGES_FILE_NAME], 47 | }, 48 | { 49 | filename: `${PACKAGE_FILE_NAME}.${XML_FILE_EXTENSION}`, 50 | folder: PACKAGE_FILE_NAME, 51 | manifest: this.work.diffs[PACKAGE_FILE_NAME], 52 | }, 53 | { 54 | filename: `${PACKAGE_FILE_NAME}.${XML_FILE_EXTENSION}`, 55 | folder: DESTRUCTIVE_CHANGES_FILE_NAME, 56 | manifest: new Map(), 57 | }, 58 | ].map(op => { 59 | return outputFile( 60 | join(this.config.output, op.folder, op.filename), 61 | pc.buildPackage(op.manifest) as string 62 | ) 63 | }) 64 | ) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /PUBLISHING.md: -------------------------------------------------------------------------------- 1 | # Publishing SFDX-Git-Delta to npm 2 | 3 | Here are the steps to publish a version. 4 | 5 | ## Release 6 | 7 | This repository uses the [release-please](https://github.com/googleapis/release-please-action) workflow. 8 | 9 | Merge the release pull request to create a new version; it will take care of: 10 | - defining the next version number 11 | - updating the `package.json` version attribute 12 | - creating the changelog 13 | - creating a version tag 14 | - creating a GitHub release 15 | - publishing the new version to npm 16 | - setting the `latest-rc` npm channel to point to the new version 17 | 18 | ## Update tag version 19 | 20 | ```sh 21 | npm dist-tag add sfdx-git-delta@<version-target> <version-source> 22 | ``` 23 | 24 | It will point the `version-source` release channel to the `version-target` specific release (`vX.Y.Z`). 25 | 26 | **Update `v5.6.0` to be `latest`:** 27 | To be performed once the current `latest-rc` version (`v5.6.0` at that time) is considered stable enough. The `latest` version is the one installed by default with the `sf plugins install sfdx-git-delta` command. 
28 | ```sh 29 | npm dist-tag add sfdx-git-delta@v5.6.0 latest 30 | ``` 31 | 32 | **Update `v5.5.0` to be `stable`:** 33 | To be performed once the current `latest` version (`v5.5.0` at that time) is considered stable enough. 34 | ```sh 35 | npm dist-tag add sfdx-git-delta@v5.5.0 stable 36 | ``` 37 | 38 | **Rollback**: 39 | Use this command only if something is wrong with the current `latest` version, and you need to roll it back to a previous version (to `v5.0.0` in this example). 40 | ```sh 41 | npm dist-tag add sfdx-git-delta@v5.0.0 latest 42 | ``` 43 | 44 | Use the **"Manage Versions"** manual GitHub action to do the same thing with point & click. 45 | 46 | ## Deprecate version expression 47 | 48 | ```sh 49 | npm deprecate sfdx-git-delta@<version-expression> "<message>" 50 | ``` 51 | 52 | It will deprecate the `version-expression` with the `message`. 53 | The `version-expression` can either be a specific version (`vX.Y.Z`) 54 | or a [version range](https://semver.npmjs.com/). 55 | 56 | Do not specify a `message` ("") to un-deprecate a version expression: 57 | ```sh 58 | npm deprecate sfdx-git-delta@<version-expression> "" 59 | ``` 60 | 61 | Use the **"Deprecate Versions"** manual GitHub action to do the same thing with point & click. 62 | -------------------------------------------------------------------------------- /src/utils/fsHelper.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join } from 'node:path/posix' 3 | 4 | import { outputFile } from 'fs-extra' 5 | 6 | import GitAdapter from '../adapter/GitAdapter.js' 7 | import type { Config } from '../types/config.js' 8 | import type { FileGitRef } from '../types/git.js' 9 | 10 | import { buildIgnoreHelper } from './ignoreHelper.js' 11 | 12 | const copiedFiles = new Set<string>() 13 | const writtenFiles = new Set<string>() 14 | 15 | export const copyFiles = async (config: Config, src: string) => { 16 | if (copiedFiles.has(src) || writtenFiles.has(src)) { 17 | return 18 | } 19 | copiedFiles.add(src) 20 | 21 | const ignoreHelper = await buildIgnoreHelper(config) 22 | if (ignoreHelper.globalIgnore.ignores(src)) { 23 | return 24 | } 25 | try { 26 | const gitAdapter = GitAdapter.getInstance(config) 27 | for await (const file of gitAdapter.getFilesFrom(src)) { 28 | // Use Buffer to output the file content 29 | // Let fs implementation detect the encoding ("utf8" or "binary") 30 | const dst = join(config.output, file.path) 31 | await outputFile(dst, file.content) 32 | copiedFiles.add(dst) 33 | } 34 | } catch { 35 | /* empty */ 36 | } 37 | } 38 | 39 | export const readPathFromGit = async (forRef: FileGitRef, config: Config) => { 40 | let utf8Data = '' 41 | try { 42 | const gitAdapter = GitAdapter.getInstance(config) 43 | utf8Data = await gitAdapter.getStringContent(forRef) 44 | } catch { 45 | /* empty */ 46 | } 47 | return utf8Data 48 | } 49 | 50 | export const pathExists = async (path: string, config: Config) => { 51 | const gitAdapter = GitAdapter.getInstance(config) 52 | return await gitAdapter.pathExists(path) 53 | } 54 | 55 | export const readDirs = async ( 56 | paths: string | string[], 57 | config: Config 58 | ): Promise<string[]> => { 59 | const gitAdapter = GitAdapter.getInstance(config) 60 | return await gitAdapter.getFilesPath(paths) 61 | } 62 | 63 | export const writeFile = async ( 64 | path: string, 65 | content: string, 66 | config: Config 67 | ) => { 68 | if (writtenFiles.has(path)) { 69 | return 70 | } 71 | writtenFiles.add(path) 72 | 73 | const ignoreHelper = await buildIgnoreHelper(config) 74 | if 
(ignoreHelper.globalIgnore.ignores(path)) { 75 | return 76 | } 77 | await outputFile(join(config.output, path), content) 78 | } 79 | -------------------------------------------------------------------------------- /src/utils/LoggingService.ts: -------------------------------------------------------------------------------- 1 | import { Logger as CoreLogger, LoggerLevel } from '@salesforce/core' 2 | import { PLUGIN_NAME } from '../constant/libConstant.js' 3 | 4 | type LoggerMessage<T> = T | (() => T) 5 | 6 | function resolveLoggerMessage<T>(message: LoggerMessage<T>): T { 7 | return typeof message === 'function' ? (message as () => T)() : message 8 | } 9 | 10 | // biome-ignore lint/suspicious/noExplicitAny: Any is expected here 11 | export function lazy(strings: TemplateStringsArray, ...exprs: any[]) { 12 | const getters = exprs.map(expr => { 13 | if (typeof expr === 'function') return expr 14 | return () => expr 15 | }) 16 | 17 | return () => 18 | strings.reduce( 19 | (acc, str, i) => acc + str + (i < getters.length ? getters[i]() : ''), 20 | '' 21 | ) 22 | } 23 | 24 | export class Logger { 25 | private static coreLogger: CoreLogger = (() => { 26 | const coreLogger = CoreLogger.childFromRoot(PLUGIN_NAME) 27 | coreLogger.setLevel() 28 | return coreLogger 29 | })() 30 | 31 | static debug(message: LoggerMessage<string>, meta?: unknown): void { 32 | if (Logger.coreLogger.shouldLog(LoggerLevel.DEBUG)) { 33 | const content = resolveLoggerMessage(message) 34 | Logger.coreLogger.debug(content, meta) 35 | } 36 | } 37 | 38 | static error(message: LoggerMessage<string>, meta?: unknown): void { 39 | if (Logger.coreLogger.shouldLog(LoggerLevel.ERROR)) { 40 | const content = resolveLoggerMessage(message) 41 | Logger.coreLogger.error(content, meta) 42 | } 43 | } 44 | 45 | static info(message: LoggerMessage<string>, meta?: unknown): void { 46 | if (Logger.coreLogger.shouldLog(LoggerLevel.INFO)) { 47 | const content = resolveLoggerMessage(message) 48 | Logger.coreLogger.info(content, meta) 49 | } 50 | } 51 | 52 | static trace(message: LoggerMessage<string>, meta?: unknown): void { 53 | if (Logger.coreLogger.shouldLog(LoggerLevel.TRACE)) { 54 | const content = resolveLoggerMessage(message) 55 | Logger.coreLogger.trace(content, meta) 56 | } 57 | } 58 | 59 | static warn(message: LoggerMessage<string>, meta?: unknown): void { 60 | if (Logger.coreLogger.shouldLog(LoggerLevel.WARN)) { 61 | const content = resolveLoggerMessage(message) 62 | Logger.coreLogger.warn(content, meta) 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/utils/repoGitDiff.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import GitAdapter from '../adapter/GitAdapter.js' 3 | import { ADDITION, DELETION } from '../constant/gitConstants.js' 4 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 5 | import type { Config } from '../types/config.js' 6 | 7 | import { buildIgnoreHelper } from './ignoreHelper.js' 8 | 9 | export default class RepoGitDiff { 10 | protected readonly gitAdapter: GitAdapter 11 | 12 | constructor( 13 | protected readonly config: Config, 14 | protected readonly metadata: MetadataRepository 15 | ) { 16 | this.gitAdapter = GitAdapter.getInstance(this.config) 17 | } 18 | 19 | public async getLines() { 20 | const lines = await this.gitAdapter.getDiffLines() 21 | const treatedLines = await this._treatResult(lines) 22 | return treatedLines 23 | } 24 | 25 | protected async _treatResult(lines: string[]): Promise<string[]> { 26 | const renamedElements = 
this._getRenamedElements(lines) 27 | 28 | const ignoreHelper = await buildIgnoreHelper(this.config) 29 | 30 | return lines 31 | .filter(Boolean) 32 | .filter((line: string) => this._filterInternal(line, renamedElements)) 33 | .filter((line: string) => ignoreHelper.keep(line)) 34 | } 35 | 36 | protected _getRenamedElements(lines: string[]) { 37 | const linesPerDiffType: Map = 38 | this._spreadLinePerDiffType(lines) 39 | const AfileNames: Set = new Set( 40 | linesPerDiffType 41 | .get(ADDITION) 42 | ?.map(line => this._extractComparisonName(line)) ?? [] 43 | ) 44 | const deletedRenamed: string[] = [ 45 | ...(linesPerDiffType.get(DELETION) ?? []), 46 | ].filter((line: string) => { 47 | const dEl = this._extractComparisonName(line) 48 | return AfileNames.has(dEl) 49 | }) 50 | 51 | return deletedRenamed 52 | } 53 | protected _spreadLinePerDiffType(lines: string[]) { 54 | return lines.reduce((acc: Map, line: string) => { 55 | const idx: string = line.charAt(0) 56 | if (!acc.has(idx)) { 57 | acc.set(idx, []) 58 | } 59 | acc.get(idx)!.push(line) 60 | return acc 61 | }, new Map()) 62 | } 63 | 64 | protected _filterInternal(line: string, deletedRenamed: string[]): boolean { 65 | return !deletedRenamed.includes(line) && this.metadata.has(line) 66 | } 67 | 68 | protected _extractComparisonName(line: string) { 69 | return this.metadata.getFullyQualifiedName(line).toLocaleLowerCase() 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/customObjectChildHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 3 | import CustomObjectChildHandler from '../../../../src/service/customObjectChildHandler' 4 | import type { Work } from '../../../../src/types/work' 5 | import { copyFiles, readPathFromGit } from '../../../../src/utils/fsHelper' 6 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 7 | 8 | jest.mock('../../../../src/utils/fsHelper') 9 | 10 | const mockedReadPathFromGit = jest.mocked(readPathFromGit) 11 | 12 | const objectType = { 13 | directoryName: 'recordTypes', 14 | inFolder: false, 15 | metaFile: false, 16 | suffix: 'recordType', 17 | parentXmlName: 'CustomObject', 18 | xmlName: 'RecordType', 19 | } 20 | const line = 21 | 'A force-app/main/default/objects/Account/recordTypes/awesome.recordType-meta.xml' 22 | 23 | let work: Work 24 | beforeEach(() => { 25 | jest.clearAllMocks() 26 | work = getWork() 27 | }) 28 | 29 | describe('CustomFieldHandler', () => { 30 | let globalMetadata: MetadataRepository 31 | beforeAll(async () => { 32 | globalMetadata = await getGlobalMetadata() 33 | }) 34 | 35 | describe('when called with generateDelta false', () => { 36 | it('should not handle master detail exception', async () => { 37 | // Arrange 38 | work.config.generateDelta = false 39 | const sut = new CustomObjectChildHandler( 40 | line, 41 | objectType, 42 | work, 43 | globalMetadata 44 | ) 45 | 46 | // Act 47 | await sut.handleAddition() 48 | 49 | // Assert 50 | expect(copyFiles).not.toHaveBeenCalled() 51 | expect(work.diffs.package.get('RecordType')).toEqual( 52 | new Set(['Account.awesome']) 53 | ) 54 | }) 55 | }) 56 | describe('when called with generateDelta true', () => { 57 | describe(`when field is not master detail`, () => { 58 | it('should not handle master detail exception', async () => { 59 | // Arrange 60 | 
mockedReadPathFromGit.mockResolvedValueOnce('') 61 | const sut = new CustomObjectChildHandler( 62 | line, 63 | objectType, 64 | work, 65 | globalMetadata 66 | ) 67 | 68 | // Act 69 | await sut.handleAddition() 70 | 71 | // Assert 72 | expect(copyFiles).toHaveBeenCalledTimes(1) 73 | expect(work.diffs.package.get('RecordType')).toEqual( 74 | new Set(['Account.awesome']) 75 | ) 76 | }) 77 | }) 78 | }) 79 | }) 80 | -------------------------------------------------------------------------------- /src/metadata/metadataManager.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import type { 3 | BaseMetadata, 4 | Metadata, 5 | SharedFileMetadata, 6 | SharedFolderMetadata, 7 | } from '../types/metadata.js' 8 | 9 | import { MetadataRepository } from './MetadataRepository.js' 10 | import { MetadataRepositoryImpl } from './MetadataRepositoryImpl.js' 11 | 12 | const inFileMetadata = new Map<string, SharedFileMetadata>() 13 | const sharedFolderMetadata = new Map<string, string>() 14 | 15 | const earliestVersion: number = 46 16 | const latestVersion: number = 65 17 | 18 | export const getLatestSupportedVersion = () => { 19 | return latestVersion - 1 20 | } 21 | 22 | export const isVersionSupported = (version: number | undefined) => { 23 | return ( 24 | Number.isInteger(version) && 25 | version! >= earliestVersion && 26 | version! <= latestVersion 27 | ) 28 | } 29 | 30 | export const getDefinition = async ( 31 | apiVersion: number | undefined 32 | ): Promise<MetadataRepository> => { 33 | const version = isVersionSupported(apiVersion) 34 | ? apiVersion 35 | : getLatestSupportedVersion() 36 | const { default: metadataVersion } = await import(`./v${version}.js`) 37 | 38 | const metadataRepository: MetadataRepository = new MetadataRepositoryImpl( 39 | metadataVersion 40 | ) 41 | return metadataRepository 42 | } 43 | 44 | export const isPackable = (type: string) => 45 | Array.from(inFileMetadata.values()).find( 46 | (inFileDef: SharedFileMetadata) => inFileDef.xmlName === type 47 | )?.excluded !== true 48 | 49 | export const getInFileAttributes = (metadata: MetadataRepository) => 50 | inFileMetadata.size 51 | ? inFileMetadata 52 | : metadata 53 | .values() 54 | .filter((meta: Metadata) => meta.xmlTag) 55 | .reduce( 56 | (acc: Map<string, SharedFileMetadata>, meta: Metadata) => 57 | acc.set(meta.xmlTag!, { 58 | xmlName: meta.xmlName, 59 | key: meta.key, 60 | excluded: !!meta.excluded, 61 | } as SharedFileMetadata), 62 | inFileMetadata 63 | ) 64 | 65 | export const getSharedFolderMetadata = (metadata: MetadataRepository) => 66 | sharedFolderMetadata.size 67 | ? sharedFolderMetadata 68 | : metadata 69 | .values() 70 | .filter((meta: Metadata) => meta.content) 71 | .flatMap((elem: SharedFolderMetadata): BaseMetadata[] => elem.content!) 72 | .reduce( 73 | (acc: Map<string, string>, val: BaseMetadata) => 74 | acc.set(val!.suffix!, val!.xmlName!), 75 | sharedFolderMetadata 76 | ) 77 | -------------------------------------------------------------------------------- /src/utils/ignoreHelper.ts: -------------------------------------------------------------------------------- 1 | import ignore, { Ignore } from 'ignore' 2 | 3 | import { 4 | ADDITION, 5 | DELETION, 6 | GIT_DIFF_TYPE_REGEX, 7 | MODIFICATION, 8 | } from '../constant/gitConstants.js' 9 | import { readFile } from './fsUtils.js' 10 | import { log } from './LoggingDecorator.js' 11 | 12 | // QUESTION: Why should we ignore recordTypes for the destructive changes manifest? 
13 | // Because the operation is not enabled on the metadata API https://ideas.salesforce.com/s/idea/a0B8W00000GdeGKUAZ/allow-deletion-of-record-type-using-metadata-api 14 | const BASE_DESTRUCTIVE_IGNORE = ['recordTypes/'] 15 | 16 | export class IgnoreHelper { 17 | constructor( 18 | public readonly globalIgnore: Ignore, 19 | protected readonly destructiveIgnore: Ignore 20 | ) {} 21 | 22 | @log 23 | public keep(line: string): boolean { 24 | const changeType = line.charAt(0) 25 | 26 | let ignInstance!: Ignore 27 | if (DELETION === changeType) { 28 | ignInstance = this.destructiveIgnore 29 | } else if ([ADDITION, MODIFICATION].includes(changeType)) { 30 | ignInstance = this.globalIgnore 31 | } 32 | 33 | const filePath = line.replace(GIT_DIFF_TYPE_REGEX, '') 34 | 35 | return !ignInstance?.ignores(filePath) 36 | } 37 | } 38 | 39 | let ignoreInstance: IgnoreHelper | null 40 | export const buildIgnoreHelper = async ({ 41 | ignore, 42 | ignoreDestructive, 43 | }: { 44 | ignore?: string | undefined 45 | ignoreDestructive?: string | undefined 46 | }) => { 47 | if (!ignoreInstance) { 48 | const globalIgnore = await _buildIgnore(ignore) 49 | const destructiveIgnore = await _buildIgnore(ignoreDestructive || ignore) 50 | 51 | destructiveIgnore.add(BASE_DESTRUCTIVE_IGNORE) 52 | 53 | ignoreInstance = new IgnoreHelper(globalIgnore, destructiveIgnore) 54 | } 55 | return ignoreInstance 56 | } 57 | 58 | let includeInstance: IgnoreHelper | null 59 | export const buildIncludeHelper = async ({ 60 | include, 61 | includeDestructive, 62 | }: { 63 | include?: string | undefined 64 | includeDestructive?: string | undefined 65 | }) => { 66 | if (!includeInstance) { 67 | const globalIgnore = await _buildIgnore(include) 68 | const destructiveIgnore = await _buildIgnore(includeDestructive) 69 | 70 | includeInstance = new IgnoreHelper(globalIgnore, destructiveIgnore) 71 | } 72 | return includeInstance 73 | } 74 | 75 | const _buildIgnore = async (ignorePath: string | undefined) => { 76 | const ign = ignore() 77 | if (ignorePath) { 78 | const content = await readFile(ignorePath) 79 | ign.add(content.toString()) 80 | } 81 | return ign 82 | } 83 | 84 | export const resetIgnoreInstance = () => { 85 | ignoreInstance = null 86 | } 87 | 88 | export const resetIncludeInstance = () => { 89 | includeInstance = null 90 | } 91 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/customLabelHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import CustomLabelHandler from '../../../../src/service/customLabelHandler' 6 | import type { Work } from '../../../../src/types/work' 7 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 8 | 9 | const labelType = { 10 | directoryName: 'labels', 11 | inFolder: false, 12 | metaFile: false, 13 | parentXmlName: 'CustomLabels', 14 | xmlName: 'CustomLabel', 15 | childXmlNames: ['CustomLabel'], 16 | suffix: 'labels', 17 | xmlTag: 'labels', 18 | key: 'fullName', 19 | } 20 | 21 | let globalMetadata: MetadataRepository 22 | beforeAll(async () => { 23 | globalMetadata = await getGlobalMetadata() 24 | }) 25 | let work: Work 26 | beforeEach(() => { 27 | jest.clearAllMocks() 28 | work = getWork() 29 | }) 30 | 31 | describe('Decomposed CustomLabel spec', () => { 32 | const line = 
'force-app/main/default/labels/Test.label-meta.xml' 33 | describe('when file is added', () => { 34 | let sut: CustomLabelHandler 35 | beforeEach(() => { 36 | // Arrange 37 | sut = new CustomLabelHandler(line, labelType, work, globalMetadata) 38 | }) 39 | it('should add the element in the package', async () => { 40 | // Arrange 41 | 42 | // Act 43 | await sut.handleAddition() 44 | 45 | // Assert 46 | expect(work.diffs.destructiveChanges.size).toEqual(0) 47 | expect(work.diffs.package.get('CustomLabel')).toEqual(new Set(['Test'])) 48 | }) 49 | }) 50 | 51 | describe('when file is modified', () => { 52 | let sut: CustomLabelHandler 53 | beforeEach(() => { 54 | // Arrange 55 | sut = new CustomLabelHandler(line, labelType, work, globalMetadata) 56 | }) 57 | it('should add the element in the package', async () => { 58 | // Arrange 59 | 60 | // Act 61 | await sut.handleModification() 62 | 63 | // Assert 64 | expect(work.diffs.destructiveChanges.size).toEqual(0) 65 | expect(work.diffs.package.get('CustomLabel')).toEqual(new Set(['Test'])) 66 | }) 67 | }) 68 | 69 | describe('when file is deleted', () => { 70 | let sut: CustomLabelHandler 71 | beforeEach(() => { 72 | // Arrange 73 | sut = new CustomLabelHandler(line, labelType, work, globalMetadata) 74 | }) 75 | it('should add the element in the destructiveChanges', async () => { 76 | // Arrange 77 | 78 | // Act 79 | await sut.handleDeletion() 80 | 81 | // Assert 82 | expect(work.diffs.package.size).toEqual(0) 83 | expect(work.diffs.destructiveChanges.get('CustomLabel')).toEqual( 84 | new Set(['Test']) 85 | ) 86 | }) 87 | }) 88 | }) 89 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/sharedFolderHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { METAFILE_SUFFIX } from '../../../../src/constant/metadataConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import SharedFolderHandler from '../../../../src/service/sharedFolderHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { copyFiles } from '../../../../src/utils/fsHelper' 9 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 10 | 11 | jest.mock('../../../../src/utils/fsHelper') 12 | 13 | const objectType = { 14 | directoryName: 'discovery', 15 | inFolder: false, 16 | metaFile: true, 17 | content: [ 18 | { 19 | suffix: 'model', 20 | xmlName: 'DiscoveryAIModel', 21 | }, 22 | { 23 | suffix: 'goal', 24 | xmlName: 'DiscoveryGoal', 25 | }, 26 | ], 27 | } 28 | const entityName = 'DiscoveryAIModelTest' 29 | const entityExtension = 'model' 30 | const basePath = `force-app/main/default/` 31 | const line = `A ${basePath}${objectType}/${entityName}.${entityExtension}` 32 | const entityType = 'DiscoveryAIModel' 33 | 34 | let work: Work 35 | beforeEach(() => { 36 | jest.clearAllMocks() 37 | work = getWork() 38 | work.config.generateDelta = false 39 | }) 40 | 41 | describe('SharedFolderHandler', () => { 42 | let globalMetadata: MetadataRepository 43 | beforeAll(async () => { 44 | globalMetadata = await getGlobalMetadata() 45 | }) 46 | 47 | it('should add the metadata component under the right type to the package', async () => { 48 | // Arrange 49 | const sut = new SharedFolderHandler(line, objectType, work, globalMetadata) 50 | 51 | // Act 52 | await sut.handleAddition() 53 | 54 | // Assert 55 | 
expect(work.diffs.package.get(entityType)!.size).toEqual(1) 56 | expect(work.diffs.package.get(entityType)).toEqual(new Set([entityName])) 57 | }) 58 | describe('when it should generate output file', () => { 59 | beforeEach(() => { 60 | work.config.generateDelta = true 61 | }) 62 | it('should add and copy the metadata', async () => { 63 | const sut = new SharedFolderHandler( 64 | line, 65 | objectType, 66 | work, 67 | globalMetadata 68 | ) 69 | 70 | // Act 71 | await sut.handleAddition() 72 | 73 | // Assert 74 | expect(work.diffs.package.get(entityType)!.size).toEqual(1) 75 | expect(work.diffs.package.get(entityType)).toEqual(new Set([entityName])) 76 | expect(copyFiles).toHaveBeenCalledTimes(2) 77 | expect(copyFiles).toHaveBeenCalledWith( 78 | work.config, 79 | `${basePath}${objectType}/${entityName}.${entityExtension}` 80 | ) 81 | expect(copyFiles).toHaveBeenCalledWith( 82 | work.config, 83 | `${basePath}${objectType}/${entityName}.${entityExtension}${METAFILE_SUFFIX}` 84 | ) 85 | }) 86 | }) 87 | }) 88 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/customFieldHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MASTER_DETAIL_TAG } from '../../../../src/constant/metadataConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import CustomFieldHandler from '../../../../src/service/customFieldHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { copyFiles, readPathFromGit } from '../../../../src/utils/fsHelper' 9 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 10 | 11 | jest.mock('../../../../src/utils/fsHelper') 12 | 13 | const mockedReadPathFromGit = jest.mocked(readPathFromGit) 14 | 15 | const objectType = { 16 | directoryName: 'fields', 17 | inFolder: false, 18 | metaFile: false, 19 | suffix: 'field', 20 | parentXmlName: 'CustomObject', 21 | xmlName: 'CustomField', 22 | } 23 | const line = 24 | 'A force-app/main/default/objects/Account/fields/awesome.field-meta.xml' 25 | 26 | let work: Work 27 | beforeEach(() => { 28 | jest.clearAllMocks() 29 | work = getWork() 30 | }) 31 | 32 | describe('CustomFieldHandler', () => { 33 | let globalMetadata: MetadataRepository 34 | beforeAll(async () => { 35 | globalMetadata = await getGlobalMetadata() 36 | }) 37 | 38 | describe('when called with generateDelta false', () => { 39 | it('should not handle master detail exception', async () => { 40 | // Arrange 41 | work.config.generateDelta = false 42 | const sut = new CustomFieldHandler(line, objectType, work, globalMetadata) 43 | 44 | // Act 45 | await sut.handleAddition() 46 | 47 | // Assert 48 | expect(readPathFromGit).not.toHaveBeenCalled() 49 | }) 50 | }) 51 | describe('when called with generateDelta true', () => { 52 | describe(`when field is not master detail`, () => { 53 | it('should not handle master detail exception', async () => { 54 | // Arrange 55 | mockedReadPathFromGit.mockResolvedValueOnce('') 56 | const sut = new CustomFieldHandler( 57 | line, 58 | objectType, 59 | work, 60 | globalMetadata 61 | ) 62 | 63 | // Act 64 | await sut.handleAddition() 65 | 66 | // Assert 67 | expect(readPathFromGit).toHaveBeenCalledTimes(1) 68 | expect(copyFiles).toHaveBeenCalledTimes(1) 69 | }) 70 | }) 71 | describe(`when field is master detail`, () => { 72 | it('should copy the parent object', async () 
=> { 73 | // Arrange 74 | mockedReadPathFromGit.mockResolvedValueOnce(MASTER_DETAIL_TAG) 75 | const sut = new CustomFieldHandler( 76 | line, 77 | objectType, 78 | work, 79 | globalMetadata 80 | ) 81 | 82 | // Act 83 | await sut.handleAddition() 84 | 85 | // Assert 86 | expect(readPathFromGit).toHaveBeenCalledTimes(1) 87 | expect(copyFiles).toHaveBeenCalledTimes(2) 88 | }) 89 | }) 90 | }) 91 | }) 92 | -------------------------------------------------------------------------------- /__tests__/functional/main.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import sgd from '../../src/main' 5 | import type { Config } from '../../src/types/config' 6 | 7 | const mockValidateConfig = jest.fn() 8 | jest.mock('../../src/utils/cliHelper', () => { 9 | // biome-ignore lint/suspicious/noExplicitAny: let TS know it is an object 10 | const actualModule: any = jest.requireActual('../../src/utils/cliHelper') 11 | return { 12 | default: jest.fn().mockImplementation(() => { 13 | return { 14 | ...actualModule, 15 | validateConfig: mockValidateConfig, 16 | } 17 | }), 18 | } 19 | }) 20 | 21 | const mockGetLines = jest.fn() 22 | jest.mock('../../src/utils/repoGitDiff', () => { 23 | // biome-ignore lint/suspicious/noExplicitAny: let TS know it is an object 24 | const actualModule: any = jest.requireActual('../../src/utils/repoGitDiff') 25 | return { 26 | default: jest.fn().mockImplementation(() => { 27 | return { 28 | ...actualModule, 29 | getLines: mockGetLines, 30 | } 31 | }), 32 | } 33 | }) 34 | 35 | const mockProcess = jest.fn() 36 | jest.mock('../../src/service/diffLineInterpreter', () => { 37 | // biome-ignore lint/suspicious/noExplicitAny: let TS know it is an object 38 | const actualModule: any = jest.requireActual( 39 | '../../src/service/diffLineInterpreter' 40 | ) 41 | return { 42 | default: jest.fn().mockImplementation(() => { 43 | return { 44 | ...actualModule, 45 | process: mockProcess, 46 | } 47 | }), 48 | } 49 | }) 50 | 51 | describe('external library inclusion', () => { 52 | describe('when configuration is not valid', () => { 53 | beforeEach(() => { 54 | // Arrange 55 | mockValidateConfig.mockImplementationOnce(() => 56 | Promise.reject(new Error('test')) 57 | ) 58 | }) 59 | 60 | it('it should throw', async () => { 61 | // Arrange 62 | expect.assertions(1) 63 | 64 | // Act 65 | try { 66 | await sgd({} as Config) 67 | } catch (error) { 68 | // Assert 69 | expect((error as Error).message).toEqual('test') 70 | } 71 | }) 72 | }) 73 | 74 | describe('when there are no changes', () => { 75 | beforeEach(() => { 76 | // Arrange 77 | mockGetLines.mockImplementationOnce(() => Promise.resolve([])) 78 | }) 79 | it('it should not process lines', async () => { 80 | // Act 81 | await sgd({} as Config) 82 | 83 | // Assert 84 | expect(mockProcess).toHaveBeenCalledWith([]) 85 | }) 86 | }) 87 | 88 | describe('when there are changes', () => { 89 | beforeEach(() => { 90 | // Arrange 91 | mockGetLines.mockImplementationOnce(() => Promise.resolve(['line'])) 92 | }) 93 | it('it should process those lines', async () => { 94 | // Act 95 | await sgd({} as Config) 96 | 97 | // Assert 98 | expect(mockProcess).toHaveBeenCalledWith(['line']) 99 | }) 100 | }) 101 | }) 102 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/lwcHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 
import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { 5 | ADDITION, 6 | DELETION, 7 | MODIFICATION, 8 | } from '../../../../src/constant/gitConstants' 9 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 10 | import LwcHandler from '../../../../src/service/lwcHandler' 11 | import type { Work } from '../../../../src/types/work' 12 | import { copyFiles } from '../../../../src/utils/fsHelper' 13 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 14 | 15 | jest.mock('../../../../src/utils/fsHelper') 16 | 17 | const objectType = { 18 | directoryName: 'lwc', 19 | inFolder: false, 20 | metaFile: false, 21 | xmlName: 'LightningComponentBundle', 22 | } 23 | const element = 'component' 24 | const basePath = `force-app/main/default/${objectType.directoryName}` 25 | const entityPath = `${basePath}/${element}/${element}.js` 26 | const xmlName = 'LightningComponentBundle' 27 | let work: Work 28 | beforeEach(() => { 29 | jest.clearAllMocks() 30 | work = getWork() 31 | }) 32 | 33 | describe('lwcHandler', () => { 34 | let globalMetadata: MetadataRepository 35 | beforeAll(async () => { 36 | globalMetadata = await getGlobalMetadata() 37 | }) 38 | describe('when the line should not be processed', () => { 39 | it.each([ 40 | `${basePath}/.eslintrc.json`, 41 | `${basePath}/jsconfig.json`, 42 | ])('does not handle the line', async entityPath => { 43 | // Arrange 44 | const sut = new LwcHandler( 45 | `${ADDITION} ${entityPath}`, 46 | objectType, 47 | work, 48 | globalMetadata 49 | ) 50 | 51 | // Act 52 | await sut.handle() 53 | 54 | // Assert 55 | expect(work.diffs.package.size).toBe(0) 56 | expect(copyFiles).not.toHaveBeenCalled() 57 | }) 58 | }) 59 | 60 | describe('when the line should be processed', () => { 61 | it.each([ 62 | ADDITION, 63 | MODIFICATION, 64 | ])('handles the line for "%s" type change', async changeType => { 65 | // Arrange 66 | const sut = new LwcHandler( 67 | `${changeType} ${entityPath}`, 68 | objectType, 69 | work, 70 | globalMetadata 71 | ) 72 | 73 | // Act 74 | await sut.handle() 75 | 76 | // Assert 77 | expect(work.diffs.package.get(xmlName)).toEqual(new Set([element])) 78 | expect(copyFiles).toHaveBeenCalled() 79 | }) 80 | 81 | it('handles the line for "D" type change', async () => { 82 | // Arrange 83 | const sut = new LwcHandler( 84 | `${DELETION} ${entityPath}`, 85 | objectType, 86 | work, 87 | globalMetadata 88 | ) 89 | 90 | // Act 91 | await sut.handle() 92 | 93 | // Assert 94 | expect(work.diffs.destructiveChanges.get(xmlName)).toEqual( 95 | new Set([element]) 96 | ) 97 | expect(copyFiles).not.toHaveBeenCalled() 98 | }) 99 | }) 100 | }) 101 | -------------------------------------------------------------------------------- /messages/delta.md: -------------------------------------------------------------------------------- 1 | # summary 2 | 3 | Generate incremental package manifest and source content 4 | 5 | # description 6 | 7 | Use two git commit reference to generate the package corresponding to what has changed in between 8 | 9 | # examples 10 | 11 | - Build incremental manifest from the previous commit 12 | 13 | <%= config.bin %> <%= command.id %> --from "origin/development" --output-dir incremental 14 | 15 | - Build incremental manifest and source from the development branch 16 | 17 | <%= config.bin %> <%= command.id %> --from "origin/development" --generate-delta --output-dir incremental 18 | 19 | # flags.to.summary 20 | 21 | commit sha to where the diff is done 22 | 23 | # 
flags.from.summary 24 | 25 | commit sha from where the diff is done 26 | 27 | # flags.repo.summary 28 | 29 | git repository location 30 | 31 | # flags.output.summary 32 | 33 | source package specific output 34 | 35 | # flags.source.summary 36 | 37 | source folders focus location relative to --repo-dir 38 | 39 | # flags.source.description 40 | 41 | You can use this flag multiple times to include different folders that contain source files. Each path should be relative to --repo-dir. 42 | 43 | The folder can exist or not. 44 | * If the folder exists, its contents will be processed. 45 | * If the folder doesn't exist, it usually won't show any output—unless the folder was recently deleted and is part of a diff, in which case changes may still be picked up. 46 | 47 | # flags.ignore.summary 48 | 49 | file listing paths to explicitly ignore for any diff actions 50 | 51 | # flags.ignore-destructive.summary 52 | 53 | file listing paths to explicitly ignore for any destructive actions 54 | 55 | # flags.api-version.summary 56 | 57 | salesforce metadata API version, default to sfdx-project.json "sourceApiVersion" attribute or latest version 58 | 59 | # flags.generate-delta.summary 60 | 61 | generate delta files in [--output-dir] folder 62 | 63 | # flags.ignore-whitespace.summary 64 | 65 | ignore git diff whitespace (space, tab, eol) changes 66 | 67 | # flags.include.summary 68 | 69 | file listing paths to explicitly include for any diff actions 70 | 71 | # flags.include-destructive.summary 72 | 73 | file listing paths to explicitly include for any destructive actions 74 | 75 | # error.ParameterIsNotGitSHA 76 | 77 | --%s is not a valid sha pointer: '%s' (If in CI/CD context, check the fetch depth is properly set) 78 | 79 | # error.PathIsNotGit 80 | 81 | '%s' is not a git repository 82 | 83 | # warning.ApiVersionNotSupported 84 | 85 | API version not found or not supported, using '%s' instead 86 | 87 | # warning.FlowDeleted 88 | 89 | Attempt to delete the flow '%s' via destructiveChanges.xml may not work as expected (see https://github.com/scolladon/sfdx-git-delta#handle-flow-deletion) 90 | 91 | # info.CommandIsRunning 92 | 93 | Generating incremental package 94 | 95 | # info.CommandSuccess 96 | 97 | Success 98 | 99 | # info.CommandFailure 100 | 101 | Failure 102 | 103 | # info.EncourageSponsorship 104 | 105 | 💡 Enjoying sfdx-git-delta? 
106 | Your contribution helps us provide fast support 🚀 and high quality features 🔥 107 | Become a sponsor: https://github.com/sponsors/scolladon 💙 -------------------------------------------------------------------------------- /src/service/containedDecomposedHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join, ParsedPath, parse } from 'node:path/posix' 3 | import { PATH_SEP } from '../constant/fsConstants.js' 4 | import { 5 | METAFILE_SUFFIX, 6 | PERMISSIONSET_OBJECTSETTINGS_FOLDER, 7 | } from '../constant/metadataConstants.js' 8 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 9 | import { Metadata } from '../types/metadata.js' 10 | import { Work } from '../types/work.js' 11 | import { readDirs } from '../utils/fsHelper.js' 12 | import { log } from '../utils/LoggingDecorator.js' 13 | import StandardHandler from './standardHandler.js' 14 | 15 | export default class ContainedDecomposedHandler extends StandardHandler { 16 | protected holderFolder: ParsedPath | undefined 17 | 18 | constructor( 19 | line: string, 20 | metadataDef: Metadata, 21 | work: Work, 22 | metadata: MetadataRepository 23 | ) { 24 | super(line, metadataDef, work, metadata) 25 | this._setholderFolder() 26 | } 27 | 28 | @log 29 | public override async handleAddition() { 30 | await super.handleAddition() 31 | if (!this.config.generateDelta) return 32 | 33 | // For decomposed format, copy all related files 34 | if (this._isDecomposedFormat()) { 35 | await this._copyDecomposedFiles() 36 | } 37 | } 38 | 39 | @log 40 | public override async handleDeletion() { 41 | if (!this._isDecomposedFormat()) { 42 | await super.handleDeletion() 43 | return 44 | } 45 | 46 | if (await this._hasRelatedContent()) { 47 | await this.handleModification() 48 | } else { 49 | await super.handleDeletion() 50 | } 51 | } 52 | 53 | protected _setholderFolder() { 54 | if (!this._isDecomposedFormat()) { 55 | this.holderFolder = parse( 56 | this.line 57 | .replace(METAFILE_SUFFIX, '') 58 | .replace(`.${this.metadataDef.suffix}`, '') 59 | ) 60 | return 61 | } 62 | // Get the parent folder name from the path 63 | const parentFolderName = this.splittedLine.at(-2) 64 | 65 | // If parent folder is objectSettings, use the grandparent folder name 66 | // Otherwise use the parent folder name 67 | const index = 68 | parentFolderName === PERMISSIONSET_OBJECTSETTINGS_FOLDER ? 
-2 : -1 69 | 70 | this.holderFolder = parse(this.splittedLine.slice(0, index).join(PATH_SEP)) 71 | } 72 | 73 | protected _isDecomposedFormat() { 74 | return ( 75 | !this.parsedLine.base.includes(`.${this.metadataDef.suffix}`) || 76 | this.parsedLine.dir.split(PATH_SEP).pop() === this.parsedLine.name 77 | ) 78 | } 79 | 80 | protected async _hasRelatedContent(): Promise { 81 | const files = await readDirs( 82 | join(this.holderFolder!.dir, this.holderFolder!.base), 83 | this.config 84 | ) 85 | return files.length > 0 86 | } 87 | 88 | protected async _copyDecomposedFiles() { 89 | await this._copy(join(this.holderFolder!.dir, this.holderFolder!.base)) 90 | } 91 | 92 | protected override _getElementName() { 93 | return this.holderFolder!.base 94 | } 95 | 96 | protected override _isProcessable() { 97 | return true 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/post-processor/includeProcessor.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import GitAdapter from '../adapter/GitAdapter.js' 3 | import { TAB } from '../constant/cliConstants.js' 4 | import { ADDITION, DELETION } from '../constant/gitConstants.js' 5 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 6 | import DiffLineInterpreter from '../service/diffLineInterpreter.js' 7 | import type { Work } from '../types/work.js' 8 | import { buildIncludeHelper } from '../utils/ignoreHelper.js' 9 | import { log } from '../utils/LoggingDecorator.js' 10 | 11 | import BaseProcessor from './baseProcessor.js' 12 | 13 | type GitChange = typeof ADDITION | typeof DELETION 14 | 15 | export default class IncludeProcessor extends BaseProcessor { 16 | protected readonly gitAdapter: GitAdapter 17 | constructor(work: Work, metadata: MetadataRepository) { 18 | super(work, metadata) 19 | this.gitAdapter = GitAdapter.getInstance(this.config) 20 | } 21 | 22 | protected _shouldProcess() { 23 | return !!this.config.include || !!this.config.includeDestructive 24 | } 25 | 26 | @log 27 | public override async process() { 28 | if (!this._shouldProcess()) { 29 | return 30 | } 31 | 32 | const includeHelper = await buildIncludeHelper(this.config) 33 | const includeLines = new Map() 34 | const gitChanges: GitChange[] = [ADDITION, DELETION] 35 | const lines: string[] = await this.gitAdapter.getFilesPath( 36 | this.config.source 37 | ) 38 | for (const line of lines) { 39 | gitChanges.forEach((changeType: GitChange) => { 40 | const changedLine = `${changeType}${TAB}${line}` 41 | if (!includeHelper.keep(changedLine)) { 42 | if (!includeLines.has(changeType)) { 43 | includeLines.set(changeType, []) 44 | } 45 | includeLines.get(changeType)?.push(changedLine) 46 | } 47 | }) 48 | } 49 | 50 | await this._processIncludes(includeLines) 51 | } 52 | 53 | protected async _processIncludes(includeLines: Map) { 54 | if (includeLines.size === 0) { 55 | return 56 | } 57 | 58 | const fromBackup = this.work.config.from 59 | const firsSHA = await this.gitAdapter.getFirstCommitRef() 60 | 61 | // Compare with the whole history of the repository 62 | // so it can get full file content for inFile metadata 63 | // while reusing current way to do it on a minimal scope 64 | if (includeLines.has(ADDITION)) { 65 | this.work.config.from = firsSHA 66 | await this._processLines(includeLines.get(ADDITION)!) 
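// Note: config.from stays pointed at the first commit ref only while these
// included ADDITION lines are replayed; it is restored from fromBackup once
// both the ADDITION and DELETION branches have been handled.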
67 | } 68 | 69 | if (includeLines.has(DELETION)) { 70 | // Need to invert the SHA pointer for DELETION 71 | // so all the addition are interpreted has deletion by MetadataDiff 72 | // for the lines of InFile metadata type 73 | this.work.config.from = this.work.config.to 74 | this.work.config.to = firsSHA 75 | await this._processLines(includeLines.get(DELETION)!) 76 | this.work.config.to = this.work.config.from 77 | } 78 | this.work.config.from = fromBackup 79 | } 80 | 81 | protected async _processLines(lines: string[]) { 82 | const lineProcessor = new DiffLineInterpreter(this.work, this.metadata) 83 | await lineProcessor.process(lines) 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /__tests__/functional/delta.nut.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import fs from 'node:fs' 3 | import { execCmd } from '@salesforce/cli-plugins-testkit' 4 | import { expect } from 'chai' 5 | import readline from 'readline' 6 | 7 | import { getLatestSupportedVersion } from '../../src/metadata/metadataManager.js' 8 | 9 | describe('sgd source delta NUTS', () => { 10 | it('run help', () => { 11 | const result = execCmd('sgd source delta --help', { 12 | ensureExitCode: 0, 13 | }).shellOutput 14 | expect(result).to.include('incremental') 15 | }) 16 | 17 | it('run `e2e` tests with multiple --source-dir flags', async () => { 18 | // Act 19 | const result = execCmd( 20 | 'sgd source delta --from "origin/e2e/base" --to "origin/e2e/head" --output e2e/expected --generate-delta --repo e2e --source-dir test/create-classes --source-dir test/update-classes --source-dir test/delete-classes --json', 21 | { 22 | ensureExitCode: 0, 23 | } 24 | ).shellOutput 25 | 26 | // Assert 27 | const packageFile = fs.readFileSync( 28 | 'e2e/expected/package/package.xml', 29 | 'utf8' 30 | ) 31 | const destructiveChangesFile = fs.readFileSync( 32 | 'e2e/expected/destructiveChanges/destructiveChanges.xml', 33 | 'utf8' 34 | ) 35 | const expectedPackage = ` 36 | 37 | 38 | CreatedClass 39 | ModifiedClass 40 | ApexClass 41 | 42 | ${getLatestSupportedVersion()}.0 43 | ` 44 | const expectedDestructiveChanges = ` 45 | 46 | 47 | DeletedClass 48 | ApexClass 49 | 50 | ${getLatestSupportedVersion()}.0 51 | ` 52 | expect(packageFile).to.equal(expectedPackage) 53 | expect(destructiveChangesFile).to.equal(expectedDestructiveChanges) 54 | expect(result).to.include('"status": 0') 55 | }) 56 | 57 | it('run `e2e` tests', async () => { 58 | // Act 59 | const result = execCmd( 60 | 'sgd source delta --from "origin/e2e/base" --to "origin/e2e/head" --output e2e/expected --generate-delta --repo e2e --include e2e/.sgdinclude --include-destructive e2e/.sgdincludeDestructive --ignore e2e/.sgdignore --ignore-destructive e2e/.sgdignoreDestructive --json', 61 | { 62 | ensureExitCode: 0, 63 | } 64 | ).shellOutput 65 | 66 | // Assert 67 | const packageLineCount = await getFileLineNumber( 68 | 'e2e/expected/package/package.xml' 69 | ) 70 | const destructiveChangesLineCount = await getFileLineNumber( 71 | 'e2e/expected/destructiveChanges/destructiveChanges.xml' 72 | ) 73 | expect(packageLineCount).to.equal(235) 74 | expect(destructiveChangesLineCount).to.equal(140) 75 | expect(result).to.include('"status": 0') 76 | }) 77 | }) 78 | 79 | const getFileLineNumber = async (path: string) => { 80 | let linesCount = 0 81 | const rl = readline.createInterface({ 82 | input: fs.createReadStream(path), 83 | output: process.stdout, 84 | terminal: false, 85 | }) 86 | for 
await (const _ of rl) { 87 | ++linesCount 88 | } 89 | return linesCount 90 | } 91 | -------------------------------------------------------------------------------- /src/service/inResourceHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join, parse } from 'node:path/posix' 3 | 4 | import { DOT, PATH_SEP } from '../constant/fsConstants.js' 5 | import { META_REGEX, METAFILE_SUFFIX } from '../constant/metadataConstants.js' 6 | import { pathExists, readDirs } from '../utils/fsHelper.js' 7 | import { log } from '../utils/LoggingDecorator.js' 8 | import StandardHandler from './standardHandler.js' 9 | 10 | export default class ResourceHandler extends StandardHandler { 11 | protected metadataName: string | undefined 12 | 13 | @log 14 | public override async handleAddition() { 15 | this.metadataName = this._getMetadataName() 16 | await super.handleAddition() 17 | if (!this.config.generateDelta) return 18 | 19 | await this._copyResourceFiles() 20 | } 21 | 22 | @log 23 | public override async handleDeletion() { 24 | const [, elementPath, elementName] = this._parseLine()! 25 | const exists = await pathExists(join(elementPath, elementName), this.config) 26 | if (exists) { 27 | await this.handleModification() 28 | } else { 29 | await super.handleDeletion() 30 | } 31 | } 32 | 33 | protected async _copyResourceFiles() { 34 | const staticResourcePath = this.metadataName!.substring( 35 | 0, 36 | this.metadataName!.lastIndexOf(PATH_SEP) 37 | ) 38 | const allStaticResources = await readDirs( 39 | staticResourcePath, 40 | this.work.config 41 | ) 42 | 43 | const startsWithMetadataName = new RegExp( 44 | `${this.metadataName!}[${PATH_SEP}${DOT}]` 45 | ) 46 | const resourceFiles = allStaticResources.filter((file: string) => 47 | startsWithMetadataName.test(file) 48 | ) 49 | for (const resourceFile of resourceFiles) { 50 | await this._copy(resourceFile) 51 | } 52 | } 53 | 54 | protected override _getElementName() { 55 | const parsedPath = this._getParsedPath() 56 | return parsedPath.name 57 | } 58 | 59 | protected override _getParsedPath() { 60 | const base = 61 | !this.metadataDef.excluded && this.ext === this.metadataDef.suffix 62 | ? this.splittedLine.at(-1)! 
63 | : this.splittedLine[ 64 | this.splittedLine.lastIndexOf(this.metadataDef.directoryName) + 1 65 | ] 66 | return parse(base.replace(META_REGEX, '')) 67 | } 68 | 69 | protected override _isProcessable() { 70 | return true 71 | } 72 | 73 | protected _getMetadataName() { 74 | const metadataDirIndex = this.splittedLine.lastIndexOf( 75 | this.metadataDef.directoryName 76 | ) 77 | 78 | const metadataFullPath = this.splittedLine.slice(0, metadataDirIndex + 2) 79 | const componentNameIndex = metadataFullPath.length - 1 80 | const componentNameParts = metadataFullPath[componentNameIndex] 81 | .replace(METAFILE_SUFFIX, '') 82 | .split(DOT) 83 | 84 | if (componentNameParts.length > 1) { 85 | componentNameParts.pop() 86 | } 87 | 88 | metadataFullPath[componentNameIndex] = componentNameParts.join(DOT) 89 | return metadataFullPath.join(PATH_SEP) 90 | } 91 | 92 | protected override _getMetaTypeFilePath() { 93 | return `${this.metadataName}.${this.metadataDef.suffix}${METAFILE_SUFFIX}` 94 | } 95 | 96 | protected override _shouldCopyMetaFile(): boolean { 97 | return true 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /__tests__/unit/lib/post-processor/postProcessorManager.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import BaseProcessor from '../../../../src/post-processor/baseProcessor' 6 | import PostProcessorManager, { 7 | getPostProcessors, 8 | } from '../../../../src/post-processor/postProcessorManager' 9 | import type { Work } from '../../../../src/types/work' 10 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 11 | 12 | const processSpy = jest.fn() 13 | 14 | class TestProcessor extends BaseProcessor { 15 | constructor(work: Work, metadata: MetadataRepository) { 16 | super(work, metadata) 17 | } 18 | override async process() { 19 | return processSpy() as Promise 20 | } 21 | } 22 | 23 | describe('postProcessorManager', () => { 24 | const work: Work = getWork() 25 | let metadata: MetadataRepository 26 | beforeAll(async () => { 27 | metadata = await getGlobalMetadata() 28 | }) 29 | 30 | describe('getPostProcessors', () => { 31 | describe('when called', () => { 32 | it('returns a post processor manager with a list of post processor', () => { 33 | // Arrange 34 | const sut = getPostProcessors 35 | 36 | // Act 37 | const result = sut(work, metadata) 38 | 39 | // Assert 40 | expect(result['postProcessors'].length).toBeGreaterThan(0) 41 | }) 42 | }) 43 | }) 44 | describe('when calling `use`', () => { 45 | it('should add a processor to the list', () => { 46 | // Arrange 47 | const sut = new PostProcessorManager(work) 48 | const processorCount = sut['postProcessors'].length 49 | 50 | // Act 51 | sut.use(new TestProcessor(work, metadata) as BaseProcessor) 52 | 53 | // Assert 54 | expect(processorCount).toBeLessThan(sut['postProcessors'].length) 55 | }) 56 | }) 57 | 58 | describe('processor count', () => { 59 | describe.each([ 60 | [new PostProcessorManager(work), 0], 61 | [ 62 | new PostProcessorManager(work).use( 63 | new TestProcessor(work, metadata) as BaseProcessor 64 | ), 65 | 1, 66 | ], 67 | [ 68 | new PostProcessorManager(work) 69 | .use(new TestProcessor(work, metadata) as BaseProcessor) 70 | .use(new TestProcessor(work, metadata) as BaseProcessor), 71 | 2, 72 | ], 73 | ])('when calling `execute`', 
(processorManager, expectedCount) => { 74 | it(`should execute ${expectedCount} processors`, async () => { 75 | // Arrange 76 | const sut = processorManager 77 | 78 | // Act 79 | await sut.execute() 80 | 81 | // Assert 82 | expect(processSpy).toHaveBeenCalledTimes(expectedCount) 83 | }) 84 | }) 85 | }) 86 | 87 | describe('when postProcessor `process` throws', () => { 88 | it('should append the error in warnings', async () => { 89 | // Arrange 90 | expect.assertions(1) 91 | const sut = new PostProcessorManager(work) 92 | sut.use(new TestProcessor(work, metadata) as BaseProcessor) 93 | processSpy.mockImplementationOnce(() => 94 | Promise.reject(new Error('Error')) 95 | ) 96 | 97 | // Act 98 | await sut.execute() 99 | expect(work.warnings.length).toBe(1) 100 | }) 101 | }) 102 | }) 103 | -------------------------------------------------------------------------------- /src/service/inFileHandler.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { basename } from 'node:path/posix' 3 | 4 | import { DOT } from '../constant/fsConstants.js' 5 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 6 | import { getInFileAttributes, isPackable } from '../metadata/metadataManager.js' 7 | import { Metadata } from '../types/metadata.js' 8 | import type { Manifest, Work } from '../types/work.js' 9 | import { writeFile } from '../utils/fsHelper.js' 10 | import { log } from '../utils/LoggingDecorator.js' 11 | import MetadataDiff from '../utils/metadataDiff.js' 12 | import { fillPackageWithParameter } from '../utils/packageHelper.js' 13 | import StandardHandler from './standardHandler.js' 14 | 15 | const getRootType = (line: string) => basename(line).split(DOT)[0] 16 | 17 | export default class InFileHandler extends StandardHandler { 18 | protected readonly metadataDiff: MetadataDiff 19 | constructor( 20 | line: string, 21 | metadataDef: Metadata, 22 | work: Work, 23 | metadata: MetadataRepository 24 | ) { 25 | super(line, metadataDef, work, metadata) 26 | const inFileMetadata = getInFileAttributes(metadata) 27 | this.metadataDiff = new MetadataDiff(this.config, inFileMetadata) 28 | } 29 | 30 | @log 31 | public override async handleAddition() { 32 | await this._compareRevisionAndStoreComparison() 33 | } 34 | 35 | @log 36 | public override async handleDeletion() { 37 | if (this._shouldTreatDeletionAsDeletion()) { 38 | await super.handleDeletion() 39 | } else { 40 | await this.handleAddition() 41 | } 42 | } 43 | 44 | public override async handleModification() { 45 | await this.handleAddition() 46 | } 47 | 48 | protected async _compareRevisionAndStoreComparison() { 49 | const { added, deleted } = await this.metadataDiff.compare(this.line) 50 | this._storeComparison(this.diffs.destructiveChanges, deleted) 51 | this._storeComparison(this.diffs.package, added) 52 | const { xmlContent, isEmpty } = this.metadataDiff.prune() 53 | if (this._shouldTreatContainerType(isEmpty)) { 54 | // Call from super.handleAddition to add the Root Type 55 | // QUESTION: Why InFile element are not deployable when root component is not listed in package.xml ? 
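// (Hypothesis, not verified: the Metadata API seems to resolve these nested
// in-file members through their container, so a deploy may skip or reject
// them when the container type itself is absent from package.xml.)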
56 | await super.handleAddition() 57 | } 58 | if (this.config.generateDelta && !isEmpty) { 59 | await writeFile(this.line, xmlContent, this.config) 60 | } 61 | } 62 | 63 | protected _storeComparison(store: Manifest, content: Manifest) { 64 | for (const [type, members] of content) { 65 | for (const member of members) { 66 | this._fillPackageForInfileMetadata(store, type, member) 67 | } 68 | } 69 | } 70 | 71 | protected _fillPackageForInfileMetadata( 72 | store: Manifest, 73 | subType: string, 74 | member: string 75 | ) { 76 | if (isPackable(subType)) { 77 | const cleanedMember = `${this._getQualifiedName()}${member}` 78 | 79 | fillPackageWithParameter({ 80 | store, 81 | type: subType, 82 | member: cleanedMember, 83 | }) 84 | } 85 | } 86 | 87 | protected _getQualifiedName() { 88 | return `${getRootType(this.line)}${DOT}` 89 | } 90 | 91 | protected override _delegateFileCopy() { 92 | return false 93 | } 94 | 95 | protected _shouldTreatDeletionAsDeletion() { 96 | return this.metadataDef.pruneOnly 97 | } 98 | 99 | protected _shouldTreatContainerType(fileIsEmpty: boolean) { 100 | return !fileIsEmpty 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /.github/workflows/on-main-push.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Main 3 | 4 | on: 5 | push: 6 | branches: 7 | - main 8 | paths-ignore: 9 | - "**.md" 10 | - "img/**" 11 | 12 | jobs: 13 | build: 14 | uses: ./.github/workflows/reusable-build.yml 15 | secrets: inherit 16 | 17 | prepare-release: 18 | needs: [build] 19 | runs-on: ubuntu-latest 20 | outputs: 21 | release_created: ${{ steps.release.outputs.release_created }} 22 | prs_created: ${{ steps.release.outputs.prs_created }} 23 | version: ${{ steps.release.outputs.version }} 24 | steps: 25 | - uses: googleapis/release-please-action@v4 26 | id: release 27 | with: 28 | token: ${{ secrets.RELEASE_PAT }} 29 | release-type: node 30 | 31 | release: 32 | needs: [prepare-release] 33 | runs-on: ubuntu-latest 34 | if: ${{ needs.prepare-release.outputs.release_created == 'true' }} 35 | steps: 36 | - name: Checkout sources 37 | uses: actions/checkout@v4 38 | 39 | - name: Setup node 40 | uses: actions/setup-node@v4 41 | with: 42 | node-version: 20 43 | registry-url: 'https://registry.npmjs.org' 44 | 45 | - name: Setup dependencies, cache and install 46 | uses: ./.github/actions/install 47 | 48 | - name: Publish to npm 49 | run: npm publish --access public --tag latest-rc 50 | env: 51 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 52 | 53 | test-release: 54 | uses: ./.github/workflows/run-e2e-tests.yml 55 | needs: [prepare-release, release] 56 | with: 57 | channel: ${{ needs.prepare-release.outputs.version }} 58 | secrets: inherit 59 | 60 | perf: 61 | needs: [prepare-release] 62 | runs-on: ubuntu-latest 63 | if: ${{ needs.prepare-release.outputs.release_created != 'true' && needs.prepare-release.outputs.prs_created == 'true' }} 64 | steps: 65 | - name: Checkout sources 66 | uses: actions/checkout@v4 67 | 68 | - name: Setup node 69 | uses: actions/setup-node@v4 70 | with: 71 | node-version: 20 72 | 73 | - uses: google/wireit@setup-github-actions-caching/v2 74 | 75 | - name: Setup dependencies, cache and install 76 | uses: ./.github/actions/install 77 | 78 | - name: Set environment variables 79 | run: | 80 | echo "SF_DISABLE_AUTOUPDATE=true" >> "$GITHUB_ENV" 81 | echo "SF_DISABLE_TELEMETRY=true" >> "$GITHUB_ENV" 82 | 83 | - name: Build plugin 84 | run: npm pack 85 | 86 | - name: Install salesforce 
CLI 87 | run: npm install -g @salesforce/cli 88 | 89 | - name: Install plugin 90 | run: | 91 | sf plugins link . 92 | sf plugins 93 | 94 | - name: Checkout e2e test subject 95 | uses: actions/checkout@v4 96 | with: 97 | ref: 'e2e/head' 98 | fetch-depth: 0 99 | path: ./e2e 100 | 101 | - name: Run benchmark 102 | run: | 103 | npm run test:perf 104 | git reset --hard 105 | 106 | - name: Store benchmark result 107 | uses: benchmark-action/github-action-benchmark@v1 108 | with: 109 | name: Benchmark.js Benchmark 110 | tool: 'benchmarkjs' 111 | output-file-path: ./perf-result.txt 112 | github-token: ${{ secrets.GITHUB_TOKEN }} 113 | auto-push: true 114 | alert-threshold: '130%' 115 | comment-on-alert: true 116 | fail-on-alert: true 117 | summary-always: true 118 | alert-comment-cc-users: '@scolladon' 119 | -------------------------------------------------------------------------------- /src/service/typeHandlerFactory.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { MetadataRepository } from '../metadata/MetadataRepository.js' 3 | import { Metadata } from '../types/metadata.js' 4 | import type { Work } from '../types/work.js' 5 | import { log } from '../utils/LoggingDecorator.js' 6 | 7 | import Bot from './botHandler.js' 8 | import ContainedDecomposed from './containedDecomposedHandler.js' 9 | import CustomFieldHandler from './customFieldHandler.js' 10 | import CustomLabel from './customLabelHandler.js' 11 | import CustomObjectChildHandler from './customObjectChildHandler.js' 12 | import CustomObject from './customObjectHandler.js' 13 | import Decomposed from './decomposedHandler.js' 14 | import FlowHandler from './flowHandler.js' 15 | import InBundle from './inBundleHandler.js' 16 | import InFile from './inFileHandler.js' 17 | import InFolder from './inFolderHandler.js' 18 | import InResource from './inResourceHandler.js' 19 | import Lwc from './lwcHandler.js' 20 | import ObjectTranslation from './objectTranslationHandler.js' 21 | import ReportingFolderHandler from './reportingFolderHandler.js' 22 | import SharedFolder from './sharedFolderHandler.js' 23 | import Standard from './standardHandler.js' 24 | 25 | const handlerMap = { 26 | AssignmentRules: InFile, 27 | AuraDefinitionBundle: Lwc, 28 | AutoResponseRules: InFile, 29 | BusinessProcess: CustomObjectChildHandler, 30 | CompactLayout: CustomObjectChildHandler, 31 | CustomField: CustomFieldHandler, 32 | CustomFieldTranslation: ObjectTranslation, 33 | CustomLabel: CustomLabel, 34 | CustomObject: CustomObject, 35 | CustomObjectTranslation: ObjectTranslation, 36 | Dashboard: ReportingFolderHandler, 37 | DigitalExperienceBundle: InBundle, 38 | Document: InFolder, 39 | EmailTemplate: InFolder, 40 | EscalationRules: InFile, 41 | ExperienceBundle: InResource, 42 | FieldSet: CustomObjectChildHandler, 43 | Flow: FlowHandler, 44 | GenAiFunction: Lwc, 45 | GenAiPlannerBundle: InResource, 46 | GlobalValueSetTranslation: InFile, 47 | Index: CustomObjectChildHandler, 48 | LightningComponentBundle: Lwc, 49 | LightningTypeBundle: InResource, 50 | ListView: CustomObjectChildHandler, 51 | MarketingAppExtension: InFile, 52 | MatchingRules: InFile, 53 | PermissionSet: ContainedDecomposed, 54 | Profile: InFile, 55 | RecordType: CustomObjectChildHandler, 56 | Report: ReportingFolderHandler, 57 | SharingCriteriaRule: Decomposed, 58 | SharingGuestRule: Decomposed, 59 | SharingOwnerRule: Decomposed, 60 | SharingReason: CustomObjectChildHandler, 61 | SharingRules: InFile, 62 | 
StandardValueSetTranslation: InFile, 63 | StaticResource: InResource, 64 | Territory2: Decomposed, 65 | Territory2Model: CustomObject, 66 | Territory2Rule: Decomposed, 67 | Translations: InFile, 68 | ValidationRule: CustomObjectChildHandler, 69 | VirtualBot: Bot, 70 | VirtualDiscovery: SharedFolder, 71 | VirtualModeration: SharedFolder, 72 | VirtualWave: SharedFolder, 73 | WaveTemplateBundle: InResource, 74 | WebLink: CustomObjectChildHandler, 75 | Workflow: InFile, 76 | WorkflowAlert: Decomposed, 77 | WorkflowFieldUpdate: Decomposed, 78 | WorkflowFlowAction: Decomposed, 79 | WorkflowKnowledgePublish: Decomposed, 80 | WorkflowOutboundMessage: Decomposed, 81 | WorkflowRule: Decomposed, 82 | WorkflowSend: Decomposed, 83 | WorkflowTask: Decomposed, 84 | } 85 | 86 | export default class TypeHandlerFactory { 87 | constructor( 88 | protected readonly work: Work, 89 | protected readonly metadata: MetadataRepository 90 | ) {} 91 | 92 | @log 93 | public getTypeHandler(line: string) { 94 | const type: Metadata = this.metadata.get(line)! 95 | const xmlName = type.xmlName as keyof typeof handlerMap 96 | return xmlName in handlerMap 97 | ? new handlerMap[xmlName](line, type, this.work, this.metadata) 98 | : new Standard(line, type, this.work, this.metadata) 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/botHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import BotHandler from '../../../../src/service/botHandler' 6 | import { Metadata } from '../../../../src/types/metadata' 7 | import type { Work } from '../../../../src/types/work' 8 | import { copyFiles } from '../../../../src/utils/fsHelper' 9 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 10 | 11 | jest.mock('../../../../src/utils/fsHelper') 12 | 13 | const objectType: Metadata = { 14 | directoryName: 'bots', 15 | inFolder: false, 16 | metaFile: true, 17 | content: [ 18 | { 19 | suffix: 'bot', 20 | xmlName: 'Bot', 21 | }, 22 | { 23 | suffix: 'botVersion', 24 | xmlName: 'BotVersion', 25 | }, 26 | ], 27 | } 28 | const line = 29 | 'A force-app/main/default/bots/TestBot/v1.botVersion-meta.xml' 30 | 31 | let work: Work 32 | beforeEach(() => { 33 | jest.clearAllMocks() 34 | work = getWork() 35 | }) 36 | 37 | describe('BotHandler', () => { 38 | let globalMetadata: MetadataRepository 39 | beforeAll(async () => { 40 | globalMetadata = await getGlobalMetadata() 41 | }) 42 | 43 | describe('when called for a bot', () => { 44 | it('should add the bot', async () => { 45 | // Arrange 46 | work.config.generateDelta = false 47 | const sut = new BotHandler( 48 | 'A force-app/main/default/bots/TestBot/TestBot.bot-meta.xml', 49 | objectType, 50 | work, 51 | globalMetadata 52 | ) 53 | 54 | // Act 55 | await sut.handleAddition() 56 | 57 | // Assert 58 | expect(work.diffs.package.get('Bot')).toEqual(new Set(['TestBot'])) 59 | expect(work.diffs.package.get('BotVersion')).toBeUndefined() 60 | expect(copyFiles).not.toHaveBeenCalled() 61 | }) 62 | }) 63 | 64 | describe('when called for a bot version', () => { 65 | describe('when called with generateDelta false', () => { 66 | it('should add the related bot', async () => { 67 | // Arrange 68 | work.config.generateDelta = false 69 | const sut = new BotHandler(line, objectType, work, 
globalMetadata) 70 | 71 | // Act 72 | await sut.handleAddition() 73 | 74 | // Assert 75 | expect(work.diffs.package.get('Bot')).toEqual(new Set(['TestBot'])) 76 | expect(work.diffs.package.get('BotVersion')).toEqual( 77 | new Set(['TestBot.v1']) 78 | ) 79 | expect(copyFiles).not.toHaveBeenCalled() 80 | }) 81 | }) 82 | 83 | describe('when called with generateDelta true', () => { 84 | it('should add and copy the related parent bot', async () => { 85 | const sut = new BotHandler(line, objectType, work, globalMetadata) 86 | 87 | // Act 88 | await sut.handleAddition() 89 | 90 | // Assert 91 | expect(work.diffs.package.get('Bot')).toEqual(new Set(['TestBot'])) 92 | expect(work.diffs.package.get('BotVersion')).toEqual( 93 | new Set(['TestBot.v1']) 94 | ) 95 | expect(copyFiles).toHaveBeenCalledTimes(4) 96 | expect(copyFiles).toHaveBeenCalledWith( 97 | work.config, 98 | `force-app/main/default/bots/TestBot/v1.botVersion-meta.xml` 99 | ) 100 | expect(copyFiles).toHaveBeenCalledWith( 101 | work.config, 102 | `force-app/main/default/bots/TestBot/v1.botVersion` 103 | ) 104 | expect(copyFiles).toHaveBeenCalledWith( 105 | work.config, 106 | `force-app/main/default/bots/TestBot/TestBot.bot` 107 | ) 108 | expect(copyFiles).toHaveBeenCalledWith( 109 | work.config, 110 | `force-app/main/default/bots/TestBot/TestBot.bot-meta.xml` 111 | ) 112 | }) 113 | }) 114 | }) 115 | }) 116 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/typeHandlerFactory.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import ContainedDecomposedHandler from '../../../../src/service/containedDecomposedHandler' 6 | import CustomField from '../../../../src/service/customFieldHandler' 7 | import CustomObjectChildHandler from '../../../../src/service/customObjectChildHandler' 8 | import Decomposed from '../../../../src/service/decomposedHandler' 9 | import FlowHandler from '../../../../src/service/flowHandler' 10 | import InFolder from '../../../../src/service/inFolderHandler' 11 | import InResource from '../../../../src/service/inResourceHandler' 12 | import ReportingFolderHandler from '../../../../src/service/reportingFolderHandler' 13 | import SharedFolder from '../../../../src/service/sharedFolderHandler' 14 | import Standard from '../../../../src/service/standardHandler' 15 | import TypeHandlerFactory from '../../../../src/service/typeHandlerFactory' 16 | import type { Work } from '../../../../src/types/work' 17 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 18 | 19 | describe('the type handler factory', () => { 20 | let typeHandlerFactory: TypeHandlerFactory 21 | beforeAll(async () => { 22 | const globalMetadata: MetadataRepository = await getGlobalMetadata() 23 | const work: Work = getWork() 24 | work.config.apiVersion = 46 25 | typeHandlerFactory = new TypeHandlerFactory(work, globalMetadata) 26 | }) 27 | describe.each([ 28 | [CustomField, ['fields']], 29 | [ContainedDecomposedHandler, ['permissionsets']], 30 | [ 31 | CustomObjectChildHandler, 32 | [ 33 | 'businessProcesses', 34 | 'compactLayouts', 35 | 'fieldSets', 36 | 'indexes', 37 | 'listViews', 38 | 'recordTypes', 39 | 'sharingReasons', 40 | 'validationRules', 41 | 'webLinks', 42 | ], 43 | ], 44 | [InFolder, ['documents']], 45 | [ReportingFolderHandler, ['dashboards', 
'reports']], 46 | [InResource, ['staticresources', 'aura', 'lwc']], 47 | [Standard, ['classes']], 48 | [SharedFolder, ['moderation', 'wave', 'discovery']], 49 | ])('give %p handler', (handler, types) => { 50 | it.each(types)('for %s folder', type => { 51 | expect( 52 | typeHandlerFactory.getTypeHandler( 53 | `Z force-app/main/default/${type}/folder/file` 54 | ) 55 | ).toBeInstanceOf(handler) 56 | }) 57 | }) 58 | 59 | it('can handle Decomposed', () => { 60 | expect( 61 | typeHandlerFactory.getTypeHandler( 62 | `Z force-app/main/default/objects/Account/fields/Test__c` 63 | ) 64 | ).toBeInstanceOf(Decomposed) 65 | }) 66 | 67 | it('can handle sub folder with Decomposed', () => { 68 | expect( 69 | typeHandlerFactory.getTypeHandler( 70 | `Z force-app/main/default/objects/folder/Account/fields/Test__c.field-meta.xml` 71 | ) 72 | ).toBeInstanceOf(Decomposed) 73 | }) 74 | 75 | it('can handle sub folder with non Decomposed', () => { 76 | expect( 77 | typeHandlerFactory.getTypeHandler( 78 | `Z force-app/main/default/documents/classes/TestDocument` 79 | ) 80 | ).toBeInstanceOf(InFolder) 81 | }) 82 | 83 | it('can handle Flow', () => { 84 | expect( 85 | typeHandlerFactory.getTypeHandler( 86 | `Z force-app/main/default/flows/MyFlow.flow-meta.xml` 87 | ) 88 | ).toBeInstanceOf(FlowHandler) 89 | }) 90 | 91 | it.each([ 92 | 'force-app/main/default/TestClass.cls', 93 | 'force-app/main/default/TestClass.cls-meta.xml', 94 | 'force-app/main/default/admin.profile-meta.xml', 95 | 'force-app/main/default/admin.permissionset-meta.xml', 96 | ])('can handle metadata outside its folder', line => { 97 | expect(typeHandlerFactory.getTypeHandler(`Z ${line}`)).toBeInstanceOf( 98 | Standard 99 | ) 100 | }) 101 | }) 102 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/inFolderHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { METAFILE_SUFFIX } from '../../../../src/constant/metadataConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import InFolder from '../../../../src/service/inFolderHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { copyFiles, readDirs } from '../../../../src/utils/fsHelper' 9 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 10 | 11 | jest.mock('../../../../src/utils/fsHelper') 12 | const mockedReadDirs = jest.mocked(readDirs) 13 | 14 | const entity = 'folder/test' 15 | const extension = 'document' 16 | const objectType = { 17 | directoryName: 'documents', 18 | inFolder: true, 19 | metaFile: true, 20 | suffix: 'document', 21 | xmlName: 'Document', 22 | } 23 | const xmlName = 'Document' 24 | const line = `A force-app/main/default/${objectType.directoryName}/${entity}.${extension}-meta.xml` 25 | 26 | let work: Work 27 | beforeEach(() => { 28 | jest.clearAllMocks() 29 | work = getWork() 30 | }) 31 | 32 | describe('InFolderHandler', () => { 33 | let globalMetadata: MetadataRepository 34 | beforeAll(async () => { 35 | globalMetadata = await getGlobalMetadata() 36 | }) 37 | 38 | describe('when called with generateDelta false', () => { 39 | beforeEach(() => { 40 | work.config.generateDelta = false 41 | }) 42 | it('should not copy meta files nor copy special extension', async () => { 43 | // Arrange 44 | const sut = new InFolder(line, objectType, work, globalMetadata) 45 | 46 | // Act 47 | await 
sut.handleAddition() 48 | 49 | // Assert 50 | expect(work.diffs.package.get(xmlName)).toEqual(new Set([entity])) 51 | expect(copyFiles).not.toHaveBeenCalled() 52 | }) 53 | }) 54 | describe('when called with generateDelta true', () => { 55 | beforeEach(() => { 56 | work.config.generateDelta = true 57 | }) 58 | 59 | describe('when readDirs does not return files', () => { 60 | it('should not copy special extension and copy meta files', async () => { 61 | // Arrange 62 | const sut = new InFolder(line, objectType, work, globalMetadata) 63 | mockedReadDirs.mockImplementation(() => Promise.resolve([])) 64 | 65 | // Act 66 | await sut.handleAddition() 67 | 68 | // Assert 69 | expect(work.diffs.package.get(xmlName)).toEqual(new Set([entity])) 70 | expect(readDirs).toHaveBeenCalledTimes(1) 71 | expect(copyFiles).toHaveBeenCalledTimes(3) 72 | expect(copyFiles).toHaveBeenCalledWith( 73 | work.config, 74 | expect.stringContaining(METAFILE_SUFFIX) 75 | ) 76 | }) 77 | }) 78 | 79 | describe('when readDirs returns files', () => { 80 | it('should copy special extension', async () => { 81 | // Arrange 82 | const sut = new InFolder(line, objectType, work, globalMetadata) 83 | mockedReadDirs.mockImplementationOnce(() => 84 | Promise.resolve([entity, 'not/matching']) 85 | ) 86 | 87 | // Act 88 | await sut.handleAddition() 89 | 90 | // Assert 91 | expect(work.diffs.package.get(xmlName)).toEqual(new Set([entity])) 92 | expect(readDirs).toHaveBeenCalledTimes(1) 93 | expect(copyFiles).toHaveBeenCalledTimes(5) 94 | }) 95 | }) 96 | }) 97 | describe('when the line should not be processed', () => { 98 | it.each([ 99 | `force-app/main/default/${objectType.directoryName}/test.otherExtension`, 100 | ])('does not handle the line', async entityPath => { 101 | // Arrange 102 | const sut = new InFolder( 103 | `A ${entityPath}`, 104 | objectType, 105 | work, 106 | globalMetadata 107 | ) 108 | 109 | // Act 110 | await sut.handle() 111 | 112 | // Assert 113 | expect(work.diffs.package.size).toBe(0) 114 | expect(copyFiles).not.toHaveBeenCalled() 115 | }) 116 | }) 117 | }) 118 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/objectTranslationHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import ObjectTranslation from '../../../../src/service/objectTranslationHandler' 6 | import type { Work } from '../../../../src/types/work' 7 | import { copyFiles, writeFile } from '../../../../src/utils/fsHelper' 8 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 9 | 10 | const mockCompare = jest.fn() 11 | const mockprune = jest.fn() 12 | jest.mock('../../../../src/utils/metadataDiff', () => { 13 | return { 14 | default: jest.fn().mockImplementation(() => { 15 | return { compare: mockCompare, prune: mockprune } 16 | }), 17 | } 18 | }) 19 | 20 | jest.mock('../../../../src/utils/fsHelper') 21 | 22 | const objectType = { 23 | directoryName: 'objectTranslations', 24 | inFolder: false, 25 | metaFile: false, 26 | suffix: 'objectTranslation', 27 | xmlName: 'CustomObjectTranslation', 28 | pruneOnly: true, 29 | } 30 | const xmlName = 'CustomObjectTranslation' 31 | const line = 32 | 'A force-app/main/default/objectTranslations/Account-es/Account-es.objectTranslation-meta.xml' 33 | 34 | const xmlContent = '' 35 | mockprune.mockReturnValue({ 
xmlContent }) 36 | 37 | let work: Work 38 | beforeEach(() => { 39 | jest.clearAllMocks() 40 | work = getWork() 41 | }) 42 | 43 | describe('ObjectTranslation', () => { 44 | let globalMetadata: MetadataRepository 45 | beforeAll(async () => { 46 | globalMetadata = await getGlobalMetadata() 47 | }) 48 | 49 | describe('when called with generateDelta false', () => { 50 | it('should not copy files', async () => { 51 | // Arrange 52 | work.config.generateDelta = false 53 | const sut = new ObjectTranslation(line, objectType, work, globalMetadata) 54 | 55 | // Act 56 | await sut.handleAddition() 57 | 58 | // Assert 59 | expect(writeFile).not.toHaveBeenCalled() 60 | expect(Array.from(work.diffs.package.get(xmlName)!)).toEqual([ 61 | 'Account-es', 62 | ]) 63 | }) 64 | }) 65 | 66 | describe('when called with generateDelta true', () => { 67 | it('should copy object translations files', async () => { 68 | // Arrange 69 | const sut = new ObjectTranslation(line, objectType, work, globalMetadata) 70 | 71 | // Act 72 | await sut.handleAddition() 73 | 74 | // Assert 75 | expect(copyFiles).not.toHaveBeenCalled() 76 | expect(writeFile).toHaveBeenCalledTimes(1) 77 | expect(writeFile).toHaveBeenCalledWith( 78 | expect.stringContaining('Account-es.objectTranslation'), 79 | xmlContent, 80 | work.config 81 | ) 82 | expect(Array.from(work.diffs.package.get(xmlName)!)).toEqual([ 83 | 'Account-es', 84 | ]) 85 | }) 86 | 87 | describe('when called with fieldTranslation', () => { 88 | const fieldTranslationline = 89 | 'A force-app/main/default/objectTranslations/Account-es/BillingFloor__c.fieldTranslation-meta.xml' 90 | it('should copy object translations files and fieldTranslation', async () => { 91 | // Arrange 92 | const sut = new ObjectTranslation( 93 | fieldTranslationline, 94 | objectType, 95 | work, 96 | globalMetadata 97 | ) 98 | 99 | // Act 100 | await sut.handleAddition() 101 | 102 | // Assert 103 | expect(copyFiles).toHaveBeenCalledTimes(2) 104 | expect(copyFiles).toHaveBeenCalledWith( 105 | work.config, 106 | expect.stringContaining('BillingFloor__c.fieldTranslation') 107 | ) 108 | expect(writeFile).toHaveBeenCalledTimes(1) 109 | expect(writeFile).toHaveBeenCalledWith( 110 | expect.stringContaining('Account-es.objectTranslation'), 111 | xmlContent, 112 | work.config 113 | ) 114 | expect(Array.from(work.diffs.package.get(xmlName)!)).toEqual([ 115 | 'Account-es', 116 | ]) 117 | }) 118 | }) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /.github/linters/.cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignorePaths": [ 3 | "**/CHANGELOG.md", 4 | "**/.git/**", 5 | "**/megalinter.yml", 6 | "**/node_modules/**", 7 | "**/src/metadata/**", 8 | "**/src/service/typeHandlerFactory.ts" 9 | ], 10 | "language": "en", 11 | "noConfigSearch": true, 12 | "version": "0.2", 13 | "words": [ 14 | "\u00c0gain", 15 | "afile", 16 | "amannn", 17 | "anotherignore", 18 | "apexskier", 19 | "apiversion", 20 | "appli", 21 | "authprovider", 22 | "authproviders", 23 | "AUTOUPDATE", 24 | "behaviour", 25 | "benchmarkjs", 26 | "biomejs", 27 | "brqh", 28 | "cherfaoui", 29 | "cloudity", 30 | "codeclimate", 31 | "codecov", 32 | "codeowners", 33 | "colladon", 34 | "commandsstop", 35 | "commitlint", 36 | "contentassets", 37 | "customindex", 38 | "datacategorygroup", 39 | "datacategorygroups", 40 | "destructiveignore", 41 | "destructiveinclude", 42 | "emailservices", 43 | "emailservicesfunction", 44 | "experiencebundle", 45 | "firstsha", 46 
| "flexi", 47 | "flexipage", 48 | "flexipages", 49 | "flowtest", 50 | "flowtests", 51 | "flowtype", 52 | "forceignore", 53 | "forceinclude", 54 | "friendlyname", 55 | "fullname", 56 | "geodata", 57 | "gitdir", 58 | "hardis", 59 | "hardlinks", 60 | "iframe", 61 | "ignorewarnings", 62 | "iife", 63 | "indx", 64 | "infile", 65 | "instantiator", 66 | "jwalton", 67 | "knip", 68 | "lcov", 69 | "linebreak", 70 | "lintstagedrc", 71 | "loglevel", 72 | "lwcc", 73 | "mcarvin", 74 | "mdapi", 75 | "megalinter", 76 | "mehdi", 77 | "mehdicherf", 78 | "metafile", 79 | "mjyhjbm", 80 | "mkdirs", 81 | "mocharc", 82 | "mockprune", 83 | "multilines", 84 | "mutingpermissionset", 85 | "mutingpermissionsets", 86 | "myexperiencebundle", 87 | "namerc", 88 | "nimn", 89 | "nonblock", 90 | "nonoctal", 91 | "notblank", 92 | "notexist", 93 | "notificationtypes", 94 | "notiftype", 95 | "numstat", 96 | "nycrc", 97 | "oauthcustomscope", 98 | "oauthcustomscopes", 99 | "oclif", 100 | "omni", 101 | "omnistudio", 102 | "oxsecurity", 103 | "parens", 104 | "pastsha", 105 | "permissionset", 106 | "permissionsetgroup", 107 | "permissionsetgroups", 108 | "permissionsets", 109 | "piquouze", 110 | "postdestructivechanges", 111 | "postpack", 112 | "predestructivechanges", 113 | "predicat", 114 | "quotepath", 115 | "recentsha", 116 | "renatoliveira", 117 | "repogitdiff", 118 | "revparse", 119 | "rulesets", 120 | "samlssoconfig", 121 | "samlssoconfigs", 122 | "scolladon", 123 | "scontrol", 124 | "scontrols", 125 | "sebastien", 126 | "sfdx", 127 | "sgdignore", 128 | "sgdinclude", 129 | "sgdincludedestructive", 130 | "shellcheck", 131 | "sitedotcom", 132 | "SOQL", 133 | "staticresource", 134 | "staticresources", 135 | "stefanzweifel", 136 | "struc", 137 | "subfolders", 138 | "targetname", 139 | "testkit", 140 | "testlevel", 141 | "testtest", 142 | "thollander", 143 | "translationline", 144 | "trivyignore", 145 | "tsbuildinfo", 146 | "unmanaged", 147 | "unmocked", 148 | "unpackaged", 149 | "venv", 150 | "wagoid", 151 | "wapp", 152 | "wavehandler", 153 | "wcomp", 154 | "wdash", 155 | "wdpr", 156 | "wearerequired", 157 | "weblink", 158 | "weblinks", 159 | "wireit", 160 | "wlens", 161 | "xmlbuilder" 162 | ] 163 | } -------------------------------------------------------------------------------- /src/utils/cliHelper.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { join } from 'node:path/posix' 3 | 4 | import GitAdapter from '../adapter/GitAdapter.js' 5 | import { GIT_FOLDER } from '../constant/gitConstants.js' 6 | import { 7 | getLatestSupportedVersion, 8 | isVersionSupported, 9 | } from '../metadata/metadataManager.js' 10 | import type { Config } from '../types/config.js' 11 | import type { Work } from '../types/work.js' 12 | import { fileExists, pathExists, readFile, sanitizePath } from './fsUtils.js' 13 | import { log } from './LoggingDecorator.js' 14 | import { MessageService } from './MessageService.js' 15 | 16 | const TO: keyof Config = 'to' 17 | const FROM: keyof Config = 'from' 18 | 19 | const SOURCE_API_VERSION_ATTRIBUTE = 'sourceApiVersion' 20 | const SFDX_PROJECT_FILE_NAME = 'sfdx-project.json' 21 | 22 | export default class CLIHelper { 23 | protected readonly config: Config 24 | protected readonly gitAdapter: GitAdapter 25 | protected readonly message: MessageService 26 | 27 | constructor(protected readonly work: Work) { 28 | this.config = work.config 29 | this.gitAdapter = GitAdapter.getInstance(work.config) 30 | this.message = new MessageService() 31 | } 32 | 33 | 
protected async _validateGitSha() { 34 | const errors: string[] = [] 35 | 36 | await Promise.all( 37 | [FROM, TO].map(async (shaParameter: keyof Config) => { 38 | const shaValue: string = this.config[shaParameter] as string 39 | try { 40 | const ref: string = await this.gitAdapter.parseRev(shaValue) 41 | ;(this.config[shaParameter] as string) = ref 42 | } catch { 43 | errors.push( 44 | this.message.getMessage('error.ParameterIsNotGitSHA', [ 45 | shaParameter, 46 | shaValue, 47 | ]) 48 | ) 49 | } 50 | }) 51 | ) 52 | 53 | return errors 54 | } 55 | 56 | @log 57 | public async validateConfig() { 58 | this._sanitizeConfig() 59 | await this._handleDefault() 60 | const errors: string[] = [] 61 | 62 | const repoExists = await pathExists(join(this.config.repo, GIT_FOLDER)) 63 | if (!repoExists) { 64 | errors.push( 65 | this.message.getMessage('error.PathIsNotGit', [this.config.repo]) 66 | ) 67 | } 68 | 69 | const gitErrors = await this._validateGitSha() 70 | errors.push(...gitErrors) 71 | 72 | if (errors.length > 0) { 73 | throw new Error(errors.join(', ')) 74 | } 75 | 76 | await this.gitAdapter.configureRepository() 77 | } 78 | 79 | protected async _handleDefault() { 80 | await this._getApiVersion() 81 | await this._apiVersionDefault() 82 | } 83 | 84 | protected async _getApiVersion() { 85 | const isInputVersionSupported = isVersionSupported(this.config.apiVersion) 86 | if (!isInputVersionSupported) { 87 | const sfdxProjectPath = join(this.config.repo, SFDX_PROJECT_FILE_NAME) 88 | const exists = await fileExists(sfdxProjectPath) 89 | if (exists) { 90 | const sfdxProjectRaw = await readFile(sfdxProjectPath) 91 | const sfdxProject = JSON.parse(sfdxProjectRaw) 92 | this.config.apiVersion = 93 | parseInt(sfdxProject[SOURCE_API_VERSION_ATTRIBUTE]) || -1 94 | } 95 | } 96 | } 97 | 98 | protected _apiVersionDefault() { 99 | const isInputVersionSupported = isVersionSupported(this.config.apiVersion) 100 | 101 | if (!isInputVersionSupported) { 102 | const latestAPIVersionSupported = getLatestSupportedVersion() 103 | if ( 104 | this.config.apiVersion !== undefined && 105 | this.config.apiVersion !== null 106 | ) { 107 | this.work.warnings.push( 108 | new Error( 109 | this.message.getMessage('warning.ApiVersionNotSupported', [ 110 | `${latestAPIVersionSupported}`, 111 | ]) 112 | ) 113 | ) 114 | } 115 | this.config.apiVersion = latestAPIVersionSupported 116 | } 117 | } 118 | 119 | protected _sanitizeConfig() { 120 | this.config.repo = sanitizePath(this.config.repo)! 121 | this.config.source = this.config.source.map(source => sanitizePath(source)!) 122 | this.config.output = sanitizePath(this.config.output)! 
123 | this.config.ignore = sanitizePath(this.config.ignore) 124 | this.config.ignoreDestructive = sanitizePath(this.config.ignoreDestructive) 125 | this.config.include = sanitizePath(this.config.include) 126 | this.config.includeDestructive = sanitizePath( 127 | this.config.includeDestructive 128 | ) 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /__tests__/unit/lib/utils/fxpHelper.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import type { Config } from '../../../../src/types/config' 5 | import { readPathFromGit } from '../../../../src/utils/fsHelper' 6 | import { 7 | convertJsonToXml, 8 | parseXmlFileToJson, 9 | xml2Json, 10 | } from '../../../../src/utils/fxpHelper' 11 | 12 | const mockedReadPathFromGit = jest.mocked(readPathFromGit) 13 | 14 | jest.mock('../../../../src/utils/fsHelper') 15 | 16 | describe('fxpHelper', () => { 17 | describe('parseXmlFileToJson', () => { 18 | const config: Config = { 19 | from: '', 20 | to: '', 21 | output: '', 22 | source: [''], 23 | ignore: '', 24 | ignoreDestructive: '', 25 | apiVersion: 0, 26 | repo: '', 27 | ignoreWhitespace: false, 28 | generateDelta: false, 29 | include: '', 30 | includeDestructive: '', 31 | } 32 | describe('when called with empty content', () => { 33 | beforeEach(() => { 34 | // Arrange 35 | mockedReadPathFromGit.mockResolvedValueOnce('') 36 | }) 37 | it('returns empty object', async () => { 38 | // Act 39 | const jsonResult = await parseXmlFileToJson( 40 | { path: 'path/to/empty/file', oid: config.to }, 41 | config 42 | ) 43 | 44 | // Assert 45 | expect(jsonResult).toStrictEqual({}) 46 | }) 47 | }) 48 | describe('when called with xml content', () => { 49 | beforeEach(() => { 50 | // Arrange 51 | mockedReadPathFromGit.mockResolvedValueOnce( 52 | '<root a="nice"><a>wow</a></root>' 53 | ) 54 | }) 55 | it('returns json content', async () => { 56 | // Act 57 | const jsonContent = await parseXmlFileToJson( 58 | { path: 'path/to/empty/file', oid: config.to }, 59 | config 60 | ) 61 | 62 | // Assert 63 | expect(jsonContent).toEqual({ root: { '@_a': 'nice', a: 'wow' } }) 64 | }) 65 | }) 66 | describe('when called with non xml content', () => { 67 | beforeEach(() => { 68 | // Arrange 69 | mockedReadPathFromGit.mockResolvedValueOnce('{"attribute": "value"}') 70 | }) 71 | it('returns empty object', async () => { 72 | // Act 73 | const jsonContent = await parseXmlFileToJson( 74 | { path: 'path/to/empty/file', oid: config.to }, 75 | config 76 | ) 77 | 78 | // Assert 79 | expect(jsonContent).toStrictEqual({}) 80 | }) 81 | }) 82 | }) 83 | 84 | describe('convertJsonToXml', () => { 85 | describe('when called with empty object', () => { 86 | it('returns empty object', () => { 87 | // Act 88 | const xmlResult = convertJsonToXml({}) 89 | 90 | // Assert 91 | expect(xmlResult).toEqual('') 92 | }) 93 | }) 94 | describe('when called with json content', () => { 95 | it('returns json content', () => { 96 | // Act 97 | const xmlResult = convertJsonToXml({ 98 | root: { '@_a': 'nice', a: 'wow' }, 99 | }) 100 | 101 | // Assert 102 | expect(xmlResult).toEqual( 103 | `<root a="nice"> 104 |   <a>wow</a> 105 | </root> 106 | ` 107 | ) 108 | }) 109 | }) 110 | describe('when called with non json content', () => { 111 | it('returns empty object', () => { 112 | // Act 113 | const jsonContent = convertJsonToXml('s') 114 | 115 | // Assert 116 | expect(jsonContent).toStrictEqual(`<0>s</0> 117 | `) 118 | }) 119 | }) 120 | }) 121 | 122 |
describe('xml2Json', () => { 123 | describe('when called with empty content', () => { 124 | it('returns empty object', () => { 125 | // Act 126 | const jsonResult = xml2Json('') 127 | 128 | // Assert 129 | expect(jsonResult).toStrictEqual({}) 130 | }) 131 | }) 132 | describe('when called with xml content', () => { 133 | it('returns json content', async () => { 134 | // Act 135 | const jsonContent = await xml2Json( 136 | '<root a="nice"><a>wow</a></root>' 137 | ) 138 | 139 | // Assert 140 | expect(jsonContent).toEqual({ root: { '@_a': 'nice', a: 'wow' } }) 141 | }) 142 | }) 143 | describe('when called with non xml content', () => { 144 | it('returns empty object', async () => { 145 | // Act 146 | const jsonContent = await xml2Json( 147 | JSON.stringify({ attribute: 'value' }) 148 | ) 149 | 150 | // Assert 151 | expect(jsonContent).toStrictEqual({}) 152 | }) 153 | }) 154 | }) 155 | }) 156 | -------------------------------------------------------------------------------- /__tests__/unit/lib/post-processor/includeProcessor.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 5 | import IncludeProcessor from '../../../../src/post-processor/includeProcessor' 6 | import type { Work } from '../../../../src/types/work' 7 | import { 8 | buildIncludeHelper, 9 | IgnoreHelper, 10 | } from '../../../../src/utils/ignoreHelper' 11 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 12 | 13 | const mockProcess = jest.fn() 14 | jest.mock('../../../../src/service/diffLineInterpreter', () => { 15 | return { 16 | default: jest.fn().mockImplementation(() => { 17 | return { 18 | process: mockProcess, 19 | } 20 | }), 21 | } 22 | }) 23 | 24 | const mockGetFilesPath = jest.fn() 25 | jest.mock('../../../../src/adapter/GitAdapter', () => ({ 26 | default: { 27 | getInstance: jest.fn(() => ({ 28 | getFilesPath: mockGetFilesPath, 29 | getFirstCommitRef: jest.fn(), 30 | })), 31 | }, 32 | })) 33 | 34 | jest.mock('../../../../src/utils/ignoreHelper') 35 | const mockedBuildIncludeHelper = jest.mocked(buildIncludeHelper) 36 | 37 | const mockKeep = jest.fn() 38 | mockedBuildIncludeHelper.mockResolvedValue({ 39 | keep: mockKeep, 40 | } as unknown as IgnoreHelper) 41 | 42 | describe('IncludeProcessor', () => { 43 | let work: Work 44 | let metadata: MetadataRepository 45 | 46 | beforeAll(async () => { 47 | metadata = await getGlobalMetadata() 48 | }) 49 | 50 | beforeEach(() => { 51 | work = getWork() 52 | jest.clearAllMocks() 53 | }) 54 | 55 | describe('when no include is configured', () => { 56 | it('does not process include', async () => { 57 | // Arrange 58 | const sut = new IncludeProcessor(work, metadata) 59 | 60 | // Act 61 | await sut.process() 62 | 63 | // Assert 64 | expect(mockedBuildIncludeHelper).not.toHaveBeenCalled() 65 | }) 66 | }) 67 | 68 | describe('when include is configured', () => { 69 | beforeAll(() => { 70 | mockGetFilesPath.mockImplementation(() => Promise.resolve(['test'])) 71 | }) 72 | 73 | describe('when no file matches the patterns', () => { 74 | beforeEach(() => { 75 | mockKeep.mockReturnValue(true) 76 | }) 77 | it('does not process include', async () => { 78 | // Arrange 79 | work.config.include = '.sgdinclude' 80 | const sut = new IncludeProcessor(work, metadata) 81 | 82 | // Act 83 | await sut.process() 84 | 85 | // Assert 86 | expect(mockedBuildIncludeHelper).toHaveBeenCalled() 87 |
expect(mockProcess).not.toHaveBeenCalled() 88 | }) 89 | }) 90 | 91 | describe('when file matches the patterns', () => { 92 | beforeEach(() => { 93 | mockKeep.mockReturnValue(false) 94 | }) 95 | it('process include', async () => { 96 | // Arrange 97 | work.config.include = '.sgdinclude' 98 | const sut = new IncludeProcessor(work, metadata) 99 | 100 | // Act 101 | await sut.process() 102 | 103 | // Assert 104 | expect(mockedBuildIncludeHelper).toHaveBeenCalled() 105 | expect(mockProcess).toHaveBeenCalled() 106 | }) 107 | }) 108 | }) 109 | 110 | describe('when includeDestructive is configured', () => { 111 | beforeAll(() => { 112 | mockGetFilesPath.mockImplementation(() => Promise.resolve(['test'])) 113 | }) 114 | describe('when no file matches the patterns', () => { 115 | beforeEach(() => { 116 | mockKeep.mockReturnValue(true) 117 | }) 118 | it('does not process include destructive', async () => { 119 | // Arrange 120 | work.config.includeDestructive = '.sgdincludedestructive' 121 | const sut = new IncludeProcessor(work, metadata) 122 | 123 | // Act 124 | await sut.process() 125 | 126 | // Assert 127 | expect(mockedBuildIncludeHelper).toHaveBeenCalled() 128 | expect(mockProcess).not.toHaveBeenCalled() 129 | }) 130 | }) 131 | 132 | describe('when file matches the patterns', () => { 133 | beforeEach(() => { 134 | mockKeep.mockReturnValue(false) 135 | }) 136 | it('process include destructive', async () => { 137 | // Arrange 138 | work.config.includeDestructive = '.sgdincludedestructive' 139 | const sut = new IncludeProcessor(work, metadata) 140 | 141 | // Act 142 | await sut.process() 143 | 144 | // Assert 145 | expect(mockedBuildIncludeHelper).toHaveBeenCalled() 146 | expect(mockProcess).toHaveBeenCalled() 147 | }) 148 | }) 149 | }) 150 | }) 151 | -------------------------------------------------------------------------------- /__tests__/unit/lib/utils/packageHelper.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it } from '@jest/globals' 3 | 4 | import type { Config } from '../../../../src/types/config' 5 | import type { Manifest } from '../../../../src/types/work' 6 | import PackageBuilder, { 7 | fillPackageWithParameter, 8 | } from '../../../../src/utils/packageHelper' 9 | 10 | const config: Config = { 11 | apiVersion: 46, 12 | to: '', 13 | from: '', 14 | output: '', 15 | source: [''], 16 | ignore: '', 17 | ignoreDestructive: '', 18 | repo: '', 19 | ignoreWhitespace: false, 20 | generateDelta: false, 21 | include: '', 22 | includeDestructive: '', 23 | } 24 | const tests = [ 25 | [ 26 | 'Object', 27 | new Map( 28 | Object.entries({ 29 | CustomObject: new Set([ 30 | 'Object', 31 | 'YetAnotherObject', 32 | 'OtherObject', 33 | 'AgainAnObject', 34 | 'ÀgainAndAgainAnObject', 35 | ]), 36 | }) 37 | ), 38 | `<?xml version="1.0" encoding="UTF-8"?> 39 | <Package xmlns="http://soap.sforce.com/2006/04/metadata"> 40 |     <types> 41 |         <members>ÀgainAndAgainAnObject</members> 42 |         <members>AgainAnObject</members> 43 |         <members>Object</members> 44 |         <members>OtherObject</members> 45 |         <members>YetAnotherObject</members> 46 |         <name>CustomObject</name> 47 |     </types> 48 |     <version>${config.apiVersion}.0</version> 49 | </Package>`, 50 | ], 51 | [ 52 | 'empty', 53 | new Map(), 54 | `<?xml version="1.0" encoding="UTF-8"?> 55 | <Package xmlns="http://soap.sforce.com/2006/04/metadata"> 56 |     <version>${config.apiVersion}.0</version> 57 | </Package>`, 58 | ], 59 | [ 60 | 'full', 61 | new Map( 62 | Object.entries({ 63 | CustomField: new Set(['Field']), 64 | CustomObject: new Set(['Object', 'YetAnotherObject', 'OtherObject']), 65 | Dashboard: new Set(['Dashboard']), 66 | Document: new Set(['Document']), 67 | LightningComponentBundle: new Set(['Component']), 68 | WaveLens: new Set(['Lens']), 69 | WaveRecipe: new Set(['Recipe']), 70 | }) 71 | ), 72 | `<?xml version="1.0" encoding="UTF-8"?> 73 | <Package xmlns="http://soap.sforce.com/2006/04/metadata"> 74 |     <types> 75 |         <members>Object</members> 76 |         <members>OtherObject</members> 77 |
        <members>YetAnotherObject</members> 78 |         <name>CustomObject</name> 79 |     </types> 80 |     <types> 81 |         <members>Field</members> 82 |         <name>CustomField</name> 83 |     </types> 84 |     <types> 85 |         <members>Dashboard</members> 86 |         <name>Dashboard</name> 87 |     </types> 88 |     <types> 89 |         <members>Document</members> 90 |         <name>Document</name> 91 |     </types> 92 |     <types> 93 |         <members>Component</members> 94 |         <name>LightningComponentBundle</name> 95 |     </types> 96 |     <types> 97 |         <members>Lens</members> 98 |         <name>WaveLens</name> 99 |     </types> 100 |     <types> 101 |         <members>Recipe</members> 102 |         <name>WaveRecipe</name> 103 |     </types> 104 |     <version>${config.apiVersion}.0</version> 105 | </Package>`, 106 | ], 107 | [ 108 | 'WaveApplication', 109 | new Map([['WaveApplication', new Set(['aWaveApp'])]]), 110 | `<?xml version="1.0" encoding="UTF-8"?> 111 | <Package xmlns="http://soap.sforce.com/2006/04/metadata"> 112 |     <types> 113 |         <members>aWaveApp</members> 114 |         <name>WaveApplication</name> 115 |     </types> 116 |     <version>${config.apiVersion}.0</version> 117 | </Package>`, 118 | ], 119 | ] 120 | 121 | describe(`test if package builder`, () => { 122 | let packageConstructor: PackageBuilder 123 | beforeAll(async () => { 124 | packageConstructor = new PackageBuilder(config) 125 | }) 126 | 127 | it.each(tests)('can build %s manifest', (_, diff, expected) => { 128 | expect(packageConstructor.buildPackage(diff as Manifest)).toBe(expected) 129 | }) 130 | }) 131 | 132 | describe('fillPackageWithParameter', () => { 133 | describe('when called with proper params', () => { 134 | const type = 'test-type' 135 | const member = 'test-name' 136 | describe.each([ 137 | [new Map(), 'is empty'], 138 | [new Map([['other-type', new Set(['other-name'])]]), 'is not empty'], 139 | [new Map([[type, new Set()]]), 'contains the type'], 140 | [ 141 | new Map([[type, new Set([member])]]), 142 | 'contains the type and the element', 143 | ], 144 | ])('when the package %o %s', store => { 145 | it('adds the element name under the type in the package', () => { 146 | // Arrange 147 | const params = { 148 | store, 149 | type: type, 150 | member, 151 | } 152 | 153 | // Act 154 | fillPackageWithParameter(params) 155 | 156 | // Assert 157 | expect(store.get(type).has(member)).toBeTruthy() 158 | }) 159 | }) 160 | }) 161 | }) 162 | -------------------------------------------------------------------------------- /src/metadata/MetadataRepositoryImpl.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | import { parse } from 'node:path/posix' 4 | 5 | import { DOT, PATH_SEP } from '../constant/fsConstants.js' 6 | import { 7 | CUSTOM_APPLICATION_SUFFIX, 8 | CUSTOM_METADATA_SUFFIX, 9 | EMAIL_SERVICES_FUNCTION_SUFFIX, 10 | METAFILE_SUFFIX, 11 | OBJECT_TRANSLATION_TYPE, 12 | OBJECT_TYPE, 13 | PERMISSIONSET_TYPE, 14 | SHARING_RULE_TYPE, 15 | SUB_OBJECT_TYPES, 16 | TERRITORY_MODEL_TYPE, 17 | WORKFLOW_TYPE, 18 | } from '../constant/metadataConstants.js' 19 | import type { Metadata } from '../types/metadata.js' 20 | import { log } from '../utils/LoggingDecorator.js' 21 | import { MetadataRepository } from './MetadataRepository.js' 22 | 23 | export class MetadataRepositoryImpl implements MetadataRepository { 24 | protected readonly metadataPerExt: Map<string, Metadata> 25 | protected readonly metadataPerDir: Map<string, Metadata> 26 | protected readonly metadataPerXmlName: Map<string, Metadata> 27 | constructor(protected readonly metadatas: Metadata[]) { 28 | this.metadataPerExt = new Map() 29 | this.metadataPerDir = new Map() 30 | this.metadataPerXmlName = new Map() 31 | 32 | this.metadatas.forEach(metadata => { 33 | this.addSuffix(metadata) 34 | this.addFolder(metadata) 35 | this.addXmlName(metadata) 36 | }) 37 | } 38 | 39 | protected addSuffix(metadata: Metadata) { 40 | if (metadata.suffix) { 41 | if (this.metadataPerExt.has(metadata.suffix)) { 42 | MetadataRepositoryImpl.UNSAFE_EXTENSION.add(metadata.suffix) 43 | } else { 44 | this.metadataPerExt.set(metadata.suffix, metadata) 45 | } 46 | } 47 | this.addSharedFolderSuffix(metadata) 48 | } 49 | 50 | protected
addSharedFolderSuffix(metadata: Metadata) { 51 | if (metadata.content) { 52 | const metadataWithoutContent = { 53 | ...metadata, 54 | content: undefined, 55 | } 56 | for (const sharedFolderMetadataDef of metadata.content) { 57 | this.addSuffix({ 58 | ...metadataWithoutContent, 59 | suffix: sharedFolderMetadataDef.suffix, 60 | } as unknown as Metadata) 61 | } 62 | } 63 | } 64 | 65 | protected addFolder(metadata: Metadata) { 66 | if (metadata.directoryName) { 67 | this.metadataPerDir.set(metadata.directoryName, metadata) 68 | } 69 | } 70 | 71 | protected addXmlName(metadata: Metadata) { 72 | if (metadata.xmlName) { 73 | this.metadataPerXmlName.set(metadata.xmlName, metadata) 74 | } 75 | } 76 | 77 | public has(path: string): boolean { 78 | return !!this.get(path) 79 | } 80 | 81 | public get(path: string): Metadata | undefined { 82 | const parts = path.split(PATH_SEP) 83 | return ( 84 | this.searchByExtension(parts) ?? 85 | this.searchByDirectory(parts) ?? 86 | this.searchByXmlName(path) 87 | ) 88 | } 89 | 90 | protected searchByExtension(parts: string[]): Metadata | undefined { 91 | const extension = parse( 92 | parts[parts.length - 1].replace(METAFILE_SUFFIX, '') 93 | ).ext.replace(DOT, '') 94 | 95 | if (MetadataRepositoryImpl.UNSAFE_EXTENSION.has(extension)) { 96 | return 97 | } 98 | return this.metadataPerExt.get(extension) 99 | } 100 | 101 | protected searchByDirectory(parts: string[]): Metadata | undefined { 102 | let metadata: Metadata | undefined 103 | for (const part of parts) { 104 | metadata = this.metadataPerDir.get(part) ?? metadata 105 | if ( 106 | metadata && 107 | !MetadataRepositoryImpl.TYPES_WITH_SUB_TYPES.has(metadata.xmlName!) 108 | ) { 109 | break 110 | } 111 | } 112 | return metadata 113 | } 114 | 115 | protected searchByXmlName(xmlName: string): Metadata | undefined { 116 | return this.metadataPerXmlName.get(xmlName) 117 | } 118 | 119 | @log 120 | public getFullyQualifiedName(path: string): string { 121 | let fullyQualifiedName = parse(path).base 122 | const type = this.get(path) 123 | if (type && MetadataRepositoryImpl.COMPOSED_TYPES.has(type.xmlName!)) { 124 | const parentType = path 125 | .split(PATH_SEP) 126 | .find(part => this.metadataPerDir.has(part))! 
127 | fullyQualifiedName = path 128 | .slice(path.indexOf(parentType)) 129 | .replace(new RegExp(PATH_SEP, 'g'), '') 130 | } 131 | return fullyQualifiedName 132 | } 133 | 134 | public values(): Metadata[] { 135 | return this.metadatas 136 | } 137 | 138 | private static TYPES_WITH_SUB_TYPES = new Set([ 139 | OBJECT_TYPE, 140 | TERRITORY_MODEL_TYPE, 141 | WORKFLOW_TYPE, 142 | SHARING_RULE_TYPE, 143 | '', 144 | ]) 145 | 146 | private static UNSAFE_EXTENSION = new Set([ 147 | CUSTOM_APPLICATION_SUFFIX, 148 | EMAIL_SERVICES_FUNCTION_SUFFIX, 149 | CUSTOM_METADATA_SUFFIX, 150 | ]) 151 | 152 | private static COMPOSED_TYPES = new Set([ 153 | OBJECT_TYPE, 154 | OBJECT_TRANSLATION_TYPE, 155 | PERMISSIONSET_TYPE, 156 | WORKFLOW_TYPE, 157 | SHARING_RULE_TYPE, 158 | ...SUB_OBJECT_TYPES, 159 | ]) 160 | } 161 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/customObjectHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { MASTER_DETAIL_TAG } from '../../../../src/constant/metadataConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import CustomObjectHandler from '../../../../src/service/customObjectHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { 9 | copyFiles, 10 | pathExists, 11 | readDirs, 12 | readPathFromGit, 13 | } from '../../../../src/utils/fsHelper' 14 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 15 | 16 | jest.mock('../../../../src/utils/fsHelper') 17 | 18 | const mockedPathExist = jest.mocked(pathExists) 19 | const mockedReadDirs = jest.mocked(readDirs) 20 | const mockedReadPathFromGit = jest.mocked(readPathFromGit) 21 | 22 | mockedPathExist.mockResolvedValue(true) 23 | 24 | const territoryModelType = { 25 | childXmlNames: ['Territory2Rule', 'Territory2'], 26 | directoryName: 'territory2Models', 27 | inFolder: false, 28 | metaFile: false, 29 | suffix: 'territory2Model', 30 | xmlName: 'Territory2Model', 31 | } 32 | const objectType = { 33 | childXmlNames: [ 34 | 'CustomField', 35 | 'Index', 36 | 'BusinessProcess', 37 | 'RecordType', 38 | 'CompactLayout', 39 | 'WebLink', 40 | 'ValidationRule', 41 | 'SharingReason', 42 | 'ListView', 43 | 'FieldSet', 44 | ], 45 | directoryName: 'objects', 46 | inFolder: false, 47 | metaFile: false, 48 | suffix: 'object', 49 | xmlName: 'CustomObject', 50 | } 51 | 52 | const line = 53 | 'A force-app/main/default/objects/Account/Account.object-meta.xml' 54 | 55 | let work: Work 56 | beforeEach(() => { 57 | jest.clearAllMocks() 58 | work = getWork() 59 | }) 60 | 61 | describe('CustomObjectHandler', () => { 62 | let globalMetadata: MetadataRepository 63 | beforeAll(async () => { 64 | globalMetadata = await getGlobalMetadata() 65 | }) 66 | 67 | describe('when called with generateDelta false', () => { 68 | it('should not handle master detail exception', async () => { 69 | // Arrange 70 | work.config.generateDelta = false 71 | const sut = new CustomObjectHandler( 72 | line, 73 | objectType, 74 | work, 75 | globalMetadata 76 | ) 77 | 78 | // Act 79 | await sut.handleAddition() 80 | 81 | // Assert 82 | expect(pathExists).not.toHaveBeenCalled() 83 | }) 84 | }) 85 | 86 | describe('when called with generateDelta true', () => { 87 | describe(`when called with not 'objects' type`, () => { 88 | it('should not handle try to find master details fields', async () => { 
89 | // Arrange 90 | const sut = new CustomObjectHandler( 91 | 'A force-app/main/default/territory2Models/EU/EU.territory2Model-meta.xml', 92 | territoryModelType, 93 | work, 94 | globalMetadata 95 | ) 96 | 97 | // Act 98 | await sut.handleAddition() 99 | 100 | // Assert 101 | expect(pathExists).not.toHaveBeenCalled() 102 | }) 103 | }) 104 | 105 | describe('when field folder exist', () => { 106 | describe('when field folder contains master details', () => { 107 | it('should copy master detail fields', async () => { 108 | // Arrange 109 | mockedReadDirs.mockResolvedValueOnce(['Name.field-meta.xml']) 110 | mockedReadPathFromGit.mockResolvedValueOnce(MASTER_DETAIL_TAG) 111 | const sut = new CustomObjectHandler( 112 | line, 113 | objectType, 114 | work, 115 | globalMetadata 116 | ) 117 | 118 | // Act 119 | await sut.handleAddition() 120 | 121 | // Assert 122 | expect(copyFiles).toHaveBeenCalledTimes(2) 123 | }) 124 | }) 125 | 126 | describe('when field folder does not contain master details', () => { 127 | it('should not copy master detail fields', async () => { 128 | // Arrange 129 | mockedReadDirs.mockResolvedValue([]) 130 | mockedReadPathFromGit.mockResolvedValueOnce('') 131 | const sut = new CustomObjectHandler( 132 | line, 133 | objectType, 134 | work, 135 | globalMetadata 136 | ) 137 | 138 | // Act 139 | await sut.handleAddition() 140 | 141 | // Assert 142 | expect(copyFiles).toHaveBeenCalledTimes(1) 143 | }) 144 | }) 145 | }) 146 | 147 | describe('when field folder does not exist', () => { 148 | it('should not look into the field folder', async () => { 149 | // Arrange 150 | mockedPathExist.mockResolvedValueOnce(false) 151 | const sut = new CustomObjectHandler( 152 | line, 153 | objectType, 154 | work, 155 | globalMetadata 156 | ) 157 | 158 | // Act 159 | await sut.handleAddition() 160 | 161 | // Assert 162 | expect(readDirs).not.toHaveBeenCalled() 163 | }) 164 | }) 165 | }) 166 | }) 167 | -------------------------------------------------------------------------------- /src/commands/sgd/source/delta.ts: -------------------------------------------------------------------------------- 1 | import { Flags, SfCommand } from '@salesforce/sf-plugins-core' 2 | 3 | import { 4 | OUTPUT_DEFAULT_VALUE, 5 | REPO_DEFAULT_VALUE, 6 | SOURCE_DEFAULT_VALUE, 7 | TO_DEFAULT_VALUE, 8 | } from '../../../constant/cliConstants.js' 9 | import sgd from '../../../main.js' 10 | import type { Config } from '../../../types/config.js' 11 | import type { SgdResult } from '../../../types/sgdResult.js' 12 | import { log } from '../../../utils/LoggingDecorator.js' 13 | import { Logger } from '../../../utils/LoggingService.js' 14 | import { MessageService } from '../../../utils/MessageService.js' 15 | 16 | const messages = new MessageService() 17 | 18 | export default class SourceDeltaGenerate extends SfCommand { 19 | public static override readonly summary = messages.getMessage('summary') 20 | public static override readonly description = 21 | messages.getMessage('description') 22 | public static override readonly examples = messages.getMessages('examples') 23 | 24 | public static override readonly flags = { 25 | from: Flags.string({ 26 | char: 'f', 27 | summary: messages.getMessage('flags.from.summary'), 28 | required: true, 29 | }), 30 | to: Flags.string({ 31 | char: 't', 32 | summary: messages.getMessage('flags.to.summary'), 33 | default: TO_DEFAULT_VALUE, 34 | }), 35 | 'generate-delta': Flags.boolean({ 36 | char: 'd', 37 | summary: messages.getMessage('flags.generate-delta.summary'), 38 | }), 39 | 'output-dir': 
Flags.directory({ 40 | char: 'o', 41 | summary: messages.getMessage('flags.output.summary'), 42 | default: OUTPUT_DEFAULT_VALUE, 43 | exists: true, 44 | aliases: ['output'], 45 | deprecateAliases: true, 46 | }), 47 | 'repo-dir': Flags.directory({ 48 | char: 'r', 49 | summary: messages.getMessage('flags.repo.summary'), 50 | default: REPO_DEFAULT_VALUE, 51 | exists: true, 52 | aliases: ['repo'], 53 | deprecateAliases: true, 54 | }), 55 | 'source-dir': Flags.directory({ 56 | char: 's', 57 | summary: messages.getMessage('flags.source.summary'), 58 | description: messages.getMessage('flags.source.description'), 59 | default: [SOURCE_DEFAULT_VALUE], 60 | aliases: ['source'], 61 | deprecateAliases: true, 62 | multiple: true, 63 | }), 64 | 'ignore-file': Flags.file({ 65 | char: 'i', 66 | summary: messages.getMessage('flags.ignore.summary'), 67 | exists: true, 68 | aliases: ['ignore'], 69 | deprecateAliases: true, 70 | }), 71 | 'ignore-destructive-file': Flags.file({ 72 | char: 'D', 73 | summary: messages.getMessage('flags.ignore-destructive.summary'), 74 | exists: true, 75 | aliases: ['ignore-destructive'], 76 | deprecateAliases: true, 77 | }), 78 | 'include-file': Flags.file({ 79 | char: 'n', 80 | summary: messages.getMessage('flags.include.summary'), 81 | exists: true, 82 | aliases: ['include'], 83 | deprecateAliases: true, 84 | }), 85 | 'include-destructive-file': Flags.file({ 86 | char: 'N', 87 | summary: messages.getMessage('flags.include-destructive.summary'), 88 | exists: true, 89 | aliases: ['include-destructive'], 90 | deprecateAliases: true, 91 | }), 92 | 'ignore-whitespace': Flags.boolean({ 93 | char: 'W', 94 | summary: messages.getMessage('flags.ignore-whitespace.summary'), 95 | }), 96 | 'api-version': Flags.orgApiVersion({ 97 | char: 'a', 98 | summary: messages.getMessage('flags.api-version.summary'), 99 | }), 100 | } 101 | 102 | @log 103 | public async run(): Promise { 104 | const { flags } = await this.parse(SourceDeltaGenerate) 105 | 106 | const config: Config = { 107 | apiVersion: parseInt(flags['api-version']!) 
|| undefined, 108 | from: flags['from'], 109 | generateDelta: flags['generate-delta'], 110 | ignore: flags['ignore-file'], 111 | ignoreDestructive: flags['ignore-destructive-file'], 112 | ignoreWhitespace: flags['ignore-whitespace'], 113 | include: flags['include-file'], 114 | includeDestructive: flags['include-destructive-file'], 115 | output: flags['output-dir'], 116 | repo: flags['repo-dir'], 117 | source: flags['source-dir'], 118 | to: flags['to'], 119 | } 120 | 121 | this.spinner.start( 122 | messages.getMessage('info.CommandIsRunning'), 123 | undefined, 124 | { stdout: true } 125 | ) 126 | const output: SgdResult = { 127 | 'output-dir': config.output, 128 | } 129 | let finalMessage = messages.getMessage('info.CommandSuccess') 130 | try { 131 | const jobResult = await sgd(config) 132 | for (const warning of jobResult.warnings) { 133 | Logger.warn('run: warning', warning) 134 | this.warn(warning.message) 135 | } 136 | this.info(messages.getMessage('info.EncourageSponsorship')) 137 | Logger.info('run: success') 138 | } catch (err) { 139 | if (err instanceof Error) { 140 | finalMessage = `${messages.getMessage('info.CommandFailure')}: ${ 141 | err.message 142 | }` 143 | output.error = err.message 144 | } 145 | Logger.error('run: error', err) 146 | process.exitCode = 1 147 | } 148 | this.spinner.stop(finalMessage) 149 | return output 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/2.0.5/schema.json", 3 | "formatter": { 4 | "enabled": true, 5 | "formatWithErrors": false, 6 | "indentStyle": "space", 7 | "indentWidth": 2, 8 | "lineEnding": "lf", 9 | "lineWidth": 80, 10 | "attributePosition": "auto", 11 | "includes": [ 12 | "**", 13 | "!**/.next", 14 | "!**/node_modules", 15 | "!**/output", 16 | "!**/reports", 17 | "!**/.github", 18 | "!**/*.json", 19 | "!**/*.md" 20 | ] 21 | }, 22 | "assist": { "actions": { "source": { "organizeImports": "on" } } }, 23 | "linter": { 24 | "enabled": true, 25 | "rules": { 26 | "recommended": false, 27 | "complexity": { 28 | "noExtraBooleanCast": "error", 29 | "noUselessCatch": "error", 30 | "noUselessThisAlias": "error", 31 | "noUselessTypeConstraint": "error", 32 | "useArrowFunction": "off", 33 | "noAdjacentSpacesInRegex": "error" 34 | }, 35 | "correctness": { 36 | "noConstAssign": "error", 37 | "noConstantCondition": "error", 38 | "noEmptyCharacterClassInRegex": "error", 39 | "noEmptyPattern": "error", 40 | "noGlobalObjectCalls": "error", 41 | "noInnerDeclarations": "error", 42 | "noInvalidConstructorSuper": "error", 43 | "noNonoctalDecimalEscape": "error", 44 | "noPrecisionLoss": "error", 45 | "noSelfAssign": "error", 46 | "noSetterReturn": "error", 47 | "noSwitchDeclarations": "error", 48 | "noUndeclaredVariables": "error", 49 | "noUnreachable": "error", 50 | "noUnreachableSuper": "error", 51 | "noUnsafeFinally": "error", 52 | "noUnsafeOptionalChaining": "error", 53 | "noUnusedLabels": "error", 54 | "noUnusedVariables": "error", 55 | "useIsNan": "error", 56 | "useValidForDirection": "error", 57 | "useYield": "error", 58 | "noInvalidBuiltinInstantiation": "error", 59 | "useValidTypeof": "error" 60 | }, 61 | "style": { 62 | "noNamespace": "error", 63 | "noNonNullAssertion": "off", 64 | "useAsConstAssertion": "error", 65 | "useBlockStatements": "off", 66 | "useArrayLiterals": "off" 67 | }, 68 | "suspicious": { 69 | "noAssignInExpressions": "error", 70 | 
"noAsyncPromiseExecutor": "error", 71 | "noCatchAssign": "error", 72 | "noClassAssign": "error", 73 | "noCompareNegZero": "error", 74 | "noControlCharactersInRegex": "error", 75 | "noDebugger": "error", 76 | "noDuplicateCase": "error", 77 | "noDuplicateClassMembers": "error", 78 | "noDuplicateObjectKeys": "error", 79 | "noDuplicateParameters": "error", 80 | "noEmptyBlockStatements": "error", 81 | "noExplicitAny": "error", 82 | "noExtraNonNullAssertion": "error", 83 | "noFallthroughSwitchClause": "error", 84 | "noFunctionAssign": "error", 85 | "noGlobalAssign": "error", 86 | "noImportAssign": "error", 87 | "noMisleadingCharacterClass": "error", 88 | "noMisleadingInstantiator": "error", 89 | "noPrototypeBuiltins": "error", 90 | "noRedeclare": "error", 91 | "noShadowRestrictedNames": "error", 92 | "noUnsafeDeclarationMerging": "error", 93 | "noUnsafeNegation": "error", 94 | "useGetterReturn": "error", 95 | "useNamespaceKeyword": "error", 96 | "noWith": "error", 97 | "noConsole": { "level": "error", "options": { "allow": ["log"] } } 98 | } 99 | }, 100 | "includes": [ 101 | "**", 102 | "!**/lib/**/*", 103 | "!**/node_modules", 104 | "!**/.next", 105 | "!**/output", 106 | "!**/reports", 107 | "!**/e2e", 108 | "!**/.github" 109 | ] 110 | }, 111 | "javascript": { 112 | "formatter": { 113 | "jsxQuoteStyle": "double", 114 | "quoteProperties": "asNeeded", 115 | "trailingCommas": "es5", 116 | "semicolons": "asNeeded", 117 | "arrowParentheses": "asNeeded", 118 | "bracketSpacing": true, 119 | "bracketSameLine": false, 120 | "quoteStyle": "single", 121 | "attributePosition": "auto" 122 | }, 123 | "globals": ["Atomics", "SharedArrayBuffer"] 124 | }, 125 | "overrides": [ 126 | { 127 | "includes": ["**/*.ts", "**/*.tsx", "**/*.mts", "**/*.cts"], 128 | "linter": { 129 | "rules": { 130 | "correctness": { 131 | "noConstAssign": "off", 132 | "noGlobalObjectCalls": "off", 133 | "noInvalidConstructorSuper": "off", 134 | "noSetterReturn": "off", 135 | "noUndeclaredVariables": "off", 136 | "noUnreachable": "off", 137 | "noUnreachableSuper": "off", 138 | "noInvalidBuiltinInstantiation": "off" 139 | }, 140 | "style": { 141 | "useConst": "error" 142 | }, 143 | "suspicious": { 144 | "noDuplicateClassMembers": "off", 145 | "noDuplicateObjectKeys": "off", 146 | "noDuplicateParameters": "off", 147 | "noFunctionAssign": "off", 148 | "noImportAssign": "off", 149 | "noRedeclare": "off", 150 | "noUnsafeNegation": "off", 151 | "useGetterReturn": "off", 152 | "noVar": "error" 153 | }, 154 | "complexity": { 155 | "noArguments": "error" 156 | } 157 | } 158 | } 159 | } 160 | ] 161 | } 162 | -------------------------------------------------------------------------------- /__tests__/unit/lib/service/reportingFolderHandler.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | import { describe, expect, it, jest } from '@jest/globals' 3 | 4 | import { METAFILE_SUFFIX } from '../../../../src/constant/metadataConstants' 5 | import { MetadataRepository } from '../../../../src/metadata/MetadataRepository' 6 | import ReportingFolderHandler from '../../../../src/service/reportingFolderHandler' 7 | import type { Work } from '../../../../src/types/work' 8 | import { copyFiles, readDirs } from '../../../../src/utils/fsHelper' 9 | import { getGlobalMetadata, getWork } from '../../../__utils__/globalTestHelper' 10 | 11 | jest.mock('../../../../src/utils/fsHelper') 12 | const mockedReadDirs = jest.mocked(readDirs) 13 | 14 | const entity = 'folder/test' 15 | const extension = 'report' 16 | 
const objectType = { 17 | directoryName: 'reports', 18 | inFolder: true, 19 | metaFile: true, 20 | xmlName: 'Report', 21 | content: [ 22 | { 23 | suffix: 'report', 24 | xmlName: 'Report', 25 | }, 26 | { 27 | suffix: 'reportFolder', 28 | xmlName: 'ReportFolder', 29 | }, 30 | ], 31 | } 32 | 33 | const testContext = [ 34 | [ 35 | `A force-app/main/default/${objectType.directoryName}/${entity}.${extension}-meta.xml`, 36 | new Set([entity]), 37 | 'Report', 38 | ], 39 | [ 40 | `A force-app/main/default/${objectType.directoryName}/${entity}.reportFolder-meta.xml`, 41 | new Set([entity]), 42 | 'ReportFolder', 43 | ], 44 | [ 45 | `A force-app/main/default/${objectType.directoryName}/folder/${entity}.reportFolder-meta.xml`, 46 | new Set([`folder/${entity}`]), 47 | 'ReportFolder', 48 | ], 49 | [ 50 | `A force-app/main/default/${objectType.directoryName}/folder/folder/${entity}.reportFolder-meta.xml`, 51 | new Set([`folder/folder/${entity}`]), 52 | 'ReportFolder', 53 | ], 54 | ] 55 | 56 | let work: Work 57 | beforeEach(() => { 58 | jest.clearAllMocks() 59 | work = getWork() 60 | }) 61 | 62 | describe('InNestedFolderHandler', () => { 63 | let globalMetadata: MetadataRepository 64 | beforeAll(async () => { 65 | globalMetadata = await getGlobalMetadata() 66 | }) 67 | 68 | describe.each( 69 | testContext 70 | )('when called with generateDelta false', (changePath: 71 | | string 72 | | Set, expected: string | Set, expectedType: 73 | | string 74 | | Set) => { 75 | beforeEach(() => { 76 | work.config.generateDelta = false 77 | }) 78 | it(`should not copy meta files nor copy special extension when adding ${expectedType}`, async () => { 79 | // Arrange 80 | const sut = new ReportingFolderHandler( 81 | changePath as string, 82 | objectType, 83 | work, 84 | globalMetadata 85 | ) 86 | 87 | // Act 88 | await sut.handleAddition() 89 | 90 | // Assert 91 | expect(work.diffs.package.get(expectedType as string)).toEqual(expected) 92 | expect(copyFiles).not.toHaveBeenCalled() 93 | }) 94 | }) 95 | 96 | describe.each(testContext)('when called with generateDelta true', (changePath: 97 | | string 98 | | Set, expected: string | Set, expectedType: 99 | | string 100 | | Set) => { 101 | beforeEach(() => { 102 | work.config.generateDelta = true 103 | }) 104 | 105 | describe(`when readDirs does not return files`, () => { 106 | it(`should not copy special extension and copy meta files in addition ${expectedType}`, async () => { 107 | // Arrange 108 | const sut = new ReportingFolderHandler( 109 | changePath as string, 110 | objectType, 111 | work, 112 | globalMetadata 113 | ) 114 | mockedReadDirs.mockImplementation(() => Promise.resolve([])) 115 | 116 | // Act 117 | await sut.handleAddition() 118 | 119 | // Assert 120 | expect(work.diffs.package.get(expectedType as string)).toEqual(expected) 121 | expect(readDirs).toHaveBeenCalledTimes(1) 122 | expect(copyFiles).toHaveBeenCalledTimes(3) 123 | expect(copyFiles).toHaveBeenCalledWith( 124 | work.config, 125 | expect.stringContaining(METAFILE_SUFFIX) 126 | ) 127 | }) 128 | }) 129 | 130 | describe('when readDirs returns files', () => { 131 | it('should copy special extension', async () => { 132 | // Arrange 133 | const sut = new ReportingFolderHandler( 134 | changePath as string, 135 | objectType, 136 | work, 137 | globalMetadata 138 | ) 139 | mockedReadDirs.mockImplementationOnce(() => 140 | Promise.resolve([entity, 'not/matching']) 141 | ) 142 | 143 | // Act 144 | await sut.handleAddition() 145 | 146 | // Assert 147 | expect(work.diffs.package.get(expectedType as 
string)).toEqual(expected) 148 | expect(readDirs).toHaveBeenCalledTimes(1) 149 | expect(copyFiles).toHaveBeenCalledTimes(5) 150 | }) 151 | }) 152 | }) 153 | 154 | describe('when the line should not be processed', () => { 155 | it.each([ 156 | `force-app/main/default/${objectType.directoryName}/test.otherExtension`, 157 | ])('does not handle the line', async entityPath => { 158 | // Arrange 159 | const sut = new ReportingFolderHandler( 160 | `A ${entityPath}`, 161 | objectType, 162 | work, 163 | globalMetadata 164 | ) 165 | 166 | // Act 167 | await sut.handle() 168 | 169 | // Assert 170 | expect(work.diffs.package.size).toBe(0) 171 | expect(copyFiles).not.toHaveBeenCalled() 172 | }) 173 | }) 174 | }) 175 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | colladonsebastien@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | <https://www.contributor-covenant.org/version/2/0/code_of_conduct.html>. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | <https://www.contributor-covenant.org/faq>. Translations are available at 128 | <https://www.contributor-covenant.org/translations>. 129 | --------------------------------------------------------------------------------