├── .github ├── FUNDING.yml ├── dependabot.yml └── workflows │ ├── codeql.yml │ ├── documentation.yml │ ├── lint.yml │ ├── main.yml │ ├── refreshLockFiles.yml │ └── release.yml ├── .gitignore ├── .prettierignore ├── LICENSE ├── README.md ├── index.d.ts ├── jest.config.js ├── package-lock.json ├── package.json ├── src ├── checks │ ├── commons │ │ └── keySeparator_check.ts │ ├── diff_checks.ts │ ├── export │ │ ├── export_common_checks.ts │ │ ├── export_csv_checks.ts │ │ ├── export_xlsx_checks.ts │ │ └── index.ts │ ├── import │ │ ├── import_common_checks.ts │ │ ├── import_xlsx_checks.ts │ │ └── index.ts │ └── index.ts ├── cli.ts ├── cmds │ ├── diff.ts │ ├── diff │ │ ├── detectChanges.ts │ │ └── reporter-strategies │ │ │ ├── index.ts │ │ │ └── toJSON.ts │ ├── export.ts │ ├── export_cmds │ │ ├── export_commons.ts │ │ ├── export_csv.ts │ │ └── export_xlsx.ts │ ├── import.ts │ └── import_cmds │ │ ├── import_commons.ts │ │ ├── import_csv.ts │ │ └── import_xlsx.ts ├── commons │ ├── commandBuilder.ts │ ├── enhancedGet.ts │ ├── enhancedSet.ts │ └── getLeavesPathes.ts ├── index.ts ├── middlewares │ └── middlewares.ts └── types │ ├── diffTypes.ts │ ├── exportTypes.ts │ └── importTypes.ts ├── test ├── diff.test.ts ├── export │ ├── export-csv.test.ts │ └── export-xlsx.test.ts ├── fixtures │ ├── export-xlsx │ │ └── worksheetCustomizer-dynamic.js │ ├── import-csv │ │ ├── export-csv.csv │ │ └── export-flat-csv.csv │ └── import-xlsx │ │ ├── export-flat-xlsx.xlsx │ │ └── export-xlsx.xlsx ├── getLeavesPathes.test.ts ├── import │ ├── import-csv.test.ts │ └── import-xlsx.test.ts └── test-helpers.ts ├── tsconfig.json └── website ├── .gitignore ├── README.md ├── babel.config.js ├── blog ├── 2021-09-05-welcome │ └── index.md ├── 2021-10-07-flat-json-now-supported │ └── index.md └── authors.yml ├── docs ├── commands │ ├── _category_.json │ ├── _diff-faq.mdx │ ├── _i18n-files-tabs.mdx │ ├── diff.mdx │ ├── export │ │ ├── _category_.json │ │ ├── _export-faq.mdx │ │ ├── assets │ │ │ ├── exampleXlsxExport.png │ │ │ ├── export-csv.csv │ │ │ └── export-xlsx.xlsx │ │ ├── export to_csv.mdx │ │ ├── export to_xlsx.mdx │ │ └── index.mdx │ ├── import │ │ ├── _category_.json │ │ ├── _import-faq.mdx │ │ ├── import from_csv.mdx │ │ ├── import from_xlsx.mdx │ │ └── index.mdx │ └── index.mdx ├── faq.md └── installation.md ├── docusaurus.config.js ├── package-lock.json ├── package.json ├── sidebars.js ├── src ├── components │ ├── HomepageFeatures.module.css │ └── HomepageFeatures.tsx ├── css │ └── custom.css └── pages │ ├── index.module.css │ └── index.tsx ├── static ├── .nojekyll └── img │ ├── diff.svg │ ├── export.svg │ ├── favicon.ico │ ├── import.svg │ └── logo.svg └── tsconfig.json /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | custom: ['https://www.buymeacoffee.com/GPFR'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 4 | #github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 5 | #patreon: # Replace with a single Patreon username 6 | #open_collective: # Replace with a single Open Collective username 7 | #ko_fi: # Replace with a single Ko-fi username 8 | #tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 9 | #community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 10 | #liberapay: # Replace with a single Liberapay username 11 | #issuehunt: # Replace with a single IssueHunt username 12 | #otechie: # Replace with a single Otechie username 13 | #lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | # Files stored in repository root 9 | - package-ecosystem: "npm" 10 | directory: "/" 11 | commit-message: 12 | prefix: chore 13 | include: "scope" 14 | schedule: 15 | interval: "weekly" 16 | # Files stored in repository testsite 17 | - package-ecosystem: "npm" 18 | directory: "/website" 19 | commit-message: 20 | prefix: chore 21 | include: "scope" 22 | schedule: 23 | interval: "weekly" 24 | # Workflow files stored in the default location of `.github/workflows` 25 | - package-ecosystem: "github-actions" 26 | directory: "/" 27 | commit-message: 28 | prefix: ci 29 | include: "scope" 30 | schedule: 31 | interval: "daily" 32 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | schedule: 9 | - cron: "34 17 * * 0" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ javascript ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v4 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v3 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v3 37 | 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v3 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /.github/workflows/documentation.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - '.github/workflows/documentation.yml' 9 | - 'website/**' 10 | 11 | jobs: 12 | deploy: 13 | runs-on: ubuntu-latest 14 | concurrency: 15 | group: ${{ github.workflow }}-${{ github.ref }} 16 | defaults: 17 | run: 18 | working-directory: website 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Setup Node 23 | uses: actions/setup-node@v4 24 | with: 25 | node-version: 'lts/*' 26 | 27 | - name: Get yarn cache 28 | id: yarn-cache 29 | run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT 30 | 31 | - name: Cache dependencies 32 | uses: actions/cache@v4 33 | with: 34 | path: ${{ steps.yarn-cache.outputs.dir }} 35 | key: ${{ runner.os }}-website-${{ hashFiles('**/yarn.lock') }} 36 | restore-keys: | 37 | ${{ runner.os }}-website- 38 | 39 | - run: yarn install --frozen-lockfile 40 | - run: yarn build 41 | 42 | - name: Deploy 43 | uses: peaceiris/actions-gh-pages@v4 44 | if: ${{ github.ref == 'refs/heads/master' }} 45 | with: 46 | github_token: ${{ secrets.GITHUB_TOKEN }} 47 | publish_dir: ./website/build 48 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: [push] 3 | jobs: 4 | lint: 5 | name: Check our linting standards 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: 🛎️ Checkout 9 | uses: actions/checkout@v4 10 | - name: 🔨 Setup Node.js 11 | uses: actions/setup-node@v4 12 | with: 13 | node-version: 16 14 | - name: 🚧 Install dependencies 15 | run: npm ci 16 | - name: 🛂 Check linting 17 | run: npm run lint -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | # Triggers the workflow on push / pull request events / manual trigger 3 | on: [push, pull_request, workflow_dispatch] 4 | jobs: 5 | build: 6 | name: Build and test on Node ${{ matrix.node }} - ${{ matrix.os }} 7 | 8 | runs-on: ${{ matrix.os }} 9 | strategy: 10 | matrix: 11 | node: ['18.x', '20.x'] 12 | os: [ubuntu-latest, windows-latest] 13 | 14 | steps: 15 | - name: Checkout repo 16 | uses: actions/checkout@v4 17 | 18 | - name: Use Node ${{ matrix.node }} 19 | uses: actions/setup-node@v4 20 | with: 21 | node-version: ${{ matrix.node }} 22 | 23 | - name: Install deps and build (with cache) 24 | uses: bahmutov/npm-install@v1.10.9 25 | 26 | - name: Test 27 | run: yarn test --ci --coverage --maxWorkers=50% 28 | 29 | - name: Send coverage to Codecov 30 | uses: codecov/codecov-action@v5.4.3 31 | with: 32 | token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos 33 | directory: ./coverage 34 | flags: ${{ matrix.os }} 35 | 36 | - name: Build 37 | run: yarn build 38 | -------------------------------------------------------------------------------- /.github/workflows/refreshLockFiles.yml: -------------------------------------------------------------------------------- 1 | name: Refresh Package Lock Files 2 | 3 | on: 4 | workflow_dispatch: 5 | jobs: 6 | update_packages: 7 | runs-on: ubuntu-latest 8 | permissions: 9 | contents: write 10 | pull-requests: write 11 | steps: 12 | - name: 🛎️ Checkout 13 | uses: actions/checkout@v4 14 | - name: Setup Node.js ✨ 15 | uses: actions/setup-node@v4.0.3 16 | with: 17 | node-version: "lts/*" 18 | - name: 🤖 Remove package-lock.json files 19 | run: | 20 | find . -name "package-lock.json" -delete 21 | - name: 💻 Install npm packages 22 | run: npm install 23 | - name: 🛂 Fix linting 24 | run: npm run lint-fix 25 | - name: Build lib 🤖 26 | run: | 27 | npm run build 28 | - name: Link lib 🔗 29 | run: | 30 | npm link 31 | - name: Install website 💻 32 | run: | 33 | cd website 34 | npm link @jy95/i18n-tools --save 35 | npm install --prefer-dedupe 36 | npm dedupe 37 | - name: Create Pull Request 🤖 38 | uses: peter-evans/create-pull-request@v7 39 | with: 40 | title: Update npm packages 41 | body: | 42 | This PR updates npm packages by removing package-lock.json files and reinstalling dependencies. 43 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | # Trigger only when the build workflow succeeded on master 4 | workflow_run: 5 | workflows: ["CI"] 6 | branches: [master] 7 | types: 8 | - completed 9 | # Trigger semantic-release on demand 10 | repository_dispatch: 11 | types: [semantic-release] 12 | jobs: 13 | release: 14 | name: Release 15 | runs-on: ubuntu-latest 16 | if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'repository_dispatch' }} 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v4 20 | - name: Setup Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version: "lts/*" 24 | - name: Install dependencies 25 | run: npm ci 26 | - name: Release 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 30 | run: npx semantic-release 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | .DS_Store 3 | node_modules 4 | dist 5 | 6 | # Files / Folders created by the testing 7 | coverage 8 | diff_report_* 9 | translations_* 10 | test.xlsx.xlsx 11 | 12 | # Files / Folder for the website 13 | /website/.docusaurus 14 | /website/build 15 | /website/backers.json 16 | /website/node_modules 17 | /website/yarn.lock 18 | 19 | # Files by parcel js 20 | .parcel-cache -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Ignore artifacts: 2 | dist 3 | coverage 4 | 5 | # Ignore documentation 6 | website 7 | 8 | # Ignore scripts 9 | .github 10 | 11 | # Rest 12 | README.md 13 | tsconfig.json 14 | package.json 15 | package-lock.json 16 | jest.config.js -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # @jy95/i18n-tools [![codecov](https://codecov.io/gh/jy95/i18n-tools/branch/master/graph/badge.svg?token=PQDE2R2GYR)](https://codecov.io/gh/jy95/i18n-tools) [![Codacy Badge](https://app.codacy.com/project/badge/Grade/95593519673143d6a1e475c1d2c4332c)](https://www.codacy.com/gh/jy95/i18n-tools) 2 | 3 | CLI to make common operations around i18n files simpler. 4 | 5 | - 👩‍💻 Export i18n files into something else (xlsx, csv, ...) 6 | - ✨ Turn a file (xlsx, csv, ...) to i18n file(s) 7 | - 📜 Compare at least two i18n files and generate a report 8 | - ... 9 | 10 | Read more on [https://jy95.github.io/i18n-tools/](https://jy95.github.io/i18n-tools/) 11 | 12 | ```bash 13 | # Display all available commands 14 | npx @jy95/i18n-tools --help 15 | ``` 16 | 17 | ## Contributing 18 | 19 | * If you're unsure if a feature would make a good addition, you can always [create an issue](https://github.com/jy95/i18n-tools/issues/new) first. 20 | * We aim for 100% test coverage. Please write tests for any new functionality or changes. 21 | * Any API changes should be fully documented. 22 | * Make sure your code meets our linting standards. Run `npm run lint` to check your code. 23 | * Be mindful of others when making suggestions and/or code reviewing. -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | export {}; 2 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | collectCoverageFrom: [ 6 | "src/**/*.{js,ts}" 7 | ] 8 | }; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@jy95/i18n-tools", 3 | "author": "jy95", 4 | "version": "0.0.0-development", 5 | "license": "GPL-3.0-or-later", 6 | "description": "CLI to make common operations around i18n files simpler", 7 | "keywords": [ 8 | "i18n", 9 | "cli", 10 | "conversion", 11 | "xlsx", 12 | "csv", 13 | "json", 14 | "diff", 15 | "export", 16 | "import", 17 | "internationalization", 18 | "translation", 19 | "localization" 20 | ], 21 | "files": [ 22 | "dist" 23 | ], 24 | "bin": "dist/cli.js", 25 | "main": "dist/main.js", 26 | "module": "dist/module.js", 27 | "types": "dist/module.d.ts", 28 | "exports": { 29 | ".": { 30 | "require": "./dist/main.js", 31 | "import": "./dist/module.js" 32 | } 33 | }, 34 | "targets": { 35 | "cli": { 36 | "context": "node", 37 | "source": "src/cli.ts", 38 | "outputFormat": "commonjs" 39 | }, 40 | "main": { 41 | "context": "node", 42 | "source": "src/index.ts", 43 | "optimize": true, 44 | "outputFormat": "commonjs" 45 | }, 46 | "module": { 47 | "context": "node", 48 | "source": "src/index.ts", 49 | "optimize": true, 50 | "outputFormat": "esmodule" 51 | } 52 | }, 53 | "engines": { 54 | "node": ">=12" 55 | }, 56 | "scripts": { 57 | "watch": "npx parcel watch", 58 | "build": "npx parcel build", 59 | "test": "npx jest", 60 | "test:coverage": "npx jest --coverage", 61 | "lint": "npx prettier --check .", 62 | "lint-fix": "npx prettier --write .", 63 | "prepare": "npm run build", 64 | "semantic-release": "semantic-release" 65 | }, 66 | "prettier": { 67 | "printWidth": 80, 68 | "semi": true, 69 | "singleQuote": true, 70 | "trailingComma": "es5" 71 | }, 72 | "devDependencies": { 73 | "@parcel/transformer-typescript-types": "^2.10.0", 74 | "@types/jest": "^29.5.2", 75 | "@types/lodash": "^4.14.195", 76 | "@types/node": "^22.5.2", 77 | "@types/yargs": "^17.0.25", 78 | "eslint-config-prettier": "^10.0.1", 79 | "fsify": "^5.0.0", 80 | "jest": "^29.6.4", 81 | "parcel": "^2.10.0", 82 | "prettier": "^3.1.2", 83 | "semantic-release": "^24.1.0", 84 | "ts-jest": "^29.1.1", 85 | "tslib": "^2.6.2", 86 | "typescript": "^5.2.2" 87 | }, 88 | "dependencies": { 89 | "exceljs": "^4.3.0", 90 | "lodash": "^4.17.21", 91 | "lodash-es": "^4.17.21", 92 | "yargs": "^17.7.2" 93 | }, 94 | "eslintConfig": { 95 | "extends": [ 96 | "prettier" 97 | ], 98 | "overrides": [ 99 | { 100 | "files": [ 101 | "*.ts", 102 | "*.js" 103 | ], 104 | "rules": { 105 | "import/no-anonymous-default-export": "off" 106 | } 107 | } 108 | ] 109 | }, 110 | "homepage": "https://jy95.github.io/i18n-tools/", 111 | "repository": { 112 | "type": "git", 113 | "url": "https://github.com/jy95/i18n-tools.git" 114 | }, 115 | "publishConfig": { 116 | "access": "public" 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/checks/commons/keySeparator_check.ts: -------------------------------------------------------------------------------- 1 | // lodash methodes 2 | import isString from 'lodash/isString'; 3 | 4 | const KEYSEPARATOR_CHECK = async (argv: any) => { 5 | let keySeparator = argv.keySeparator as any; 6 | let check = [ 7 | () => isString(keySeparator) && keySeparator.length !== 1, 8 | () => keySeparator === true, 9 | ].some((pred) => pred()); 10 | if (check) { 11 | return new Error(`Option keySeparator should be a not-empty char or false`); 12 | } else { 13 | return true; 14 | } 15 | }; 16 | export default KEYSEPARATOR_CHECK; 17 | -------------------------------------------------------------------------------- /src/checks/diff_checks.ts: -------------------------------------------------------------------------------- 1 | // reuse check function from export command 2 | import { FILENAME_CHECK } from './export/index'; 3 | // key separator check 4 | import KEYSEPARATOR_CHECK from './commons/keySeparator_check'; 5 | 6 | // check if at least two paths were provided 7 | // For that, we will use "backupKey" from backupPaths : "paths" 8 | const pathsProp = 'paths'; 9 | export const AT_LEAST_2_PATHS_CHECK = async (argv: any) => { 10 | return Object.values(argv[pathsProp]).filter((v) => v !== undefined).length >= 11 | 2 12 | ? true 13 | : new Error('At least two paths must be provided'); 14 | }; 15 | 16 | // export checks in expected order into a single array 17 | export const CHECKS = [ 18 | KEYSEPARATOR_CHECK, 19 | FILENAME_CHECK, 20 | AT_LEAST_2_PATHS_CHECK, 21 | ]; 22 | -------------------------------------------------------------------------------- /src/checks/export/export_common_checks.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import { extname } from 'path'; 3 | 4 | // lodash methodes 5 | import isPlainObject from 'lodash/isPlainObject'; 6 | import isFunction from 'lodash/isFunction'; 7 | import isEmpty from 'lodash/isEmpty'; 8 | import uniq from 'lodash/uniq'; 9 | 10 | // key separator check 11 | import KEYSEPARATOR_CHECK from '../commons/keySeparator_check'; 12 | 13 | // validation for filename option 14 | export const FILENAME_CHECK = async (argv: any) => { 15 | let filename: unknown = argv['filename']; 16 | if (extname(filename as string).length !== 0) { 17 | return new Error(`${filename} has an extension : Remove it please`); 18 | } else { 19 | return true; 20 | } 21 | }; 22 | 23 | // validations for files option 24 | export const FILES_CHECK = async (argv: any) => { 25 | let files = argv.files as any; 26 | if (!isPlainObject(files)) { 27 | return new Error('Option files is not a JSON Object'); 28 | } 29 | if (isEmpty(files)) { 30 | return new Error('Option files should have at least one entry'); 31 | } 32 | let entries: [string, any][] = Object.entries(files); 33 | if (uniq(Object.values(files)).length !== entries.length) { 34 | return new Error( 35 | `At least a duplicated value in files JSON object was detected` 36 | ); 37 | } 38 | 39 | try { 40 | await Promise.all(entries.map((entry) => verify_files_entry(entry))); 41 | return true; 42 | } catch (error) { 43 | return error as Error; 44 | } 45 | }; 46 | 47 | // verify if an entry from files option meet requirements 48 | async function verify_files_entry([_, i18nPath]: [string, any]): Promise< 49 | boolean | Error 50 | > { 51 | let potentialJSON; 52 | // check if file is readable 53 | try { 54 | await fs.promises.access(i18nPath); 55 | potentialJSON = await fs.promises.readFile(i18nPath); 56 | } catch (error) { 57 | return Promise.reject( 58 | new Error(`${i18nPath} cannot be read : check permissions`) 59 | ); 60 | } 61 | // check if the file is a JSON 62 | try { 63 | JSON.parse(potentialJSON.toString()); 64 | return Promise.resolve(true); 65 | } catch (error) { 66 | return Promise.reject(new Error(`${i18nPath} isn't a valid JSON`)); 67 | } 68 | } 69 | 70 | // validations for resultsFilter option 71 | export const RESULTSFILTER_CHECK = async (argv: any) => { 72 | if ('resultsFilter' in argv) { 73 | let fct = argv.resultsFilter as any; 74 | if (isFunction(fct) && fct.length === 1) { 75 | return true; 76 | } else { 77 | return new Error( 78 | "resultsFilter is not an function or doesn't take an single argument" 79 | ); 80 | } 81 | } else { 82 | return true; 83 | } 84 | }; 85 | 86 | // export checks in expected order into a single array 87 | export const CHECKS = [ 88 | KEYSEPARATOR_CHECK, 89 | FILENAME_CHECK, 90 | FILES_CHECK, 91 | RESULTSFILTER_CHECK, 92 | ]; 93 | -------------------------------------------------------------------------------- /src/checks/export/export_csv_checks.ts: -------------------------------------------------------------------------------- 1 | import { COLUMNS_CHECK, COLUMNS_AND_FILES_CHECK } from './export_xlsx_checks'; 2 | 3 | // export checks in expected order into a single array 4 | export const CHECKS = [COLUMNS_CHECK, COLUMNS_AND_FILES_CHECK]; 5 | -------------------------------------------------------------------------------- /src/checks/export/export_xlsx_checks.ts: -------------------------------------------------------------------------------- 1 | // lodash methodes 2 | import isString from 'lodash/isString'; 3 | import isFunction from 'lodash/isFunction'; 4 | import isArray from 'lodash/isArray'; 5 | import isEmpty from 'lodash/isEmpty'; 6 | import some from 'lodash/some'; 7 | import has from 'lodash/has'; 8 | import get from 'lodash/get'; 9 | import uniq from 'lodash/uniq'; 10 | import find from 'lodash/find'; 11 | import xor from 'lodash/xor'; 12 | 13 | // validations for columns option 14 | export const COLUMNS_CHECK = async (argv: any) => { 15 | let columns = argv.columns as any; 16 | if (!isArray(columns)) { 17 | return new Error('columns is not a JSON Array'); 18 | } 19 | if (isEmpty(columns)) { 20 | return new Error('Option columns should have at least one entry'); 21 | } 22 | // checking rules 23 | let errors_detectors: { 24 | message: (prop: string) => string; 25 | errorDetected: (prop: string) => boolean; 26 | }[] = [ 27 | { 28 | message: (prop: string) => 29 | `At least one item in columns array doesn't have "${prop}" property`, 30 | errorDetected: (prop: string) => 31 | some(columns, (item) => !has(item, prop)), 32 | }, 33 | { 34 | message: (prop: string) => 35 | `At least one item in columns array doesn't have "${prop}" property with a String value`, 36 | errorDetected: (prop: string) => 37 | some(columns, (item) => !isString(get(item, prop))), 38 | }, 39 | { 40 | message: (prop: string) => 41 | `At least a duplicated value in columns array in prop "${prop}" was detected`, 42 | errorDetected: (prop: string) => 43 | uniq(columns.map((item: string) => get(item, prop))).length !== 44 | columns.length, 45 | }, 46 | ]; 47 | // run check 48 | return ['locale', 'label'].reduce((acc: boolean | Error, prop: string) => { 49 | /* istanbul ignore if */ 50 | if (acc instanceof Error) { 51 | return acc; 52 | } else { 53 | let error = find(errors_detectors, (rule) => rule.errorDetected(prop)); 54 | if (error) { 55 | return new Error(error.message(prop)); 56 | } else { 57 | return acc; 58 | } 59 | } 60 | }, true); 61 | }; 62 | 63 | // validation for both columns & files options 64 | export const COLUMNS_AND_FILES_CHECK = async (argv: any) => { 65 | let columns = argv.columns as any[]; 66 | let files = argv.files as { [x: string]: any }; 67 | 68 | let keys_columns: string[] = columns.map((x: { [x: string]: any }) => 69 | get(x, 'locale') 70 | ); 71 | let keys_files: string[] = Object.keys(files); 72 | if (isEmpty(xor(keys_columns, keys_files))) { 73 | return true; 74 | } else { 75 | return new Error( 76 | 'At least one key differs between files and columns options' 77 | ); 78 | } 79 | }; 80 | 81 | // validations for worksheetCustomizer option 82 | export const WORKSHEETCUSTOMIZER_CHECK = async (argv: any) => { 83 | if ('worksheetCustomizer' in argv) { 84 | let fct = argv.worksheetCustomizer as any; 85 | if (isFunction(fct) && fct.length === 1) { 86 | return true; 87 | } else { 88 | return new Error( 89 | "worksheetCustomizer is not an function or doesn't take an single argument" 90 | ); 91 | } 92 | } else { 93 | return true; 94 | } 95 | }; 96 | 97 | // export checks in expected order into a single array 98 | export const CHECKS = [ 99 | COLUMNS_CHECK, 100 | COLUMNS_AND_FILES_CHECK, 101 | WORKSHEETCUSTOMIZER_CHECK, 102 | ]; 103 | -------------------------------------------------------------------------------- /src/checks/export/index.ts: -------------------------------------------------------------------------------- 1 | // common check for this command 2 | export * from './export_common_checks'; 3 | 4 | // check for xlsx sub command 5 | export * as XLSX from './export_xlsx_checks'; 6 | 7 | // check for csv sub command 8 | // as it is identical (at that time) to xlsx, simply re-export same module 9 | export * as CSV from './export_csv_checks'; 10 | -------------------------------------------------------------------------------- /src/checks/import/import_common_checks.ts: -------------------------------------------------------------------------------- 1 | // lodash methodes 2 | import uniq from 'lodash/uniq'; 3 | 4 | // key separator check 5 | import KEYSEPARATOR_CHECK from '../commons/keySeparator_check'; 6 | 7 | // validation for locales option 8 | export const LOCALES_CHECK = async (argv: any) => { 9 | const locales = argv.locales as any[]; 10 | /* istanbul ignore if */ 11 | if (uniq(locales).length !== locales.length) { 12 | return new Error("locales options doesn't contain uniq values"); 13 | } 14 | 15 | // pass validation 16 | return true; 17 | }; 18 | 19 | // export checks in expected order into a single array 20 | export const CHECKS = [KEYSEPARATOR_CHECK, LOCALES_CHECK]; 21 | -------------------------------------------------------------------------------- /src/checks/import/import_xlsx_checks.ts: -------------------------------------------------------------------------------- 1 | // lodash methodes 2 | import isPlainObject from 'lodash/isPlainObject'; 3 | import has from 'lodash/has'; 4 | import find from 'lodash/find'; 5 | import isString from 'lodash/isString'; 6 | 7 | // required properties for columns 8 | const REQUIRED_PROPERTIES = ['technical_key', 'locales']; 9 | 10 | // validation for columns option 11 | export const COLUMNS_CHECK = async (argv: any) => { 12 | let columns = argv.columns as any; 13 | if (!isPlainObject(columns)) { 14 | return new Error('columns is not a JSON Object'); 15 | } 16 | 17 | // check presence of required properties 18 | let missingProp = find(REQUIRED_PROPERTIES, (prop) => !has(columns, prop)); 19 | if (missingProp) { 20 | return new Error(`${missingProp} couldn't be found in columns object`); 21 | } 22 | 23 | // check if technical_key is an string 24 | if (!isString(columns.technical_key)) { 25 | return new Error("technical_key in columns object isn't a String"); 26 | } 27 | 28 | // check if locales key is an object 29 | if (!isPlainObject(columns.locales)) { 30 | return new Error('locales key in columns object is not a JSON Object'); 31 | } 32 | 33 | // check if locales values all are string 34 | if (!Object.values(columns.locales).every((v) => isString(v))) { 35 | return new Error( 36 | "At least one value for locales key in columns object isn't a string" 37 | ); 38 | } 39 | 40 | // pass validation 41 | return true; 42 | }; 43 | 44 | // export checks in expected order into a single array 45 | export const CHECKS = [COLUMNS_CHECK]; 46 | -------------------------------------------------------------------------------- /src/checks/import/index.ts: -------------------------------------------------------------------------------- 1 | // common check for this command 2 | export * from './import_common_checks'; 3 | 4 | // check for xlsx sub command 5 | export * as XLSX from './import_xlsx_checks'; 6 | 7 | // check for csv sub command 8 | // as it is identical (at that time) to xlsx, simply re-export same module 9 | export * as CSV from './import_xlsx_checks'; 10 | -------------------------------------------------------------------------------- /src/checks/index.ts: -------------------------------------------------------------------------------- 1 | // re export stuff for easier import 2 | export * as EXPORT_CHECKS from './export/index'; 3 | export * as IMPORT_CHECKS from './import/index'; 4 | export * as DIFF_CHECKS from './diff_checks'; 5 | 6 | // Yargs parser doesn't stop when issue(s) occurs and only returns last error. 7 | // So I need something that resolves promises sequentially and return first error 8 | // See https://github.com/yargs/yargs/issues/1399 9 | // See https://github.com/yargs/yargs/issues/1975 10 | 11 | type PromiseCheck = (argv: any) => Promise; 12 | 13 | export const resolveChecksInOrder = (checks: PromiseCheck[]) => { 14 | return async (argv: any) => { 15 | for (let check of checks) { 16 | try { 17 | //console.log(`Check ${check.name}`); // to make easier debugging in the future 18 | let result = await check(argv); 19 | if (result !== true) { 20 | return result; 21 | } 22 | } catch (error) { 23 | /* istanbul ignore next */ 24 | return error; 25 | } 26 | } 27 | return true; 28 | }; 29 | }; 30 | -------------------------------------------------------------------------------- /src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | /* istanbul ignore file */ 3 | // Simple way to test this file : node dist/index.js 4 | import yargs from 'yargs'; 5 | import { hideBin } from 'yargs/helpers'; 6 | import exportCmd from './cmds/export'; 7 | import importCmd from './cmds/import'; 8 | import diffCmd from './cmds/diff'; 9 | 10 | /* eslint-disable @typescript-eslint/no-unused-expressions */ 11 | yargs(hideBin(process.argv)) 12 | .scriptName('@jy95/i18n-tools') 13 | .parserConfiguration({ 14 | // https://github.com/jy95/i18n-tools/issues/61 15 | 'camel-case-expansion': false, 16 | }) 17 | // commandDir doesn't work very well in Typescript 18 | .command(exportCmd) 19 | .command(importCmd) 20 | .command(diffCmd) 21 | .demandCommand() 22 | .help().argv; 23 | -------------------------------------------------------------------------------- /src/cmds/diff.ts: -------------------------------------------------------------------------------- 1 | // checks import 2 | import { resolveChecksInOrder, DIFF_CHECKS } from '../checks/index'; 3 | 4 | // For typing 5 | // eslint-disable-next-line 6 | import type { Argv } from 'yargs'; 7 | import { backupPaths, parsePathsToJSON } from '../middlewares/middlewares'; 8 | import { CommonDiffArguments, ChangesOps } from '../types/diffTypes'; 9 | import CommandBuilder from '../commons/commandBuilder'; 10 | 11 | // sub fonctions 12 | import detectChanges from './diff/detectChanges'; 13 | import reporters from './diff/reporter-strategies/index'; 14 | 15 | // checks for this command 16 | const CHECKS = DIFF_CHECKS.CHECKS; 17 | 18 | // named exports 19 | export const command = 'diff [files..]'; 20 | export const description = 21 | 'Compare at least two i18n files & generate a report'; 22 | 23 | // Builder for yargs 24 | export class CommonDiffYargsBuilder extends CommandBuilder { 25 | addFilenameOption() { 26 | this.y = this.y 27 | .option('filename', { 28 | type: 'string', 29 | alias: 'of', 30 | describe: 31 | 'Name of the output file generated by this CLI (without extension)', 32 | }) 33 | // default value for filename 34 | .default('filename', function () { 35 | const date = new Date(); 36 | const timestamp = `${date.getDate()}-${ 37 | date.getMonth() + 1 38 | }-${date.getFullYear()} ${date.getHours()}h${date.getMinutes()}m${date.getSeconds()}`; 39 | return `diff_report_${timestamp}`; 40 | }); 41 | return this; 42 | } 43 | 44 | addOutputFormatOption() { 45 | this.y = this.y.option('outputFormat', { 46 | describe: 'Output format', 47 | choices: ['JSON'], 48 | default: 'JSON', 49 | }); 50 | return this; 51 | } 52 | 53 | addFilesOption() { 54 | this.y = this.y 55 | // save provided paths into a backup key 56 | .middleware(backupPaths('files', 'paths'), true) 57 | // coerce varidic path(s) into Object(s) 58 | .middleware(parsePathsToJSON('files'), true); 59 | return this; 60 | } 61 | 62 | addOperationsOption() { 63 | this.y = this.y.option('operations', { 64 | type: 'array', 65 | describe: 66 | 'Array of operations (such as ["ADD", "PUT"]) that should be checked when comparing files', 67 | default: Object.keys(ChangesOps), 68 | }); 69 | return this; 70 | } 71 | } 72 | 73 | export const builder = function (y: Argv) { 74 | return ( 75 | new CommonDiffYargsBuilder(y) 76 | .addFilenameOption() 77 | .addOutputDirOption() 78 | .addKeySeparatorOption() 79 | .addOutputFormatOption() 80 | .addOperationsOption() 81 | .addFilesOption() 82 | .addSettingConfig() 83 | .build() 84 | // validations 85 | .check(resolveChecksInOrder(CHECKS)) 86 | ); 87 | }; 88 | 89 | export const handler = async function (argv: any) { 90 | try { 91 | const changes = detectChanges(argv as CommonDiffArguments); 92 | console.log(`Preparing the report file ...`); 93 | await reporters({ 94 | yargs: argv as CommonDiffArguments, 95 | changes: changes, 96 | }); 97 | console.log('Successfully wrote the report file'); 98 | return Promise.resolve(undefined); 99 | } catch (/* istanbul ignore next */ err) { 100 | /* istanbul ignore next */ 101 | return Promise.reject(err); 102 | } 103 | }; 104 | 105 | // default export 106 | export default { 107 | command: command, 108 | description: description, 109 | builder: builder, 110 | handler: handler, 111 | }; 112 | -------------------------------------------------------------------------------- /src/cmds/diff/detectChanges.ts: -------------------------------------------------------------------------------- 1 | import { CommonDiffArguments, ChangesOps } from '../../types/diffTypes'; 2 | import type { 3 | ChangeOperations, 4 | CommonChangeOperation, 5 | AddOperation, 6 | DelOperation, 7 | PutOperation, 8 | } from '../../types/diffTypes'; 9 | 10 | // Own methods 11 | import getLeavesPathes from '../../commons/getLeavesPathes'; 12 | import get from '../../commons/enhancedGet'; 13 | 14 | // lodash method 15 | import intersection from 'lodash/intersection'; 16 | import isEqual from 'lodash/isEqual'; 17 | import difference from 'lodash/difference'; 18 | 19 | type fileParam = { 20 | keys: string[]; 21 | file: string; 22 | obj: any; 23 | }; 24 | // create an "ChangeOperation" 25 | function createChangeOperation( 26 | technicalKey: string, 27 | op: ChangesOps, 28 | file1: fileParam, 29 | file2: fileParam, 30 | keySeparator: string | false 31 | ): ChangeOperations { 32 | // common part 33 | let obj: CommonChangeOperation = { 34 | key: technicalKey, 35 | type: op, 36 | from: file1.file, 37 | to: file2.file, 38 | }; 39 | // specific parts 40 | if ([ChangesOps.DEL, ChangesOps.PUT].some((o) => o === op)) { 41 | (obj as DelOperation | PutOperation).oldValue = get( 42 | file1.obj, 43 | technicalKey, 44 | keySeparator 45 | ); 46 | } 47 | if ([ChangesOps.ADD, ChangesOps.PUT].some((o) => o === op)) { 48 | (obj as AddOperation | PutOperation).newValue = get( 49 | file2.obj, 50 | technicalKey, 51 | keySeparator 52 | ); 53 | } 54 | // return result 55 | return obj as ChangeOperations; 56 | } 57 | 58 | // computes changes 59 | export default function detectChanges( 60 | argv: CommonDiffArguments 61 | ): ChangeOperations[] { 62 | let result: ChangeOperations[] = []; 63 | let keySeparator: string | false = argv.keySeparator; 64 | let operations: string[] = argv.operations; 65 | 66 | // Fetch keys 67 | let files: fileParam[] = argv.files.map((file, idx) => ({ 68 | // like done in backupPaths function 69 | file: `file${idx + 1}`, 70 | // get leaves paths for provided file 71 | keys: getLeavesPathes(file), 72 | // object in order to access properties 73 | obj: file, 74 | })); 75 | 76 | // Computes pairs for comparisons 77 | // Given an array with [file1, file2, file3] , it would mean two pairs [file1, file2] & [file2, file3] 78 | let comparaison_pairs = Array.from( 79 | { length: files.length - 1 }, 80 | (_, idx) => idx 81 | ).map((idx) => [files[idx], files[idx + 1]]); 82 | 83 | // Made comparisons 84 | for (let [file1, file2] of comparaison_pairs) { 85 | // Computes changes of values 86 | /* istanbul ignore else - No need to check all combinaisons to see that user choice is respected */ 87 | if (operations.includes('PUT')) { 88 | let sameKeys = intersection(file1.keys, file2.keys); 89 | let modifiedKeys = sameKeys.filter( 90 | (key) => 91 | !isEqual( 92 | get(file1.obj, key, keySeparator), 93 | get(file2.obj, key, keySeparator) 94 | ) 95 | ); 96 | 97 | result.push( 98 | ...modifiedKeys.map((key) => 99 | createChangeOperation(key, ChangesOps.PUT, file1, file2, keySeparator) 100 | ) 101 | ); 102 | } 103 | 104 | // Computes deleted keys 105 | /* istanbul ignore else - No need to check all combinaisons to see that user choice is respected */ 106 | if (operations.includes('DEL')) { 107 | result.push( 108 | ...difference(file1.keys, file2.keys).map((key) => 109 | createChangeOperation(key, ChangesOps.DEL, file1, file2, keySeparator) 110 | ) 111 | ); 112 | } 113 | 114 | // Computes new keys 115 | /* istanbul ignore else - No need to check all combinaisons to see that user choice is respected */ 116 | if (operations.includes('ADD')) { 117 | result.push( 118 | ...difference(file2.keys, file1.keys).map((key) => 119 | createChangeOperation(key, ChangesOps.ADD, file1, file2, keySeparator) 120 | ) 121 | ); 122 | } 123 | } 124 | 125 | return result; 126 | } 127 | -------------------------------------------------------------------------------- /src/cmds/diff/reporter-strategies/index.ts: -------------------------------------------------------------------------------- 1 | import { DiffExportParameters } from '../../../types/diffTypes'; 2 | import toJSON from './toJSON'; 3 | 4 | export default async function (params: DiffExportParameters) { 5 | switch (params.yargs.outputFormat) { 6 | case 'JSON': 7 | return toJSON(params); 8 | /* istanbul ignore next */ 9 | default: 10 | return Promise.reject('Strategy not implemented'); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/cmds/diff/reporter-strategies/toJSON.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import { resolve as pathResolve } from 'path'; 3 | import { DiffExportParameters } from '../../../types/diffTypes'; 4 | 5 | export default async function (params: DiffExportParameters) { 6 | let filename = pathResolve( 7 | params.yargs.outputDir, 8 | `${params.yargs.filename}.json` 9 | ); 10 | // KISS : just return the changes array 11 | return await fs.promises.writeFile( 12 | filename, 13 | JSON.stringify({ 14 | files: params.yargs.paths, 15 | changes: params.changes, 16 | }) 17 | ); 18 | } 19 | -------------------------------------------------------------------------------- /src/cmds/export.ts: -------------------------------------------------------------------------------- 1 | // export command 2 | import export_xlsx from './export_cmds/export_xlsx'; 3 | import export_csv from './export_cmds/export_csv'; 4 | 5 | // named exports 6 | export const command = 'export '; 7 | export const description = 'Export i18n files into something else'; 8 | 9 | export const builder = function (y: any) { 10 | return ( 11 | y 12 | // commandDir doesn't work very well in Typescript 13 | .command(export_xlsx) 14 | .command(export_csv) 15 | ); 16 | }; 17 | /* istanbul ignore next */ 18 | export const handler = function (_: any) {}; 19 | 20 | // default export 21 | export default { 22 | command: command, 23 | description: description, 24 | builder: builder, 25 | handler: handler, 26 | }; 27 | -------------------------------------------------------------------------------- /src/cmds/export_cmds/export_commons.ts: -------------------------------------------------------------------------------- 1 | import fs, { PathLike } from 'fs'; 2 | 3 | // lodash methodes 4 | import groupBy from 'lodash/groupBy'; 5 | import flattenDeep from 'lodash/flattenDeep'; 6 | 7 | // "Enhanced get" 8 | import get from '../../commons/enhancedGet'; 9 | 10 | // For typings 11 | import { 12 | CommonExportArguments, 13 | I18N_Merged_Data, 14 | } from '../../types/exportTypes'; 15 | 16 | // middelware 17 | import { 18 | parsePathToJSON, 19 | parsePathToFunction, 20 | } from '../../middlewares/middlewares'; 21 | import getLeavesPathes from '../../commons/getLeavesPathes'; 22 | import CommandBuilder from '../../commons/commandBuilder'; 23 | type I18N_Object = { [x: string]: string | Array | I18N_Object }; 24 | type I18N_Result = { 25 | technical_key: string; 26 | label: string; 27 | locale: string; 28 | }[]; 29 | 30 | // Builder for yargs 31 | export class CommonExportYargsBuilder extends CommandBuilder { 32 | addFilesOption() { 33 | this.y = this.y 34 | .option('files', { 35 | describe: 36 | 'Absolute path to a JSON object that have as key an unique identifier and value the absolute path to a i18n file, such as : { "FR": "/somePath/fr.json", "NL": "/somePath/nl.json"}', 37 | demandOption: true, 38 | }) 39 | // coerce files into Object 40 | .middleware(parsePathToJSON('files'), true); 41 | return this; 42 | } 43 | 44 | addFilenameOption() { 45 | this.y = this.y 46 | .option('filename', { 47 | type: 'string', 48 | alias: 'of', 49 | describe: 50 | 'Name of the output file generated by this CLI (without extension)', 51 | }) 52 | // default value for filename 53 | .default('filename', function () { 54 | const date = new Date(); 55 | const timestamp = `${date.getDate()}-${ 56 | date.getMonth() + 1 57 | }-${date.getFullYear()} ${date.getHours()}h${date.getMinutes()}m${date.getSeconds()}`; 58 | return `translations_${timestamp}`; 59 | }); 60 | return this; 61 | } 62 | 63 | addResultsFilterOption() { 64 | this.y = this.y 65 | .option('resultsFilter', { 66 | description: 67 | 'Absolute path to a JS module to filter rows that will be exported. This js file exports a default function with the following signature : (x: I18N_Merged_Data) => I18N_Merged_Data)', 68 | }) 69 | // coerce resultsFilter into function 70 | .middleware(parsePathToFunction('resultsFilter'), true); 71 | return this; 72 | } 73 | } 74 | 75 | // turns n i18n file(s) into a merged version 76 | export function merge_i18n_files( 77 | argv: CommonExportArguments 78 | ): Promise { 79 | return new Promise((resolve, reject) => { 80 | Promise 81 | // Read files and convert them to useful obj 82 | .all( 83 | Object.entries(argv.files).map((entry) => 84 | readFile(entry, argv.keySeparator) 85 | ) 86 | ) 87 | // merge results 88 | .then((results) => mergeResults(results)) 89 | .then((data) => resolve(data)) 90 | .catch(/* istanbul ignore next */ (err) => reject(err)); 91 | }); 92 | } 93 | 94 | // merge_i18n_files sub functions 95 | 96 | // read file and turning into a useful array of objects 97 | function readFile( 98 | [locale, file_path]: [string, PathLike], 99 | keySeparator: string | false 100 | ): Promise { 101 | return new Promise((resolve, reject) => { 102 | fs.promises 103 | .readFile(file_path, 'utf8') 104 | .then((jsonData) => Promise.resolve(JSON.parse(jsonData))) 105 | .then((json) => i18n_to_result_format(json, locale, keySeparator)) 106 | .then((result) => resolve(result)) 107 | .catch(/* istanbul ignore next */ (err) => reject(err)); 108 | }); 109 | } 110 | 111 | // turns i18n object to usable format 112 | function i18n_to_result_format( 113 | obj: I18N_Object, 114 | locale: string, 115 | keySeparator: string | false 116 | ): I18N_Result { 117 | let leafPaths = getLeavesPathes(obj, keySeparator); 118 | return leafPaths.map((leafPath) => ({ 119 | locale: locale, 120 | technical_key: leafPath, 121 | label: get(obj, leafPath, keySeparator) as string, 122 | })); 123 | } 124 | 125 | // merge array of {"technical_key": "...", "label": "...", "locale": "..."} 126 | // into {"technical_key": ..., "labels": { "FR": ..., "NL": ..., "DE": ... }} 127 | function mergeResults(results: I18N_Result[]): Promise { 128 | const flattenResults = flattenDeep(results); 129 | let groupBy_technical_key = groupBy(flattenResults, 'technical_key'); 130 | 131 | let final_result = Object.keys(groupBy_technical_key) 132 | .sort() 133 | .map((key) => { 134 | return { 135 | technical_key: key, 136 | labels: groupBy_technical_key[key].reduce((prev: any, curr: any) => { 137 | prev[curr['locale']] = curr['label']; 138 | return prev; 139 | }, {}), 140 | }; 141 | }); 142 | return Promise.resolve(final_result); 143 | } 144 | -------------------------------------------------------------------------------- /src/cmds/export_cmds/export_csv.ts: -------------------------------------------------------------------------------- 1 | // for fs ops 2 | import { resolve as pathResolve } from 'path'; 3 | import { Workbook } from 'exceljs'; 4 | 5 | // common fct 6 | import { merge_i18n_files, CommonExportYargsBuilder } from './export_commons'; 7 | import { parsePathToJSON } from '../../middlewares/middlewares'; 8 | 9 | // checks import 10 | import { resolveChecksInOrder, EXPORT_CHECKS } from '../../checks/index'; 11 | 12 | // For typing 13 | // eslint-disable-next-line 14 | import { Argv } from 'yargs'; 15 | import { CSVExportArguments, I18N_Merged_Data } from '../../types/exportTypes'; 16 | 17 | // checks for this command 18 | const CHECKS = [...EXPORT_CHECKS.CHECKS, ...EXPORT_CHECKS.CSV.CHECKS]; 19 | 20 | // named exports 21 | export const command = 'to_csv'; 22 | export const description = 'Export i18n files into a csv file'; 23 | 24 | // Builder for yargs 25 | export class CsvExportYargsBuilder extends CommonExportYargsBuilder { 26 | addColumnsOption() { 27 | this.y = this.y 28 | .option('columns', { 29 | description: 30 | 'Absolute path to a JSON array of objects, to control the columns. Example : [{ "locale": "FR", "label": "French translation" }]', 31 | demandOption: true, 32 | }) 33 | // coerce columns into Object 34 | .middleware(parsePathToJSON('columns'), true); 35 | return this; 36 | } 37 | 38 | addDelimiterOption() { 39 | this.y = this.y.option('delimiter', { 40 | description: 'Specify an field delimiter such as | or \\t', 41 | choices: [',', ';', '\t', ' ', '|'], 42 | default: ';', 43 | }); 44 | return this; 45 | } 46 | 47 | addRowDelimiterOption() { 48 | this.y = this.y.option('rowDelimiter', { 49 | description: 'Specify an alternate row delimiter (i.e \\r\\n)', 50 | type: 'string', 51 | default: '\n', 52 | }); 53 | return this; 54 | } 55 | 56 | addQuoteOption() { 57 | this.y = this.y.option('quote', { 58 | description: 'String to quote fields that contain a delimiter', 59 | type: 'string', 60 | default: '"', 61 | }); 62 | return this; 63 | } 64 | 65 | addEscapeOption() { 66 | this.y = this.y.option('escape', { 67 | description: 68 | 'The character to use when escaping a value that is quoted and contains a quote character that is not the end of the field', 69 | type: 'string', 70 | default: '"', 71 | }); 72 | return this; 73 | } 74 | 75 | addWriteBOMOption() { 76 | this.y = this.y.option('writeBOM', { 77 | description: 78 | 'Set to true if you want the first character written to the stream to be a utf-8 BOM character.', 79 | type: 'boolean', 80 | default: false, 81 | }); 82 | return this; 83 | } 84 | 85 | addQuoteHeadersOption() { 86 | this.y = this.y.option('quoteHeaders', { 87 | description: 'If true then all headers will be quoted', 88 | type: 'boolean', 89 | default: true, 90 | }); 91 | return this; 92 | } 93 | } 94 | 95 | export const builder = function (y: Argv) { 96 | return ( 97 | new CsvExportYargsBuilder(y) 98 | .addFilesOption() 99 | .addFilenameOption() 100 | .addOutputDirOption() 101 | .addSettingConfig() 102 | .addKeySeparatorOption() 103 | .addColumnsOption() 104 | .addDelimiterOption() 105 | .addRowDelimiterOption() 106 | .addQuoteOption() 107 | .addEscapeOption() 108 | .addWriteBOMOption() 109 | .addQuoteHeadersOption() 110 | .addResultsFilterOption() 111 | .build() 112 | // validations 113 | .check(resolveChecksInOrder(CHECKS)) 114 | ); 115 | }; 116 | 117 | export const handler = async function (argv: CSVExportArguments) { 118 | try { 119 | let data: I18N_Merged_Data = await merge_i18n_files(argv); 120 | const CSV_FILE = pathResolve(argv.outputDir, argv.filename + '.csv'); 121 | if (argv.resultsFilter) { 122 | data = (argv.resultsFilter as (x: I18N_Merged_Data) => I18N_Merged_Data)( 123 | data 124 | ); 125 | } 126 | await export_as_csv(CSV_FILE, argv, data); 127 | console.log(`${CSV_FILE} successfully written`); 128 | return Promise.resolve(undefined); 129 | } catch (/* istanbul ignore next */ err) { 130 | return Promise.reject(err); 131 | } 132 | }; 133 | 134 | // write 135 | async function export_as_csv( 136 | CSV_FILE: string, 137 | argv: CSVExportArguments, 138 | data: I18N_Merged_Data 139 | ) { 140 | console.log('Preparing CSV file ...'); 141 | 142 | // prepare data 143 | const workbook = new Workbook(); 144 | let worksheet = workbook.addWorksheet(); 145 | 146 | // Set up columns 147 | worksheet.columns = [ 148 | { header: 'Technical Key', key: 'technical_key' }, 149 | ].concat( 150 | argv.columns.map(({ label, locale }) => ({ 151 | header: label, 152 | key: `labels.${locale}`, 153 | })) 154 | ); 155 | 156 | // workaround as Exceljs doesn't support nested key 157 | worksheet.addRows( 158 | data.map((item) => 159 | argv.columns.reduce( 160 | (acc: { [x: string]: string }, { locale }) => { 161 | acc[`labels.${locale}`] = item['labels'][locale] || ''; 162 | return acc; 163 | }, 164 | { technical_key: item['technical_key'] } 165 | ) 166 | ) 167 | ); 168 | 169 | // finally write this file 170 | const options = { 171 | // https://c2fo.io/fast-csv/docs/formatting/options 172 | formatterOptions: { 173 | delimiter: argv.delimiter, 174 | rowDelimiter: argv.rowDelimiter, 175 | quote: argv.quote, 176 | escape: argv.escape, 177 | writeBOM: argv.writeBOM, 178 | quoteHeaders: argv.quoteHeaders, 179 | }, 180 | }; 181 | return workbook.csv.writeFile(CSV_FILE, options); 182 | } 183 | 184 | // default export 185 | export default { 186 | command: command, 187 | description: description, 188 | builder: builder, 189 | handler: handler, 190 | }; 191 | -------------------------------------------------------------------------------- /src/cmds/export_cmds/export_xlsx.ts: -------------------------------------------------------------------------------- 1 | // for fs ops 2 | import { resolve as pathResolve } from 'path'; 3 | import { Workbook, Worksheet } from 'exceljs'; 4 | 5 | // common fct 6 | import { merge_i18n_files, CommonExportYargsBuilder } from './export_commons'; 7 | import { 8 | parsePathToJSON, 9 | parsePathToFunction, 10 | } from '../../middlewares/middlewares'; 11 | 12 | // checks import 13 | import { resolveChecksInOrder, EXPORT_CHECKS } from '../../checks/index'; 14 | 15 | // For typing 16 | // eslint-disable-next-line 17 | import type { Argv } from 'yargs'; 18 | import { XLSXExportArguments, I18N_Merged_Data } from '../../types/exportTypes'; 19 | 20 | // checks for this command 21 | const CHECKS = [...EXPORT_CHECKS.CHECKS, ...EXPORT_CHECKS.XLSX.CHECKS]; 22 | 23 | // named exports 24 | export const command = 'to_xlsx'; 25 | export const description = 26 | 'Export i18n files into a xlsx file, created by exceljs'; 27 | 28 | // Builder for yargs 29 | export class XlsxExportYargsBuilder extends CommonExportYargsBuilder { 30 | addColumnsOption() { 31 | this.y = this.y 32 | .option('columns', { 33 | description: 34 | 'Absolute path to a JSON array of objects, to control the columns. Example : [{ "locale": "FR", "label": "French translation" }]', 35 | demandOption: true, 36 | }) 37 | // coerce columns into Object 38 | .middleware(parsePathToJSON('columns'), true); 39 | return this; 40 | } 41 | 42 | addWorksheetCustomizerOption() { 43 | this.y = this.y 44 | .option('worksheetCustomizer', { 45 | description: 46 | 'Absolute path to a JS module to customize the generated xlsx, thanks to exceljs. This js file exports a default async function with the following signature : (worksheet : Excel.Worksheet) => Promise', 47 | }) 48 | // coerce worksheetCustomizer into function 49 | .middleware(parsePathToFunction('worksheetCustomizer'), true); 50 | return this; 51 | } 52 | 53 | addWorksheetNameOption() { 54 | this.y = this.y.option('worksheetName', { 55 | type: 'string', 56 | description: 'Name of the worksheet', 57 | default: 'Translations', 58 | }); 59 | return this; 60 | } 61 | } 62 | 63 | export const builder = function (y: Argv) { 64 | return ( 65 | new XlsxExportYargsBuilder(y) 66 | .addFilesOption() 67 | .addFilenameOption() 68 | .addOutputDirOption() 69 | .addSettingConfig() 70 | .addKeySeparatorOption() 71 | .addColumnsOption() 72 | .addWorksheetCustomizerOption() 73 | .addWorksheetNameOption() 74 | .addResultsFilterOption() 75 | .build() 76 | // validations 77 | .check(resolveChecksInOrder(CHECKS)) 78 | ); 79 | }; 80 | 81 | export const handler = async function (argv: XLSXExportArguments) { 82 | try { 83 | let data: I18N_Merged_Data = await merge_i18n_files(argv); 84 | const XLSX_FILE = pathResolve(argv.outputDir, argv.filename + '.xlsx'); 85 | if (argv.resultsFilter) { 86 | data = (argv.resultsFilter as (x: I18N_Merged_Data) => I18N_Merged_Data)( 87 | data 88 | ); 89 | } 90 | await export_as_excel(XLSX_FILE, argv, data); 91 | console.log(`${XLSX_FILE} successfully written`); 92 | return Promise.resolve(undefined); 93 | } catch (/* istanbul ignore next */ err) { 94 | return Promise.reject(err); 95 | } 96 | }; 97 | 98 | // write 99 | async function export_as_excel( 100 | XLSX_FILE: string, 101 | argv: XLSXExportArguments, 102 | data: I18N_Merged_Data 103 | ) { 104 | console.log('Preparing XLSX file ...'); 105 | 106 | // prepare data 107 | const workbook = new Workbook(); 108 | let worksheet = workbook.addWorksheet(argv.worksheetName); 109 | 110 | // Set up columns 111 | worksheet.columns = [ 112 | { header: 'Technical Key', key: 'technical_key' }, 113 | ].concat( 114 | argv.columns.map(({ label, locale }) => ({ 115 | header: label, 116 | key: `labels.${locale}`, 117 | })) 118 | ); 119 | 120 | // workaround as Exceljs doesn't support nested key 121 | worksheet.addRows( 122 | data.map((item) => 123 | argv.columns.reduce( 124 | (acc: { [x: string]: string }, { locale }) => { 125 | acc[`labels.${locale}`] = item['labels'][locale] || ''; 126 | return acc; 127 | }, 128 | { technical_key: item['technical_key'] } 129 | ) 130 | ) 131 | ); 132 | 133 | // If worksheetCustomizer was set, give user total control on worksheet output 134 | if (argv.worksheetCustomizer) { 135 | console.log('Applying worksheetCustomizer ...'); 136 | worksheet = await ( 137 | argv.worksheetCustomizer as (x: Worksheet) => Promise 138 | )(worksheet); 139 | } 140 | 141 | // finally write this file 142 | return workbook.xlsx.writeFile(XLSX_FILE); 143 | } 144 | 145 | // default export 146 | export default { 147 | command: command, 148 | description: description, 149 | builder: builder, 150 | handler: handler, 151 | }; 152 | -------------------------------------------------------------------------------- /src/cmds/import.ts: -------------------------------------------------------------------------------- 1 | // import command 2 | import import_xlsx from './import_cmds/import_xlsx'; 3 | import import_csv from './import_cmds/import_csv'; 4 | 5 | // named exports 6 | export const command = 'import '; 7 | export const description = 'Turn a file to i18n file(s)'; 8 | 9 | export const builder = function (y: any) { 10 | return ( 11 | y 12 | // commandDir doesn't work very well in Typescript 13 | .command(import_xlsx) 14 | .command(import_csv) 15 | ); 16 | }; 17 | 18 | /* istanbul ignore next */ 19 | export const handler = function (_: any) {}; 20 | 21 | // default export 22 | export default { 23 | command: command, 24 | description: description, 25 | builder: builder, 26 | handler: handler, 27 | }; 28 | -------------------------------------------------------------------------------- /src/cmds/import_cmds/import_commons.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import { resolve as pathResolve } from 'path'; 3 | 4 | // lodash methodes 5 | import groupBy from 'lodash/groupBy'; 6 | 7 | // Own method 8 | import set from '../../commons/enhancedSet'; 9 | 10 | // For typings 11 | import { 12 | CommonImportArguments, 13 | extractedTranslation, 14 | } from '../../types/importTypes'; 15 | import CommandBuilder from '../../commons/commandBuilder'; 16 | 17 | // Builder for yargs 18 | export class CommonImporttYargsBuilder extends CommandBuilder { 19 | addInputOption() { 20 | this.y = this.y.options('input', { 21 | type: 'string', 22 | describe: 23 | 'Absolute path to a file that will be used as source to generate i18n file(s)', 24 | demandOption: true, 25 | }); 26 | return this; 27 | } 28 | 29 | addLocalesOption() { 30 | this.y = this.y.options('locales', { 31 | type: 'array', 32 | describe: 33 | 'Array of locales (such as ["FR", "NL"]) that will be used to generate i18n file(s)', 34 | demandOption: true, 35 | }); 36 | return this; 37 | } 38 | 39 | addSuffixOption() { 40 | this.y = this.y 41 | .option('suffix', { 42 | type: 'string', 43 | describe: 44 | "Suffix to append in the output filename(s) to distinguish executions of this script. Use an empty string if you don't want this behavior", 45 | }) 46 | .default('suffix', function () { 47 | const date = new Date(); 48 | const timestamp = `${date.getDate()}-${ 49 | date.getMonth() + 1 50 | }-${date.getFullYear()} ${date.getHours()}h${date.getMinutes()}m${date.getSeconds()}`; 51 | return `_${timestamp}`; 52 | }); 53 | return this; 54 | } 55 | } 56 | 57 | // generate filepaths for locales 58 | export function generate_i18n_filepaths(argv: CommonImportArguments) { 59 | return argv.locales.reduce((acc: { [x: string]: string }, locale: string) => { 60 | acc[locale] = pathResolve( 61 | argv.outputDir, 62 | `${locale.toLowerCase()}${argv.suffix}.json` 63 | ); 64 | return acc; 65 | }, {}); 66 | } 67 | 68 | // extractedTranslation[] to i18n file(s) 69 | export function extractedTranslations_to_i18n_files( 70 | files: { [x: string]: string }, 71 | translations: extractedTranslation[], 72 | keySeparator: string | false 73 | ) { 74 | let groupBy_locales = groupBy(translations, 'locale'); 75 | return Promise.all( 76 | Object.entries(groupBy_locales).map(([locale, translations]) => 77 | write_new_i18n_file( 78 | locale, 79 | files[locale], 80 | translations_2_i18n_object(translations, keySeparator) 81 | ) 82 | ) 83 | ); 84 | } 85 | 86 | // export result for a given language into the given file 87 | function write_new_i18n_file( 88 | locale: string, 89 | filepath: string, 90 | json: { [x: string]: any } 91 | ) { 92 | console.log(`\t Trying to write ${locale} i18n file at ${filepath}`); 93 | return new Promise((resolve, reject) => { 94 | fs.promises 95 | .writeFile(filepath, JSON.stringify(json, null, 4)) 96 | .then((_) => { 97 | console.log(`\t Successfully wrote ${locale} i18n file`); 98 | resolve(undefined); 99 | }) 100 | .catch((err) => reject(err)); 101 | }); 102 | } 103 | 104 | // Turns array for a given lang into a i18n js object 105 | function translations_2_i18n_object( 106 | translations: extractedTranslation[], 107 | keySeparator: string | false 108 | ) { 109 | let result = {}; 110 | translations.forEach((item) => { 111 | set(result, item['technical_key'], item['label'], keySeparator); 112 | }); 113 | return result; 114 | } 115 | -------------------------------------------------------------------------------- /src/cmds/import_cmds/import_csv.ts: -------------------------------------------------------------------------------- 1 | import { Workbook } from 'exceljs'; 2 | 3 | // common fct 4 | import { 5 | CommonImporttYargsBuilder, 6 | generate_i18n_filepaths, 7 | extractedTranslations_to_i18n_files, 8 | } from './import_commons'; 9 | import { parsePathToJSON } from '../../middlewares/middlewares'; 10 | 11 | // lodash methods 12 | import flattenDeep from 'lodash/flattenDeep'; 13 | 14 | // checks import 15 | import { resolveChecksInOrder, IMPORT_CHECKS } from '../../checks/index'; 16 | 17 | // For typing 18 | // eslint-disable-next-line 19 | import type { Argv } from 'yargs'; 20 | import { CSVImportArguments } from '../../types/importTypes'; 21 | 22 | // checks for this command 23 | const CHECKS = [...IMPORT_CHECKS.CHECKS, ...IMPORT_CHECKS.CSV.CHECKS]; 24 | 25 | // named exports 26 | export const command = 'from_csv'; 27 | export const description = 'Turn a csv file to i18n file(s)'; 28 | 29 | export class CsvImportYargsBuilder extends CommonImporttYargsBuilder { 30 | addColumnsOption() { 31 | this.y = this.y 32 | .options('columns', { 33 | describe: 34 | 'Absolute path to a JSON object that describe headers of the excel columns used to store translations', 35 | demandOption: true, 36 | }) 37 | // coerce columns into Object 38 | .middleware(parsePathToJSON('columns'), true); 39 | return this; 40 | } 41 | 42 | addDelimiterOption() { 43 | this.y = this.y.option('delimiter', { 44 | description: 'Specify an field delimiter such as | or \\t', 45 | choices: [',', ';', '\t', ' ', '|'], 46 | default: ';', 47 | }); 48 | return this; 49 | } 50 | 51 | addQuoteOption() { 52 | this.y = this.y.option('quote', { 53 | description: 'String used to quote fields that contain a delimiter', 54 | type: 'string', 55 | default: '"', 56 | }); 57 | return this; 58 | } 59 | 60 | addEscapeOption() { 61 | this.y = this.y.option('escape', { 62 | description: 63 | 'The character used when escaping a value that is quoted and contains a quote character that is not the end of the field', 64 | type: 'string', 65 | default: '"', 66 | }); 67 | return this; 68 | } 69 | 70 | addEncodingOption() { 71 | this.y = this.y.option('encoding', { 72 | description: 'Input file encoding', 73 | choices: ['utf8', 'utf16le', 'latin1'], 74 | default: 'utf8', 75 | }); 76 | return this; 77 | } 78 | } 79 | 80 | export const builder = function (y: Argv) { 81 | return ( 82 | new CsvImportYargsBuilder(y) 83 | .addInputOption() 84 | .addLocalesOption() 85 | .addOutputDirOption(true) 86 | .addKeySeparatorOption() 87 | .addSuffixOption() 88 | .addColumnsOption() 89 | .addDelimiterOption() 90 | .addQuoteOption() 91 | .addEscapeOption() 92 | .addEncodingOption() 93 | .addSettingConfig() 94 | .build() 95 | // validations 96 | .check(resolveChecksInOrder(CHECKS)) 97 | ); 98 | }; 99 | 100 | export const handler = async function (argv: CSVImportArguments) { 101 | try { 102 | const translations = await csv_2_translation_objects(argv); 103 | const files = generate_i18n_filepaths(argv); 104 | await extractedTranslations_to_i18n_files( 105 | files, 106 | translations, 107 | argv.keySeparator 108 | ); 109 | console.log('Successfully exported found locale(s) to i18n json file(s)'); 110 | return Promise.resolve(undefined); 111 | } catch (error) { 112 | return Promise.reject(error); 113 | } 114 | }; 115 | 116 | // Extract translations from csv file 117 | async function csv_2_translation_objects(argv: CSVImportArguments) { 118 | const options = { 119 | // https://c2fo.io/fast-csv/docs/parsing/options 120 | parserOptions: { 121 | delimiter: argv.delimiter, 122 | quote: argv.quote, 123 | escape: argv.escape, 124 | encoding: argv.encoding, 125 | }, 126 | }; 127 | const workbook = new Workbook(); 128 | const worksheet = await workbook.csv.readFile(argv.input, options); 129 | let rowCount = worksheet.rowCount; 130 | 131 | // columns properties to load 132 | let columns = argv.columns; 133 | 134 | // retrieve the headers of the table 135 | // Warning : Exceljs put for some reason a undefined value at the 0 index 136 | let headers = worksheet.getRow(1).values as (undefined | string)[]; 137 | // retrieve data of the table 138 | let data = ( 139 | worksheet.getRows(2, rowCount - 1) || /* istanbul ignore next */ [] 140 | ).map((item) => item.values); 141 | 142 | // find out where the technical key is 143 | const technical_key_index = headers.findIndex((h) => 144 | (h || '').includes(columns.technical_key) 145 | ); 146 | 147 | if (technical_key_index === -1) { 148 | return Promise.reject( 149 | new Error("Couldn't find index for technical_key with provided label") 150 | ); 151 | } 152 | 153 | // find out where the translations are positioned in the value 154 | const locales_index = Object.entries(columns.locales) 155 | .map(([key, value]) => ({ 156 | [key]: headers.findIndex((h) => (h || '').includes(value)), 157 | })) 158 | .reduce((prev, curr) => Object.assign(prev, curr), {}); 159 | 160 | // Warn users if some locale translations couldn't be found 161 | let missing_indexes = Object.entries(locales_index).filter( 162 | ([_, idx]) => idx === -1 163 | ); 164 | 165 | for (let [locale] of missing_indexes) { 166 | /* istanbul ignore next Not worthy to create a test case for that*/ 167 | console.warn( 168 | `Couldn't find index for ${locale} locale with provided label` 169 | ); 170 | } 171 | 172 | // build results 173 | let results = data.map((row: any) => 174 | Object.entries(locales_index) 175 | // skip translation(s) where index couldn't be found 176 | .filter(([_, idx]) => idx !== -1) 177 | .map(([locale, localeIndex]) => ({ 178 | technical_key: row[technical_key_index], 179 | label: row[localeIndex], 180 | locale: locale, 181 | })) 182 | ); 183 | return Promise.resolve(flattenDeep(results)); 184 | } 185 | 186 | // default export 187 | export default { 188 | command: command, 189 | description: description, 190 | builder: builder, 191 | handler: handler, 192 | }; 193 | -------------------------------------------------------------------------------- /src/cmds/import_cmds/import_xlsx.ts: -------------------------------------------------------------------------------- 1 | import { Workbook } from 'exceljs'; 2 | 3 | // common fct 4 | import { 5 | CommonImporttYargsBuilder, 6 | generate_i18n_filepaths, 7 | extractedTranslations_to_i18n_files, 8 | } from './import_commons'; 9 | import { parsePathToJSON } from '../../middlewares/middlewares'; 10 | 11 | // lodash methods 12 | import flattenDeep from 'lodash/flattenDeep'; 13 | 14 | // checks import 15 | import { resolveChecksInOrder, IMPORT_CHECKS } from '../../checks/index'; 16 | 17 | // For typing 18 | // eslint-disable-next-line 19 | import type { Argv } from 'yargs'; 20 | import { XLSXImportArguments } from '../../types/importTypes'; 21 | 22 | // checks for this command 23 | const CHECKS = [...IMPORT_CHECKS.CHECKS, ...IMPORT_CHECKS.XLSX.CHECKS]; 24 | 25 | // named exports 26 | export const command = 'from_xlsx'; 27 | export const description = 'Turn a xlsx file to i18n file(s)'; 28 | 29 | export class XlsxImportYargsBuilder extends CommonImporttYargsBuilder { 30 | addColumnsOption() { 31 | this.y = this.y 32 | .options('columns', { 33 | describe: 34 | 'Absolute path to a JSON object that describe headers of the excel columns used to store translations', 35 | demandOption: true, 36 | }) 37 | // coerce columns into Object 38 | .middleware(parsePathToJSON('columns'), true); 39 | return this; 40 | } 41 | } 42 | 43 | export const builder = function (y: Argv) { 44 | return ( 45 | new XlsxImportYargsBuilder(y) 46 | .addInputOption() 47 | .addLocalesOption() 48 | .addOutputDirOption(true) 49 | .addKeySeparatorOption() 50 | .addSuffixOption() 51 | .addColumnsOption() 52 | .addSettingConfig() 53 | .build() 54 | // validations 55 | .check(resolveChecksInOrder(CHECKS)) 56 | ); 57 | }; 58 | 59 | export const handler = async function (argv: XLSXImportArguments) { 60 | try { 61 | const translations = await xlsx_2_translation_objects(argv); 62 | const files = generate_i18n_filepaths(argv); 63 | await extractedTranslations_to_i18n_files( 64 | files, 65 | translations, 66 | argv.keySeparator 67 | ); 68 | console.log('Successfully exported found locale(s) to i18n json file(s)'); 69 | return Promise.resolve(undefined); 70 | } catch (error) { 71 | return Promise.reject(error); 72 | } 73 | }; 74 | 75 | // Extract translations from xlsx file 76 | async function xlsx_2_translation_objects(argv: XLSXImportArguments) { 77 | let workbook = await new Workbook().xlsx.readFile(argv.input); 78 | 79 | // columns properties to load 80 | let columns = argv.columns; 81 | 82 | // The file generated by export to_xlsx has a single worksheet & I only care on 83 | let worksheet = workbook.worksheets[0]; 84 | let rowCount = worksheet.rowCount; 85 | 86 | // retrieve the headers of the table 87 | // Warning : Exceljs put for some reason a undefined value at the 0 index 88 | let headers = worksheet.getRow(1).values as (undefined | string)[]; 89 | // retrieve data of the table 90 | let data = ( 91 | worksheet.getRows(2, rowCount - 1) || /* istanbul ignore next */ [] 92 | ).map((item) => item.values); 93 | 94 | // find out where the technical key is 95 | const technical_key_index = headers.findIndex((h) => 96 | (h || '').includes(columns.technical_key) 97 | ); 98 | 99 | if (technical_key_index === -1) { 100 | return Promise.reject( 101 | new Error("Couldn't find index for technical_key with provided label") 102 | ); 103 | } 104 | 105 | // find out where the translations are positioned in the value 106 | const locales_index = Object.entries(columns.locales) 107 | .map(([key, value]) => ({ 108 | [key]: headers.findIndex((h) => (h || '').includes(value)), 109 | })) 110 | .reduce((prev, curr) => Object.assign(prev, curr), {}); 111 | 112 | // Warn users if some locale translations couldn't be found 113 | let missing_indexes = Object.entries(locales_index).filter( 114 | ([_, idx]) => idx === -1 115 | ); 116 | 117 | for (let [locale] of missing_indexes) { 118 | /* istanbul ignore next Not worthy to create a test case for that*/ 119 | console.warn( 120 | `Couldn't find index for ${locale} locale with provided label` 121 | ); 122 | } 123 | 124 | // build results 125 | let results = data.map((row: any) => 126 | Object.entries(locales_index) 127 | // skip translation(s) where index couldn't be found 128 | .filter(([_, idx]) => idx !== -1) 129 | .map(([locale, localeIndex]) => ({ 130 | technical_key: row[technical_key_index], 131 | label: row[localeIndex], 132 | locale: locale, 133 | })) 134 | ); 135 | return Promise.resolve(flattenDeep(results)); 136 | } 137 | 138 | // default export 139 | export default { 140 | command: command, 141 | description: description, 142 | builder: builder, 143 | handler: handler, 144 | }; 145 | -------------------------------------------------------------------------------- /src/commons/commandBuilder.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from 'fs'; 2 | import { extname, resolve } from 'path'; 3 | import { parseUnknownToFalse } from '../middlewares/middlewares'; 4 | 5 | // For typing 6 | // eslint-disable-next-line 7 | import type { Argv } from 'yargs'; 8 | 9 | export default class CommandBuilder { 10 | y: Argv<{ [x: string]: any }>; // current yargs result 11 | 12 | constructor(y: Argv<{ [x: string]: any }>) { 13 | this.y = y; 14 | } 15 | 16 | addSettingConfig() { 17 | this.y = this.y.config('settings', function (configPath) { 18 | let ext = extname(configPath); 19 | return /\.js$/i.test(ext) 20 | ? require(configPath) 21 | : JSON.parse(readFileSync(configPath, 'utf-8')); 22 | }); 23 | return this; 24 | } 25 | 26 | addOutputDirOption(multiple = false) { 27 | this.y = this.y 28 | .option('outputDir', { 29 | type: 'string', 30 | alias: 'od', 31 | describe: `Output folder where to store the output ${ 32 | multiple ? 'file(s)' : 'file' 33 | }`, 34 | default: process.cwd(), 35 | }) 36 | // coerce path provided by outputDir 37 | .coerce(['outputDir'], resolve); 38 | return this; 39 | } 40 | 41 | addKeySeparatorOption() { 42 | this.y = this.y 43 | .option('keySeparator', { 44 | type: 'string', 45 | alias: 'ks', 46 | describe: 47 | 'Char to separate i18n keys. If working with flat JSON, set this to false', 48 | default: '.', 49 | }) 50 | // parse false values 51 | .middleware(parseUnknownToFalse('keySeparator'), true); 52 | return this; 53 | } 54 | 55 | build() { 56 | return this.y; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/commons/enhancedGet.ts: -------------------------------------------------------------------------------- 1 | // lodash methods 2 | import get from 'lodash/get'; 3 | 4 | // "Enhance" Lodash get, to deal with custom separator 5 | export default function enhancedGet( 6 | obj: any, 7 | key: string, 8 | keySeparator: string | false 9 | ) { 10 | // compute path that use dot (or custom) separator + square brack notation 11 | let path = keySeparator 12 | ? key 13 | // handle square brack notation - eg: a[10] should be translated as a.10 14 | .replace(/\[(\d+)\]/g, `${keySeparator}$1`) 15 | .split(keySeparator) 16 | : [key]; 17 | return get(obj, path); 18 | } 19 | -------------------------------------------------------------------------------- /src/commons/enhancedSet.ts: -------------------------------------------------------------------------------- 1 | // lodash methods 2 | import set from 'lodash/set'; 3 | 4 | // "Enhance" Lodash set, to deal with custom separator 5 | export default function enhancedSet( 6 | obj: any, 7 | key: string, 8 | val: any, 9 | keySeparator: string | false 10 | ) { 11 | // compute path that use dot (or custom) separator + square brack notation 12 | let path = keySeparator 13 | ? key 14 | // handle square brack notation - eg: a[10] should be translated as a.10 15 | .replace(/\[(\d+)\]/g, `${keySeparator}$1`) 16 | .split(keySeparator) 17 | : [key]; 18 | return set(obj, path, val); 19 | } 20 | -------------------------------------------------------------------------------- /src/commons/getLeavesPathes.ts: -------------------------------------------------------------------------------- 1 | // lodash methodes 2 | import isObject from 'lodash/isObject'; 3 | import reduce from 'lodash/reduce'; 4 | 5 | // Get all leaves paths of a object 6 | // Typescript code inspired by https://stackoverflow.com/a/55381003/6149867 7 | export default function getLeavesPathes( 8 | dataObj: any, 9 | keySeparator: string | false = '.' 10 | ): string[] { 11 | const reducer = (aggregator: string[], val: any, key: string) => { 12 | let paths = [key]; 13 | if (isObject(val)) { 14 | paths = reduce(val, reducer, []); 15 | // In theory, no flatten i18n should arrive here 16 | // Better prevent that cure, let have a backup scenario 17 | paths = paths.map((path) => key + (keySeparator || '.') + path); 18 | } 19 | aggregator.push(...paths); 20 | return aggregator; 21 | }; 22 | let paths = reduce(dataObj, reducer, []); 23 | if (keySeparator) { 24 | // Need to double escape stuff when using this constructor 25 | // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping 26 | const arrayIndexRegEx = new RegExp(`\\${keySeparator}(\\d+)(?!\\w+)`, 'gi'); 27 | paths = paths.map((path) => path.replace(arrayIndexRegEx, '[$1]')); 28 | } 29 | 30 | return paths; 31 | } 32 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /* istanbul ignore file */ 2 | import export_xlsx from './cmds/export_cmds/export_xlsx'; 3 | import export_csv from './cmds/export_cmds/export_csv'; 4 | import import_xlsx from './cmds/import_cmds/import_xlsx'; 5 | import import_csv from './cmds/import_cmds/import_csv'; 6 | import diff from './cmds/diff'; 7 | 8 | // export commands so that users can cherry pick what they want 9 | export { export_xlsx, export_csv, import_xlsx, import_csv, diff }; 10 | 11 | // for CJS output 12 | export default { 13 | export: { 14 | to_xlsx: export_xlsx, 15 | to_csv: export_csv, 16 | }, 17 | import: { 18 | from_xlsx: import_xlsx, 19 | from_csv: import_csv, 20 | }, 21 | diff: diff, 22 | }; 23 | -------------------------------------------------------------------------------- /src/middlewares/middlewares.ts: -------------------------------------------------------------------------------- 1 | import isString from 'lodash/isString'; 2 | import isArray from 'lodash/isArray'; 3 | import fs from 'fs'; 4 | 5 | // internal function for both parsePath(s)toJSON middlewares 6 | // could be improved later to handle URL too 7 | async function path2JSON(file: string | any) { 8 | if (isString(file)) { 9 | // arg is a Path, convert it into a JSON 10 | let potentialJSON = await fs.promises.readFile(file, 'utf-8'); 11 | return JSON.parse(potentialJSON); 12 | } else { 13 | // don't touch it 14 | return file; 15 | } 16 | } 17 | 18 | // Turn a path into a JSON object 19 | export function parsePathToJSON(prop: string) { 20 | return async (argv: any) => { 21 | let file = argv[prop] as string | any; 22 | let potentialJSON = await path2JSON(file); 23 | // reference check : if something changed, update argv 24 | if (potentialJSON !== file) { 25 | // replace prop by parsed json 26 | argv[prop] = potentialJSON; 27 | } 28 | return argv; 29 | }; 30 | } 31 | 32 | // Turn variadic paths into each a JSON object 33 | export function parsePathsToJSON(prop: string) { 34 | return async (argv: any) => { 35 | let paths = (argv[prop] || []) as (string | any)[]; 36 | let results: any[] = []; 37 | for (let file of paths) { 38 | let potentialJSON = await path2JSON(file); 39 | results.push( 40 | potentialJSON !== file ? potentialJSON : /* istanbul ignore next */ file 41 | ); 42 | } 43 | argv[prop] = results; 44 | return argv; 45 | }; 46 | } 47 | 48 | // Backup path(s) provided by user 49 | // If a path couldn't be found (because it was provided directly as object / array in settings) 50 | // Default value "undefined" will be used 51 | export function backupPaths(prop: string, backupKey: string) { 52 | return (argv: any) => { 53 | let data = argv[prop] as any | any[]; 54 | argv[backupKey] = (isArray(data) ? data : [data]).reduce( 55 | (acc, curr, idx) => { 56 | acc[`file${idx + 1}`] = isString(curr) ? curr : undefined; 57 | return acc; 58 | }, 59 | {} 60 | ); 61 | return argv; 62 | }; 63 | } 64 | 65 | // Turn path into function 66 | export function parsePathToFunction(prop: string) { 67 | return async (argv: any) => { 68 | let param = argv[prop] as string | ((x: any) => any) | undefined; 69 | if (param) { 70 | argv[prop] = isString(param) ? require(param) : param; 71 | } 72 | return argv; 73 | }; 74 | } 75 | 76 | // Turn unknown into false, if possible 77 | export function parseUnknownToFalse(prop: string) { 78 | return async (argv: any) => { 79 | let param = argv[prop] as unknown; 80 | let check = ['false', false].some((pred) => pred === param); 81 | if (check) { 82 | argv[prop] = false; 83 | } 84 | return argv; 85 | }; 86 | } 87 | -------------------------------------------------------------------------------- /src/types/diffTypes.ts: -------------------------------------------------------------------------------- 1 | // reuse yargs Argv type 2 | import { Argv } from 'yargs'; 3 | 4 | // Yargs diff arguments 5 | export interface CommonDiffArguments extends Argv { 6 | filename: string; 7 | outputDir: string; 8 | paths: { 9 | [file: string]: string | undefined; 10 | }; 11 | // Diff requires at least an array of 2 items 12 | files: [any, any, ...any[]]; 13 | // output format 14 | outputFormat: 'JSON'; 15 | // https://github.com/jy95/i18n-tools/issues/25 16 | keySeparator: '.' | string | false; 17 | // cherry pick operations for comparaison 18 | operations: string[]; 19 | } 20 | 21 | // for exporter(s) 22 | export enum ChangesOps { 23 | DEL = 'DELETE', // when key exists in file1 but not in file2 24 | ADD = 'ADD', // when key exists in file2 but not in file1 25 | PUT = 'REPLACED', // when key exists in both file1 & files2 but value was replaced 26 | } 27 | // Describe an change, done in the direction from => to 28 | export type CommonChangeOperation = { 29 | key: string; // Technical key of the prop compared 30 | type: ChangesOps; // type of operation done 31 | from: string; // first file ("file1") 32 | to: string; // second ("file2") 33 | }; 34 | // Describe a delete operation 35 | export type DelOperation = CommonChangeOperation & { 36 | type: ChangesOps.DEL; 37 | oldValue: string; 38 | }; 39 | // Describe a add operation 40 | export type AddOperation = CommonChangeOperation & { 41 | type: ChangesOps.ADD; 42 | newValue: string; 43 | }; 44 | // Describe a put operation 45 | export type PutOperation = CommonChangeOperation & { 46 | type: ChangesOps.PUT; 47 | oldValue: string; 48 | newValue: string; 49 | }; 50 | export type ChangeOperations = DelOperation | AddOperation | PutOperation; 51 | 52 | // arguments for exporter 53 | export interface DiffExportParameters { 54 | // yargs, to pass dynamically option(s) without re-exporting them 55 | yargs: CommonDiffArguments; 56 | // the changes that occurs between files 57 | changes: ChangeOperations[]; 58 | } 59 | -------------------------------------------------------------------------------- /src/types/exportTypes.ts: -------------------------------------------------------------------------------- 1 | // reuse yargs Argv type 2 | import { Argv } from 'yargs'; 3 | // reuse exceljs Worksheet type 4 | import { Worksheet } from 'exceljs'; 5 | 6 | // Result after extract of multiple i18n files 7 | export type I18N_Merged_Data = { 8 | technical_key: string; 9 | labels: { 10 | [locale: string]: string; 11 | }; 12 | }[]; 13 | 14 | // Yargs export arguments 15 | export interface CommonExportArguments extends Argv { 16 | files: { 17 | [x: string]: string; 18 | }; 19 | filename: string; 20 | outputDir: string; 21 | resultsFilter?: string | ((x: I18N_Merged_Data) => I18N_Merged_Data); 22 | // https://github.com/jy95/i18n-tools/issues/25 23 | keySeparator: '.' | string | false; 24 | } 25 | // Yargs export arguments for TO_XLSX command 26 | export interface XLSXExportArguments extends CommonExportArguments { 27 | columns: { 28 | locale: string; 29 | label: string; 30 | }[]; 31 | worksheetName: string; 32 | worksheetCustomizer?: string | ((x: Worksheet) => Promise); 33 | } 34 | // Yargs export arguments for TO_CSV command 35 | export interface CSVExportArguments extends CommonExportArguments { 36 | columns: { 37 | locale: string; 38 | label: string; 39 | }[]; 40 | delimiter: ',' | ';' | '\t' | ' ' | '|'; 41 | rowDelimiter: string; 42 | quote: '"' | string; 43 | escape: '"' | string; 44 | writeBOM: boolean; 45 | quoteHeaders: boolean; 46 | } 47 | -------------------------------------------------------------------------------- /src/types/importTypes.ts: -------------------------------------------------------------------------------- 1 | // reuse yargs Argv type 2 | import { Argv } from 'yargs'; 3 | 4 | // Yargs import arguments 5 | export interface CommonImportArguments extends Argv { 6 | input: string; 7 | outputDir: string; 8 | suffix: string; 9 | locales: string[]; 10 | // https://github.com/jy95/i18n-tools/issues/25 11 | keySeparator: '.' | string | false; 12 | } 13 | 14 | // Yargs import arguments for FROM_XLSX command 15 | export interface XLSXImportArguments extends CommonImportArguments { 16 | columns: { 17 | technical_key: string; 18 | locales: { 19 | [locale: string]: string; 20 | }; 21 | }; 22 | } 23 | 24 | // Yargs import arguments for FROM_CSV command 25 | export interface CSVImportArguments extends CommonImportArguments { 26 | columns: { 27 | technical_key: string; 28 | locales: { 29 | [locale: string]: string; 30 | }; 31 | }; 32 | delimiter: ',' | ';' | '\t' | ' ' | '|'; 33 | quote: '"' | string; 34 | escape: '"' | string; 35 | encoding: 'utf8' | 'utf16le' | 'latin1'; 36 | } 37 | 38 | // Result after extract of input file 39 | export interface extractedTranslation { 40 | technical_key: string; 41 | label: string; 42 | locale: string; 43 | } 44 | -------------------------------------------------------------------------------- /test/diff.test.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fs from 'fs'; 3 | import yargs from 'yargs'; 4 | import set from 'lodash/set'; 5 | import unset from 'lodash/unset'; 6 | // diff command 7 | import { 8 | command, 9 | description as describeText, 10 | builder, 11 | handler, 12 | } from '../src/cmds/diff'; 13 | 14 | // test helpers 15 | import { 16 | TEMP_FOLDER, 17 | VALID_TEST_FOLDER, 18 | USELESS_TEST_FOLDER, 19 | expectError, 20 | fetchOutput, 21 | fsify_structure, 22 | fsify, 23 | } from './test-helpers'; 24 | 25 | // test folders constants 26 | const ROOT_TEST_FOLDER = 'tests-for-diff'; 27 | 28 | // Build the parser used for that command 29 | const parser = yargs.command(command, describeText, builder, handler).help(); 30 | 31 | // to access easier the paths of test file paths 32 | const test_files_list = [ 33 | 'file1.json', 34 | 'file2.json', 35 | 'file3.json', 36 | // flat json, to test 37 | 'flat_file1.json', 38 | 'flat_file2.json', 39 | // to test out the json reporter 40 | 'settings1-JSON.json', 41 | 'settings2-JSON.json', 42 | 'settings3-JSON.js', 43 | 'settings4-JSON.json', 44 | // TODO test out future reporters 45 | ] as const; 46 | const [ 47 | TEST_FILE_FILE1, 48 | TEST_FILE_FILE2, 49 | TEST_FILE_FILE3, 50 | TEST_FILE_FLAT_FILE1, 51 | TEST_FILE_FLAT_FILE2, 52 | TEST_FILE_JSON_SETTINGS1, 53 | TEST_FILE_JSON_SETTINGS2, 54 | TEST_FILE_JSON_SETTINGS3, 55 | TEST_FILE_JSON_SETTINGS4, 56 | ] = test_files_list; 57 | type test_files_type = (typeof test_files_list)[number]; 58 | 59 | // files path 60 | const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( 61 | (acc: any, curr: test_files_type, idx: number) => { 62 | acc[curr] = path.join( 63 | TEMP_FOLDER, 64 | ROOT_TEST_FOLDER, 65 | idx < 10 ? VALID_TEST_FOLDER : USELESS_TEST_FOLDER, 66 | curr 67 | ); 68 | return acc; 69 | }, 70 | {} 71 | ); 72 | 73 | // to concat faster command 74 | type concat_cmd_type = (args: string[]) => string; 75 | type prepare_mandatory_args_type = (...args: test_files_type[]) => string[]; 76 | const concat_cmd: concat_cmd_type = (args: string[]) => 77 | `diff ${args.join(' ')}`; 78 | const prepare_mandatory_args: prepare_mandatory_args_type = (...files) => [ 79 | ...files.map((file) => `${TEST_FILES[file]}`), 80 | ]; 81 | 82 | // generate contents for comparison 83 | const generate_i18_contents = (idx: number) => { 84 | return [ 85 | ['untouchedKey', 'Hello World'], 86 | ['commons.nestedKey.changedValue', 'Changed value ' + idx], 87 | ['commons.array[0]', 'Pierre'], 88 | ['commons.array[1]', idx > 0 ? 'Paul' : undefined], 89 | ['commons.array[2]', idx > 1 ? 'Jacques' : undefined], 90 | ['commons.conditionalDeletedKey', idx % 2 === 0 ? 'Present' : undefined], 91 | ].reduce((acc: { [x: string]: any }, [key, value]) => { 92 | if (value !== undefined) { 93 | set(acc, key as string, value); 94 | } else { 95 | unset(acc, key as string); 96 | } 97 | return acc; 98 | }, {}); 99 | }; 100 | 101 | // file structure for fsify, in order to run the tests 102 | const structure: fsify_structure = [ 103 | { 104 | type: fsify.DIRECTORY, 105 | name: ROOT_TEST_FOLDER, 106 | contents: [ 107 | // In this folder, everything in correct 108 | { 109 | type: fsify.DIRECTORY, 110 | name: VALID_TEST_FOLDER, 111 | contents: [ 112 | ...[TEST_FILE_FILE1, TEST_FILE_FILE2, TEST_FILE_FILE3].map( 113 | (file: string, idx: number) => ({ 114 | type: fsify.FILE, 115 | name: file, 116 | contents: JSON.stringify(generate_i18_contents(idx)), 117 | }) 118 | ), 119 | // With two files 120 | { 121 | type: fsify.FILE, 122 | name: TEST_FILE_JSON_SETTINGS1, 123 | contents: JSON.stringify({ 124 | filename: 'diff_settings1-JSON', 125 | outputDir: TEMP_FOLDER, 126 | outputFormat: 'JSON', 127 | files: [TEST_FILE_FILE1, TEST_FILE_FILE2].map( 128 | (file) => TEST_FILES[file] 129 | ), 130 | }), 131 | }, 132 | // With three files 133 | { 134 | type: fsify.FILE, 135 | name: TEST_FILE_JSON_SETTINGS2, 136 | contents: JSON.stringify({ 137 | filename: 'diff_settings2-JSON', 138 | outputDir: TEMP_FOLDER, 139 | outputFormat: 'JSON', 140 | files: [TEST_FILE_FILE1, TEST_FILE_FILE2, TEST_FILE_FILE3].map( 141 | (file) => TEST_FILES[file] 142 | ), 143 | }), 144 | }, 145 | // For cherry pick operations 146 | { 147 | type: fsify.FILE, 148 | name: TEST_FILE_JSON_SETTINGS4, 149 | contents: JSON.stringify({ 150 | filename: 'diff_settings4-JSON', 151 | outputDir: TEMP_FOLDER, 152 | outputFormat: 'JSON', 153 | operations: ['PUT'], // only interessted by update operations 154 | files: [TEST_FILE_FILE1, TEST_FILE_FILE2].map( 155 | (file) => TEST_FILES[file] 156 | ), 157 | }), 158 | }, 159 | // flat files 160 | { 161 | type: fsify.FILE, 162 | name: TEST_FILE_FLAT_FILE1, 163 | contents: JSON.stringify({ 164 | 'unchanged.key_with-special-char!': 'unchanged', 165 | 'changed.key_test$': 'Hello', 166 | }), 167 | }, 168 | { 169 | type: fsify.FILE, 170 | name: TEST_FILE_FLAT_FILE2, 171 | contents: JSON.stringify({ 172 | 'unchanged.key_with-special-char!': 'unchanged', 173 | 'changed.key_test$': 'Bonjour', 174 | }), 175 | }, 176 | // js file 177 | { 178 | type: fsify.FILE, 179 | name: TEST_FILE_JSON_SETTINGS3, 180 | contents: `module.exports = { 181 | filename: 'diff_settings3-JSON', 182 | outputDir: "${TEMP_FOLDER.replace(/\\/g, '\\\\')}", 183 | outputFormat: 'JSON', 184 | files: [${[TEST_FILE_FILE1, TEST_FILE_FILE2] 185 | .map((file) => `"${TEST_FILES[file].replace(/\\/g, '\\\\')}"`) 186 | .join(',')}] 187 | }`, 188 | }, 189 | ], 190 | }, 191 | ], 192 | }, 193 | ]; 194 | 195 | // test scenarios for validations 196 | const VALIDATIONS_SCENARIOS: [ 197 | string, 198 | [test_files_type[], ...string[]], 199 | ...string[], 200 | ][] = [ 201 | [ 202 | // Test out the message : "At least two paths must be provided" 203 | 'Argument files - Not expected number of paths should be reject', 204 | [[TEST_FILE_FILE1]], 205 | 'At least two paths must be provided', 206 | ], 207 | [ 208 | // Test out the message : "Option keySeparator should be a not-empty char" 209 | 'Option keySeparator - Invalid separator should be rejected', 210 | [[TEST_FILE_FILE1, TEST_FILE_FILE2], '--keySeparator', `"HACKERMAN"`], 211 | 'keySeparator', 212 | 'not-empty char', 213 | ], 214 | ]; 215 | 216 | // E2E scenarios for JSON reporter 217 | const E2E_JSON_REPORTER: [ 218 | string, 219 | // if a single test_files_type, it is a settings file, multiple inline files otherwise 220 | [test_files_type[], ...string[]], 221 | string, 222 | any, 223 | ][] = [ 224 | [ 225 | 'Inline paths should be accepted', 226 | [ 227 | [TEST_FILE_FILE1, TEST_FILE_FILE1], 228 | '--filename', 229 | `"diff_inline-JSON"`, 230 | '--outputDir', 231 | `${TEMP_FOLDER}`, 232 | ], 233 | path.join(TEMP_FOLDER, 'diff_inline-JSON.json'), 234 | { 235 | files: { 236 | file1: TEST_FILES[TEST_FILE_FILE1], 237 | file2: TEST_FILES[TEST_FILE_FILE1], 238 | }, 239 | changes: [], 240 | }, 241 | ], 242 | [ 243 | 'should work with two files', 244 | [[TEST_FILE_JSON_SETTINGS1]], 245 | path.join(TEMP_FOLDER, 'diff_settings1-JSON.json'), 246 | { 247 | files: { 248 | file1: TEST_FILES[TEST_FILE_FILE1], 249 | file2: TEST_FILES[TEST_FILE_FILE2], 250 | }, 251 | changes: [ 252 | { 253 | from: 'file1', 254 | key: 'commons.nestedKey.changedValue', 255 | newValue: 'Changed value 1', 256 | oldValue: 'Changed value 0', 257 | to: 'file2', 258 | type: 'REPLACED', 259 | }, 260 | { 261 | from: 'file1', 262 | key: 'commons.conditionalDeletedKey', 263 | oldValue: 'Present', 264 | to: 'file2', 265 | type: 'DELETE', 266 | }, 267 | { 268 | from: 'file1', 269 | key: 'commons.array[1]', 270 | newValue: 'Paul', 271 | to: 'file2', 272 | type: 'ADD', 273 | }, 274 | ], 275 | }, 276 | ], 277 | [ 278 | 'should work with three files', 279 | [[TEST_FILE_JSON_SETTINGS2]], 280 | path.join(TEMP_FOLDER, 'diff_settings2-JSON.json'), 281 | { 282 | files: { 283 | file1: TEST_FILES[TEST_FILE_FILE1], 284 | file2: TEST_FILES[TEST_FILE_FILE2], 285 | file3: TEST_FILES[TEST_FILE_FILE3], 286 | }, 287 | changes: [ 288 | { 289 | key: 'commons.nestedKey.changedValue', 290 | type: 'REPLACED', 291 | from: 'file1', 292 | to: 'file2', 293 | oldValue: 'Changed value 0', 294 | newValue: 'Changed value 1', 295 | }, 296 | { 297 | key: 'commons.conditionalDeletedKey', 298 | type: 'DELETE', 299 | from: 'file1', 300 | to: 'file2', 301 | oldValue: 'Present', 302 | }, 303 | { 304 | key: 'commons.array[1]', 305 | type: 'ADD', 306 | from: 'file1', 307 | to: 'file2', 308 | newValue: 'Paul', 309 | }, 310 | { 311 | key: 'commons.nestedKey.changedValue', 312 | type: 'REPLACED', 313 | from: 'file2', 314 | to: 'file3', 315 | oldValue: 'Changed value 1', 316 | newValue: 'Changed value 2', 317 | }, 318 | { 319 | key: 'commons.array[2]', 320 | type: 'ADD', 321 | from: 'file2', 322 | to: 'file3', 323 | newValue: 'Jacques', 324 | }, 325 | { 326 | key: 'commons.conditionalDeletedKey', 327 | type: 'ADD', 328 | from: 'file2', 329 | to: 'file3', 330 | newValue: 'Present', 331 | }, 332 | ], 333 | }, 334 | ], 335 | [ 336 | 'should work with js config file', 337 | [[TEST_FILE_JSON_SETTINGS3]], 338 | path.join(TEMP_FOLDER, 'diff_settings3-JSON.json'), 339 | { 340 | files: { 341 | file1: TEST_FILES[TEST_FILE_FILE1], 342 | file2: TEST_FILES[TEST_FILE_FILE2], 343 | }, 344 | changes: [ 345 | { 346 | from: 'file1', 347 | key: 'commons.nestedKey.changedValue', 348 | newValue: 'Changed value 1', 349 | oldValue: 'Changed value 0', 350 | to: 'file2', 351 | type: 'REPLACED', 352 | }, 353 | { 354 | from: 'file1', 355 | key: 'commons.conditionalDeletedKey', 356 | oldValue: 'Present', 357 | to: 'file2', 358 | type: 'DELETE', 359 | }, 360 | { 361 | from: 'file1', 362 | key: 'commons.array[1]', 363 | newValue: 'Paul', 364 | to: 'file2', 365 | type: 'ADD', 366 | }, 367 | ], 368 | }, 369 | ], 370 | [ 371 | 'should work with flat json', 372 | [ 373 | [TEST_FILE_FLAT_FILE1, TEST_FILE_FLAT_FILE2], 374 | '--filename', 375 | `"diff_flat_inline-JSON"`, 376 | '--outputDir', 377 | `${TEMP_FOLDER}`, 378 | '--keySeparator', 379 | `"false"`, 380 | ], 381 | path.join(TEMP_FOLDER, 'diff_flat_inline-JSON.json'), 382 | { 383 | files: { 384 | file1: TEST_FILES[TEST_FILE_FLAT_FILE1], 385 | file2: TEST_FILES[TEST_FILE_FLAT_FILE2], 386 | }, 387 | changes: [ 388 | { 389 | from: 'file1', 390 | key: 'changed.key_test$', 391 | newValue: 'Bonjour', 392 | oldValue: 'Hello', 393 | to: 'file2', 394 | type: 'REPLACED', 395 | }, 396 | ], 397 | }, 398 | ], 399 | [ 400 | 'should respect user wanted operations for output', 401 | [[TEST_FILE_JSON_SETTINGS4]], 402 | path.join(TEMP_FOLDER, 'diff_settings4-JSON.json'), 403 | { 404 | files: { 405 | file1: TEST_FILES[TEST_FILE_FILE1], 406 | file2: TEST_FILES[TEST_FILE_FILE2], 407 | }, 408 | changes: [ 409 | { 410 | from: 'file1', 411 | key: 'commons.nestedKey.changedValue', 412 | newValue: 'Changed value 1', 413 | oldValue: 'Changed value 0', 414 | to: 'file2', 415 | type: 'REPLACED', 416 | }, 417 | ], 418 | }, 419 | ], 420 | ]; 421 | 422 | beforeAll(() => { 423 | // write temporary files 424 | return fsify(structure); 425 | }); 426 | 427 | describe('[diff command]', () => { 428 | describe('Check command availability', () => { 429 | it('Should display diff help output', async () => { 430 | const output = await fetchOutput(parser)('diff --help'); 431 | expect(output).toMatch(describeText); 432 | }); 433 | }); 434 | 435 | describe('Validations', () => { 436 | // mock console.log 437 | let consoleLog: any; 438 | beforeAll(() => { 439 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 440 | }); 441 | 442 | // restore console.log 443 | afterAll(() => { 444 | if (consoleLog !== undefined) { 445 | consoleLog.mockRestore(); 446 | } 447 | }); 448 | 449 | test.each(VALIDATIONS_SCENARIOS)( 450 | '%s', 451 | async ( 452 | _title: string, 453 | args: [test_files_type[], ...string[]], 454 | ...messages: string[] 455 | ) => { 456 | let [files, ...otherArgs] = args; 457 | let test_cmd = concat_cmd([ 458 | // optional args 459 | ...otherArgs, 460 | // mandatory args 461 | ...prepare_mandatory_args(...files), 462 | ]); 463 | //console.warn(test_cmd); 464 | // Test out if error message is thrown 465 | await expectError(parser)(test_cmd, ...messages); 466 | } 467 | ); 468 | }); 469 | 470 | describe('E2E successful scenarios', () => { 471 | // mock console.log 472 | let consoleLog: any; 473 | beforeAll(() => { 474 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 475 | }); 476 | 477 | // clear mock after each call 478 | afterEach(() => { 479 | consoleLog.mockClear(); 480 | }); 481 | 482 | // reenable console.log 483 | afterAll(() => { 484 | // restore console.log 485 | if (consoleLog !== undefined) { 486 | consoleLog.mockRestore(); 487 | } 488 | }); 489 | 490 | // JSON reporter tests 491 | test.each(E2E_JSON_REPORTER)( 492 | 'JSON reporter - %s', 493 | async ( 494 | _title: string, 495 | args: [test_files_type[], ...string[]], 496 | filepath: string, 497 | expectedObj: any 498 | ) => { 499 | let [files, ...otherArgs] = args; 500 | 501 | let test_cmd = concat_cmd([ 502 | ...(files.length === 1 503 | ? ['--settings', ...prepare_mandatory_args(...files)] 504 | : []), 505 | // optional args 506 | ...otherArgs, 507 | // mandatory args (if needed) 508 | ...(files.length >= 2 ? [...prepare_mandatory_args(...files)] : []), 509 | ]); 510 | 511 | await parser.parseAsync(test_cmd); 512 | 513 | expect(consoleLog).toHaveBeenCalledWith( 514 | 'Preparing the report file ...' 515 | ); 516 | expect(consoleLog).toHaveBeenCalledWith( 517 | 'Successfully wrote the report file' 518 | ); 519 | 520 | // check out the file 521 | let potentialJSON = await fs.promises.readFile(filepath, 'utf-8'); 522 | let result = JSON.parse(potentialJSON); 523 | // checking the result 524 | expect(result).toEqual(expectedObj); 525 | } 526 | ); 527 | }); 528 | }); 529 | -------------------------------------------------------------------------------- /test/export/export-csv.test.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import yargs from 'yargs'; 3 | // export command 4 | import { 5 | command, 6 | description as describeText, 7 | builder, 8 | } from '../../src/cmds/export'; 9 | // CSV description 10 | import { description as csv_description } from '../../src/cmds/export_cmds/export_csv'; 11 | 12 | // test helpers 13 | import { 14 | TEMP_FOLDER, 15 | VALID_TEST_FOLDER, 16 | USELESS_TEST_FOLDER, 17 | expectError, 18 | fetchOutput, 19 | fsify_structure, 20 | fsify, 21 | } from '../test-helpers'; 22 | 23 | // test folders constants 24 | const ROOT_TEST_FOLDER = 'tests-for-export-csv'; 25 | 26 | // Translations keys for test 27 | const TRANSLATIONS_KEYS = ['FR', 'NL', 'DE']; 28 | const KEYS_LABEL: { [key: string]: string } = { 29 | FR: 'French', 30 | NL: 'Dutch', 31 | DE: 'German', 32 | }; 33 | const locale_label = (locale: string) => `${KEYS_LABEL[locale]} translation`; 34 | 35 | // i18n example 36 | const generate_i18n = (locale: string) => ({ 37 | commons: { 38 | myNestedKey: `Hello world ${locale}`, 39 | myNestedArray: ['1', '2', '3'].map((item) => `${item} ${locale}`), 40 | }, 41 | array: ['1', '2', '3'].map((item) => `${item} ${locale}`), 42 | simpleKey: `[${locale}] not setted key`, 43 | 'Key with spaces': [{ test: '42 is the answer' }], 44 | 'Missing key in DE': locale !== TRANSLATIONS_KEYS[2] ? 'present' : undefined, 45 | }); 46 | 47 | // Export files 48 | const generate_files = ( 49 | locales: string[], 50 | fnMapper: (locale: string) => string 51 | ) => 52 | locales.reduce((acc: { [x: string]: string }, locale: string) => { 53 | acc[locale] = fnMapper(locale); 54 | return acc; 55 | }, {}); 56 | 57 | // Export columns 58 | const EXPORT_COLUMNS = (locales: string[]) => 59 | locales.map((locale) => ({ 60 | locale, 61 | label: locale_label(locale), 62 | })); 63 | 64 | // flat operation 65 | const flat = (arr: any[]) => [].concat(...arr); 66 | 67 | // to access easier the paths of test file paths 68 | const test_files_list = [ 69 | // correct files 70 | 'columns.json', 71 | 'files.json', 72 | 'files-flat.json', 73 | 'settings1.json', 74 | 'settings2.json', 75 | 'settings3.js', 76 | 'settings4.js', 77 | // wrong files 78 | 'emptyObject.json', 79 | 'emptyArray.json', 80 | 'files-duplicatedValues.json', 81 | 'files-invalidPath.json', 82 | 'columns-missingLabelProp.json', 83 | 'columns-wrongPropValue.json', 84 | 'columns-duplicatedValues.json', 85 | 'columns-missingKey.json', 86 | ] as const; 87 | const [ 88 | TEST_FILE_EXPORT_COLUMNS, 89 | TEST_FILE_FILES, 90 | TEST_FILE_FLAT_FILES, 91 | TEST_FILE_SETTINGS1, 92 | TEST_FILE_SETTINGS2, 93 | TEST_FILE_SETTINGS3, 94 | TEST_FILE_SETTINGS4, 95 | TEST_FILE_EMPTY_OBJECT, 96 | TEST_FILE_EMPTY_ARRAY, 97 | TEST_FILE_FILES_DUP, 98 | TEST_FILE_FILES_INVALID, 99 | TEST_FILE_EXPORT_COLUMNS_MISS_PROP, 100 | TEST_FILE_EXPORT_COLUMNS_WRONG_PROP, 101 | TEST_FILE_EXPORT_COLUMNS_DUP_VALS, 102 | TEST_FILE_EXPORT_COLUMNS_MISS_KEY, 103 | ] = test_files_list; 104 | type test_files_type = (typeof test_files_list)[number]; 105 | 106 | // file structure for fsify, in order to run the tests 107 | const structure: fsify_structure = [ 108 | { 109 | type: fsify.DIRECTORY, 110 | name: ROOT_TEST_FOLDER, 111 | contents: [ 112 | // In this folder, everything in correct 113 | { 114 | type: fsify.DIRECTORY, 115 | name: VALID_TEST_FOLDER, 116 | contents: flat([ 117 | // 3 i18n files (deep) 118 | TRANSLATIONS_KEYS.map((locale) => ({ 119 | type: fsify.FILE, 120 | name: `${locale.toLowerCase()}.json`, 121 | contents: JSON.stringify(generate_i18n(locale)), 122 | })), 123 | // 3 i18n files (flat) 124 | TRANSLATIONS_KEYS.map((locale) => ({ 125 | type: fsify.FILE, 126 | name: `${locale.toLowerCase()}-flat.json`, 127 | contents: JSON.stringify({ 128 | 'unchanged.key_with-special-char!': locale, 129 | 'changed.key_test$': locale, 130 | }), 131 | })), 132 | // the columns.json 133 | { 134 | type: fsify.FILE, 135 | name: TEST_FILE_EXPORT_COLUMNS, 136 | contents: JSON.stringify(EXPORT_COLUMNS(TRANSLATIONS_KEYS)), 137 | }, 138 | // the files.json 139 | { 140 | type: fsify.FILE, 141 | name: TEST_FILE_FILES, 142 | contents: JSON.stringify( 143 | generate_files(TRANSLATIONS_KEYS, (locale) => 144 | path.join( 145 | TEMP_FOLDER, 146 | ROOT_TEST_FOLDER, 147 | VALID_TEST_FOLDER, 148 | `${locale.toLowerCase()}.json` 149 | ) 150 | ) 151 | ), 152 | }, 153 | // the files-flat.json 154 | { 155 | type: fsify.FILE, 156 | name: TEST_FILE_FLAT_FILES, 157 | contents: JSON.stringify( 158 | generate_files(TRANSLATIONS_KEYS, (locale) => 159 | path.join( 160 | TEMP_FOLDER, 161 | ROOT_TEST_FOLDER, 162 | VALID_TEST_FOLDER, 163 | `${locale.toLowerCase()}-flat.json` 164 | ) 165 | ) 166 | ), 167 | }, 168 | // First format of settings.json (Path) 169 | { 170 | type: fsify.FILE, 171 | name: TEST_FILE_SETTINGS1, 172 | contents: JSON.stringify({ 173 | files: path.join( 174 | TEMP_FOLDER, 175 | ROOT_TEST_FOLDER, 176 | VALID_TEST_FOLDER, 177 | TEST_FILE_FILES 178 | ), 179 | columns: path.join( 180 | TEMP_FOLDER, 181 | ROOT_TEST_FOLDER, 182 | VALID_TEST_FOLDER, 183 | TEST_FILE_EXPORT_COLUMNS 184 | ), 185 | filename: 'settings1-output', 186 | outputDir: TEMP_FOLDER, 187 | }), 188 | }, 189 | // Second format of settings.json (Object/Array instead of Paths) 190 | { 191 | type: fsify.FILE, 192 | name: TEST_FILE_SETTINGS2, 193 | contents: JSON.stringify({ 194 | files: generate_files(TRANSLATIONS_KEYS, (locale) => 195 | path.join( 196 | TEMP_FOLDER, 197 | ROOT_TEST_FOLDER, 198 | VALID_TEST_FOLDER, 199 | `${locale.toLowerCase()}.json` 200 | ) 201 | ), 202 | columns: EXPORT_COLUMNS(TRANSLATIONS_KEYS), 203 | filename: 'settings2-output', 204 | outputDir: TEMP_FOLDER, 205 | }), 206 | }, 207 | // First format of settings.js (Mixins config) 208 | { 209 | type: fsify.FILE, 210 | name: TEST_FILE_SETTINGS3, 211 | contents: `module.exports = { 212 | "files": "${path 213 | .resolve( 214 | TEMP_FOLDER, 215 | ROOT_TEST_FOLDER, 216 | VALID_TEST_FOLDER, 217 | TEST_FILE_FILES 218 | ) 219 | .replace(/\\/g, '\\\\')}", 220 | "columns": "${path 221 | .resolve( 222 | TEMP_FOLDER, 223 | ROOT_TEST_FOLDER, 224 | VALID_TEST_FOLDER, 225 | TEST_FILE_EXPORT_COLUMNS 226 | ) 227 | .replace(/\\/g, '\\\\')}", 228 | "filename": 'settings3-output', 229 | "resultsFilter": function(data) { return data }, 230 | "outputDir": "${TEMP_FOLDER.replace(/\\/g, '\\\\')}" 231 | }`, 232 | }, 233 | // Second format of settings.js (keySeparator) 234 | { 235 | type: fsify.FILE, 236 | name: TEST_FILE_SETTINGS4, 237 | contents: `module.exports = { 238 | "files": "${path 239 | .resolve( 240 | TEMP_FOLDER, 241 | ROOT_TEST_FOLDER, 242 | VALID_TEST_FOLDER, 243 | TEST_FILE_FLAT_FILES 244 | ) 245 | .replace(/\\/g, '\\\\')}", 246 | "columns": "${path 247 | .resolve( 248 | TEMP_FOLDER, 249 | ROOT_TEST_FOLDER, 250 | VALID_TEST_FOLDER, 251 | TEST_FILE_EXPORT_COLUMNS 252 | ) 253 | .replace(/\\/g, '\\\\')}", 254 | "filename": 'settings4-output', 255 | "outputDir": "${TEMP_FOLDER.replace(/\\/g, '\\\\')}", 256 | "keySeparator": false 257 | }`, 258 | }, 259 | ]), 260 | }, 261 | // In this folder, files used for validations 262 | { 263 | type: fsify.DIRECTORY, 264 | name: USELESS_TEST_FOLDER, 265 | contents: [ 266 | // An empty object 267 | { 268 | type: fsify.FILE, 269 | name: TEST_FILE_EMPTY_OBJECT, 270 | contents: JSON.stringify({}), 271 | }, 272 | // An empty array 273 | { 274 | type: fsify.FILE, 275 | name: TEST_FILE_EMPTY_ARRAY, 276 | contents: JSON.stringify([]), 277 | }, 278 | // files.json with duplicated values 279 | { 280 | type: fsify.FILE, 281 | name: TEST_FILE_FILES_DUP, 282 | contents: JSON.stringify( 283 | generate_files(TRANSLATIONS_KEYS, (_) => 284 | path.join( 285 | TEMP_FOLDER, 286 | ROOT_TEST_FOLDER, 287 | VALID_TEST_FOLDER, 288 | `${TRANSLATIONS_KEYS[0].toLowerCase()}.json` 289 | ) 290 | ) 291 | ), 292 | }, 293 | // files.json with invalid path 294 | { 295 | type: fsify.FILE, 296 | name: TEST_FILE_FILES_INVALID, 297 | contents: JSON.stringify({ 298 | fr: '/not/a/valid/path/fr.json', 299 | }), 300 | }, 301 | // columns.json with missing property (label) 302 | { 303 | type: fsify.FILE, 304 | name: TEST_FILE_EXPORT_COLUMNS_MISS_PROP, 305 | contents: JSON.stringify([{ locale: 'FR' }]), 306 | }, 307 | // columns.json with wrong property type 308 | { 309 | type: fsify.FILE, 310 | name: TEST_FILE_EXPORT_COLUMNS_WRONG_PROP, 311 | contents: JSON.stringify([{ locale: 'FR', label: 42 }]), 312 | }, 313 | // columns.json with duplicated value 314 | { 315 | type: fsify.FILE, 316 | name: TEST_FILE_EXPORT_COLUMNS_DUP_VALS, 317 | contents: JSON.stringify([ 318 | { locale: 'FR', label: 'Hello World' }, 319 | { locale: 'NL', label: 'Hello World' }, 320 | ]), 321 | }, 322 | // columns.json with missing key for files.json 323 | { 324 | type: fsify.FILE, 325 | name: TEST_FILE_EXPORT_COLUMNS_MISS_KEY, 326 | contents: JSON.stringify([ 327 | { locale: 'FR', label: 'French translation' }, 328 | ]), 329 | }, 330 | ], 331 | }, 332 | ], 333 | }, 334 | ]; 335 | 336 | // files path 337 | const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( 338 | (acc: any, curr: test_files_type, idx: number) => { 339 | // improvement for later : handle generically nested stuff 340 | let arr = [ 341 | TEMP_FOLDER, 342 | ROOT_TEST_FOLDER, 343 | idx < 7 ? VALID_TEST_FOLDER : USELESS_TEST_FOLDER, 344 | curr, 345 | ]; 346 | acc[curr] = path.join(...arr); 347 | return acc; 348 | }, 349 | {} 350 | ); 351 | 352 | beforeAll(() => { 353 | // write temporary files 354 | return fsify(structure); 355 | }); 356 | 357 | // Build the parser used for that command 358 | const parser = yargs.command(command, describeText, builder).help(); 359 | 360 | // to concat faster command 361 | type concat_cmd_type = (args: string[]) => string; 362 | type prepare_mandatory_args_type = (...args: string[]) => string[]; 363 | const concat_cmd: concat_cmd_type = (args: string[]) => 364 | `export to_csv ${args.join(' ')}`; 365 | const prepare_mandatory_args: prepare_mandatory_args_type = ( 366 | ...args: string[] 367 | ) => ['--files', `"${args[0]}"`, '--columns', `"${args[1]}"`]; 368 | 369 | // test scenarios for validations 370 | const VALIDATIONS_SCENARIOS: [string, string[], ...string[]][] = [ 371 | [ 372 | // Test out the message : "Error: test.csv has an extension : Remove it please" 373 | 'Filename with extension should be rejected', 374 | [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS, '--filename', `"test.csv"`], 375 | 'test.csv', 376 | 'extension', 377 | ], 378 | [ 379 | // Test out the message : "Option keySeparator should be a not-empty char" 380 | 'Option keySeparator - Invalid separator should be rejected', 381 | [ 382 | TEST_FILE_FILES, 383 | TEST_FILE_EXPORT_COLUMNS, 384 | '--keySeparator', 385 | `"HACKERMAN"`, 386 | ], 387 | 'keySeparator', 388 | 'not-empty char', 389 | ], 390 | [ 391 | // Test out the message : "Option files is not a JSON Object" 392 | 'Option files - unexpected file should be rejected', 393 | [TEST_FILE_EMPTY_ARRAY, TEST_FILE_EXPORT_COLUMNS], 394 | 'not a JSON Object', 395 | ], 396 | [ 397 | // Test out the message : "Option files should have at least one entry" 398 | 'Option files - empty object should be rejected', 399 | [TEST_FILE_EMPTY_OBJECT, TEST_FILE_EXPORT_COLUMNS], 400 | 'at least one entry', 401 | ], 402 | [ 403 | // Test out the message : "At least a duplicated value in files JSON object was detected" 404 | 'Option files - Duplicated values should be rejected', 405 | [TEST_FILE_FILES_DUP, TEST_FILE_EXPORT_COLUMNS], 406 | 'duplicated value', 407 | ], 408 | [ 409 | // Test out the message : `${i18nPath} cannot be read : check permissions` 410 | 'Option files - invalid path(s) should be rejected', 411 | [TEST_FILE_FILES_INVALID, TEST_FILE_EXPORT_COLUMNS], 412 | 'cannot be read', 413 | ], 414 | [ 415 | // Test out the message : "columns is not a JSON Array" 416 | 'Option columns - unexpected file should be rejected', 417 | [TEST_FILE_FILES, TEST_FILE_EMPTY_OBJECT], 418 | 'not a JSON Array', 419 | ], 420 | [ 421 | // Test out the message : "Option columns should have at least one entry" 422 | 'Option columns - empty array should be rejected', 423 | [TEST_FILE_FILES, TEST_FILE_EMPTY_ARRAY], 424 | 'at least one entry', 425 | ], 426 | [ 427 | // Test out the message : `At least one item in columns array doesn't have "${prop}" property` 428 | 'Option columns - missing property in array should be rejected', 429 | [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_MISS_PROP], 430 | "doesn't have", 431 | 'property', 432 | ], 433 | [ 434 | // Test out the message : `At least one item in columns array doesn't have "${prop}" property with a String value` 435 | 'Option columns - unexpected property type should be rejected', 436 | [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_WRONG_PROP], 437 | "doesn't have", 438 | 'property with a String value', 439 | ], 440 | [ 441 | // Test out the message : `At least a duplicated value in columns array in prop "${prop}" was detected` 442 | 'Option columns - duplicated value should be rejected', 443 | [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_DUP_VALS], 444 | 'duplicated value', 445 | ], 446 | [ 447 | // Test out the message : 'At least one key differs between files and columns options' 448 | 'Options files & columns - incompatibles files should be rejected', 449 | [TEST_FILE_FILES, TEST_FILE_EXPORT_COLUMNS_MISS_KEY], 450 | 'between files and columns', 451 | ], 452 | ]; 453 | 454 | describe('[export_csv command]', () => { 455 | describe('Check command availability', () => { 456 | it('Should list to_csv in export command', async () => { 457 | const output = await fetchOutput(parser)('export --help'); 458 | expect(output).toMatch('to_csv'); 459 | }); 460 | 461 | it('Should display to_csv help output', async () => { 462 | const output = await fetchOutput(parser)('export to_csv --help'); 463 | expect(output).toMatch(csv_description); 464 | }); 465 | }); 466 | 467 | describe('Validations', () => { 468 | // mock console.log 469 | let consoleLog: any; 470 | beforeAll(() => { 471 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 472 | }); 473 | 474 | // restore console.log 475 | afterAll(() => { 476 | if (consoleLog !== undefined) { 477 | consoleLog.mockRestore(); 478 | } 479 | }); 480 | 481 | test.each(VALIDATIONS_SCENARIOS)( 482 | '%s', 483 | async (_title: string, args: string[], ...messages: string[]) => { 484 | let [files, columns, ...otherArgs] = args; 485 | let test_cmd = concat_cmd([ 486 | // mandatory args 487 | ...prepare_mandatory_args( 488 | TEST_FILES[files as test_files_type], 489 | TEST_FILES[columns as test_files_type] 490 | ), 491 | // optional args 492 | ...otherArgs, 493 | ]); 494 | //console.warn(test_cmd); 495 | // Test out if error message is thrown 496 | await expectError(parser)(test_cmd, ...messages); 497 | } 498 | ); 499 | }); 500 | 501 | describe('E2E successful scenarios', () => { 502 | // mock console.log 503 | let consoleLog: any; 504 | beforeAll(() => { 505 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 506 | }); 507 | 508 | // clear mock after each call 509 | afterEach(() => { 510 | consoleLog.mockClear(); 511 | }); 512 | 513 | // reenable console.log 514 | afterAll(() => { 515 | // restore console.log 516 | if (consoleLog !== undefined) { 517 | consoleLog.mockRestore(); 518 | } 519 | }); 520 | 521 | test.each([ 522 | ['settings.json (Paths)', TEST_FILE_SETTINGS1], 523 | ['settings.json (Object/Array instead of Paths)', TEST_FILE_SETTINGS2], 524 | ['settings.js (Include resultsFilter as fct)', TEST_FILE_SETTINGS3], 525 | ['settings.js (keySeparator set to false)', TEST_FILE_SETTINGS4], 526 | ])('%s', async (_title: string, settingsFile: test_files_type) => { 527 | let test_cmd = concat_cmd([ 528 | '--settings', 529 | `"${TEST_FILES[settingsFile]}"`, 530 | ]); 531 | // example : 'settings1-output' 532 | let filename = settingsFile.substring(0, settingsFile.lastIndexOf('.')); 533 | let expectedFile = path.join(TEMP_FOLDER, `${filename}-output.csv`); 534 | 535 | // run command 536 | //console.warn(test_cmd); 537 | await parser.parseAsync(test_cmd); 538 | 539 | expect(consoleLog).toHaveBeenCalledWith('Preparing CSV file ...'); 540 | expect(consoleLog).toHaveBeenCalledWith( 541 | `${expectedFile} successfully written` 542 | ); 543 | }); 544 | }); 545 | }); 546 | -------------------------------------------------------------------------------- /test/fixtures/export-xlsx/worksheetCustomizer-dynamic.js: -------------------------------------------------------------------------------- 1 | // I keep in comments the stuff needed to convert that into a TS file 2 | // (So I can easily update this script in the future) 3 | //import { Worksheet } from "exceljs"; 4 | 5 | module.exports = async function (worksheet /*: Worksheet*/) { 6 | // Conditionaly formatting (to better view stuff) 7 | let rowCount = worksheet.rowCount; 8 | let columnCount = worksheet.columnCount; 9 | 10 | // for easiness in the future, for arbitrary number of translations 11 | // As table have a least one language (starting to 'B'), pretty simple to compute last column letter 12 | let lastColumnLetter = String.fromCharCode(66 + (columnCount - 2)); 13 | 14 | // domain for rules ; All the cells 15 | // Ex : "B2:D" + rowCount + 2" for three languages 16 | let computedRef = `B2:${lastColumnLetter + rowCount + 2}`; 17 | 18 | worksheet.addConditionalFormatting({ 19 | ref: computedRef, 20 | rules: [ 21 | // cell is empty : put it in red 22 | { 23 | type: 'containsText', 24 | operator: 'containsBlanks', 25 | style: { 26 | fill: { 27 | type: 'pattern', 28 | pattern: 'solid', 29 | bgColor: { argb: 'FF5733' }, 30 | }, 31 | }, 32 | priority: 1, 33 | }, 34 | // cell contains either [FR], [NL] or [DE] : put it in orange 35 | { 36 | type: 'containsText', 37 | operator: 'containsText', 38 | text: '[FR]', 39 | style: { 40 | fill: { 41 | type: 'pattern', 42 | pattern: 'solid', 43 | bgColor: { argb: 'FF9633' }, 44 | }, 45 | }, 46 | priority: 2, 47 | }, 48 | { 49 | type: 'containsText', 50 | operator: 'containsText', 51 | text: '[NL]', 52 | style: { 53 | fill: { 54 | type: 'pattern', 55 | pattern: 'solid', 56 | bgColor: { argb: 'FF9633' }, 57 | }, 58 | }, 59 | priority: 2, 60 | }, 61 | { 62 | type: 'containsText', 63 | operator: 'containsText', 64 | text: '[DE]', 65 | style: { 66 | fill: { 67 | type: 'pattern', 68 | pattern: 'solid', 69 | bgColor: { argb: 'FF9633' }, 70 | }, 71 | }, 72 | priority: 2, 73 | }, 74 | ], 75 | }); 76 | 77 | return worksheet; 78 | }; 79 | -------------------------------------------------------------------------------- /test/fixtures/import-csv/export-csv.csv: -------------------------------------------------------------------------------- 1 | Technical Key;French translation;Dutch translation;German translation 2 | Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer 3 | Missing key in DE;present;present; 4 | array[0];1 FR;1 NL;1 DE 5 | array[1];2 FR;2 NL;2 DE 6 | array[2];3 FR;3 NL;3 DE 7 | commons.myNestedArray[0];1 FR;1 NL;1 DE 8 | commons.myNestedArray[1];2 FR;2 NL;2 DE 9 | commons.myNestedArray[2];3 FR;3 NL;3 DE 10 | commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE 11 | simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key -------------------------------------------------------------------------------- /test/fixtures/import-csv/export-flat-csv.csv: -------------------------------------------------------------------------------- 1 | Technical Key;French translation;Dutch translation;German translation 2 | changed.key_test$;FR;NL;DE 3 | unchanged.key_with-special-char!;FR;NL;DE -------------------------------------------------------------------------------- /test/fixtures/import-xlsx/export-flat-xlsx.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/test/fixtures/import-xlsx/export-flat-xlsx.xlsx -------------------------------------------------------------------------------- /test/fixtures/import-xlsx/export-xlsx.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/test/fixtures/import-xlsx/export-xlsx.xlsx -------------------------------------------------------------------------------- /test/getLeavesPathes.test.ts: -------------------------------------------------------------------------------- 1 | import getLeavesPathes from '../src/commons/getLeavesPathes'; 2 | 3 | // scenarios for testing 4 | const PATH_SCENARIOS: [string, any, string[]][] = [ 5 | [ 6 | 'Simple keys', 7 | { 8 | key: 42, 9 | someArray: [0, 1, 2], 10 | }, 11 | ['key', 'someArray[0]', 'someArray[1]', 'someArray[2]'], 12 | ], 13 | [ 14 | 'Nested object keys should work', 15 | { 16 | commons: { 17 | firstNestedKey: 'Hello', 18 | units: { 19 | secondNestedKey: 'World !', 20 | }, 21 | }, 22 | }, 23 | ['commons.firstNestedKey', 'commons.units.secondNestedKey'], 24 | ], 25 | [ 26 | 'Key that starts with number(s) should be correctly handled', 27 | { 28 | commons: { 29 | units: { 30 | '5ml': 42, 31 | }, 32 | }, 33 | }, 34 | ['commons.units.5ml'], 35 | ], 36 | [ 37 | 'Key with spaces should be correctly handled', 38 | { 39 | 'Key with spaces': '42', 40 | }, 41 | ['Key with spaces'], 42 | ], 43 | [ 44 | 'Nested objects inside array should be correctly handled', 45 | { 46 | someArray: [ 47 | { 48 | type: 'CRITICAL', 49 | message: 'Fatal error', 50 | }, 51 | { 52 | type: 'WARNING', 53 | message: 'Deal with syntax', 54 | }, 55 | ], 56 | }, 57 | [ 58 | 'someArray[0].type', 59 | 'someArray[0].message', 60 | 'someArray[1].type', 61 | 'someArray[1].message', 62 | ], 63 | ], 64 | ]; 65 | 66 | // scenarios for custom separator 67 | // Might be lazy but better to support same tests that the dot separator XD 68 | let CUST_SEPARATOR = '_'; 69 | let expectedResult: string[][] = [ 70 | ['key', 'someArray[0]', 'someArray[1]', 'someArray[2]'], 71 | [ 72 | `commons${CUST_SEPARATOR}firstNestedKey`, 73 | `commons${CUST_SEPARATOR}units${CUST_SEPARATOR}secondNestedKey`, 74 | ], 75 | [`commons${CUST_SEPARATOR}units${CUST_SEPARATOR}5ml`], 76 | ['Key with spaces'], 77 | [ 78 | `someArray${CUST_SEPARATOR}0${CUST_SEPARATOR}type`, 79 | `someArray${CUST_SEPARATOR}0${CUST_SEPARATOR}message`, 80 | `someArray${CUST_SEPARATOR}1${CUST_SEPARATOR}type`, 81 | `someArray${CUST_SEPARATOR}1${CUST_SEPARATOR}message`, 82 | ], 83 | ]; 84 | const PATH_SCENARIOS_2: [string, any, string[]][] = PATH_SCENARIOS.map( 85 | (entry, index) => { 86 | return [entry[0], entry[1], expectedResult[index]]; 87 | } 88 | ); 89 | 90 | // Scenarios for keySeparator is set to false 91 | const PATH_SCENARIOS_3: [string, any, string[]][] = [ 92 | [ 93 | 'Simple keys', 94 | { 95 | key: 42, 96 | verylooooogKey: 'Hello world', 97 | }, 98 | ['key', 'verylooooogKey'], 99 | ], 100 | [ 101 | 'Keys with special characters', 102 | { 103 | 'Hello.world !': 42, 104 | '$x.y_42-z~5!': 'jy95', 105 | '[Hello].[World]|42': 'Hello', 106 | }, 107 | ['Hello.world !', '$x.y_42-z~5!', '[Hello].[World]|42'], 108 | ], 109 | [ 110 | 'Nested JSON with separator set to false - backup strategy', 111 | { 112 | lol: { 113 | test: { 114 | world: 42, 115 | }, 116 | }, 117 | }, 118 | ['lol.test.world'], 119 | ], 120 | ]; 121 | 122 | describe('[commons - getLeavesPathes] dot separator', () => { 123 | test.each(PATH_SCENARIOS)( 124 | '%s', 125 | async (_title: string, obj: any, expectedArray: string[]) => { 126 | const paths = getLeavesPathes(obj); 127 | expect(paths.sort()).toEqual(expectedArray.sort()); 128 | } 129 | ); 130 | }); 131 | 132 | describe('[commons - getLeavesPathes] custom separator', () => { 133 | test.each(PATH_SCENARIOS_2)( 134 | '%s', 135 | async (_title: string, obj: any, expectedArray: string[]) => { 136 | const paths = getLeavesPathes(obj, CUST_SEPARATOR); 137 | expect(paths.sort()).toEqual(expectedArray.sort()); 138 | } 139 | ); 140 | }); 141 | 142 | describe('[commons - getLeavesPathes] separator set to false', () => { 143 | test.each(PATH_SCENARIOS_3)( 144 | '%s', 145 | async (_title: string, obj: any, expectedArray: string[]) => { 146 | const paths = getLeavesPathes(obj, false); 147 | expect(paths.sort()).toEqual(expectedArray.sort()); 148 | } 149 | ); 150 | }); 151 | -------------------------------------------------------------------------------- /test/import/import-csv.test.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import yargs from 'yargs'; 3 | // import command 4 | import { 5 | command, 6 | description as describeText, 7 | builder, 8 | } from '../../src/cmds/import'; 9 | // CSV description 10 | import { description as csv_description } from '../../src/cmds/import_cmds/import_csv'; 11 | 12 | // test helpers 13 | import { 14 | TEMP_FOLDER, 15 | VALID_TEST_FOLDER, 16 | USELESS_TEST_FOLDER, 17 | expectError, 18 | fetchOutput, 19 | fsify_structure, 20 | fsify, 21 | } from '../test-helpers'; 22 | 23 | // test folders constants 24 | const ROOT_TEST_FOLDER = 'tests-for-import-csv'; 25 | 26 | // Build the parser used for that command 27 | const parser = yargs.command(command, describeText, builder).help(); 28 | 29 | // to concat faster command 30 | type concat_cmd_type = (args: string[]) => string; 31 | type prepare_mandatory_args_type = ( 32 | ...args: [string, string, string[]] 33 | ) => string[]; 34 | const concat_cmd: concat_cmd_type = (args: string[]) => 35 | `import from_csv ${args.join(' ')}`; 36 | const prepare_mandatory_args: prepare_mandatory_args_type = ( 37 | ...[input, columns, ...locales] 38 | ) => [ 39 | '--input', 40 | `"${input}"`, 41 | '--columns', 42 | `"${columns}"`, 43 | '--locales', 44 | locales.join(' '), 45 | ]; 46 | 47 | // to access easier the paths of test file paths 48 | const test_files_list = [ 49 | // inpput file 50 | 'export-csv.csv', 51 | 'export-flat-csv.csv', 52 | // correct files 53 | 'columns.json', 54 | 'settings1.json', 55 | 'settings2.json', 56 | 'settings3.json', 57 | // wrong files 58 | 'emptyObject.json', 59 | 'emptyArray.json', 60 | // wrong columns.json 61 | 'columns-technicalKeyNotString.json', 62 | 'columns-localesNotAObject.json', 63 | 'columns-localesValuesNotString.json', 64 | ] as const; 65 | const [ 66 | TEST_FILE_INPUT, 67 | TEST_FILE_FLAT_INPUT, 68 | TEST_FILE_COLUMNS, 69 | TEST_FILE_SETTINGS1, 70 | TEST_FILE_SETTINGS2, 71 | TEST_FILE_SETTINGS3, 72 | TEST_FILE_EMPTY_OBJECT, 73 | TEST_FILE_EMPTY_ARRAY, 74 | TEST_FILE_COLUMNS_TKNS, 75 | TEST_FILE_COLUMNS_LNAO, 76 | TEST_FILE_COLUMNS_LVNS, 77 | ] = test_files_list; 78 | type test_files_type = (typeof test_files_list)[number]; 79 | 80 | // files path 81 | const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( 82 | (acc: any, curr: test_files_type, idx: number) => { 83 | let arr = 84 | idx === 0 || idx === 1 85 | ? [__dirname, '..', 'fixtures', 'import-csv', curr] 86 | : [ 87 | TEMP_FOLDER, 88 | ROOT_TEST_FOLDER, 89 | idx > 0 && idx < 6 ? VALID_TEST_FOLDER : USELESS_TEST_FOLDER, 90 | curr, 91 | ]; 92 | acc[curr] = path.join(...arr); 93 | return acc; 94 | }, 95 | {} 96 | ); 97 | 98 | // test scenarios for validations 99 | const VALIDATIONS_SCENARIOS: [ 100 | string, 101 | [test_files_type, test_files_type, string[], ...string[]], 102 | ...string[], 103 | ][] = [ 104 | [ 105 | // Test out the message : "locales options doesn't contain uniq values" 106 | 'Option locales - Duplicated values should be rejected', 107 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS, ['FR', 'FR']], 108 | // I have to disable the error message check as yargs is buggy atm 109 | //"doesn't contain uniq values" 110 | ], 111 | [ 112 | // Test out the message : "Option keySeparator should be a not-empty char" 113 | 'Option keySeparator - Invalid separator should be rejected', 114 | [ 115 | TEST_FILE_INPUT, 116 | TEST_FILE_COLUMNS, 117 | ['FR', 'NL'], 118 | '--keySeparator', 119 | `"HACKERMAN"`, 120 | ], 121 | 'keySeparator', 122 | 'not-empty char', 123 | ], 124 | [ 125 | // Test out the message : 'columns is not a JSON Object' 126 | 'Option columns - unexpected file should be rejected', 127 | [TEST_FILE_INPUT, TEST_FILE_EMPTY_ARRAY, ['FR', 'NL']], 128 | 'columns is not a JSON Object', 129 | ], 130 | [ 131 | // Test out the message : `${missingProp} couldn't be found in columns object` 132 | 'Option columns - missing property should be rejected', 133 | [TEST_FILE_INPUT, TEST_FILE_EMPTY_OBJECT, ['FR', 'NL']], 134 | "couldn't be found in columns object", 135 | ], 136 | [ 137 | // Test out the message : "technical_key in columns object isn't a String" 138 | 'Option columns - unexpected technical_key value should be reject', 139 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_TKNS, ['FR', 'NL']], 140 | "technical_key in columns object isn't a String", 141 | ], 142 | [ 143 | // Test out the message : "locales key in columns object is not a JSON Object", 144 | 'Option columns - unexpected locales value should be rejected', 145 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LNAO, ['FR', 'NL']], 146 | 'locales key in columns object is not a JSON Object', 147 | ], 148 | [ 149 | // Test out the message : "At least one value for locales key in columns object isn't a string" 150 | 'Option columns - unexpected value(s) for locales should be rejected', 151 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LVNS, ['FR', 'NL']], 152 | ], 153 | ]; 154 | 155 | // file structure for fsify, in order to run the tests 156 | const structure: fsify_structure = [ 157 | { 158 | type: fsify.DIRECTORY, 159 | name: ROOT_TEST_FOLDER, 160 | contents: [ 161 | // In this folder, everything in correct 162 | { 163 | type: fsify.DIRECTORY, 164 | name: VALID_TEST_FOLDER, 165 | contents: [ 166 | // columns file 167 | { 168 | type: fsify.FILE, 169 | name: TEST_FILE_COLUMNS, 170 | contents: JSON.stringify({ 171 | technical_key: 'Technical Key', 172 | locales: { 173 | FR: 'French translation', 174 | NL: 'Dutch translation', 175 | DE: 'German translation', 176 | }, 177 | }), 178 | }, 179 | // First format of settings.json (Path) 180 | { 181 | type: fsify.FILE, 182 | name: TEST_FILE_SETTINGS1, 183 | contents: JSON.stringify({ 184 | input: TEST_FILES[TEST_FILE_INPUT], 185 | columns: TEST_FILES[TEST_FILE_COLUMNS], 186 | locales: ['FR', 'NL', 'DE'], 187 | outputDir: path.join(TEMP_FOLDER, ROOT_TEST_FOLDER), 188 | suffix: '_settings1', 189 | }), 190 | }, 191 | // Second format of settings.json (Object/Array instead of Paths) 192 | { 193 | type: fsify.FILE, 194 | name: TEST_FILE_SETTINGS2, 195 | contents: JSON.stringify({ 196 | input: TEST_FILES[TEST_FILE_INPUT], 197 | columns: { 198 | technical_key: 'Technical Key', 199 | locales: { 200 | FR: 'French translation', 201 | NL: 'Dutch translation', 202 | DE: 'German translation', 203 | }, 204 | }, 205 | locales: ['FR', 'NL', 'DE'], 206 | outputDir: path.join(TEMP_FOLDER, ROOT_TEST_FOLDER), 207 | suffix: '_settings2', 208 | }), 209 | }, 210 | // Third format of settings.json (keySeparator) 211 | { 212 | type: fsify.FILE, 213 | name: TEST_FILE_SETTINGS3, 214 | contents: JSON.stringify({ 215 | input: TEST_FILES[TEST_FILE_FLAT_INPUT], 216 | columns: { 217 | technical_key: 'Technical Key', 218 | locales: { 219 | FR: 'French translation', 220 | NL: 'Dutch translation', 221 | DE: 'German translation', 222 | }, 223 | }, 224 | locales: ['FR', 'NL', 'DE'], 225 | outputDir: path.join(TEMP_FOLDER, ROOT_TEST_FOLDER), 226 | suffix: '_settings3', 227 | keySeparator: false, 228 | }), 229 | }, 230 | ], 231 | }, 232 | // In this folder, files used for validations 233 | { 234 | type: fsify.DIRECTORY, 235 | name: USELESS_TEST_FOLDER, 236 | contents: [ 237 | // An empty object 238 | { 239 | type: fsify.FILE, 240 | name: TEST_FILE_EMPTY_OBJECT, 241 | contents: JSON.stringify({}), 242 | }, 243 | // An empty array 244 | { 245 | type: fsify.FILE, 246 | name: TEST_FILE_EMPTY_ARRAY, 247 | contents: JSON.stringify([]), 248 | }, 249 | // columns option - technical_key not a string 250 | { 251 | type: fsify.FILE, 252 | name: TEST_FILE_COLUMNS_TKNS, 253 | contents: JSON.stringify({ 254 | technical_key: 42.0, 255 | locales: {}, 256 | }), 257 | }, 258 | // columns option - locales not a object 259 | { 260 | type: fsify.FILE, 261 | name: TEST_FILE_COLUMNS_LNAO, 262 | contents: JSON.stringify({ 263 | technical_key: 'something', 264 | locales: [], 265 | }), 266 | }, 267 | // columns option - locales values not string 268 | { 269 | type: fsify.FILE, 270 | name: TEST_FILE_COLUMNS_LVNS, 271 | contents: JSON.stringify({ 272 | technical_key: 'something', 273 | locales: { 274 | FR: 42.0, 275 | NL: null, 276 | }, 277 | }), 278 | }, 279 | ], 280 | }, 281 | ], 282 | }, 283 | ]; 284 | 285 | beforeAll(() => { 286 | // write temporary files 287 | return fsify(structure); 288 | }); 289 | 290 | describe('[import_csv command]', () => { 291 | describe('Check command availability', () => { 292 | it('Should list from_csv in import command', async () => { 293 | const output = await fetchOutput(parser)('import --help'); 294 | expect(output).toMatch('from_csv'); 295 | }); 296 | 297 | it('Should display from_csv help output', async () => { 298 | const output = await fetchOutput(parser)('import from_csv --help'); 299 | expect(output).toMatch(csv_description); 300 | }); 301 | }); 302 | 303 | describe('Validations', () => { 304 | // mock console.log 305 | let consoleLog: any; 306 | beforeAll(() => { 307 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 308 | }); 309 | 310 | // restore console.log 311 | afterAll(() => { 312 | if (consoleLog !== undefined) { 313 | consoleLog.mockRestore(); 314 | } 315 | }); 316 | 317 | test.each(VALIDATIONS_SCENARIOS)( 318 | '%s', 319 | async ( 320 | _title: string, 321 | args: [test_files_type, test_files_type, string[], ...string[]], 322 | ...messages: string[] 323 | ) => { 324 | let [input, columns, locales, ...otherArgs] = args; 325 | let test_cmd = concat_cmd([ 326 | // mandatory args 327 | ...prepare_mandatory_args( 328 | TEST_FILES[input], 329 | TEST_FILES[columns], 330 | locales 331 | ), 332 | // optional args 333 | ...otherArgs, 334 | ]); 335 | //console.warn(test_cmd); 336 | // Test out if error message is thrown 337 | await expectError(parser)(test_cmd, ...messages); 338 | } 339 | ); 340 | }); 341 | 342 | describe('E2E successful scenarios', () => { 343 | // mock console.log 344 | let consoleLog: any; 345 | beforeAll(() => { 346 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 347 | }); 348 | 349 | // clear mock after each call 350 | afterEach(() => { 351 | consoleLog.mockClear(); 352 | }); 353 | 354 | // reenable console.log 355 | afterAll(() => { 356 | // restore console.log 357 | if (consoleLog !== undefined) { 358 | consoleLog.mockRestore(); 359 | } 360 | }); 361 | 362 | test.each([ 363 | ['(Paths)', TEST_FILE_SETTINGS1], 364 | ['(Object/Array instead of Paths)', TEST_FILE_SETTINGS2], 365 | ['(keySeparator set to false)', TEST_FILE_SETTINGS3], 366 | ])( 367 | 'settings.json %s', 368 | async (_title: string, settingsFile: test_files_type) => { 369 | let test_cmd = concat_cmd([ 370 | '--settings', 371 | `"${TEST_FILES[settingsFile]}"`, 372 | ]); 373 | // run command 374 | //console.warn(test_cmd); 375 | await parser.parseAsync(test_cmd); 376 | 377 | expect(consoleLog).toHaveBeenCalledWith( 378 | 'Successfully exported found locale(s) to i18n json file(s)' 379 | ); 380 | } 381 | ); 382 | }); 383 | }); 384 | -------------------------------------------------------------------------------- /test/import/import-xlsx.test.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import yargs from 'yargs'; 3 | // import command 4 | import { 5 | command, 6 | description as describeText, 7 | builder, 8 | } from '../../src/cmds/import'; 9 | // XLSX description 10 | import { description as xlsx_description } from '../../src/cmds/import_cmds/import_xlsx'; 11 | 12 | // test helpers 13 | import { 14 | TEMP_FOLDER, 15 | VALID_TEST_FOLDER, 16 | USELESS_TEST_FOLDER, 17 | expectError, 18 | fetchOutput, 19 | fsify_structure, 20 | fsify, 21 | } from '../test-helpers'; 22 | 23 | // test folders constants 24 | const ROOT_TEST_FOLDER = 'tests-for-import-xlsx'; 25 | 26 | // Build the parser used for that command 27 | const parser = yargs.command(command, describeText, builder).help(); 28 | 29 | // to concat faster command 30 | type concat_cmd_type = (args: string[]) => string; 31 | type prepare_mandatory_args_type = ( 32 | ...args: [string, string, string[]] 33 | ) => string[]; 34 | const concat_cmd: concat_cmd_type = (args: string[]) => 35 | `import from_xlsx ${args.join(' ')}`; 36 | const prepare_mandatory_args: prepare_mandatory_args_type = ( 37 | ...[input, columns, ...locales] 38 | ) => [ 39 | '--input', 40 | `"${input}"`, 41 | '--columns', 42 | `"${columns}"`, 43 | '--locales', 44 | locales.join(' '), 45 | ]; 46 | 47 | // to access easier the paths of test file paths 48 | const test_files_list = [ 49 | // inpput file 50 | 'export-xlsx.xlsx', 51 | 'export-flat-xlsx.xlsx', 52 | // correct files 53 | 'columns.json', 54 | 'settings1.json', 55 | 'settings2.json', 56 | 'settings3.js', 57 | 'settings4.js', 58 | // wrong files 59 | 'emptyObject.json', 60 | 'emptyArray.json', 61 | // wrong columns.json 62 | 'columns-technicalKeyNotString.json', 63 | 'columns-localesNotAObject.json', 64 | 'columns-localesValuesNotString.json', 65 | ] as const; 66 | const [ 67 | TEST_FILE_INPUT, 68 | TEST_FILE_FLAT_INPUT, 69 | TEST_FILE_COLUMNS, 70 | TEST_FILE_SETTINGS1, 71 | TEST_FILE_SETTINGS2, 72 | TEST_FILE_SETTINGS3, 73 | TEST_FILE_SETTINGS4, 74 | TEST_FILE_EMPTY_OBJECT, 75 | TEST_FILE_EMPTY_ARRAY, 76 | TEST_FILE_COLUMNS_TKNS, 77 | TEST_FILE_COLUMNS_LNAO, 78 | TEST_FILE_COLUMNS_LVNS, 79 | ] = test_files_list; 80 | type test_files_type = (typeof test_files_list)[number]; 81 | 82 | // files path 83 | const TEST_FILES: { [x in test_files_type]: string } = test_files_list.reduce( 84 | (acc: any, curr: test_files_type, idx: number) => { 85 | let arr = 86 | idx === 0 || idx === 1 87 | ? [__dirname, '..', 'fixtures', 'import-xlsx', curr] 88 | : [ 89 | TEMP_FOLDER, 90 | ROOT_TEST_FOLDER, 91 | idx > 0 && idx < 7 ? VALID_TEST_FOLDER : USELESS_TEST_FOLDER, 92 | curr, 93 | ]; 94 | acc[curr] = path.join(...arr); 95 | return acc; 96 | }, 97 | {} 98 | ); 99 | 100 | // test scenarios for validations 101 | const VALIDATIONS_SCENARIOS: [ 102 | string, 103 | [test_files_type, test_files_type, string[], ...string[]], 104 | ...string[], 105 | ][] = [ 106 | [ 107 | // Test out the message : "locales options doesn't contain uniq values" 108 | 'Option locales - Duplicated values should be rejected', 109 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS, ['FR', 'FR']], 110 | // I have to disable the error message check as yargs is buggy atm 111 | //"doesn't contain uniq values" 112 | ], 113 | [ 114 | // Test out the message : "Option keySeparator should be a not-empty char" 115 | 'Option keySeparator - Invalid separator should be rejected', 116 | [ 117 | TEST_FILE_INPUT, 118 | TEST_FILE_COLUMNS, 119 | ['FR', 'NL'], 120 | '--keySeparator', 121 | `"HACKERMAN"`, 122 | ], 123 | 'keySeparator', 124 | 'not-empty char', 125 | ], 126 | [ 127 | // Test out the message : 'columns is not a JSON Object' 128 | 'Option columns - unexpected file should be rejected', 129 | [TEST_FILE_INPUT, TEST_FILE_EMPTY_ARRAY, ['FR', 'NL']], 130 | 'columns is not a JSON Object', 131 | ], 132 | [ 133 | // Test out the message : `${missingProp} couldn't be found in columns object` 134 | 'Option columns - missing property should be rejected', 135 | [TEST_FILE_INPUT, TEST_FILE_EMPTY_OBJECT, ['FR', 'NL']], 136 | "couldn't be found in columns object", 137 | ], 138 | [ 139 | // Test out the message : "technical_key in columns object isn't a String" 140 | 'Option columns - unexpected technical_key value should be reject', 141 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_TKNS, ['FR', 'NL']], 142 | "technical_key in columns object isn't a String", 143 | ], 144 | [ 145 | // Test out the message : "locales key in columns object is not a JSON Object", 146 | 'Option columns - unexpected locales value should be rejected', 147 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LNAO, ['FR', 'NL']], 148 | 'locales key in columns object is not a JSON Object', 149 | ], 150 | [ 151 | // Test out the message : "At least one value for locales key in columns object isn't a string" 152 | 'Option columns - unexpected value(s) for locales should be rejected', 153 | [TEST_FILE_INPUT, TEST_FILE_COLUMNS_LVNS, ['FR', 'NL']], 154 | ], 155 | ]; 156 | 157 | // file structure for fsify, in order to run the tests 158 | const structure: fsify_structure = [ 159 | { 160 | type: fsify.DIRECTORY, 161 | name: ROOT_TEST_FOLDER, 162 | contents: [ 163 | // In this folder, everything in correct 164 | { 165 | type: fsify.DIRECTORY, 166 | name: VALID_TEST_FOLDER, 167 | contents: [ 168 | // columns file 169 | { 170 | type: fsify.FILE, 171 | name: TEST_FILE_COLUMNS, 172 | contents: JSON.stringify({ 173 | technical_key: 'Technical Key', 174 | locales: { 175 | FR: 'French translation', 176 | NL: 'Dutch translation', 177 | DE: 'German translation', 178 | }, 179 | }), 180 | }, 181 | // First format of settings.json (Path) 182 | { 183 | type: fsify.FILE, 184 | name: TEST_FILE_SETTINGS1, 185 | contents: JSON.stringify({ 186 | input: TEST_FILES[TEST_FILE_INPUT], 187 | columns: TEST_FILES[TEST_FILE_COLUMNS], 188 | locales: ['FR', 'NL', 'DE'], 189 | outputDir: path.join(TEMP_FOLDER, ROOT_TEST_FOLDER), 190 | suffix: '_settings1', 191 | }), 192 | }, 193 | // Second format of settings.json (Object/Array instead of Paths) 194 | { 195 | type: fsify.FILE, 196 | name: TEST_FILE_SETTINGS2, 197 | contents: JSON.stringify({ 198 | input: TEST_FILES[TEST_FILE_INPUT], 199 | columns: { 200 | technical_key: 'Technical Key', 201 | locales: { 202 | FR: 'French translation', 203 | NL: 'Dutch translation', 204 | DE: 'German translation', 205 | }, 206 | }, 207 | locales: ['FR', 'NL', 'DE'], 208 | outputDir: path.join(TEMP_FOLDER, ROOT_TEST_FOLDER), 209 | suffix: '_settings2', 210 | }), 211 | }, 212 | // First format of settings.js (Mixins config) 213 | { 214 | type: fsify.FILE, 215 | name: TEST_FILE_SETTINGS3, 216 | // As fsify uses fs.writeFile, we need to double backslash stuff again 217 | contents: `module.exports = { 218 | "input": "${TEST_FILES[TEST_FILE_INPUT].replace(/\\/g, '\\\\')}", 219 | "columns": { 220 | "technical_key": 'Technical Key', 221 | "locales": { 222 | "FR": 'French translation', 223 | "NL": 'Dutch translation', 224 | "DE": 'German translation' 225 | } 226 | }, 227 | "locales": ['FR', 'NL', 'DE'], 228 | "outputDir": "${TEMP_FOLDER.replace(/\\/g, '\\\\')}", 229 | "suffix": '_settings3', 230 | }`, 231 | }, 232 | // Second format of settings.js (keySeparator) 233 | { 234 | type: fsify.FILE, 235 | name: TEST_FILE_SETTINGS4, 236 | // As fsify uses fs.writeFile, we need to double backslash stuff again 237 | contents: `module.exports = { 238 | "input": "${TEST_FILES[TEST_FILE_FLAT_INPUT].replace( 239 | /\\/g, 240 | '\\\\' 241 | )}", 242 | "columns": { 243 | "technical_key": 'Technical Key', 244 | "locales": { 245 | "FR": 'French translation', 246 | "NL": 'Dutch translation', 247 | "DE": 'German translation' 248 | } 249 | }, 250 | "locales": ['FR', 'NL', 'DE'], 251 | "outputDir": "${TEMP_FOLDER.replace(/\\/g, '\\\\')}", 252 | "suffix": '_settings4', 253 | "keySeparator": false 254 | }`, 255 | }, 256 | ], 257 | }, 258 | // In this folder, files used for validations 259 | { 260 | type: fsify.DIRECTORY, 261 | name: USELESS_TEST_FOLDER, 262 | contents: [ 263 | // An empty object 264 | { 265 | type: fsify.FILE, 266 | name: TEST_FILE_EMPTY_OBJECT, 267 | contents: JSON.stringify({}), 268 | }, 269 | // An empty array 270 | { 271 | type: fsify.FILE, 272 | name: TEST_FILE_EMPTY_ARRAY, 273 | contents: JSON.stringify([]), 274 | }, 275 | // columns option - technical_key not a string 276 | { 277 | type: fsify.FILE, 278 | name: TEST_FILE_COLUMNS_TKNS, 279 | contents: JSON.stringify({ 280 | technical_key: 42.0, 281 | locales: {}, 282 | }), 283 | }, 284 | // columns option - locales not a object 285 | { 286 | type: fsify.FILE, 287 | name: TEST_FILE_COLUMNS_LNAO, 288 | contents: JSON.stringify({ 289 | technical_key: 'something', 290 | locales: [], 291 | }), 292 | }, 293 | // columns option - locales values not string 294 | { 295 | type: fsify.FILE, 296 | name: TEST_FILE_COLUMNS_LVNS, 297 | contents: JSON.stringify({ 298 | technical_key: 'something', 299 | locales: { 300 | FR: 42.0, 301 | NL: null, 302 | }, 303 | }), 304 | }, 305 | ], 306 | }, 307 | ], 308 | }, 309 | ]; 310 | 311 | beforeAll(() => { 312 | // write temporary files 313 | return fsify(structure); 314 | }); 315 | 316 | describe('[import_xlsx command]', () => { 317 | describe('Check command availability', () => { 318 | it('Should list from_xlsx in import command', async () => { 319 | const output = await fetchOutput(parser)('import --help'); 320 | expect(output).toMatch('from_xlsx'); 321 | }); 322 | 323 | it('Should display from_xlsx help output', async () => { 324 | const output = await fetchOutput(parser)('import from_xlsx --help'); 325 | expect(output).toMatch(xlsx_description); 326 | }); 327 | }); 328 | 329 | describe('Validations', () => { 330 | // mock console.log 331 | let consoleLog: any; 332 | beforeAll(() => { 333 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 334 | }); 335 | 336 | // restore console.log 337 | afterAll(() => { 338 | if (consoleLog !== undefined) { 339 | consoleLog.mockRestore(); 340 | } 341 | }); 342 | 343 | test.each(VALIDATIONS_SCENARIOS)( 344 | '%s', 345 | async ( 346 | _title: string, 347 | args: [test_files_type, test_files_type, string[], ...string[]], 348 | ...messages: string[] 349 | ) => { 350 | let [input, columns, locales, ...otherArgs] = args; 351 | let test_cmd = concat_cmd([ 352 | // mandatory args 353 | ...prepare_mandatory_args( 354 | TEST_FILES[input], 355 | TEST_FILES[columns], 356 | locales 357 | ), 358 | // optional args 359 | ...otherArgs, 360 | ]); 361 | //console.warn(test_cmd); 362 | // Test out if error message is thrown 363 | await expectError(parser)(test_cmd, ...messages); 364 | } 365 | ); 366 | }); 367 | 368 | describe('E2E successful scenarios', () => { 369 | // mock console.log 370 | let consoleLog: any; 371 | beforeAll(() => { 372 | consoleLog = jest.spyOn(console, 'log').mockImplementation(); 373 | }); 374 | 375 | // clear mock after each call 376 | afterEach(() => { 377 | consoleLog.mockClear(); 378 | }); 379 | 380 | // reenable console.log 381 | afterAll(() => { 382 | // restore console.log 383 | if (consoleLog !== undefined) { 384 | consoleLog.mockRestore(); 385 | } 386 | }); 387 | 388 | test.each([ 389 | ['(Paths)', TEST_FILE_SETTINGS1], 390 | ['(Object/Array instead of Paths)', TEST_FILE_SETTINGS2], 391 | ['should work with js config file', TEST_FILE_SETTINGS3], 392 | ['(keySeparator set to false)', TEST_FILE_SETTINGS4], 393 | ])('settings %s', async (_title: string, settingsFile: test_files_type) => { 394 | let test_cmd = concat_cmd([ 395 | '--settings', 396 | `"${TEST_FILES[settingsFile]}"`, 397 | ]); 398 | // run command 399 | //console.warn(test_cmd); 400 | await parser.parseAsync(test_cmd); 401 | 402 | expect(consoleLog).toHaveBeenCalledWith( 403 | 'Successfully exported found locale(s) to i18n json file(s)' 404 | ); 405 | }); 406 | }); 407 | }); 408 | -------------------------------------------------------------------------------- /test/test-helpers.ts: -------------------------------------------------------------------------------- 1 | // to make testing simpler in the future 2 | 3 | import os from 'os'; 4 | // type for yargs parser 5 | import type { Argv } from 'yargs'; 6 | 7 | // temp folder 8 | export const TEMP_FOLDER = os.tmpdir(); 9 | 10 | // for correct / wrong stuff 11 | export const [VALID_TEST_FOLDER, USELESS_TEST_FOLDER] = [ 12 | 'correct', // folder where every file are correct 13 | 'useless', // folder where file has an useless content ([]) 14 | ]; 15 | 16 | // initialise fsify 17 | export const fsify: { 18 | [x: string]: any; 19 | DIRECTORY: any; 20 | FILE: any; 21 | (_: { [x: string]: any }): Promise; 22 | } = require('fsify')({ 23 | cwd: TEMP_FOLDER, 24 | persistent: false, 25 | force: true, 26 | }); 27 | 28 | // return the output of a given command to the parser 29 | export const fetchOutput = 30 | (parser: Argv) => 31 | (cmd: string): Promise => { 32 | return new Promise((resolve) => { 33 | parser.parse( 34 | cmd, 35 | (_err: Error | undefined, _argv: any, output: string) => { 36 | resolve(output); 37 | } 38 | ); 39 | }); 40 | }; 41 | 42 | // makes assertions on errors 43 | export const expectError = 44 | (parser: Argv) => 45 | async (cmd: string, ...messages: string[]) => { 46 | // error to be retrieve 47 | let error: any = undefined; 48 | // In tests, I had to make sure yargs doesn't override error for the following reason : 49 | // Even when validation failed, it somehow can go to handler() 50 | let isFirstError = true; 51 | 52 | // add fail() handler 53 | // Because of problem explained above, I had to ignore if an error occurs afterwards 54 | try { 55 | await parser 56 | .fail((_, e) => { 57 | if (isFirstError) { 58 | isFirstError = false; 59 | error = e; 60 | } 61 | }) 62 | .parseAsync(cmd); 63 | } catch (_) {} 64 | // check if error was set 65 | expect(error).not.toEqual(undefined); 66 | // check if it is an error Object 67 | expect(error).toHaveProperty('message'); 68 | // check if error message contains expected element 69 | for (let expectedStr of messages) { 70 | expect((error as Error).message).toMatch(expectedStr); 71 | } 72 | }; 73 | 74 | // type for fsify structure 75 | export type fsify_structure = { 76 | type: any; 77 | name: string; 78 | contents: string | fsify_structure; 79 | }[]; 80 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | // interop between ESM and CJS modules. Recommended by TS 4 | "esModuleInterop": true 5 | } 6 | } -------------------------------------------------------------------------------- /website/.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | /node_modules 3 | 4 | # Production 5 | /build 6 | 7 | # Generated files 8 | .docusaurus 9 | .cache-loader 10 | 11 | # Misc 12 | .DS_Store 13 | .env.local 14 | .env.development.local 15 | .env.test.local 16 | .env.production.local 17 | 18 | npm-debug.log* 19 | yarn-debug.log* 20 | yarn-error.log* 21 | -------------------------------------------------------------------------------- /website/README.md: -------------------------------------------------------------------------------- 1 | # Website 2 | 3 | This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. 4 | 5 | ### Installation 6 | 7 | ``` 8 | $ yarn 9 | ``` 10 | 11 | ### Local Development 12 | 13 | ``` 14 | $ yarn start 15 | ``` 16 | 17 | This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. 18 | 19 | ### Build 20 | 21 | ``` 22 | $ yarn build 23 | ``` 24 | 25 | This command generates static content into the `build` directory and can be served using any static contents hosting service. 26 | 27 | ### Deployment 28 | 29 | ``` 30 | $ GIT_USER= USE_SSH=true yarn deploy 31 | ``` 32 | 33 | If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. 34 | -------------------------------------------------------------------------------- /website/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve('@docusaurus/core/lib/babel/preset')], 3 | }; 4 | -------------------------------------------------------------------------------- /website/blog/2021-09-05-welcome/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: welcome 3 | title: Welcome 4 | authors: [jy95] 5 | tags: [welcome] 6 | --- 7 | 8 | Welcome to @jy95/i18n-tools. 9 | This cli enables you to : 10 | - Export i18n files into something else 11 | - Turn a file to i18n file(s) 12 | - Compare at least two i18n files & generate a report 13 | - ... 14 | -------------------------------------------------------------------------------- /website/blog/2021-10-07-flat-json-now-supported/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | slug: flat-json-now-supported 3 | title: Flat JSON now supported 4 | authors: [jy95] 5 | tags: [feature] 6 | --- 7 | 8 | Flat JSON such as this one are now supported : 9 | ```json 10 | { 11 | "unchanged.key_with-special-char!":"Hello", 12 | "changed.key_test$":"world !" 13 | } 14 | ``` 15 | 16 | Don't forget to set option `keySeparator` to `false` in your `settings.json` or `settings.js` files. -------------------------------------------------------------------------------- /website/blog/authors.yml: -------------------------------------------------------------------------------- 1 | jy95: 2 | name: Jacques Yakoub 3 | title: Maintainer of @jy95/i18n-tools 4 | url: https://github.com/jy95 5 | image_url: https://avatars.githubusercontent.com/u/9306961?v=4 -------------------------------------------------------------------------------- /website/docs/commands/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Commands", 3 | "position": 2 4 | } -------------------------------------------------------------------------------- /website/docs/commands/_diff-faq.mdx: -------------------------------------------------------------------------------- 1 |
2 | The files I use are flat JSON. How can I make this command work ? 3 | 4 | Simply set option `keySeparator` to `false` in your `settings.json` or `settings.js`, such as : 5 | ```json title="settings.json" 6 | { 7 | "keySeparator": false 8 | } 9 | ``` 10 | 11 |
12 | 13 |
14 | I only want some types of changes reported in the result file. How can I achieve that ? 15 | 16 | Simply set the `operations` option in your `settings.json` or `settings.js` to your liking : 17 | 18 | :::tip 19 | 20 | Current operations are : 21 | 22 | | Operation | Description | 23 | |-----------|---------------------------------------------------------------| 24 | | PUT | When key exists in both file1 & file2 but value was replaced | 25 | | ADD | When key exists in file2 but not in file1 | 26 | | DEL | When key exists in file1 but not in file2 | 27 | 28 | ::: 29 | 30 | ```json title="settings.json" 31 | { 32 | "operations": ["PUT"] 33 | } 34 | ``` 35 | 36 |
-------------------------------------------------------------------------------- /website/docs/commands/_i18n-files-tabs.mdx: -------------------------------------------------------------------------------- 1 | import Tabs from '@theme/Tabs'; 2 | import TabItem from '@theme/TabItem'; 3 | 4 | 12 | 13 | 14 | ```json title="fr.json" 15 | { 16 | "commons":{ 17 | "myNestedKey":"Hello world FR", 18 | "myNestedArray":[ 19 | "1 FR", 20 | "2 FR", 21 | "3 FR" 22 | ] 23 | }, 24 | "array":[ 25 | "1 FR", 26 | "2 FR", 27 | "3 FR" 28 | ], 29 | "simpleKey":"[FR] not setted key", 30 | "Key with spaces":[ 31 | { 32 | "test":"42 is the answer" 33 | } 34 | ], 35 | "Missing key in DE":"present" 36 | } 37 | ``` 38 | 39 | 40 | 41 | 42 | ```json title="nl.json" 43 | { 44 | "commons":{ 45 | "myNestedKey":"Hello world NL", 46 | "myNestedArray":[ 47 | "1 NL", 48 | "2 NL", 49 | "3 NL" 50 | ] 51 | }, 52 | "array":[ 53 | "1 NL", 54 | "2 NL", 55 | "3 NL" 56 | ], 57 | "simpleKey":"[NL] not setted key", 58 | "Key with spaces":[ 59 | { 60 | "test":"42 is the answer" 61 | } 62 | ], 63 | "Missing key in DE":"present" 64 | } 65 | ``` 66 | 67 | 68 | 69 | 70 | ```json title="de.json" 71 | { 72 | "commons":{ 73 | "myNestedKey":"Hello world DE", 74 | "myNestedArray":[ 75 | "1 DE", 76 | "2 DE", 77 | "3 DE" 78 | ] 79 | }, 80 | "array":[ 81 | "1 DE", 82 | "2 DE", 83 | "3 DE" 84 | ], 85 | "simpleKey":"[DE] not setted key", 86 | "Key with spaces":[ 87 | { 88 | "test":"42 is the answer" 89 | } 90 | ] 91 | } 92 | ``` 93 | 94 | 95 | -------------------------------------------------------------------------------- /website/docs/commands/diff.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 3 3 | sidebar_label: Diff 4 | title: Diff 5 | description: Compare at least two i18n files and generate a report 6 | --- 7 | 8 | import Tabs from '@theme/Tabs'; 9 | import TabItem from '@theme/TabItem'; 10 | import DiffFaq from './_diff-faq.mdx'; 11 | 12 | > Compare at least two i18n files and generate a report 13 | 14 | ## Command 15 | 16 | ```bash 17 | # Display help for diff 18 | npx @jy95/i18n-tools diff --help 19 | ``` 20 | 21 | ## Purpose 22 | 23 | Suppose you have several versions of a i18n locale file such as : 24 | 25 | 33 | 34 | 35 | ```json title="fr_v0.json" 36 | { 37 | "untouchedKey":"Hello World", 38 | "commons":{ 39 | "nestedKey":{ 40 | "changedValue":"Changed value 0" 41 | }, 42 | "array":[ 43 | "Pierre" 44 | ], 45 | "conditionalDeletedKey":"Present" 46 | } 47 | } 48 | ``` 49 | 50 | 51 | 52 | 53 | 54 | ```json title="fr_v1.json" 55 | { 56 | "untouchedKey":"Hello World", 57 | "commons":{ 58 | "nestedKey":{ 59 | "changedValue":"Changed value 1" 60 | }, 61 | "array":[ 62 | "Pierre", 63 | "Paul" 64 | ] 65 | } 66 | } 67 | ``` 68 | 69 | 70 | 71 | 72 | ```json title="fr_v2.json" 73 | { 74 | "untouchedKey":"Hello World", 75 | "commons":{ 76 | "nestedKey":{ 77 | "changedValue":"Changed value 2" 78 | }, 79 | "array":[ 80 | "Pierre", 81 | "Paul", 82 | "Jacques" 83 | ], 84 | "conditionalDeletedKey":"Present" 85 | } 86 | } 87 | ``` 88 | 89 | 90 | 91 | 92 | This command generates a report (in JSON by default) that shows change(s) between files : 93 | 94 | 102 | 103 | 104 | ```json title="fr_v0-fr_v0.json" 105 | { 106 | "files": { 107 | "file1":"D:\\TEMP\\fr_v0.json", 108 | "file2":"D:\\TEMP\\fr_v0.json" 109 | }, 110 | "changes": [] 111 | } 112 | ``` 113 | 114 | 115 | 116 | 117 | 118 | ```json title="fr_v0-fr_v1.json" 119 | { 120 | "files":{ 121 | "file1":"D:\\TEMP\\fr_v0.json", 122 | "file2":"D:\\TEMP\\fr_v1.json" 123 | }, 124 | "changes":[ 125 | { 126 | "from":"file1", 127 | "key":"commons.nestedKey.changedValue", 128 | "newValue":"Changed value 1", 129 | "oldValue":"Changed value 0", 130 | "to":"file2", 131 | "type":"REPLACED" 132 | }, 133 | { 134 | "from":"file1", 135 | "key":"commons.conditionalDeletedKey", 136 | "oldValue":"Present", 137 | "to":"file2", 138 | "type":"DELETE" 139 | }, 140 | { 141 | "from":"file1", 142 | "key":"commons.array[1]", 143 | "newValue":"Paul", 144 | "to":"file2", 145 | "type":"ADD" 146 | } 147 | ] 148 | } 149 | ``` 150 | 151 | 152 | 153 | 154 | 155 | ```json title="fr_v0-fr_v1-fr_v2.json" 156 | { 157 | "files":{ 158 | "file1":"D:\\TEMP\\fr_v0.json", 159 | "file2":"D:\\TEMP\\fr_v1.json", 160 | "file3":"D:\\TEMP\\fr_v2.json" 161 | }, 162 | "changes":[ 163 | { 164 | "key":"commons.nestedKey.changedValue", 165 | "type":"REPLACED", 166 | "from":"file1", 167 | "to":"file2", 168 | "oldValue":"Changed value 0", 169 | "newValue":"Changed value 1" 170 | }, 171 | { 172 | "key":"commons.conditionalDeletedKey", 173 | "type":"DELETE", 174 | "from":"file1", 175 | "to":"file2", 176 | "oldValue":"Present" 177 | }, 178 | { 179 | "key":"commons.array[1]", 180 | "type":"ADD", 181 | "from":"file1", 182 | "to":"file2", 183 | "newValue":"Paul" 184 | }, 185 | { 186 | "key":"commons.nestedKey.changedValue", 187 | "type":"REPLACED", 188 | "from":"file2", 189 | "to":"file3", 190 | "oldValue":"Changed value 1", 191 | "newValue":"Changed value 2" 192 | }, 193 | { 194 | "key":"commons.array[2]", 195 | "type":"ADD", 196 | "from":"file2", 197 | "to":"file3", 198 | "newValue":"Jacques" 199 | }, 200 | { 201 | "key":"commons.conditionalDeletedKey", 202 | "type":"ADD", 203 | "from":"file2", 204 | "to":"file3", 205 | "newValue":"Present" 206 | } 207 | ] 208 | } 209 | ``` 210 | 211 | 212 | 213 | 214 | 215 | ## Examples of settings 216 | 217 | 225 | 226 | 227 | 228 | ```bash 229 | npx @jy95/i18n-tools diff --settings "/absolutePath/to/settings1.json" 230 | ``` 231 | 232 | ```json title="settings1.json" 233 | { 234 | "filename":"diff_settings1-JSON", 235 | "outputDir":"D:\\TEMP\\TEMP", 236 | "outputFormat":"JSON", 237 | "files":[ 238 | "D:\\TEMP\\fr_v0.json", 239 | "D:\\TEMP\\fr_v1.json" 240 | ] 241 | } 242 | ``` 243 | 244 | 245 | 246 | 247 | 248 | ```bash 249 | npx @jy95/i18n-tools diff --settings "/absolutePath/to/settings2.json" 250 | ``` 251 | 252 | ```json title="settings2.json" 253 | { 254 | "filename":"diff_settings2-JSON", 255 | "outputDir":"D:\\TEMP\\TEMP", 256 | "outputFormat":"JSON", 257 | "files":[ 258 | "D:\\TEMP\\fr_v0.json", 259 | "D:\\TEMP\\fr_v1.json", 260 | "D:\\TEMP\\fr_v2.json", 261 | ] 262 | } 263 | ``` 264 | 265 | 266 | 267 | 268 | 269 | ```bash 270 | npx @jy95/i18n-tools diff --settings "/absolutePath/to/settings3.js" 271 | ``` 272 | 273 | ```js title="settings3.js" 274 | module.exports = { 275 | "filename":"diff_settings3-JSON", 276 | "outputDir":"D:\\TEMP\\TEMP", 277 | "outputFormat":"JSON", 278 | "files":[ 279 | "D:\\TEMP\\fr_v0.json", 280 | "D:\\TEMP\\fr_v1.json" 281 | ] 282 | } 283 | ``` 284 | 285 | 286 | 287 | 288 | ## FAQ 289 | 290 | -------------------------------------------------------------------------------- /website/docs/commands/export/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Export commands", 3 | "position": 1 4 | } -------------------------------------------------------------------------------- /website/docs/commands/export/_export-faq.mdx: -------------------------------------------------------------------------------- 1 |
2 | I only want a subset of the data. How can I achieve that ? 3 | 4 | Simply add the `resultsFilter` option in your `settings.json` or `settings.js` : 5 | 6 | :::tip 7 | 8 | Reminder - the type of the function parameter : 9 | ```ts 10 | type I18N_Merged_Data = { 11 | technical_key: string; 12 | labels: { 13 | [locale: string]: string; 14 | }; 15 | }[]; 16 | ``` 17 | 18 | ::: 19 | 20 | ```js title="settings.js" 21 | "resultsFilter": function(data /*: I18N_Merged_Data*/) { 22 | return data.filter((row) => 23 | // Takes rows that have at least a missing label in one i18n file such as "Missing key in DE" case 24 | // Object.keys(row.labels).length !== 3 || 25 | Object 26 | .values(row.labels) 27 | // Takes rows that have at least one empty label or contains a given prefix 28 | .some( 29 | (label) => 30 | label.length === 0 || 31 | ["[FR]", "[NL]", "[DE]"].some((prefix) => label.startsWith(prefix)) 32 | ) 33 | ); 34 | } 35 | ``` 36 | 37 | OR 38 | 39 | ```json title="settings.json" 40 | "resultsFilter": "D:\\TEMP\\TEMP\\resultsFilter.js" 41 | ``` 42 | 43 | ```js title="resultsFilter.js" 44 | module.exports = function(data /*: I18N_Merged_Data*/) { 45 | return data.filter((row) => 46 | // Takes rows that have at least a missing label in one i18n file such as "Missing key in DE" case 47 | // Object.keys(row.labels).length !== 3 || 48 | Object 49 | .values(row.labels) 50 | // Takes rows that have at least one empty label or contains a given prefix 51 | .some( 52 | (label) => 53 | label.length === 0 || 54 | ["[FR]", "[NL]", "[DE]"].some((prefix) => label.startsWith(prefix)) 55 | ) 56 | ); 57 | } 58 | ``` 59 | 60 |
61 | 62 |
63 | I want the locales in a given order in the result file. How can I achieve that ? 64 | 65 | Simply update the `columns` option with your given order in your `settings.json` or `settings.js`, such as : 66 | 67 | ```js title="settings.js" 68 | "columns": [ 69 | { 70 | "locale":"NL", 71 | "label":"Dutch translation" 72 | }, 73 | { 74 | "locale":"FR", 75 | "label":"French translation" 76 | } 77 | ] 78 | ``` 79 | 80 |
81 | 82 |
83 | I only work with flat JSON file(s). How can I make this command work ? 84 | 85 | Simply set option `keySeparator` to `false` in your `settings.json` or `settings.js`, such as : 86 | ```json title="settings.json" 87 | { 88 | "keySeparator": false 89 | } 90 | ``` 91 | 92 |
-------------------------------------------------------------------------------- /website/docs/commands/export/assets/exampleXlsxExport.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/website/docs/commands/export/assets/exampleXlsxExport.png -------------------------------------------------------------------------------- /website/docs/commands/export/assets/export-csv.csv: -------------------------------------------------------------------------------- 1 | Technical Key;French translation;Dutch translation;German translation 2 | Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer 3 | Missing key in DE;present;present; 4 | array[0];1 FR;1 NL;1 DE 5 | array[1];2 FR;2 NL;2 DE 6 | array[2];3 FR;3 NL;3 DE 7 | commons.myNestedArray[0];1 FR;1 NL;1 DE 8 | commons.myNestedArray[1];2 FR;2 NL;2 DE 9 | commons.myNestedArray[2];3 FR;3 NL;3 DE 10 | commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE 11 | simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key -------------------------------------------------------------------------------- /website/docs/commands/export/assets/export-xlsx.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/website/docs/commands/export/assets/export-xlsx.xlsx -------------------------------------------------------------------------------- /website/docs/commands/export/export to_csv.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 2 3 | sidebar_label: export to_csv 4 | title: export to_csv 5 | description: Export i18n files into a csv file 6 | --- 7 | 8 | import Tabs from '@theme/Tabs'; 9 | import TabItem from '@theme/TabItem'; 10 | import ExportFaq from './_export-faq.mdx'; 11 | import I18NFilesTabs from '../_i18n-files-tabs.mdx'; 12 | 13 | > Export i18n files into a csv file 14 | 15 | ## Command 16 | 17 | ```bash 18 | # Display help for export to_csv 19 | npx @jy95/i18n-tools export to_csv --help 20 | ``` 21 | 22 | ## Purpose 23 | 24 | Suppose you have several i18n locales such as : 25 | 26 | 27 | 28 | This command helps you to turn them into a single csv file such as [this one](./assets/export-csv.csv). 29 | 30 | ```csv title="export-csv.csv" 31 | Technical Key;French translation;Dutch translation;German translation 32 | Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer 33 | Missing key in DE;present;present; 34 | array[0];1 FR;1 NL;1 DE 35 | array[1];2 FR;2 NL;2 DE 36 | array[2];3 FR;3 NL;3 DE 37 | commons.myNestedArray[0];1 FR;1 NL;1 DE 38 | commons.myNestedArray[1];2 FR;2 NL;2 DE 39 | commons.myNestedArray[2];3 FR;3 NL;3 DE 40 | commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE 41 | simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key 42 | ``` 43 | 44 | ## Examples of settings 45 | 46 | 54 | 55 | 56 | 57 | ```bash 58 | npx @jy95/i18n-tools export to_csv --settings "/absolutePath/to/settings1.json" 59 | ``` 60 | 61 | ```json title="settings1.json" 62 | { 63 | "files":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\files.json", 64 | "columns":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\columns.json", 65 | "filename":"settings1-output", 66 | "outputDir":"D:\\TEMP\\TEMP" 67 | } 68 | ``` 69 | 70 | ```json title="files.json" 71 | { 72 | "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", 73 | "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", 74 | "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" 75 | } 76 | ``` 77 | 78 | ```json title="columns.json" 79 | [ 80 | { 81 | "locale":"FR", 82 | "label":"French translation" 83 | }, 84 | { 85 | "locale":"NL", 86 | "label":"Dutch translation" 87 | }, 88 | { 89 | "locale":"DE", 90 | "label":"German translation" 91 | } 92 | ] 93 | ``` 94 | 95 | 96 | 97 | 98 | ```bash 99 | npx @jy95/i18n-tools export to_csv --settings "/absolutePath/to/settings2.json" 100 | ``` 101 | 102 | ```json title="settings2.json" 103 | { 104 | "files":{ 105 | "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", 106 | "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", 107 | "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" 108 | }, 109 | "columns":[ 110 | { 111 | "locale":"FR", 112 | "label":"French translation" 113 | }, 114 | { 115 | "locale":"NL", 116 | "label":"Dutch translation" 117 | }, 118 | { 119 | "locale":"DE", 120 | "label":"German translation" 121 | } 122 | ], 123 | "filename":"settings2-output", 124 | "outputDir":"D:\\TEMP\\TEMP" 125 | } 126 | ``` 127 | 128 | 129 | 130 | 131 | ```bash 132 | npx @jy95/i18n-tools export to_csv --settings "/absolutePath/to/settings3.js" 133 | ``` 134 | 135 | ```js title="settings3.js" 136 | module.exports = { 137 | files: ["fr", "nl", "de"].reduce( 138 | (prev, curr) => 139 | Object.assign(prev, { 140 | [curr.toUpperCase()]: `D:\\TEMP\\TEMP\\tests-for-export\\correct\\${curr}.json`, 141 | }), 142 | {} 143 | ), 144 | columns: [ 145 | ["FR", "French translation"], 146 | ["NL", "Dutch translation"], 147 | ["DE", "German translation"], 148 | ].map(([locale, label]) => ({ locale: locale, label: label })), 149 | filename: "settings3-output", 150 | outputDir: "D:\\TEMP\\TEMP" 151 | }; 152 | ``` 153 | 154 | 155 | 156 | 157 | ## FAQ 158 | 159 | -------------------------------------------------------------------------------- /website/docs/commands/export/export to_xlsx.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 1 3 | sidebar_label: export to_xlsx 4 | title: export to_xlsx 5 | description: Export i18n files into a xlsx file, created by exceljs 6 | --- 7 | 8 | import Tabs from '@theme/Tabs'; 9 | import TabItem from '@theme/TabItem'; 10 | import ExportFaq from './_export-faq.mdx'; 11 | import I18NFilesTabs from '../_i18n-files-tabs.mdx'; 12 | 13 | > Export i18n files into a xlsx file, created by exceljs 14 | 15 | ## Command 16 | 17 | ```bash 18 | # Display help for export to_xlsx 19 | npx @jy95/i18n-tools export to_xlsx --help 20 | ``` 21 | 22 | ## Purpose 23 | 24 | Suppose you have several i18n locales such as : 25 | 26 | 27 | 28 | This command helps you to turn them into a single xlsx file such as [this one](./assets/export-xlsx.xlsx). 29 | 30 | ![Export example](./assets/exampleXlsxExport.png) 31 | 32 | ## Examples of settings 33 | 34 | 43 | 44 | 45 | 46 | ```bash 47 | npx @jy95/i18n-tools export to_xlsx --settings "/absolutePath/to/settings1.json" 48 | ``` 49 | 50 | ```json title="settings1.json" 51 | { 52 | "files":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\files.json", 53 | "columns":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\columns.json", 54 | "worksheetName":"Settings 1 - Worksheet", 55 | "filename":"settings1-output", 56 | "outputDir":"D:\\TEMP\\TEMP" 57 | } 58 | ``` 59 | 60 | ```json title="files.json" 61 | { 62 | "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", 63 | "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", 64 | "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" 65 | } 66 | ``` 67 | 68 | ```json title="columns.json" 69 | [ 70 | { 71 | "locale":"FR", 72 | "label":"French translation" 73 | }, 74 | { 75 | "locale":"NL", 76 | "label":"Dutch translation" 77 | }, 78 | { 79 | "locale":"DE", 80 | "label":"German translation" 81 | } 82 | ] 83 | ``` 84 | 85 | 86 | 87 | 88 | ```bash 89 | npx @jy95/i18n-tools export to_xlsx --settings "/absolutePath/to/settings2.json" 90 | ``` 91 | 92 | ```json title="settings2.json" 93 | { 94 | "files":{ 95 | "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", 96 | "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", 97 | "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" 98 | }, 99 | "columns":[ 100 | { 101 | "locale":"FR", 102 | "label":"French translation" 103 | }, 104 | { 105 | "locale":"NL", 106 | "label":"Dutch translation" 107 | }, 108 | { 109 | "locale":"DE", 110 | "label":"German translation" 111 | } 112 | ], 113 | "worksheetName":"Settings 2 - Worksheet", 114 | "filename":"settings2-output", 115 | "outputDir":"D:\\TEMP\\TEMP" 116 | } 117 | ``` 118 | 119 | 120 | 121 | 122 | ```bash 123 | npx @jy95/i18n-tools export to_xlsx --settings "/absolutePath/to/settings3.json" 124 | ``` 125 | 126 | ```json title="settings3.json" 127 | { 128 | "files":{ 129 | "FR":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\fr.json", 130 | "NL":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\nl.json", 131 | "DE":"D:\\TEMP\\TEMP\\tests-for-export\\correct\\de.json" 132 | }, 133 | "columns":[ 134 | { 135 | "locale":"FR", 136 | "label":"French translation" 137 | }, 138 | { 139 | "locale":"NL", 140 | "label":"Dutch translation" 141 | }, 142 | { 143 | "locale":"DE", 144 | "label":"German translation" 145 | } 146 | ], 147 | "worksheetCustomizer":"D:\\workspace\\i18n-tools\\test\\fixtures\\export-xlsx\\worksheetCustomizer-dynamic.js", 148 | "worksheetName":"Settings 3 - Worksheet", 149 | "filename":"settings3-output", 150 | "outputDir":"D:\\TEMP\\TEMP" 151 | } 152 | ``` 153 | 154 | ```js title="worksheetCustomizer-dynamic.js" 155 | // I keep in comments the stuff needed to convert that into a TS file 156 | // (So I can easily update this script in the future) 157 | //import { Worksheet } from "exceljs"; 158 | 159 | module.exports = async function(worksheet /*: Worksheet*/) { 160 | // Conditionaly formatting (to better view stuff) 161 | let rowCount = worksheet.rowCount; 162 | let columnCount = worksheet.columnCount; 163 | 164 | // for easiness in the future, for arbitrary number of translations 165 | // As table have a least one language (starting to 'B'), pretty simple to compute last column letter 166 | let lastColumnLetter = String.fromCharCode(66 + (columnCount - 2)); 167 | 168 | // domain for rules ; All the cells 169 | // Ex : "B2:D" + rowCount + 2" for three languages 170 | let computedRef = `B2:${lastColumnLetter + rowCount + 2}`; 171 | 172 | worksheet.addConditionalFormatting({ 173 | ref: computedRef, 174 | rules: [ 175 | // cell is empty : put it in red 176 | { 177 | type: 'containsText', 178 | operator: 'containsBlanks', 179 | style: { 180 | fill: { 181 | type: 'pattern', 182 | pattern: 'solid', 183 | bgColor: { argb: 'FF5733' }, 184 | }, 185 | }, 186 | priority: 1, 187 | }, 188 | // cell contains either [FR], [NL] or [DE] : put it in orange 189 | { 190 | type: 'containsText', 191 | operator: 'containsText', 192 | text: '[FR]', 193 | style: { 194 | fill: { 195 | type: 'pattern', 196 | pattern: 'solid', 197 | bgColor: { argb: 'FF9633' }, 198 | }, 199 | }, 200 | priority: 2, 201 | }, 202 | { 203 | type: 'containsText', 204 | operator: 'containsText', 205 | text: '[NL]', 206 | style: { 207 | fill: { 208 | type: 'pattern', 209 | pattern: 'solid', 210 | bgColor: { argb: 'FF9633' }, 211 | }, 212 | }, 213 | priority: 2, 214 | }, 215 | { 216 | type: 'containsText', 217 | operator: 'containsText', 218 | text: '[DE]', 219 | style: { 220 | fill: { 221 | type: 'pattern', 222 | pattern: 'solid', 223 | bgColor: { argb: 'FF9633' }, 224 | }, 225 | }, 226 | priority: 2, 227 | }, 228 | ], 229 | }); 230 | 231 | return worksheet; 232 | }; 233 | ``` 234 | 235 | 236 | 237 | 238 | ```bash 239 | npx @jy95/i18n-tools export to_xlsx --settings "/absolutePath/to/settings4.js" 240 | ``` 241 | 242 | ```js title="settings4.js" 243 | // I keep in comments the stuff needed for the typings 244 | // (So I can easily update this script in the future) 245 | //import { Worksheet } from "exceljs"; 246 | 247 | module.exports = { 248 | files: ["fr", "nl", "de"].reduce( 249 | (prev, curr) => 250 | Object.assign(prev, { 251 | [curr.toUpperCase()]: `D:\\TEMP\\TEMP\\tests-for-export\\correct\\${curr}.json`, 252 | }), 253 | {} 254 | ), 255 | columns: [ 256 | ["FR", "French translation"], 257 | ["NL", "Dutch translation"], 258 | ["DE", "German translation"], 259 | ].map(([locale, label]) => ({ locale: locale, label: label })), 260 | worksheetCustomizer: async function (worksheet /*: Worksheet*/) { 261 | // Conditionaly formatting (to better view stuff) 262 | let rowCount = worksheet.rowCount; 263 | let columnCount = worksheet.columnCount; 264 | 265 | // for easiness in the future, for arbitrary number of translations 266 | // As table have a least one language (starting to 'B'), pretty simple to compute last column letter 267 | let lastColumnLetter = String.fromCharCode(66 + (columnCount - 2)); 268 | 269 | // domain for rules ; All the cells 270 | // Ex : "B2:D" + rowCount + 2" for three languages 271 | let computedRef = `B2:${lastColumnLetter + rowCount + 2}`; 272 | 273 | worksheet.addConditionalFormatting({ 274 | ref: computedRef, 275 | rules: [ 276 | // cell is empty : put it in red 277 | { 278 | type: "containsText", 279 | operator: "containsBlanks", 280 | style: { 281 | fill: { 282 | type: "pattern", 283 | pattern: "solid", 284 | bgColor: { argb: "FF5733" }, 285 | }, 286 | }, 287 | priority: 1, 288 | }, 289 | // cell contains either [FR], [NL] or [DE] : put it in orange 290 | { 291 | type: "containsText", 292 | operator: "containsText", 293 | text: "[FR]", 294 | style: { 295 | fill: { 296 | type: "pattern", 297 | pattern: "solid", 298 | bgColor: { argb: "FF9633" }, 299 | }, 300 | }, 301 | priority: 2, 302 | }, 303 | { 304 | type: "containsText", 305 | operator: "containsText", 306 | text: "[NL]", 307 | style: { 308 | fill: { 309 | type: "pattern", 310 | pattern: "solid", 311 | bgColor: { argb: "FF9633" }, 312 | }, 313 | }, 314 | priority: 2, 315 | }, 316 | { 317 | type: "containsText", 318 | operator: "containsText", 319 | text: "[DE]", 320 | style: { 321 | fill: { 322 | type: "pattern", 323 | pattern: "solid", 324 | bgColor: { argb: "FF9633" }, 325 | }, 326 | }, 327 | priority: 2, 328 | }, 329 | ], 330 | }); 331 | return worksheet; 332 | }, 333 | worksheetName: "Settings 4 - Worksheet", 334 | filename: "settings4-output", 335 | outputDir: "D:\\TEMP\\TEMP" 336 | }; 337 | ``` 338 | 339 | 340 | 341 | 342 | 343 | ## FAQ 344 | 345 | 346 | -------------------------------------------------------------------------------- /website/docs/commands/export/index.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_label: Export commands 3 | title: Export commands 4 | id: export 5 | description: Export i18n files into something else 6 | --- 7 | 8 | import DocCardList from '@theme/DocCardList'; 9 | 10 | ```bash 11 | # Display all available export commands 12 | npx @jy95/i18n-tools export --help 13 | ``` 14 | 15 | -------------------------------------------------------------------------------- /website/docs/commands/import/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Import commands", 3 | "position": 2 4 | } -------------------------------------------------------------------------------- /website/docs/commands/import/_import-faq.mdx: -------------------------------------------------------------------------------- 1 |
2 | I want as result flat JSON file(s). How can I achieve that ? 3 | 4 | Simply set option `keySeparator` to `false` in your `settings.json` or `settings.js`, such as : 5 | ```json title="settings.json" 6 | { 7 | "keySeparator": false 8 | } 9 | ``` 10 | 11 |
-------------------------------------------------------------------------------- /website/docs/commands/import/import from_csv.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 2 3 | sidebar_label: import from_csv 4 | title: import from_csv 5 | description: Turn a csv file to i18n file(s) 6 | --- 7 | 8 | import Tabs from '@theme/Tabs'; 9 | import TabItem from '@theme/TabItem'; 10 | import ImportFaq from './_import-faq.mdx'; 11 | import I18NFilesTabs from '../_i18n-files-tabs.mdx'; 12 | 13 | > Turn a csv file to i18n file(s) 14 | 15 | ## Command 16 | 17 | ```bash 18 | # Display help for import from_csv 19 | npx @jy95/i18n-tools import from_csv --help 20 | ``` 21 | 22 | ## Purpose 23 | 24 | Suppose you have a [csv file](../export/assets/export-csv.csv) structured as : 25 | 26 | ```csv title="export-csv.csv" 27 | Technical Key;French translation;Dutch translation;German translation 28 | Key with spaces[0].test;42 is the answer;42 is the answer;42 is the answer 29 | Missing key in DE;present;present; 30 | array[0];1 FR;1 NL;1 DE 31 | array[1];2 FR;2 NL;2 DE 32 | array[2];3 FR;3 NL;3 DE 33 | commons.myNestedArray[0];1 FR;1 NL;1 DE 34 | commons.myNestedArray[1];2 FR;2 NL;2 DE 35 | commons.myNestedArray[2];3 FR;3 NL;3 DE 36 | commons.myNestedKey;Hello world FR;Hello world NL;Hello world DE 37 | simpleKey;[FR] not setted key;[NL] not setted key;[DE] not setted key 38 | ``` 39 | 40 | This command helps you to turn this into several i18n json files : 41 | 42 | 43 | 44 | ## Examples of settings 45 | 46 | 54 | 55 | 56 | 57 | ```bash 58 | npx @jy95/i18n-tools import from_csv --settings "/absolutePath/to/settings1.json" 59 | ``` 60 | 61 | ```json title="settings1.json" 62 | { 63 | "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-csv\\export-csv.csv", 64 | "columns":"D:\\TEMP\\TEMP\\tests-for-import\\correct\\columns.json", 65 | "locales":[ 66 | "FR", 67 | "NL", 68 | "DE" 69 | ], 70 | "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", 71 | "suffix":"_settings1" 72 | } 73 | ``` 74 | 75 | ```json title="columns.json" 76 | { 77 | "technical_key":"Technical Key", 78 | "locales":{ 79 | "FR":"French translation", 80 | "NL":"Dutch translation", 81 | "DE":"German translation" 82 | } 83 | } 84 | ``` 85 | 86 | 87 | 88 | 89 | 90 | ```bash 91 | npx @jy95/i18n-tools import from_csv --settings "/absolutePath/to/settings2.json" 92 | ``` 93 | 94 | ```json title="settings2.json" 95 | { 96 | "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-csv\\export-csv.csv", 97 | "columns":{ 98 | "technical_key":"Technical Key", 99 | "locales":{ 100 | "FR":"French translation", 101 | "NL":"Dutch translation", 102 | "DE":"German translation" 103 | } 104 | }, 105 | "locales":[ 106 | "FR", 107 | "NL", 108 | "DE" 109 | ], 110 | "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", 111 | "suffix":"_settings2" 112 | } 113 | ``` 114 | 115 | 116 | 117 | 118 | 119 | ```bash 120 | npx @jy95/i18n-tools import from_csv --settings "/absolutePath/to/settings3.js" 121 | ``` 122 | 123 | ```js title="settings3.js" 124 | module.exports = { 125 | input: "D:\\workspace\\i18n-tools\\test\\fixtures\\import-csv\\export-csv.csv", 126 | columns: { 127 | technical_key: "Technical Key", 128 | locales: [ 129 | ["FR", "French translation"], 130 | ["NL", "Dutch translation"], 131 | ["DE", "German translation"], 132 | ].reduce( 133 | (prev, [locale, label]) => 134 | Object.assign(prev, { 135 | [locale]: label, 136 | }), 137 | {} 138 | ), 139 | }, 140 | locales: ["FR", "NL", "DE"], 141 | outputDir: "D:\\TEMP\\TEMP\\tests-for-import", 142 | suffix: "_settings3", 143 | }; 144 | ``` 145 | 146 | 147 | 148 | 149 | ## FAQ 150 | 151 | -------------------------------------------------------------------------------- /website/docs/commands/import/import from_xlsx.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 1 3 | sidebar_label: import from_xlsx 4 | title: import from_xlsx 5 | description: Turn a xlsx file to i18n file(s) 6 | --- 7 | 8 | import Tabs from '@theme/Tabs'; 9 | import TabItem from '@theme/TabItem'; 10 | import ImportFaq from './_import-faq.mdx'; 11 | import I18NFilesTabs from '../_i18n-files-tabs.mdx'; 12 | 13 | > Turn a xlsx file to i18n file(s) 14 | 15 | ## Command 16 | 17 | ```bash 18 | # Display help for import from_xlsx 19 | npx @jy95/i18n-tools import from_xlsx --help 20 | ``` 21 | 22 | ## Purpose 23 | 24 | Suppose you have a [xlsx file](../export/assets/export-xlsx.xlsx) structured as : 25 | 26 | ![xlsx file](../export/assets/exampleXlsxExport.png) 27 | 28 | This command helps you to turn this into several i18n json files : 29 | 30 | 31 | 32 | ## Examples of settings 33 | 34 | 42 | 43 | 44 | 45 | ```bash 46 | npx @jy95/i18n-tools import from_xlsx --settings "/absolutePath/to/settings1.json" 47 | ``` 48 | 49 | ```json title="settings1.json" 50 | { 51 | "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-xlsx\\export-xlsx.xlsx", 52 | "columns":"D:\\TEMP\\TEMP\\tests-for-import\\correct\\columns.json", 53 | "locales":[ 54 | "FR", 55 | "NL", 56 | "DE" 57 | ], 58 | "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", 59 | "suffix":"_settings1" 60 | } 61 | ``` 62 | 63 | ```json title="columns.json" 64 | { 65 | "technical_key":"Technical Key", 66 | "locales":{ 67 | "FR":"French translation", 68 | "NL":"Dutch translation", 69 | "DE":"German translation" 70 | } 71 | } 72 | ``` 73 | 74 | 75 | 76 | 77 | 78 | ```bash 79 | npx @jy95/i18n-tools import from_xlsx --settings "/absolutePath/to/settings2.json" 80 | ``` 81 | 82 | ```json title="settings2.json" 83 | { 84 | "input":"D:\\workspace\\i18n-tools\\test\\fixtures\\import-xlsx\\export-xlsx.xlsx", 85 | "columns":{ 86 | "technical_key":"Technical Key", 87 | "locales":{ 88 | "FR":"French translation", 89 | "NL":"Dutch translation", 90 | "DE":"German translation" 91 | } 92 | }, 93 | "locales":[ 94 | "FR", 95 | "NL", 96 | "DE" 97 | ], 98 | "outputDir":"D:\\TEMP\\TEMP\\tests-for-import", 99 | "suffix":"_settings2" 100 | } 101 | ``` 102 | 103 | 104 | 105 | 106 | 107 | ```bash 108 | npx @jy95/i18n-tools import from_xlsx --settings "/absolutePath/to/settings3.js" 109 | ``` 110 | 111 | ```js title="settings3.js" 112 | module.exports = { 113 | input: "D:\\workspace\\i18n-tools\\test\\fixtures\\import-xlsx\\export-xlsx.xlsx", 114 | columns: { 115 | technical_key: "Technical Key", 116 | locales: [ 117 | ["FR", "French translation"], 118 | ["NL", "Dutch translation"], 119 | ["DE", "German translation"], 120 | ].reduce( 121 | (prev, [locale, label]) => 122 | Object.assign(prev, { 123 | [locale]: label, 124 | }), 125 | {} 126 | ), 127 | }, 128 | locales: ["FR", "NL", "DE"], 129 | outputDir: "D:\\TEMP\\TEMP\\tests-for-import", 130 | suffix: "_settings3", 131 | }; 132 | ``` 133 | 134 | 135 | 136 | 137 | ## FAQ 138 | 139 | -------------------------------------------------------------------------------- /website/docs/commands/import/index.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_label: Import commands 3 | title: Import commands 4 | id: import 5 | description: Turn something to i18n file(s) 6 | --- 7 | 8 | import DocCardList from '@theme/DocCardList'; 9 | 10 | ```bash 11 | # Display all available import commands 12 | npx @jy95/i18n-tools import --help 13 | ``` 14 | 15 | -------------------------------------------------------------------------------- /website/docs/commands/index.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_label: Commands 3 | title: Commands 4 | id: commands 5 | --- 6 | 7 | import DocCardList from '@theme/DocCardList'; 8 | 9 | ```bash 10 | # Display all available commands 11 | npx @jy95/i18n-tools --help 12 | ``` 13 | 14 | -------------------------------------------------------------------------------- /website/docs/faq.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 3 3 | sidebar_label: FAQ 4 | --- 5 | 6 | # FAQ 7 | 8 | ## How do I apply the replace changes listed by the JSON output of diff command ? 9 | 10 | Simply with a Nodejs script : 11 | 12 | ```js 13 | const _ = require('lodash'); 14 | const path = require('path'); 15 | const fs = require('fs').promises; 16 | 17 | try { 18 | // TODO replace with path to your file in develop or whatever branch 19 | let originalFilePath = path.resolve(__dirname, "fr.json"); 20 | let jsonData = await fs.readFile(originalFilePath, 'utf8'); 21 | let currentObj = JSON.parse(jsonData); 22 | 23 | // TODO replace with path to the file generated by diff command 24 | let changesFilePath = path.resolve(__dirname, "diff_fr.json"); 25 | let jsonData2 = await fs.readFile(changesFilePath, 'utf8'); 26 | let changesFile = JSON.parse(jsonData2); 27 | let changes = changesFile.changes; 28 | 29 | // Proper variable for that 30 | let result = Object.assign({}, currentObj); 31 | 32 | // Add changed values 33 | // (Feel free to edit the file generated by diff command if you want to override some changes) 34 | for(let modifiedField of changes.filter(c => ["REPLACED"].includes(c.type)) ) { 35 | _.set(result, modifiedField.key, modifiedField.newValue); 36 | } 37 | 38 | // write result 39 | // TODO Add a path for destination 40 | await fs.writeFile("", JSON.stringify(result, null, 4)); 41 | 42 | } catch(err) { 43 | console.warn("Something bad happend"); 44 | console.error(err); 45 | process.exit(1); 46 | } 47 | ``` -------------------------------------------------------------------------------- /website/docs/installation.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 1 3 | sidebar_label: Installation 4 | --- 5 | 6 | # Installation 7 | 8 | ## Requirements {#requirements} 9 | 10 | - [Node.js](https://nodejs.org/en/download/) version >= 12 or above (which can be checked by running `node -v`). You can use [nvm](https://github.com/nvm-sh/nvm) for managing multiple Node versions on a single machine installed 11 | 12 | :::tip Single installation 13 | 14 | If not globally installed, Npx reinstalls the cli each time you run this command. 15 | Thus feel free to install it globally : `npm i -g @jy95/i18n-tools` 16 | (And not forgetting to update it when needed `npm update -g @jy95/i18n-tools` ) 17 | ::: -------------------------------------------------------------------------------- /website/docusaurus.config.js: -------------------------------------------------------------------------------- 1 | const {themes} = require('prism-react-renderer'); 2 | const lightCodeTheme = themes.github; 3 | const darkCodeTheme = themes.dracula; 4 | 5 | // With JSDoc @type annotations, IDEs can provide config autocompletion 6 | /** @type {import('@docusaurus/types').DocusaurusConfig} */ 7 | (module.exports = { 8 | title: '@jy95/i18n-tools', 9 | tagline: 'CLI to make common operations around i18n files simpler', 10 | url: 'https://jy95.github.io', 11 | baseUrl: '/i18n-tools/', 12 | trailingSlash: false, // Needed for Gh pages - https://github.com/facebook/docusaurus/issues/5026 13 | onBrokenLinks: 'throw', 14 | onBrokenMarkdownLinks: 'warn', 15 | favicon: 'img/favicon.ico', 16 | organizationName: 'jy95', // Usually your GitHub org/user name. 17 | projectName: 'i18n-tools', // Usually your repo name. 18 | 19 | presets: [ 20 | [ 21 | '@docusaurus/preset-classic', 22 | /** @type {import('@docusaurus/preset-classic').Options} */ 23 | ({ 24 | docs: { 25 | sidebarPath: require.resolve('./sidebars.js'), 26 | // Please change this to your repo. 27 | editUrl: 'https://github.com/jy95/i18n-tools/edit/master/website/', 28 | }, 29 | blog: { 30 | showReadingTime: true, 31 | // Please change this to your repo. 32 | editUrl: 33 | 'https://github.com/jy95/i18n-tools/edit/master/website/blog/', 34 | }, 35 | theme: { 36 | customCss: require.resolve('./src/css/custom.css'), 37 | }, 38 | }), 39 | ], 40 | ], 41 | 42 | themeConfig: 43 | /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ 44 | ({ 45 | algolia: { 46 | appId: 'L8HFQGH6GF', 47 | apiKey: 'e8b62ed43826085b41fcf6056c7fe4dd', 48 | indexName: 'i18n-tool' 49 | }, 50 | navbar: { 51 | title: '@jy95/i18n-tools', 52 | logo: { 53 | alt: '@jy95/i18n-tools', 54 | src: 'img/logo.svg', 55 | }, 56 | items: [ 57 | { 58 | type: 'doc', 59 | docId: 'installation', 60 | position: 'left', 61 | label: 'Tutorial', 62 | }, 63 | {to: '/blog', label: 'Blog', position: 'left'}, 64 | { 65 | href: 'https://github.com/jy95/i18n-tools', 66 | label: 'GitHub', 67 | position: 'right', 68 | }, 69 | ], 70 | }, 71 | footer: { 72 | style: 'dark', 73 | copyright: `Copyright © ${new Date().getFullYear()} @jy95/i18n-tools. Built with Docusaurus.`, 74 | }, 75 | prism: { 76 | theme: lightCodeTheme, 77 | darkTheme: darkCodeTheme, 78 | additionalLanguages: ['csv'] 79 | }, 80 | }), 81 | }); 82 | -------------------------------------------------------------------------------- /website/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "website", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "clear": "docusaurus clear", 12 | "serve": "docusaurus serve", 13 | "write-translations": "docusaurus write-translations", 14 | "write-heading-ids": "docusaurus write-heading-ids", 15 | "typecheck": "tsc" 16 | }, 17 | "dependencies": { 18 | "@docusaurus/core": "^3.6.1", 19 | "@docusaurus/preset-classic": "^3.7.0", 20 | "@jy95/i18n-tools": "file:..", 21 | "@mdx-js/react": "^3.1.0", 22 | "@svgr/webpack": "^8.0.1", 23 | "clsx": "^2.0.0", 24 | "file-loader": "^6.2.0", 25 | "prism-react-renderer": "^2.4.1", 26 | "react": "^19.1.0", 27 | "react-dom": "^19.1.0", 28 | "url-loader": "^4.1.1" 29 | }, 30 | "devDependencies": { 31 | "@docusaurus/module-type-aliases": "^3.6.1", 32 | "@docusaurus/tsconfig": "^3.7.0", 33 | "@types/react": "^19.1.2", 34 | "@types/react-helmet": "^6.1.7", 35 | "@types/react-router-dom": "^5.3.3", 36 | "typescript": "^5.2.2" 37 | }, 38 | "browserslist": { 39 | "production": [ 40 | ">0.5%", 41 | "not dead", 42 | "not op_mini all" 43 | ], 44 | "development": [ 45 | "last 1 chrome version", 46 | "last 1 firefox version", 47 | "last 1 safari version" 48 | ] 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /website/sidebars.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Creating a sidebar enables you to: 3 | - create an ordered group of docs 4 | - render a sidebar for each doc of that group 5 | - provide next/previous navigation 6 | 7 | The sidebars can be generated from the filesystem, or explicitly defined here. 8 | 9 | Create as many sidebars as you want. 10 | */ 11 | 12 | module.exports = { 13 | // By default, Docusaurus generates a sidebar from the docs folder structure 14 | tutorialSidebar: [{type: 'autogenerated', dirName: '.'}], 15 | 16 | // But you can create a sidebar manually 17 | /* 18 | tutorialSidebar: [ 19 | { 20 | type: 'category', 21 | label: 'Tutorial', 22 | items: ['hello'], 23 | }, 24 | ], 25 | */ 26 | }; 27 | -------------------------------------------------------------------------------- /website/src/components/HomepageFeatures.module.css: -------------------------------------------------------------------------------- 1 | .features { 2 | display: flex; 3 | align-items: center; 4 | padding: 2rem 0; 5 | width: 100%; 6 | } 7 | 8 | .featureSvg { 9 | height: 200px; 10 | width: 200px; 11 | } 12 | -------------------------------------------------------------------------------- /website/src/components/HomepageFeatures.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) Facebook, Inc. and its affiliates. 3 | * 4 | * This source code is licensed under the MIT license found in the 5 | * LICENSE file in the root directory of this source tree. 6 | */ 7 | import React from 'react'; 8 | import clsx from 'clsx'; 9 | import styles from './HomepageFeatures.module.css'; 10 | import useBaseUrl from '@docusaurus/useBaseUrl'; 11 | 12 | type FeatureItem = { 13 | title: string; 14 | image: string; 15 | description: JSX.Element; 16 | }; 17 | 18 | const FeatureList: FeatureItem[] = [ 19 | { 20 | title: 'Export', 21 | image: '/img/export.svg', 22 | description: ( 23 | <> 24 | Export i18n files into something else (xlsx, csv, ...) 25 | 26 | ), 27 | }, 28 | { 29 | title: 'Import', 30 | image: '/img/import.svg', 31 | description: ( 32 | <> 33 | Turn a file (xlsx, csv, ...) to i18n file(s) 34 | 35 | ), 36 | }, 37 | { 38 | title: 'Diff', 39 | image: '/img/diff.svg', 40 | description: ( 41 | <> 42 | Compare at least two i18n files and generate a report 43 | 44 | ), 45 | }, 46 | ]; 47 | 48 | function Feature({title, image, description}: FeatureItem) { 49 | return ( 50 |
51 |
52 | {title} 53 |
54 |
55 |

{title}

56 |

{description}

57 |
58 |
59 | ); 60 | } 61 | 62 | export default function HomepageFeatures(): JSX.Element { 63 | return ( 64 |
65 |
66 |
67 | {FeatureList.map((props, idx) => ( 68 | 69 | ))} 70 |
71 |
72 |
73 | ); 74 | } 75 | -------------------------------------------------------------------------------- /website/src/css/custom.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Any CSS included here will be global. The classic template 3 | * bundles Infima by default. Infima is a CSS framework designed to 4 | * work well for content-centric websites. 5 | */ 6 | 7 | /* You can override the default Infima variables here. */ 8 | :root { 9 | --ifm-color-primary: #2557c2; 10 | --ifm-color-primary-dark: #214eaf; 11 | --ifm-color-primary-darker: #1f4aa5; 12 | --ifm-color-primary-darkest: #1a3d88; 13 | --ifm-color-primary-light: #2960d5; 14 | --ifm-color-primary-lighter: #3267d8; 15 | --ifm-color-primary-lightest: #4f7cdd; 16 | --ifm-code-font-size: 95%; 17 | } 18 | 19 | .docusaurus-highlight-code-line { 20 | background-color: rgba(0, 0, 0, 0.1); 21 | display: block; 22 | margin: 0 calc(-1 * var(--ifm-pre-padding)); 23 | padding: 0 var(--ifm-pre-padding); 24 | } 25 | 26 | html[data-theme='dark'] .docusaurus-highlight-code-line { 27 | background-color: rgba(0, 0, 0, 0.3); 28 | } 29 | -------------------------------------------------------------------------------- /website/src/pages/index.module.css: -------------------------------------------------------------------------------- 1 | /** 2 | * CSS files with the .module.css suffix will be treated as CSS modules 3 | * and scoped locally. 4 | */ 5 | 6 | .heroBanner { 7 | padding: 1rem 0; 8 | text-align: center; 9 | position: relative; 10 | overflow: hidden; 11 | } 12 | 13 | @media screen and (max-width: 966px) { 14 | .heroBanner { 15 | padding: 2rem; 16 | } 17 | } 18 | 19 | .buttons { 20 | display: flex; 21 | align-items: center; 22 | justify-content: center; 23 | } 24 | -------------------------------------------------------------------------------- /website/src/pages/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import clsx from 'clsx'; 3 | import Layout from '@theme/Layout'; 4 | import Link from '@docusaurus/Link'; 5 | import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; 6 | import styles from './index.module.css'; 7 | import HomepageFeatures from '../components/HomepageFeatures'; 8 | 9 | function HomepageHeader() { 10 | const {siteConfig} = useDocusaurusContext(); 11 | return ( 12 |
13 |
14 |

{siteConfig.title}

15 |

{siteConfig.tagline}

16 |
17 | 20 | Discover it now ✨ 21 | 22 |
23 |
24 |
25 | ); 26 | } 27 | 28 | export default function Home(): JSX.Element { 29 | const {siteConfig} = useDocusaurusContext(); 30 | return ( 31 | 34 | 35 |
36 | 37 |
38 |
39 | ); 40 | } 41 | -------------------------------------------------------------------------------- /website/static/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/website/static/.nojekyll -------------------------------------------------------------------------------- /website/static/img/diff.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /website/static/img/export.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 7 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /website/static/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jy95/i18n-tools/d479132d67348090a499910b792ff39094ba7aaa/website/static/img/favicon.ico -------------------------------------------------------------------------------- /website/static/img/import.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 7 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /website/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | // This file is not used in compilation. It is here just for a nice editor experience. 3 | "extends": "@docusaurus/tsconfig", 4 | "compilerOptions": { 5 | "baseUrl": "." 6 | } 7 | } 8 | --------------------------------------------------------------------------------