├── .commitlintrc.yml
├── .devcontainer
│   ├── Dockerfile
│   └── devcontainer.json
├── .dockerignore
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── FEATURE_REQUEST.yml
│   │   ├── ISSUE.yml
│   │   └── config.yml
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       ├── create-release.yml
│       ├── docs.yml
│       ├── perform-release.yml
│       ├── pr.yml
│       └── prerelease.yml
├── .gitignore
├── .prettierignore
├── .prettierrc
├── .tool-versions
├── .yarn
│   └── releases
│       └── yarn-4.2.2.cjs
├── .yarnrc.yml
├── CHANGELOG.md
├── CNAME
├── CONFIG.md
├── Dockerfile
├── LICENSE
├── README.md
├── ci
│   └── generate-config-docs.ts
├── codecov.yml
├── eslint.config.mjs
├── jest.config.ts
├── lefthook.yml
├── package.json
├── src
│   ├── client
│   │   ├── host.ts
│   │   ├── index.ts
│   │   ├── v5
│   │   │   └── index.ts
│   │   └── v6
│   │       └── index.ts
│   ├── config
│   │   ├── index.ts
│   │   ├── parse.ts
│   │   └── schema.ts
│   ├── index.ts
│   ├── log.ts
│   ├── notify.ts
│   ├── sync.ts
│   └── util
│       └── string-case.ts
├── test
│   ├── containers.ts
│   ├── docker.ts
│   ├── e2e.sh
│   ├── e2e
│   │   └── two-targets.test.ts
│   ├── integration.sh
│   ├── integration
│   │   └── client
│   │       ├── v5
│   │       │   └── index.test.ts
│   │       └── v6
│   │           └── index.test.ts
│   └── unit
│       ├── client
│       │   ├── host.test.ts
│       │   ├── index.test.ts
│       │   ├── v5
│       │   │   └── index.test.ts
│       │   └── v6
│       │       └── index.test.ts
│       ├── config
│       │   ├── index.test.ts
│       │   └── parse.test.ts
│       ├── index.test.ts
│       ├── log.test.ts
│       ├── notify.test.ts
│       ├── sync.test.ts
│       └── util
│           └── string-case.test.ts
├── tsconfig.json
└── yarn.lock
/.commitlintrc.yml:
--------------------------------------------------------------------------------
1 | extends:
2 | - '@commitlint/config-conventional'
3 |
--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG VARIANT
2 | FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-${VARIANT}
3 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Orbital Sync",
3 | "build": {
4 | "dockerfile": "Dockerfile",
5 | "args": {
6 | "VARIANT": "18-bullseye"
7 | }
8 | },
9 |
10 | "customizations": {
11 | "vscode": {
12 | "extensions": ["dbaeumer.vscode-eslint"]
13 | }
14 | },
15 | "remoteUser": "node",
16 | "features": {
17 | "docker-from-docker": "latest",
18 | "git": "latest"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .idea
3 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml:
--------------------------------------------------------------------------------
1 | name: Feature Request
2 | description: Request a feature
3 | labels: ['feature']
4 | assignees:
5 | - mattwebbio
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to create an issue!
11 | - type: textarea
12 | id: feature-request
13 | attributes:
14 | label: What feature would you like added?
15 | description: What's missing? Why do you need it?
16 | placeholder: I would like to be able to...
17 | validations:
18 | required: true
19 | - type: input
20 | id: version
21 | attributes:
22 | label: Version
23 | description: What version of Orbital Sync are you running?
24 | - type: textarea
25 | id: logs
26 | attributes:
27 | label: Log output
28 | description: Please copy and paste any relevant log output from Orbital Sync. This will be automatically formatted, so no need for backticks.
29 | render: shell
30 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/ISSUE.yml:
--------------------------------------------------------------------------------
1 | name: Bug or Other Issue
2 | description: Get help with an issue or file a bug report
3 | labels: ['triage']
4 | assignees:
5 | - mattwebbio
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to create an issue!
11 | - type: textarea
12 | id: what-happened
13 | attributes:
14 | label: What happened?
15 | description: Or, what's the issue you're having? Also, what did you expect to happen?
16 | placeholder: A bug happened!
17 | validations:
18 | required: true
19 | - type: input
20 | id: version
21 | attributes:
22 | label: Version
23 | description: What version of Orbital Sync are you running?
24 | validations:
25 | required: true
26 | - type: textarea
27 | id: logs
28 | attributes:
29 | label: Log output
30 | description: Please copy and paste any relevant log output from Orbital Sync. This will be automatically formatted, so no need for backticks.
31 | render: shell
32 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: 'npm'
4 | directory: '/'
5 | schedule:
6 | interval: 'weekly'
7 | commit-message:
8 | prefix: fix
9 | prefix-development: chore
10 | - package-ecosystem: 'github-actions'
11 | directory: '/'
12 | schedule:
13 | interval: 'weekly'
14 | commit-message:
15 | prefix: ci
16 | prefix-development: ci
17 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master]
7 | schedule:
8 | - cron: '0 0 * * 0'
9 |
10 | concurrency:
11 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
12 | cancel-in-progress: true
13 |
14 | jobs:
15 | lint:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: actions/checkout@v4
19 | - uses: actions/setup-node@v4
20 | with:
21 | node-version: '18.x'
22 | cache: 'yarn'
23 | - run: yarn install --immutable
24 | - uses: reviewdog/action-setup@v1
25 | - run: yarn lint
26 | prettier:
27 | runs-on: ubuntu-latest
28 | steps:
29 | - uses: actions/checkout@v4
30 | - uses: actions/setup-node@v4
31 | with:
32 | node-version: '18.x'
33 | cache: 'yarn'
34 | - run: yarn install --immutable
35 | - run: yarn prettier
36 | codeql:
37 | runs-on: ubuntu-latest
38 | permissions:
39 | actions: read
40 | contents: read
41 | security-events: write
42 | steps:
43 | - name: Checkout repository
44 | uses: actions/checkout@v4
45 | - name: Initialize CodeQL
46 | uses: github/codeql-action/init@v3
47 | with:
48 | languages: typescript
49 | - uses: actions/setup-node@v4
50 | with:
51 | node-version: '18.x'
52 | cache: 'yarn'
53 | - run: yarn install --immutable
54 | - name: Perform CodeQL Analysis
55 | uses: github/codeql-action/analyze@v3
56 | test:
57 | runs-on: ubuntu-latest
58 | steps:
59 | - uses: actions/checkout@v4
60 | - uses: actions/setup-node@v4
61 | with:
62 | node-version: '18.x'
63 | cache: 'yarn'
64 | - run: yarn install --immutable
65 | - run: yarn test:unit
66 | - uses: codecov/codecov-action@v5
67 | if: ${{ github.actor != 'dependabot[bot]' && github.actor != 'tiltcamp-bot' }}
68 | with:
69 | flags: unit
70 | token: ${{ secrets.CODECOV_TOKEN }}
71 | fail_ci_if_error: true
72 | e2e:
73 | runs-on: ubuntu-latest
74 | steps:
75 | - uses: actions/checkout@v4
76 | - uses: actions/setup-node@v4
77 | with:
78 | node-version: '18.x'
79 | cache: 'yarn'
80 | - run: yarn install --immutable
81 | - run: yarn test:e2e
82 | integration:
83 | runs-on: ubuntu-latest
84 | steps:
85 | - uses: actions/checkout@v4
86 | - uses: actions/setup-node@v4
87 | with:
88 | node-version: '18.x'
89 | cache: 'yarn'
90 | - run: yarn install --immutable
91 | - run: yarn test:integration
92 | - uses: codecov/codecov-action@v5
93 | if: ${{ github.actor != 'dependabot[bot]' && github.actor != 'tiltcamp-bot' }}
94 | with:
95 | flags: integration
96 | token: ${{ secrets.CODECOV_TOKEN }}
97 | fail_ci_if_error: true
98 | docs:
99 | runs-on: ubuntu-latest
100 | if: github.event_name == 'pull_request'
101 | steps:
102 | - uses: actions/checkout@v4
103 | - uses: actions/setup-node@v4
104 | with:
105 | node-version: '18.x'
106 | cache: 'yarn'
107 | - run: yarn install --immutable
108 | - run: yarn ci:generate-config-docs
109 | - run: cat CONFIG.md
110 | lint-commits:
111 | timeout-minutes: 10
112 | runs-on: ubuntu-latest
113 | if: github.event_name == 'pull_request' && github.actor != 'dependabot[bot]'
114 | steps:
115 | - uses: actions/checkout@v4
116 | with:
117 | fetch-depth: 0
118 | - uses: actions/setup-node@v4
119 | with:
120 | node-version: '18.x'
121 | cache: 'yarn'
122 | - run: yarn install --immutable
123 | - run: yarn commitlint --from ${{ github.event.pull_request.base.sha }} --to ${{ github.event.pull_request.head.sha }} --verbose
124 |
--------------------------------------------------------------------------------
/.github/workflows/create-release.yml:
--------------------------------------------------------------------------------
1 | name: Create Release
2 |
3 | on:
4 | push:
5 | branches: [master]
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
9 | cancel-in-progress: true
10 |
11 | jobs:
12 | release-please:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: google-github-actions/release-please-action@v4
16 | id: release
17 | with:
18 | token: ${{ secrets.BOT_GITHUB_TOKEN }}
19 | release-type: node
20 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Generate Docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'release-please--*'
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
10 | cancel-in-progress: true
11 |
12 | jobs:
13 | config:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Extract branch name
17 | shell: bash
18 | run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
19 | id: extract_branch
20 | - uses: actions/checkout@v4
21 | with:
22 | token: ${{ secrets.BOT_GITHUB_TOKEN }}
23 | ref: ${{ steps.extract_branch.outputs.branch }}
24 | - uses: actions/setup-node@v4
25 | with:
26 | node-version: '18.x'
27 | cache: 'yarn'
28 | - run: yarn install --immutable
29 | - run: yarn ci:generate-config-docs
30 | - run: yarn prettier:fix
31 | - name: Commit and push changes
32 | run: |
33 | if [[ `git status --porcelain` ]]; then
34 | git config --local user.email "hello@tiltcamp.com"
35 | git config --local user.name "TiltCamp"
36 | git add CONFIG.md
37 | git commit -m "docs: generate config docs"
38 | git push
39 | else
40 | echo "No changes to commit"
41 | fi
42 |
--------------------------------------------------------------------------------
/.github/workflows/perform-release.yml:
--------------------------------------------------------------------------------
1 | name: Perform Release
2 |
3 | on:
4 | push:
5 | tags:
6 | - '*'
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
10 | cancel-in-progress: true
11 |
12 | jobs:
13 | docker:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Checkout
17 | uses: actions/checkout@v4
18 | - uses: actions/setup-node@v4
19 | with:
20 | node-version: '18.x'
21 | cache: 'yarn'
22 | - run: yarn install --immutable
23 | - run: yarn build
24 | - name: Set up QEMU
25 | uses: docker/setup-qemu-action@v3
26 | with:
27 | platforms: 'arm64,arm'
28 | - name: Set up Docker Buildx
29 | uses: docker/setup-buildx-action@v3
30 | - name: Login to DockerHub
31 | uses: docker/login-action@v3
32 | with:
33 | username: mattwebbio
34 | password: ${{ secrets.DOCKERHUB_TOKEN }}
35 | - name: Login to GitHub Container Registry
36 | uses: docker/login-action@v3
37 | with:
38 | registry: ghcr.io
39 | username: ${{ github.repository_owner }}
40 | password: ${{ secrets.GITHUB_TOKEN }}
41 | - name: Generate tags (alpine)
42 | id: alpine_docker_tags
43 | uses: docker/metadata-action@v5
44 | with:
45 | images: |
46 | mattwebbio/orbital-sync
47 | ghcr.io/mattwebbio/orbital-sync
48 | tags: |
49 | type=schedule
50 | type=ref,event=branch
51 | type=ref,event=pr
52 | type=semver,pattern={{version}}
53 | type=semver,pattern={{major}}.{{minor}}
54 | type=semver,pattern={{major}}
55 | type=sha
56 | - name: Build and push (alpine)
57 | uses: docker/build-push-action@v6
58 | with:
59 | context: .
60 | cache-from: type=gha
61 | cache-to: type=gha,mode=max
62 | build-args: |
63 | BASE_IMAGE=node:18-alpine
64 | push: true
65 | tags: ${{ steps.alpine_docker_tags.outputs.tags }}
66 | labels: ${{ steps.alpine_docker_tags.outputs.labels }}
67 | platforms: |
68 | linux/amd64
69 | linux/arm64
70 | linux/arm
71 | - name: Generate tags (distroless)
72 | id: distroless_docker_tags
73 | uses: docker/metadata-action@v5
74 | with:
75 | images: |
76 | mattwebbio/orbital-sync
77 | ghcr.io/mattwebbio/orbital-sync
78 | tags: |
79 | type=schedule
80 | type=ref,event=branch
81 | type=ref,event=pr
82 | type=semver,pattern={{version}}
83 | type=semver,pattern={{major}}.{{minor}}
84 | type=semver,pattern={{major}}
85 | type=sha
86 | flavor: |
87 | suffix=-distroless,onlatest=true
88 | - name: Build and push (distroless)
89 | uses: docker/build-push-action@v6
90 | with:
91 | context: .
92 | cache-from: type=gha
93 | cache-to: type=gha,mode=max
94 | build-args: |
95 | BASE_IMAGE=gcr.io/distroless/nodejs18:latest
96 | push: true
97 | tags: ${{ steps.distroless_docker_tags.outputs.tags }}
98 | labels: ${{ steps.distroless_docker_tags.outputs.labels }}
99 | platforms: |
100 | linux/amd64
101 | linux/arm64
102 | linux/arm
103 | - name: Update DockerHub description
104 | uses: peter-evans/dockerhub-description@v4
105 | with:
106 | username: mattwebbio
107 | password: ${{ secrets.DOCKERHUB_TOKEN }}
108 | npm:
109 | runs-on: ubuntu-latest
110 | steps:
111 | - uses: actions/checkout@v4
112 | - uses: actions/setup-node@v4
113 | with:
114 | node-version: '18.x'
115 | cache: 'yarn'
116 | - run: yarn install --immutable
117 | - run: yarn build
118 | - run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
119 | - run: npm publish --registry https://registry.npmjs.org
120 | - run: echo "//npm.pkg.github.com/:_authToken=${{ secrets.GITHUB_TOKEN }}" > ~/.npmrc
121 | - run: npm pkg set 'name'='@mattwebbio/orbital-sync'
122 | - run: npm publish --registry https://npm.pkg.github.com --scope=@mattwebbio
123 |
--------------------------------------------------------------------------------
/.github/workflows/pr.yml:
--------------------------------------------------------------------------------
1 | name: PR Actions
2 |
3 | on:
4 | pull_request_target:
5 |
6 | concurrency:
7 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
8 | cancel-in-progress: true
9 |
10 | jobs:
11 | automerge:
12 | timeout-minutes: 10
13 | runs-on: ubuntu-latest
14 | if: ${{ github.actor == 'dependabot[bot]' }}
15 | steps:
16 | - name: Enable auto-merge on PR
17 | run: gh pr merge --auto --merge "$PR_URL"
18 | env:
19 | PR_URL: ${{github.event.pull_request.html_url}}
20 | GH_TOKEN: '${{secrets.BOT_GITHUB_TOKEN}}'
21 | - name: Approve PR
22 | run: gh pr review --approve "$PR_URL"
23 | env:
24 | PR_URL: ${{github.event.pull_request.html_url}}
25 | GH_TOKEN: '${{secrets.BOT_GITHUB_TOKEN}}'
26 |
--------------------------------------------------------------------------------
/.github/workflows/prerelease.yml:
--------------------------------------------------------------------------------
1 | name: Prerelease
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master]
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
10 | cancel-in-progress: true
11 |
12 | jobs:
13 | docker:
14 | runs-on: ubuntu-latest
15 | if: ${{ github.actor != 'dependabot[bot]' && github.actor != 'tiltcamp-bot' }}
16 | permissions:
17 | packages: write
18 | steps:
19 | - name: Checkout
20 | uses: actions/checkout@v4
21 | - uses: actions/setup-node@v4
22 | with:
23 | node-version: '18.x'
24 | cache: 'yarn'
25 | - run: yarn install --immutable
26 | - run: yarn build
27 | - name: Set up QEMU
28 | uses: docker/setup-qemu-action@v3
29 | with:
30 | platforms: 'arm64,arm'
31 | - name: Set up Docker Buildx
32 | uses: docker/setup-buildx-action@v3
33 | - name: Login to GitHub Container Registry
34 | uses: docker/login-action@v3
35 | with:
36 | registry: ghcr.io
37 | username: ${{ github.repository_owner }}
38 | password: ${{ secrets.GITHUB_TOKEN }}
39 | - name: Generate tags (alpine)
40 | id: alpine_docker_tags
41 | uses: docker/metadata-action@v5
42 | with:
43 | images: |
44 | ghcr.io/mattwebbio/orbital-sync
45 | tags: |
46 | type=ref,event=branch
47 | type=ref,event=pr
48 | type=sha
49 | - name: Build and push (alpine)
50 | uses: docker/build-push-action@v6
51 | with:
52 | context: .
53 | cache-from: type=gha
54 | cache-to: type=gha,mode=max
55 | build-args: |
56 | BASE_IMAGE=node:18-alpine
57 | push: true
58 | tags: ${{ steps.alpine_docker_tags.outputs.tags }}
59 | labels: ${{ steps.alpine_docker_tags.outputs.labels }}
60 | platforms: |
61 | linux/amd64
62 | linux/arm64
63 | linux/arm
64 | - name: Generate tags (distroless)
65 | id: distroless_docker_tags
66 | uses: docker/metadata-action@v5
67 | with:
68 | images: |
69 | ghcr.io/mattwebbio/orbital-sync
70 | tags: |
71 | type=ref,event=branch
72 | type=ref,event=pr
73 | type=sha
74 | flavor: |
75 | suffix=-distroless,onlatest=true
76 | - name: Build and push (distroless)
77 | uses: docker/build-push-action@v6
78 | with:
79 | context: .
80 | cache-from: type=gha
81 | cache-to: type=gha,mode=max
82 | build-args: |
83 | BASE_IMAGE=gcr.io/distroless/nodejs18:latest
84 | push: true
85 | tags: ${{ steps.distroless_docker_tags.outputs.tags }}
86 | labels: ${{ steps.distroless_docker_tags.outputs.labels }}
87 | platforms: |
88 | linux/amd64
89 | linux/arm64
90 | linux/arm
91 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .idea
3 | coverage
4 | dist
5 |
6 | .pnp.*
7 | .yarn/*
8 | !.yarn/patches
9 | !.yarn/plugins
10 | !.yarn/releases
11 | !.yarn/sdks
12 | !.yarn/versions
13 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | .yarn
2 | coverage
3 | node_modules
4 | dist
5 | CHANGELOG.md
6 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "semi": true,
3 | "trailingComma": "none",
4 | "singleQuote": true,
5 | "printWidth": 90
6 | }
7 |
--------------------------------------------------------------------------------
/.tool-versions:
--------------------------------------------------------------------------------
1 | nodejs 18.15.0
2 |
--------------------------------------------------------------------------------
/.yarnrc.yml:
--------------------------------------------------------------------------------
1 | nodeLinker: node-modules
2 |
3 | yarnPath: .yarn/releases/yarn-4.2.2.cjs
4 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ### Changelog
2 |
3 |
4 | ## [1.8.4](https://github.com/mattwebbio/orbital-sync/compare/v1.8.3...v1.8.4) (2025-01-11)
5 |
6 |
7 | ### Bug Fixes
8 |
9 | * bump @honeybadger-io/js from 6.10.0 to 6.10.1 ([5724009](https://github.com/mattwebbio/orbital-sync/commit/5724009853d88c7783eff9ab0d4f1197a994db78))
10 | * bump fetch-cookie from 3.0.1 to 3.1.0 ([5df230b](https://github.com/mattwebbio/orbital-sync/commit/5df230bebd268a755bc1c8e85b21244bf7cd6097))
11 | * bump node-html-parser from 6.1.13 to 7.0.1 ([4516607](https://github.com/mattwebbio/orbital-sync/commit/451660723238a132b44de259bf832ac263979f2c))
12 |
13 | ## [1.8.3](https://github.com/mattwebbio/orbital-sync/compare/v1.8.2...v1.8.3) (2024-12-26)
14 |
15 |
16 | ### Bug Fixes
17 |
18 | * bump @eslint/plugin-kit from 0.2.0 to 0.2.4 ([bf92191](https://github.com/mattwebbio/orbital-sync/commit/bf921912c13c2a900cfbd2c17612ff0f8be2765a))
19 | * bump @honeybadger-io/js from 6.9.3 to 6.10.0 ([b3219b6](https://github.com/mattwebbio/orbital-sync/commit/b3219b66fce6af5df03ed929dd8008c9cdbb161d))
20 | * bump cross-spawn from 7.0.3 to 7.0.6 ([ea0bd0e](https://github.com/mattwebbio/orbital-sync/commit/ea0bd0eb5e61bc2baf5b50960780c6e628a29aff))
21 | * bump json-schema-to-ts from 3.1.0 to 3.1.1 ([454660c](https://github.com/mattwebbio/orbital-sync/commit/454660cbdbe681bb06a11b46a52773ed13f5e83e))
22 | * bump micromatch from 4.0.5 to 4.0.8 ([45dee5e](https://github.com/mattwebbio/orbital-sync/commit/45dee5ee690308c58d39a45f7aa682dba884e063))
23 | * bump nodemailer from 6.9.14 to 6.9.15 ([47c531a](https://github.com/mattwebbio/orbital-sync/commit/47c531afed9ab1781527402dee6ac63a636e48e2))
24 |
25 | ## [1.8.2](https://github.com/mattwebbio/orbital-sync/compare/v1.8.1...v1.8.2) (2024-08-05)
26 |
27 |
28 | ### Bug Fixes
29 |
30 | * bump @honeybadger-io/js from 6.8.3 to 6.9.1 ([0565088](https://github.com/mattwebbio/orbital-sync/commit/056508818cf312e3b4a0cef1912f4671be954a71))
31 | * bump @honeybadger-io/js from 6.9.1 to 6.9.2 ([a070ba1](https://github.com/mattwebbio/orbital-sync/commit/a070ba13a5cef05ee55a219549d73302d3a4aea3))
32 | * bump @honeybadger-io/js from 6.9.2 to 6.9.3 ([3be6d95](https://github.com/mattwebbio/orbital-sync/commit/3be6d959c9e2fa2260f5861f1a780c7dc2e74ed7))
33 | * bump @sentry/node from 7.108.0 to 7.109.0 ([b8263a9](https://github.com/mattwebbio/orbital-sync/commit/b8263a9e7b1dbac60e15c44b5e9c650719dac7b5))
34 | * bump @sentry/node from 7.109.0 to 7.111.0 ([ace2dbb](https://github.com/mattwebbio/orbital-sync/commit/ace2dbb286939853cdd6ca99fccd798a1171fa9e))
35 | * bump @sentry/node from 7.111.0 to 7.112.2 ([6642c76](https://github.com/mattwebbio/orbital-sync/commit/6642c76295126cbc25e00a7acc5e68285b235f52))
36 | * bump @sentry/node from 7.112.2 to 7.113.0 ([25cecbf](https://github.com/mattwebbio/orbital-sync/commit/25cecbf584a428926efc2cd29a3b871ce80e8804))
37 | * bump @sentry/node from 7.113.0 to 7.114.0 ([c1d3692](https://github.com/mattwebbio/orbital-sync/commit/c1d3692d14680e20b769e21486e7b183a03ca746))
38 | * bump braces from 3.0.2 to 3.0.3 ([cacc15f](https://github.com/mattwebbio/orbital-sync/commit/cacc15fd617073ea3ddbaa86f31407f1861f0e3d))
39 | * bump json-schema-to-ts from 3.0.1 to 3.1.0 ([f76d861](https://github.com/mattwebbio/orbital-sync/commit/f76d861232da4bda67fb25702225133711d2d98d))
40 | * bump node-html-parser from 6.1.12 to 6.1.13 ([aa4c6ae](https://github.com/mattwebbio/orbital-sync/commit/aa4c6aed19cb65ff09adc229342eea5a467b1590))
41 | * bump nodemailer from 6.9.13 to 6.9.14 ([17ea56a](https://github.com/mattwebbio/orbital-sync/commit/17ea56ab6fb8d08b1576166d027508b6e60b129f))
42 | * error "Not authorized" when uploading archives ([c9eea45](https://github.com/mattwebbio/orbital-sync/commit/c9eea45e6b2a806443375d35a5acad3fa53785d3))
43 |
44 | ## [1.8.1](https://github.com/mattwebbio/orbital-sync/compare/v1.8.0...v1.8.1) (2024-03-25)
45 |
46 |
47 | ### Bug Fixes
48 |
49 | * include all `dist` output in NPM release ([fbc9b05](https://github.com/mattwebbio/orbital-sync/commit/fbc9b059da03b33f02cbe84007958bad3efbd930))
50 |
51 | ## [1.8.0](https://github.com/mattwebbio/orbital-sync/compare/v1.7.0...v1.8.0) (2024-03-25)
52 |
53 |
54 | ### Features
55 |
56 | * add distroless image ([a93c38e](https://github.com/mattwebbio/orbital-sync/commit/a93c38e9b0f71033e5ad6bedf00a4bb67c3c2478))
57 |
58 |
59 | ### Bug Fixes
60 |
61 | * bump @sentry/node from 7.107.0 to 7.108.0 ([97eeec6](https://github.com/mattwebbio/orbital-sync/commit/97eeec6bcbbce4a235186518b9b38e7b60d5efbc))
62 | * bump json-schema-to-ts from 3.0.0 to 3.0.1 ([9f87443](https://github.com/mattwebbio/orbital-sync/commit/9f874439dd329a7914cda7f5616386de131ff58e))
63 | * bump nodemailer from 6.9.12 to 6.9.13 ([897e9b6](https://github.com/mattwebbio/orbital-sync/commit/897e9b6f53c535aea8c12c8dd8db01776b6b342c))
64 |
65 | ## [1.7.0](https://github.com/mattwebbio/orbital-sync/compare/v1.6.0...v1.7.0) (2024-03-24)
66 |
67 |
68 | ### Features
69 |
70 | * read config values from files ([9febf7b](https://github.com/mattwebbio/orbital-sync/commit/9febf7bf93639fcd259f18d7d094066dca959655))
71 |
72 | ## [1.6.0](https://github.com/mattwebbio/orbital-sync/compare/v1.5.7...v1.6.0) (2024-03-19)
73 |
74 |
75 | ### Features
76 |
77 | * add support for Sentry ([659bc58](https://github.com/mattwebbio/orbital-sync/commit/659bc58d76314ec90afb39d4562206aa8f236f70))
78 |
79 |
80 | ### Bug Fixes
81 |
82 | * bump @honeybadger-io/js from 6.1.1 to 6.8.3 ([d9050f0](https://github.com/mattwebbio/orbital-sync/commit/d9050f0a71862e5e89d2779ea095baa683588a72))
83 | * bump @sentry/node from 7.100.1 to 7.101.1 ([46a8919](https://github.com/mattwebbio/orbital-sync/commit/46a8919434fd3f33441b66b0623da0d4b45e0f8a))
84 | * bump @sentry/node from 7.101.1 to 7.102.1 ([a12d853](https://github.com/mattwebbio/orbital-sync/commit/a12d853dc5cd379368f974c2b35f28a857879bdf))
85 | * bump @sentry/node from 7.102.1 to 7.105.0 ([23e9dee](https://github.com/mattwebbio/orbital-sync/commit/23e9dee2bd457b362c82684217c05d75c387d336))
86 | * bump @sentry/node from 7.105.0 to 7.107.0 ([1989529](https://github.com/mattwebbio/orbital-sync/commit/19895295892f21ef183d573c0b007442990473db))
87 | * bump @sentry/node from 7.99.0 to 7.100.1 ([579c1c7](https://github.com/mattwebbio/orbital-sync/commit/579c1c7ce47311e4686961720132599a6e85b9d3))
88 | * bump fetch-cookie from 2.1.0 to 2.2.0 ([387b626](https://github.com/mattwebbio/orbital-sync/commit/387b6266518a598f0b0fb0fda10d0694d95de7ef))
89 | * bump fetch-cookie from 2.2.0 to 3.0.1 ([56fa414](https://github.com/mattwebbio/orbital-sync/commit/56fa414fa117d80bb49311831abaebc951fe2e31))
90 | * bump fetch-cookie from 2.2.0 to 3.0.1 ([5d5a5cb](https://github.com/mattwebbio/orbital-sync/commit/5d5a5cb9b28f916508c2a4f0c2e353ce14c59116))
91 | * bump fetch-cookie from 2.2.0 to 3.0.1 ([0a4cf5b](https://github.com/mattwebbio/orbital-sync/commit/0a4cf5b7ff647993bb3ebb8abb827b32b53a54c9))
92 | * bump nodemailer from 6.9.10 to 6.9.11 ([d6bd0e0](https://github.com/mattwebbio/orbital-sync/commit/d6bd0e0e275d9c34da42a45e17202d0b7367ea7e))
93 | * bump nodemailer from 6.9.11 to 6.9.12 ([78e7590](https://github.com/mattwebbio/orbital-sync/commit/78e7590d09570068b413bec26f4d41ab44dfd1a1))
94 | * bump nodemailer from 6.9.9 to 6.9.10 ([eb2d2fc](https://github.com/mattwebbio/orbital-sync/commit/eb2d2fc3453e72b1fcf2469c60cb097520321c4f))
95 |
96 | ## [1.5.7](https://github.com/mattwebbio/orbital-sync/compare/v1.5.6...v1.5.7) (2024-02-01)
97 |
98 |
99 | ### Bug Fixes
100 |
101 | * bump nodemailer from 6.9.8 to 6.9.9 ([6b57526](https://github.com/mattwebbio/orbital-sync/commit/6b575269f438808218ae80bda2b9e8d130166917))
102 |
103 | ## [1.5.5](https://github.com/mattwebbio/orbital-sync/compare/v1.5.4...v1.5.5) (2024-02-01)
104 |
105 | ### Bug Fixes
106 |
107 | * bump node-fetch from 3.3.1 to 3.3.2 ([6ba554a](https://github.com/mattwebbio/orbital-sync/commit/6ba554a793f3a991f144727d431049a5dbe57f80))
108 | * bump node-html-parser from 6.1.4 to 6.1.12 ([4121524](https://github.com/mattwebbio/orbital-sync/commit/4121524617814ce58a9f2e437deea738890b7bc4))
109 |
110 | ## [1.5.4](https://github.com/mattwebbio/orbital-sync/compare/v1.5.3...v1.5.4) (2024-02-01)
111 |
112 |
113 | ### Bug Fixes
114 |
115 | * bump nodemailer and @types/nodemailer ([99982ff](https://github.com/mattwebbio/orbital-sync/commit/99982ff5763e910a70d5a0b5f5c59c59a17cedf7))
116 |
117 | ## [1.5.3](https://github.com/mattwebbio/orbital-sync/compare/v1.5.2...v1.5.3) (2024-01-31)
118 |
119 |
120 | ### Bug Fixes
121 |
122 | * allow SMTP without credentials ([155d367](https://github.com/mattwebbio/orbital-sync/commit/155d3678d61d3e2b003c879cb08a8b63c8c562c6))
123 |
124 | ## [1.5.2](https://github.com/mattwebbio/orbital-sync/compare/v1.5.1...v1.5.2) (2023-03-23)
125 |
126 | - Use project-scoped typescript during release [`0b12b2d`](https://github.com/mattwebbio/orbital-sync/commit/0b12b2d306adf33b4a4e448da1d07a778687f3f6)
127 |
128 | ## [1.5.1](https://github.com/mattwebbio/orbital-sync/compare/v1.5.1-0...v1.5.1) (2023-03-23)
129 |
130 | - Bump nodemailer from 6.9.0 to 6.9.1 [`#141`](https://github.com/mattwebbio/orbital-sync/pull/141)
131 | - Bump @honeybadger-io/js from 4.9.3 to 5.1.1 [`#148`](https://github.com/mattwebbio/orbital-sync/pull/148)
132 | - Fix upload success expectation [`#163`](https://github.com/mattwebbio/orbital-sync/pull/163)
133 | - Bump @honeybadger-io/js from 4.8.1 to 4.9.3 [`#130`](https://github.com/mattwebbio/orbital-sync/pull/130)
134 | - Expect new response message [`1c527be`](https://github.com/mattwebbio/orbital-sync/commit/1c527bec4cb3d17255a2608f4b2130af272e5930)
135 | - Fix timestamp expectations for latest Node [`4664c60`](https://github.com/mattwebbio/orbital-sync/commit/4664c602a12c4ed38662f3b5114da6bfa5428a75)
136 |
137 | ## [1.5.0](https://github.com/mattwebbio/orbital-sync/compare/v1.4.1-0...v1.5.0) (2022-11-29)
138 |
139 | - Omit false form fields when creating backup [`#87`](https://github.com/mattwebbio/orbital-sync/pull/87)
140 | - Bump typescript from 4.8.4 to 4.9.3 [`#90`](https://github.com/mattwebbio/orbital-sync/pull/90)
141 |
142 | ## [1.4.0](https://github.com/mattwebbio/orbital-sync/compare/v1.4.0-1...v1.4.0) (2022-11-05)
143 |
144 | - Configure remote paths [`#68`](https://github.com/mattwebbio/orbital-sync/pull/68)
145 | - Add dev container [`#74`](https://github.com/mattwebbio/orbital-sync/pull/74)
146 | - Refactor Host to class & add default path [`cc1237e`](https://github.com/mattwebbio/orbital-sync/commit/cc1237ed3c18a4437175248799be52dcdb229b4c)
147 | - Process paths from env, add tests [`06492e5`](https://github.com/mattwebbio/orbital-sync/commit/06492e512f2d94313e622987558c464e72667643)
148 | - remove static /admin path from all requests and set default [`58036dc`](https://github.com/mattwebbio/orbital-sync/commit/58036dc8ed5a009951936a3ff227c0ea6480b692)
149 |
150 | ## [1.3.0](https://github.com/mattwebbio/orbital-sync/compare/v1.2.2-beta.0...v1.3.0) (2022-10-06)
151 |
152 | - Update gravity after sync [`#50`](https://github.com/mattwebbio/orbital-sync/pull/50)
153 | - Add gravity update [`2066860`](https://github.com/mattwebbio/orbital-sync/commit/20668603c869687e1bfa14d32187e7274dfec2e9)
154 | - Make client error wording consistent [`6cd5a32`](https://github.com/mattwebbio/orbital-sync/commit/6cd5a32fa5f02996b3689431c6e7aa860a1ac9cd)
155 | - Stringify non-string logging [`21737a9`](https://github.com/mattwebbio/orbital-sync/commit/21737a96fee2de8c0db50771f3bfe12cee637144)
156 |
157 | ## [1.2.1](https://github.com/mattwebbio/orbital-sync/compare/v1.2.0...v1.2.1) (2022-09-30)
158 |
159 | - Bump nodemailer from 6.7.8 to 6.8.0 [`#48`](https://github.com/mattwebbio/orbital-sync/pull/48)
160 | - Bump typescript from 4.8.3 to 4.8.4 [`#42`](https://github.com/mattwebbio/orbital-sync/pull/42)
161 | - Bump node-html-parser from 6.0.0 to 6.1.1 [`#45`](https://github.com/mattwebbio/orbital-sync/pull/45)
162 | - Bump @honeybadger-io/js from 4.1.3 to 4.3.1 [`#44`](https://github.com/mattwebbio/orbital-sync/pull/44)
163 |
164 | ## [1.2.0](https://github.com/mattwebbio/orbital-sync/compare/v1.1.3-beta.2...v1.2.0) (2022-09-30)
165 |
166 | - Add notifications and better error handling [`#31`](https://github.com/mattwebbio/orbital-sync/pull/31)
167 | - Add SMTP notifications [`80614fb`](https://github.com/mattwebbio/orbital-sync/commit/80614fba02a62aeab32b0ecfefd65366ea3622b8)
168 | - Simplify verbose logging logic [`73fd7b1`](https://github.com/mattwebbio/orbital-sync/commit/73fd7b152efbeabf6c6cec69c75611d098038a78)
169 |
170 | ## [1.0.0](https://github.com/mattwebbio/orbital-sync/compare/v0.0.8...v1.0.0) (2022-08-30)
171 |
172 | - Refactor `Config` for testability [`81df05f`](https://github.com/mattwebbio/orbital-sync/commit/81df05fc84c8158164075d9adeb32c832d73a276)
173 |
174 | ## 0.0.2 (2022-08-25)
175 |
176 | - Initial commit [`023a0e9`](https://github.com/mattwebbio/orbital-sync/commit/023a0e9e0e1f0ed43963ca87532b2c02bc42211f)
177 | - Add NPM package publish workflow [`c9aedce`](https://github.com/mattwebbio/orbital-sync/commit/c9aedcef8a0ada726061ee3623a4a963d9dd37f0)
178 | - Add basic CI workflow [`b148deb`](https://github.com/mattwebbio/orbital-sync/commit/b148deb01bcad5d2b9abfb0abaa636cae828203d)
179 |
--------------------------------------------------------------------------------
/CNAME:
--------------------------------------------------------------------------------
1 | orbitalsync.com
--------------------------------------------------------------------------------
/CONFIG.md:
--------------------------------------------------------------------------------
1 | [](https://orbitalsync.com)
2 | [](https://github.com/mattwebbio/orbital-sync)
3 | [](https://github.com/mattwebbio/orbital-sync)
4 |
5 | [Installation](https://orbitalsync.com/#getting-started) | [Configuration](https://orbitalsync.com/CONFIG.html) | [Changelog](CHANGELOG.md)
6 |
7 |
8 |
9 | # Orbital Sync: Configuration
10 |
11 | See [additional notes](#additional-notes) at the bottom for information such as how to use [Docker secrets](#docker-secrets).
12 |
13 |
14 |
15 | | Environment Variable | Required | Default | Example | Description |
16 | | -------------------- | -------- | ------- | --------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
17 | | `UPDATE_GRAVITY` | No | `true` | `true`/`false` | Triggers a gravity update after a backup has been uploaded to a secondary Pi-hole. This updates adlists and restarts gravity. |
18 | | `VERBOSE` | No | `false` | `true`/`false` | Increases the verbosity of log output. Useful for debugging. |
19 | | `RUN_ONCE` | No | `false` | `true`/`false` | By default, Orbital Sync runs indefinitely until stopped. Setting this to `true` forces it to exit immediately after the first sync. |
20 | | `INTERVAL_MINUTES` | No | `60` | Any non-zero positive integer, for example `5`, `30`, or `1440` | How long to wait between synchronizations. Defaults to sixty minutes. Remember that the DNS server on your secondary servers restarts every time a sync is performed. |
21 |
22 | ## Primary Host
23 |
24 | The primary Pi-hole that data will be copied from.
25 |
26 | | Environment Variable | Required | Default | Example | Description |
27 | | ----------------------- | -------- | ------- | ---------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- |
28 | | `PRIMARY_HOST_BASE_URL` | Yes | N/A | `http://192.168.1.2` or `https://pihole.example.com` | The base URL of your Pi-hole, including the scheme (HTTP or HTTPS) and port but not including a following slash. |
29 | | `PRIMARY_HOST_PASSWORD` | Yes | N/A | `mypassword` | The password used to log in to the admin interface. |
30 | | `PRIMARY_HOST_PATH` | No | N/A | `/` or `/apps/pi-hole` | The path to be appended to your base URL. The default Pi-hole path is `/admin`, which is added automatically. |
31 |
32 | ## Secondary Hosts
33 |
34 | Secondary Pi-holes that data will be copied to.
35 |
36 | Replace `(#)` with a number, starting at 1, to add multiple. Numbers must be sequential (i.e. `SECONDARY_HOSTS_1_BASE_URL`, `SECONDARY_HOSTS_2_BASE_URL`, `SECONDARY_HOSTS_3_BASE_URL`, and so on). Any gaps (for example, 3 to 5 skipping 4) will result in configuration after the gap being skipped.
37 |
38 | | Environment Variable | Required | Default | Example | Description |
39 | | ------------------------------ | -------- | ------- | ----------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- |
40 | | `SECONDARY_HOSTS_(#)_BASE_URL` | Yes | N/A | `http://192.168.1.3` or `https://pihole2.example.com` | The base URL of your secondary Pi-hole, including the scheme (HTTP or HTTPS) and port but not including a following slash. |
41 | | `SECONDARY_HOSTS_(#)_PASSWORD` | Yes | N/A | `mypassword2` | The password used to log in to the admin interface. |
42 | | `SECONDARY_HOSTS_(#)_PATH` | No | N/A | `/` or `/apps/pi-hole` | The path to be appended to your secondary base URL. The default Pi-hole path is `/admin`, which is added automatically. |
43 |
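As a sketch of how the sequential numbering fits together (hostnames, passwords, and the interval below are placeholders), a Docker Compose service using the variables above might look like:

```yaml
services:
  orbital-sync:
    image: mattwebbio/orbital-sync:latest
    environment:
      # Primary Pi-hole that data is copied from
      - PRIMARY_HOST_BASE_URL=https://pihole1.example.com
      - PRIMARY_HOST_PASSWORD=mypassword
      # Secondary Pi-holes, numbered sequentially starting at 1
      - SECONDARY_HOSTS_1_BASE_URL=https://pihole2.example.com
      - SECONDARY_HOSTS_1_PASSWORD=mypassword2
      - SECONDARY_HOSTS_2_BASE_URL=https://pihole3.example.com
      - SECONDARY_HOSTS_2_PASSWORD=mypassword3
      # Sync every 30 minutes instead of the default 60
      - INTERVAL_MINUTES=30
```
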
44 | ## Sync
45 |
46 | What data to copy from the primary Pi-hole to the secondary Pi-holes.
47 |
48 | ### V5
49 |
50 | Sync options for Pi-hole v5.x.
51 |
52 | | Environment Variable | Required | Default | Example | Description |
53 | | ----------------------------- | -------- | ------- | -------------- | ----------------------------------------------------------- |
54 | | `SYNC_V5_WHITELIST` | No | `true` | `true`/`false` | Copies the whitelist |
55 | | `SYNC_V5_REGEX_WHITELIST` | No | `true` | `true`/`false` | Copies the regex whitelist |
56 | | `SYNC_V5_BLACKLIST` | No | `true` | `true`/`false` | Copies the blacklist |
57 | | `SYNC_V5_REGEX_LIST` | No | `true` | `true`/`false` | Copies the regex blacklist |
58 | | `SYNC_V5_AD_LIST` | No | `true` | `true`/`false` | Copies adlists |
59 | | `SYNC_V5_CLIENT` | No | `true` | `true`/`false` | Copies clients |
60 | | `SYNC_V5_GROUP` | No | `true` | `true`/`false` | Copies groups |
61 | | `SYNC_V5_AUDIT_LOG` | No | `false` | `true`/`false` | Copies the audit log |
62 | | `SYNC_V5_STATIC_DHCP_LEASES` | No | `false` | `true`/`false` | Copies static DHCP leases |
63 | | `SYNC_V5_LOCAL_DNS_RECORDS` | No | `true` | `true`/`false` | Copies local DNS records |
64 | | `SYNC_V5_LOCAL_CNAME_RECORDS` | No | `true` | `true`/`false` | Copies local CNAME records |
65 | | `SYNC_V5_FLUSH_TABLES` | No | `true` | `true`/`false` | Clears existing data on the secondary (copy target) Pi-hole |
66 |
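As an illustrative fragment (the chosen options are arbitrary), overriding a few of the defaults above in a Compose `environment` block might look like:

```yaml
environment:
  # Also copy static DHCP leases (off by default)
  - SYNC_V5_STATIC_DHCP_LEASES=true
  # Skip copying clients (on by default)
  - SYNC_V5_CLIENT=false
```
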
67 | ## Notify
68 |
69 | When to send notifications and how to send them.
70 |
71 | | Environment Variable | Required | Default | Example | Description |
72 | | -------------------- | -------- | ------- | -------------- | ----------------------------------------------------- |
73 | | `NOTIFY_ON_SUCCESS` | No | `false` | `true`/`false` | Send a notification if a sync completes successfully. |
74 | | `NOTIFY_ON_FAILURE` | No | `true` | `true`/`false` | Send a notification if a sync fails for any reason. |
75 |
76 | ### Smtp
77 |
78 | Send notifications via email using SMTP
79 |
80 | | Environment Variable | Required | Default | Example | Description |
81 | | ---------------------- | -------- | ------- | ------------------------- | -------------------------------------------------------------------------- |
82 | | `NOTIFY_SMTP_ENABLED` | No | `false` | `true`/`false` | Send notifications via email. |
83 | | `NOTIFY_SMTP_FROM` | No | N/A | `orbitalsync@example.com` | The email address to send notifications from. |
84 | | `NOTIFY_SMTP_TO` | No | N/A | `you@example.com` | The email address to send notifications to. Can be a comma-separated list. |
85 | | `NOTIFY_SMTP_HOST` | No | N/A | `smtp.example.com` | The SMTP server host. |
86 | | `NOTIFY_SMTP_PORT` | No | N/A | `25`/`587`/`465` | The SMTP server port. |
87 | | `NOTIFY_SMTP_TLS` | No | `false` | `true`/`false` | Should usually be set to true if using port 465. Otherwise, leave as is. |
88 | | `NOTIFY_SMTP_USER` | No | N/A | `orbitalsync@example.com` | The SMTP account username. |
89 | | `NOTIFY_SMTP_PASSWORD` | No | N/A | `yourpasswordhere` | The SMTP account password. |
90 |
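As an illustration (the server, addresses, and credentials are placeholders), enabling email notifications for failed syncs with the options above might look like:

```yaml
environment:
  - NOTIFY_ON_FAILURE=true
  - NOTIFY_SMTP_ENABLED=true
  - NOTIFY_SMTP_FROM=orbitalsync@example.com
  - NOTIFY_SMTP_TO=you@example.com
  - NOTIFY_SMTP_HOST=smtp.example.com
  - NOTIFY_SMTP_PORT=587
  - NOTIFY_SMTP_USER=orbitalsync@example.com
  - NOTIFY_SMTP_PASSWORD=yourpasswordhere
```
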
91 | ### Exceptions
92 |
93 | Log exceptions to [Honeybadger](https://www.honeybadger.io) or [Sentry](http://sentry.io/). Used mostly for development or debugging.
94 |
95 | | Environment Variable | Required | Default | Example | Description |
96 | | --------------------------------------- | -------- | ------- | ----------------------------------- | -------------------------------------------------------------------------------------------------- |
97 | | `NOTIFY_EXCEPTIONS_HONEYBADGER_API_KEY` | No | N/A | `hbp_xxxxxxxxxxxxxxxxxx` | Set to use Honeybadger for proper exception recording; mostly useful for development or debugging. |
98 | | `NOTIFY_EXCEPTIONS_SENTRY_DSN` | No | N/A | `https://key@o0.ingest.sentry.io/0` | Set to use Sentry for proper exception recording; mostly useful for development or debugging. |
99 |
100 |
101 |
102 | # Additional Notes
103 |
104 | ## Docker Secrets
105 |
106 | All above configuration options can be set with Docker secrets. In other words, by appending any environment variable with the `_FILE` suffix, you can provide the path to a file that contains the value. For example, `PRIMARY_HOST_PASSWORD_FILE=/run/secrets/pihole_password` would read the primary host password from the file `/run/secrets/pihole_password`. In practice, a `docker-compose.yml` for this configuration might look like:
107 |
108 | ```yaml
109 | services:
110 | orbital-sync:
111 | image: mattwebbio/orbital-sync:latest
112 | secrets:
113 | - pihole1_password
114 | - pihole2_password
115 | environment:
116 | - PRIMARY_HOST_BASE_URL=https://pihole1.mydomain.com
117 | - PRIMARY_HOST_PASSWORD_FILE=/run/secrets/pihole1_password
118 | - SECONDARY_HOSTS_1_BASE_URL=https://pihole2.mydomain.com
119 | - SECONDARY_HOSTS_1_PASSWORD_FILE=/run/secrets/pihole2_password
120 | secrets:
121 | pihole1_password:
122 | external: true
123 | pihole2_password:
124 | external: true
125 | ```
126 |
127 | If both the `_FILE` and non-`_FILE` versions of an environment variable are set, the `_FILE` version will take precedence.
128 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG BASE_IMAGE
2 |
3 |
4 | FROM node:18-alpine as install
5 | ENV NODE_ENV=production
6 |
7 | WORKDIR /usr/src/app
8 | COPY package.json yarn.lock .yarnrc.yml ./
9 | COPY .yarn/releases/ .yarn/releases/
10 | RUN yarn workspaces focus --all --production
11 |
12 |
13 | FROM ${BASE_IMAGE}
14 | ENV NODE_ENV=production
15 |
16 | WORKDIR /usr/src/app
17 | COPY package.json ./
18 | COPY dist/ dist/
19 | COPY --from=install /usr/src/app/node_modules ./node_modules
20 |
21 | ENV PATH=$PATH:/nodejs/bin
22 | ENTRYPOINT [ "node" ]
23 | CMD [ "dist/index.js" ]
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Matt Webb
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://github.com/mattwebbio/orbital-sync)
2 | [](https://github.com/mattwebbio/orbital-sync)
3 |
4 |
5 |
6 | # Orbital Sync
7 |
8 | This project has been archived. For the announcement, see [here](https://github.com/mattwebbio/orbital-sync/issues/190#issuecomment-2766470506).
9 |
10 | For archival purposes, the previous version of this README can be found [here](https://github.com/mattwebbio/orbital-sync/blob/a5028a474c0ab3ec85fb3f155b5fd0c1349ba6f3/README.md) and the configuration instructions [here](https://github.com/mattwebbio/orbital-sync/blob/a5028a474c0ab3ec85fb3f155b5fd0c1349ba6f3/CONFIG.md).
11 |
12 | Thank you for supporting Orbital Sync!
13 |
--------------------------------------------------------------------------------
/ci/generate-config-docs.ts:
--------------------------------------------------------------------------------
1 | import { markdownTable } from 'markdown-table';
2 | import { Schema } from '../src/config/schema.js';
3 | import { SchemaType, pathToEnvVar } from '../src/config/parse.js';
4 | import { camelToTitleCase } from '../src/util/string-case.js';
5 | import { readFile, writeFile } from 'node:fs/promises';
6 |
7 | function generateDocs(schema: SchemaType, path: string[] = []): string {
8 | if (schema.type === 'object') {
9 | const doc: string[] = [];
10 | const withoutArray = path.filter((p) => p !== '(#)');
11 | const lastKey = withoutArray[withoutArray.length - 1];
12 | if (lastKey)
13 | doc.push(`#${'#'.repeat(withoutArray.length)} ${camelToTitleCase(lastKey)}`);
14 |
15 | if (schema.description) doc.push(schema.description);
16 |
17 | if (path[path.length - 1] === '(#)') {
18 | const example = pathToEnvVar([...path, Object.keys(schema.properties!)[0]]);
19 |
20 | doc.push(
21 |         'Replace `(#)` with a number, starting at 1, to add multiple. Numbers must be sequential (i.e. ' +
22 | `\`${example.replace('(#)', '1')}\`, \`${example.replace('(#)', '2')}\`, ` +
23 |           `\`${example.replace('(#)', '3')}\`, and so on). Any gaps (for ` +
24 | 'example, 3 to 5 skipping 4) will result in configuration after the gap being skipped.'
25 | );
26 | }
27 |
28 | const values = Object.entries(schema.properties!).filter(
29 | ([, value]) =>
30 | typeof value === 'object' && value.type !== 'object' && value.type !== 'array'
31 | ) as [string, SchemaType][];
32 | if (values.length > 0)
33 | doc.push(
34 | markdownTable([
35 | [
36 | // "Title",
37 | 'Environment Variable',
38 | 'Required',
39 | 'Default',
40 | 'Example',
41 | 'Description'
42 | ],
43 | ...values.map(([key, value]) => {
44 | const envVar = pathToEnvVar([...path, key]);
45 | const isRequired =
46 | schema.required?.includes(key) && value.default === undefined
47 | ? 'Yes'
48 | : 'No';
49 | const dflt =
50 | value.default === undefined || value.default === null
51 | ? 'N/A'
52 | : `\`${value.default.toString()}\``;
53 | const example = value.example ?? 'N/A';
54 | const description = value.description ?? 'N/A';
55 |
56 | return [
57 | // camelToTitleCase(key),
58 | `\`${envVar}\``,
59 | isRequired,
60 | dflt,
61 | example,
62 | description
63 | ];
64 | })
65 | ])
66 | );
67 |
68 | const objects = Object.entries(schema.properties!).filter(
69 | ([, value]) =>
70 | typeof value === 'object' && (value.type === 'object' || value.type === 'array')
71 | ) as [string, SchemaType][];
72 | if (objects.length > 0)
73 | doc.push(
74 | ...objects.map(([key, value]) => {
75 | if (value.type === 'object') return generateDocs(value, [...path, key]);
76 | else if (value.type === 'array')
77 | return generateDocs(value.items as SchemaType, [...path, key, '(#)']);
78 |
79 | throw new Error(
80 | `Invalid schema type: ${value.type}. Nested schema must be an object or array.`
81 | );
82 | })
83 | );
84 |
85 | return doc.join('\n\n');
86 | } else
87 | throw new Error(
88 | `Invalid schema type: ${schema.type}. Root schema must be an object.`
89 | );
90 | }
91 |
92 | async function writeDocs(content: string): Promise<void> {
93 | const doc = await readFile('CONFIG.md', 'utf-8');
94 | const startAnchor = '';
95 | const endAnchor = '';
96 |
97 | const start = doc.indexOf(startAnchor);
98 | const end = doc.indexOf(endAnchor);
99 |
100 | if (start === -1 || end === -1)
101 | throw new Error('Could not find start or end of config docs.');
102 |
103 | const newDoc = [doc.slice(0, start + startAnchor.length), content, doc.slice(end)].join(
104 | '\n\n'
105 | );
106 |
107 | await writeFile('CONFIG.md', newDoc);
108 | }
109 |
110 | await writeDocs(generateDocs(Schema));
111 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | flag_management:
2 | default_rules:
3 | statuses:
4 | - type: project
5 | target: auto
6 | threshold: 1%
7 | - type: patch
8 | target: 90%
9 | individual_flags:
10 | - name: unit
11 | - name: integration
12 |
--------------------------------------------------------------------------------
/eslint.config.mjs:
--------------------------------------------------------------------------------
1 | import typescriptEslint from '@typescript-eslint/eslint-plugin';
2 | import tsParser from '@typescript-eslint/parser';
3 | import path from 'node:path';
4 | import { fileURLToPath } from 'node:url';
5 | import js from '@eslint/js';
6 | import { FlatCompat } from '@eslint/eslintrc';
7 |
8 | const __filename = fileURLToPath(import.meta.url);
9 | const __dirname = path.dirname(__filename);
10 | const compat = new FlatCompat({
11 | baseDirectory: __dirname,
12 | recommendedConfig: js.configs.recommended,
13 | allConfig: js.configs.all
14 | });
15 |
16 | export default [
17 | {
18 | ignores: ['**/dist/*', '**/coverage/*']
19 | },
20 | ...compat.extends(
21 | 'eslint:recommended',
22 | 'plugin:@typescript-eslint/recommended',
23 | 'prettier'
24 | ),
25 | {
26 | plugins: {
27 | '@typescript-eslint': typescriptEslint
28 | },
29 | languageOptions: {
30 | parser: tsParser
31 | },
32 | rules: {
33 | 'no-constant-condition': [
34 | 'error',
35 | {
36 | checkLoops: false
37 | }
38 | ]
39 | }
40 | }
41 | ];
42 |
--------------------------------------------------------------------------------
/jest.config.ts:
--------------------------------------------------------------------------------
1 | import type { JestConfigWithTsJest } from 'ts-jest';
2 |
3 | export const jestConfig: JestConfigWithTsJest = {
4 | preset: 'ts-jest/presets/default-esm',
5 | testEnvironment: 'node',
6 | moduleNameMapper: {
7 | '^(\\.{1,2}/.*)\\.js$': '$1'
8 | },
9 | collectCoverage: true,
10 | coverageThreshold: {
11 | global: {
12 | branches: 80,
13 | functions: 80,
14 | lines: 80,
15 | statements: -10
16 | }
17 | },
18 | coveragePathIgnorePatterns: ['/test', '/ci']
19 | };
20 |
21 | export default jestConfig;
22 |
--------------------------------------------------------------------------------
/lefthook.yml:
--------------------------------------------------------------------------------
1 | pre-commit:
2 | parallel: true
3 | commands:
4 | prettier:
5 | run: yarn prettier --ignore-unknown --check {staged_files}
6 | eslint:
7 | glob: '*.ts'
8 | run: yarn eslint {staged_files}
9 | unit-test:
10 | glob: '*.ts'
11 | run: yarn test:unit
12 |
13 | pre-push:
14 | parallel: true
15 | commands:
16 | unit-test:
17 | run: yarn test:unit
18 | prettier:
19 | run: yarn prettier
20 | eslint:
21 | run: yarn lint
22 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "orbital-sync",
3 | "description": "Synchronize multiple Pi-hole instances",
4 | "version": "1.8.4",
5 | "type": "module",
6 | "main": "dist/index.js",
7 | "license": "MIT",
8 | "packageManager": "yarn@4.2.2",
9 | "scripts": {
10 | "build": "yarn tsc",
11 | "ci:generate-config-docs": "node --loader ts-node/esm ci/generate-config-docs.ts",
12 | "lint": "eslint .",
13 | "lint:fix": "eslint . --fix",
14 | "prettier": "prettier --ignore-unknown --check .",
15 | "prettier:fix": "prettier --ignore-unknown --write .",
16 | "test": "npm run test:unit && npm run test:integration && npm run test:e2e",
17 | "test:unit": "NODE_OPTIONS=--experimental-vm-modules yarn jest --silent test/unit/",
18 | "test:unit:linux": "docker run -it -e NODE_OPTIONS=--experimental-vm-modules -v '.:/home/code' -w /home/code --rm node:18-bullseye bash -c 'yarn install && yarn test:unit'",
19 | "test:e2e": "yarn build && test/e2e.sh",
20 | "test:integration": "test/integration.sh"
21 | },
22 | "repository": {
23 | "type": "git",
24 | "url": "git+https://github.com/mattwebbio/orbital-sync.git"
25 | },
26 | "bin": "./dist/index.js",
27 | "files": [
28 | "dist"
29 | ],
30 | "devDependencies": {
31 | "@commitlint/config-conventional": "^19.1.0",
32 | "@tsconfig/node18": "^18.2.3",
33 | "@types/jest": "^29.5.12",
34 | "@types/luxon": "^3.4.2",
35 | "@types/mustache": "^4.2.5",
36 | "@types/node": "^18.8.0",
37 | "@types/nodemailer": "^6.4.14",
38 | "@typescript-eslint/eslint-plugin": "^8.0.1",
39 | "@typescript-eslint/parser": "^8.0.0",
40 | "commitlint": "^19.2.1",
41 | "eslint": "^9.8.0",
42 | "eslint-config-prettier": "^10.0.1",
43 | "jest": "^29.7.0",
44 | "lefthook": "^1.6.7",
45 | "markdown-table": "3",
46 | "nock": "^13.0.0",
47 | "prettier": "^3.2.5",
48 | "tempy": "^3.1.0",
49 | "testcontainers": "^10.7.2",
50 | "ts-jest": "^29.1.2",
51 | "ts-node": "^10.9.2",
52 | "typescript": "^5.4.3"
53 | },
54 | "dependencies": {
55 | "@honeybadger-io/js": "^6.8.3",
56 | "@sentry/node": "^7.108.0",
57 | "chalk": "^4.1.2",
58 | "fetch-cookie": "^3.0.1",
59 | "json-schema-to-ts": "^3.0.1",
60 | "mustache": "^4.2.0",
61 | "node-fetch": "^3.3.2",
62 | "node-html-parser": "^7.0.1",
63 | "nodemailer": "^6.9.13",
64 | "sleep-promise": "^9.1.0"
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/client/host.ts:
--------------------------------------------------------------------------------
1 | export class Host {
2 | baseUrl: string;
3 | path: string;
4 | fullUrl: string;
5 | password: string;
6 |
7 | private static pathExtractor = RegExp('^(http[s]?:+//[^/s]+)([/]?[^?#]+)?');
8 |
9 | constructor({
10 | baseUrl,
11 | password,
12 | path
13 | }: {
14 | baseUrl: string;
15 | password: string;
16 | path?: string;
17 | }) {
18 | path ??= ''; // admin should get added in v5 client
19 | if (path && !path.startsWith('/')) {
20 | path = '/' + path;
21 | }
22 |
23 | const includedPath = Host.pathExtractor.exec(baseUrl);
24 |
25 | if (includedPath && includedPath[1] && includedPath[2]) {
26 | baseUrl = includedPath[1];
27 | path = (this.trimTrailingSlash(includedPath[2]) ?? '') + path;
28 | }
29 |
30 | // Make sure to remove /admin or /api or / from the end
31 | path = path.replace(/(\/admin|\/api|\/)\/?$/, '');
32 |
33 | this.baseUrl = baseUrl;
34 | this.password = password;
35 | this.path = this.trimTrailingSlash(path);
36 | this.fullUrl = this.baseUrl + this.path;
37 | }
38 |
39 | private trimTrailingSlash(url: string): string {
40 | return url.endsWith('/') ? url.slice(0, url.length - 1) : url;
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/src/client/index.ts:
--------------------------------------------------------------------------------
1 | import chalk from 'chalk';
2 | import nodeFetch from 'node-fetch';
3 | import { Host } from './host.js';
4 | import { ClientV5 } from './v5/index.js';
5 | import { ClientV6 } from './v6/index.js';
6 | import { Log } from '../log.js';
7 | import { Version, SyncOptionsV5, SyncOptionsV6 } from '../config/index.js';
8 |
9 | export interface Client {
10 | downloadBackup(): Promise<Blob>;
11 | uploadBackup(backup: Blob): Promise<boolean>;
12 | updateGravity(): Promise<boolean>;
13 | getHost(): Host;
14 | getVersion(): number;
15 | }
16 |
17 | export class ClientFactory {
18 | public static async createClient({
19 | host,
20 | version,
21 | options,
22 | log
23 | }: {
24 | host: Host;
25 | version: Version;
26 | options: SyncOptionsV6 | SyncOptionsV5;
27 | log: Log;
28 | }): Promise<Client> {
29 | if (version === '5') {
30 | return ClientV5.create({ host, options: options as SyncOptionsV5, log });
31 | } else if (version === '6') {
32 | return ClientV6.create({ host, options: options as SyncOptionsV6, log });
33 | } else {
34 | // Auto-detect Pi-hole version
35 | log.info(chalk.yellow(`Checking PiHole version for ${host.fullUrl}...`));
36 |
37 | try {
38 | // Get the documentation URL since that does not require a password.
39 | const response = await nodeFetch(`${host.fullUrl}/api/docs`, {
40 | method: 'GET'
41 | });
42 |
43 | if (response.status === 200) {
44 | log.info(chalk.green(`✔️ PiHole (v6) API is available for ${host.fullUrl}`));
45 | return ClientV6.create({ host, options: options as SyncOptionsV6, log });
46 | }
47 |
48 | // Otherwise, assume v5
49 | log.info(
50 | chalk.green(
51 | `✔️ PiHole (v6) API is NOT available, assuming v5 for ${host.fullUrl}`
52 | )
53 | );
54 | return ClientV5.create({ host, options: options as SyncOptionsV5, log });
55 | } catch (error) {
56 | log.info(chalk.red(`${error}`));
57 | // default to V6
58 | return ClientV6.create({ host, options: options as SyncOptionsV6, log });
59 | }
60 | }
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/client/v5/index.ts:
--------------------------------------------------------------------------------
1 | import chalk from 'chalk';
2 | import fetchCookie from 'fetch-cookie';
3 | import nodeFetch, {
4 | Blob,
5 | FormData,
6 | RequestInfo,
7 | RequestInit,
8 | Response
9 | } from 'node-fetch';
10 | import { parse } from 'node-html-parser';
11 | import type { Host } from '../host.js';
12 | import { Log } from '../../log.js';
13 | import { ErrorNotification } from '../../notify.js';
14 | import type { SyncOptionsV5 } from '../../config/index.js';
15 |
16 | export class ClientV5 {
17 | private constructor(
18 | private fetch: NodeFetchCookie,
19 | private host: Host,
20 | private token: string,
21 | private options: SyncOptionsV5,
22 | private log: Log,
23 | private version: 5
24 | ) {}
25 |
26 | public static async create({
27 | host,
28 | options,
29 | log
30 | }: {
31 | host: Host;
32 | options: SyncOptionsV5;
33 | log: Log;
34 | }): Promise<ClientV5> {
35 | log.info(chalk.yellow(`➡️ Signing in to ${host.fullUrl}...`));
36 | const fetch = fetchCookie(nodeFetch);
37 |
38 | await fetch(`${host.fullUrl}/admin/index.php?login`, { method: 'GET' });
39 | const response = await fetch(`${host.fullUrl}/admin/index.php?login`, {
40 | headers: {
41 | 'content-type': 'application/x-www-form-urlencoded'
42 | },
43 | body: `pw=${encodeURIComponent(host.password)}`,
44 | method: 'POST'
45 | });
46 | if (response.status !== 200)
47 | throw new ErrorNotification({
48 | message: `There was an error logging in to "${host.fullUrl}" - are you able to log in with the configured password?`,
49 | verbose: {
50 | host: host.baseUrl,
51 | path: host.path,
52 | status: response.status,
53 | responseBody: await response.text()
54 | }
55 | });
56 |
57 | const token = this.parseResponseForToken(host, await response.text());
58 |
59 | log.info(chalk.green(`✔️ Successfully signed in to ${host.fullUrl}!`));
60 | return new this(fetch, host, token, options, log, 5);
61 | }
62 |
63 | private static parseResponseForToken(host: Host, responseBody: string): string {
64 | const root = parse(responseBody);
65 | const tokenDiv = root.querySelector('#token');
66 | if (!tokenDiv)
67 | throw new ErrorNotification({
68 | message: `No token could be found while logging in to "${host.fullUrl}" - are you able to log in with the configured password?`,
69 | verbose: {
70 | host: host.baseUrl,
71 | path: host.path,
72 | innerHtml: root.innerHTML
73 | }
74 | });
75 |
76 | const token = tokenDiv.innerText;
77 | if (token.length != 44)
78 | throw new ErrorNotification({
79 | message: `A token was found but could not be validated while logging in to "${host.fullUrl}" - are you able to log in with the configured password?`,
80 | verbose: {
81 | host: host.baseUrl,
82 | path: host.path,
83 | token: token
84 | }
85 | });
86 |
87 | return token;
88 | }
89 |
90 | public getVersion(): number {
91 | return this.version;
92 | }
93 |
94 | public getHost(): Host {
95 | return this.host;
96 | }
97 |
98 | public async downloadBackup(): Promise<Blob> {
99 | this.log.info(chalk.yellow(`➡️ Downloading backup from ${this.host.fullUrl}...`));
100 | const form = this.generateForm();
101 |
102 | const response = await this.fetch(
103 | `${this.host.fullUrl}/admin/scripts/pi-hole/php/teleporter.php`,
104 | {
105 | body: form,
106 | method: 'POST'
107 | }
108 | );
109 | if (
110 | response.status !== 200 ||
111 | response.headers.get('content-type') !== 'application/gzip'
112 | )
113 | throw new ErrorNotification({
114 | message: `Failed to download backup from "${this.host.fullUrl}".`,
115 | verbose: {
116 | host: this.host.baseUrl,
117 | path: this.host.path,
118 | status: response.status,
119 | responseBody: await response.text()
120 | }
121 | });
122 |
123 | const data = await response.arrayBuffer();
124 |
125 | this.log.info(chalk.green(`✔️ Backup from ${this.host.fullUrl} completed!`));
126 | return new Blob([data]);
127 | }
128 |
129 | public async uploadBackup(backup: Blob): Promise<boolean> {
130 | this.log.info(chalk.yellow(`➡️ Uploading backup to ${this.host.fullUrl}...`));
131 |
132 | const form = this.generateForm();
133 | form.append('action', 'in');
134 | form.append('zip_file', backup, 'backup.tar.gz');
135 |
136 | const uploadResponse = await this.fetch(
137 | `${this.host.fullUrl}/admin/scripts/pi-hole/php/teleporter.php`,
138 | {
139 | body: form,
140 | method: 'POST'
141 | }
142 | );
143 | const uploadText = await uploadResponse.text();
144 | if (
145 | uploadResponse.status !== 200 ||
146 | !(uploadText.endsWith('OK') || uploadText.endsWith('Done importing'))
147 | )
148 | throw new ErrorNotification({
149 | message: `Failed to upload backup to "${this.host.fullUrl}".`,
150 | verbose: {
151 | host: this.host.baseUrl,
152 | path: this.host.path,
153 | status: uploadResponse.status,
154 | responseBody: uploadText
155 | }
156 | });
157 |
158 | this.log.info(chalk.green(`✔️ Backup uploaded to ${this.host.fullUrl}!`));
159 | this.log.verbose(`Result:\n${chalk.blue(uploadText)}`);
160 |
161 | return true;
162 | }
163 |
164 | public async updateGravity(): Promise<boolean> {
165 | this.log.info(chalk.yellow(`➡️ Updating gravity on ${this.host.fullUrl}...`));
166 | const gravityUpdateResponse = await this.fetch(
167 | `${this.host.fullUrl}/admin/scripts/pi-hole/php/gravity.sh.php`,
168 | { method: 'GET' }
169 | );
170 |
171 | const updateText = (await gravityUpdateResponse.text())
172 | .replaceAll('\ndata:', '')
173 | .trim();
174 | if (
175 | gravityUpdateResponse.status !== 200 ||
176 | !updateText.endsWith('Pi-hole blocking is enabled')
177 | )
178 | throw new ErrorNotification({
179 | message: `Failed updating gravity on "${this.host.fullUrl}".`,
180 | verbose: {
181 | host: this.host.baseUrl,
182 | path: this.host.path,
183 | status: gravityUpdateResponse.status,
184 | eventStream: updateText
185 | }
186 | });
187 |
188 | this.log.info(chalk.green(`✔️ Gravity updated on ${this.host.fullUrl}!`));
189 | this.log.verbose(`Result:\n${chalk.blue(updateText)}`);
190 |
191 | return true;
192 | }
193 |
194 | private generateForm(): typeof FormData.prototype {
195 | const form = new FormData();
196 | form.append('token', this.token);
197 |
198 | if (this.options.whitelist) form.append('whitelist', true);
199 | if (this.options.regexWhitelist) form.append('regex_whitelist', true);
200 | if (this.options.blacklist) form.append('blacklist', true);
201 | if (this.options.regexList) form.append('regexlist', true);
202 | if (this.options.adList) form.append('adlist', true);
203 | if (this.options.client) form.append('client', true);
204 | if (this.options.group) form.append('group', true);
205 | if (this.options.auditLog) form.append('auditlog', true);
206 | if (this.options.staticDhcpLeases) form.append('staticdhcpleases', true);
207 | if (this.options.localDnsRecords) form.append('localdnsrecords', true);
208 | if (this.options.localCnameRecords) form.append('localcnamerecords', true);
209 | if (this.options.flushTables) form.append('flushtables', true);
210 |
211 | return form;
212 | }
213 | }
214 |
215 | type NodeFetchCookie = ReturnType<typeof fetchCookie<typeof nodeFetch>>;
216 |
--------------------------------------------------------------------------------
/src/client/v6/index.ts:
--------------------------------------------------------------------------------
1 | import chalk from 'chalk';
2 | import fetchCookie from 'fetch-cookie';
3 | import nodeFetch, {
4 | Blob,
5 | FetchError,
6 | FormData,
7 | RequestInfo,
8 | RequestInit,
9 | Response
10 | } from 'node-fetch';
11 | import type { Host } from '../host.js';
12 | import { Log } from '../../log.js';
13 | import { ErrorNotification } from '../../notify.js';
14 | import type { SyncOptionsV6 } from '../../config/index.js';
15 | import sleep from 'sleep-promise';
16 |
17 | export type PiHoleSession = {
18 | valid: boolean;
19 | totp: boolean;
20 | sid: string;
21 | csrf: string;
22 | validity: number;
23 | message: string;
24 | };
25 |
26 | export type PiHoleAuthResponse = {
27 | session: PiHoleSession;
28 | };
29 |
30 | export class ClientV6 {
31 | private constructor(
32 | private fetch: NodeFetchCookie,
33 | private host: Host,
34 | private token: string,
35 | private options: SyncOptionsV6,
36 | private log: Log,
37 | private version: 6
38 | ) {}
39 |
40 | public static async create({
41 | host,
42 | options,
43 | log
44 | }: {
45 | host: Host;
46 | options: SyncOptionsV6;
47 | log: Log;
48 | }): Promise<ClientV6> {
49 | log.info(chalk.yellow(`➡️ Signing in to ${host.fullUrl}...`));
50 | const fetch = fetchCookie(nodeFetch);
51 | const path = '/api/auth';
52 | await fetch(`${host.fullUrl}/api/auth`, { method: 'GET' });
53 | const response = await fetch(`${host.fullUrl}${path}`, {
54 | headers: {
55 | 'content-type': 'application/json'
56 | },
57 | body: `{"password":"${host.password}"}`,
58 | method: 'POST'
59 | });
60 | if (response.status !== 200) {
61 | throw new ErrorNotification({
62 | message: `There was an error logging in to "${host.fullUrl}" - are you able to log in with the configured password?`,
63 | verbose: {
64 | host: host.fullUrl,
65 | path,
66 | status: response.status,
67 | responseBody: await response.text()
68 | }
69 | });
70 | }
71 |
72 | const body: PiHoleAuthResponse = (await response.json()) as PiHoleAuthResponse;
73 | const token = body.session.sid;
74 |
75 | log.info(chalk.green(`✔️ Successfully signed in to ${host.fullUrl}!`));
76 | return new this(fetch, host, token, options, log, 6);
77 | }
78 |
79 | public getVersion(): number {
80 | return this.version;
81 | }
82 |
83 | public getHost(): Host {
84 | return this.host;
85 | }
86 |
87 | public async downloadBackup(): Promise<Blob> {
88 | this.log.info(chalk.yellow(`➡️ Downloading backup from ${this.host.fullUrl}...`));
89 |
90 | const path = '/api/teleporter';
91 | const response = await this.fetch(`${this.host.fullUrl}${path}`, {
92 | headers: {
93 | accept: 'application/zip',
94 | sid: this.token
95 | },
96 | method: 'GET'
97 | });
98 |
99 | if (response.status !== 200) {
100 | throw new ErrorNotification({
101 | message: `Failed to download backup from "${this.host.fullUrl}".`,
102 | verbose: {
103 | host: this.host.fullUrl,
104 | path,
105 | status: response.status,
106 | responseBody: await response.text()
107 | }
108 | });
109 | }
110 |
111 | const data = await response.arrayBuffer();
112 |
113 | this.log.info(chalk.green(`✔️ Backup from ${this.host.fullUrl} completed!`));
114 | return new Blob([data]);
115 | }
116 |
117 | public async uploadBackup(backup: Blob): Promise<boolean> {
118 | this.log.info(chalk.yellow(`➡️ Uploading backup to ${this.host.fullUrl}...`));
119 | const path = '/api/teleporter';
120 | const form = this.generateForm();
121 | form.append('file', backup, 'backup.zip');
122 |
123 | const uploadResponse = await this.fetch(`${this.host.fullUrl}${path}`, {
124 | headers: {
125 | accept: 'application/json',
126 | sid: this.token
127 | },
128 | body: form,
129 | method: 'POST'
130 | });
131 | const uploadText = await uploadResponse.text();
132 | if (uploadResponse.status !== 200) {
133 | throw new ErrorNotification({
134 | message: `Failed to upload backup to "${this.host.fullUrl}".`,
135 | verbose: {
136 | host: this.host.fullUrl,
137 | path,
138 | status: uploadResponse.status,
139 | responseBody: uploadText
140 | }
141 | });
142 | }
143 |
144 | this.log.info(chalk.green(`✔️ Backup uploaded to ${this.host.fullUrl}!`));
145 | this.log.verbose(`Result:\n${chalk.blue(uploadText)}`);
146 |
147 | return true;
148 | }
149 |
150 | public async updateGravity(attempt = 1): Promise<boolean> {
151 | const path = '/api/action/gravity';
152 |
153 | if (attempt > this.options.gravityUpdateRetryCount) {
154 | throw new ErrorNotification({
155 | message: `Exhausted ${attempt} retries updating gravity on ${this.host.fullUrl}.`,
156 | verbose: {
157 | host: this.host.fullUrl,
158 | path
159 | }
160 | });
161 | }
162 |
163 | try {
164 | this.log.info(
165 | chalk.yellow(`➡️ Attempting to update gravity on ${this.host.fullUrl}...`)
166 | );
167 | const gravityUpdateResponse = await this.fetch(`${this.host.fullUrl}${path}`, {
168 | headers: {
169 | accept: 'text/plain',
170 | sid: this.token
171 | },
172 | method: 'POST',
173 | body: null
174 | });
175 |
176 | const updateText = (await gravityUpdateResponse.text()).trim();
177 | if (gravityUpdateResponse.status >= 502 && gravityUpdateResponse.status <= 504) {
178 | this.log.verbose(chalk.red(`⚠️ ${gravityUpdateResponse.status} - ${updateText}`));
179 | await this.exponentialSleep(attempt);
180 | return this.updateGravity(attempt + 1);
181 | }
182 |
183 | if (gravityUpdateResponse.status !== 200) {
184 | throw new ErrorNotification({
185 | message: `Failed updating gravity on "${this.host.fullUrl}".`,
186 | verbose: {
187 | host: this.host.fullUrl,
188 | path,
189 | status: gravityUpdateResponse.status,
190 | eventStream: updateText
191 | }
192 | });
193 | }
194 |
195 | this.log.info(chalk.green(`✔️ Gravity updated on ${this.host.fullUrl}!`));
196 | this.log.verbose(`Result:\n${chalk.blue(updateText)}`);
197 | } catch (error) {
198 | if (!(error instanceof FetchError)) throw error;
199 |
200 | this.log.verbose(chalk.red(error));
201 | await this.exponentialSleep(attempt);
202 | return this.updateGravity(attempt + 1);
203 | }
204 |
205 | return true;
206 | }
207 |
208 | private exponentialSleep(attempt: number): Promise<void> {
209 | const backoffMs = Math.min(1000 * Math.pow(2, attempt - 1), 60000);
210 | this.log.info(
211 | chalk.yellow(
212 | `Sleeping for ${backoffMs / 1000}s and waiting for ${this.host.fullUrl} to be up...`
213 | )
214 | );
215 | return sleep(backoffMs);
216 | }
217 |
218 | private generateForm(): typeof FormData.prototype {
219 | const form = new FormData();
220 |
221 | form.append(
222 | 'import',
223 | JSON.stringify({
224 | config: this.options.config,
225 | dhcp_leases: this.options.dhcp_leases,
226 | gravity: {
227 | group: this.options.group,
228 | adlist: this.options.adlist,
229 | adlist_by_group: this.options.adlist_by_group,
230 | domainlist: this.options.domainlist,
231 | domainlist_by_group: this.options.domainlist_by_group,
232 | client: this.options.client,
233 | client_by_group: this.options.client_by_group
234 | }
235 | })
236 | );
237 |
238 | return form;
239 | }
240 | }
241 |
242 | type NodeFetchCookie = ReturnType<typeof fetchCookie<typeof nodeFetch>>;
243 |
--------------------------------------------------------------------------------
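The retry path above is driven by `exponentialSleep`, which waits `Math.min(1000 * 2 ** (attempt - 1), 60000)` milliseconds between gravity attempts; with the default `gravityUpdateRetryCount` of 5 that means waits of 1s, 2s, 4s, 8s and 16s before the retries are exhausted. A minimal standalone sketch of that arithmetic (illustrative only, not part of the client):

// Backoff used before retry number `attempt + 1`, mirroring ClientV6.exponentialSleep.
const backoffMs = (attempt: number): number =>
  Math.min(1000 * Math.pow(2, attempt - 1), 60000);

console.log([1, 2, 3, 4, 5, 6, 7].map(backoffMs));
// -> [ 1000, 2000, 4000, 8000, 16000, 32000, 60000 ]  (capped at 60s)

--------------------------------------------------------------------------------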
/src/config/index.ts:
--------------------------------------------------------------------------------
1 | import { parseSchema, RecursivePartial } from './parse.js';
2 | import { FromSchema } from 'json-schema-to-ts';
3 | import { Schema } from './schema.js';
4 |
5 | export type ConfigInterface = FromSchema<typeof Schema>;
6 | export type SyncOptionsV5 = ConfigInterface['sync']['v5'];
7 | export type SyncOptionsV6 = ConfigInterface['sync']['v6'];
8 | export type Version = ConfigInterface['piHoleVersion'];
9 |
10 | export function Config(
11 | overrides: RecursivePartial<ConfigInterface> = {}
12 | ): ConfigInterface {
13 | // @ts-expect-error - Type instantiation is excessively deep and possibly infinite
14 | return parseSchema(Schema, { overrides });
15 | }
16 |
--------------------------------------------------------------------------------
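`Config` accepts a recursive partial of the parsed configuration, which is how the integration tests later in this repository build a config programmatically instead of through environment variables; anything not overridden falls back to the schema defaults. A minimal sketch (hosts are illustrative, and the import path assumes the snippet sits next to src/config/index.ts):

import { Config } from './index.js';

const config = Config({
  primaryHost: { baseUrl: 'http://10.0.0.2', password: 'primary' },
  secondaryHosts: [{ baseUrl: 'http://10.0.0.3', password: 'secondary' }]
});

console.log(config.sync.v5.whitelist); // true  (schema default)
console.log(config.intervalMinutes);   // 60    (schema default)

--------------------------------------------------------------------------------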
/src/config/parse.ts:
--------------------------------------------------------------------------------
1 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment
2 | // @ts-nocheck
3 | import mustache from 'mustache';
4 | import { FromSchema } from 'json-schema-to-ts';
5 | import type { JSONSchema } from 'json-schema-to-ts';
6 | import { readFileSync } from 'fs';
7 | import { camelToSnakeCase } from '../util/string-case.js';
8 |
9 | /*
10 | While the return type of this function should be accurate, the internal type checking
11 | of the values being returned is not (particularly, there are a few `unknown`s and
12 | `any`s in here). TypeScript seems to be unable to properly infer the types of the values
13 | encased in the JSONSchema, and any attempts to correct this seem to cause the classic
14 | `type instantiation is excessively deep and possibly infinite`:
15 | https://github.com/ThomasAribart/json-schema-to-ts/blob/cf1aaf03266ec02286e0320ee6141f22ef0bb348/documentation/FAQs/i-get-a-type-instantiation-is-excessively-deep-and-potentially-infinite-error-what-should-i-do.md
16 |
17 | I'll try to compensate for this with thorough testing, but if anyone out there in the
18 | big internet world has a solution, please submit a PR!
19 | - @mattwebbio
20 | */
21 | /* eslint-disable @typescript-eslint/no-explicit-any */
22 | export function parseSchema<T extends JSONSchema>(
23 | schema: T,
24 | {
25 | path,
26 | overrides
27 | }: {
28 | path?: string[];
29 | overrides?: RecursivePartial<FromSchema<T>>;
30 | } = {}
31 | ): FromSchema<T> {
32 | path ??= [];
33 | const pathDescriptor = path.length === 0 ? 'root' : `"${path.join('.')}"`;
34 | if (typeof schema !== 'object') throw new Error(`Invalid schema for ${pathDescriptor}`);
35 |
36 | if (schema.type === 'array') {
37 | if (schema.items === undefined)
38 | throw new Error(`Undefined array items for ${pathDescriptor}`);
39 |
40 | const template = JSON.stringify(schema.items);
41 | const templateRequiresInterpolation = template.includes('{{i}}');
42 |
43 | // TODO: Loop at least as many times as there are elements in the corresponding array of `config.json`
44 | const results: unknown[] = [];
45 | // eslint-disable-next-line for-direction
46 | for (let i = 1; i >= 0; i++) {
47 | const element: JSONSchema = templateRequiresInterpolation
48 | ? JSON.parse(mustache.render(template, { i }))
49 | : schema.items;
50 |
51 | try {
52 | const result = parseSchema(element, {
53 | path: [...path, i.toString()],
54 | overrides: overrides?.[i - 1]
55 | });
56 | if (!result || Object.values(result).every((v) => !v)) break;
57 | else results.push(result);
58 | } catch (e) {
59 | if (i > (schema.minItems ?? 0) && e instanceof MissingRequiredPropertiesError)
60 | break;
61 | else throw e;
62 | }
63 | }
64 |
65 | return results as any;
66 | } else if (schema.type === 'object') {
67 | if (schema.properties === undefined)
68 | throw new Error(`Undefined object properties for ${pathDescriptor}`);
69 |
70 | const properties = Object.entries(schema.properties).map(([key, value]) => {
71 | return [
72 | key,
73 | parseSchema(value, { path: [...path, key], overrides: overrides?.[key] })
74 | ];
75 | });
76 | const object = Object.fromEntries(properties);
77 |
78 | if (schema.required && schema.required.length > 0) {
79 | const missingRequired = schema.required.filter((key) => object[key] === undefined);
80 | if (missingRequired.length > 0)
81 | throw new MissingRequiredPropertiesError(path, missingRequired);
82 | }
83 |
84 | return object;
85 | } else {
86 | let value: any;
87 | const secretFile = process.env[`${pathToEnvVar(path)}_FILE`];
88 |
89 | if (secretFile)
90 | value ??= readFileSync(process.env[`${pathToEnvVar(path)}_FILE`], 'utf8').trim();
91 | value ??= overrides;
92 | value ??= process.env[pathToEnvVar(path)];
93 | if (schema.envVar) value ??= process.env[schema.envVar];
94 | value ??= schema.default;
95 |
96 | if (typeof value === 'string') {
97 | if (schema.type === 'boolean') {
98 | const lowercase = value.toLowerCase();
99 | if (!['true', 'false'].includes(lowercase))
100 | throw new Error(
101 | `Invalid boolean for ${pathDescriptor}: expected 'true' or 'false'`
102 | );
103 |
104 | value = lowercase === 'true';
105 | }
106 |
107 | if (schema.type === 'number') {
108 | const number = parseFloat(value);
109 | if (isNaN(number))
110 | throw new Error(`Invalid number for ${pathDescriptor}: expected a number`);
111 |
112 | value = number;
113 | }
114 | }
115 |
116 | return value;
117 | }
118 | }
119 |
120 | class MissingRequiredPropertiesError extends Error {
121 | constructor(path: string[], missing: string[]) {
122 | const pathDescriptor = path.length === 0 ? 'root' : `"${path.join('.')}"`;
123 |
124 | super(`Missing required properties for ${pathDescriptor}: ${missing.join(', ')}`);
125 | this.name = 'MissingRequiredPropertiesError';
126 | }
127 | }
128 |
129 | export function pathToEnvVar(path: string[]): string {
130 | return path.map((p) => camelToSnakeCase(p).toUpperCase()).join('_');
131 | }
132 |
133 | export type RecursivePartial<T> = {
134 | [P in keyof T]?: T[P] extends (infer U)[]
135 | ? RecursivePartial<U>[]
136 | : T[P] extends object | undefined
137 | ? RecursivePartial<T[P]>
138 | : T[P];
139 | };
140 |
141 | export type SchemaType = Exclude<JSONSchema, boolean> & {
142 | envVar?: string;
143 | example?: string;
144 | };
145 |
--------------------------------------------------------------------------------
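For leaf values, `parseSchema` above resolves configuration in a fixed order: a `*_FILE` secret read from disk (named after the schema path), then the programmatic override, then the environment variable derived from the path via `pathToEnvVar`, then the schema's explicit `envVar`, and finally the default. A minimal sketch of the derived names (paths taken from schema.ts below; the secrets path is illustrative):

import { pathToEnvVar } from './parse.js';

pathToEnvVar(['primaryHost', 'baseUrl']);    // 'PRIMARY_HOST_BASE_URL'
pathToEnvVar(['notify', 'smtp', 'enabled']); // 'NOTIFY_SMTP_ENABLED'
pathToEnvVar(['sync', 'v5', 'flushTables']); // 'SYNC_V5_FLUSH_TABLES'

// Secrets can be read from files instead of plain variables, e.g.
// PRIMARY_HOST_PASSWORD_FILE=/run/secrets/pihole_password

--------------------------------------------------------------------------------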
/src/config/schema.ts:
--------------------------------------------------------------------------------
1 | import { asConst } from 'json-schema-to-ts';
2 |
3 | export const Schema = asConst({
4 | type: 'object',
5 | properties: {
6 | piHoleVersion: {
7 | $id: '/schemas/piHoleVersion',
8 | type: 'string',
9 | default: 'auto',
10 | enum: ['auto', '6', '5'],
11 | envVar: 'PIHOLE_VERSION',
12 | example: '`auto`, `6`, or `5`',
13 | description: 'The version of PiHole you are using'
14 | },
15 | primaryHost: {
16 | type: 'object',
17 | description: 'The primary Pi-hole that data will be copied from.',
18 | properties: {
19 | baseUrl: {
20 | type: 'string',
21 | envVar: 'PRIMARY_HOST_BASE_URL',
22 | example: '`http://192.168.1.2` or `https://pihole.example.com`',
23 | description:
24 | 'The base URL of your Pi-hole, including the scheme (HTTP or HTTPS) and port but not including a following slash.'
25 | },
26 | password: {
27 | type: 'string',
28 | envVar: 'PRIMARY_HOST_PASSWORD',
29 | example: '`mypassword`',
30 | description: 'The password (v5) or app password (v6) used to login to PiHole.'
31 | },
32 | path: {
33 | type: 'string',
34 | envVar: 'PRIMARY_HOST_PATH',
35 | example: '`/` or `/apps/pi-hole`',
36 | description:
37 | 'The path to be appended to your base URL. The default Pi-hole path is `/admin` (v5) or `/api` (v6), which is added automatically.'
38 | }
39 | },
40 | required: ['baseUrl', 'password']
41 | },
42 | secondaryHosts: {
43 | type: 'array',
44 | items: {
45 | type: 'object',
46 | description: 'Secondary Pi-holes that data will be copied to.',
47 | properties: {
48 | baseUrl: {
49 | type: 'string',
50 | envVar: 'SECONDARY_HOST_{{i}}_BASE_URL',
51 | example: '`http://192.168.1.3` or `https://pihole2.example.com`',
52 | description:
53 | 'The base URL of your secondary Pi-hole, including the scheme (HTTP or HTTPS) and port but not including a following slash.'
54 | },
55 | password: {
56 | type: 'string',
57 | envVar: 'SECONDARY_HOST_{{i}}_PASSWORD',
58 | example: '`mypassword2`',
59 | description: 'The password used to log in to the admin interface.'
60 | },
61 | path: {
62 | type: 'string',
63 | envVar: 'SECONDARY_HOST_{{i}}_PATH',
64 | example: '`/` or `/apps/pi-hole`',
65 | description:
66 | 'The path to be appended to your secondary base URL. The default Pi-hole path is `/admin`, which is added automatically.'
67 | }
68 | },
69 | required: ['baseUrl', 'password']
70 | },
71 | minItems: 1
72 | },
73 | sync: {
74 | type: 'object',
75 | description:
76 | 'What data to copy from the primary Pi-hole to the secondary Pi-holes.',
77 | properties: {
78 | v5: {
79 | type: 'object',
80 | description: 'Sync options for Pi-hole v5.x.',
81 | properties: {
82 | whitelist: {
83 | type: 'boolean',
84 | default: true,
85 | envVar: 'SYNC_WHITELIST',
86 | example: '`true`/`false`',
87 | description: 'Copies the whitelist'
88 | },
89 | regexWhitelist: {
90 | type: 'boolean',
91 | default: true,
92 | envVar: 'SYNC_REGEX_WHITELIST',
93 | example: '`true`/`false`',
94 | description: 'Copies the regex whitelist'
95 | },
96 | blacklist: {
97 | type: 'boolean',
98 | default: true,
99 | envVar: 'SYNC_BLACKLIST',
100 | example: '`true`/`false`',
101 | description: 'Copies the blacklist'
102 | },
103 | regexList: {
104 | type: 'boolean',
105 | default: true,
106 | envVar: 'SYNC_REGEXLIST',
107 | example: '`true`/`false`',
108 | description: 'Copies the regex blacklist'
109 | },
110 | adList: {
111 | type: 'boolean',
112 | default: true,
113 | envVar: 'SYNC_ADLIST',
114 | example: '`true`/`false`',
115 | description: 'Copies adlists'
116 | },
117 | client: {
118 | type: 'boolean',
119 | default: true,
120 | envVar: 'SYNC_CLIENT',
121 | example: '`true`/`false`',
122 | description: 'Copies clients'
123 | },
124 | group: {
125 | type: 'boolean',
126 | default: true,
127 | envVar: 'SYNC_GROUP',
128 | example: '`true`/`false`',
129 | description: 'Copies groups'
130 | },
131 | auditLog: {
132 | type: 'boolean',
133 | default: false,
134 | envVar: 'SYNC_AUDITLOG',
135 | example: '`true`/`false`',
136 | description: 'Copies the audit log'
137 | },
138 | staticDhcpLeases: {
139 | type: 'boolean',
140 | default: false,
141 | envVar: 'SYNC_STATICDHCPLEASES',
142 | example: '`true`/`false`',
143 | description: 'Copies static DHCP leases'
144 | },
145 | localDnsRecords: {
146 | type: 'boolean',
147 | default: true,
148 | envVar: 'SYNC_LOCALDNSRECORDS',
149 | example: '`true`/`false`',
150 | description: 'Copies local DNS records'
151 | },
152 | localCnameRecords: {
153 | type: 'boolean',
154 | default: true,
155 | envVar: 'SYNC_LOCALCNAMERECORDS',
156 | example: '`true`/`false`',
157 | description: 'Copies local CNAME records'
158 | },
159 | flushTables: {
160 | type: 'boolean',
161 | default: true,
162 | envVar: 'SYNC_FLUSHTABLES',
163 | example: '`true`/`false`',
164 | description: 'Clears existing data on the secondary (copy target) Pi-hole'
165 | }
166 | },
167 | required: [
168 | 'whitelist',
169 | 'regexWhitelist',
170 | 'blacklist',
171 | 'regexList',
172 | 'adList',
173 | 'client',
174 | 'group',
175 | 'auditLog',
176 | 'staticDhcpLeases',
177 | 'localDnsRecords',
178 | 'localCnameRecords',
179 | 'flushTables'
180 | ]
181 | },
182 | v6: {
183 | type: 'object',
184 | description: 'Sync options for Pi-hole v6.x.',
185 | properties: {
186 | config: {
187 | type: 'boolean',
188 | default: false,
189 | example: '`true`/`false`',
190 | description: 'Copies the TOML config file'
191 | },
192 | dhcp_leases: {
193 | type: 'boolean',
194 | default: true,
195 | envVar: 'DHCP_LEASES',
196 | example: '`true`/`false`',
197 | description: 'Copies the DHCP leases'
198 | },
199 | group: {
200 | type: 'boolean',
201 | default: true,
202 | envVar: 'SYNC_GROUP',
203 | example: '`true`/`false`',
204 | description: 'Copies groups'
205 | },
206 | adlist: {
207 | type: 'boolean',
208 | default: true,
209 | envVar: 'SYNC_ADLIST',
210 | example: '`true`/`false`',
211 | description: 'Copies adlists'
212 | },
213 | adlist_by_group: {
214 | type: 'boolean',
215 | default: true,
216 | envVar: 'SYNC_ADLIST_BY_GROUP',
217 | example: '`true`/`false`',
218 | description: 'Copies adlists by group'
219 | },
220 | domainlist: {
221 | type: 'boolean',
222 | default: true,
223 | envVar: 'SYNC_DOMAINLIST',
224 | example: '`true`/`false`',
225 | description: 'Copies domain list'
226 | },
227 | domainlist_by_group: {
228 | type: 'boolean',
229 | default: true,
230 | envVar: 'SYNC_DOMAINLIST_BY_GROUP',
231 | example: '`true`/`false`',
232 | description: 'Copies domain list by group'
233 | },
234 | client: {
235 | type: 'boolean',
236 | default: true,
237 | envVar: 'SYNC_CLIENT',
238 | example: '`true`/`false`',
239 | description: 'Copies clients'
240 | },
241 | client_by_group: {
242 | type: 'boolean',
243 | default: true,
244 | envVar: 'SYNC_CLIENT_BY_GROUP',
245 | example: '`true`/`false`',
246 | description: 'Copies clients by group'
247 | },
248 | gravityUpdateRetryCount: {
249 | type: 'number',
250 | default: 5,
251 | example: '`3`',
252 | description:
253 | 'The number of times to retry updating gravity if it fails. Only used if `UPDATE_GRAVITY` is not set to `false`. Defaults to 5. Uses an exponential backoff; the second attempt will wait a second, the third attempt 2, the fourth attempt 4, the fifth attempt 8, and so on - up to a maximum of 60 seconds.'
254 | }
255 | },
256 | required: [
257 | 'dhcp_leases',
258 | 'group',
259 | 'adlist',
260 | 'adlist_by_group',
261 | 'domainlist',
262 | 'domainlist_by_group',
263 | 'client',
264 | 'client_by_group'
265 | ]
266 | }
267 | },
268 | required: ['v5', 'v6']
269 | },
270 | notify: {
271 | type: 'object',
272 | description: 'When to send notifications and how to send them.',
273 | properties: {
274 | onSuccess: {
275 | type: 'boolean',
276 | default: false,
277 | envVar: 'NOTIFY_ON_SUCCESS',
278 | example: '`true`/`false`',
279 | description: 'Send a notification if a sync completes successfully.'
280 | },
281 | onFailure: {
282 | type: 'boolean',
283 | default: true,
284 | envVar: 'NOTIFY_ON_FAILURE',
285 | example: '`true`/`false`',
286 | description: 'Send a notification if a sync fails for any reason.'
287 | },
288 | smtp: {
289 | type: 'object',
290 | description: 'Send notifications via email using SMTP',
291 | properties: {
292 | enabled: {
293 | type: 'boolean',
294 | default: false,
295 | envVar: 'SMTP_ENABLED',
296 | example: '`true`/`false`',
297 | description: 'Send notifications via email.'
298 | },
299 | from: {
300 | type: 'string',
301 | envVar: 'SMTP_FROM',
302 | example: '`orbitalsync@example.com`',
303 | description: 'The email address to send notifications from.'
304 | },
305 | to: {
306 | type: 'string',
307 | envVar: 'SMTP_TO',
308 | example: '`you@example.com`',
309 | description:
310 | 'The email address to send notifications to. Can be a comma-separated list.'
311 | },
312 | host: {
313 | type: 'string',
314 | envVar: 'SMTP_HOST',
315 | example: '`smtp.example.com`',
316 | description: 'The SMTP server host.'
317 | },
318 | port: {
319 | type: 'number',
320 | envVar: 'SMTP_PORT',
321 | example: '`25`/`587`/`465`',
322 | description: 'The SMTP server port.'
323 | },
324 | tls: {
325 | type: 'boolean',
326 | default: false,
327 | envVar: 'SMTP_TLS',
328 | example: '`true`/`false`',
329 | description:
330 | 'Should usually be set to true if using port 465. Otherwise, leave as is.'
331 | },
332 | user: {
333 | type: 'string',
334 | envVar: 'SMTP_USER',
335 | example: '`orbitalsync@example.com`',
336 | description: 'The SMTP account username.'
337 | },
338 | password: {
339 | type: 'string',
340 | envVar: 'SMTP_PASSWORD',
341 | example: '`yourpasswordhere`',
342 | description: 'The SMTP account password.'
343 | }
344 | }
345 | },
346 | exceptions: {
347 | type: 'object',
348 | description:
349 | 'Log exceptions to [Honeybadger](https://www.honeybadger.io) or [Sentry](http://sentry.io/). Used mostly for development or debugging.',
350 | properties: {
351 | honeybadgerApiKey: {
352 | type: 'string',
353 | envVar: 'HONEYBADGER_API_KEY',
354 | example: '`hbp_xxxxxxxxxxxxxxxxxx`',
355 | description:
356 | 'Set to use Honeybadger for proper exception recording; mostly useful for development or debugging.'
357 | },
358 | sentryDsn: {
359 | type: 'string',
360 | envVar: 'SENTRY_DSN',
361 | example: '`https://key@o0.ingest.sentry.io/0`',
362 | description:
363 | 'Set to use Sentry for proper exception recording; mostly useful for development or debugging.'
364 | }
365 | }
366 | }
367 | }
368 | },
369 | updateGravity: {
370 | type: 'boolean',
371 | default: true,
372 | envVar: 'UPDATE_GRAVITY',
373 | example: '`true`/`false`',
374 | description:
375 | 'Triggers a gravity update after a backup has been uploaded to a secondary Pi-hole. This updates adlists and restarts gravity.'
376 | },
377 | verbose: {
378 | type: 'boolean',
379 | default: false,
380 | envVar: 'VERBOSE',
381 | example: '`true`/`false`',
382 | description: 'Increases the verbosity of log output. Useful for debugging.'
383 | },
384 | runOnce: {
385 | type: 'boolean',
386 | default: false,
387 | envVar: 'RUN_ONCE',
388 | example: '`true`/`false`',
389 | description:
390 | 'By default, Orbital Sync runs indefinitely until stopped. Setting this to `true` forces it to exit immediately after the first sync.'
391 | },
392 | intervalMinutes: {
393 | type: 'number',
394 | default: 60,
395 | envVar: 'INTERVAL_MINUTES',
396 | example: 'Any non-zero positive integer, for example `5`, `30`, or `1440`',
397 | description:
398 | 'How long to wait between synchronizations. Defaults to sixty minutes. Remember that the DNS server on your secondary servers restarts every time a sync is performed.'
399 | }
400 | },
401 | required: [
402 | 'primaryHost',
403 | 'secondaryHosts',
404 | 'sync',
405 | 'notify',
406 | 'updateGravity',
407 | 'verbose',
408 | 'runOnce',
409 | 'intervalMinutes'
410 | ]
411 | });
412 |
--------------------------------------------------------------------------------
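Because `secondaryHosts` is an array, its `envVar` templates are rendered per index with mustache (`{{i}}` starts at 1) and the array parser keeps going until an index is missing its required values; this is how the e2e tests below configure Orbital Sync. A minimal sketch using only environment variables (addresses are illustrative; the import path assumes the snippet lives in src/):

import { Config } from './config/index.js';

process.env.PRIMARY_HOST_BASE_URL = 'http://192.168.1.2';
process.env.PRIMARY_HOST_PASSWORD = 'primary';
process.env.SECONDARY_HOST_1_BASE_URL = 'http://192.168.1.3';
process.env.SECONDARY_HOST_1_PASSWORD = 'secondary';
process.env.SECONDARY_HOST_2_BASE_URL = 'http://192.168.1.4';
process.env.SECONDARY_HOST_2_PASSWORD = 'tertiary';

const config = Config();
console.log(config.secondaryHosts.length); // 2 - parsing stops at the first index with no values set

--------------------------------------------------------------------------------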
/src/index.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import chalk from 'chalk';
4 | import sleep from 'sleep-promise';
5 | import { Log } from './log.js';
6 | import { Sync } from './sync.js';
7 | import { Config } from './config/index.js';
8 |
9 | const config = Config();
10 | const log = new Log(config.verbose);
11 |
12 | do {
13 | await Sync.perform(config, { log });
14 |
15 | if (!config.runOnce) {
16 | log.info(chalk.dim(`Waiting ${config.intervalMinutes} minutes...`));
17 | await sleep(config.intervalMinutes * 60 * 1000);
18 | }
19 | } while (!config.runOnce);
20 |
--------------------------------------------------------------------------------
/src/log.ts:
--------------------------------------------------------------------------------
1 | import chalk from 'chalk';
2 |
3 | export class Log {
4 | constructor(public verboseMode: boolean) {}
5 |
6 | info(message: unknown) {
7 | console.log(`${this.timestamp}: ${this.stringify(message)}`);
8 | }
9 |
10 | verbose(message: unknown) {
11 | if (this.verboseMode && message) this.info(this.stringify(message));
12 | }
13 |
14 | error(message: unknown) {
15 | console.error(`${this.timestamp}: ${chalk.red(this.stringify(message))}`);
16 | }
17 |
18 | private stringify(message: unknown): string {
19 | if (typeof message === 'string') return message;
20 | else return JSON.stringify(message);
21 | }
22 |
23 | private get timestamp(): string {
24 | return chalk.dim(new Date().toLocaleString('en-US'));
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/src/notify.ts:
--------------------------------------------------------------------------------
1 | import Honeybadger from '@honeybadger-io/js';
2 | import Sentry from '@sentry/node';
3 | import { FetchError } from 'node-fetch';
4 | import nodemailer from 'nodemailer';
5 | import { Log } from './log.js';
6 | import { ConfigInterface } from './config/index.js';
7 | import { Host } from './client/host.js';
8 |
9 | export class Notify {
10 | private errorQueue: NotificationInterface[] = [];
11 | private _honeybadger?: Honeybadger;
12 | private _sentry?: typeof Sentry;
13 | private _smtpClient?: nodemailer.Transporter;
14 | private allHostUrls: string[];
15 |
16 | constructor(
17 | private config: ConfigInterface,
18 | private log: Log = new Log(config.verbose)
19 | ) {
20 | this.allHostUrls = [config.primaryHost, ...config.secondaryHosts].map(
21 | (host) => new Host(host).fullUrl
22 | );
23 | }
24 |
25 | async ofThrow(error: unknown, queue = false): Promise<void> {
26 | if (error instanceof ErrorNotification) {
27 | if (!queue || (error as NotificationInterface).exit) await this.ofFailure(error);
28 | else this.queueError(error);
29 | } else if (error instanceof FetchError && error.code === 'ECONNREFUSED') {
30 | const messageSubstring = error.message.split('ECONNREFUSED')[0];
31 | const url = this.allHostUrls.find((url) => messageSubstring.includes(url));
32 |
33 | await this.ofThrow(
34 | new ErrorNotification({
35 | message: `The host "${url}" refused to connect. Is it down?`,
36 | verbose: error.message
37 | }),
38 | queue
39 | );
40 | } else {
41 | if (error instanceof Error || typeof error === 'string') {
42 | this.honeybadger?.notify(error);
43 | this.sentry?.captureException(error);
44 | }
45 | await this.ofFailure({
46 | message: `An unexpected error was thrown:\n- ${error?.toString() ?? error}`
47 | });
48 | }
49 | }
50 |
51 | async ofSuccess({
52 | message,
53 | verbose,
54 | sendNotification
55 | }: NotificationInterface): Promise<void> {
56 | if (this.errorQueue.length > 0) {
57 | await this.ofFailure({
58 | message: `Sync succeeded, but there were some unexpected errors. ${message}`
59 | });
60 |
61 | return;
62 | }
63 |
64 | this.log.info(`✔️ Success: ${message}`);
65 | this.log.verbose(verbose);
66 |
67 | if (sendNotification ?? this.config.notify.onSuccess) {
68 | await this.dispatch('✔️ Success', message);
69 | }
70 | }
71 |
72 | ofFailure({ exit }: NotificationInterface & { exit: true }): never;
73 | ofFailure({ exit }: NotificationInterface): Promise<void>;
74 | async ofFailure({
75 | message,
76 | verbose,
77 | sendNotification,
78 | exit
79 | }: NotificationInterface): Promise<void> {
80 | this.log.error(`⚠ Failure: ${message}`);
81 | this.log.verbose(verbose);
82 |
83 | const errors = this.errorQueue.map((notif) => notif.message);
84 | this.errorQueue = [];
85 |
86 | if (sendNotification ?? this.config.notify.onFailure) {
87 | let formatted = message;
88 | if (errors.length > 0) {
89 | formatted = formatted.concat(
90 | '\n\nThe following errors occurred during sync:\n- ',
91 | errors.join('\n- ')
92 | );
93 | }
94 |
95 | await this.dispatch(`⚠ Failed`, formatted);
96 | }
97 |
98 | if (exit || this.config.runOnce) process.exit(1);
99 | }
100 |
101 | queueError(error: NotificationInterface): void {
102 | this.log.error(`⚠ Error: ${error.message}`);
103 | this.log.verbose(error.verbose);
104 |
105 | this.errorQueue.push(error);
106 | }
107 |
108 | private get honeybadger(): Honeybadger | undefined {
109 | if (this.config.notify.exceptions?.honeybadgerApiKey === undefined) return;
110 |
111 | this._honeybadger ??= Honeybadger.configure({
112 | apiKey: this.config.notify.exceptions.honeybadgerApiKey
113 | });
114 |
115 | return this._honeybadger;
116 | }
117 |
118 | private get sentry(): typeof Sentry | undefined {
119 | if (this.config.notify.exceptions?.sentryDsn === undefined) return;
120 |
121 | if (this._sentry === undefined) {
122 | Sentry.init({
123 | dsn: this.config.notify.exceptions.sentryDsn
124 | });
125 |
126 | this._sentry = Sentry;
127 | }
128 |
129 | return this._sentry;
130 | }
131 |
132 | private async dispatch(subject: string, contents: string): Promise<void> {
133 | await Promise.allSettled([this.dispatchSmtp(subject, contents)]);
134 | }
135 |
136 | private async dispatchSmtp(subject: string, contents: string): Promise<void> {
137 | try {
138 | if (this.config.notify.smtp?.enabled && this.smtpClient) {
139 | this.log.verbose('➡️ Dispatching notification email...');
140 |
141 | await this.smtpClient.sendMail({
142 | from: this.config.notify.smtp.from
143 | ? `"Orbital Sync" <${this.config.notify.smtp.from}>`
144 | : undefined,
145 | to: this.config.notify.smtp.to,
146 | subject: `Orbital Sync: ${subject}`,
147 | text: `Orbital Sync\n${subject}\n\n${contents}`,
148 | html: `<p><h2>Orbital Sync</h2>${subject}</p><p>${contents.replaceAll(
149 | '\n',
150 | '<br />'
151 | )}</p>`
152 | });
153 |
154 | this.log.verbose('✔️ Notification email dispatched.');
155 | }
156 | } catch (e) {
157 | const error: NotificationInterface = {
158 | message: 'SMTP is misconfigured. Please check your configuration.'
159 | };
160 | if (e instanceof Error) error.verbose = e.message;
161 | else error.verbose = JSON.stringify(e);
162 |
163 | this.queueError(error);
164 | }
165 | }
166 |
167 | private get smtpClient(): nodemailer.Transporter | undefined {
168 | if (!this.config.notify.smtp?.enabled) return;
169 | if (!this.config.notify.smtp?.host)
170 | throw new Error('SMTP is enabled but no host is provided.');
171 |
172 | if (!this._smtpClient) {
173 | this.log.verbose('➡️ Creating SMTP client...');
174 |
175 | this._smtpClient = nodemailer.createTransport({
176 | host: this.config.notify.smtp.host,
177 | port: this.config.notify.smtp.port,
178 | secure: this.config.notify.smtp.tls,
179 | auth: this.config.notify.smtp.user
180 | ? {
181 | user: this.config.notify.smtp.user,
182 | pass: this.config.notify.smtp.password
183 | }
184 | : undefined
185 | });
186 |
187 | this.log.verbose('✔️ SMTP client created successfully.');
188 | }
189 |
190 | return this._smtpClient;
191 | }
192 | }
193 |
194 | export class ErrorNotification extends Error implements NotificationInterface {
195 | verbose?: string | Record<string, unknown> | undefined;
196 |
197 | constructor(args: NotificationInterface) {
198 | super(args.message);
199 | Object.assign(this, args);
200 | }
201 | }
202 |
203 | export interface NotificationInterface {
204 | message: string;
205 | verbose?: string | Record<string, unknown>;
206 | sendNotification?: boolean;
207 | exit?: boolean;
208 | }
209 |
--------------------------------------------------------------------------------
/src/sync.ts:
--------------------------------------------------------------------------------
1 | import { Host } from './client/host.js';
2 | import { ClientFactory } from './client/index.js';
3 | import { ConfigInterface } from './config/index.js';
4 | import { Log } from './log.js';
5 | import { Notify } from './notify.js';
6 |
7 | export class Sync {
8 | static async perform(
9 | config: ConfigInterface,
10 | { notify: _notify, log: _log }: { notify?: Notify; log?: Log } = {}
11 | ): Promise<void> {
12 | const notify = _notify ?? new Notify(config);
13 | const log = _log ?? new Log(config.verbose);
14 |
15 | try {
16 | const primaryHost = await ClientFactory.createClient({
17 | host: new Host(config.primaryHost),
18 | version: config.piHoleVersion || 'auto',
19 | options: config.sync.v6 || config.sync.v5,
20 | log
21 | });
22 | const backup = await primaryHost.downloadBackup();
23 |
24 | const secondaryHostCount = config.secondaryHosts?.length ?? 0;
25 | const successfulRestoreCount = (
26 | await Promise.all(
27 | config.secondaryHosts.map((host) =>
28 | ClientFactory.createClient({
29 | host: new Host(host),
30 | version: config.piHoleVersion || 'auto',
31 | options: config.sync.v6 || config.sync.v5,
32 | log
33 | })
34 | .then(async (client) => {
35 | let success = await client.uploadBackup(backup);
36 |
37 | if (success && config.updateGravity)
38 | success = await client.updateGravity();
39 |
40 | return success;
41 | })
42 | .catch((error) => notify.ofThrow(error, true))
43 | )
44 | )
45 | ).filter(Boolean).length;
46 |
47 | if (secondaryHostCount === successfulRestoreCount) {
48 | await notify.ofSuccess({
49 | message: `${successfulRestoreCount}/${secondaryHostCount} hosts synced.`
50 | });
51 | } else if (successfulRestoreCount > 0) {
52 | await notify.ofFailure({
53 | sendNotification: config.notify.onSuccess || config.notify.onFailure,
54 | message: `${successfulRestoreCount}/${secondaryHostCount} hosts synced.`
55 | });
56 | } else {
57 | await notify.ofFailure({
58 | message: `${successfulRestoreCount}/${secondaryHostCount} hosts synced.`
59 | });
60 | }
61 | } catch (e: unknown) {
62 | await notify.ofThrow(e);
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/src/util/string-case.ts:
--------------------------------------------------------------------------------
1 | export function camelToSnakeCase(str: string): string {
2 | return str.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
3 | }
4 |
5 | export function camelToTitleCase(str: string): string {
6 | return (
7 | str.charAt(0).toUpperCase() +
8 | str.slice(1).replace(/[A-Z]/g, (letter) => ` ${letter.toUpperCase()}`)
9 | );
10 | }
11 |
--------------------------------------------------------------------------------
/test/containers.ts:
--------------------------------------------------------------------------------
1 | import { GenericContainer, Wait } from 'testcontainers';
2 |
3 | export function createPiholeContainer({
4 | password,
5 | tag
6 | }: {
7 | password: string;
8 | tag: string;
9 | }): GenericContainer {
10 | return new GenericContainer(`pihole/pihole:${tag}`)
11 | .withEnvironment(
12 | tag == 'latest'
13 | ? {
14 | FTLCONF_webserver_api_password: password,
15 | FTLCONF_debug_api: 'true'
16 | }
17 | : {
18 | WEBPASSWORD: password
19 | }
20 | )
21 | .withExposedPorts(80)
22 | .withHealthCheck({
23 | test: ['CMD', 'curl', '-f', 'http://localhost/admin/'],
24 | interval: 10000,
25 | timeout: 10000,
26 | retries: 5
27 | })
28 | .withWaitStrategy(Wait.forHealthCheck());
29 | }
30 |
31 | export function createOrbitalSyncContainer(
32 | baseImage: OrbitalBaseImage = OrbitalBaseImage.Alpine
33 | ): Promise<GenericContainer> {
34 | return GenericContainer.fromDockerfile('./', 'Dockerfile')
35 | .withBuildArgs({
36 | BASE_IMAGE: baseImage
37 | })
38 | .build();
39 | }
40 |
41 | export enum OrbitalBaseImage {
42 | Alpine = 'node:18-alpine',
43 | Distroless = 'gcr.io/distroless/nodejs18:latest'
44 | }
45 |
--------------------------------------------------------------------------------
/test/docker.ts:
--------------------------------------------------------------------------------
1 | import { ContainerInspectInfo } from 'dockerode';
2 | import TestContainers from 'testcontainers';
3 |
4 | export async function inspectById(id: string): Promise<ContainerInspectInfo> {
5 | return await (await TestContainers.getContainerRuntimeClient()).container
6 | .getById(id)
7 | .inspect();
8 | }
9 |
--------------------------------------------------------------------------------
/test/e2e.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ "$(uname)" == "Darwin" ]; then
4 | export TESTCONTAINERS_RYUK_DISABLED=true
5 | fi
6 |
7 | NODE_OPTIONS=--experimental-vm-modules yarn jest --no-coverage test/e2e/
8 |
--------------------------------------------------------------------------------
/test/e2e/two-targets.test.ts:
--------------------------------------------------------------------------------
1 | import { Network } from 'testcontainers';
2 | import {
3 | OrbitalBaseImage,
4 | createOrbitalSyncContainer,
5 | createPiholeContainer
6 | } from '../containers';
7 | import { inspectById } from '../docker';
8 | import sleep from 'sleep-promise';
9 |
10 | describe('Orbital', () => {
11 | describe('Alpine', () => {
12 | it('should sync two v6 targets and exit with "zero" exit code', async () => {
13 | const network = await new Network().start();
14 | const [pihole1, pihole2, pihole3, orbitalImage] = await Promise.all([
15 | createPiholeContainer({ password: 'primary', tag: 'latest' })
16 | .withNetwork(network)
17 | .start(),
18 | createPiholeContainer({ password: 'secondary', tag: 'latest' })
19 | .withNetwork(network)
20 | .start(),
21 | createPiholeContainer({ password: 'tertiary', tag: 'latest' })
22 | .withNetwork(network)
23 | .start(),
24 | createOrbitalSyncContainer(OrbitalBaseImage.Alpine)
25 | ]);
26 |
27 | const orbital = await orbitalImage
28 | .withEnvironment({
29 | PRIMARY_HOST_BASE_URL: `http://${pihole1.getIpAddress(network.getName())}`,
30 | PRIMARY_HOST_PASSWORD: 'primary',
31 | SECONDARY_HOST_1_BASE_URL: `http://${pihole2.getIpAddress(network.getName())}`,
32 | SECONDARY_HOST_1_PASSWORD: 'secondary',
33 | SECONDARY_HOST_2_BASE_URL: `http://${pihole3.getIpAddress(network.getName())}`,
34 | SECONDARY_HOST_2_PASSWORD: 'tertiary',
35 | RUN_ONCE: 'true',
36 | VERBOSE: 'true'
37 | })
38 | .withLogConsumer((stream) => stream.on('data', (chunk) => console.log(chunk)))
39 | .withNetwork(network)
40 | .start();
41 |
42 | let orbitalStatus = await inspectById(orbital.getId());
43 | while (orbitalStatus.State.Running) {
44 | await sleep(500);
45 | orbitalStatus = await inspectById(orbital.getId());
46 | }
47 |
48 | await Promise.all([pihole1.stop(), pihole2.stop(), pihole3.stop()]);
49 | await network.stop();
50 | expect(orbitalStatus.State.ExitCode).toBe(0);
51 | }, 300000);
52 |
53 | it('should sync two v5 targets and exit with "zero" exit code', async () => {
54 | const network = await new Network().start();
55 | const [pihole1, pihole2, pihole3, orbitalImage] = await Promise.all([
56 | createPiholeContainer({ password: 'primary', tag: '2024.07.0' })
57 | .withNetwork(network)
58 | .start(),
59 | createPiholeContainer({ password: 'secondary', tag: '2024.07.0' })
60 | .withNetwork(network)
61 | .start(),
62 | createPiholeContainer({ password: 'tertiary', tag: '2024.07.0' })
63 | .withNetwork(network)
64 | .start(),
65 | createOrbitalSyncContainer(OrbitalBaseImage.Alpine)
66 | ]);
67 |
68 | const orbital = await orbitalImage
69 | .withEnvironment({
70 | PRIMARY_HOST_BASE_URL: `http://${pihole1.getIpAddress(network.getName())}`,
71 | PRIMARY_HOST_PASSWORD: 'primary',
72 | SECONDARY_HOST_1_BASE_URL: `http://${pihole2.getIpAddress(network.getName())}`,
73 | SECONDARY_HOST_1_PASSWORD: 'secondary',
74 | SECONDARY_HOST_2_BASE_URL: `http://${pihole3.getIpAddress(network.getName())}`,
75 | SECONDARY_HOST_2_PASSWORD: 'tertiary',
76 | RUN_ONCE: 'true',
77 | VERBOSE: 'true'
78 | })
79 | .withLogConsumer((stream) => stream.on('data', (chunk) => console.log(chunk)))
80 | .withNetwork(network)
81 | .start();
82 |
83 | let orbitalStatus = await inspectById(orbital.getId());
84 | while (orbitalStatus.State.Running) {
85 | await sleep(500);
86 | orbitalStatus = await inspectById(orbital.getId());
87 | }
88 |
89 | await Promise.all([pihole1.stop(), pihole2.stop(), pihole3.stop()]);
90 | await network.stop();
91 | expect(orbitalStatus.State.ExitCode).toBe(0);
92 | }, 300000);
93 | });
94 |
95 | describe('Distroless', () => {
96 | it('should sync two v6 targets and exit with "zero" exit code', async () => {
97 | const network = await new Network().start();
98 | const [pihole1, pihole2, pihole3, orbitalImage] = await Promise.all([
99 | createPiholeContainer({ password: 'primary', tag: 'latest' })
100 | .withNetwork(network)
101 | .start(),
102 | createPiholeContainer({ password: 'secondary', tag: 'latest' })
103 | .withNetwork(network)
104 | .start(),
105 | createPiholeContainer({ password: 'tertiary', tag: 'latest' })
106 | .withNetwork(network)
107 | .start(),
108 | createOrbitalSyncContainer(OrbitalBaseImage.Distroless)
109 | ]);
110 |
111 | const orbital = await orbitalImage
112 | .withEnvironment({
113 | PRIMARY_HOST_BASE_URL: `http://${pihole1.getIpAddress(network.getName())}`,
114 | PRIMARY_HOST_PASSWORD: 'primary',
115 | SECONDARY_HOST_1_BASE_URL: `http://${pihole2.getIpAddress(network.getName())}`,
116 | SECONDARY_HOST_1_PASSWORD: 'secondary',
117 | SECONDARY_HOST_2_BASE_URL: `http://${pihole3.getIpAddress(network.getName())}`,
118 | SECONDARY_HOST_2_PASSWORD: 'tertiary',
119 | RUN_ONCE: 'true',
120 | VERBOSE: 'true'
121 | })
122 | .withLogConsumer((stream) => stream.on('data', (chunk) => console.log(chunk)))
123 | .withNetwork(network)
124 | .start();
125 |
126 | let orbitalStatus = await inspectById(orbital.getId());
127 | while (orbitalStatus.State.Running) {
128 | await sleep(500);
129 | orbitalStatus = await inspectById(orbital.getId());
130 | }
131 |
132 | await Promise.all([pihole1.stop(), pihole2.stop(), pihole3.stop()]);
133 | await network.stop();
134 | expect(orbitalStatus.State.ExitCode).toBe(0);
135 | }, 300000);
136 |
137 | it('should sync two v5 targets and exit with "zero" exit code', async () => {
138 | const network = await new Network().start();
139 | const [pihole1, pihole2, pihole3, orbitalImage] = await Promise.all([
140 | createPiholeContainer({ password: 'primary', tag: '2024.07.0' })
141 | .withNetwork(network)
142 | .start(),
143 | createPiholeContainer({ password: 'secondary', tag: '2024.07.0' })
144 | .withNetwork(network)
145 | .start(),
146 | createPiholeContainer({ password: 'tertiary', tag: '2024.07.0' })
147 | .withNetwork(network)
148 | .start(),
149 | createOrbitalSyncContainer(OrbitalBaseImage.Distroless)
150 | ]);
151 |
152 | const orbital = await orbitalImage
153 | .withEnvironment({
154 | PRIMARY_HOST_BASE_URL: `http://${pihole1.getIpAddress(network.getName())}`,
155 | PRIMARY_HOST_PASSWORD: 'primary',
156 | SECONDARY_HOST_1_BASE_URL: `http://${pihole2.getIpAddress(network.getName())}`,
157 | SECONDARY_HOST_1_PASSWORD: 'secondary',
158 | SECONDARY_HOST_2_BASE_URL: `http://${pihole3.getIpAddress(network.getName())}`,
159 | SECONDARY_HOST_2_PASSWORD: 'tertiary',
160 | RUN_ONCE: 'true',
161 | VERBOSE: 'true'
162 | })
163 | .withLogConsumer((stream) => stream.on('data', (chunk) => console.log(chunk)))
164 | .withNetwork(network)
165 | .start();
166 |
167 | let orbitalStatus = await inspectById(orbital.getId());
168 | while (orbitalStatus.State.Running) {
169 | await sleep(500);
170 | orbitalStatus = await inspectById(orbital.getId());
171 | }
172 |
173 | await Promise.all([pihole1.stop(), pihole2.stop(), pihole3.stop()]);
174 | await network.stop();
175 | expect(orbitalStatus.State.ExitCode).toBe(0);
176 | }, 300000);
177 | });
178 | });
179 |
--------------------------------------------------------------------------------
/test/integration.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ "$(uname)" == "Darwin" ]; then
4 | export TESTCONTAINERS_RYUK_DISABLED=true
5 | fi
6 |
7 | NODE_OPTIONS=--experimental-vm-modules yarn jest --coverageThreshold='{}' test/integration/
8 |
--------------------------------------------------------------------------------
/test/integration/client/v5/index.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, expect } from '@jest/globals';
2 | import { StartedTestContainer } from 'testcontainers';
3 | import { Blob } from 'node-fetch';
4 | import { createPiholeContainer } from '../../../containers';
5 | import { ClientV5 } from '../../../../src/client/v5';
6 | import { Host } from '../../../../src/client/host';
7 | import { Config, ConfigInterface } from '../../../../src/config/index';
8 | import { Log } from '../../../../src/log';
9 |
10 | describe('Client', () => {
11 | describe('V5', () => {
12 | let piholeContainer: StartedTestContainer;
13 | let pihole: Host;
14 | let config: ConfigInterface;
15 |
16 | beforeAll(async () => {
17 | piholeContainer = await createPiholeContainer({
18 | password: 'mock_password',
19 | tag: '2024.07.0'
20 | }).start();
21 | pihole = new Host({
22 | baseUrl: `http://${piholeContainer.getHost()}:${piholeContainer.getMappedPort(80)}`,
23 | password: 'mock_password'
24 | });
25 | config = Config({
26 | primaryHost: {
27 | baseUrl: pihole.baseUrl,
28 | password: pihole.password
29 | },
30 | secondaryHosts: [
31 | {
32 | baseUrl: pihole.baseUrl,
33 | password: pihole.password
34 | }
35 | ]
36 | });
37 | }, 60000);
38 |
39 | afterAll(async () => {
40 | await piholeContainer.stop();
41 | });
42 |
43 | it('should connect, backup, and upload', async () => {
44 | const client = await ClientV5.create({
45 | host: pihole,
46 | options: config.sync.v5,
47 | log: new Log(true)
48 | });
49 |
50 | const backup = await client.downloadBackup();
51 | expect(backup).toBeInstanceOf(Blob);
52 |
53 | const upload = await client.uploadBackup(backup);
54 | expect(upload).toBe(true);
55 | });
56 | });
57 | });
58 |
--------------------------------------------------------------------------------
/test/integration/client/v6/index.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, expect } from '@jest/globals';
2 | import { StartedTestContainer } from 'testcontainers';
3 | import { Blob } from 'node-fetch';
4 | import { createPiholeContainer } from '../../../containers';
5 | import { ClientV6 } from '../../../../src/client/v6';
6 | import { Host } from '../../../../src/client/host';
7 | import { Config, ConfigInterface } from '../../../../src/config/index';
8 | import { Log } from '../../../../src/log';
9 |
10 | describe('Client', () => {
11 | describe('V6', () => {
12 | let piholeContainer: StartedTestContainer;
13 | let pihole: Host;
14 | let config: ConfigInterface;
15 |
16 | beforeAll(async () => {
17 | piholeContainer = await createPiholeContainer({
18 | password: 'mock_password',
19 | tag: 'latest'
20 | }).start();
21 | pihole = new Host({
22 | baseUrl: `http://${piholeContainer.getHost()}:${piholeContainer.getMappedPort(80)}`,
23 | password: 'mock_password'
24 | });
25 | config = Config({
26 | primaryHost: {
27 | baseUrl: pihole.baseUrl,
28 | password: pihole.password
29 | },
30 | secondaryHosts: [
31 | {
32 | baseUrl: pihole.baseUrl,
33 | password: pihole.password
34 | }
35 | ]
36 | });
37 | }, 60000);
38 |
39 | afterAll(async () => {
40 | await piholeContainer.stop();
41 | });
42 |
43 | it('should connect, backup, and upload', async () => {
44 | const client = await ClientV6.create({
45 | host: pihole,
46 | options: config.sync.v6,
47 | log: new Log(true)
48 | });
49 |
50 | const backup = await client.downloadBackup();
51 | expect(backup).toBeInstanceOf(Blob);
52 |
53 | const upload = await client.uploadBackup(backup);
54 | expect(upload).toBe(true);
55 | });
56 | });
57 | });
58 |
--------------------------------------------------------------------------------
/test/unit/client/host.test.ts:
--------------------------------------------------------------------------------
1 | import { Host } from '../../../src/client/host';
2 |
3 | describe('Client', () => {
4 | describe('Host', () => {
5 | test('should prepend / to path if not present', () => {
6 | const host = new Host({
7 | baseUrl: 'http://10.0.0.3',
8 | path: 'foobar',
9 | password: 'mock'
10 | });
11 |
12 | expect(host.path).toBe('/foobar');
13 | expect(host.fullUrl).toBe('http://10.0.0.3/foobar');
14 | });
15 |
16 | test('should remove trailing slash if present', () => {
17 | const host = new Host({
18 | baseUrl: 'http://10.0.0.3',
19 | path: '/foobar/',
20 | password: 'mock'
21 | });
22 |
23 | expect(host.path).toBe('/foobar');
24 | expect(host.fullUrl).toBe('http://10.0.0.3/foobar');
25 | });
26 |
27 | test('separates path and baseUrl if baseUrl has path', () => {
28 | const host = new Host({ baseUrl: 'http://10.0.0.3/foobar', password: 'mock' });
29 |
30 | expect(host.path).toBe('/foobar');
31 | expect(host.fullUrl).toBe('http://10.0.0.3/foobar');
32 | });
33 |
34 | test('should strip /admin from the end', async () => {
35 | const host = new Host({
36 | baseUrl: 'http://10.0.0.2',
37 | password: 'mypassword',
38 | path: '/admin'
39 | });
40 |
41 | expect(host.fullUrl).toBe('http://10.0.0.2');
42 | expect(host.baseUrl).toBe('http://10.0.0.2');
43 | expect(host.path).toBe('');
44 | });
45 |
46 | test('should strip /api from the end', async () => {
47 | const host = new Host({
48 | baseUrl: 'http://pi-iot.domain.net/bob/',
49 | password: 'mypassword',
50 | path: '/api'
51 | });
52 |
53 | expect(host.fullUrl).toBe('http://pi-iot.domain.net/bob');
54 | expect(host.baseUrl).toBe('http://pi-iot.domain.net');
55 | expect(host.path).toBe('/bob');
56 | });
57 | });
58 | });
59 |
--------------------------------------------------------------------------------
/test/unit/client/index.test.ts:
--------------------------------------------------------------------------------
1 | import nock from 'nock';
2 | import { ClientFactory } from '../../../src/client';
3 | import { Host } from '../../../src/client/host';
4 | import { Config, SyncOptionsV5, SyncOptionsV6, Version } from '../../../src/config/index';
5 | import { Log } from '../../../src/log';
6 | import { ClientV5 } from '../../../src/client/v5';
7 | import { ClientV6 } from '../../../src/client/v6';
8 |
9 | describe('Client', () => {
10 | const host = new Host({
11 | baseUrl: 'http://10.0.0.2',
12 | password: 'mypassword'
13 | });
14 | const config = Config({
15 | piHoleVersion: 'auto',
16 | primaryHost: { baseUrl: host.baseUrl, password: host.password },
17 | secondaryHosts: [{ baseUrl: host.baseUrl, password: host.password }]
18 | });
19 | const log = new Log(false);
20 |
21 | const createClient = async ({
22 | host,
23 | version,
24 | options,
25 | log
26 | }: {
27 | host: Host;
28 | version: Version;
29 | options: SyncOptionsV6 | SyncOptionsV5;
30 | log: Log;
31 | }) => {
32 | // V6 requests
33 | nock(host.baseUrl).get('/api/auth').reply(200);
34 | nock(host.baseUrl)
35 | .post('/api/auth')
36 | .reply(
37 | 200,
38 | '{"session":{"valid":true,"totp":true,"sid":"IEFZjjlRXX0FMaemtB8opQ=","csrf":"+Y5Qx4Qxa5XXYSzz8Nu7gw=","validity":1800,"message":"app-password correct"},"took":0.074608087539672852}'
39 | );
40 | // V5 requests
41 | nock(host.fullUrl).get('/admin/index.php?login').reply(200);
42 | nock(host.fullUrl)
43 | .post('/admin/index.php?login')
44 | .reply(
45 | 200,
46 | '<html><body><div id="token">abcdefgijklmnopqrstuvwxyzabcdefgijklmnopqrst</div></body></html>'
47 | );
48 |
49 | return await ClientFactory.createClient({ host, log, options, version });
50 | };
51 |
52 | beforeEach(() => {
53 | nock.disableNetConnect();
54 | });
55 |
56 | afterEach(() => {
57 | nock.cleanAll();
58 | nock.enableNetConnect();
59 | });
60 |
61 | describe('create', () => {
62 | test('should return client v5', async () => {
63 | await expect(
64 | createClient({ host, version: '5', log, options: config.sync.v5 })
65 | ).resolves.toBeInstanceOf(ClientV5);
66 | });
67 |
68 | test('should return client v6', async () => {
69 | await expect(
70 | createClient({ host, version: '6', log, options: config.sync.v6 })
71 | ).resolves.toBeInstanceOf(ClientV6);
72 | });
73 |
74 | test('should return client v5 from auto', async () => {
75 | const initialRequest = nock(host.fullUrl).get('/api/docs').reply(404);
76 |
77 | await expect(
78 | createClient({ host, version: 'auto', log, options: config.sync.v5 })
79 | ).resolves.toBeInstanceOf(ClientV5);
80 |
81 | initialRequest.done();
82 | });
83 |
84 | test('should return client v6 from auto', async () => {
85 | const initialRequest = nock(host.fullUrl).get('/api/docs').reply(200);
86 |
87 | await expect(
88 | createClient({ host, version: 'auto', log, options: config.sync.v6 })
89 | ).resolves.toBeInstanceOf(ClientV6);
90 |
91 | initialRequest.done();
92 | });
93 |
94 | test('should return client v6 from exception', async () => {
95 | await expect(
96 | createClient({ host, version: 'auto', log, options: config.sync.v6 })
97 | ).resolves.toBeInstanceOf(ClientV6);
98 | });
99 | });
100 | });
101 |
--------------------------------------------------------------------------------
/test/unit/client/v5/index.test.ts:
--------------------------------------------------------------------------------
1 | import nock from 'nock';
2 | import { Blob } from 'node-fetch';
3 | import { ClientV5 } from '../../../../src/client/v5';
4 | import { Host } from '../../../../src/client/host';
5 | import { Config } from '../../../../src/config/index';
6 | import { ErrorNotification } from '../../../../src/notify';
7 | import { Log } from '../../../../src/log';
8 |
9 | describe('Client', () => {
10 | describe('V5', () => {
11 | const host = new Host({
12 | baseUrl: 'http://10.0.0.2',
13 | password: 'mypassword',
14 | path: ''
15 | });
16 | const config = Config({
17 | primaryHost: { baseUrl: host.baseUrl, password: host.password },
18 | secondaryHosts: [{ baseUrl: host.baseUrl, password: host.password }]
19 | });
20 | const log = new Log(false);
21 |
22 | const createClient = async () => {
23 | nock(host.fullUrl).get('/admin/index.php?login').reply(200);
24 | nock(host.fullUrl)
25 | .post('/admin/index.php?login')
26 | .reply(
27 | 200,
28 | '<html><body><div id="token">abcdefgijklmnopqrstuvwxyzabcdefgijklmnopqrst</div></body></html>'
29 | );
30 |
31 | return {
32 | teleporter: nock(host.fullUrl),
33 | client: await ClientV5.create({ host, log, options: config.sync.v5 })
34 | };
35 | };
36 |
37 | beforeEach(() => {
38 | nock.disableNetConnect();
39 | });
40 |
41 | afterEach(() => {
42 | nock.cleanAll();
43 | nock.enableNetConnect();
44 | });
45 |
46 | describe('create', () => {
47 | test('should strip /admin from the end', async () => {
48 | const host = new Host({
49 | baseUrl: 'http://10.0.0.2',
50 | password: 'mypassword',
51 | path: '/admin'
52 | });
53 |
54 | expect(host.fullUrl).toBe('http://10.0.0.2');
55 |
56 | const host1 = new Host({
57 | baseUrl: 'http://10.0.0.2',
58 | password: 'mypassword',
59 | path: '/'
60 | });
61 |
62 | expect(host1.fullUrl).toBe('http://10.0.0.2');
63 |
64 | const host2 = new Host({
65 | baseUrl: 'http://10.0.0.2/admin/',
66 | password: 'mypassword',
67 | path: ''
68 | });
69 |
70 | expect(host2.fullUrl).toBe('http://10.0.0.2');
71 |
72 | const host3 = new Host({
73 | baseUrl: 'http://10.0.0.2/',
74 | password: 'mypassword',
75 | path: '/'
76 | });
77 |
78 | expect(host3.fullUrl).toBe('http://10.0.0.2');
79 | });
80 |
81 | test('should throw error if status code is not ok', async () => {
82 | const initialRequest = nock(host.fullUrl)
83 | .get('/admin/index.php?login')
84 | .reply(200);
85 | const loginRequest = nock(host.fullUrl).post('/admin/index.php?login').reply(500);
86 |
87 | const expectError = expect(
88 | ClientV5.create({ host, log, options: config.sync.v5 })
89 | ).rejects;
90 |
91 | await expectError.toBeInstanceOf(ErrorNotification);
92 | await expectError.toMatchObject({
93 | message:
94 | 'There was an error logging in to "http://10.0.0.2" - are you able to log in with the configured password?',
95 | verbose: {
96 | host: 'http://10.0.0.2',
97 | path: '',
98 | status: 500,
99 | responseBody: ''
100 | }
101 | });
102 | initialRequest.done();
103 | loginRequest.done();
104 | });
105 |
106 | test('should throw error if no token is present', async () => {
107 | const initialRequest = nock(host.fullUrl)
108 | .get('/admin/index.php?login')
109 | .reply(200);
110 | const loginRequest = nock(host.fullUrl).post('/admin/index.php?login').reply(200);
111 |
112 | const expectError = expect(
113 | ClientV5.create({ host, log, options: config.sync.v5 })
114 | ).rejects;
115 |
116 | await expectError.toBeInstanceOf(ErrorNotification);
117 | await expectError.toMatchObject({
118 | message:
119 | 'No token could be found while logging in to "http://10.0.0.2" - are you able to log in with the configured password?',
120 | verbose: {
121 | host: 'http://10.0.0.2',
122 | path: '',
123 | innerHtml: ''
124 | }
125 | });
126 | initialRequest.done();
127 | loginRequest.done();
128 | });
129 |
130 | test('should throw error if token is in incorrect format', async () => {
131 | const initialRequest = nock(host.fullUrl)
132 | .get('/admin/index.php?login')
133 | .reply(200);
134 | const loginRequest = nock(host.fullUrl)
135 | .post('/admin/index.php?login')
136 | .reply(200, '<html><body><div id="token">abcdef</div></body></html>');
137 |
138 | const expectError = expect(
139 | ClientV5.create({ host, log, options: config.sync.v5 })
140 | ).rejects;
141 |
142 | await expectError.toBeInstanceOf(ErrorNotification);
143 | await expectError.toMatchObject({
144 | message:
145 | 'A token was found but could not be validated while logging in to "http://10.0.0.2" - are you able to log in with the configured password?',
146 | verbose: {
147 | host: 'http://10.0.0.2',
148 | path: '',
149 | token: 'abcdef'
150 | }
151 | });
152 | initialRequest.done();
153 | loginRequest.done();
154 | });
155 |
156 | test('should return client', async () => {
157 | const initialRequest = nock(host.fullUrl)
158 | .get('/admin/index.php?login')
159 | .reply(200);
160 | const loginRequest = nock(host.fullUrl)
161 | .post('/admin/index.php?login')
162 | .reply(
163 | 200,
164 | '<html><body><div id="token">abcdefgijklmnopqrstuvwxyzabcdefgijklmnopqrst</div></body></html>'
165 | );
166 |
167 | await expect(
168 | ClientV5.create({ host, log, options: config.sync.v5 })
169 | ).resolves.toBeInstanceOf(ClientV5);
170 |
171 | initialRequest.done();
172 | loginRequest.done();
173 | });
174 |
175 | test('should return version 5 and Host info', async () => {
176 | const initialRequest = nock(host.fullUrl)
177 | .get('/admin/index.php?login')
178 | .reply(200);
179 | const loginRequest = nock(host.fullUrl)
180 | .post('/admin/index.php?login')
181 | .reply(
182 | 200,
183 | '<html><body><div id="token">abcdefgijklmnopqrstuvwxyzabcdefgijklmnopqrst</div></body></html>'
184 | );
185 |
186 | const v5Client = await ClientV5.create({ host, log, options: config.sync.v5 });
187 |
188 | expect(v5Client.getVersion()).toEqual(5);
189 | expect(v5Client.getHost()).toBe(host);
190 |
191 | initialRequest.done();
192 | loginRequest.done();
193 | });
194 | });
195 |
196 | describe('downloadBackup', () => {
197 | let client: ClientV5;
198 | let teleporter: nock.Scope;
199 |
200 | beforeEach(async () => {
201 | ({ client, teleporter } = await createClient());
202 | });
203 |
204 | afterEach(() => {
205 | teleporter.done();
206 | });
207 |
208 | test('should throw error if response is non-200', async () => {
209 | teleporter.post('/admin/scripts/pi-hole/php/teleporter.php').reply(500);
210 |
211 | const expectError = expect(client.downloadBackup()).rejects;
212 |
213 | await expectError.toBeInstanceOf(ErrorNotification);
214 | await expectError.toMatchObject({
215 | message: 'Failed to download backup from "http://10.0.0.2".',
216 | verbose: {
217 | host: 'http://10.0.0.2',
218 | path: '',
219 | status: 500,
220 | responseBody: ''
221 | }
222 | });
223 | });
224 |
225 | test('should throw error if content type is not gzip', async () => {
226 | teleporter
227 | .post('/admin/scripts/pi-hole/php/teleporter.php')
228 | .reply(200, undefined, { 'content-type': 'text/html' });
229 |
230 | const expectError = expect(client.downloadBackup()).rejects;
231 |
232 | await expectError.toBeInstanceOf(ErrorNotification);
233 | await expectError.toMatchObject({
234 | message: 'Failed to download backup from "http://10.0.0.2".',
235 | verbose: {
236 | host: 'http://10.0.0.2',
237 | path: '',
238 | status: 200,
239 | responseBody: ''
240 | }
241 | });
242 | });
243 |
244 | test('should return response data', async () => {
245 | let requestBody = '';
246 | teleporter
247 | .post(
248 | '/admin/scripts/pi-hole/php/teleporter.php',
249 | (body) => (requestBody = body)
250 | )
251 | .reply(200, undefined, { 'content-type': 'application/gzip' });
252 |
253 | const backup = await client.downloadBackup();
254 |
255 | expect(backup).toBeInstanceOf(Blob);
256 | expect(requestBody).toContain(
257 | 'name="token"\r\n\r\nabcdefgijklmnopqrstuvwxyzabcdefgijklmnopqrst'
258 | );
259 | expect(requestBody).toContain('name="whitelist"\r\n\r\ntrue');
260 | expect(requestBody).toContain('name="regex_whitelist"\r\n\r\ntrue');
261 | expect(requestBody).toContain('name="blacklist"\r\n\r\ntrue');
262 | expect(requestBody).toContain('name="regexlist"\r\n\r\ntrue');
263 | expect(requestBody).toContain('name="adlist"\r\n\r\ntrue');
264 | expect(requestBody).toContain('name="client"\r\n\r\ntrue');
265 | expect(requestBody).toContain('name="group"\r\n\r\ntrue');
266 | expect(requestBody).not.toContain('name="auditlog"');
267 | expect(requestBody).not.toContain('name="staticdhcpleases"');
268 | expect(requestBody).toContain('name="localdnsrecords"\r\n\r\ntrue');
269 | expect(requestBody).toContain('name="localcnamerecords"\r\n\r\ntrue');
270 | expect(requestBody).toContain('name="flushtables"\r\n\r\ntrue');
271 | expect(requestBody).not.toContain('name="action"\r\n\r\nin');
272 | expect(requestBody).not.toContain('name="zip_file"\r\n\r\nin');
273 | expect(requestBody.match(/Content-Disposition: form-data; name=/g)).toHaveLength(
274 | 11
275 | );
276 | });
277 | });
278 |
279 | describe('uploadBackup', () => {
280 | const backup = new Blob([]);
281 | let client: ClientV5;
282 | let teleporter: nock.Scope;
283 |
284 | beforeEach(async () => {
285 | ({ client, teleporter } = await createClient());
286 | });
287 |
288 | afterEach(() => {
289 | teleporter.done();
290 | });
291 |
292 | test('should return success if response is 200', async () => {
293 | teleporter
294 | .post('/admin/scripts/pi-hole/php/teleporter.php')
295 | .reply(
296 | 200,
297 | '\ndata: \n\ndata: [✓] TCP (IPv6)\ndata: \ndata: \n\ndata: OK'
298 | );
299 |
300 | const response = expect(client.uploadBackup(backup)).resolves;
301 |
302 | await response.toEqual(true);
303 | });
304 |
305 | test('should throw error if response is non-200', async () => {
306 | teleporter.post('/admin/scripts/pi-hole/php/teleporter.php').reply(500);
307 |
308 | const expectError = expect(client.uploadBackup(backup)).rejects;
309 |
310 | await expectError.toBeInstanceOf(ErrorNotification);
311 | await expectError.toMatchObject({
312 | message: 'Failed to upload backup to "http://10.0.0.2".',
313 | verbose: {
314 | host: 'http://10.0.0.2',
315 | path: '',
316 | status: 500,
317 | responseBody: ''
318 | }
319 | });
320 | });
321 |
322 | test('should throw error if response does not end with "OK" or "Done importing"', async () => {
323 | teleporter.post('/admin/scripts/pi-hole/php/teleporter.php').reply(200);
324 |
325 | const expectError = expect(client.uploadBackup(backup)).rejects;
326 |
327 | await expectError.toBeInstanceOf(ErrorNotification);
328 | await expectError.toMatchObject({
329 | message: 'Failed to upload backup to "http://10.0.0.2".',
330 | verbose: {
331 | host: 'http://10.0.0.2',
332 | path: '',
333 | status: 200,
334 | responseBody: ''
335 | }
336 | });
337 | });
338 | });
339 |
340 | describe('updateGravity', () => {
341 | let client: ClientV5;
342 | let teleporter: nock.Scope;
343 |
344 | beforeEach(async () => {
345 | ({ client, teleporter } = await createClient());
346 | });
347 |
348 | afterEach(() => {
349 | teleporter.done();
350 | });
351 |
352 | test('should upload backup and update gravity successfully', async () => {
353 | teleporter
354 | .get('/admin/scripts/pi-hole/php/gravity.sh.php', undefined)
355 | .reply(
356 | 200,
357 | '\ndata: \n\ndata: [✓] TCP (IPv6)\ndata: \ndata: \n\ndata: [✓] Pi-hole blocking is enabled\ndata: \n\ndata:'
358 | );
359 |
360 | const result = await client.updateGravity();
361 |
362 | expect(result).toStrictEqual(true);
363 | });
364 |
365 | test('should throw error if gravity update fails', async () => {
366 | teleporter
367 | .get('/admin/scripts/pi-hole/php/gravity.sh.php', undefined)
368 | .reply(200, '\ndata: \n\ndata: [✓] TCP (IPv6)\ndata: \ndata: \n\ndata:');
369 |
370 | const expectError = expect(client.updateGravity()).rejects;
371 |
372 | await expectError.toBeInstanceOf(ErrorNotification);
373 | await expectError.toMatchObject({
374 | message: 'Failed updating gravity on "http://10.0.0.2".',
375 | verbose: {
376 | host: 'http://10.0.0.2',
377 | path: '',
378 | status: 200,
379 | eventStream: '[✓] TCP (IPv6)'
380 | }
381 | });
382 | });
383 | });
384 | });
385 | });
386 |
--------------------------------------------------------------------------------
/test/unit/client/v6/index.test.ts:
--------------------------------------------------------------------------------
1 | import { jest } from '@jest/globals';
2 | import nock from 'nock';
3 | import { Blob } from 'node-fetch';
4 | import { ClientV6 } from '../../../../src/client/v6';
5 | import { Host } from '../../../../src/client/host';
6 | import { Config, ConfigInterface } from '../../../../src/config/index';
7 | import { ErrorNotification } from '../../../../src/notify';
8 | import { Log } from '../../../../src/log';
9 |
10 | describe('Client', () => {
11 | describe('V6', () => {
12 | const goodResponse =
13 | '{"session":{"valid":true,"totp":true,"sid":"IEFZjjlRXX0FMaemtB8opQ=","csrf":"+Y5Qx4Qxa5XXYSzz8Nu7gw=","validity":1800,"message":"app-password correct"},"took":0.074608087539672852}';
14 | const host = new Host({
15 | baseUrl: 'http://10.0.0.2',
16 | password: 'mypassword'
17 | });
18 | let config: ConfigInterface;
19 | const log = new Log(false);
20 |
21 | const createClient = async () => {
22 | nock(host.fullUrl).get('/api/auth').reply(200);
23 | nock(host.fullUrl).post('/api/auth').reply(200, goodResponse);
24 |
25 | return {
26 | teleporter: nock(host.fullUrl),
27 | client: await ClientV6.create({ host, log, options: config.sync.v6 })
28 | };
29 | };
30 |
31 | beforeEach(() => {
32 | nock.disableNetConnect();
33 |
34 | config = Config({
35 | primaryHost: { baseUrl: host.baseUrl, password: host.password },
36 | secondaryHosts: [{ baseUrl: host.baseUrl, password: host.password }]
37 | });
38 | });
39 |
40 | afterEach(() => {
41 | nock.cleanAll();
42 | nock.enableNetConnect();
43 | });
44 |
45 | describe('create', () => {
46 | test('should throw error if status code is not ok', async () => {
47 | const initialRequest = nock(host.fullUrl).get('/api/auth').reply(200);
48 | const loginRequest = nock(host.fullUrl).post('/api/auth').reply(500);
49 |
50 | const expectError = expect(
51 | ClientV6.create({ host, log, options: config.sync.v6 })
52 | ).rejects;
53 |
54 | await expectError.toBeInstanceOf(ErrorNotification);
55 | await expectError.toMatchObject({
56 | message:
57 | 'There was an error logging in to "http://10.0.0.2" - are you able to log in with the configured password?',
58 | verbose: {
59 | host: 'http://10.0.0.2',
60 | path: '/api/auth',
61 | status: 500,
62 | responseBody: ''
63 | }
64 | });
65 | initialRequest.done();
66 | loginRequest.done();
67 | });
68 |
69 | test('should return client', async () => {
70 | const initialRequest = nock(host.fullUrl).get('/api/auth').reply(200);
71 | const loginRequest = nock(host.fullUrl)
72 | .post('/api/auth')
73 | .reply(200, goodResponse);
74 |
75 | await expect(
76 | ClientV6.create({ host, log, options: config.sync.v6 })
77 | ).resolves.toBeInstanceOf(ClientV6);
78 |
79 | initialRequest.done();
80 | loginRequest.done();
81 | });
82 |
83 | test('should return version 6 and Host info', async () => {
84 | const initialRequest = nock(host.baseUrl).get('/api/auth').reply(200);
85 | const loginRequest = nock(host.baseUrl)
86 | .post('/api/auth')
87 | .reply(200, goodResponse);
88 |
89 | const v6Client = await ClientV6.create({ host, log, options: config.sync.v6 });
90 |
91 | expect(v6Client.getVersion()).toEqual(6);
92 | expect(v6Client.getHost()).toBe(host);
93 |
94 | initialRequest.done();
95 | loginRequest.done();
96 | });
97 | });
98 |
99 | describe('downloadBackup', () => {
100 | let client: ClientV6;
101 | let teleporter: nock.Scope;
102 |
103 | beforeEach(async () => {
104 | ({ client, teleporter } = await createClient());
105 | });
106 |
107 | afterEach(() => {
108 | teleporter.done();
109 | });
110 |
111 | test('should throw error if response is non-200', async () => {
112 | teleporter.get('/api/teleporter').reply(500);
113 |
114 | const expectError = expect(client.downloadBackup()).rejects;
115 |
116 | await expectError.toBeInstanceOf(ErrorNotification);
117 | await expectError.toMatchObject({
118 | message: 'Failed to download backup from "http://10.0.0.2".',
119 | verbose: {
120 | host: 'http://10.0.0.2',
121 | path: '/api/teleporter',
122 | status: 500,
123 | responseBody: ''
124 | }
125 | });
126 | });
127 |
128 | test('should return response data', async () => {
129 | teleporter.get('/api/teleporter').reply(200, undefined, {
130 | 'content-type': 'application/zip',
131 | 'content-disposition': 'attachment; filename="backup.zip"'
132 | });
133 |
134 | const backup = await client.downloadBackup();
135 |
136 | expect(backup).toBeInstanceOf(Blob);
137 | });
138 | });
139 |
140 | describe('uploadBackup', () => {
141 | const backup = new Blob([]);
142 | let client: ClientV6;
143 | let teleporter: nock.Scope;
144 |
145 | beforeEach(async () => {
146 | ({ client, teleporter } = await createClient());
147 | });
148 |
149 | afterEach(() => {
150 | teleporter.done();
151 | });
152 |
153 | test('should return success if response is 200', async () => {
154 | teleporter.post('/api/teleporter').reply(200);
155 |
156 | const response = expect(client.uploadBackup(backup)).resolves;
157 |
158 | await response.toEqual(true);
159 | });
160 |
161 | test('should throw error if response is non-200', async () => {
162 | teleporter.post('/api/teleporter').reply(500);
163 |
164 | const expectError = expect(client.uploadBackup(backup)).rejects;
165 |
166 | await expectError.toBeInstanceOf(ErrorNotification);
167 | await expectError.toMatchObject({
168 | message: 'Failed to upload backup to "http://10.0.0.2".',
169 | verbose: {
170 | host: 'http://10.0.0.2',
171 | path: '/api/teleporter',
172 | status: 500,
173 | responseBody: ''
174 | }
175 | });
176 | });
177 | });
178 |
179 | describe('updateGravity', () => {
180 | let client: ClientV6;
181 | let teleporter: nock.Scope;
182 |
183 | beforeEach(async () => {
184 | ({ client, teleporter } = await createClient());
185 | });
186 |
187 | afterEach(() => {
188 | teleporter.done();
189 | });
190 |
191 | test('should upload backup and update gravity successfully', async () => {
192 | teleporter
193 | .post('/api/action/gravity', undefined)
194 | .reply(200, '[✓] TCP (IPv6)\n[✓] Pi-hole blocking is enabled\n[✓] Done');
195 |
196 | const result = await client.updateGravity();
197 |
198 | expect(result).toStrictEqual(true);
199 | });
200 |
201 | test('should throw error if gravity update fails', async () => {
202 | teleporter.post('/api/action/gravity', undefined).reply(401, '');
203 |
204 | const expectError = expect(client.updateGravity()).rejects;
205 |
206 | await expectError.toBeInstanceOf(ErrorNotification);
207 | await expectError.toMatchObject({
208 | message: 'Failed updating gravity on "http://10.0.0.2".',
209 | verbose: {
210 | host: 'http://10.0.0.2',
211 | path: '/api/action/gravity',
212 | status: 401,
213 | eventStream: ''
214 | }
215 | });
216 | });
217 |
218 | test('should retry if fetch error is thrown', async () => {
219 | jest.useFakeTimers({ advanceTimers: true });
220 | teleporter
221 | .post('/api/action/gravity', undefined)
222 | .replyWithError({ code: 'ETIMEDOUT' })
223 | .post('/api/action/gravity', undefined)
224 | .reply(200, '[✓] TCP (IPv6)\n[✓] Pi-hole blocking is enabled\n[✓] Done');
225 |
226 | const result = client.updateGravity();
227 |
228 | expect(await result).toStrictEqual(true);
229 | });
230 |
231 | test('should retry if 502-504 is returned', async () => {
232 | for (const status of [502, 503, 504]) {
233 | jest.useFakeTimers({ advanceTimers: true });
234 | teleporter
235 | .post('/api/action/gravity', undefined)
236 | .reply(status, 'Bad Gateway')
237 | .post('/api/action/gravity', undefined)
238 | .reply(200, '[✓] TCP (IPv6)\n[✓] Pi-hole blocking is enabled\n[✓] Done');
239 |
240 | const result = client.updateGravity();
241 |
242 | expect(await result).toStrictEqual(true);
243 | }
244 | });
245 |
246 | test('should throw if all retries fail', async () => {
247 | jest.useFakeTimers({ advanceTimers: true });
248 | config.sync.v6.gravityUpdateRetryCount = 2;
249 | teleporter
250 | .post('/api/action/gravity', undefined)
251 | .replyWithError({ code: 'ETIMEDOUT' })
252 | .post('/api/action/gravity', undefined)
253 | .replyWithError({ code: 'ETIMEDOUT' });
254 |
255 | const expectError = expect(client.updateGravity()).rejects;
256 |
257 | await expectError.toBeInstanceOf(ErrorNotification);
258 | await expectError.toMatchObject({
259 | message: 'Exhausted 3 retries updating gravity on http://10.0.0.2.',
260 | verbose: {
261 | host: 'http://10.0.0.2',
262 | path: '/api/action/gravity'
263 | }
264 | });
265 | });
266 | });
267 | });
268 | });
269 |
--------------------------------------------------------------------------------
/test/unit/config/index.test.ts:
--------------------------------------------------------------------------------
1 | import { writeFile } from 'node:fs/promises';
2 | import { temporaryFile } from 'tempy';
3 | import { Config } from '../../../src/config/index';
4 |
5 | describe('Config', () => {
6 | const initialEnv = Object.assign({}, process.env);
7 |
8 | afterEach(() => {
9 | process.env = Object.assign({}, initialEnv);
10 | });
11 |
12 | it('should generate configuration', () => {
13 | process.env['PRIMARY_HOST_BASE_URL'] = 'http://localhost:3000';
14 | process.env['PRIMARY_HOST_PASSWORD'] = 'password';
15 |
16 | process.env['SECONDARY_HOSTS_1_BASE_URL'] = 'http://localhost:3001';
17 | process.env['SECONDARY_HOSTS_1_PASSWORD'] = 'password';
18 |
19 | const config = Config();
20 | expect(config).toEqual({
21 | piHoleVersion: 'auto',
22 | primaryHost: {
23 | baseUrl: 'http://localhost:3000',
24 | password: 'password'
25 | },
26 | secondaryHosts: [
27 | {
28 | baseUrl: 'http://localhost:3001',
29 | password: 'password'
30 | }
31 | ],
32 | sync: {
33 | v5: {
34 | adList: true,
35 | auditLog: false,
36 | blacklist: true,
37 | client: true,
38 | flushTables: true,
39 | group: true,
40 | localCnameRecords: true,
41 | localDnsRecords: true,
42 | regexList: true,
43 | regexWhitelist: true,
44 | staticDhcpLeases: false,
45 | whitelist: true
46 | },
47 | v6: {
48 | adlist: true,
49 | adlist_by_group: true,
50 | client: true,
51 | config: false,
52 | client_by_group: true,
53 | dhcp_leases: true,
54 | domainlist: true,
55 | domainlist_by_group: true,
56 | group: true,
57 | gravityUpdateRetryCount: 5
58 | }
59 | },
60 | notify: {
61 | exceptions: {
62 | honeybadgerApiKey: undefined,
63 | sentryDsn: undefined
64 | },
65 | onFailure: true,
66 | onSuccess: false,
67 | smtp: {
68 | enabled: false,
69 | from: undefined,
70 | host: undefined,
71 | password: undefined,
72 | port: undefined,
73 | tls: false,
74 | to: undefined,
75 | user: undefined
76 | }
77 | },
78 | intervalMinutes: 60,
79 | runOnce: false,
80 | updateGravity: true,
81 | verbose: false
82 | });
83 | });
84 |
85 | it('should read passwords from files', async () => {
86 | const passwordFile1 = temporaryFile();
87 | await writeFile(passwordFile1, 'password_from_file_1', 'utf-8');
88 | const passwordFile2 = temporaryFile();
89 | await writeFile(passwordFile2, 'password_from_file_2\n', 'utf-8');
90 |
91 | process.env['PRIMARY_HOST_BASE_URL'] = 'http://localhost:3000';
92 | process.env['PRIMARY_HOST_PASSWORD_FILE'] = passwordFile1;
93 |
94 | process.env['SECONDARY_HOSTS_1_BASE_URL'] = 'http://localhost:3001';
95 | process.env['SECONDARY_HOSTS_1_PASSWORD_FILE'] = passwordFile2;
96 |
97 | const config = Config();
98 | expect(config).toEqual({
99 | piHoleVersion: 'auto',
100 | primaryHost: {
101 | baseUrl: 'http://localhost:3000',
102 | password: 'password_from_file_1'
103 | },
104 | secondaryHosts: [
105 | {
106 | baseUrl: 'http://localhost:3001',
107 | password: 'password_from_file_2'
108 | }
109 | ],
110 | sync: {
111 | v5: {
112 | adList: true,
113 | auditLog: false,
114 | blacklist: true,
115 | client: true,
116 | flushTables: true,
117 | group: true,
118 | localCnameRecords: true,
119 | localDnsRecords: true,
120 | regexList: true,
121 | regexWhitelist: true,
122 | staticDhcpLeases: false,
123 | whitelist: true
124 | },
125 | v6: {
126 | adlist: true,
127 | adlist_by_group: true,
128 | client: true,
129 | config: false,
130 | client_by_group: true,
131 | dhcp_leases: true,
132 | domainlist: true,
133 | domainlist_by_group: true,
134 | group: true,
135 | gravityUpdateRetryCount: 5
136 | }
137 | },
138 | notify: {
139 | exceptions: {
140 | honeybadgerApiKey: undefined,
141 | sentryDsn: undefined
142 | },
143 | onFailure: true,
144 | onSuccess: false,
145 | smtp: {
146 | enabled: false,
147 | from: undefined,
148 | host: undefined,
149 | password: undefined,
150 | port: undefined,
151 | tls: false,
152 | to: undefined,
153 | user: undefined
154 | }
155 | },
156 | intervalMinutes: 60,
157 | runOnce: false,
158 | updateGravity: true,
159 | verbose: false
160 | });
161 | });
162 | });
163 |
--------------------------------------------------------------------------------
/test/unit/config/parse.test.ts:
--------------------------------------------------------------------------------
1 | import { asConst } from 'json-schema-to-ts';
2 | import { writeFile } from 'node:fs/promises';
3 | import { temporaryFile } from 'tempy';
4 | import { parseSchema } from '../../../src/config/parse';
5 |
6 | describe('Config', () => {
7 | describe('parseSchema', () => {
8 | const initialEnv = Object.assign({}, process.env);
9 |
10 | afterEach(() => {
11 | process.env = Object.assign({}, initialEnv);
12 | });
13 |
14 | describe('array', () => {
15 | test('should parse array of objects', () => {
16 | process.env['1_VAR_ONE'] = 'mock_value_1';
17 | process.env['1_VAR_TWO'] = 'mock_value_2';
18 | process.env['2_VAR_ONE'] = 'mock_value_3';
19 |
20 | // @ts-expect-error type instantiation is excessively deep and possibly infinite
21 | const parsed = parseSchema(
22 | asConst({
23 | type: 'array',
24 | items: {
25 | type: 'object',
26 | properties: {
27 | varOne: {
28 | type: 'string'
29 | },
30 | varTwo: {
31 | type: 'string'
32 | }
33 | },
34 | required: ['varOne']
35 | }
36 | })
37 | );
38 |
39 | expect(parsed).toEqual([
40 | { varOne: 'mock_value_1', varTwo: 'mock_value_2' },
41 | { varOne: 'mock_value_3' }
42 | ]);
43 | });
44 |
45 | test('should exclude array items with missing required properties', () => {
46 | process.env['1_VAR_ONE'] = 'mock_value_1';
47 | process.env['1_VAR_TWO'] = 'mock_value_2';
48 | process.env['2_VAR_TWO'] = 'mock_value_3';
49 | process.env['3_VAR_ONE'] = 'mock_value_4';
50 |
51 | const parsed = parseSchema(
52 | asConst({
53 | type: 'array',
54 | items: {
55 | type: 'object',
56 | properties: {
57 | varOne: {
58 | type: 'string'
59 | },
60 | varTwo: {
61 | type: 'string'
62 | }
63 | },
64 | required: ['varOne']
65 | }
66 | })
67 | );
68 |
69 | expect(parsed).toEqual([{ varOne: 'mock_value_1', varTwo: 'mock_value_2' }]);
70 | });
71 |
72 | test('should not error when no items contain required properties and minItems is not set', () => {
73 | process.env['1_VAR_TWO'] = 'mock_value_1';
74 | process.env['2_VAR_TWO'] = 'mock_value_2';
75 |
76 | const parsed = parseSchema(
77 | asConst({
78 | type: 'array',
79 | items: {
80 | type: 'object',
81 | properties: {
82 | varOne: {
83 | type: 'string'
84 | },
85 | varTwo: {
86 | type: 'string'
87 | }
88 | },
89 | required: ['varOne']
90 | }
91 | })
92 | );
93 |
94 | expect(parsed).toEqual([]);
95 | });
96 |
97 | test('should throw an error if items is not defined', () => {
98 | expect(() =>
99 | parseSchema(
100 | asConst({
101 | type: 'array'
102 | })
103 | )
104 | ).toThrow('Undefined array items for root');
105 | });
106 |
107 | test('should template custom env vars', () => {
108 | process.env['VAR_1'] = 'mock_value_1';
109 | process.env['2_VAR'] = 'mock_value_2';
110 | process.env['VAR_2'] = 'mock_value_3';
111 |
112 | const parsed = parseSchema(
113 | asConst({
114 | type: 'array',
115 | items: {
116 | type: 'object',
117 | properties: {
118 | var: {
119 | type: 'string',
120 | envVar: 'VAR_{{i}}'
121 | }
122 | }
123 | }
124 | })
125 | );
126 |
127 | expect(parsed).toEqual([{ var: 'mock_value_1' }, { var: 'mock_value_2' }]);
128 | });
129 |
130 | test('should ignore objects with no properties', () => {
131 | const parsed = parseSchema(
132 | asConst({
133 | type: 'array',
134 | items: {
135 | type: 'object',
136 | properties: {
137 | var: {
138 | type: 'string'
139 | }
140 | }
141 | }
142 | })
143 | );
144 |
145 | expect(parsed).toEqual([]);
146 | });
147 |
148 | test('should not error if below minItems but no required values', () => {
149 | const parsed = parseSchema(
150 | asConst({
151 | type: 'array',
152 | items: {
153 | type: 'object',
154 | properties: {
155 | varOne: {
156 | type: 'string'
157 | }
158 | }
159 | },
160 | minItems: 1
161 | })
162 | );
163 |
164 | expect(parsed).toEqual([]);
165 | });
166 |
167 | test('should error if required properties are missing when below minItems', () => {
168 | process.env['1_VAR_TWO'] = 'mock_value_1';
169 | process.env['2_VAR_TWO'] = 'mock_value_2';
170 |
171 | expect(() =>
172 | parseSchema(
173 | asConst({
174 | type: 'array',
175 | items: {
176 | type: 'object',
177 | properties: {
178 | varOne: {
179 | type: 'string'
180 | },
181 | varTwo: {
182 | type: 'string'
183 | }
184 | },
185 | required: ['varOne']
186 | },
187 | minItems: 1
188 | })
189 | )
190 | ).toThrow('Missing required properties for "1": varOne');
191 | });
192 |
193 | test('should accept overrides', () => {
194 | process.env['1_VAR_ONE'] = 'mock_value_1';
195 | process.env['1_VAR_TWO'] = 'mock_value_2';
196 | process.env['2_VAR_ONE'] = 'mock_value_3';
197 | process.env['2_VAR_TWO'] = 'mock_value_4';
198 |
199 | const parsed = parseSchema(
200 | asConst({
201 | type: 'array',
202 | items: {
203 | type: 'object',
204 | properties: {
205 | varOne: {
206 | type: 'string'
207 | },
208 | varTwo: {
209 | type: 'string'
210 | }
211 | },
212 | required: ['varOne']
213 | }
214 | }),
215 | // eslint-disable-next-line @typescript-eslint/no-explicit-any
216 | { overrides: [{ varTwo: 'override_value' }] as any }
217 | );
218 |
219 | expect(parsed).toEqual([
220 | { varOne: 'mock_value_1', varTwo: 'override_value' },
221 | { varOne: 'mock_value_3', varTwo: 'mock_value_4' }
222 | ]);
223 | });
224 | });
225 |
226 | describe('object', () => {
227 | test('should parse object', () => {
228 | process.env['VAR_ONE'] = 'mock_value_1';
229 | process.env['VAR_TWO'] = 'mock_value_2';
230 |
231 | const parsed = parseSchema(
232 | asConst({
233 | type: 'object',
234 | properties: {
235 | varOne: {
236 | type: 'string'
237 | },
238 | varTwo: {
239 | type: 'string'
240 | }
241 | },
242 | required: ['varOne']
243 | })
244 | );
245 |
246 | expect(parsed).toEqual({ varOne: 'mock_value_1', varTwo: 'mock_value_2' });
247 | });
248 |
249 | test('should throw error if required properties are missing', () => {
250 | process.env['VAR_TWO'] = 'mock_value_2';
251 |
252 | expect(() =>
253 | parseSchema(
254 | asConst({
255 | type: 'object',
256 | properties: {
257 | varOne: {
258 | type: 'string'
259 | },
260 | varTwo: {
261 | type: 'string'
262 | }
263 | },
264 | required: ['varOne']
265 | })
266 | )
267 | ).toThrow('Missing required properties for root: varOne');
268 | });
269 |
270 | test('should not throw an error if required properties have defaults', () => {
271 | process.env['VAR_TWO'] = 'mock_value_2';
272 |
273 | const parsed = parseSchema(
274 | asConst({
275 | type: 'object',
276 | properties: {
277 | varOne: {
278 | type: 'string',
279 | default: 'default_value'
280 | },
281 | varTwo: {
282 | type: 'string'
283 | }
284 | },
285 | required: ['varOne']
286 | })
287 | );
288 |
289 | expect(parsed).toEqual({ varOne: 'default_value', varTwo: 'mock_value_2' });
290 | });
291 |
292 | test('should allow objects to require objects', () => {
293 | process.env['VAR_ONE_VAR'] = 'mock_value_1';
294 | process.env['VAR_TWO'] = 'mock_value_2';
295 |
296 | const parsed = parseSchema(
297 | asConst({
298 | type: 'object',
299 | properties: {
300 | varOne: {
301 | type: 'object',
302 | properties: {
303 | var: {
304 | type: 'string'
305 | }
306 | },
307 | required: ['var']
308 | },
309 | varTwo: {
310 | type: 'string'
311 | }
312 | },
313 | required: ['varOne']
314 | })
315 | );
316 |
317 | expect(parsed).toEqual({
318 | varOne: { var: 'mock_value_1' },
319 | varTwo: 'mock_value_2'
320 | });
321 | });
322 |
323 | test('should throw error if properties are not defined', () => {
324 | expect(() =>
325 | parseSchema(
326 | asConst({
327 | type: 'object'
328 | })
329 | )
330 | ).toThrow('Undefined object properties for root');
331 | });
332 | });
333 |
334 | describe('string', () => {
335 | test('should read config values from a file', async () => {
336 | const file = temporaryFile();
337 | await writeFile(file, 'mock_value_from_file', 'utf-8');
338 |
339 | process.env['VAR_ONE_FILE'] = file;
340 | process.env['VAR_ONE'] = 'mock_value_1';
341 | process.env['VAR_TWO'] = 'mock_value_2';
342 |
343 | const parsed = parseSchema(
344 | asConst({
345 | type: 'object',
346 | properties: {
347 | varOne: {
348 | type: 'string'
349 | },
350 | varTwo: {
351 | type: 'string'
352 | }
353 | },
354 | required: ['varOne']
355 | })
356 | );
357 |
358 | expect(parsed).toEqual({
359 | varOne: 'mock_value_from_file',
360 | varTwo: 'mock_value_2'
361 | });
362 | });
363 | });
364 |
365 | describe('boolean', () => {
366 | test('should parse boolean', () => {
367 | process.env['VAR'] = 'true';
368 |
369 | const parsed = parseSchema(
370 | asConst({
371 | type: 'object',
372 | properties: {
373 | var: {
374 | type: 'boolean'
375 | }
376 | }
377 | })
378 | );
379 |
380 | expect(parsed).toEqual({ var: true });
381 | });
382 |
383 | test('should not throw error if required boolean is false', () => {
384 | process.env['VAR'] = 'false';
385 |
386 | const parsed = parseSchema(
387 | asConst({
388 | type: 'object',
389 | properties: {
390 | var: {
391 | type: 'boolean'
392 | }
393 | },
394 | required: ['var']
395 | })
396 | );
397 |
398 | expect(parsed).toEqual({ var: false });
399 | });
400 |
401 | test('should throw error if boolean is invalid', () => {
402 | process.env['VAR'] = 'invalid';
403 |
404 | expect(() =>
405 | parseSchema(
406 | asConst({
407 | type: 'object',
408 | properties: {
409 | var: {
410 | type: 'boolean'
411 | }
412 | }
413 | })
414 | )
415 | ).toThrow("Invalid boolean for \"var\": expected 'true' or 'false'");
416 | });
417 | });
418 |
419 | describe('number', () => {
420 | test('should parse number', () => {
421 | process.env['VAR'] = '1';
422 |
423 | const parsed = parseSchema(
424 | asConst({
425 | type: 'object',
426 | properties: {
427 | var: {
428 | type: 'number'
429 | }
430 | }
431 | })
432 | );
433 |
434 | expect(parsed).toEqual({ var: 1 });
435 | });
436 |
437 | test('should not throw error if required number is 0', () => {
438 | process.env['VAR'] = '0';
439 |
440 | const parsed = parseSchema(
441 | asConst({
442 | type: 'object',
443 | properties: {
444 | var: {
445 | type: 'number'
446 | }
447 | },
448 | required: ['var']
449 | })
450 | );
451 |
452 | expect(parsed).toEqual({ var: 0 });
453 | });
454 |
455 | test('should throw error if number is invalid', () => {
456 | process.env['VAR'] = 'invalid';
457 |
458 | expect(() =>
459 | parseSchema(
460 | asConst({
461 | type: 'object',
462 | properties: {
463 | var: {
464 | type: 'number'
465 | }
466 | }
467 | })
468 | )
469 | ).toThrow('Invalid number for "var": expected a number');
470 | });
471 | });
472 |
473 | test('throws if schema is not an object', () => {
474 | // @ts-expect-error testing invalid input
475 | expect(() => parseSchema(false)).toThrow('Invalid schema for root');
476 | });
477 | });
478 | });
479 |
--------------------------------------------------------------------------------
/test/unit/index.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, expect, jest, test } from '@jest/globals';
2 | import nock from 'nock';
3 | import { Sync } from '../../src/sync';
4 |
5 | describe('entrypoint', () => {
6 | const initialEnv = Object.assign({}, process.env);
7 |
8 | beforeEach(() => {
9 | nock.disableNetConnect();
10 | process.env['PRIMARY_HOST_BASE_URL'] = 'http://localhost:3000';
11 | process.env['PRIMARY_HOST_PASSWORD'] = 'password';
12 | process.env['SECONDARY_HOSTS_1_BASE_URL'] = 'http://localhost:3001';
13 | process.env['SECONDARY_HOSTS_1_PASSWORD'] = 'password';
14 | process.env['RUN_ONCE'] = 'true';
15 | });
16 |
17 | afterEach(() => {
18 | jest.resetModules();
19 | process.env = Object.assign({}, initialEnv);
20 | nock.cleanAll();
21 | nock.enableNetConnect();
22 | });
23 |
24 | test('should perform sync', async () => {
25 | const sync = jest.spyOn(Sync, 'perform').mockImplementation(() => Promise.resolve());
26 |
27 | await import('../../src/index');
28 |
29 | expect(sync).toHaveBeenCalledTimes(1);
30 | });
31 | });
32 |
--------------------------------------------------------------------------------
/test/unit/log.test.ts:
--------------------------------------------------------------------------------
1 | import { jest } from '@jest/globals';
2 | import chalk from 'chalk';
3 | import { Log } from '../../src/log';
4 |
5 | describe('Log', () => {
6 | beforeEach(() => {
7 | jest.useFakeTimers({ now: new Date(2022, 7, 27, 8, 17, 31) });
8 | });
9 |
10 | afterEach(() => {
11 | jest.resetModules();
12 | jest.restoreAllMocks();
13 | });
14 |
15 | afterAll(() => {
16 | jest.useRealTimers();
17 | });
18 |
19 | describe('info', () => {
20 | test('should log with dimmed date', () => {
21 | const log = new Log(false);
22 | const consoleLog = jest.spyOn(console, 'log');
23 |
24 | log.info('Hello world');
25 |
26 | expect(consoleLog).toHaveBeenCalledTimes(1);
27 | expect(consoleLog).toHaveBeenCalledWith(
28 | `${chalk.dim('8/27/2022, 8:17:31 AM')}: Hello world`
29 | );
30 | });
31 |
32 | test('should log stringified', () => {
33 | const log = new Log(false);
34 | const consoleLog = jest.spyOn(console, 'log');
35 |
36 | log.info({ foo: 'bar' });
37 |
38 | expect(consoleLog).toHaveBeenCalledTimes(1);
39 | expect(consoleLog).toHaveBeenCalledWith(
40 | // eslint-disable-next-line no-useless-escape
41 | `${chalk.dim('8/27/2022, 8:17:31 AM')}: {\"foo\":\"bar\"}`
42 | );
43 | });
44 | });
45 |
46 | describe('verbose', () => {
47 | test('should not log if not verboseMode', () => {
48 | const log = new Log(false);
49 | const logInfo = jest.spyOn(log, 'info');
50 |
51 | log.verbose('Hello world');
52 |
53 | expect(logInfo).not.toHaveBeenCalled();
54 | });
55 |
56 | test('should not log if empty', () => {
57 | const log = new Log(true);
58 | const logInfo = jest.spyOn(log, 'info');
59 |
60 | log.verbose(undefined);
61 |
62 | expect(logInfo).not.toHaveBeenCalled();
63 | });
64 |
65 | test('should log if verboseMode', () => {
66 | const log = new Log(true);
67 | const logInfo = jest.spyOn(log, 'info');
68 |
69 | log.verbose('Hello world');
70 |
71 | expect(logInfo).toHaveBeenCalledTimes(1);
72 | expect(logInfo).toHaveBeenCalledWith('Hello world');
73 | });
74 | });
75 |
76 | describe('logError', () => {
77 | test('should log with dimmed date', () => {
78 | const log = new Log(true);
79 | const consoleError = jest.spyOn(console, 'error');
80 |
81 | log.error('Hello world');
82 |
83 | expect(consoleError).toHaveBeenCalledTimes(1);
84 | expect(consoleError).toHaveBeenCalledWith(
85 | `${chalk.dim('8/27/2022, 8:17:31 AM')}: ${chalk.red('Hello world')}`
86 | );
87 | });
88 | });
89 | });
90 |
--------------------------------------------------------------------------------
/test/unit/notify.test.ts:
--------------------------------------------------------------------------------
1 | import Honeybadger from '@honeybadger-io/js';
2 | import Sentry from '@sentry/node';
3 | import { jest } from '@jest/globals';
4 | import nock from 'nock';
5 | import { FetchError } from 'node-fetch';
6 | import nodemailer from 'nodemailer';
7 | import type Mail from 'nodemailer/lib/mailer';
8 | import { Log } from '../../src/log';
9 | import { ErrorNotification, Notify } from '../../src/notify';
10 | import { Config, ConfigInterface } from '../../src/config/index';
11 |
12 | describe('Notify', () => {
13 | let config: ConfigInterface;
14 |
15 | let log: Log;
16 | let logInfo: jest.SpiedFunction<typeof log.info>;
17 | let logError: jest.SpiedFunction<typeof log.error>;
18 | let logVerbose: jest.SpiedFunction<typeof log.verbose>;
19 | const sendMail = jest.fn();
20 | const createTransport = jest
21 | .spyOn(nodemailer, 'createTransport')
22 | .mockReturnValue({ sendMail } as unknown as Mail);
23 | const processExit = jest
24 | .spyOn(process, 'exit')
25 | .mockImplementation(jest.fn());
26 | let notifyOfFailure: ReturnType<typeof jest.spyOn>;
27 | let verboseMode: ReturnType<typeof jest.spyOn>;
28 |
29 | const smtpConfig: ConfigInterface['notify']['smtp'] = {
30 | enabled: true,
31 | host: 'notify.example.com',
32 | port: 587,
33 | user: 'user@example.com',
34 | from: 'from@example.com',
35 | to: 'to@example.com',
36 | tls: false,
37 | password: 'pass'
38 | };
39 |
40 | beforeEach(() => {
41 | config = Config({
42 | primaryHost: {
43 | baseUrl: 'http://10.0.0.2',
44 | password: 'password'
45 | },
46 | secondaryHosts: [
47 | {
48 | baseUrl: 'http://10.0.0.3',
49 | password: 'password2'
50 | }
51 | ],
52 | verbose: false,
53 | notify: {
54 | onSuccess: true,
55 | onFailure: true,
56 | smtp: { enabled: false },
57 | exceptions: {
58 | honeybadgerApiKey: undefined,
59 | sentryDsn: undefined
60 | }
61 | }
62 | });
63 |
64 | log = new Log(false);
65 | logInfo = jest.spyOn(log, 'info');
66 | logError = jest.spyOn(log, 'error');
67 | logVerbose = jest.spyOn(log, 'verbose');
68 |
69 | nock.disableNetConnect();
70 | logVerbose.mockClear();
71 | sendMail.mockClear();
72 | processExit.mockClear();
73 | createTransport.mockClear();
74 | notifyOfFailure?.mockRestore();
75 | verboseMode?.mockRestore();
76 | });
77 |
78 | afterEach(() => {
79 | nock.cleanAll();
80 | nock.enableNetConnect();
81 | });
82 |
83 | afterAll(() => {
84 | expect(createTransport).toHaveBeenCalledTimes(1);
85 | expect(createTransport).toHaveBeenCalledWith({
86 | host: 'notify.example.com',
87 | port: 587,
88 | secure: false,
89 | auth: {
90 | user: 'user@example.com',
91 | pass: 'pass'
92 | }
93 | });
94 | });
95 |
96 | describe('ofSuccess', () => {
97 | test('should log success', async () => {
98 | const notify = new Notify(config, log);
99 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
100 |
101 | await notify.ofSuccess({
102 | message: '3/3 hosts synced.',
103 | verbose: 'Example verbose context'
104 | });
105 |
106 | expect(logInfo).toHaveBeenCalledTimes(1);
107 | expect(logInfo).toHaveBeenCalledWith('✔️ Success: 3/3 hosts synced.');
108 | expect(notifyOfFailure).not.toHaveBeenCalled();
109 | expect(processExit).not.toHaveBeenCalled();
110 | });
111 |
112 | test('should log verbose context', async () => {
113 | log.verboseMode = true;
114 | const notify = new Notify(config, log);
115 |
116 | await notify.ofSuccess({
117 | message: '3/3 hosts synced.',
118 | verbose: 'Example verbose context'
119 | });
120 |
121 | expect(logInfo).toHaveBeenCalledTimes(2);
122 | expect(logInfo).toHaveBeenCalledWith('✔️ Success: 3/3 hosts synced.');
123 | expect(logInfo).toHaveBeenCalledWith('Example verbose context');
124 | });
125 |
126 | test('should notify as failure if errors present', async () => {
127 | const notify = new Notify(config, log);
128 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
129 | notify.queueError({ message: 'Example error' });
130 |
131 | await notify.ofSuccess({
132 | message: '3/3 hosts synced.',
133 | verbose: 'Example verbose context'
134 | });
135 |
136 | expect(logInfo).not.toHaveBeenCalled();
137 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
138 | expect(notifyOfFailure).toHaveBeenCalledWith({
139 | message:
140 | 'Sync succeeded, but there were some unexpected errors. 3/3 hosts synced.'
141 | });
142 | expect(processExit).not.toHaveBeenCalled();
143 | });
144 |
145 | test('should dispatch email', async () => {
146 | const notify = new Notify(config, log);
147 | config.notify.onSuccess = true;
148 | config.notify.smtp = smtpConfig;
149 | sendMail.mockReturnValue(Promise.resolve());
150 |
151 | await notify.ofSuccess({
152 | message: '3/3 hosts synced.',
153 | verbose: 'Example verbose context'
154 | });
155 |
156 | expect(sendMail).toHaveBeenCalledTimes(1);
157 | expect(sendMail).toHaveBeenCalledWith({
158 | from: '"Orbital Sync" ',
159 | to: 'to@example.com',
160 | subject: 'Orbital Sync: ✔️ Success',
161 | text: 'Orbital Sync\n✔️ Success\n\n3/3 hosts synced.',
162 | html: '<p><h2>Orbital Sync</h2>✔️ Success</p><p>3/3 hosts synced.</p>'
163 | });
164 | expect(processExit).not.toHaveBeenCalled();
165 | });
166 | });
167 |
168 | describe('ofFailure', () => {
169 | test('should log error', async () => {
170 | const notify = new Notify(config, log);
171 | await notify.ofFailure({
172 | message: 'Example failure message',
173 | verbose: 'Example verbose context'
174 | });
175 |
176 | expect(logError).toHaveBeenCalledTimes(1);
177 | expect(logError).toHaveBeenCalledWith('⚠ Failure: Example failure message');
178 | expect(processExit).not.toHaveBeenCalled();
179 | });
180 |
181 | test('should log verbose context', async () => {
182 | const notify = new Notify(config, log);
183 | config.verbose = true;
184 |
185 | await notify.ofFailure({
186 | message: 'Example failure message',
187 | verbose: 'Example verbose context'
188 | });
189 |
190 | expect(logError).toHaveBeenCalledTimes(1);
191 | expect(logError).toHaveBeenCalledWith('⚠ Failure: Example failure message');
192 | expect(logVerbose).toHaveBeenCalledTimes(1);
193 | expect(logVerbose).toHaveBeenCalledWith('Example verbose context');
194 | });
195 |
196 | test('should notify and exit', async () => {
197 | const notify = new Notify(config, log);
198 | await notify.ofFailure({
199 | message: 'Example catastrophic failure',
200 | exit: true
201 | });
202 |
203 | expect(processExit).toHaveBeenCalledTimes(1);
204 | expect(processExit).toHaveBeenCalledWith(1);
205 | });
206 |
207 | test('should dispatch email', async () => {
208 | const notify = new Notify(config, log);
209 | config.notify.smtp = smtpConfig;
210 | sendMail.mockReturnValue(Promise.resolve());
211 |
212 | await notify.ofFailure({
213 | message: 'Example failure message',
214 | verbose: 'Example verbose context'
215 | });
216 |
217 | expect(sendMail).toHaveBeenCalledTimes(1);
218 | expect(sendMail).toHaveBeenCalledWith({
219 | from: '"Orbital Sync" ',
220 | to: 'to@example.com',
221 | subject: 'Orbital Sync: ⚠ Failed',
222 | text: 'Orbital Sync\n⚠ Failed\n\nExample failure message',
223 | html: '<p><h2>Orbital Sync</h2>⚠ Failed</p><p>Example failure message</p>'
224 | });
225 | expect(processExit).not.toHaveBeenCalled();
226 | });
227 | });
228 |
229 | describe('ofThrow', () => {
230 | test('should notify of unexpected error with a prepended message', async () => {
231 | const notify = new Notify(config, log);
232 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
233 |
234 | await notify.ofThrow(new Error('Example thrown error'));
235 |
236 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
237 | expect(notifyOfFailure).toHaveBeenCalledWith({
238 | message: 'An unexpected error was thrown:\n- Error: Example thrown error'
239 | });
240 | });
241 |
242 | test('should Notify on throw', async () => {
243 | const notify = new Notify(config, log);
244 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
245 |
246 | await notify.ofThrow(
247 | new ErrorNotification({
248 | message: 'Example thrown error',
249 | exit: true,
250 | sendNotification: false
251 | })
252 | );
253 |
254 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
255 | expect(notifyOfFailure).toHaveBeenCalledWith(
256 | expect.objectContaining({
257 | message: 'Example thrown error',
258 | exit: true,
259 | sendNotification: false
260 | })
261 | );
262 | });
263 |
264 | test('should Notify on throw of connection refused FetchError', async () => {
265 | const notify = new Notify(config, log);
266 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
267 |
268 | await notify.ofThrow(
269 | new FetchError(
270 | 'request to http://10.0.0.3/admin/index.php?login failed, reason: connect ECONNREFUSED 10.0.0.2:443',
271 | 'system',
272 | { code: 'ECONNREFUSED' }
273 | )
274 | );
275 |
276 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
277 | expect(notifyOfFailure).toHaveBeenCalledWith(
278 | expect.objectContaining({
279 | message: 'The host "http://10.0.0.3" refused to connect. Is it down?',
280 | verbose:
281 | 'request to http://10.0.0.3/admin/index.php?login failed, reason: connect ECONNREFUSED 10.0.0.2:443'
282 | })
283 | );
284 | });
285 |
286 | test('should send unexpected error to Honeybadger if configured', async () => {
287 | const notify = new Notify(config, log);
288 | config.notify.exceptions!.honeybadgerApiKey = 'foobar';
289 | const honeybadgerNotify = jest
290 | .spyOn(Honeybadger, 'notify')
291 | .mockImplementation(jest.fn());
292 | const honeybadgerConfigure = jest.spyOn(Honeybadger, 'configure').mockReturnValue({
293 | notify: honeybadgerNotify
294 | } as unknown as ReturnType<typeof Honeybadger.configure>);
295 | const mockError = new FetchError('Connection error', 'system', {
296 | code: 'ECONNRESET'
297 | });
298 |
299 | await notify.ofThrow(mockError);
300 | await notify.ofThrow('Example thrown string');
301 |
302 | expect(honeybadgerNotify).toHaveBeenCalledTimes(2);
303 | expect(honeybadgerNotify).toHaveBeenCalledWith(mockError);
304 | expect(honeybadgerNotify).toHaveBeenCalledWith('Example thrown string');
305 | expect(honeybadgerConfigure).toHaveBeenCalledTimes(1);
306 | expect(honeybadgerConfigure).toHaveBeenCalledWith({ apiKey: 'foobar' });
307 | });
308 |
309 | test('should send unexpected error to Sentry if configured', async () => {
310 | const notify = new Notify(config, log);
311 | config.notify.exceptions!.sentryDsn = 'foobar';
312 | const sentryCapture = jest
313 | .spyOn(Sentry, 'captureException')
314 | .mockImplementation(jest.fn());
315 | const sentryInit = jest.spyOn(Sentry, 'init').mockReturnValue({
316 | captureException: sentryCapture
317 | } as unknown as ReturnType<typeof Sentry.init>);
318 | const mockError = new FetchError('Connection error', 'system', {
319 | code: 'ECONNRESET'
320 | });
321 |
322 | await notify.ofThrow(mockError);
323 | await notify.ofThrow('Example thrown string');
324 |
325 | expect(sentryCapture).toHaveBeenCalledTimes(2);
326 | expect(sentryCapture).toHaveBeenCalledWith(mockError);
327 | expect(sentryCapture).toHaveBeenCalledWith('Example thrown string');
328 | expect(sentryInit).toHaveBeenCalledTimes(1);
329 | expect(sentryInit).toHaveBeenCalledWith({ dsn: 'foobar' });
330 | });
331 | });
332 |
333 | describe('dispatchSmtp', () => {
334 | test('should queue configuration error', async () => {
335 | const notify = new Notify(config, log);
336 | const queueError = jest.spyOn(notify, 'queueError');
337 | config.notify.smtp = smtpConfig;
338 | jest.spyOn(nodemailer, 'createTransport').mockImplementation(() => {
339 | throw new Error('Example configuration error');
340 | });
341 |
342 | await notify.ofFailure({
343 | message: 'Example failure message',
344 | verbose: 'Example verbose context'
345 | });
346 |
347 | expect(queueError).toHaveBeenCalledTimes(1);
348 | expect(queueError).toHaveBeenCalledWith({
349 | message: 'SMTP is misconfigured. Please check your configuration.',
350 | verbose: 'Example configuration error'
351 | });
352 | expect(sendMail).not.toHaveBeenCalled();
353 | expect(processExit).not.toHaveBeenCalled();
354 | queueError.mockRestore();
355 | });
356 |
357 | test('should stringify non-error objects', async () => {
358 | const notify = new Notify(config, log);
359 | const queueError = jest.spyOn(notify, 'queueError');
360 | config.notify.smtp = smtpConfig;
361 | jest.spyOn(nodemailer, 'createTransport').mockImplementation(() => {
362 | throw 'Example configuration error';
363 | });
364 |
365 | await notify.ofFailure({
366 | message: 'Example failure message',
367 | verbose: 'Example verbose context'
368 | });
369 |
370 | expect(queueError).toHaveBeenCalledTimes(1);
371 | expect(queueError).toHaveBeenCalledWith({
372 | message: 'SMTP is misconfigured. Please check your configuration.',
373 | verbose: '"Example configuration error"'
374 | });
375 | expect(sendMail).not.toHaveBeenCalled();
376 | expect(processExit).not.toHaveBeenCalled();
377 | queueError.mockRestore();
378 | });
379 | });
380 | });
381 |
--------------------------------------------------------------------------------
/test/unit/sync.test.ts:
--------------------------------------------------------------------------------
1 | import { afterAll, afterEach, beforeEach, describe, expect, jest, test } from '@jest/globals';
2 | import nock from 'nock';
3 | import { Blob } from 'node-fetch';
4 | import { ClientFactory, Client } from '../../src/client';
5 | import { Config, Version } from '../../src/config/index';
6 | import { Log } from '../../src/log';
7 | import { ErrorNotification, Notify } from '../../src/notify';
8 | import { Sync } from '../../src/sync';
9 | import { SyncOptionsV6 } from '../../src/config/index';
10 |
11 | describe('sync', () => {
12 | let clientCreate: ReturnType<typeof jest.spyOn>;
13 | let notifyOfFailure: ReturnType<typeof jest.spyOn>;
14 | let notifyQueueError: ReturnType<typeof jest.spyOn>;
15 | let notifyOfSuccess: ReturnType<typeof jest.spyOn>;
16 | let processExit: ReturnType<typeof jest.spyOn>;
17 | let primaryHostClient: Client;
18 | let secondaryHostClient1: Client;
19 | let secondaryHostClient2: Client;
20 |
21 | const primaryHostValue = {
22 | baseUrl: 'http://10.0.0.2',
23 | password: 'password1',
24 | path: ''
25 | };
26 | const secondaryHostsValue = [
27 | { baseUrl: 'http://10.0.0.3', password: 'password2', path: '' },
28 | { baseUrl: 'http://10.0.0.4', password: 'password3', path: '' }
29 | ];
30 | const backupData = new Blob([]);
31 |
32 | beforeEach(() => {
33 | nock.disableNetConnect();
34 | jest.restoreAllMocks();
35 | // Fake timers are intentionally disabled: the promise-based sleep in sync.ts never resolves while they are active.
36 | // jest.useFakeTimers();
37 | });
38 |
39 | afterEach(() => {
40 | jest.resetModules();
41 | });
42 |
43 | afterAll(() => {
44 | // jest.useRealTimers();
45 | nock.cleanAll();
46 | nock.enableNetConnect();
47 | });
48 |
49 | const prepare = ({
50 | primaryResult,
51 | secondaryOneResult,
52 | secondaryTwoResult
53 | }: {
54 | primaryResult?: Promise<Blob>;
55 | secondaryOneResult?: Promise<boolean>;
56 | secondaryTwoResult?: Promise<boolean>;
57 | } = {}) => {
58 | const config = Config({
59 | piHoleVersion: '6',
60 | primaryHost: primaryHostValue,
61 | secondaryHosts: secondaryHostsValue,
62 | runOnce: true
63 | });
64 | const notify = new Notify(config);
65 | const log = new Log(config.verbose);
66 |
67 | processExit = jest.spyOn(process, 'exit').mockReturnValue(undefined as never);
68 | primaryHostClient = {
69 | downloadBackup: jest.fn(() => primaryResult ?? Promise.resolve(backupData))
70 | } as unknown as Client;
71 | secondaryHostClient1 = {
72 | uploadBackup: jest.fn(() => secondaryOneResult ?? Promise.resolve(true)),
73 | updateGravity: jest.fn(() => Promise.resolve(true))
74 | } as unknown as Client;
75 | secondaryHostClient2 = {
76 | uploadBackup: jest.fn(() => secondaryTwoResult ?? Promise.resolve(true)),
77 | updateGravity: jest.fn(() => Promise.resolve(true))
78 | } as unknown as Client;
79 | clientCreate = jest
80 | .spyOn(ClientFactory, 'createClient')
81 | .mockResolvedValueOnce(primaryHostClient)
82 | .mockResolvedValueOnce(secondaryHostClient1)
83 | .mockResolvedValueOnce(secondaryHostClient2);
84 | notifyOfFailure = jest.spyOn(notify, 'ofFailure');
85 | notifyQueueError = jest.spyOn(notify, 'queueError');
86 | notifyOfSuccess = jest.spyOn(notify, 'ofSuccess');
87 |
88 | return { config, notify, log };
89 | };
90 |
91 | const expectSyncToHaveBeenPerformed = ({
92 | version,
93 | options,
94 | log
95 | }: {
96 | version: Version;
97 | options: SyncOptionsV6;
98 | log: Log;
99 | }) => {
100 | expect(clientCreate).toHaveBeenCalledTimes(3);
101 | expect(clientCreate).toHaveBeenNthCalledWith(1, {
102 | host: expect.objectContaining(primaryHostValue),
103 | version,
104 | options,
105 | log
106 | });
107 | expect(clientCreate).toHaveBeenNthCalledWith(2, {
108 | host: expect.objectContaining(secondaryHostsValue[0]),
109 | version,
110 | options,
111 | log
112 | });
113 | expect(clientCreate).toHaveBeenNthCalledWith(3, {
114 | host: expect.objectContaining(secondaryHostsValue[1]),
115 | version,
116 | options,
117 | log
118 | });
119 | expect(primaryHostClient.downloadBackup).toHaveBeenCalledTimes(1);
120 | expect(secondaryHostClient1.uploadBackup).toHaveBeenCalledTimes(1);
121 | expect(secondaryHostClient1.uploadBackup).toHaveBeenCalledWith(backupData);
122 | expect(secondaryHostClient2.uploadBackup).toHaveBeenCalledTimes(1);
123 | expect(secondaryHostClient2.uploadBackup).toHaveBeenCalledWith(backupData);
124 | };
125 |
126 | test('should perform sync and succeed', async () => {
127 | const { config, notify, log } = prepare();
128 |
129 | await Sync.perform(config, { notify, log });
130 |
131 | expectSyncToHaveBeenPerformed({ version: '6', options: config.sync.v6, log });
132 | expect(notifyOfFailure).not.toHaveBeenCalled();
133 | expect(notifyQueueError).not.toHaveBeenCalled();
134 | expect(notifyOfSuccess).toHaveBeenCalledTimes(1);
135 | expect(notifyOfSuccess).toHaveBeenCalledWith({
136 | message: '2/2 hosts synced.'
137 | });
138 | expect(processExit).not.toHaveBeenCalled();
139 | });
140 |
141 | test('should perform sync and partially succeed', async () => {
142 | const { config, notify, log } = prepare({
143 | secondaryTwoResult: Promise.reject(new ErrorNotification({ message: 'foobar' }))
144 | });
145 |
146 | await Sync.perform(config, { notify, log });
147 |
148 | expectSyncToHaveBeenPerformed({ version: '6', options: config.sync.v6, log });
149 | expect(notifyOfSuccess).not.toHaveBeenCalled();
150 | expect(notifyQueueError).toHaveBeenCalledTimes(1);
151 | expect(notifyQueueError).toHaveBeenCalledWith(
152 | expect.objectContaining({
153 | message: 'foobar'
154 | })
155 | );
156 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
157 | expect(notifyOfFailure).toHaveBeenCalledWith({
158 | sendNotification: true,
159 | message: '1/2 hosts synced.'
160 | });
161 | expect(processExit).toHaveBeenCalledTimes(1);
162 | });
163 |
164 | test('should perform sync and fail', async () => {
165 | const { config, notify, log } = prepare({
166 | secondaryOneResult: Promise.reject(new ErrorNotification({ message: 'foobar' })),
167 | secondaryTwoResult: Promise.reject(
168 | new ErrorNotification({ message: 'hello world' })
169 | )
170 | });
171 |
172 | await Sync.perform(config, { notify, log });
173 |
174 | expectSyncToHaveBeenPerformed({ version: '6', options: config.sync.v6, log });
175 | expect(notifyOfSuccess).not.toHaveBeenCalled();
176 | expect(notifyQueueError).toHaveBeenCalledTimes(2);
177 | expect(notifyQueueError).toHaveBeenCalledWith(
178 | expect.objectContaining({
179 | message: 'foobar'
180 | })
181 | );
182 | expect(notifyQueueError).toHaveBeenCalledWith(
183 | expect.objectContaining({
184 | message: 'hello world'
185 | })
186 | );
187 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
188 | expect(notifyOfFailure).toHaveBeenCalledWith({
189 | message: '0/2 hosts synced.'
190 | });
191 | expect(processExit).toHaveBeenCalledTimes(1);
192 | });
193 |
194 | test('should perform sync and fail when the backup download fails', async () => {
195 | const { config, notify, log } = prepare({
196 | primaryResult: Promise.reject(
197 | new ErrorNotification({ message: 'Backup failed to download' })
198 | )
199 | });
200 |
201 | await Sync.perform(config, { notify, log });
202 |
203 | expect(notifyOfSuccess).not.toHaveBeenCalled();
204 | expect(notifyQueueError).not.toHaveBeenCalled();
205 | expect(notifyOfFailure).toHaveBeenCalledTimes(1);
206 | expect(notifyOfFailure).toHaveBeenCalledWith(
207 | expect.objectContaining({
208 | message: 'Backup failed to download'
209 | })
210 | );
211 | expect(secondaryHostClient1.uploadBackup).not.toHaveBeenCalled();
212 | expect(secondaryHostClient2.uploadBackup).not.toHaveBeenCalled();
213 | expect(processExit).toHaveBeenCalledTimes(1);
214 | });
215 | });
216 |
--------------------------------------------------------------------------------
/test/unit/util/string-case.test.ts:
--------------------------------------------------------------------------------
1 | import { camelToTitleCase } from '../../../src/util/string-case';
2 |
3 | describe('String Case', () => {
4 | describe('camelToTitleCase', () => {
5 | it('should convert camel case to title case', () => {
6 | expect(camelToTitleCase('camelCase')).toBe('Camel Case');
7 | });
8 | });
9 | });
10 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "@tsconfig/node18/tsconfig.json",
3 | "include": ["src"],
4 | "exclude": ["node_modules", "test"],
5 | "compilerOptions": {
6 | "outDir": "dist"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------