├── v1 ├── test │ ├── TEST │ ├── dirs │ │ ├── exclude this one │ │ │ └── In exclude dir.txt │ │ ├── include this one │ │ │ ├── Abe.jpg │ │ │ └── Krummi.JPG │ │ └── compressable │ │ │ └── Lorem Ipsum.txt │ ├── google │ │ └── googled53c072eaf660178.html │ ├── GREENLAND.JPEG │ ├── test-fail.sh │ ├── compile │ │ ├── src │ │ │ ├── dar-2.7.13.tar.gz │ │ │ └── dar-2.7.14.tar.gz │ │ └── README.md │ ├── templates │ │ ├── backups.d │ │ │ └── TEST │ │ └── dar-backup.conf.template │ ├── misc │ │ └── downloads.py │ ├── test-cleanup-fail-no-dir.sh │ ├── test-check-if-no-restorefile.sh │ ├── test-list-definitions.sh │ ├── test-archive-exists.sh │ ├── test-shellcheck.sh │ ├── test-populate-catalog-single-archive.sh │ ├── test-install-head.sh │ ├── test-restore-link.sh │ ├── test-install-script.sh │ ├── test-find-correct-full.sh │ ├── dar-operations.sh.org │ ├── mk-DEV.sh │ ├── test-cleanup-fail-diff-age-calc.sh │ ├── test-cleanup-fail-inc-age-calc.sh │ ├── test-create-single-catalog.sh │ ├── runner.sh │ ├── test-multiple-definitions.sh │ ├── mk-release.sh │ ├── test-mk-release-on-dev.sh │ ├── test-parchive.sh │ ├── test-parchive-large-bitrot.sh │ ├── setup.sh │ ├── test-cleanup.sh │ ├── test-run-restore-test.sh │ ├── test-parchive-multiple-8k-blocks.sh │ ├── test-cleanup-alternate-dir.sh │ ├── test-remove-single-archive-from-catalog.sh │ ├── test-use-catalog-option.sh │ ├── test-create-catalog.sh │ ├── test-populate-single-catalog.sh │ └── test-definitions-with-spaces.sh ├── conf │ └── README.md ├── doc │ ├── pics │ │ └── log-example-01.png │ └── catalogs.md ├── share │ ├── dar-cleanup.timer │ ├── dar-inc-backup.timer │ ├── alert-upcoming-full-backup.timer │ ├── alert-upcoming-diff-backup.timer │ ├── dar-backup.timer │ ├── dar-diff-backup.timer │ └── README.md ├── templates │ ├── systemd │ │ ├── dar-backup.service │ │ ├── dar-inc-backup.service │ │ ├── dar-cleanup.service │ │ ├── dar-diff-backup.service │ │ ├── alert-upcoming-diff-backup.service │ │ └── alert-upcoming-full-backup.service │ ├── backups.d │ │ └── dar-backup │ ├── dar-backup.conf.template │ └── darrc.template └── bin │ ├── send-msg.sh │ ├── show-DIFF.sh │ ├── show-FULL.sh │ ├── install.sh │ └── par2.sh ├── v2 ├── tests │ ├── __init__.py │ ├── install.py │ ├── test_add_old_archive_confirmation.py │ ├── test_binary_info.py │ ├── test_prereq.py │ ├── envdata.py │ ├── test_filter_darrc_file.py │ ├── test_stress.py │ ├── test_create_backup_command.py │ ├── test_autocompletion_install.py │ ├── test_links.py │ ├── play.py │ ├── test_postreq.py │ ├── test_discord_webhook.py │ ├── test_generic_backup_command_execution.py │ ├── test_status_indicators.py │ ├── test_systemd_unit_generation.py │ ├── test_space_definition.py │ ├── test_alternate_reference_archive.py │ ├── test_preflight.py │ ├── test_run_command.py │ ├── test_verbose.py │ ├── test_util.py │ └── test_restore.py ├── src │ ├── dar_backup │ │ ├── __init__.py │ │ ├── exceptions.py │ │ ├── __about__.py │ │ ├── dar-backup.conf │ │ ├── demo_backup_def.j2 │ │ ├── dar-backup.conf.j2 │ │ ├── .darrc │ │ └── rich_progress.py │ └── misc │ │ ├── curl.sh │ │ ├── downloads.py │ │ └── duplicate2format.py ├── pytest-minimal.ini ├── doc │ ├── dar-backup-overview.png │ ├── dar-backup-overview-small.png │ └── doc.md ├── .coveragerc ├── pytest-cov.sh ├── sitecustomize.py ├── pytest.ini ├── MANIFEST.in ├── packages │ └── deb │ │ ├── templates │ │ └── control.template-default │ │ └── README.md ├── testall.sh ├── build.sh ├── template │ ├── dar-backup.conf.template │ └── .darrc └── pyproject.toml ├── src └── 
clonepulse │ ├── __init__.py │ ├── __about__.py │ └── util.py ├── downloads.json ├── clonepulse ├── milestone_500.txt ├── milestone_1000.txt ├── milestone_2000.txt ├── weekly_clones.png ├── badge_clones.json └── milestone_badge.json ├── codecov.yml ├── .vscode └── settings.json ├── CONTRIBUTORS.md ├── .github ├── workflows │ ├── utc.yml │ ├── linting.yml │ ├── list.yml │ ├── dashboard_clones.yml │ ├── reparing.yml │ ├── fetch_clones.yml │ ├── update_downloads.yml │ ├── install.yml │ └── py-tests.yml ├── pull_request_template.md └── ISSUE_TEMPLATE │ └── bug_report.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── track_downloads.py └── CONTRIBUTING.md /v1/test/TEST: -------------------------------------------------------------------------------- 1 | TEST 4 2 | -------------------------------------------------------------------------------- /v2/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/clonepulse/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /v2/src/dar_backup/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /v2/pytest-minimal.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | pythonpath = src -------------------------------------------------------------------------------- /downloads.json: -------------------------------------------------------------------------------- 1 | { 2 | "total": 2467, 3 | "fetched": "2025-12-18" 4 | } -------------------------------------------------------------------------------- /v1/test/dirs/exclude this one/In exclude dir.txt: -------------------------------------------------------------------------------- 1 | In exclude dir 2 | 3 | -------------------------------------------------------------------------------- /clonepulse/milestone_500.txt: -------------------------------------------------------------------------------- 1 | Reached 500 clones on 2025-05-25T06:40:03.355466+00:00Z 2 | -------------------------------------------------------------------------------- /v1/conf/README.md: -------------------------------------------------------------------------------- 1 | The config file is located in templates/dar-backup.conf.template 2 | -------------------------------------------------------------------------------- /clonepulse/milestone_1000.txt: -------------------------------------------------------------------------------- 1 | Reached 1000 clones on 2025-06-13T02:51:59.728578+00:00Z 2 | -------------------------------------------------------------------------------- /clonepulse/milestone_2000.txt: -------------------------------------------------------------------------------- 1 | Reached 2000 clones on 2025-09-02T02:45:11.709965+00:00Z 2 | -------------------------------------------------------------------------------- /v1/test/google/googled53c072eaf660178.html: -------------------------------------------------------------------------------- 1 | google-site-verification: googled53c072eaf660178.html -------------------------------------------------------------------------------- /v1/test/GREENLAND.JPEG: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/test/GREENLAND.JPEG -------------------------------------------------------------------------------- /v1/test/test-fail.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # 4 | # exit immediately with an error 5 | # 6 | 7 | exit 1 8 | -------------------------------------------------------------------------------- /clonepulse/weekly_clones.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/clonepulse/weekly_clones.png -------------------------------------------------------------------------------- /v1/doc/pics/log-example-01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/doc/pics/log-example-01.png -------------------------------------------------------------------------------- /v2/doc/dar-backup-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v2/doc/dar-backup-overview.png -------------------------------------------------------------------------------- /v1/test/compile/src/dar-2.7.13.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/test/compile/src/dar-2.7.13.tar.gz -------------------------------------------------------------------------------- /v1/test/compile/src/dar-2.7.14.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/test/compile/src/dar-2.7.14.tar.gz -------------------------------------------------------------------------------- /v1/test/dirs/include this one/Abe.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/test/dirs/include this one/Abe.jpg -------------------------------------------------------------------------------- /v2/doc/dar-backup-overview-small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v2/doc/dar-backup-overview-small.png -------------------------------------------------------------------------------- /v1/test/dirs/include this one/Krummi.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/per2jensen/dar-backup/HEAD/v1/test/dirs/include this one/Krummi.JPG -------------------------------------------------------------------------------- /clonepulse/badge_clones.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": 1, 3 | "label": "# clones", 4 | "message": "3970", 5 | "color": "deeppink" 6 | } -------------------------------------------------------------------------------- /clonepulse/milestone_badge.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaVersion": 1, 3 | "label": "Milestone", 4 | "message": "2k+ clones", 5 | "color": "red" 6 | } -------------------------------------------------------------------------------- /v2/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = src 
4 | relative_files = True 5 | parallel = True 6 | concurrency = multiprocessing,thread 7 | -------------------------------------------------------------------------------- /v2/pytest-cov.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | #PYTHONPATH=src pytest --cov=src/dar_backup 4 | PYTHONPATH=src pytest --rootdir=. 5 | coverage xml -o coverage.xml --reporter lcovonly 6 | 7 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | # codecov.yml (repo root) 2 | fixes: 3 | - "v2/::" 4 | - "*/work/*/*/::" 5 | - "*/runner/*/work/*/*/::" 6 | ignore: 7 | - "tests/**" 8 | - "venv/**" 9 | 10 | -------------------------------------------------------------------------------- /v1/share/dar-cleanup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-cleanup DIFF & INC timer 3 | 4 | [Timer] 5 | OnCalendar=*-*-* 21:07:00 6 | 7 | [Install] 8 | WantedBy=timers.target 9 | 10 | 11 | -------------------------------------------------------------------------------- /v1/share/dar-inc-backup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup INC timer 3 | 4 | [Timer] 5 | OnCalendar=*-*-04/3 19:03:00 6 | 7 | [Install] 8 | WantedBy=timers.target 9 | 10 | 11 | -------------------------------------------------------------------------------- /v2/sitecustomize.py: -------------------------------------------------------------------------------- 1 | import os 2 | try: 3 | if os.getenv("COVERAGE_PROCESS_START"): 4 | import coverage 5 | coverage.process_startup() 6 | except Exception: 7 | pass 8 | 9 | -------------------------------------------------------------------------------- /v2/src/dar_backup/exceptions.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: GPL-3.0-or-later 2 | 3 | class ConfigSettingsError(Exception): 4 | """Raised when ConfigSettings encounters a critical error.""" 5 | pass 6 | -------------------------------------------------------------------------------- /v1/share/alert-upcoming-full-backup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=alert upcoming FULL dar-backup 3 | 4 | [Timer] 5 | OnCalendar=*-12-28 10:03:00 6 | 7 | [Install] 8 | WantedBy=timers.target 9 | 10 | 11 | -------------------------------------------------------------------------------- /v1/share/alert-upcoming-diff-backup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=alert upcoming DIFF dar-backup 3 | 4 | [Timer] 5 | # Monthly DIFF on the first day of month 6 | OnCalendar=*-*-01 07:03:00 7 | 8 | [Install] 9 | WantedBy=timers.target 10 | 11 | 12 | -------------------------------------------------------------------------------- /v1/share/dar-backup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup FULL timer 3 | 4 | [Timer] 5 | # yearly the day before new years, that is typically a vacation day for Per 6 | OnCalendar=*-12-30 10:03:00 7 | 8 | [Install] 9 | WantedBy=timers.target 10 | 11 | 12 | -------------------------------------------------------------------------------- /v1/templates/systemd/dar-backup.service: 
-------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup FULL 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=infinity 8 | RemainAfterExit=no 9 | ExecStart="@@DAR_BACKUP_DIR@@/bin/dar-backup.sh" 10 | -------------------------------------------------------------------------------- /v1/templates/systemd/dar-inc-backup.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup INC 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=infinity 8 | RemainAfterExit=no 9 | ExecStart="@@DAR_BACKUP_DIR@@/bin/dar-inc-backup.sh" 10 | -------------------------------------------------------------------------------- /v1/templates/systemd/dar-cleanup.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=cleanup up old DIFF & INC archives 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=60 8 | RemainAfterExit=no 9 | ExecStart="@@DAR_BACKUP_DIR@@/bin/cleanup.sh" 10 | -------------------------------------------------------------------------------- /v1/templates/systemd/dar-diff-backup.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup DIFF 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=infinity 8 | RemainAfterExit=no 9 | ExecStart="@@DAR_BACKUP_DIR@@/bin/dar-diff-backup.sh" 10 | -------------------------------------------------------------------------------- /v2/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --import-mode=importlib --cov=dar_backup --cov-report=term --cov-report=xml --no-cov-on-fail -m "not live_discord" 3 | testpaths = tests 4 | markers = 5 | live_discord: runs tests that hit a real Discord webhook (requires DISCORD_WEBHOOK_URL) 6 | -------------------------------------------------------------------------------- /v2/src/misc/curl.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # clones 4 | curl -L \ 5 | -H "Accept: application/json" \ 6 | -H "Authorization: Bearer $TOKEN" \ 7 | -H "X-GitHub-Api-Version: 2022-11-28" \ 8 | https://api.github.com/repos/per2jensen/dar-backup/traffic/clones 9 | 10 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "terminal.integrated.env.linux": { 3 | "PYTHONPATH": "${workspaceFolder}/v2/src:${PYTHONPATH}" 4 | }, 5 | "python.analysis.extraPaths": [ 6 | "${workspaceFolder}/v2/src"], 7 | "python.analysis.exclude": [ 8 | "**/venv/**" 9 | ] 10 | 11 | } -------------------------------------------------------------------------------- /v2/MANIFEST.in: -------------------------------------------------------------------------------- 1 | 2 | include README.md 3 | include Changelog.md 4 | include LICENSE 5 | include verify-signature.sh 6 | recursive-include doc *.md *.txt 7 | 8 | 9 | exclude venv/* 10 | exclude tests/* 11 | exclude dist/* 12 | exclude build.sh 13 | exclude testall.sh 14 | exclude pytest* 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- 
/v1/templates/systemd/alert-upcoming-diff-backup.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=alert upcoming DIFF dar-backup 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=300 8 | RemainAfterExit=no 9 | ExecStart=@@DAR_BACKUP_DIR@@/bin/../bin/send-msg.sh "NOTICE: DIFF dar-backup coming up" 10 | -------------------------------------------------------------------------------- /v1/templates/systemd/alert-upcoming-full-backup.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=alert upcoming FULL dar-backup 3 | StartLimitIntervalSec=120 4 | StartLimitBurst=1 5 | [Service] 6 | Type=oneshot 7 | TimeoutSec=300 8 | RemainAfterExit=no 9 | ExecStart=@@DAR_BACKUP_DIR@@/bin/../bin/send-msg.sh "NOTICE: FULL dar-backup coming up" 10 | -------------------------------------------------------------------------------- /v1/share/dar-diff-backup.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=dar-backup DIFF timer 3 | 4 | [Timer] 5 | # Monthly DIFF on the first day of the month 6 | OnCalendar=*-*-01 19:03:00 7 | # Make sure the service is run on next login after "onCalendar", if machine was not turned on 8 | Persistent=true 9 | [Install] 10 | WantedBy=timers.target 11 | 12 | 13 | -------------------------------------------------------------------------------- /v2/src/dar_backup/__about__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.0.2" 2 | 3 | __author__ = "Per Jensen" 4 | 5 | __license__ = '''Licensed under GNU GENERAL PUBLIC LICENSE v3, see the supplied file "LICENSE" for details. 6 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW, not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 7 | See section 15 and section 16 in the supplied "LICENSE" file.''' 8 | 9 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | # Contributors to dar-backup 2 | 3 | `dar-backup` is made possible thanks to the people listed here. 4 | 5 | ## Author & Maintainer 6 | 7 | - Per Jensen (@per2jensen) 8 | 9 | ## Contributors 10 | 11 | - [Your name here] 12 | 13 | --- 14 | 15 | If you have contributed to dar-backup (code, tests, documentation, bug reports, or reviews) and are not listed, please open a Pull Request to add yourself. 16 | -------------------------------------------------------------------------------- /v2/packages/deb/templates/control.template-default: -------------------------------------------------------------------------------- 1 | Package: dar-backup 2 | Version: {version} 3 | Section: utils 4 | Priority: optional 5 | Architecture: all 6 | Maintainer: dar-backup 7 | Homepage: https://github.com/per2jensen/dar-backup 8 | Depends: python3, python3-rich (>= 13.0.0), python3-jinja2, python3-argcomplete, dar (>= 2.7.13), par2 9 | Description: Backup utility using dar with cleanup and verification tools. 
10 | -------------------------------------------------------------------------------- /.github/workflows/utc.yml: -------------------------------------------------------------------------------- 1 | name: UTC 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | on: 7 | schedule: 8 | - cron: '15 7 * * *' # Runs every day at 07:15 UTC 9 | workflow_dispatch: # Allow manual trigger 10 | 11 | jobs: 12 | update: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Log current UTC time 17 | run: | 18 | echo Time of workflow run: 19 | date -u 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | v2/README.md 3 | v2/conf/dar-backup.conf 4 | v2/backup.d/pj-homedir 5 | v2/dar_backup.egg-info 6 | v2/dependency_links.txt 7 | v2/PKG-INFO 8 | v2/SOURCES.txt 9 | v2/src/mount-microserver.sh 10 | v2/src/dar_backup/*.md 11 | v2/test/__pycache__/prereq.cpython-312.pyc 12 | v2/test/test_template.py 13 | v2/top_level.txt 14 | v2/venv 15 | v2/old_dar_backup 16 | v2/dist 17 | v2/build 18 | v2/.coverage 19 | v2/dar-backup.tar 20 | v2/coverage.xml 21 | v2/packages/* 22 | v2/.coverage.*.*.*.* 23 | -------------------------------------------------------------------------------- /src/clonepulse/__about__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.0.5" 2 | 3 | __license__ = '''Licensed under the MIT License. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 4 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 5 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 6 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 7 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 8 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 9 | SOFTWARE.''' 10 | 11 | -------------------------------------------------------------------------------- /v2/packages/deb/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Notes 3 | 4 | ## inputimeout not available on Debian 12 and Ubuntu 25.04 5 | 6 | 💡 Note: 7 | This package bundles `inputimeout` from PyPI because it is not yet available in Debian/Ubuntu repositories. 8 | All other dependencies are installed using your system package manager. 9 | 10 | # Note 11 | 12 | 'inputimeout' is NOT listed in Depends because it is not available in Debian/Ubuntu APT repositories. 13 | It is vendored directly from PyPI during build in tools/build_deb.py. 14 | If it becomes available in APT in the future, you may consider replacing the vendored version. -------------------------------------------------------------------------------- /v2/testall.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # Licensed under GNU GENERAL PUBLIC LICENSE v3, see the supplied file "LICENSE" for details. 4 | 5 | # THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW, 6 | # not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
7 | # See section 15 and section 16 in the supplied "LICENSE" file 8 | 9 | # 10 | # Run all pytest tests in tests/ directory 11 | # 12 | 13 | if [ -z "$VIRTUAL_ENV" ] || [ "$VIRTUAL_ENV" != "$(realpath ./venv)" ]; then 14 | echo "Activating virtual environment in ./venv" 15 | source ./venv/bin/activate 16 | fi 17 | 18 | pytest -c pytest-minimal.ini tests/ -------------------------------------------------------------------------------- /v1/test/templates/backups.d/TEST: -------------------------------------------------------------------------------- 1 | # Include defaults 2 | -B "@@CONFDIR@@/defaults-rc" 3 | 4 | # Switch to ordered selection mode, which means that the following 5 | # options will be considered top to bottom 6 | -am 7 | 8 | # Backup Root dir 9 | -R "@@CONFDIR@@/.." 10 | 11 | # Directories to backup below the Root dir 12 | -g "dirs" 13 | 14 | # Directories to exclude below the Root dir 15 | -P "dirs/exclude this one" 16 | 17 | # compression level 18 | -z5 19 | 20 | # size of each slice in the archive 21 | --slice 4G 22 | 23 | # bypass directores marked as cache directories 24 | # http://dar.linux.free.fr/doc/Features.html 25 | --cache-directory-tagging 26 | -------------------------------------------------------------------------------- /v2/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # SPDX-License-Identifier: GPL-3.0-or-later 3 | 4 | set -euo pipefail 5 | 6 | if [ ! -d venv ]; then 7 | python3 -m venv venv 8 | fi 9 | source venv/bin/activate 10 | 11 | PYTHON="$(which python3)" 12 | PIP="$(which pip)" 13 | 14 | echo "🔧 Installing project in editable mode in venv: $VIRTUAL_ENV" 15 | $PIP install --upgrade pip hatch 16 | $PIP install -e .[dev] 17 | echo "✅ Project installed in editable mode." 18 | 19 | echo "🧹 Cleaning old build artifacts..." 20 | rm -rf dist/* 2>/dev/null || true 21 | 22 | echo "📦 Building installable packages (sdist + wheel)..." 
23 | hatch build --clean 24 | echo "✅ Packages written to dist/:" 25 | ls -1 dist 26 | -------------------------------------------------------------------------------- /v2/src/misc/downloads.py: -------------------------------------------------------------------------------- 1 | # 2 | # All credits to 3 | # https://stackoverflow.com/users/797495/pedro-lobito 4 | # https://stackoverflow.com/questions/4338358/github-can-i-see-the-number-of-downloads-for-a-repo 5 | # 6 | 7 | import requests 8 | 9 | owner = "per2jensen" 10 | repo = "dar-backup" 11 | h = {"Accept": "application/vnd.github.v3+json"} 12 | u = f"https://api.github.com/repos/{owner}/{repo}/releases?per_page=100" 13 | r = requests.get(u, headers=h).json() 14 | r.reverse() # older tags first 15 | for rel in r: 16 | if rel['assets']: 17 | tag = rel['tag_name'] 18 | dls = rel['assets'][0]['download_count'] 19 | pub = rel['published_at'] 20 | print(f"Pub: {pub} | Tag: {tag} | Dls: {dls} ") -------------------------------------------------------------------------------- /v1/test/misc/downloads.py: -------------------------------------------------------------------------------- 1 | # 2 | # All credits to 3 | # https://stackoverflow.com/users/797495/pedro-lobito 4 | # https://stackoverflow.com/questions/4338358/github-can-i-see-the-number-of-downloads-for-a-repo 5 | # 6 | 7 | import requests 8 | 9 | owner = "per2jensen" 10 | repo = "dar-backup" 11 | h = {"Accept": "application/vnd.github.v3+json"} 12 | u = f"https://api.github.com/repos/{owner}/{repo}/releases?per_page=100" 13 | r = requests.get(u, headers=h).json() 14 | r.reverse() # older tags first 15 | for rel in r: 16 | if rel['assets']: 17 | tag = rel['tag_name'] 18 | dls = rel['assets'][0]['download_count'] 19 | pub = rel['published_at'] 20 | print(f"Pub: {pub} | Tag: {tag} | Dls: {dls} ") -------------------------------------------------------------------------------- /v1/test/test-cleanup-fail-no-dir.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # test cleanup.sh fails if the alternate-archive-dir does not exist 4 | 5 | TEST_RESULT=0 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/conf/dar-backup.conf" 13 | 14 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir --alternate-archive-dir /dir/does/not/exist 15 | RESULT=$? 16 | 17 | if [[ "$RESULT" != "1" ]]; then 18 | TEST_RESULT=1 19 | fi 20 | 21 | grep ERROR "$TESTDIR"/archives/dar-backup.log > /dev/null 22 | RESULT=$? 23 | 24 | if [[ "$RESULT" != "0" ]]; then 25 | TEST_RESULT=1 26 | fi 27 | 28 | echo TEST_RESULT: $TEST_RESULT 29 | exit $TEST_RESULT 30 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Pull Request 2 | 3 | Thank you for considering contributing a PR to **dar-backup**. 4 | Please complete the sections below to help us review your changes. 
5 | 6 | ## Description 7 | 8 | ## Related Issues 9 | 10 | 11 | ## Checklist 12 | - [ ] Tests pass locally (`pytest -v` for v2, `./runner.sh` for v1) 13 | - [ ] Added or updated test cases for new/changed code 14 | - [ ] Code is documented (docstrings, comments) 15 | - [ ] Documentation updated if needed (`README.md` or `doc/`) 16 | - [ ] CI checks pass (GitHub Actions) 17 | 18 | ## Notes 19 | 20 | -------------------------------------------------------------------------------- /v1/test/test-check-if-no-restorefile.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # Check that the script does not try to restore a file if no files are found in the backup 4 | 5 | SCRIPTPATH=$(realpath "$0") 6 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 7 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 8 | 9 | source "$SCRIPTDIRPATH"/setup.sh 10 | 11 | # run the test 12 | "$TESTDIR"/bin/dar-backup.sh --verbose -d TEST --local-backup-dir 13 | RESULT=$? 14 | if [[ $RESULT != "0" ]]; then 15 | TESTRESULT=1 16 | fi 17 | 18 | "$TESTDIR"/bin/dar-diff-backup.sh --verbose -d TEST --local-backup-dir 19 | RESULT=$? 20 | if [[ $RESULT != "0" ]]; then 21 | TESTRESULT=1 22 | fi 23 | 24 | grep -i "no files found for restore test" "$LOG_LOCATION"/dar-backup.log > /dev/null 2>&1 25 | if [[ $? != "0" ]]; then 26 | TESTRESULT=1 27 | fi 28 | 29 | echo TEST RESULT: $TESTRESULT 30 | exit $TESTRESULT 31 | 32 | 33 | -------------------------------------------------------------------------------- /v1/test/test-list-definitions.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # check that ls-archives.sh -l works 4 | 5 | SCRIPTPATH=$(realpath $0) 6 | SCRIPTDIRPATH=$(dirname $SCRIPTPATH) 7 | #echo SCRIPTDIRPATH: $SCRIPTDIRPATH 8 | 9 | source $SCRIPTDIRPATH/setup.sh 10 | 11 | touch $TESTDIR/backups.d/TEST_BACKUP_DEF 12 | 13 | # list definitions in backups.d/ 14 | DEFS=$($TESTDIR/bin/ls-archives.sh -l) 15 | if [[ $? != "0" ]]; then 16 | echo ERROR list operation failed 17 | TESTRESULT=1 18 | fi 19 | echo $DEFS|grep -o TEST_BACKUP_DEF 20 | if [[ $? != "0" ]]; then 21 | echo ERROR definition TEST_BACKUP_DEF not found 22 | TESTRESULT=1 23 | fi 24 | 25 | 26 | # check another option works 27 | $TESTDIR/bin/ls-archives.sh -h > /dev/null 28 | if [[ $?
!= "0" ]]; then 29 | echo a non list option failed 30 | TESTRESULT=1 31 | fi 32 | 33 | #echo TEST RESULT: $TESTRESULT 34 | exit $TESTRESULT 35 | -------------------------------------------------------------------------------- /.github/workflows/linting.yml: -------------------------------------------------------------------------------- 1 | name: linting 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | 7 | on: 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 17 | jobs: 18 | shellcheck: 19 | runs-on: ubuntu-latest 20 | steps: 21 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 22 | - uses: actions/checkout@v4 23 | 24 | - name: Install required packages 25 | run: sudo apt install -y shellcheck 26 | 27 | - name: Run linter 28 | run: | 29 | chmod +x $GITHUB_WORKSPACE/test/test-shellcheck.sh 30 | $GITHUB_WORKSPACE/test/test-shellcheck.sh 31 | -------------------------------------------------------------------------------- /v1/templates/backups.d/dar-backup: -------------------------------------------------------------------------------- 1 | # definition to backup the deployed version 2 | # 3 | 4 | # Include defaults 5 | -B "@@CONFDIR@@/defaults-rc" 6 | 7 | # Switch to ordered selection mode, which means that the following 8 | # options will be considered top to bottom 9 | -am 10 | 11 | # Backup Root dir 12 | -R "@@CONFDIR@@/../.." 13 | 14 | # Directories to backup below the Root dir 15 | -g dar-backup 16 | 17 | # Directories to exclude below the Root dir 18 | -P dar-backup/archives 19 | 20 | # compression level 21 | -z5 22 | 23 | # size of each slice in the archive 24 | --slice 4G 25 | 26 | # see https://github.com/per2jensen/dar-backup?tab=readme-ov-file#restore-test-exit-code-4 27 | # useful if running dar using a non-privileged user 28 | --comparison-field=ignore-owner 29 | 30 | 31 | # bypass directores marked as cache directories 32 | # http://dar.linux.free.fr/doc/Features.html 33 | --cache-directory-tagging 34 | -------------------------------------------------------------------------------- /v1/test/test-archive-exists.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # run dar-back.sh once more - expected result is to skip, as an archive already exists 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/conf/dar-backup.conf" 13 | 14 | # run the test 15 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir 16 | RESULT=$? 17 | if [[ $RESULT != "0" ]]; then 18 | TESTRESULT=1 19 | fi 20 | 21 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir 22 | RESULT=$? 23 | if [[ $RESULT != "0" ]]; then 24 | TESTRESULT=1 25 | fi 26 | 27 | grep -E "WARN.*?TEST_FULL.*?already exists" "$TESTDIR/archives/dar-backup.log" 28 | RESULT=$? 
29 | if [[ "$RESULT" != "0" ]]; then 30 | echo existing archive was NOT found 31 | TESTRESULT=1 32 | fi 33 | 34 | exit "$TESTRESULT" 35 | -------------------------------------------------------------------------------- /v1/doc/catalogs.md: -------------------------------------------------------------------------------- 1 | # Catalogs 2 | 3 | ## Introduction 4 | A dar_manager catalog keeps track of which files are in which archives, and helps restore the correct version of any given file. 5 | 6 | dar_manager docs are here: 7 | 8 | - [dar_manager man page](http://dar.linux.free.fr/doc/man/dar_manager.html) 9 | - [dar tutorial](http://dar.linux.free.fr/doc/Tutorial.html) 10 | 11 | ## dar-backup.log - confusion 12 | 13 | The dar-backup.log can be a little confusing if a catalog operation results in an ERROR and dar-backup a little later reports SUCCESS. There is a reason :-) 14 | 15 | dar-backup reports SUCCESS because all backup definitions were processed without error. That means the following succeeded: 16 | 17 | - Backup 18 | - Test of archive 19 | - Test restore of a file 20 | 21 | If an archive was not added to its catalog, an ERROR is issued, but it is not considered a backup error. 22 | 23 | ## Issues 24 | 25 | Currently no issues -------------------------------------------------------------------------------- /v1/test/test-shellcheck.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # 4 | # Run shellcheck ( https://github.com/koalaman/shellcheck ) on the shell scripts 5 | # 6 | # Fail if shellcheck detects errors 7 | # 8 | 9 | 10 | which shellcheck 11 | if [[ $? != "0" ]]; then 12 | echo "shellcheck not installed, exiting" 13 | exit 1 14 | fi 15 | 16 | SCRIPTPATH=$(realpath "$0") 17 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 18 | 19 | export RESULT=0 20 | export SHELLCHECK_OPTS="-e SC2181 -e SC1090" 21 | 22 | # $1: shell script to check 23 | run_shellcheck () { 24 | echo "linting \"$1\"" 25 | shellcheck -s bash -S error "$1" 26 | if [[ $? != "0" ]]; then 27 | RESULT=1 28 | fi 29 | 30 | } 31 | 32 | for file in "$SCRIPTDIRPATH"/../{bin,test}/*.sh 33 | do 34 | run_shellcheck "$file" 35 | done 36 | 37 | if [[ "$RESULT" == "0" ]]; then 38 | echo "shellcheck options: \"$SHELLCHECK_OPTS\"" 39 | echo "shellcheck did not find errors" 40 | fi 41 | exit "$RESULT" 42 | -------------------------------------------------------------------------------- /v1/test/test-populate-catalog-single-archive.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # test adding a specific archive to the catalog 4 | 5 | TESTRESULT=0 6 | CATALOG=TEST.catalog 7 | 8 | SCRIPTPATH=$(realpath "$0") 9 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 10 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 11 | 12 | source "$SCRIPTDIRPATH/setup.sh" 13 | source "$TESTDIR/conf/dar-backup.conf" 14 | 15 | # create catalogs 16 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 17 | if [[ $? != "0" ]]; then 18 | echo ERROR catalog was not created, exiting 19 | exit 1 20 | fi 21 | 22 | # do backups 23 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir 24 | RESULT=$?
25 | if [[ $RESULT != "0" ]]; then 26 | TESTRESULT=1 27 | fi 28 | 29 | echo "List catalog: $CATALOG" 30 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/"$CATALOG")" 31 | 32 | if [[ "$TESTRESULT" == "0" ]]; then 33 | log_success "Test case succeeded" 34 | else 35 | log_fail "Test case failed" 36 | fi 37 | 38 | exit "$TESTRESULT" 39 | -------------------------------------------------------------------------------- /v1/test/test-install-head.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -x 2 | 3 | # 4 | # Do install from HEAD, run install process, execute install backup definition 5 | # 6 | SCRIPTPATH=$(realpath "$0") 7 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 8 | RESULT=0 9 | 10 | # cleanup before starting 11 | DIR=/tmp/dar-backup 12 | if [[ -e "$DIR" ]]; then rm -fr "$DIR" || exit 1; fi 13 | 14 | # Follow install steps given in README.md 15 | cd /tmp || exit 1 16 | git clone https://github.com/per2jensen/dar-backup.git || exit 1 17 | chmod +x /tmp/dar-backup/bin/install.sh || exit 1 18 | /tmp/dar-backup/bin/install.sh || exit 1 19 | /tmp/dar-backup/bin/dar-backup.sh --local-backup-dir --debug 20 | if [[ $? != "0" ]]; then 21 | echo "ERROR delivered backup definition failed" 22 | RESULT=1 23 | fi 24 | find /tmp/dar-backup -name ".git*" -prune -o -ls 25 | cat /tmp/dar-backup/archives/dar-backup.log 26 | 27 | echo "non directories restored:" 28 | find /tmp/dar-restore/ ! -type d 29 | 30 | echo "RESULT: $RESULT" 31 | exit "$RESULT" 32 | -------------------------------------------------------------------------------- /v1/bin/send-msg.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # Copyright (C) 2024 Per Jensen 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | # $1: the msg to send to Discord 19 | 20 | 21 | SCRIPTPATH=$(realpath "$0") 22 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 23 | 24 | source "${SCRIPTDIRPATH}/../conf/dar-backup.conf" 25 | source "${SCRIPTDIRPATH}/dar-util.sh" 26 | 27 | sendDiscordMsg "$1" 28 | 29 | -------------------------------------------------------------------------------- /v2/template/dar-backup.conf.template: -------------------------------------------------------------------------------- 1 | [MISC] 2 | LOGFILE_LOCATION = /tmp/unit-test/dar-backup.log 3 | MAX_SIZE_VERIFICATION_MB = 20 4 | MIN_SIZE_VERIFICATION_MB = 0 5 | NO_FILES_VERIFICATION = 5 6 | 7 | # timeout in seconds for backup, test, restore and par2 operations 8 | # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used. 9 | # If a timeout is not specified when using the util.run_command(), a default timeout of 30 secs is used. 
10 | COMMAND_TIMEOUT_SECS = 86400 11 | 12 | [DIRECTORIES] 13 | BACKUP_DIR = /tmp/unit-test/@@test-case-name@@/backups/ 14 | BACKUP.D_DIR = /tmp/unit-test/@@test-case-name@@/backup.d/ 15 | DATA_DIR = /tmp/unit-test/@@test-case-name@@/data/ 16 | TEST_RESTORE_DIR = /tmp/unit-test/@@test-case-name@@/restore/ 17 | # Optional parameter 18 | # If you want to store the catalog database away from the BACKUP_DIR, use the MANAGER_DB_DIR variable. 19 | #MANAGER_DB_DIR = /some/where/else/ 20 | 21 | 22 | 23 | [AGE] 24 | DIFF_AGE = 30 25 | INCR_AGE = 15 26 | 27 | [PAR2] 28 | ERROR_CORRECTION_PERCENT = 5 29 | ENABLED = True 30 | 31 | -------------------------------------------------------------------------------- /v1/test/test-restore-link.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # Verify a symbolic link is restored and handled correctly by script 4 | # don't follow the link, restore the link itself 5 | 6 | SCRIPTPATH=$(realpath "$0") 7 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 8 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 9 | 10 | source "$SCRIPTDIRPATH"/setup.sh 11 | 12 | "$TESTDIR"/bin/dar-backup.sh -d TEST --local-backup-dir 13 | if [[ $? != "0" ]]; then 14 | exit 1 15 | fi 16 | 17 | 18 | NEWDIR=/tmp/dar-395043 19 | rm -fr "$NEWDIR" 20 | mkdir "$NEWDIR" 21 | touch "$NEWDIR"/a-file.txt 22 | 23 | ln -s "$NEWDIR"/a-file.txt "$TESTDIR"/dirs/a-file 24 | 25 | # run DIFF backup 26 | "$TESTDIR"/bin/dar-diff-backup.sh -d TEST --local-backup-dir 27 | if [[ $? != "0" ]]; then 28 | exit 1 29 | fi 30 | 31 | dar -l "$MOUNT_POINT/TEST_DIFF_$DATE" > "$TESTDIR"/DIFF-filelist.txt 32 | if [[ $? != "0" ]]; then 33 | exit 1 34 | fi 35 | 36 | checkExpectLog "\[Saved\].*?dirs/a-file" "$TESTDIR/DIFF-filelist.txt" 37 | checkExpectSymbolicLink "$TESTDIR/dirs/a-file" 38 | 39 | echo TEST RESULT: "$TESTRESULT" 40 | 41 | rm -fr "$NEWDIR" 42 | 43 | exit "$TESTRESULT" 44 | -------------------------------------------------------------------------------- /v1/test/test-install-script.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # 4 | # cp ~/git/dar-backup to /tmp, run install process, execute install backup definition 5 | # 6 | 7 | 8 | SCRIPTPATH=$(realpath "$0") 9 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 10 | source "$SCRIPTDIRPATH/../bin/dar-util.sh" 11 | 12 | RESULT=0 13 | 14 | INSTALLTEST=/tmp/installtest 15 | TESTDIR="$INSTALLTEST"/dar-backup 16 | 17 | rm -fr "$TESTDIR" 18 | mkdir "$INSTALLTEST" 19 | 20 | cp -R ~/git/dar-backup "$INSTALLTEST" 21 | 22 | cd "$TESTDIR" 23 | 24 | rm -fr "$TESTDIR"/.git 25 | rm -fr "$TESTDIR"/.github 26 | rm -fr "$TESTDIR"/test 27 | 28 | chmod +x "$TESTDIR/bin/install.sh" 29 | "$TESTDIR/bin/install.sh" 30 | 31 | # create catalogs 32 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 33 | if [[ $? != "0" ]]; then 34 | log_error "catalog was not created, exiting" 35 | exit 1 36 | fi 37 | 38 | find "$TESTDIR" -ls 39 | 40 | "${TESTDIR}/bin/dar-backup.sh" -d dar-backup --local-backup-dir 41 | if [[ $? != "0" ]]; then 42 | RESULT=1 43 | fi 44 | 45 | if [[ "$RESULT" == "0" ]]; then 46 | log_success "$0" 47 | else 48 | log_fail "$0" 49 | fi 50 | 51 | exit "$RESULT" 52 | -------------------------------------------------------------------------------- /v1/test/test-find-correct-full.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # Verify that dar-diff finds the newest FULL backup to diff against 4 | 5 | SCRIPTPATH=$(realpath "$0") 6 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 7 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 8 | 9 | source "$SCRIPTDIRPATH"/setup.sh 10 | 11 | TEST_ARCHIVE_DIR="$TESTDIR/archives" 12 | 13 | touch "$TEST_ARCHIVE_DIR"/TEST_FULL_2019-01-01.1.dar 14 | touch "$TEST_ARCHIVE_DIR"/TEST_FULL_2021-12-31.1.dar 15 | touch "$TEST_ARCHIVE_DIR"/TEST_FULL_2021-22-33.1.dar #not a valid date 16 | touch "$TEST_ARCHIVE_DIR"/TEST_FULL_2099-12-31.1.dar # future date 17 | echo files in archive directory: 18 | ls -lh "$MOUNT_POINT" 19 | 20 | # run DIFF backup 21 | "$TESTDIR"/bin/dar-diff-backup.sh -d TEST --local-backup-dir > "$TESTDIR"/dar-output.txt 22 | 23 | grep -E "Create DIFF compared to: +TEST_FULL_2099-12-31" "$TESTDIR"/dar-output.txt 24 | if [[ $? == "0" ]]; then 25 | echo "script DID use the correct (fake) archive to diff against" 26 | else 27 | echo "ERROR script did not did find the archive: TEST_FULL_2099-12-31.1.dar" 28 | TESTRESULT=1 29 | fi 30 | 31 | echo TEST RESULT: $TESTRESULT 32 | exit $TESTRESULT 33 | -------------------------------------------------------------------------------- /.github/workflows/list.yml: -------------------------------------------------------------------------------- 1 | name: list 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | 7 | on: 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | jobs: 17 | list-definitions: 18 | runs-on: ubuntu-latest 19 | steps: 20 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 21 | - uses: actions/checkout@v4 22 | 23 | # From: https://lannonbr.com/blog/2019-12-09-git-commit-in-actions 24 | # setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default 25 | - name: setup git config 26 | run: | 27 | git config user.name "GitHub Actions Bot" 28 | git config user.email "<>" 29 | 30 | - name: Install required packages 31 | run: sudo apt install -y dar dar-static par2 git 32 | 33 | - name: test ls-archives.sh --listdef 34 | run: | 35 | chmod +x $GITHUB_WORKSPACE/test/test-list-definitions.sh 36 | $GITHUB_WORKSPACE/test/test-list-definitions.sh 37 | 38 | -------------------------------------------------------------------------------- /.github/workflows/dashboard_clones.yml: -------------------------------------------------------------------------------- 1 | name: Generate Clone Dashboard 2 | 3 | permissions: 4 | contents: write 5 | pull-requests: write 6 | 7 | on: 8 | schedule: 9 | - cron: '19 3 * * 1' # Every Monday at 03:19 UTC 10 | workflow_dispatch: 11 | 12 | jobs: 13 | dashboard: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - name: Checkout repo 18 | uses: actions/checkout@v3 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v4 22 | with: 23 | python-version: '3.x' 24 | 25 | - name: Install dependencies 26 | run: pip install matplotlib pandas 27 | 28 | - name: Generate dashboard image 29 | run: PYTHONPATH=src python src/clonepulse/generate_clone_dashboard.py --user per2jensen --repo dar-backup 30 | 31 | - name: Commit dashboard update 32 | run: | 33 | git config --global user.name "github-actions" 34 | git config --global user.email "actions@github.com" 35 | if ! 
git diff --quiet; then 36 | git add clonepulse/weekly_clones.png 37 | git commit -m "Update weekly clones dashboard" 38 | git push 39 | fi 40 | -------------------------------------------------------------------------------- /v2/tests/install.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | Install dar-backup.py parallel to the bash version 5 | """ 6 | import os 7 | import shutil 8 | import logging 9 | 10 | INSTALL_DIR = os.path.expanduser(os.path.expandvars('~/programmer/dar-backup.py')) 11 | ARCHIVES_DIR = os.path.join(INSTALL_DIR, 'archives') 12 | BIN_DIR = os.path.join(INSTALL_DIR, 'bin') 13 | CONF_DIR = os.path.join(INSTALL_DIR, 'conf') 14 | BACKUP_D_DIR = os.path.join(INSTALL_DIR, 'backup.d') 15 | 16 | print(f"BIN_DIR: {BIN_DIR}") 17 | 18 | 19 | def create_directories(): 20 | os.makedirs(INSTALL_DIR, exist_ok=True) 21 | os.makedirs(ARCHIVES_DIR, exist_ok=True) 22 | os.makedirs(BIN_DIR, exist_ok=True) 23 | os.makedirs(BACKUP_D_DIR, exist_ok=True) 24 | os.makedirs(CONF_DIR, exist_ok=True) 25 | 26 | def copy_bin_files(): 27 | src_dir = os.path.join(os.path.dirname(__file__), '../src') 28 | for file in os.listdir(src_dir): 29 | if os.path.isfile(os.path.join(os.path.dirname(__file__), '../src', file)): 30 | shutil.copy(os.path.join(os.path.dirname(__file__), '../src', file), BIN_DIR) 31 | 32 | 33 | if __name__ == "__main__": 34 | create_directories() 35 | copy_bin_files() 36 | -------------------------------------------------------------------------------- /v1/test/templates/dar-backup.conf.template: -------------------------------------------------------------------------------- 1 | # 2 | # Configuration file for automated tests 3 | # 4 | # Environment variables. 5 | 6 | # the Discord webhook address to send messages to 7 | DISCORD_WEBHOOK="the long Discord webhook here" 8 | 9 | # server name or ip address 10 | # not relevant if --local-backup-dir is used 11 | SERVER=some-server 12 | 13 | # the directory on the server used to store backups 14 | # not relevant if --local-backup-dir is used 15 | SERVER_DIR="/some/dir" 16 | 17 | # dar archives are written here, change to a more suitable location 18 | # use --local-backup-dir for not trying to do an sshfs mount 19 | # TODO rename to something like ARCHIVE_DIR 20 | MOUNT_POINT="@@ARCHIVE_DIR@@/archives" 21 | 22 | # use catalogs or not, default is no - to not break existing setups 23 | USE_CATALOGS=y 24 | 25 | # Name of catalog database 26 | CATALOG_SUFFIX=.catalog 27 | 28 | # path to log file 29 | LOG_LOCATION="@@ARCHIVE_DIR@@/archives" 30 | 31 | # print more info, not quite debug 32 | VERBOSE="n" 33 | 34 | # should all output be captured in a file 35 | DEBUG=n 36 | 37 | # path to debug log file 38 | DEBUG_LOCATION="/tmp/dar-debug.log" 39 | 40 | # age in days for DIFF archives to be cleaned up (deleted) 41 | DIFF_AGE=100 42 | 43 | # age in days for INC archives to be cleaned up (deleted) 44 | INC_AGE=40 -------------------------------------------------------------------------------- /v2/tests/test_add_old_archive_confirmation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import patch 3 | from inputimeout import TimeoutOccurred 4 | import dar_backup.manager as manager 5 | 6 | def test_confirm_add_old_archive_yes(setup_environment, env): 7 | manager.logger = env.logger 8 | with patch("dar_backup.manager.inputimeout", return_value="yes"): 9 | assert 
manager.confirm_add_old_archive("example_FULL_2024-01-01", "2024-01-10") 10 | 11 | 12 | def test_confirm_add_old_archive_no(setup_environment, env): 13 | manager.logger = env.logger 14 | with patch("dar_backup.manager.inputimeout", return_value="no"): 15 | assert not manager.confirm_add_old_archive("example_FULL_2024-01-01", "2024-01-10") 16 | 17 | 18 | def test_confirm_add_old_archive_timeout(setup_environment, env): 19 | manager.logger = env.logger 20 | with patch("dar_backup.manager.inputimeout", side_effect=TimeoutOccurred): 21 | assert not manager.confirm_add_old_archive("example_FULL_2024-01-01", "2024-01-10") 22 | 23 | 24 | def test_confirm_add_old_archive_keyboard_interrupt(setup_environment, env): 25 | manager.logger = env.logger 26 | with patch("dar_backup.manager.inputimeout", side_effect=KeyboardInterrupt): 27 | assert not manager.confirm_add_old_archive("example_FULL_2024-01-01", "2024-01-10") 28 | -------------------------------------------------------------------------------- /v2/tests/test_binary_info.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | from dar_backup.util import get_binary_info 4 | from tests.envdata import EnvData 5 | 6 | @pytest.mark.parametrize("binary_name", ["dar", "dar_manager"]) 7 | def test_binary_path_exists_and_executable(binary_name): 8 | info = get_binary_info(binary_name) 9 | 10 | #print(f"DEBUG: Binary info: {info}") 11 | 12 | # Check that the binary was found 13 | assert info["path"] != "Not found", f"{binary_name} not found in PATH" 14 | 15 | # Check that the path is a valid file and executable 16 | assert os.path.isfile(info["path"]), f"{binary_name} path is not a file: {info['path']}" 17 | assert os.access(info["path"], os.X_OK), f"{binary_name} is not executable: {info['path']}" 18 | 19 | 20 | @pytest.mark.parametrize("binary_name", ["dar", "dar_manager"]) 21 | def test_binary_version_detected_and_valid(binary_name, env): 22 | info = get_binary_info(binary_name) 23 | 24 | # Ensure version info was extracted correctly 25 | assert info["version"] not in ("unknown", "error"), f"{binary_name} version extraction failed: {info['version']}" 26 | 27 | # Optionally ensure version contains numeric values 28 | assert any(char.isdigit() for char in info["version"]), f"{binary_name} version string looks invalid: {info['version']}" 29 | -------------------------------------------------------------------------------- /v1/test/dar-operations.sh.org: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # -I in list operation fails 4 | 5 | rm -fr dar-test 6 | mkdir dar-test 7 | cd dar-test 8 | 9 | git clone https://github.com/per2jensen/dar-backup 10 | 11 | dar -c shell-scripts -R dar-backup 12 | 13 | find dar-backup -name "*.sh" 14 | 15 | dar -l shell-scripts |grep "\.sh$" 16 | 17 | dar -l shell-scripts -I "*.sh" 18 | 19 | 20 | ======================== 21 | 22 | # backup restore 23 | 24 | rm -fr dar-test 25 | mkdir dar-test 26 | cd dar-test 27 | 28 | git clone https://github.com/per2jensen/dar-backup 29 | 30 | tar cvf dar-backup-pre.tar dar-backup/ 31 | 32 | dar -N -c dar-backup -R . 
-g dar-backup 33 | 34 | rm -fr dar-backup 35 | 36 | dar -x dar-backup 37 | 38 | tar cvf dar-backup-after.tar dar-backup/ 39 | 40 | sha256sum ./*.tar 41 | 42 | 43 | # restore shell scripts using -I works 44 | rm -fr dar-backup 45 | dar -x dar-backup -I "*.sh" 46 | find dar-backup/ -type f 47 | 48 | 49 | # restore a specific shell script 50 | # this restores empty directories below the "test" directory 51 | dar -x dar-backup -g dar-backup/test -I report-deleted-files.sh 52 | find dar-backup/ 53 | 54 | # restore a single file and the necessary directory structure (and no more) 55 | dar -x dar-backup -g dar-backup/test/report-deleted-files.sh 56 | find dar-backup/ 57 | 58 | 59 | 60 | -------------------------------------------------------------------------------- /v1/test/mk-DEV.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # package scripts into a tar and set the DEV-version 4 | # 5 | # $1 is the DEV-tag to package 6 | 7 | if [ -z "${1}" ]; then echo "tag not given, exiting"; exit; fi 8 | echo tag to create release from: \""$1"\" 9 | 10 | TAG=$(grep -P -o "^DEV\d+\.\d+\.\d+$" <<< "$1") 11 | if [[ "$TAG" == "" ]]; then 12 | echo "TAG \"$1\" does not match required DEV tag pattern, exiting" 13 | exit 1 14 | fi 15 | if ! git show-ref --tags --quiet "$1"; then 16 | echo "TAG \"$1\" not found, exiting" 17 | exit 1 18 | fi 19 | 20 | DIR=/tmp/dar-backup 21 | TARFILE="dar-backup-scripts-${1}.tar.gz" 22 | 23 | if [[ -e "$DIR" ]]; then rm -fr "$DIR" || exit 1; fi 24 | if [[ -f "/tmp/$TARFILE" ]]; then rm "/tmp/$TARFILE" || exit 1; fi 25 | 26 | cd /tmp || exit 1 27 | git clone https://github.com/per2jensen/dar-backup.git || exit 1 28 | cd dar-backup || exit 1 29 | 30 | git checkout "tags/$1" -b "release-$1" || exit 1 31 | chmod +x bin/*.sh 32 | 33 | # add version number to shell scripts 34 | while IFS= read -r -d "" file 35 | do 36 | sed -i "s/@@DEV-VERSION@@/$1/" "$file" 37 | done < <(find . -name "*.sh" -type f -print0) 38 | 39 | 40 | cd $DIR/.. 
|| exit 1 41 | tar czvf "$TARFILE" dar-backup/bin 42 | 43 | 44 | echo "Unpack command into Per's dar-backup directory:" 45 | echo "==> tar xvf /tmp/$TARFILE --directory ~/programmer/" 46 | -------------------------------------------------------------------------------- /.github/workflows/reparing.yml: -------------------------------------------------------------------------------- 1 | name: archive-repairing 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | # Allows you to run this workflow manually from the Actions tab 13 | workflow_dispatch: 14 | 15 | jobs: 16 | repair-4k-bitrot: 17 | runs-on: ubuntu-latest 18 | steps: 19 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 20 | - uses: actions/checkout@v4 21 | 22 | - name: Install required packages 23 | run: sudo apt install -y dar dar-static par2 24 | 25 | - name: repair 4096 bytes bitrot 26 | run: | 27 | chmod +x $GITHUB_WORKSPACE/test/test-parchive.sh 28 | $GITHUB_WORKSPACE/test/test-parchive.sh 29 | repair-3pcs-8k-blocks-bitrot: 30 | runs-on: ubuntu-latest 31 | steps: 32 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 33 | - uses: actions/checkout@v4 34 | 35 | - name: Install required packages 36 | run: sudo apt install -y dar dar-static par2 37 | 38 | - name: repair 3x8192 bytes bitrot in archive 39 | run: | 40 | chmod +x $GITHUB_WORKSPACE/test/test-parchive-multiple-8k-blocks.sh 41 | $GITHUB_WORKSPACE/test/test-parchive-multiple-8k-blocks.sh 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /v2/src/dar_backup/dar-backup.conf: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: GPL-3.0-or-later 2 | 3 | # This config file is intended to demo `dar-backup`. 4 | # 5 | # The `demo` application puts this file in ~/.config/dar-backup/dar-backup.conf 6 | 7 | [MISC] 8 | LOGFILE_LOCATION = ~/dar-backup/dar-backup.log 9 | MAX_SIZE_VERIFICATION_MB = 20 10 | MIN_SIZE_VERIFICATION_MB = 1 11 | NO_FILES_VERIFICATION = 5 12 | # timeout in seconds for backup, test, restore and par2 operations 13 | # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used. 14 | # If a timeout is not specified when using the CommandRunner, a default timeout of 30 secs is used. 15 | COMMAND_TIMEOUT_SECS = 86400 16 | #DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks// 17 | 18 | [DIRECTORIES] 19 | BACKUP_DIR = @@BACKUP_DIR@@ 20 | BACKUP.D_DIR = ~/.config/dar-backup/backup.d/ 21 | TEST_RESTORE_DIR = ~/dar-backup/restore/ 22 | # Optional parameter 23 | # If you want to store the catalog database away from the BACKUP_DIR, use the MANAGER_DB_DIR variable. 24 | #MANAGER_DB_DIR = /some/where/else/ 25 | 26 | [AGE] 27 | # DIFF and INCR backups are kept for a configured number of days, then deleted by the `cleanuo` 28 | # age settings are in days 29 | DIFF_AGE = 100 30 | INCR_AGE = 40 31 | 32 | [PAR2] 33 | ERROR_CORRECTION_PERCENT = 5 34 | ENABLED = True 35 | 36 | [PREREQ] 37 | #SCRIPT_1 = 38 | 39 | [POSTREQ] 40 | #SCRIPT_1 = 41 | -------------------------------------------------------------------------------- /v1/test/test-cleanup-fail-diff-age-calc.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # add file GREENLAND.JPEG to include dir and to the exclude dir 6 | # run dar-diff-backup.sh 7 | # cleanup DIFF and INC archives 8 | 9 | TEST_RESULT=0 10 | 11 | SCRIPTPATH=$(realpath "$0") 12 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 13 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 14 | 15 | source "$SCRIPTDIRPATH/setup.sh" 16 | source "$TESTDIR/conf/dar-backup.conf" 17 | 18 | DAY_1_OLD=$(date --date="-1 days" -I) 19 | DAY_2_OLD=$(date --date="-2 days" -I) 20 | 21 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_1_OLD}.dar 22 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_2_OLD}.dar 23 | 24 | touch "$TESTDIR"/archives/TEST_INC_${DAY_1_OLD}.dar 25 | touch "$TESTDIR"/archives/TEST_INC_${DAY_2_OLD}.dar 26 | 27 | # check that a directory name does not mess anything up (it did on a USB disk :-) ) 28 | mkdir "$TESTDIR"/archives/"$DAY_2_OLD" 29 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_DIFF_${DAY_2_OLD}.dar 30 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_INC_${DAY_2_OLD}.dar 31 | 32 | 33 | # set DIFF_AGE to fail 34 | sed -i s/DIFF_AGE.*/DIFF_AGE=2MUST-FAIL/ "$TESTDIR"/conf/dar-backup.conf 35 | 36 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir 37 | 38 | COUNT=$(grep -c -E "ERROR.*?DIFF_AGE_DATE" "$TESTDIR"/archives/dar-backup.log) 39 | echo "COUNT: $COUNT" 40 | if [[ "$COUNT" == "0" ]]; then 41 | echo \"FAIL\" message on wrong DIFF age not found 42 | TEST_RESULT=1 43 | fi 44 | 45 | echo TEST_RESULT: $TEST_RESULT 46 | exit $TEST_RESULT 47 | -------------------------------------------------------------------------------- /v1/test/test-cleanup-fail-inc-age-calc.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # add file GREENLAND.JPEG to include dir and to the exclude dir 6 | # run dar-diff-backup.sh 7 | # cleanup DIFF and INC archives 8 | 9 | TEST_RESULT=0 10 | 11 | SCRIPTPATH=$(realpath "$0") 12 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 13 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 14 | 15 | source "$SCRIPTDIRPATH/setup.sh" 16 | source "$TESTDIR/conf/dar-backup.conf" 17 | 18 | DAY_1_OLD=$(date --date="-1 days" -I) 19 | DAY_2_OLD=$(date --date="-2 days" -I) 20 | 21 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_1_OLD}.dar 22 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_2_OLD}.dar 23 | 24 | touch "$TESTDIR"/archives/TEST_INC_${DAY_1_OLD}.dar 25 | touch "$TESTDIR"/archives/TEST_INC_${DAY_2_OLD}.dar 26 | 27 | # check that a directory name does not mess anything up (it did on a USB disk :-) ) 28 | mkdir "$TESTDIR"/archives/"$DAY_2_OLD" 29 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_DIFF_${DAY_2_OLD}.dar 30 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_INC_${DAY_2_OLD}.dar 31 | 32 | 33 | # set INC_AGE calc to fail 34 | sed -i s/INC_AGE.*/INC_AGE=2MUST-FAIL/ "$TESTDIR"/conf/dar-backup.conf 35 | 36 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir 37 | 38 | COUNT=$(grep -c -E "ERROR.*?INC_AGE_DATE" "$TESTDIR"/archives/dar-backup.log) 39 | echo "COUNT: $COUNT" 40 | if [[ "$COUNT" == "0" ]]; then 41 | echo \"FAIL\" INC_AGE_DATE error message not found 42 | TEST_RESULT=1 43 | fi 44 | 45 | echo TEST_RESULT: $TEST_RESULT 46 | exit $TEST_RESULT 47 | -------------------------------------------------------------------------------- /v1/test/test-create-single-catalog.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # test creation of single catalog for an existing backup definition 4 | 5 | TESTRESULT=0 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/bin/dar-util.sh" 13 | source "$TESTDIR/conf/dar-backup.conf" 14 | 15 | # generate 5 different backups 16 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST2 17 | 18 | # create catalog 19 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir --backup-def TEST 20 | if [[ $? != "0" ]]; then 21 | echo ERROR catalog was not created, exiting 22 | TESTRESULT=1 23 | fi 24 | 25 | # create another catalog 26 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir --backup-def TEST2 27 | if [[ $? != "0" ]]; then 28 | echo ERROR catalog was not created, exiting 29 | TESTRESULT=1 30 | fi 31 | 32 | 33 | # create catalog, which already exists 34 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir --backup-def TEST 35 | if [[ $? != "0" ]]; then 36 | echo ERROR when trying to create existing catalog 37 | TESTRESULT=1 38 | fi 39 | 40 | # create catalog, for non-existing backup definition 41 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir --backup-def DOES_NOT_EXIST 42 | if [[ $? == "0" ]]; then 43 | echo ERROR an error should have happened here 44 | TESTRESULT=1 45 | fi 46 | 47 | 48 | if [[ "$TESTRESULT" == "0" ]]; then 49 | echo "Test case succeeded" 50 | fi 51 | exit "$TESTRESULT" 52 | -------------------------------------------------------------------------------- /v1/bin/show-DIFF.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # Copyright (C) 2024 Per Jensen 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | # print DIFF archives including number of slices and the slice size 19 | 20 | export DATE=`date -I` 21 | export SCRIPTPATH=`realpath "$0"` 22 | export SCRIPTDIRPATH=`dirname "$SCRIPTPATH"` 23 | 24 | source "${SCRIPTDIRPATH}/../conf/dar-backup.conf" 25 | source "${SCRIPTDIRPATH}/dar-util.sh" 26 | 27 | # make sure mounts are in order 28 | mountPrereqs 29 | 30 | echo Mountpoint: ${MOUNT_POINT} 31 | for archive in `ls ${MOUNT_POINT}/*_DIFF_*.dar|egrep "^.*?[0-9]{4}-[0-9]{2}-[0-9]{2}" -o|sort -u`; do 32 | BASE=`basename ${archive}` 33 | NO_SLICES=`find ${MOUNT_POINT} -name "${BASE}*.dar"|wc -l` 34 | SLICE_SIZE=`ls -l --block-size=G "${MOUNT_POINT}/${BASE}.1.dar"|cut -d" " -f5` 35 | printf "%-30s slices: %-3s (%s)\n" ${BASE} ${NO_SLICES} ${SLICE_SIZE} 36 | done 37 | -------------------------------------------------------------------------------- /v1/templates/dar-backup.conf.template: -------------------------------------------------------------------------------- 1 | # Environment variables. 
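# Note added for clarity (not part of the original template): the v1 scripts consume this file by sourcing it, e.g. `source "${SCRIPTDIRPATH}/../conf/dar-backup.conf"` as seen in show-DIFF.sh and show-FULL.sh, so every setting below must remain valid shell variable assignment syntax.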
2 | 3 | # the Discord webhook address to send messages to 4 | DISCORD_WEBHOOK="the long Discord webhook here" 5 | 6 | # server name or ip address 7 | # not relevant if --local-backup-dir is used 8 | SERVER=some-server 9 | 10 | # the directory on the server used to store backups 11 | # not relevant if --local-backup-dir is used 12 | SERVER_DIR="/some/dir" 13 | 14 | # dar archives are written here, change this default to a more suitable 15 | # location 16 | # use --local-backup-dir for not using a sshfs mount 17 | MOUNT_POINT="@@ARCHIVE_DIR@@/archives" 18 | 19 | # not relevant if --local-backup-dir is used 20 | # which ssh config file to use when connecting to a server using sshfs 21 | # it could be the stadard ~/.ssh/config or this one ~/.ssh/dar-config if 22 | # a special ssh setup is used for script to mount a server directory 23 | SSH_CONFIG="~/.ssh/dar-config" 24 | 25 | # use catalogs or not, default is no - to not break existing setups 26 | # use "n" for no or "y" for yes 27 | USE_CATALOGS=n 28 | 29 | # Name of catalog database 30 | CATALOG_SUFFIX=.catalog 31 | 32 | # path to log file 33 | LOG_LOCATION="@@ARCHIVE_DIR@@/archives" 34 | 35 | # print more info, not quite debug 36 | VERBOSE="n" 37 | 38 | # should all output be captured in a file 39 | DEBUG=n 40 | 41 | # path to debug log file 42 | DEBUG_LOCATION="/tmp/dar-debug.log" 43 | 44 | # age in days for DIFF archives to be cleaned up (deleted) 45 | DIFF_AGE=100 46 | 47 | # age in days for INC archives to be cleaned up (deleted) 48 | INC_AGE=40 49 | -------------------------------------------------------------------------------- /v1/bin/show-FULL.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # Copyright (C) 2024 Per Jensen 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | # print FULL archives including number of slices and the slice size 19 | 20 | export DATE=`date -I` 21 | export SCRIPTPATH=`realpath "$0"` 22 | export SCRIPTDIRPATH=`dirname "$SCRIPTPATH"` 23 | 24 | source "${SCRIPTDIRPATH}/../conf/dar-backup.conf" 25 | source "${SCRIPTDIRPATH}/dar-util.sh" 26 | 27 | # make sure mounts are in order 28 | mountPrereqs 29 | 30 | 31 | echo Mountpoint: ${MOUNT_POINT} 32 | for archive in `ls ${MOUNT_POINT}/*_FULL_*.dar|egrep "^.*?[0-9]{4}-[0-9]{2}-[0-9]{2}" -o|sort -u`; do 33 | BASE=`basename ${archive}` 34 | NO_SLICES=`find ${MOUNT_POINT} -name "${BASE}*.dar"|wc -l` 35 | SLICE_SIZE=`ls -l --block-size=G "${MOUNT_POINT}/${BASE}.1.dar"|cut -d" " -f5` 36 | printf "%-30s slices: %-3s (%s)\n" ${BASE} ${NO_SLICES} ${SLICE_SIZE} 37 | done 38 | -------------------------------------------------------------------------------- /v1/test/runner.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # 4 | # Run all "test-*" scripts 5 | # 6 | RUNNER_LOG=/tmp/dar-backup-runner.log 7 | 8 | TESTCASE_LOG=/tmp/dar-backup-test-cases.log 9 | rm -f $TESTCASE_LOG 10 | 11 | exec 1> >(tee -a -- $RUNNER_LOG) 12 | exec 2> >(tee -a -- $RUNNER_LOG >&2) 13 | DATE=$(date -Iseconds) 14 | 15 | echo "------------------------------------------------------------------" 16 | echo "dar-backup test runner started: $DATE" 17 | echo "------------------------------------------------------------------" 18 | echo "dar binary used: $(command -v dar | head -n1)" 19 | _DAR_=$(dar --version | grep -P -o "dar +version +\d+\.\d+.\d+") 20 | echo "dar version used: $_DAR_" 21 | 22 | SCRIPTPATH=$(realpath "$0") 23 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 24 | 25 | set -o monitor 26 | 27 | TESTRESULT=0 28 | TESTNO=1 29 | RESULT="" 30 | for file in "${SCRIPTDIRPATH}"/test-*.sh; do 31 | #echo "test: \"$file\"" 32 | TIME=$(date "-Iseconds") 33 | printf "%-20s: #: %-15s %-60s \n" "==>Testcase<==: ${TESTNO}" "${TIME}" "${file}" >> $TESTCASE_LOG 34 | $("$file" >> $TESTCASE_LOG 2>&1) 35 | if [[ $? == "0" ]]; then 36 | RESULT=ok 37 | else 38 | RESULT=error 39 | if [[ ! "$file" =~ test-fail.sh ]]; then 40 | TESTRESULT=1 41 | fi 42 | fi 43 | printf "%-6s: #: %-3s %-60s \n" "${RESULT}" "${TESTNO}" "${file}" 44 | TESTNO=$(( TESTNO + 1 )) 45 | RESULT="" 46 | done 47 | 48 | if [[ $TESTRESULT == "0" ]]; then 49 | printf "SUCCESS - all testcases succeeded (test-fail.sh must fail) \n" 50 | fi 51 | exit "$TESTRESULT" 52 | 53 | -------------------------------------------------------------------------------- /v2/tests/test_prereq.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 4 | from dar_backup.command_runner import CommandRunner 5 | 6 | 7 | def test_prereq(setup_environment, env): 8 | """ 9 | Test the prereq command in the config file. 10 | dar-backup must fail when a prereq command fails. 
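For reference, the section this test first appends to the config file is `[PREREQ]` with `PREREQ_01 = ls /tmp`; a second, failing entry is appended afterwards (see the code below).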
11 | """ 12 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 13 | 14 | # Patch config file with a successful command 15 | with open(env.config_file, 'a') as f: 16 | f.write('\n[PREREQ]\n') 17 | f.write('PREREQ_01 = ls /tmp\n') 18 | 19 | command = ['dar-backup', '--full-backup', '-d', "example", '--config-file', env.config_file] 20 | process = runner.run(command) 21 | if process.returncode != 0: 22 | raise Exception(f"Command failed {command}") 23 | 24 | # Patch the config file with a failing command 25 | with open(env.config_file, 'a') as f: 26 | f.write('PREREQ_02 = command-does-not-exist /tmp\n') 27 | env.logger.info(f"PREREQ_02 which fails has been added to config file: {env.config_file}") 28 | 29 | try: 30 | command = ['dar-backup', '--full-backup', '-d', "example", '--config-file', env.config_file] 31 | process = runner.run(command) 32 | env.logger.info(f"return code: {process.returncode}") 33 | if process.returncode == 0: 34 | raise Exception("dar-backup must fail when a prereq command fails") 35 | except Exception: 36 | env.logger.exception("Expected exception: dar-backup must fail when a prereq command fails") 37 | assert True 38 | -------------------------------------------------------------------------------- /v2/tests/envdata.py: -------------------------------------------------------------------------------- 1 | 2 | import logging 3 | import os 4 | 5 | from dataclasses import dataclass 6 | from datetime import datetime 7 | from dar_backup.util import setup_logging 8 | 9 | @dataclass 10 | class EnvData(): 11 | test_case_name: str 12 | test_dir: str 13 | backup_dir: str 14 | backup_d_dir: str 15 | data_dir: str 16 | restore_dir: str 17 | template_config_file: str 18 | config_file: str 19 | template_dar_rc: str 20 | dar_rc: str 21 | log_file: str 22 | datestamp: str 23 | logger: logging.Logger 24 | command_logger: logging.Logger 25 | 26 | def __init__(self, test_case_name: str, logger: logging.Logger, command_logger: logging.Logger): 27 | self.test_case_name = test_case_name 28 | self.test_dir = f"/tmp/unit-test/{test_case_name.lower()}" 29 | self.backup_dir = os.path.join(self.test_dir, "backups") 30 | self.backup_d_dir = os.path.join(self.test_dir, "backup.d") 31 | self.restore_dir = os.path.join(self.test_dir, "restore") 32 | self.data_dir = os.path.join(self.test_dir, "data") 33 | self.template_config_file = os.path.abspath(os.path.join(os.path.dirname(__file__),"../template/dar-backup.conf.template")) 34 | self.config_file = os.path.join(self.test_dir, "dar-backup.conf") 35 | self.template_dar_rc = os.path.abspath(os.path.join(os.path.dirname(__file__), "../template/.darrc")) 36 | self.dar_rc = os.path.join(self.test_dir, ".darrc") 37 | self.log_file = "/tmp/unit-test/test.log" 38 | self.datestamp = datetime.now().strftime('%Y-%m-%d') 39 | self.logger = logger 40 | self.command_logger = command_logger 41 | -------------------------------------------------------------------------------- /v2/tests/test_filter_darrc_file.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import logging 4 | from pathlib import Path 5 | 6 | from dar_backup.dar_backup import filter_darrc_file 7 | from tests.envdata import EnvData 8 | 9 | def test_filter_darrc_file_removes_verbose_flags(monkeypatch): 10 | logger = logging.getLogger("test_logger") 11 | command_logger = logging.getLogger("command_logger") 12 | env = EnvData("FilterDarrcVerboseTest", logger, command_logger) 13 | 14 | # Create test .darrc 
content with verbose flags and valid entries 15 | verbose_lines = [ 16 | "-vt", "-vs", "-vd", "-vf", "-va", # should be removed 17 | "+ /important/data", "- /tmp", "-v", "--", "+ /extra" 18 | ] 19 | 20 | os.makedirs(env.test_dir, exist_ok=True) 21 | test_darrc_path = os.path.join(env.test_dir, ".darrc") 22 | with open(test_darrc_path, "w") as f: 23 | f.write("\n".join(verbose_lines)) 24 | 25 | # Redirect HOME to a temporary location 26 | fake_home = os.path.join(env.test_dir, "fake_home") 27 | os.makedirs(fake_home, exist_ok=True) 28 | monkeypatch.setenv("HOME", fake_home) 29 | 30 | # Call the function and get the filtered file path 31 | filtered_path = filter_darrc_file(test_darrc_path) 32 | 33 | # Verify the file was created 34 | assert os.path.exists(filtered_path) 35 | 36 | env.logger.info(f"Filtered darrc: {filtered_path}") 37 | 38 | with open(filtered_path, "r") as f: 39 | filtered_lines = f.read().splitlines() 40 | 41 | # These flags should have been removed 42 | for removed in ["-vt", "-vs", "-vd", "-vf", "-va"]: 43 | assert removed not in filtered_lines 44 | 45 | # These should remain 46 | for kept in ["+ /important/data", "- /tmp", "-v", "--", "+ /extra"]: 47 | assert kept in filtered_lines 48 | 49 | -------------------------------------------------------------------------------- /v1/test/test-multiple-definitions.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # copy backup definition TEST --> "A backup definition to verify multiple definitions are handled correctly 5 | 6 | BACKUP_DEFINITION_SPACES="A backup definition" 7 | 8 | SCRIPTPATH=$(realpath "$0") 9 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 10 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 11 | 12 | source "$SCRIPTDIRPATH"/setup.sh 13 | 14 | source "$TESTDIR"/conf/dar-backup.conf 15 | 16 | # make a second backup definition 17 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR/backups.d/$BACKUP_DEFINITION_SPACES" 18 | 19 | # make a third bad backup definition name containing "_" 20 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR/backups.d/CONTAINS_UNDERSCORES" 21 | 22 | 23 | # create catalogs 24 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 25 | if [[ $? != "0" ]]; then 26 | echo ERROR catalog was not created, exiting 27 | exit 1 28 | fi 29 | 30 | # run FULL backup 31 | "$TESTDIR"/bin/dar-backup.sh --local-backup-dir 32 | RESULT=$? 33 | if [[ $RESULT != "0" ]]; then 34 | TESTRESULT=1 35 | fi 36 | 37 | # alter backup set 38 | cp "$SCRIPTDIRPATH"/GREENLAND.JPEG "$TESTDIR/dirs/include this one/" 39 | cp "$SCRIPTDIRPATH"/GREENLAND.JPEG "$TESTDIR/dirs/exclude this one/" 40 | 41 | # run DIFF backup 42 | "$TESTDIR"/bin/dar-diff-backup.sh --local-backup-dir 43 | RESULT=$? 44 | if [[ $RESULT != "0" ]]; then 45 | TESTRESULT=1 46 | fi 47 | 48 | # modify a file backed up in the DIFF 49 | touch "$TESTDIR/dirs/include this one/GREENLAND.JPEG" 50 | 51 | # run INCREMENTAL backup 52 | "$TESTDIR"/bin/dar-inc-backup.sh --local-backup-dir 53 | RESULT=$? 54 | if [[ $RESULT != "0" ]]; then 55 | TESTRESULT=1 56 | fi 57 | 58 | checkExpectLog "=> backup discarded" "$TESTDIR/archives/dar-backup.log" 59 | 60 | echo TEST RESULT: "$TESTRESULT" 61 | exit "$TESTRESULT" 62 | -------------------------------------------------------------------------------- /v1/test/mk-release.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | # 3 | # Make a release tar file 4 | # 5 | # $1 is the tag to package 6 | 7 | if [ -z "${1}" ]; then echo "tag not given, exiting"; exit 1; fi 8 | echo tag to create release from: \""$1"\" 9 | 10 | TAG=$(grep -P -o "^v\d+\.\d+\.\d+$" <<< "$1") 11 | if [[ "$TAG" == "" ]]; then 12 | echo "TAG \"$1\" does not match required tag patten, exiting" 13 | exit 1 14 | fi 15 | if ! git show-ref --tags --quiet "$1"; then 16 | echo "TAG \"$1\" not found, exiting" 17 | exit 1 18 | fi 19 | 20 | DIR=/tmp/dar-backup 21 | TARFILE="dar-backup-linux-${1}.tar.gz" 22 | 23 | if [[ -e "$DIR" ]]; then rm -fr "$DIR" || exit 1; fi 24 | if [[ -f "/tmp/$TARFILE" ]]; then rm "/tmp/$TARFILE" || exit 1; fi 25 | 26 | cd /tmp || exit 1 27 | git clone https://github.com/per2jensen/dar-backup.git || exit 1 28 | cd dar-backup || exit 1 29 | 30 | git checkout "tags/$1" -b "release-$1" || exit 1 31 | chmod +x bin/install.sh 32 | rm -fr "$DIR/.git" 33 | rm -fr "$DIR/.github" 34 | rm -fr "$DIR/test" 35 | 36 | echo "This package is built from tag: $1" > VERSION 37 | 38 | # add version number to shell scripts 39 | while IFS= read -r -d "" file 40 | do 41 | sed -i "s/@@DEV-VERSION@@/$1/" "$file" 42 | done < <(find . -name "*.sh" -type f -print0) 43 | 44 | 45 | cd $DIR/.. || exit 1 46 | tar czvf "$TARFILE" dar-backup 47 | 48 | rm -fr /tmp/dar-backup || exit 1 49 | tar -x -f ${TARFILE} dar-backup/LICENSE || exit 1 50 | SHA256=$(sha256sum /tmp/dar-backup/LICENSE |cut -d" " -f1) 51 | if [[ "$SHA256" == "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986" ]]; then 52 | echo LICENSE exists in tarball and is unchanged 53 | else 54 | echo "\"LICENSE\" file has changed, exiting" 55 | exit 56 | fi 57 | 58 | echo SHA256: 59 | sha256sum "$TARFILE" 60 | echo "SUCCESS: a release tarball from tag: \"$TAG\" was produced" 61 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Report something not working as expected in dar-backup version 2 3 | title: "[Bug] " 4 | labels: [bug] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to help improve `dar-backup`! 10 | Please provide as much detail as possible. 11 | 12 | - type: input 13 | id: version 14 | attributes: 15 | label: Version 16 | description: Output of `dar --version` and `dar-backup --version` 17 | placeholder: e.g., dar 2.7.12, dar-backup 1.4.0 18 | validations: 19 | required: true 20 | 21 | - type: textarea 22 | id: description 23 | attributes: 24 | label: What happened? 25 | description: Describe the issue and expected behavior. 26 | placeholder: Tell us what you see vs. what you expected. 27 | validations: 28 | required: true 29 | 30 | - type: textarea 31 | id: steps 32 | attributes: 33 | label: Steps to Reproduce 34 | description: Help us reproduce the issue step-by-step. 35 | placeholder: | 36 | 1. Run `dar-backup ...` 37 | 2. Use this config ... 38 | 3. Observe this output ... 39 | validations: 40 | required: true 41 | 42 | - type: textarea 43 | id: logs 44 | attributes: 45 | label: Relevant Logs or Output 46 | description: Paste logs or command output (redact sensitive info). 47 | render: shell 48 | validations: 49 | required: false 50 | 51 | - type: dropdown 52 | id: os 53 | attributes: 54 | label: Operating System 55 | description: Which OS are you running on? 
56 | options: 57 | - Linux (Debian/Ubuntu) 58 | - Linux (Arch/Fedora) 59 | - macOS 60 | - Other (please describe) 61 | validations: 62 | required: true 63 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | ## TL;DR 4 | Be respectful. Be kind. Assume good intent. 5 | 6 | --- 7 | 8 | ## Our Pledge 9 | 10 | We are committed to making participation in this project a harassment-free experience for everyone, regardless of experience level, gender, identity, ability, ethnicity, religion, or background. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 13 | 14 | ## Our Standards 15 | 16 | Examples of behavior that contributes to a positive environment include: 17 | 18 | - Using welcoming and inclusive language 19 | - Being respectful of differing viewpoints and experiences 20 | - Gracefully accepting constructive criticism 21 | - Helping others, especially newcomers 22 | - Showing empathy towards other community members 23 | 24 | Examples of unacceptable behavior include: 25 | 26 | - The use of sexualized language or imagery 27 | - Personal attacks, insults, or derogatory comments 28 | - Trolling, deliberate intimidation, or disruptive behavior 29 | - Harassment in any form 30 | - Topics not in some way meaningfully connected to the `dar-backup` project 31 | 32 | ## Scope 33 | 34 | This Code of Conduct applies within all project spaces, including GitHub issues, pull requests, discussions, and any other interaction related to the `dar-backup` project. 35 | 36 | ## Enforcement 37 | 38 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening a confidential issue or contacting the maintainers directly. 39 | 40 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any behavior they deem inappropriate, threatening, offensive, or harmful. 41 | 42 | ## Attribution 43 | 44 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), version 2.1. 45 | -------------------------------------------------------------------------------- /v1/test/compile/README.md: -------------------------------------------------------------------------------- 1 | # Compile dar 2 | 3 | ## Problems 4 | 5 | ### dar_manager cannot find dar 6 | 7 | dar_manager cannot find its dar binary, even though they are in the same directory. 
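A quick sanity check (not in the original notes, just an illustration) is to list every `dar` visible on the PATH and confirm which one is picked up first:

```
type -a dar
command -v dar_manager
dar --version
```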
8 | 9 | I suspect a config somewhere is teasing me - temporary "fix" until I figure out what is wrong: 10 | 11 | - sudo apt remove dar 12 | 13 | - sudo ln -s /home/$USER/.local/dar/bin/dar /usr/bin/dar 14 | 15 | 16 | ### --disable-libcurl-linking 17 | 18 | currently I get errors building dar if not issuing this config option 19 | 20 | 21 | ## package prereq 22 | This kind of works on Ubuntu 23.10 23 | 24 | ``` 25 | sudo apt install \ 26 | zlib1g-dev \ 27 | libbz2-dev \ 28 | liblzo2-dev \ 29 | liblzma-dev \ 30 | libzstd-dev \ 31 | liblz4-dev \ 32 | libgcrypt20-dev \ 33 | libgpgme-dev \ 34 | doxygen \ 35 | graphviz \ 36 | upx-ucl \ 37 | groff \ 38 | libext2fs-dev \ 39 | libthreadar-dev \ 40 | librsync-dev \ 41 | libcurl4-openssl-dev \ 42 | python3-pybind11 \ 43 | python3-dev \ 44 | libargon2-dev 45 | ``` 46 | 47 | ## Compile 48 | 49 | ``` 50 | DAR_VERSION=2.7.14 51 | if [[ -d ~/.local/dar-${DAR_VERSION} ]]; then 52 | mv ~/.local/dar-${DAR_VERSION} ~/.local/dar-${DAR_VERSION}.old 53 | fi 54 | mkdir -p ~/.local/dar-${DAR_VERSION} 55 | 56 | if [[ -d /tmp/dar-${DAR_VERSION} ]]; then 57 | rm -fr /tmp/dar-${DAR_VERSION} || exit 2 58 | fi 59 | 60 | tar xvf $HOME/git/dar-backup/test/compile/src/dar-${DAR_VERSION}.tar.gz --directory /tmp 61 | 62 | cd /tmp/dar-${DAR_VERSION} 63 | 64 | CXXFLAGS=-O 65 | export CXXFLAGS 66 | make clean distclean 67 | 68 | ./configure \ 69 | --prefix=/home/$USER/.local/dar-${DAR_VERSION} \ 70 | --disable-libcurl-linking 71 | 72 | 73 | make 74 | make install-strip 75 | 76 | 77 | if [[ -L ~/.local/dar ]]; then 78 | rm ~/.local/dar 79 | ln -s ~/.local/dar-${DAR_VERSION} ~/.local/dar 80 | else 81 | echo "ERROR ~/.local/dar is not a link" 82 | fi 83 | ``` 84 | 85 | 86 | -------------------------------------------------------------------------------- /v2/tests/test_stress.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import sys 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 5 | from dar_backup.command_runner import CommandRunner 6 | from tests.envdata import EnvData 7 | 8 | 9 | def create_many_tiny_files(env: EnvData, count=5000): 10 | """ 11 | Create a large number of tiny files (1–4 bytes each) for stress testing. 12 | """ 13 | for i in range(count): 14 | filepath = os.path.join(env.data_dir, f"tiny_{i}.txt") 15 | with open(filepath, "wb") as f: 16 | f.write(os.urandom(i % 4 + 1)) # 1–4 bytes 17 | env.logger.info(f"Created {count} tiny files in {env.data_dir}") 18 | 19 | 20 | def test_backup_with_many_small_files(setup_environment, env: EnvData): 21 | """ 22 | Stress test: Archive and restore thousands of tiny files. 
23 | """ 24 | file_count = 5000 25 | create_many_tiny_files(env, file_count) 26 | 27 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 28 | 29 | # Run full backup 30 | command = [ 31 | 'dar-backup', 32 | '--full-backup', 33 | '-d', 'example', 34 | '--config-file', env.config_file, 35 | '--log-level', 'debug', 36 | '--log-stdout' 37 | ] 38 | result = runner.run(command) 39 | env.logger.info("Ran dar-backup with many tiny files") 40 | assert result.returncode == 0 41 | 42 | # List archive contents 43 | archive_base = f"example_FULL_{env.datestamp}" 44 | list_result = runner.run([ 45 | 'dar-backup', 46 | '--list-contents', 47 | archive_base, 48 | '--config-file', env.config_file 49 | ]) 50 | assert list_result.returncode == 0 51 | file_hits = sum(1 for line in list_result.stdout.splitlines() if "tiny_" in line) 52 | assert file_hits == file_count, f"Expected {file_count} tiny files, found {file_hits}" 53 | 54 | env.logger.info(f"Archive contains all {file_count} tiny files.") 55 | -------------------------------------------------------------------------------- /v2/tests/test_create_backup_command.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dar_backup.dar_backup import create_backup_command 3 | from dar_backup.config_settings import ConfigSettings 4 | import os 5 | import sys 6 | # Ensure the src directory is in the Python path 7 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 8 | 9 | @pytest.mark.parametrize("backup_type, expect_reference", [ 10 | ("FULL", False), 11 | ("DIFF", True), 12 | ("INCR", True), 13 | ]) 14 | def test_create_backup_command_minimal_structure(tmp_path, backup_type, expect_reference): 15 | dummy_config_path = tmp_path / "dar-backup.conf" 16 | dummy_config_path.write_text(""" 17 | [MISC] 18 | LOGFILE_LOCATION = /tmp/fake.log 19 | MAX_SIZE_VERIFICATION_MB = 20 20 | MIN_SIZE_VERIFICATION_MB = 0 21 | NO_FILES_VERIFICATION = 5 22 | COMMAND_TIMEOUT_SECS = 86400 23 | 24 | [DIRECTORIES] 25 | BACKUP_DIR = /tmp/backups/ 26 | BACKUP.D_DIR = /tmp/backup.d/ 27 | DATA_DIR = /tmp/data/ 28 | TEST_RESTORE_DIR = /tmp/restore/ 29 | 30 | [AGE] 31 | DIFF_AGE = 30 32 | INCR_AGE = 15 33 | 34 | [PAR2] 35 | ERROR_CORRECTION_PERCENT = 5 36 | ENABLED = True 37 | """) 38 | _ = ConfigSettings(config_file=str(dummy_config_path)) 39 | 40 | backup_file = "test-archive" 41 | darrc_path = str(tmp_path / ".darrc") 42 | backup_definition = str(tmp_path / "backup_def.dar") 43 | reference_backup = "ref-archive" if expect_reference else None 44 | 45 | command = create_backup_command( 46 | backup_type, 47 | backup_file, 48 | darrc_path, 49 | backup_definition, 50 | reference_backup 51 | ) 52 | 53 | assert command[0] == "dar" 54 | assert "-c" in command 55 | assert backup_file in command 56 | assert "-B" in command 57 | assert darrc_path in command 58 | assert backup_definition in command 59 | 60 | if expect_reference: 61 | assert "-A" in command 62 | assert reference_backup in command 63 | else: 64 | assert "-A" not in command 65 | -------------------------------------------------------------------------------- /v2/src/dar_backup/demo_backup_def.j2: -------------------------------------------------------------------------------- 1 | 2 | # SPDX-License-Identifier: GPL-3.0-or-later 3 | 4 | # ------------------------------------------------------------------------ 5 | # Demo of a `dar-backup` definition file 6 | # This file was generated by dar-backup's `demo` program. 
7 | # 8 | {%- if opts_dict | length > 0 %} 9 | # Options given to the `demo` program: 10 | {% endif %} 11 | {%- if opts_dict.ROOT_DIR -%} 12 | # --root-dir : {{ opts_dict.ROOT_DIR }} 13 | {% endif %} 14 | {%- if opts_dict.DIR_TO_BACKUP -%} 15 | # --dir-to-backup : {{ opts_dict.DIR_TO_BACKUP }} 16 | {% endif -%} 17 | {%- if opts_dict.BACKUP_DIR -%} 18 | # --backup-dir : {{ opts_dict.BACKUP_DIR }} 19 | {% endif %} 20 | # 21 | # Variables used to generate this file: 22 | # ===================================== 23 | {% for k,v in vars_map|dictsort %}# {{ k }} : {{ v }} 24 | {% endfor -%} 25 | # ------------------------------------------------------------------------ 26 | 27 | # Switch to ordered selection mode, which means that the following options 28 | # will be considered top to bottom 29 | -am 30 | 31 | # Backup Root dir 32 | {%- if vars_map.ROOT_DIR %} 33 | -R {{ vars_map.ROOT_DIR }} 34 | {% endif -%} 35 | 36 | {% if vars_map.DIR_TO_BACKUP %} 37 | # Directories to backup below the Root dir 38 | -g {{ vars_map.DIR_TO_BACKUP }} 39 | 40 | # This is an example of exclusion of a `.private` directory inside the 41 | # directory that is backed up 42 | -P {{ vars_map.DIR_TO_BACKUP }}/.private 43 | {%- else %} 44 | # Examples of directories to exclude below the Root dir 45 | -P mnt 46 | -P .cache 47 | {% endif %} 48 | 49 | # compression level 50 | -z5 51 | 52 | # no overwrite, if you rerun a backup, 'dar' halts and asks what to do 53 | # as `dar-backup` gives the `-Q` option to `dar`, the net effect of `-n` and `-Q` is 54 | # that `dar` will quit and not overwrite the existing backup 55 | -n 56 | 57 | # size of each slice in the archive (10G is 10 Gigabytes) 58 | --slice 10G 59 | 60 | # bypass directores marked as cache directories 61 | # http://dar.linux.free.fr/doc/Features.html 62 | --cache-directory-tagging 63 | -------------------------------------------------------------------------------- /v2/tests/test_autocompletion_install.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | import pytest 4 | from dar_backup.installer import install_autocompletion, uninstall_autocompletion 5 | 6 | @pytest.fixture(autouse=True) 7 | def isolate_home(tmp_path, monkeypatch): 8 | """ 9 | Redirect HOME and SHELL to a temporary directory and default shell. 
10 | """ 11 | # Create fake home and rc files 12 | fake_home = tmp_path / "home" 13 | fake_home.mkdir() 14 | # Set HOME to fake_home 15 | monkeypatch.setenv("HOME", str(fake_home)) 16 | # Default to bash shell 17 | monkeypatch.setenv("SHELL", "/bin/bash") 18 | return fake_home 19 | 20 | def read_rc(home, filename): 21 | return (home / filename).read_text().splitlines() 22 | 23 | 24 | def test_install_autocompletion_first_time(isolate_home, monkeypatch): 25 | home = isolate_home 26 | # Ensure no existing rc 27 | rc_file = home / ".bashrc" 28 | # Install 29 | install_autocompletion() 30 | # After install, rc should exist 31 | assert rc_file.exists() 32 | lines = read_rc(home, ".bashrc") 33 | # Marker present 34 | assert any("# >>> dar-backup autocompletion >>>" in l for l in lines) 35 | 36 | 37 | def test_install_autocompletion_idempotent(isolate_home, monkeypatch): 38 | home = isolate_home 39 | rc_file = home / ".bashrc" 40 | # First install 41 | install_autocompletion() 42 | # Clear logger 43 | # Second install should not duplicate 44 | install_autocompletion() 45 | lines = read_rc(home, ".bashrc") 46 | # Only one marker 47 | assert sum(1 for l in lines if "# >>> dar-backup autocompletion >>>" in l) == 1 48 | 49 | 50 | def test_uninstall_autocompletion_removes_block(isolate_home, monkeypatch): 51 | home = isolate_home 52 | rc_file = home / ".bashrc" 53 | # Prepare rc with block 54 | install_autocompletion() 55 | # Now uninstall 56 | uninstall_autocompletion() 57 | lines = read_rc(home, ".bashrc") 58 | # Marker should be absent 59 | assert not any("# >>> dar-backup autocompletion >>>" in l for l in lines) 60 | -------------------------------------------------------------------------------- /v1/test/test-mk-release-on-dev.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # - run mk-release.sh by patching it to accept a DEV tag 4 | # - unpack the tar file and run the installer 5 | # - run dar-backup (full, diff and inc) 6 | 7 | if [[ -n "$1" ]]; then 8 | LATEST_DEV="$1" 9 | echo \$1: "$1" 10 | fi 11 | 12 | TEST_RESULT=0 13 | 14 | SCRIPTPATH=$(realpath "$0") 15 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 16 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 17 | 18 | #patch mk-release.sh 19 | cp "$SCRIPTDIRPATH"/mk-release.sh /tmp/ || exit 1 20 | sed -i 's/"\^v\\d+/"\^DEV\\d+/' /tmp/mk-release.sh 21 | echo patched TAG regex in /tmp/mk-release.sh: 22 | grep "TAG=" /tmp/mk-release.sh 23 | 24 | 25 | # latest DEV tag 26 | if [[ "$LATEST_DEV" == "" ]]; then 27 | LATEST_DEV=$(git tag|grep -P "DEV\d.\d"|sort|tail -n1) 28 | fi 29 | echo LATEST_DEV: "$LATEST_DEV" 30 | # build a "release" based on latest DEV tag 31 | /tmp/mk-release.sh "$LATEST_DEV" || { echo a release tar ball was not produced, exiting; exit 1; } 32 | 33 | 34 | UNTAR_LOCATION=/tmp/dar-test-install/ 35 | TAR_FILE=dar-backup-linux-${LATEST_DEV}.tar.gz 36 | echo TAR_FILE to test install of: "$TAR_FILE" 37 | rm -fr "$UNTAR_LOCATION" || exit 1 38 | # follow install instructions 39 | mkdir -p "$UNTAR_LOCATION" 40 | tar zxf "/tmp/${TAR_FILE}" --directory "$UNTAR_LOCATION" 41 | chmod +x "$UNTAR_LOCATION"/dar-backup/bin/install.sh 42 | "$UNTAR_LOCATION"/dar-backup/bin/install.sh 43 | "$UNTAR_LOCATION"/dar-backup/bin/dar-backup.sh --local-backup-dir 44 | TEST_RESULT=$? 45 | 46 | touch "$UNTAR_LOCATION"/dar-backup/bin/DUMMY 47 | "$UNTAR_LOCATION"/dar-backup/bin/dar-diff-backup.sh --local-backup-dir 48 | RESULT=$? 
49 | if [[ "$RESULT" != "0" ]]; then 50 | TEST_RESULT="$RESULT" 51 | fi 52 | 53 | 54 | touch "$UNTAR_LOCATION"/dar-backup/bin/DUMMY2 55 | "$UNTAR_LOCATION"/dar-backup/bin/dar-inc-backup.sh --local-backup-dir 56 | RESULT=$? 57 | if [[ "$RESULT" != "0" ]]; then 58 | TEST_RESULT="$RESULT" 59 | fi 60 | 61 | #cleanup 62 | rm -fr "$UNTAR_LOCATION" || exit 1 63 | rm -f /tmp/"$TAR_FILE" || exit 1 64 | 65 | echo TEST_RESULT: $TEST_RESULT 66 | exit $TEST_RESULT 67 | -------------------------------------------------------------------------------- /.github/workflows/fetch_clones.yml: -------------------------------------------------------------------------------- 1 | name: Update GitHub Clone Metrics 2 | 3 | permissions: 4 | contents: write 5 | pull-requests: write 6 | 7 | on: 8 | schedule: 9 | - cron: '17 2 * * *' # Run daily at 02:17 UTC 10 | workflow_dispatch: 11 | 12 | jobs: 13 | update-clones: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - name: Checkout repository 18 | uses: actions/checkout@v3 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v4 22 | with: 23 | python-version: '3.x' 24 | 25 | - name: Install dependencies 26 | run: pip install requests 27 | 28 | - name: Run fetch_clones.py 29 | env: 30 | TOKEN: ${{ secrets.DAR_BACKUP_METRICS }} 31 | run: PYTHONPATH=src python src/clonepulse/fetch_clones.py --user per2jensen --repo dar-backup 32 | 33 | - name: Show Git status (debug) 34 | run: | 35 | echo "::group::Git Status" 36 | git status 37 | echo "::endgroup::" 38 | echo "::group::Git Diff" 39 | git diff 40 | echo "::endgroup::" 41 | echo "::group::Untracked Files" 42 | git ls-files --others --exclude-standard 43 | echo "::endgroup::" 44 | 45 | - name: Commit changes 46 | run: | 47 | git config --global user.name "github-actions" 48 | git config --global user.email "actions@github.com" 49 | 50 | # Check for changes before committing 51 | if ! git diff --quiet || [ -n "$(git ls-files --others --exclude-standard)" ]; then 52 | echo "Changes detected, committing..." 53 | git add $(find clonepulse -type f \( -name '*.json' -o -name '*.png' -o -name 'milestone_*.txt' \)) || true 54 | git commit -m "Update clone data on $(date -u '+%Y-%m-%dT%H:%M:%SZ')" 55 | # Push changes — if it fails, likely due to race; next run will retry 56 | git push || echo "⚠️ Push failed (likely a fast-forward conflict). Will retry next time." 57 | else 58 | echo "No changes to commit." 
59 | fi 60 | -------------------------------------------------------------------------------- /.github/workflows/update_downloads.yml: -------------------------------------------------------------------------------- 1 | name: Update PyPI Downloads 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | on: 7 | schedule: 8 | # 3:15 UTC according to https://docs.github.com/en/actions/use-cases-and-examples/project-management/scheduling-issue-creation 9 | - cron: '15 3 * * *' # approx 2 hours after PyPI stats should finish 10 | workflow_dispatch: # Allow manual runs 11 | 12 | jobs: 13 | update-downloads: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | permissions: 18 | contents: write # Required to push changes to the repository 19 | 20 | steps: 21 | - name: Checkout repository 22 | uses: actions/checkout@v3 23 | 24 | - name: Log current UTC time 25 | run: | 26 | echo Time of workflow run: 27 | date -u 28 | 29 | - name: Set up Python 30 | uses: actions/setup-python@v4 31 | with: 32 | python-version: '3.11' 33 | 34 | - name: Install dependencies 35 | run: | 36 | pip install pypistats 37 | 38 | - name: Run download tracker 39 | run: python track_downloads.py 40 | 41 | - name: Verify changes 42 | run: | 43 | unauthorized_files=$(git diff --name-only | grep -vE "^(downloads\.json|README\.md)$" || true) 44 | if [[ -n "$unauthorized_files" ]]; then 45 | echo "Unauthorized file modification detected in:" 46 | echo "$unauthorized_files" 47 | exit 1 48 | fi 49 | 50 | - name: Commit and push changes 51 | run: | 52 | git config --global user.name 'GitHub Actions' 53 | git config --global user.email 'actions@github.com' 54 | 55 | # Always add downloads.json 56 | git add downloads.json 57 | 58 | # If README.md is modified, add it too 59 | if git diff --name-only | grep -q "^README\.md$"; then 60 | git add README.md 61 | fi 62 | 63 | git commit -m 'chore: update total downloads' || echo "No changes to commit" 64 | git push 65 | -------------------------------------------------------------------------------- /v2/tests/test_links.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 5 | 6 | import subprocess 7 | from dar_backup.command_runner import CommandRunner 8 | 9 | 10 | def test_backup_with_broken_symlink(setup_environment, env): 11 | """Ensure that dar-backup handles broken symlinks gracefully during full backup.""" 12 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 13 | 14 | # Setup a broken symlink in the data dir 15 | broken_link = os.path.join(env.data_dir, "broken_link") 16 | os.symlink("/non/existent/target", broken_link) 17 | assert os.path.islink(broken_link) 18 | assert not os.path.exists(broken_link) # Confirm it's broken 19 | 20 | # Also add a real file (just to trigger archive creation) 21 | with open(os.path.join(env.data_dir, "real.txt"), "w") as f: 22 | f.write("real content\n") 23 | 24 | # Run full backup 25 | result = runner.run(["dar-backup", "--full-backup", "--config-file", env.config_file]) 26 | env.logger.info("Ran dar-backup with a broken symlink in the data directory") 27 | 28 | # The backup should either succeed or report non-critical issues (return code 0 or 5) 29 | assert result.returncode in (0, 5), f"Unexpected return code: {result.returncode}" 30 | 31 | # Derive expected archive path 32 | expected_archive_base = os.path.join(env.backup_dir, "example") 33 | expected_archive = 
f"{expected_archive_base}_FULL_{env.datestamp}.1.dar" 34 | assert os.path.exists(expected_archive), f"Expected archive not found: {expected_archive}" 35 | 36 | 37 | # Use the dar-backup list-contents to inspect archive contents 38 | list_result = runner.run([ 39 | "dar-backup", 40 | "--list-contents", f"{expected_archive_base}_FULL_{env.datestamp}", 41 | "--config-file", env.config_file 42 | ]) 43 | env.logger.info("Listed contents of archive with broken symlink") 44 | 45 | # Output should mention the symlink (dar shows them with 'l' in type column) 46 | assert "broken_link" in list_result.stdout or "broken_link" in list_result.stderr 47 | 48 | -------------------------------------------------------------------------------- /v1/test/test-parchive.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # on purpose introduce "bitrot", 4k random chars 10k into the archive 6 | # repair via "par2 r" 7 | # verify repair is successful using "dar -t" and "par2 v" 8 | 9 | SCRIPTPATH=$(realpath "$0") 10 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 11 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 12 | 13 | source "$SCRIPTDIRPATH/setup.sh" 14 | source "$TESTDIR/conf/dar-backup.conf" 15 | 16 | TESTRESULT=0 17 | 18 | # do a backup 19 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir > /dev/null 20 | RESULT=$? 21 | if [[ $RESULT != "0" ]]; then 22 | TESTRESULT=1 23 | fi 24 | 25 | # introduce "bitrot" 26 | # 4096 random chars, 10kB into the archive 27 | echo "==> introduce bitrot" 28 | BITROT=$(tr -dc 'a-z0-9' < /dev/random|head -c4096) 29 | printf "BITROT data:\n%s\n" "$BITROT" 30 | ARCHIVEFILE=$TESTDIR/archives/TEST_FULL_${DATE}.1.dar 31 | echo "$BITROT"|tr -d '\n'|dd of="$ARCHIVEFILE" bs=4096 seek=$((10*1024)) oflag=seek_bytes conv=notrunc 32 | 33 | # does dar detect the changes 34 | echo "==> dar test archive" 35 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" 36 | RESULT=$? 37 | if [[ $RESULT == "0" ]]; then 38 | echo "dar did NOT detect bitrot" 39 | TESTRESULT=1 40 | fi 41 | 42 | # does par2 detect bitrot 43 | echo "==> par2 verify archive" 44 | par2 v -q "$ARCHIVEFILE" 45 | RESULT=$? 46 | if [[ $RESULT == "0" ]]; then 47 | echo "par2 did NOT detect bitrot" 48 | TESTRESULT=1 49 | fi 50 | 51 | # fix bitrot 52 | echo "==> par2 repair archive" 53 | par2 r -q "$ARCHIVEFILE" 54 | RESULT=$? 55 | if [[ $RESULT != "0" ]]; then 56 | echo "par2 did NOT repair bitrot" 57 | TESTRESULT=1 58 | fi 59 | 60 | # test archive with dar 61 | echo "==> dar test archive" 62 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" > /dev/null 63 | RESULT=$? 64 | if [[ $RESULT != "0" ]]; then 65 | echo "archive was not repaired" 66 | TESTRESULT=1 67 | fi 68 | 69 | # test archive with par2 70 | echo "==> par2 verify archive" 71 | par2 v -q "$ARCHIVEFILE" 72 | RESULT=$? 
73 | if [[ $RESULT != "0" ]]; then 74 | echo "par2 did NOT repair bitrot" 75 | TESTRESULT=1 76 | fi 77 | 78 | echo TEST RESULT: "$TESTRESULT" 79 | exit "$TESTRESULT" 80 | -------------------------------------------------------------------------------- /.github/workflows/install.yml: -------------------------------------------------------------------------------- 1 | name: installation 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | 7 | on: 8 | push: 9 | branches: [ main ] 10 | pull_request: 11 | branches: [ main ] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | jobs: 17 | install-procedure: 18 | runs-on: ubuntu-latest 19 | steps: 20 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 21 | - uses: actions/checkout@v4 22 | 23 | # From: https://lannonbr.com/blog/2019-12-09-git-commit-in-actions 24 | # setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default 25 | - name: setup git config 26 | run: | 27 | git config user.name "GitHub Actions Bot" 28 | git config user.email "<>" 29 | 30 | - name: Install required packages 31 | run: sudo apt install -y dar dar-static par2 git 32 | 33 | - name: install and backup from HEAD 34 | run: | 35 | chmod +x $GITHUB_WORKSPACE/test/test-install-head.sh 36 | $GITHUB_WORKSPACE/test/test-install-head.sh 37 | 38 | # # package the latest DEV tag 39 | # # install using the install procedure described in the README 40 | # # run a full backup 41 | # # run a diff backup 42 | # # run an inc backup 43 | # install-run-latest-DEV: 44 | # runs-on: ubuntu-latest 45 | # steps: 46 | # # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 47 | # - uses: actions/checkout@v4 48 | 49 | # # From: https://lannonbr.com/blog/2019-12-09-git-commit-in-actions 50 | # # setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default 51 | # - name: setup git config 52 | # run: | 53 | # git config user.name "GitHub Actions Bot" 54 | # git config user.email "<>" 55 | 56 | # - name: Install required packages 57 | # run: sudo apt install -y dar dar-static par2 git 58 | 59 | # - name: run packaging script, install and run dar-backup 60 | # run: | 61 | # git config user.name "GitHub Actions Bot" 62 | # git config user.email "<>" 63 | -------------------------------------------------------------------------------- /v2/src/misc/duplicate2format.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDF License: GPL-3.0 or later 3 | 4 | """ 5 | Let `black` format a copy of --src 6 | 7 | No changes to the original source directory. 8 | 9 | 10 | 💡 Key features 11 | 12 | Options (--src, --dest) 13 | --src Source directory to copy and format (default: src) 14 | --dest Destination directory for the formatted copy (default: src-formatted) 15 | Graceful error handling (checks if source exists) 16 | Uses shutil.copytree for fast recursive copying 17 | Runs `black` on the destination directory 18 | """ 19 | 20 | 21 | import argparse 22 | import shutil 23 | import subprocess 24 | import os 25 | import sys 26 | 27 | def main(): 28 | parser = argparse.ArgumentParser( 29 | description="Copy a source directory to a destination and run black on the destination." 
30 | ) 31 | parser.add_argument( 32 | "--src", 33 | default="src", 34 | help="Source directory to copy and format (default: src).", 35 | ) 36 | parser.add_argument( 37 | "--dest", 38 | default="src-formatted", 39 | help="Destination directory for the formatted copy (default: src-formatted).", 40 | ) 41 | 42 | args = parser.parse_args() 43 | src_dir = args.src 44 | dest_dir = args.dest 45 | 46 | # If --dest exists, exit 47 | if os.path.exists(dest_dir): 48 | print( 49 | f"❌ Destination '{dest_dir}' already exists. " 50 | "Provide a custom --dest directory to proceed." 51 | ) 52 | exit(1) 53 | 54 | if not os.path.exists(src_dir): 55 | print(f"❌ Source directory '{src_dir}' does not exist.") 56 | exit(1) 57 | 58 | print(f"🔧 Copying '{src_dir}' to '{dest_dir}'...") 59 | shutil.copytree(src_dir, dest_dir, dirs_exist_ok=True) 60 | print("✅ Copy complete.") 61 | 62 | # Find black inside the active virtual environment 63 | venv_bin_dir = os.path.dirname(sys.executable) 64 | black_path = os.path.join(venv_bin_dir, "black") 65 | 66 | print(f"🎨 Running black on '{dest_dir}'...") 67 | subprocess.run([black_path, dest_dir], check=True) 68 | print(f"✅ Formatting complete. Formatted code in: {dest_dir}") 69 | 70 | 71 | if __name__ == "__main__": 72 | main() 73 | -------------------------------------------------------------------------------- /v2/tests/play.py: -------------------------------------------------------------------------------- 1 | import xml.etree.ElementTree as ET 2 | 3 | xml_data=""" 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | """ 23 | 24 | 25 | 26 | 27 | 28 | # Function to recursively find tags and build their full paths 29 | def find_files_with_paths(element, current_path=""): 30 | files = [] 31 | if element.tag == "Directory": 32 | current_path = f"{current_path}/{element.get('name')}" 33 | for child in element: 34 | if child.tag == "File": 35 | file_path = f"{current_path}/{child.get('name')}" 36 | files.append(file_path) 37 | elif child.tag == "Directory": 38 | files.extend(find_files_with_paths(child, current_path)) 39 | return files 40 | 41 | 42 | # Parse the XML data 43 | root = ET.fromstring(xml_data) 44 | # Extract full paths for all elements 45 | file_paths = find_files_with_paths(root) 46 | 47 | 48 | def success(): 49 | print("\033[1m\033[32mSUCCESS\033[0m") 50 | print("\033[1m\033[31mErrors\033[0m encountered") 51 | # Print the full paths 52 | #for path in file_paths: 53 | # print(path) 54 | 55 | 56 | success() -------------------------------------------------------------------------------- /v2/src/dar_backup/dar-backup.conf.j2: -------------------------------------------------------------------------------- 1 | 2 | # SPDX-License-Identifier: GPL-3.0-or-later 3 | 4 | # ------------------------------------------------------------------------ 5 | # Demo of a `dar-backup` configuration file 6 | # This file was generated by dar-backup's `demo` program. 
7 | # 8 | {%- if opts_dict | length > 0 %} 9 | # Options given to the `demo` program: 10 | {% endif %} 11 | {%- if opts_dict.ROOT_DIR -%} 12 | # --root-dir : {{ opts_dict.ROOT_DIR }} 13 | {% endif %} 14 | {%- if opts_dict.DIR_TO_BACKUP -%} 15 | # --dir-to-backup : {{ opts_dict.DIR_TO_BACKUP }} 16 | {% endif -%} 17 | {%- if opts_dict.BACKUP_DIR -%} 18 | # --backup-dir : {{ opts_dict.BACKUP_DIR }} 19 | {% endif %} 20 | # 21 | # Variables used to generate this file: 22 | # ===================================== 23 | {% for k,v in vars_map|dictsort %}# {{ k }} : {{ v }} 24 | {% endfor -%} 25 | # ------------------------------------------------------------------------ 26 | 27 | [MISC] 28 | LOGFILE_LOCATION = {{ vars_map.DAR_BACKUP_DIR -}}/dar-backup.log 29 | # optional parameters 30 | # LOGFILE_MAX_BYTES = 26214400 # 25 MB default, change as needed 31 | # LOGFILE_BACKUP_COUNT = 5 # default, change as needed 32 | # DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks// 33 | 34 | MAX_SIZE_VERIFICATION_MB = 2 35 | MIN_SIZE_VERIFICATION_MB = 0 36 | NO_FILES_VERIFICATION = 1 37 | # timeout in seconds for backup, test, restore and par2 operations 38 | # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used. 39 | # If a timeout is not specified when using the CommandRunner, a default timeout of 30 secs is used. 40 | COMMAND_TIMEOUT_SECS = 86400 41 | 42 | [DIRECTORIES] 43 | BACKUP_DIR = {{ vars_map.BACKUP_DIR }} 44 | BACKUP.D_DIR = {{ vars_map.BACKUP_D_DIR }} 45 | TEST_RESTORE_DIR = {{ vars_map.TEST_RESTORE_DIR }} 46 | # Optional parameter 47 | # If you want to store the catalog database away from the BACKUP_DIR, use the MANAGER_DB_DIR variable. 48 | #MANAGER_DB_DIR = /some/where/else/ 49 | 50 | [AGE] 51 | # DIFF and INCR backups are kept for a configured number of days, then deleted by the `cleanup` command 52 | # age settings are in days 53 | DIFF_AGE = 100 54 | INCR_AGE = 40 55 | 56 | [PAR2] 57 | ERROR_CORRECTION_PERCENT = 5 58 | ENABLED = True 59 | 60 | [PREREQ] 61 | #SCRIPT_1 = 62 | 63 | [POSTREQ] 64 | #SCRIPT_1 = 65 | -------------------------------------------------------------------------------- /v2/tests/test_postreq.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 5 | from dar_backup.command_runner import CommandRunner 6 | 7 | 8 | from envdata import EnvData 9 | 10 | def delete_backups(env: EnvData): 11 | # Patterns for the file types to delete 12 | patterns = ["*.dar", "*.par2"] 13 | # Find and delete matching files 14 | for pattern in patterns: 15 | glob_pattern = os.path.join(env.backup_dir, pattern) 16 | files_to_delete = glob.glob(glob_pattern) # Search for matching files 17 | for file_path in files_to_delete: 18 | env.logger.info(file_path) 19 | try: 20 | os.remove(file_path) 21 | env.logger.info(f"Removed: {file_path}") 22 | except Exception as e: 23 | pass 24 | 25 | def test_postreq(setup_environment, env): 26 | """ 27 | Test the postreq command in the config file. 28 | dar-backup must fail when a postreq command fails. 
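For reference, the section this test first appends to the config file is `[POSTREQ]` with `POSTREQ_01 = ls <backup dir>`; a failing entry is appended afterwards (see the code below).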
29 | """ 30 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 31 | 32 | # Patch config file with a successful command 33 | with open(env.config_file, 'a') as f: 34 | f.write('\n[POSTREQ]\n') 35 | f.write(f'POSTREQ_01 = ls {env.backup_dir}\n') 36 | 37 | 38 | # Run the command 39 | command = ['dar-backup', '--full-backup' ,'-d', "example", '--config-file', env.config_file, '--log-level', 'debug'] 40 | process = runner.run(command) 41 | assert process.returncode == 0 42 | 43 | # Patch the config file with a failing command 44 | with open(env.config_file, 'a') as f: 45 | f.write('PREREQ_02 = command-does-not-exist /tmp\n') 46 | 47 | # cleanup first backup, otherwise the POSTREQ result is skewed 48 | delete_backups(env) 49 | 50 | # Run the command 51 | try: 52 | command = ['dar-backup', '--full-backup' ,'-d', "example", '--config-file', env.config_file, '--log-stdout' ] 53 | process = runner.run(command) 54 | assert process.returncode != 0, "dar-backup must fail when a postreq command fails" 55 | except Exception as e: 56 | env.logger.exception("Expected exception: dar-backup must fail when a prereq command fails") 57 | assert False, "dar-backup must fail when a prereq command fails" 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /v2/tests/test_discord_webhook.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from types import SimpleNamespace 4 | 5 | import pytest 6 | 7 | import dar_backup.util as util 8 | 9 | 10 | class DummyResponse: 11 | def __enter__(self): 12 | return self 13 | 14 | def __exit__(self, exc_type, exc_val, exc_tb): 15 | return False 16 | 17 | 18 | def test_send_discord_message_prefers_config_over_env(monkeypatch): 19 | captured = {} 20 | 21 | def fake_urlopen(request, timeout): 22 | captured["url"] = request.full_url 23 | captured["payload"] = request.data 24 | captured["headers"] = {k.lower(): v for k, v in request.headers.items()} 25 | captured["timeout"] = timeout 26 | return DummyResponse() 27 | 28 | monkeypatch.setattr(util.urllib.request, "urlopen", fake_urlopen) 29 | monkeypatch.setenv("DISCORD_WEBHOOK_URL", "https://env.example/webhook") 30 | config = SimpleNamespace(discord_webhook_url="https://config.example/webhook") 31 | 32 | assert util.send_discord_message("hello", config_settings=config, timeout_seconds=3) is True 33 | assert captured["url"] == "https://config.example/webhook" 34 | assert json.loads(captured["payload"].decode())["content"] == "hello" 35 | assert captured["headers"]["user-agent"].startswith("dar-backup/") 36 | assert captured["timeout"] == 3 37 | 38 | 39 | def test_send_discord_message_uses_env_when_no_config(monkeypatch): 40 | captured = {} 41 | 42 | def fake_urlopen(request, timeout): 43 | captured["url"] = request.full_url 44 | return DummyResponse() 45 | 46 | monkeypatch.setattr(util.urllib.request, "urlopen", fake_urlopen) 47 | monkeypatch.setenv("DISCORD_WEBHOOK_URL", "https://env-only.example/webhook") 48 | 49 | assert util.send_discord_message("hi there", config_settings=None) is True 50 | assert captured["url"] == "https://env-only.example/webhook" 51 | 52 | 53 | @pytest.mark.live_discord 54 | def test_send_discord_message_live(monkeypatch): 55 | webhook = os.environ.get("DISCORD_WEBHOOK_URL") 56 | if not webhook: 57 | pytest.skip("DISCORD_WEBHOOK_URL not set for live Discord test") 58 | 59 | # Avoid leaking any monkeypatch from other tests 60 | 
monkeypatch.delenv("DISCORD_WEBHOOK_URL", raising=False) 61 | monkeypatch.setenv("DISCORD_WEBHOOK_URL", webhook) 62 | 63 | assert util.send_discord_message("dar-backup live webhook test") is True 64 | -------------------------------------------------------------------------------- /v1/test/test-parchive-large-bitrot.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -x 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # on purpose introduce "bitrot", 34000 random chars (very close to 5% bitrot) 10k into the archive 6 | # repair via "par2 r" 7 | # verify repair is successful using "dar -t" and "par2 v" 8 | 9 | SCRIPTPATH=$(realpath "$0") 10 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 11 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 12 | 13 | source "$SCRIPTDIRPATH/setup.sh" 14 | source "$TESTDIR/conf/dar-backup.conf" 15 | 16 | TESTRESULT=0 17 | 18 | # do a backup 19 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir > /dev/null 20 | RESULT=$? 21 | if [[ $RESULT != "0" ]]; then 22 | TESTRESULT=1 23 | fi 24 | 25 | # introduce "bitrot" 26 | # 30000 random chars, 10kB into the archive 27 | echo "==> introduce bitrot" 28 | BLOCKSIZE=34000 29 | BITROT=/tmp/bitrot-block.txt 30 | tr -dc 'a-z0-9' < /dev/random|head -c${BLOCKSIZE} > "$BITROT" 31 | echo "BITROT data:" 32 | cat "$BITROT" 33 | ARCHIVEFILE=$TESTDIR/archives/TEST_FULL_${DATE}.1.dar 34 | ls -l $ARCHIVEFILE 35 | cp $ARCHIVEFILE ${ARCHIVEFILE}.org 36 | tr -d '\n' < "$BITROT"|dd of="$ARCHIVEFILE" bs=${BLOCKSIZE} seek=$((10*1024)) oflag=seek_bytes conv=notrunc 37 | ls -l $ARCHIVEFILE 38 | diff $ARCHIVEFILE ${ARCHIVEFILE}.org 39 | 40 | 41 | # does dar detect the changes 42 | echo "==> dar test archive" 43 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" 44 | RESULT=$? 45 | if [[ $RESULT == "0" ]]; then 46 | echo "dar did NOT detect bitrot" 47 | TESTRESULT=1 48 | fi 49 | 50 | # does par2 detect bitrot 51 | echo "==> par2 verify archive" 52 | par2 v -q "$ARCHIVEFILE" 53 | RESULT=$? 54 | if [[ $RESULT == "0" ]]; then 55 | echo "par2 did NOT detect bitrot" 56 | TESTRESULT=1 57 | fi 58 | 59 | # fix bitrot 60 | echo "==> par2 repair archive" 61 | par2 r -q "$ARCHIVEFILE" 62 | RESULT=$? 63 | if [[ $RESULT != "0" ]]; then 64 | echo "par2 did NOT repair bitrot" 65 | TESTRESULT=1 66 | fi 67 | 68 | # test archive with dar 69 | echo "==> dar test archive" 70 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" > /dev/null 71 | RESULT=$? 72 | if [[ $RESULT != "0" ]]; then 73 | echo "archive was not repaired" 74 | TESTRESULT=1 75 | fi 76 | 77 | # test archive with par2 78 | echo "==> par2 verify archive" 79 | par2 v -q "$ARCHIVEFILE" 80 | RESULT=$? 
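# A non-zero exit from this final "par2 v" means the repaired slice still fails
# verification, i.e. the earlier "par2 r" did not fully undo the injected bitrot.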
81 | if [[ $RESULT != "0" ]]; then 82 | echo "par2 did NOT repair bitrot" 83 | TESTRESULT=1 84 | fi 85 | 86 | echo TEST RESULT: "$TESTRESULT" 87 | exit "$TESTRESULT" 88 | -------------------------------------------------------------------------------- /v2/tests/test_generic_backup_command_execution.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import subprocess 3 | import io 4 | from unittest.mock import patch, MagicMock, Mock 5 | from dar_backup.dar_backup import generic_backup 6 | from dar_backup.config_settings import ConfigSettings 7 | from tests.envdata import EnvData 8 | 9 | @pytest.fixture 10 | def mock_envdata(): 11 | logger = MagicMock() 12 | command_logger = MagicMock() 13 | return EnvData(test_case_name="GenericBackupTest", logger=logger, command_logger=command_logger) 14 | 15 | @pytest.fixture 16 | def mock_config(): 17 | config = MagicMock(spec=ConfigSettings) 18 | config.backup_root_dir = "/mock/backups" 19 | config.command_timeout_secs = 60 20 | config.logfile_location = "/mock/logs/backup.log" 21 | return config 22 | 23 | #@patch("dar_backup.util.get_logger", return_value=MagicMock()) 24 | @patch("dar_backup.dar_backup.get_logger", return_value=MagicMock()) 25 | @patch("dar_backup.util", return_value=MagicMock()) 26 | @patch("dar_backup.util.shutil.which", return_value=True) 27 | @patch("dar_backup.util.subprocess.Popen") 28 | @patch("dar_backup.dar_backup.logger", new_callable=MagicMock) 29 | @patch("dar_backup.dar_backup.os.path.exists") 30 | @patch("dar_backup.dar_backup.runner") 31 | def test_generic_backup_success( 32 | mock_runner, 33 | mock_exists, 34 | mock_logger, 35 | mock_popen, 36 | mock_which, 37 | mock_get_logger, 38 | mock_envdata, 39 | mock_config, 40 | ): 41 | # Arrange 42 | mock_exists.return_value = False 43 | 44 | # Setup mocked runner behavior 45 | mock_runner.run.side_effect = [ 46 | # First call simulates successful `dar` run 47 | MagicMock(returncode=0, stdout="stdout", stderr=""), 48 | # Second call simulates successful catalog update 49 | MagicMock(returncode=0, stdout="catalog added", stderr="") 50 | ] 51 | 52 | args = MagicMock() 53 | args.config_file = "/mock/dar-backup.conf" 54 | 55 | backup_file = "backup_test" 56 | backup_definition = "/mock/data" 57 | darrc = "/mock/.darrc" 58 | backup_type = "FULL" 59 | command = ["dar", "-c", backup_file, "-R", backup_definition, "-B", darrc] 60 | 61 | # Act 62 | result = generic_backup(backup_type, command, backup_file, backup_definition, darrc, mock_config, args) 63 | 64 | # Assert 65 | assert isinstance(result, list) 66 | assert mock_runner.run.call_count == 2 67 | mock_logger.info.assert_called() 68 | -------------------------------------------------------------------------------- /v1/test/setup.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash -x 2 | # test setup for every test script 3 | # this is sourced in the actual test scripts 4 | 5 | DATE=$(date +"%Y-%m-%d") 6 | TESTRESULT=0 7 | 8 | TESTDIR=/tmp/dar-backup-test 9 | 10 | failOnError () { 11 | if [[ $1 != "0" ]]; then 12 | echo operation failed, exiting 13 | exit 1 14 | fi 15 | } 16 | 17 | 18 | # grep for expected string and print result 19 | # $1: search string 20 | # $2: logfile to search in 21 | checkExpectLog () { 22 | grep -P "$1" "$2" > /dev/null 23 | if [[ $? 
== "0" ]]; then 24 | echo "ok \"$1\" found" 25 | else 26 | echo "ERROR: \"$1\" NOT found" 27 | TESTRESULT=1 28 | fi 29 | } 30 | 31 | 32 | 33 | # grep for string expected NOT to be found and print result 34 | # $1: search string 35 | # $2: logfile to search in 36 | checkDontFindLog () { 37 | grep -P "$1" "$2" > /dev/null 38 | if [[ $? == "0" ]]; then 39 | echo "ERROR \"$1\" was found" 40 | TESTRESULT=1 41 | else 42 | echo "ok \"$1\" not found as expected" 43 | fi 44 | } 45 | 46 | # check given symbolic link path, verify it exists and is a link 47 | # $1: link paht 48 | checkExpectSymbolicLink () { 49 | if [[ -L "$1" ]]; then 50 | echo "ok Symbolic link: \"$1\" found" 51 | else 52 | echo "ERROR: symbolic link \"$1\" NOT found" 53 | TESTRESULT=1 54 | fi 55 | } 56 | 57 | echo "setup.sh: TESTDIR: $TESTDIR" 58 | echo "setup.sh: SCRIPTDIRPATH: $SCRIPTDIRPATH" 59 | #find "$SCRIPTDIRPATH"/.. ! -path "*/.github*" ! -path "*/.git*" 60 | 61 | rm -fr "$TESTDIR" || { echo "$TESTDIR could not be deleted, exiting"; exit 1; } 62 | mkdir -p "$TESTDIR/archives" 63 | 64 | cp -R "$SCRIPTDIRPATH/dirs" "$TESTDIR/" 65 | cp -R "$SCRIPTDIRPATH/../bin" "$TESTDIR/" 66 | cp -R "$SCRIPTDIRPATH/../conf" "$TESTDIR/" 67 | cp -R "$SCRIPTDIRPATH/../share" "$TESTDIR/" 68 | cp -R "$SCRIPTDIRPATH/../templates" "$TESTDIR/" && rm "$TESTDIR"/templates/backups.d/dar-backup 69 | 70 | # test templates dir and copy it 71 | cp -R "$SCRIPTDIRPATH/templates" "$TESTDIR/" 72 | # non-test templates 73 | cp "$SCRIPTDIRPATH/../templates/darrc.template" "$TESTDIR/templates/" 74 | 75 | source "$TESTDIR/bin/dar-util.sh" 76 | 77 | chmod +x "$TESTDIR/bin/install.sh" 78 | "$TESTDIR/bin/install.sh" 79 | 80 | # dar-backup.conf has been generated from the template, now use it 81 | source "$TESTDIR/conf/dar-backup.conf" 82 | 83 | # create dar catalog 84 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 85 | -------------------------------------------------------------------------------- /v1/test/test-cleanup.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # add file GREENLAND.JEP to include dir and to the exclude dir 6 | # run dar-diff-backup.sh 7 | # cleanup DIFF and INC archives 8 | 9 | TEST_RESULT=0 10 | 11 | SCRIPTPATH=$(realpath "$0") 12 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 13 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 14 | 15 | source "$SCRIPTDIRPATH/setup.sh" 16 | source "$TESTDIR/conf/dar-backup.conf" 17 | 18 | DAY_1_OLD=$(date --date="-1 days" -I) 19 | DAY_2_OLD=$(date --date="-2 days" -I) 20 | 21 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_1_OLD}.dar 22 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_2_OLD}.dar 23 | 24 | touch "$TESTDIR"/archives/TEST_INC_${DAY_1_OLD}.dar 25 | touch "$TESTDIR"/archives/TEST_INC_${DAY_2_OLD}.dar 26 | 27 | #check that a direcotry name does not mess anythin up (it did on a usbdisk disk :-) ) 28 | mkdir "$TESTDIR"/archives/"$DAY_2_OLD" 29 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_DIFF_${DAY_2_OLD}.dar 30 | touch "$TESTDIR"/archives/"$DAY_2_OLD"/TEST_INC_${DAY_2_OLD}.dar 31 | 32 | 33 | # set DIFF_AGE and INC_AGE so that one DIFF and one INC are cleaned up 34 | sed -i s/INC_AGE.*/INC_AGE=2/ "$TESTDIR"/conf/dar-backup.conf 35 | sed -i s/DIFF_AGE.*/DIFF_AGE=2/ "$TESTDIR"/conf/dar-backup.conf 36 | 37 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir 38 | 39 | COUNT=$(grep -c -E "removed:.*_DIFF_" "$TESTDIR"/archives/dar-backup.log) 40 | echo "COUNT: $COUNT" 41 | if [[ "$COUNT" != "2" ]]; then 42 | echo number of DIFF cleanups is wrong 43 | TEST_RESULT=1 44 | fi 45 | 46 | COUNT=$(grep -c -E "removed:.*_INC_" "$TESTDIR"/archives/dar-backup.log) 47 | echo "COUNT: $COUNT" 48 | if [[ "$COUNT" != "2" ]]; then 49 | echo number of INC cleanups is wrong 50 | TEST_RESULT=1 51 | fi 52 | 53 | # set DIFF_AGE and INC_AGE so that one more DIFF and one more INC are cleaned up 54 | sed -i s/DIFF_AGE.*/DIFF_AGE=1/ "$TESTDIR"/conf/dar-backup.conf 55 | sed -i s/INC_AGE.*/INC_AGE=1/ "$TESTDIR"/conf/dar-backup.conf 56 | 57 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir 58 | 59 | COUNT=$(grep -c -E "removed:.*_DIFF_" "$TESTDIR"/archives/dar-backup.log) 60 | echo "COUNT: $COUNT" 61 | if [[ "$COUNT" != "3" ]]; then 62 | echo number of DIFF cleanups is wrong 63 | TEST_RESULT=1 64 | fi 65 | 66 | COUNT=$(grep -c -E "removed:.*_INC_" "$TESTDIR"/archives/dar-backup.log) 67 | echo "COUNT: $COUNT" 68 | if [[ "$COUNT" != "3" ]]; then 69 | echo number of INC cleanups is wrong 70 | TEST_RESULT=1 71 | fi 72 | 73 | echo TEST_RESULT: $TEST_RESULT 74 | exit $TEST_RESULT 75 | -------------------------------------------------------------------------------- /v1/bin/install.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # Copyright (C) 2024 Per Jensen 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | # set correct dir paths in config files 19 | # make the backup executable 20 | # create the softlink for the "diff" version 21 | 22 | 23 | VERSION=@@DEV-VERSION@@ 24 | 25 | _backup_file () { 26 | if [[ -f "$1" ]]; then 27 | cp "$1" "${1}.org" 28 | fi 29 | } 30 | 31 | SCRIPTPATH=$(realpath "$0") 32 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 33 | 34 | ARCHIVE_DIR=$(realpath "$SCRIPTDIRPATH/../archives") 35 | mkdir "$ARCHIVE_DIR" > /dev/null 2>&1 36 | 37 | chmod +x "$SCRIPTDIRPATH"/*.sh 38 | 39 | (cd "$SCRIPTDIRPATH"; rm dar-diff-backup.sh > /dev/null 2>&1; ln -s dar-backup.sh dar-diff-backup.sh) 40 | (cd "$SCRIPTDIRPATH"; rm dar-inc-backup.sh > /dev/null 2>&1; ln -s dar-backup.sh dar-inc-backup.sh) 41 | 42 | 43 | #template files 44 | FILE="$SCRIPTDIRPATH/../conf/defaults-rc" 45 | _backup_file "$FILE" 46 | sed -e "s|@@CONFDIR@@|$(realpath "${SCRIPTDIRPATH}"/../conf)|" "$SCRIPTDIRPATH/../templates/darrc.template" > "$FILE" 47 | 48 | FILE="$SCRIPTDIRPATH/../conf/dar-backup.conf" 49 | _backup_file "$FILE" 50 | sed -e "s|@@ARCHIVE_DIR@@|$(realpath "${SCRIPTDIRPATH}"/..)|" "$SCRIPTDIRPATH/../templates/dar-backup.conf.template" > "$FILE" 51 | 52 | 53 | if [ ! -d "$SCRIPTDIRPATH/../backups.d" ]; then 54 | mkdir "$SCRIPTDIRPATH/../backups.d" 55 | fi 56 | for file in "$SCRIPTDIRPATH"/../templates/backups.d/*; do 57 | base=$(basename "$file") 58 | sed -e "s|@@CONFDIR@@|$(realpath "${SCRIPTDIRPATH}"/../conf)|" "$SCRIPTDIRPATH/../templates/backups.d/$base" > "$SCRIPTDIRPATH/../backups.d/$base" 59 | done 60 | 61 | for file in "$SCRIPTDIRPATH"/../templates/systemd/*; do 62 | base=$(basename "$file") 63 | sed -e "s|@@DAR_BACKUP_DIR@@|$(realpath "${SCRIPTDIRPATH}"/..)|" "$SCRIPTDIRPATH/../templates/systemd/$base" > "$SCRIPTDIRPATH/../share/$base" 64 | done 65 | -------------------------------------------------------------------------------- /v1/test/test-run-restore-test.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # run restore test option 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/conf/dar-backup.conf" 13 | 14 | # run the test 15 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir 16 | RESULT=$? 17 | if [[ $RESULT != "0" ]]; then 18 | TESTRESULT=1 19 | fi 20 | echo "non directories restored:" 21 | find /tmp/dar-restore/ ! -type d 22 | 23 | dar -l "$MOUNT_POINT/TEST_FULL_$DATE" > "$TESTDIR/FULL-filelist.txt" 24 | echo dar exit code: $? 25 | 26 | echo "TESTDIR: $TESTDIR" 27 | 28 | 29 | 30 | # run --run-restore-test 31 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir --run-restore-test "TEST_FULL_$DATE" 32 | 33 | 34 | exit 35 | 36 | 37 | 38 | 39 | echo . 40 | echo .. 
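# NOTE: the bare "exit" above returns before anything below runs, so the FULL/DIFF/INC
# log checks in the remainder of this script are currently skipped.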
41 | echo =========================================== 42 | echo "cat filelists & logfile, then do checks" 43 | echo =========================================== 44 | echo "FULL dar archive:" 45 | cat "$TESTDIR/FULL-filelist.txt" 46 | echo "DIFF dar archive:" 47 | cat "$TESTDIR/DIFF-filelist.txt" 48 | echo "Logfile:" 49 | cat "$LOG_LOCATION/dar-backup.log" 50 | echo RESULTS for FULL backup: 51 | # FULL backup 52 | checkExpectLog "\[Saved\].*?dirs/include this one/Abe.jpg" "$TESTDIR/FULL-filelist.txt" 53 | checkExpectLog "\[Saved\].*?dirs/include this one/Krummi.JPG" "$TESTDIR/FULL-filelist.txt" 54 | checkExpectLog "\[Saved\].*?dirs/compressable/Lorem Ipsum.txt" "$TESTDIR/FULL-filelist.txt" 55 | checkDontFindLog "include this one/GREENLAND.JPEG" "$TESTDIR/FULL-filelist.txt" 56 | checkDontFindLog "exclude this one/In exclude dir.txt" "$TESTDIR/FULL-filelist.txt" 57 | 58 | echo RESULTS for DIFF backup: 59 | # DIFF backup 60 | checkExpectLog "\[Saved\].*?dirs/include this one/GREENLAND.JPEG" "$TESTDIR/DIFF-filelist.txt" 61 | checkDontFindLog "exclude this one/GREENLAND.JPEG" "$TESTDIR/DIFF-filelist.txt" 62 | 63 | echo RESULTS for INCREMENTAL backup: 64 | # INC backup 65 | checkExpectLog "\[Saved\].*?dirs/include this one/GREENLAND.JPEG" "$TESTDIR/INC-filelist.txt" 66 | NO_SAVED=$(grep -c "\[Saved\]" "$TESTDIR/INC-filelist.txt") 67 | if [[ $NO_SAVED == "1" ]]; then 68 | echo "ok Number of files saved in INCREMENTAL archive: $NO_SAVED" 69 | else 70 | echo "error more than one file saved in the INCREMENTAL archive" 71 | TESTRESULT=1 72 | fi 73 | 74 | echo TEST RESULT: "$TESTRESULT" 75 | exit "$TESTRESULT" 76 | -------------------------------------------------------------------------------- /track_downloads.py: -------------------------------------------------------------------------------- 1 | """ 2 | PyPI Total Downloads Tracker (Block Marker Edition) 3 | 4 | Fetches total downloads without mirrors from PyPIStats 5 | and replaces a block in README.md marked by custom START/END markers. 
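Typical usage, run from the repository root:

    python track_downloads.py

This writes the latest count to downloads.json and rewrites the text between the
START/END markers in README.md.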
6 | 7 | LICENSE: MIT 8 | """ 9 | 10 | import json 11 | import subprocess 12 | from datetime import datetime, UTC 13 | from pathlib import Path 14 | 15 | # --- CONFIGURATION --- 16 | PACKAGE_NAME = "dar-backup" 17 | JSON_FILE = Path("downloads.json") 18 | README_FILE = Path("README.md") 19 | START_MARKER = "" 20 | END_MARKER = "" 21 | 22 | 23 | def fetch_total_downloads_without_mirrors(package: str) -> int: 24 | try: 25 | result = subprocess.run( 26 | ["pypistats", "overall", package, "--json"], 27 | check=True, 28 | capture_output=True, 29 | text=True, 30 | ) 31 | data = json.loads(result.stdout) 32 | for entry in data["data"]: 33 | if entry["category"] == "without_mirrors": 34 | return entry["downloads"] 35 | except Exception as e: 36 | print(f"Error fetching download data: {e}") 37 | return 0 38 | 39 | 40 | def save_download_data(total: int): 41 | today = datetime.now(UTC).strftime("%Y-%m-%d") 42 | data = {"total": total, "fetched": today} 43 | with open(JSON_FILE, "w") as f: 44 | json.dump(data, f, indent=2) 45 | print(f"Saved total: {total} (as of {today})") 46 | 47 | 48 | def update_readme(total: int): 49 | if not README_FILE.exists(): 50 | print("README.md not found.") 51 | return 52 | 53 | content = README_FILE.read_text() 54 | start = content.find(START_MARKER) 55 | end = content.find(END_MARKER) 56 | 57 | if start == -1 or end == -1 or start >= end: 58 | print("Start or end marker not found or malformed.") 59 | return 60 | 61 | before = content[:start + len(START_MARKER)] 62 | after = content[end:] 63 | 64 | insert_block = f"\n📦 Total PyPI downloads: {total}\n" 65 | 66 | updated_content = before + insert_block + after 67 | README_FILE.write_text(updated_content) 68 | print("README.md updated between markers.") 69 | 70 | 71 | def main(): 72 | total = fetch_total_downloads_without_mirrors(PACKAGE_NAME) 73 | print(f"Fetched total downloads (without mirrors): {total}") 74 | if total > 0: 75 | save_download_data(total) 76 | update_readme(total) 77 | else: 78 | print("No valid download count received. Skipping update.") 79 | 80 | 81 | if __name__ == "__main__": 82 | main() -------------------------------------------------------------------------------- /v1/test/test-parchive-multiple-8k-blocks.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # run install.sh 4 | # run dar-backup.sh 5 | # on purpose introduce "bitrot", 3 8k random blocks at 10k, 100k and 400k into the archive 6 | # repair via "par2 r" 7 | # verify repair is successful using "dar -t" and "par2 v" 8 | 9 | SCRIPTPATH=$(realpath "$0") 10 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 11 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 12 | 13 | source "$SCRIPTDIRPATH/setup.sh" 14 | source "$TESTDIR/conf/dar-backup.conf" 15 | 16 | TESTRESULT=0 17 | 18 | # do a backup 19 | "$TESTDIR/bin/dar-backup.sh" -d TEST --local-backup-dir > /dev/null 20 | RESULT=$? 
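# If the backup itself failed, the test is marked as failed below. The rest of the
# script then overwrites three 8 KiB regions of the slice with random data (dd with
# oflag=seek_bytes conv=notrunc) and verifies that dar and par2 detect the corruption
# and that "par2 r" can repair it.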
21 | if [[ $RESULT != "0" ]]; then 22 | TESTRESULT=1 23 | fi 24 | 25 | # introduce "bitrot" 26 | # 8k random chars, 10kB into the archive 27 | echo "==> introduce bitrot block 1" 28 | BITROT=$(tr -dc 'a-z0-9' < /dev/random|head -c8192) 29 | ARCHIVEFILE=$TESTDIR/archives/TEST_FULL_${DATE}.1.dar 30 | echo "$BITROT"|tr -d '\n'|dd of="$ARCHIVEFILE" bs=8192 seek=$((10*1024)) oflag=seek_bytes conv=notrunc 31 | 32 | echo "==> introduce bitrot block 2" 33 | BITROT=$(tr -dc 'a-z0-9' < /dev/random|head -c8192) 34 | echo "$BITROT"|tr -d '\n'|dd of="$ARCHIVEFILE" bs=8192 seek=$((100*1024)) oflag=seek_bytes conv=notrunc 35 | 36 | BITROT=$(tr -dc 'a-z0-9' < /dev/random|head -c8192) 37 | echo "$BITROT"|tr -d '\n'|dd of="$ARCHIVEFILE" bs=8192 seek=$((400*1024)) oflag=seek_bytes conv=notrunc 38 | 39 | 40 | # does dar detect the changes 41 | echo "==> dar test archive" 42 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" 43 | RESULT=$? 44 | if [[ $RESULT == "0" ]]; then 45 | echo "dar did NOT detect bitrot" 46 | TESTRESULT=1 47 | fi 48 | 49 | # does par2 detect bitrot 50 | echo "==> par2 verify archive" 51 | par2 v -q "$ARCHIVEFILE" 52 | RESULT=$? 53 | if [[ $RESULT == "0" ]]; then 54 | echo "par2 did NOT detect bitrot" 55 | TESTRESULT=1 56 | fi 57 | 58 | # fix bitrot 59 | echo "==> par2 repair archive" 60 | par2 r -q "$ARCHIVEFILE" 61 | RESULT=$? 62 | if [[ $RESULT != "0" ]]; then 63 | echo "par2 did NOT repair bitrot" 64 | TESTRESULT=1 65 | fi 66 | 67 | # test archive with dar 68 | echo "==> dar test archive" 69 | dar -t "$TESTDIR/archives/TEST_FULL_${DATE}" > /dev/null 70 | RESULT=$? 71 | if [[ $RESULT != "0" ]]; then 72 | echo "archive was not repaired" 73 | TESTRESULT=1 74 | fi 75 | 76 | # test archive with par2 77 | echo "==> par2 verify archive" 78 | par2 v -q "$ARCHIVEFILE" 79 | RESULT=$? 80 | if [[ $RESULT != "0" ]]; then 81 | echo "par2 did NOT repair bitrot" 82 | TESTRESULT=1 83 | fi 84 | 85 | echo TEST RESULT: "$TESTRESULT" 86 | exit "$TESTRESULT" 87 | -------------------------------------------------------------------------------- /v2/template/.darrc: -------------------------------------------------------------------------------- 1 | # Default configuration file for dar 2 | # Place this file in the user's home directory as .darrc or specify it with the -B option 3 | 4 | restore-options: 5 | # don't restore File Specific Attributes 6 | #--fsa-scope none 7 | 8 | # ignore owner, useful when used by a non-privileged user 9 | --comparison-field=ignore-owner 10 | 11 | 12 | 13 | compress-exclusion: 14 | # Exclude specific file types from compression 15 | 16 | # First setting case insensitive mode on: 17 | -an 18 | -ag 19 | 20 | 21 | # Exclude specific file types from compression 22 | -Z *.gz 23 | -Z *.bz2 24 | -Z *.xz 25 | -Z *.zip 26 | -Z *.rar 27 | -Z *.7z 28 | -Z *.tar 29 | -Z *.tgz 30 | -Z *.tbz2 31 | -Z *.txz 32 | # Exclude common image file types from compression 33 | -Z *.jpg 34 | -Z *.jpeg 35 | -Z *.png 36 | -Z *.gif 37 | -Z *.bmp 38 | -Z *.tiff 39 | -Z *.svg 40 | # Exclude common movie file types from compression 41 | -Z *.mp4 42 | -Z *.avi 43 | -Z *.mkv 44 | -Z *.mov 45 | -Z *.wmv 46 | -Z *.flv 47 | -Z *.mpeg 48 | -Z *.mpg 49 | 50 | # These are zip files. Not all are compressed, but considering that they can 51 | # get quite large it is probably more prudent to leave this uncommented. 52 | -Z "*.pk3" 53 | -Z "*.zip" 54 | # You can get better compression on these files, but then you should be 55 | # de/recompressing with an actual program, not dar. 
56 | -Z "*.lz4" 57 | -Z "*.zoo" 58 | 59 | # Other, in alphabetical order. 60 | -Z "*.Po" 61 | -Z "*.aar" 62 | -Z "*.bx" 63 | -Z "*.chm" 64 | -Z "*.doc" 65 | -Z "*.epub" 66 | -Z "*.f3d" 67 | -Z "*.gpg" 68 | -Z "*.htmlz" 69 | -Z "*.iix" 70 | -Z "*.iso" 71 | -Z "*.jin" 72 | -Z "*.ods" 73 | -Z "*.odt" 74 | -Z "*.ser" 75 | -Z "*.svgz" 76 | -Z "*.swx" 77 | -Z "*.sxi" 78 | -Z "*.whl" 79 | -Z "*.wings" 80 | 81 | 82 | # Dar archives (may be compressed). 83 | -Z "*.dar" 84 | 85 | # Now we swap back to case sensitive mode for masks which is the default 86 | # mode: 87 | -acase 88 | 89 | 90 | ############################################################## 91 | # target: verbose 92 | # remove comments belov for dar being more verbose 93 | verbose: 94 | 95 | # -vt show each file teated due to filtering inclusion or no filtering at all 96 | # -vt 97 | 98 | # -vs show skipped files du to exclusion 99 | # -vs 100 | 101 | # -vd show diretory currently being processed 102 | -vd 103 | 104 | # -vm show detailed messages, not related to files and directories 105 | # -vm 106 | 107 | # -vf show summary of each treated directory, including average compression 108 | -vf 109 | 110 | # -va equivalent to "-vm -vs -vt" 111 | # -va 112 | 113 | -------------------------------------------------------------------------------- /v2/src/dar_backup/.darrc: -------------------------------------------------------------------------------- 1 | # .darrc configuration file for `dar` as used by the `dar-backup` script. 2 | # `dar-backup` lives here: https://github.com/per2jensen/dar-backup 3 | 4 | 5 | ############################################################## 6 | # target: verbose 7 | # remove comments belov for dar being more verbose 8 | verbose: 9 | 10 | # shows files teated due to filtering inclusion or no filtering at all 11 | # -vt 12 | 13 | # shows skipped files du to exclusion 14 | # -vs 15 | 16 | # shows diretory currently being processed 17 | -vd 18 | 19 | # shows detailed messages, not related to files and directories 20 | # -vm 21 | 22 | # shows summary of each treated directory, including average compression 23 | -vf 24 | 25 | # equivalent to "-vm -vs -vt" 26 | # -va 27 | 28 | 29 | restore-options: 30 | # don't restore File Specific Attributes 31 | #--fsa-scope none 32 | 33 | # ignore owner, useful when used by a non-privileged user 34 | --comparison-field=ignore-owner 35 | 36 | 37 | 38 | # Exclude specific file types from compression 39 | compress-exclusion: 40 | 41 | # First setting case insensitive mode on: 42 | -an 43 | -ag 44 | 45 | -Z "*.gz" 46 | -Z "*.bz2" 47 | -Z "*.xz" 48 | -Z "*.zip" 49 | -Z "*.rar" 50 | -Z "*.7z" 51 | -Z "*.tar" 52 | -Z "*.tgz" 53 | -Z "*.tbz2" 54 | -Z "*.txz" 55 | # Exclude common image file types from compression 56 | -Z "*.jpg" 57 | -Z "*.jpeg" 58 | -Z "*.png" 59 | -Z "*.gif" 60 | -Z "*.bmp" 61 | -Z "*.tiff" 62 | -Z "*.svg" 63 | -Z "*.ico" 64 | -Z "*.webp" 65 | # The author uses Nikon compressed NEFs raw files 66 | -Z "*.NEF" 67 | # Exclude common movie file types from compression 68 | -Z "*.mp4" 69 | -Z "*.avi" 70 | -Z "*.mkv" 71 | -Z "*.mov" 72 | -Z "*.wmv" 73 | -Z "*.flv" 74 | -Z "*.mpeg" 75 | -Z "*.mpg" 76 | 77 | # These are zip files. Not all are compressed, but considering that they can 78 | # get quite large it is probably more prudent to leave this uncommented. 
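# Note: -Z only excludes the matching files from compression; they are still
# included in the backup, just stored uncompressed.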
79 | -Z "*.pk3" 80 | -Z "*.zip" 81 | 82 | -Z "*.lz4" 83 | -Z "*.zoo" 84 | 85 | -Z "*.Po" 86 | -Z "*.aar" 87 | -Z "*.bx" 88 | -Z "*.chm" 89 | -Z "*.doc" 90 | -Z "*.epub" 91 | -Z "*.f3d" 92 | -Z "*.gpg" 93 | -Z "*.htmlz" 94 | -Z "*.iix" 95 | -Z "*.iso" 96 | -Z "*.jin" 97 | -Z "*.ods" 98 | -Z "*.odt" 99 | -Z "*.ser" 100 | -Z "*.svgz" 101 | -Z "*.swx" 102 | -Z "*.sxi" 103 | -Z "*.whl" 104 | -Z "*.wings" 105 | 106 | 107 | # Dar archives (may be compressed). 108 | -Z "*.dar" 109 | 110 | # Now we swap back to case sensitive mode for masks which is the default 111 | -acase 112 | 113 | -------------------------------------------------------------------------------- /.github/workflows/py-tests.yml: -------------------------------------------------------------------------------- 1 | name: Pytest 2 | permissions: 3 | contents: read 4 | pull-requests: write 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | workflow_dispatch: 12 | 13 | jobs: 14 | python3-unit-tests: 15 | runs-on: ubuntu-24.04 16 | 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Setup git config 22 | run: | 23 | git config user.name "GitHub Actions Bot" 24 | git config user.email "<>" 25 | 26 | - name: Install required system packages 27 | run: | 28 | sudo apt update 29 | sudo apt install -y dar dar-static par2 git python3 python3-venv libguestfs-tools 30 | 31 | - name: Make kernel readable for libguestfs 32 | run: | 33 | version=$(uname -r) 34 | sudo dpkg-statoverride --update --add root root 0644 /boot/vmlinuz-${version} 35 | 36 | - name: Copy README into v2 for build 37 | run: cp README.md v2/README.md 38 | 39 | - name: Create and prepare Python virtual environment 40 | run: | 41 | cd $GITHUB_WORKSPACE/v2 42 | if [[ -d venv* ]]; then 43 | rm -rf venv* 44 | fi 45 | 46 | ./build.sh 47 | 48 | 49 | # ensure subprocess coverage is enabled before running tests 50 | - name: Enable coverage in subprocesses 51 | working-directory: v2 52 | run: | 53 | . venv*/bin/activate 54 | export COVERAGE_PROCESS_START="$PWD/.coveragerc" 55 | export PYTHONPATH="$PWD:$PYTHONPATH" 56 | python - <<'PY' 57 | import site, shutil, pathlib 58 | dst = pathlib.Path(next(p for p in site.getsitepackages() if 'site-packages' in p))/'sitecustomize.py' 59 | shutil.copyfile('sitecustomize.py', dst) 60 | print("installed:", dst) 61 | PY 62 | 63 | - name: Run pytest with coverage 64 | working-directory: v2 65 | run: | 66 | . venv*/bin/activate 67 | export COVERAGE_PROCESS_START="$PWD/.coveragerc" 68 | export PYTHONPATH="$PWD:$PYTHONPATH" 69 | coverage erase 70 | python -c "import sitecustomize; print('sitecustomize OK')" 71 | pytest --cov=src --cov-config=.coveragerc --cov-report=term --cov-report=xml:coverage.xml 72 | coverage combine || true 73 | coverage xml -i 74 | 75 | 76 | 77 | - name: Upload coverage report to Codecov 78 | uses: codecov/codecov-action@v5 79 | with: 80 | token: ${{ secrets.CODECOV_TOKEN }} # optional for public repos 81 | files: ./v2/coverage.xml 82 | verbose: true 83 | -------------------------------------------------------------------------------- /v1/test/test-cleanup-alternate-dir.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # verify cleanup.sh works on the directory given in the --alternate-archive-dir option 4 | # that option requires --local-backup-dir 5 | 6 | 7 | TEST_RESULT=0 8 | 9 | SCRIPTPATH=$(realpath "$0") 10 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 11 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 12 | 13 | source "$SCRIPTDIRPATH/setup.sh" 14 | source "$TESTDIR/conf/dar-backup.conf" 15 | 16 | DAY_1_OLD=$(date --date="-1 days" -I) 17 | DAY_2_OLD=$(date --date="-2 days" -I) 18 | 19 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_1_OLD}.dar 20 | touch "$TESTDIR"/archives/TEST_DIFF_${DAY_2_OLD}.dar 21 | 22 | touch "$TESTDIR"/archives/TEST_INC_${DAY_1_OLD}.dar 23 | touch "$TESTDIR"/archives/TEST_INC_${DAY_2_OLD}.dar 24 | 25 | cp -R "$TESTDIR"/archives "$TESTDIR"/archives2 26 | 27 | # set DIFF_AGE and INC_AGE so that one DIFF and one INC are cleaned up 28 | sed -i s/INC_AGE.*/INC_AGE=2/ "$TESTDIR"/conf/dar-backup.conf 29 | sed -i s/DIFF_AGE.*/DIFF_AGE=2/ "$TESTDIR"/conf/dar-backup.conf 30 | 31 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir --alternate-archive-dir "$TESTDIR/archives2" 32 | 33 | COUNT=$(grep -c -E "removed:.*_DIFF_" "$TESTDIR"/archives/dar-backup.log) 34 | echo "COUNT: $COUNT" 35 | if [[ "$COUNT" != "1" ]]; then 36 | echo number of DIFF cleanups is wrong 37 | TEST_RESULT=1 38 | fi 39 | 40 | COUNT=$(grep -c -E "removed:.*_INC_" "$TESTDIR"/archives/dar-backup.log) 41 | echo "COUNT: $COUNT" 42 | if [[ "$COUNT" != "1" ]]; then 43 | echo number of INC cleanups is wrong 44 | TEST_RESULT=1 45 | fi 46 | 47 | # set DIFF_AGE and INC_AGE so that one more DIFF and one more INC are cleaned up 48 | sed -i s/DIFF_AGE.*/DIFF_AGE=1/ "$TESTDIR"/conf/dar-backup.conf 49 | sed -i s/INC_AGE.*/INC_AGE=1/ "$TESTDIR"/conf/dar-backup.conf 50 | 51 | "$TESTDIR"/bin/cleanup.sh --local-backup-dir --alternate-archive-dir "$TESTDIR/archives2" 52 | 53 | COUNT=$(grep -c -E "removed:.*_DIFF_" "$TESTDIR"/archives/dar-backup.log) 54 | echo "COUNT: $COUNT" 55 | if [[ "$COUNT" != "2" ]]; then 56 | echo number of DIFF cleanups is wrong 57 | TEST_RESULT=1 58 | fi 59 | 60 | COUNT=$(grep -c -E "removed:.*_INC_" "$TESTDIR"/archives/dar-backup.log) 61 | echo "COUNT: $COUNT" 62 | if [[ "$COUNT" != "2" ]]; then 63 | echo number of INC cleanups is wrong 64 | TEST_RESULT=1 65 | fi 66 | 67 | if [[ "$TEST_RESULT" == "0" ]]; then 68 | echo "test of cleanup successfully completed" 69 | fi 70 | 71 | #verify all archives still exists in $TESTDIR/archives 72 | COUNT=$(ls /tmp/dar-backup-test/archives|grep -c -E "FULL|DIFF|INC") 73 | echo "COUNT: $COUNT" 74 | if [[ "$COUNT" != "4" ]]; then 75 | echo one or more archives were cleaned up in $TESTDIR/archives 76 | TEST_RESULT=1 77 | fi 78 | 79 | echo TEST_RESULT: $TEST_RESULT 80 | exit $TEST_RESULT 81 | 82 | -------------------------------------------------------------------------------- /v2/tests/test_status_indicators.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from sys import path 4 | from threading import Event 5 | from unittest.mock import patch, MagicMock, mock_open 6 | from dar_backup.rich_progress import show_log_driven_bar 7 | 8 | # Ensure the test directory is in the Python path 9 | path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 10 | 11 | @patch("dar_backup.rich_progress.Console") 12 | @patch("dar_backup.rich_progress.Live") 13 | @patch("dar_backup.rich_progress.os.path.exists", return_value=True) 14 | @patch("dar_backup.rich_progress.os.path.getsize", return_value=100) 15 
| @patch("dar_backup.rich_progress.open", new_callable=mock_open, read_data="""\ 16 | Some unrelated log line 17 | === START BACKUP SESSION: 1234 18 | Inspecting directory /home 19 | Finished inspecting directory /home 20 | Inspecting directory /etc 21 | Finished inspecting directory /etc 22 | """) 23 | def test_show_log_driven_bar_updates_progress( 24 | mock_file, # open 25 | mock_getsize, # os.path.getsize 26 | mock_exists, # os.path.exists 27 | mock_live_class, # Live 28 | mock_console_class # Console 29 | ): 30 | # Setup console mock 31 | mock_console = MagicMock() 32 | mock_console.is_terminal = True 33 | mock_console_class.return_value = mock_console 34 | 35 | # Setup Live mock 36 | mock_live = MagicMock() 37 | mock_live_class.return_value.__enter__.return_value = mock_live 38 | 39 | # Setup mocked file behavior 40 | mock_file_handle = mock_file.return_value.__enter__.return_value 41 | mock_file_handle.tell.return_value = 100 42 | mock_file_handle.seek.return_value = None 43 | 44 | stop_event = Event() 45 | 46 | import threading 47 | thread = threading.Thread( 48 | target=show_log_driven_bar, 49 | args=("/mock/path.log", stop_event, "=== START BACKUP SESSION: 1234", 10), 50 | daemon=True 51 | ) 52 | thread.start() 53 | 54 | import time 55 | time.sleep(1) 56 | stop_event.set() 57 | thread.join() 58 | 59 | # ✅ Assert that progress bar updated at least once 60 | assert mock_live.update.call_count >= 1 61 | 62 | # ✅ Extract plain text from Rich objects 63 | updates = [call.args[0] for call in mock_live.update.call_args_list] 64 | 65 | def extract_plain_text(obj): 66 | if hasattr(obj, "plain"): 67 | return obj.plain 68 | elif hasattr(obj, "renderables"): # Rich Group object 69 | return " ".join( 70 | r.plain if hasattr(r, "plain") else str(r) 71 | for r in obj.renderables 72 | ) 73 | return str(obj) 74 | 75 | combined_text = " ".join(extract_plain_text(u) for u in updates) 76 | assert "📂 /etc" in combined_text or "📂 /home" in combined_text 77 | -------------------------------------------------------------------------------- /v1/test/test-remove-single-archive-from-catalog.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # test removing 1 entry of an specific achive from it's catalog 4 | # - multiple different backup definitions 5 | # - a backup definition name containing spaces 6 | 7 | TESTRESULT=0 8 | 9 | SCRIPTPATH=$(realpath "$0") 10 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 11 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 12 | 13 | source "$SCRIPTDIRPATH/setup.sh" 14 | source "$TESTDIR/bin/dar-util.sh" 15 | source "$TESTDIR/conf/dar-backup.conf" 16 | 17 | # generate 2 different backups 18 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/"TEST2 with spaces" 19 | 20 | 21 | # create catalogs 22 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 23 | if [[ $? != "0" ]]; then 24 | echo ERROR catalog was not created, exiting 25 | exit 1 26 | fi 27 | 28 | 29 | # do backups 30 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir 31 | RESULT=$? 32 | if [[ $RESULT != "0" ]]; then 33 | TESTRESULT=1 34 | fi 35 | 36 | 37 | # add archives for specific backup definition 38 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def "TEST2 with spaces" --local-backup-dir 39 | if [[ $? 
!= "0" ]]; then 40 | echo ERROR some or all archives were not added to catalog, exiting 41 | exit 1 42 | fi 43 | 44 | # add all archives in named directory 45 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --local-backup-dir 46 | if [[ $? != "0" ]]; then 47 | echo ERROR some or all archives were not added to catalog, exiting 48 | exit 1 49 | fi 50 | 51 | 52 | # remove TEST entry from catalog 53 | _DATE=$(date -I) 54 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/TEST.catalog)" 55 | RESULT=$? 56 | if [[ $RESULT != "0" ]]; then 57 | TESTRESULT=1 58 | fi 59 | 60 | "$TESTDIR/bin/manager.sh" --remove-specific-archive TEST_FULL_$_DATE --local-backup-dir 61 | RESULT=$? 62 | if [[ $RESULT != "0" ]]; then 63 | TESTRESULT=1 64 | fi 65 | 66 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/TEST.catalog)" 67 | RESULT=$? 68 | if [[ $RESULT != "0" ]]; then 69 | TESTRESULT=1 70 | fi 71 | 72 | 73 | #remove TEST2..... entry from catalog 74 | _REALPATH=$(realpath "$TESTDIR"/archives/"TEST2 with spaces.catalog") 75 | RESULT=$? 76 | if [[ $RESULT != "0" ]]; then 77 | TESTRESULT=1 78 | fi 79 | 80 | dar_manager -l --base "$_REALPATH" 81 | RESULT=$? 82 | if [[ $RESULT != "0" ]]; then 83 | TESTRESULT=1 84 | fi 85 | 86 | "$TESTDIR/bin/manager.sh" --remove-specific-archive "TEST2 with spaces_FULL_$_DATE" --local-backup-dir 87 | RESULT=$? 88 | if [[ $RESULT != "0" ]]; then 89 | TESTRESULT=1 90 | fi 91 | 92 | dar_manager -l --base "$_REALPATH" 93 | RESULT=$? 94 | if [[ $RESULT != "0" ]]; then 95 | TESTRESULT=1 96 | fi 97 | 98 | 99 | if [[ "$TESTRESULT" == "0" ]]; then 100 | log_success "Test case succeeded" 101 | else 102 | log_fail "Test case failed" 103 | fi 104 | 105 | exit "$TESTRESULT" 106 | 107 | -------------------------------------------------------------------------------- /v1/templates/darrc.template: -------------------------------------------------------------------------------- 1 | 2 | # default options when extract (-x) is used 3 | extract: 4 | # don't restore File Specific Attributes 5 | #--fsa-scope none 6 | 7 | # ignore owner, useful when used by a non-privileged user 8 | --comparison-field=ignore-owner 9 | 10 | compress-exclusion: 11 | # First setting case insensitive mode on: 12 | -an 13 | -ag 14 | 15 | # Compressed video format. 16 | -Z "*.avi" 17 | -Z "*.cr2" 18 | -Z "*.flv" 19 | -Z "*.jng" 20 | -Z "*.m4v" 21 | -Z "*.mkv" 22 | -Z "*.mov" 23 | -Z "*.mp4*" 24 | -Z "*.mpeg" 25 | -Z "*.mpg" 26 | -Z "*.oga" 27 | -Z "*.swf" 28 | -Z "*.vob" 29 | -Z "*.webm" 30 | -Z "*.wmv" 31 | 32 | # Compressed animation. 33 | -Z "*.mng" 34 | 35 | # Compressed image format. 36 | -Z "*.bmp" 37 | -Z "*.gif" 38 | -Z "*.ico" 39 | -Z "*.jpe" 40 | -Z "*.jpeg" 41 | -Z "*.jpg" 42 | -Z "*.mmpz" 43 | -Z "*.mpeg" 44 | -Z "*.png" 45 | -Z "*.tif" 46 | -Z "*.tiff" 47 | -Z "*.webp" 48 | 49 | # Compressed audio format. 50 | -Z "*.ac3" 51 | -Z "*.als" 52 | -Z "*.ape" 53 | -Z "*.bonk" 54 | -Z "*.flac" 55 | -Z "*.m4a" 56 | -Z "*.mp2" 57 | -Z "*.mp3" 58 | -Z "*.mpc" 59 | -Z "*.nsf" 60 | -Z "*.ogg" 61 | -Z "*.speex" 62 | -Z "*.spx" 63 | -Z "*.weba" 64 | -Z "*.wv" 65 | 66 | # Compressed package. 67 | -Z "*.deb" 68 | -Z "*.rpm" 69 | -Z "*.run" 70 | -Z "*.sis" 71 | -Z "*.xpi" 72 | 73 | # Compressed data. 74 | -Z "*.7z" 75 | -Z "*.Z" 76 | -Z "*.bz2" 77 | -Z "*.cab" 78 | -Z "*.gz" 79 | -Z "*.jar" 80 | -Z "*.rar" 81 | -Z "*.tbz" 82 | -Z "*.tbz2" 83 | -Z "*.tgz" 84 | -Z "*.txz" 85 | -Z "*.wsz" 86 | -Z "*.wz" 87 | -Z "*.xz" 88 | -Z "*.zst" 89 | # These are zip files. 
Not all are compressed, but considering that they can 90 | # get quite large it is probably more prudent to leave this uncommented. 91 | -Z "*.pk3" 92 | -Z "*.zip" 93 | # You can get better compression on these files, but then you should be 94 | # de/recompressing with an actual program, not dar. 95 | -Z "*.lz4" 96 | -Z "*.zoo" 97 | 98 | # Other, in alphabetical order. 99 | -Z "*.Po" 100 | -Z "*.aar" 101 | -Z "*.bx" 102 | -Z "*.chm" 103 | -Z "*.doc" 104 | -Z "*.epub" 105 | -Z "*.f3d" 106 | -Z "*.gpg" 107 | -Z "*.htmlz" 108 | -Z "*.iix" 109 | -Z "*.iso" 110 | -Z "*.jin" 111 | -Z "*.ods" 112 | -Z "*.odt" 113 | -Z "*.ser" 114 | -Z "*.svgz" 115 | -Z "*.swx" 116 | -Z "*.sxi" 117 | -Z "*.whl" 118 | -Z "*.wings" 119 | 120 | 121 | # Dar archives (may be compressed). 122 | -Z "*.dar" 123 | 124 | # Now we swap back to case sensitive mode for masks which is the default 125 | # mode: 126 | -acase 127 | 128 | ############################################################## 129 | # target: verbose 130 | # remove comments belov for dar being more verbose 131 | verbose: 132 | 133 | # -vt show files teated due to filtering inclusion or no filtering at all 134 | # -vt 135 | 136 | # -vs show skipped files du to exclusion 137 | # -vs 138 | 139 | # -vd show diretory currently being processed 140 | # -vd 141 | 142 | # -vm show detailed messages, not related to files and directories 143 | # -vm 144 | 145 | # -vf show summary of each treated directory, including average compression 146 | # -vf 147 | 148 | # -va equivalent to "-vm -vs -vt" 149 | # -va 150 | 151 | -------------------------------------------------------------------------------- /v2/tests/test_systemd_unit_generation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import patch, mock_open 3 | from pathlib import Path 4 | import os 5 | from sys import path as path 6 | 7 | # Add src directory to path 8 | path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 9 | 10 | from dar_backup.dar_backup_systemd import ( 11 | generate_service, 12 | generate_timer, 13 | generate_cleanup_service, 14 | build_exec_command, 15 | FLAGS, 16 | TIMINGS 17 | ) 18 | 19 | class TestDarBackupUnitGenerator(unittest.TestCase): 20 | 21 | def setUp(self): 22 | self.venv = "/fake/venv" 23 | self.dar_path = "/opt/dar" 24 | 25 | def test_build_exec_command_with_dar(self): 26 | cmd = build_exec_command(self.venv, "-F", self.dar_path) 27 | self.assertIn("PATH=/opt/dar:$PATH", cmd) 28 | self.assertIn("dar-backup -F", cmd) 29 | 30 | def test_build_exec_command_without_dar(self): 31 | cmd = build_exec_command(self.venv, "-F") 32 | self.assertNotIn("PATH=", cmd) 33 | self.assertIn("dar-backup -F", cmd) 34 | 35 | def test_generate_service(self): 36 | content = generate_service("FULL", self.venv, self.dar_path) 37 | self.assertIn("Description=dar-backup FULL", content) 38 | self.assertIn("ExecStart=/bin/bash -c", content) 39 | 40 | def test_generate_timer(self): 41 | content = generate_timer("DIFF") 42 | self.assertIn("OnCalendar=*-*-01 19:03:00", content) 43 | 44 | def test_generate_cleanup_service(self): 45 | content = generate_cleanup_service(self.venv, self.dar_path) 46 | self.assertIn("cleanup", content) 47 | self.assertIn("ExecStart=/bin/bash -c", content) 48 | 49 | @patch("builtins.print") 50 | @patch("pathlib.Path.write_text") 51 | def test_write_unit_file(self, mock_write, mock_print): 52 | from dar_backup.dar_backup_systemd import write_unit_file 53 | path = Path("/tmp") 54 | 
write_unit_file(path, "test.service", "unit content") 55 | mock_write.assert_called_once() 56 | mock_print.assert_called_once_with("Generated test.service") 57 | 58 | 59 | 60 | 61 | from dar_backup.dar_backup_systemd import write_unit_files 62 | from unittest.mock import MagicMock 63 | 64 | def test_write_unit_files_triggers_enable_and_start(monkeypatch, tmp_path): 65 | venv = tmp_path / "venv" 66 | venv.mkdir() 67 | dar_path = "/usr/local/bin" 68 | 69 | mock_run = MagicMock() 70 | monkeypatch.setattr("dar_backup.dar_backup_systemd.subprocess.run", mock_run) 71 | 72 | # Optional: override Path.home() to avoid writing into ~/.config 73 | monkeypatch.setattr("dar_backup.dar_backup_systemd.Path.home", lambda: tmp_path) 74 | 75 | write_unit_files(str(venv), dar_path, install=True) 76 | 77 | calls = [call.args[0] for call in mock_run.call_args_list] 78 | 79 | assert ["systemctl", "--user", "enable", "dar-full-backup.timer"] in calls 80 | assert ["systemctl", "--user", "start", "dar-cleanup.timer"] in calls 81 | assert ["systemctl", "--user", "daemon-reexec"] in calls 82 | assert ["systemctl", "--user", "daemon-reload"] in calls 83 | 84 | 85 | 86 | 87 | if __name__ == '__main__': 88 | unittest.main() 89 | -------------------------------------------------------------------------------- /v2/tests/test_space_definition.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | import os 4 | 5 | # Ensure the test directory is in the Python path 6 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 7 | 8 | from tests.envdata import EnvData 9 | from datetime import datetime 10 | from dar_backup.command_runner import CommandRunner 11 | 12 | 13 | def create_random_data_file(env: EnvData, name: str, size: int) -> None: 14 | """ 15 | Create a file with random data of a specific size. 16 | 17 | Args: 18 | name (str): The name of the file. 19 | size (int): The size of the file in bytes. 20 | """ 21 | filename = f"random-{name}.dat" 22 | with open(os.path.join(env.test_dir, "data", filename), 'wb') as f: 23 | f.write(os.urandom(size)) 24 | env.logger.info(f'Created {os.path.join(env.test_dir, "data", filename)} of size {name}') 25 | 26 | 27 | def generate_datafiles(env: EnvData) -> None: 28 | """ 29 | Generate the data files for testing. 30 | 31 | This method creates files of different sizes using the create_random_data_file method. 32 | """ 33 | try: 34 | # Create files 35 | for name, size in env.file_sizes.items(): 36 | create_random_data_file(env, name, size) 37 | except Exception as e: 38 | env.logger.exception("data file generation failed") 39 | raise 40 | 41 | 42 | def create_backup_definitions(env: EnvData) -> None: 43 | """ 44 | Generate the backup definitions for testing. 45 | 46 | This method creates the backup definition files using the backup_definitions dictionary. 
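    The generated definition uses "-R /" as the backup root, so the "-g" path is made
    relative (the leading "/" is stripped) because dar expects -g paths relative to -R.
    The definition file name deliberately contains a space ("example 2").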
47 | """ 48 | logging.info("Generating backup definition") 49 | backup_definitions = { 50 | "example 2" : f""" 51 | -Q 52 | -B {env.dar_rc} 53 | -R / 54 | -s 10G 55 | -z6 56 | -am 57 | -g {os.path.join(env.test_dir, 'data')} 58 | """.replace("-g /tmp/", "-g tmp/") # because dar does not allow first "/" 59 | } 60 | 61 | for filename, content in backup_definitions.items(): 62 | with open(os.path.join(env.test_dir, 'backup.d', filename), 'w') as f: 63 | f.write(content) 64 | 65 | 66 | def test_backup_definition_with_space(setup_environment, env): 67 | """ 68 | Verify that the backups are correct when a backup 69 | definition name contains space(s) 70 | 71 | Expects to be run in a virtal environment with dar-backup installed. 72 | """ 73 | env.file_sizes = { 74 | '100kB': 100 * 1024, 75 | '1MB': 1024 * 1024, 76 | '10MB': 10 * 1024 * 1024 77 | } 78 | 79 | generate_datafiles(env) 80 | 81 | create_backup_definitions(env) 82 | 83 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 84 | 85 | # make sure the catalog database is in place 86 | command = ['manager', '--create-db', '--config-file', env.config_file] 87 | process = runner.run(command) 88 | if process.returncode != 0: 89 | raise Exception(f"Command failed {command}") 90 | 91 | command = ['dar-backup', '--full-backup' ,'-d', "example 2", '--config-file', env.config_file] 92 | process = runner.run(command) 93 | if process.returncode != 0: 94 | raise Exception(f"Command failed {command}") 95 | -------------------------------------------------------------------------------- /v1/test/test-use-catalog-option.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # test if --use-catalogs option works as intended 4 | 5 | TESTRESULT=0 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/conf/dar-backup.conf" 13 | 14 | echo "Remove the auto created catalogs by setup.sh" 15 | rm -f "$TESTDIR"/archives/*.catalog 16 | find "$TESTDIR" -name "*.catalog" 17 | 18 | 19 | # do backups 20 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir --verbose 21 | RESULT=$? 22 | if [[ $RESULT != "1" ]]; then # dar-backup should report the missing catalog error 23 | TESTRESULT=1 24 | fi 25 | 26 | echo "Find catalogs, there should not be any...." 27 | find "$TESTDIR" -name "*.catalog" 28 | 29 | 30 | # list the catalog - there should not be any 31 | echo "List all catalogs" 32 | while IFS= read -r -d "" file 33 | do 34 | CURRENT_BACKUPDEF=$(basename "$file") 35 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 36 | if [[ -e "$TESTDIR"/archives/"$CATALOG" ]]; then 37 | echo "ERROR - there should not be any catalog at this point in time" 38 | TESTRESULT=1 # there should not be any catalog 39 | fi 40 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 41 | 42 | 43 | echo "Now do backup with catalog enabled" 44 | 45 | rm -f "$TESTDIR"/archives/TEST_FULL_* 46 | 47 | # create catalogs 48 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 49 | if [[ $? != "0" ]]; then 50 | echo ERROR catalog was not created, exiting 51 | exit 1 52 | fi 53 | 54 | # do backups 55 | "$TESTDIR/bin/dar-backup.sh" --use-catalogs --local-backup-dir --verbose 56 | RESULT=$? 
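# With --use-catalogs the backup is expected to succeed and each archive should be
# recorded in its backup definition's catalog; the loops below list and check the
# catalogs with dar_manager.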
57 | if [[ $RESULT != "0" ]]; then 58 | TESTRESULT=1 59 | fi 60 | 61 | # list the catalog 62 | echo "List catalogs" 63 | while IFS= read -r -d "" file 64 | do 65 | CURRENT_BACKUPDEF=$(basename "$file") 66 | CATALOG="${CURRENT_BACKUPDEF}"${CATALOG_SUFFIX} 67 | _ARCHIVENAME="${CURRENT_BACKUPDEF}"_FULL_$(date -I) 68 | echo "List catalog \"$CATALOG\" for archive: \"$_ARCHIVENAME\"" 69 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/"$CATALOG")" | grep "$_ARCHIVENAME" 70 | if [[ $? != "0" ]]; then 71 | echo ERROR catalog --list for failed "\"$CATALOG\"" 72 | TESTRESULT=1 73 | fi 74 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 75 | 76 | 77 | # check catalogs 78 | echo "Check catalogs" 79 | while IFS= read -r -d "" file 80 | do 81 | CURRENT_BACKUPDEF=$(basename "$file") 82 | CATALOG="${CURRENT_BACKUPDEF}"${CATALOG_SUFFIX} 83 | echo "check catalog "\"$CATALOG\" 84 | if [[ -e "$TESTDIR"/archives/"$CATALOG" ]]; then 85 | dar_manager -c --base "$(realpath "$TESTDIR"/archives/"${CATALOG}")" 86 | if [[ $? != "0" ]]; then 87 | echo ERROR catalog DB "\"$CATALOG\"" is not OK 88 | TESTRESULT=1 89 | fi 90 | else 91 | echo "ERROR \"${TESTDIR}/archives/${CATALOG}\" does not exist" 92 | TESTRESULT=1 93 | fi 94 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 95 | 96 | 97 | if [[ "$TESTRESULT" == "0" ]]; then 98 | echo "Test case succeeded" 99 | else 100 | echo "Test case FAILED" 101 | fi 102 | 103 | exit "$TESTRESULT" 104 | -------------------------------------------------------------------------------- /v2/tests/test_alternate_reference_archive.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 5 | 6 | from dar_backup.command_runner import CommandRunner 7 | 8 | 9 | test_files = { 10 | 'file1.txt': 'This is file 1.', 11 | 'file2.txt': 'This is file 2.', 12 | 'file3.txt': 'This is file 3.', 13 | 'file with spaces.txt': 'This is file with spaces.', 14 | 'file_with_danish_chars_æøå.txt': 'This is file with danish chars æøå.', 15 | 'file_with_DANISH_CHARS_ÆØÅ.txt': 'This is file with DANISH CHARS ÆØÅ.', 16 | 'file_with_colon:.txt': 'This is file with colon :.', 17 | 'file_with_hash#.txt': 'This is file with hash #.', 18 | 'file_with_currency¤.txt': 'This is file with currency ¤.' 19 | } 20 | 21 | 22 | def create_test_files(env): 23 | env.logger.info("Creating test files...") 24 | for filename, content in test_files.items(): 25 | env.logger.info(f"Creating {filename} with content: {content} in {env.test_dir}") 26 | with open(os.path.join(env.test_dir, 'data', filename), 'w') as f: 27 | f.write(content) 28 | 29 | 30 | def test_diff_extected_to_work(setup_environment, env): 31 | """ 32 | Test that a diff backup works as expected without any alternate reference archive. 
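    A FULL backup is taken first so the DIFF run has a reference archive to work
    against; both commands are expected to return exit code 0.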
33 | """ 34 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 35 | 36 | create_test_files(env) 37 | 38 | # Do a full backup 39 | command = ['dar-backup', '--full-backup', '-d', "example", '--config-file', env.config_file] 40 | process = runner.run(command) 41 | assert process.returncode == 0, "dar-backup must succeed" 42 | 43 | # Do a DIFF 44 | command = ['dar-backup', '--differential-backup' ,'-d', "example", '--config-file', env.config_file] 45 | process = runner.run(command) 46 | assert process.returncode == 0, "dar-backup must succeed" 47 | 48 | 49 | def test_diff_missing_alternate_reference_archive(setup_environment, env): 50 | """ 51 | Provide a non-existing alternate archive me. 52 | dar-backup must fail doing a DIFF. 53 | """ 54 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 55 | 56 | create_test_files(env) 57 | 58 | # Do a DIFF with a non-existing alternate reference archive 59 | command = ['dar-backup', '--differential-backup' ,'-d', "example", '--config-file', env.config_file, '--alternate-reference-archive', 'non-existing-archive'] 60 | process = runner.run(command) 61 | print("return code", process.returncode) 62 | assert process.returncode != 0, "dar-backup must fail when the alternate reference archive does not exist" 63 | 64 | def test_incr_missing_alternate_reference_archive(setup_environment, env): 65 | """ 66 | Provide a non-existing alternate archive me. 67 | dar-backup must fail doing an INCR. 68 | """ 69 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 70 | 71 | create_test_files(env) 72 | 73 | # Do a INCRIFF with a non-existing alternate reference archive 74 | command = ['dar-backup', '--incremental-backup' ,'-d', "example", '--config-file', env.config_file, '--alternate-reference-archive', 'non-existing-archive'] 75 | process = runner.run(command) 76 | print("return code", process.returncode) 77 | assert process.returncode != 0, "dar-backup must fail when the alternate reference archive does not exist" 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /v2/tests/test_preflight.py: -------------------------------------------------------------------------------- 1 | import os 2 | from types import SimpleNamespace 3 | from textwrap import dedent 4 | 5 | from dar_backup.config_settings import ConfigSettings 6 | from dar_backup.dar_backup import preflight_check 7 | 8 | 9 | def write_config(path, backup_dir, backup_d_dir, test_restore_dir, par2_enabled="false"): 10 | cfg = dedent( 11 | f""" 12 | [MISC] 13 | LOGFILE_LOCATION = {backup_dir}/dar-backup.log 14 | MAX_SIZE_VERIFICATION_MB = 20 15 | MIN_SIZE_VERIFICATION_MB = 0 16 | NO_FILES_VERIFICATION = 5 17 | COMMAND_TIMEOUT_SECS = 86400 18 | 19 | [DIRECTORIES] 20 | BACKUP_DIR = {backup_dir} 21 | BACKUP.D_DIR = {backup_d_dir} 22 | TEST_RESTORE_DIR = {test_restore_dir} 23 | 24 | [AGE] 25 | DIFF_AGE = 30 26 | INCR_AGE = 15 27 | 28 | [PAR2] 29 | ERROR_CORRECTION_PERCENT = 5 30 | ENABLED = {par2_enabled} 31 | """ 32 | ).strip() 33 | path.write_text(cfg) 34 | return path 35 | 36 | 37 | def make_args(backup_definition=None): 38 | return SimpleNamespace( 39 | backup_definition=backup_definition, 40 | darrc=None, 41 | alternate_reference_archive=None, 42 | ) 43 | 44 | 45 | def test_preflight_passes_when_env_is_valid(tmp_path, capsys): 46 | backup_dir = tmp_path / "backups" 47 | backup_d_dir = tmp_path / "backup.d" 48 | test_restore_dir = tmp_path / "restore" 49 | backup_dir.mkdir() 50 | 
backup_d_dir.mkdir() 51 | test_restore_dir.mkdir() 52 | 53 | backup_def = backup_d_dir / "foo.dcf" 54 | backup_def.write_text("-R /tmp\n") 55 | 56 | config_file = tmp_path / "dar.conf" 57 | write_config(config_file, backup_dir, backup_d_dir, test_restore_dir, par2_enabled="false") 58 | config_settings = ConfigSettings(str(config_file)) 59 | 60 | ok = preflight_check(make_args("foo.dcf"), config_settings) 61 | assert ok is True 62 | 63 | out = capsys.readouterr().out 64 | assert "Preflight checks passed." in out 65 | 66 | 67 | def test_preflight_fails_when_backup_definition_missing(tmp_path, capsys): 68 | backup_dir = tmp_path / "backups" 69 | backup_d_dir = tmp_path / "backup.d" 70 | test_restore_dir = tmp_path / "restore" 71 | backup_dir.mkdir() 72 | backup_d_dir.mkdir() 73 | test_restore_dir.mkdir() 74 | 75 | config_file = tmp_path / "dar.conf" 76 | write_config(config_file, backup_dir, backup_d_dir, test_restore_dir, par2_enabled="false") 77 | config_settings = ConfigSettings(str(config_file)) 78 | 79 | ok = preflight_check(make_args("missing.dcf"), config_settings) 80 | assert ok is False 81 | 82 | out = capsys.readouterr().out 83 | assert "Backup definition not found" in out 84 | 85 | 86 | def test_preflight_fails_when_directories_missing(tmp_path, capsys): 87 | backup_dir = tmp_path / "backups" # not created 88 | backup_d_dir = tmp_path / "backup.d" # not created 89 | test_restore_dir = tmp_path / "restore" # not created 90 | 91 | config_file = tmp_path / "dar.conf" 92 | write_config(config_file, backup_dir, backup_d_dir, test_restore_dir, par2_enabled="false") 93 | config_settings = ConfigSettings(str(config_file)) 94 | 95 | ok = preflight_check(make_args(), config_settings) 96 | assert ok is False 97 | 98 | out = capsys.readouterr().out 99 | assert "does not exist" in out 100 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to dar-backup 2 | 3 | Thanks for considering contributing to **dar-backup**. 4 | This document outlines the basic rules and workflow for contributions. 5 | 6 | ## Ground Rules 7 | 8 | - **Tests must pass**: All PRs must run with a clean test suite (`pytest`) before submission. 9 | - **Code must be documented**: Public functions, classes, and modules require docstrings. Inline comments should be clear and factual. 10 | - **Include test cases**: Any functional code change must include appropriate test coverage. Pull requests without tests will not be merged. 11 | - **English only**: All code comments, commit messages, and documentation must be written in English. 12 | - **Security awareness**: No unsafe shell expansions, unvalidated inputs, or credential leaks. Use of *external* libraries should be kept minimal. 13 | - **Consistency**: Follow existing coding style (PEP8, Black formatting). 14 | - **License**: Contributions are accepted under GPL-3.0-or-later, consistent with the project license. 15 | 16 | ## Development Workflow 17 | 18 | **1. Fork and branch** 19 | 20 | - Fork the repository and create a branch off `main` (or the relevant maintenance branch). 21 | 22 | - Use descriptive branch names: `fix-spelling`, `add-tests-verify`, etc. 23 | 24 | **2. Run tests locally** 25 | 26 | ```bash 27 | # Version 2 28 | cd v2; pytest -v 29 | ``` 30 | 31 | ```bash 32 | # Version 1 33 | cd v1/test; ./runner.sh 34 | ``` 35 | 36 | **3. Ensure no regressions** 37 | 38 | - Add tests. 
39 | 40 | - New functionality: add one or more test cases. 41 | 42 | - Bug fix: include a regression test. 43 | 44 | **4. Code style** 45 | 46 | - Use Black for formatting. 47 | 48 | - Use isort for import ordering. 49 | 50 | - Use flake8 to catch basic errors. 51 | 52 | **5. Commit and push** 53 | 54 | - Write meaningful commit messages. 55 | 56 | - Group related changes into a single commit when possible. 57 | 58 | **6. Licensing Requirements** 59 | 60 | All contributions to this project must be made under the GNU General Public License v3.0 or later (GPL-3.0+). 61 | 62 | To ensure clarity and legal compliance, contributors must explicitly confirm in writing (e.g., in the PR comment or description) that: 63 | 64 | “I am submitting this contribution under the terms of the GPL-3.0 or later.” 65 | 66 | Checking the license box in the PR template is **not sufficient** on its own. 67 | 68 | **7. Pull request** 69 | 70 | - Open a PR against the correct branch. 71 | 72 | - Describe what the PR does and why. 73 | 74 | - Reference issues if applicable. 75 | 76 | ## CI Checks 77 | 78 | All pull requests are automatically validated by GitHub Actions. 79 | 80 | - The CI workflow runs tests for both v1 and v2. 81 | 82 | - Code style checks (Black, isort, flake8) must pass. 83 | 84 | - PRs will not be merged if CI fails. 85 | 86 | - Run the same commands locally before pushing to avoid unnecessary iterations. 87 | 88 | ## Reporting Issues 89 | 90 | - Use the GitHub issue tracker. 91 | 92 | - Provide steps to reproduce, logs, or configuration details. 93 | 94 | - Clearly state the expected vs actual behavior. 95 | 96 | ## Documentation 97 | 98 | - Documentation changes are welcome, even for small fixes. 99 | 100 | - Place user-facing docs in README.md or doc/. 101 | - Consider references in README.md to documentation in doc/. 102 | 103 | - Use Markdown. Keep language concise and in English. 
104 | -------------------------------------------------------------------------------- /v2/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ['hatchling'] 3 | build-backend = "hatchling.build" 4 | 5 | #[tool.pytest.ini_options] 6 | # test the deployed package 7 | #pythonpath = "venv/lib/python3.12/site-packages" 8 | #addopts = [ 9 | # "--import-mode=importlib", 10 | # "--cov=dar_backup", 11 | # "--cov-report=xml", 12 | # "--no-cov-on-fail" 13 | #] 14 | 15 | #addopts = [ 16 | # "--import-mode=importlib", 17 | #] 18 | 19 | 20 | [tool.hatch.build] 21 | include = [ 22 | "src/dar_backup/**", 23 | "README.md", 24 | "Changelog.md", 25 | "LICENSE", 26 | "verify-signature.sh", 27 | "doc/**", 28 | "src/dar_backup/README.md", 29 | "src/dar_backup/Changelog.md" 30 | ] 31 | 32 | [tool.hatch.build.targets.wheel] 33 | packages = ["src/dar_backup"] 34 | sources = ["src"] 35 | 36 | 37 | [tool.hatch.version] 38 | path = "src/dar_backup/__about__.py" 39 | 40 | [tool.dar-backup] 41 | gpg_key_id = "B54F5682F28DBA3622D78E0458DBFADBBBAC1BB1" 42 | gpg_fingerprint = "B54F 5682 F28D BA36 22D7 8E04 58DB FADB BBAC 1BB1" 43 | gpg_key_url = "https://keys.openpgp.org/vks/v1/by-fingerprint/4592D7396DBAEFFD084502B85CCEC7E16814A36E" 44 | 45 | [project.scripts] 46 | dar-backup = "dar_backup.dar_backup:main" 47 | cleanup = "dar_backup.cleanup:main" 48 | clean-log = "dar_backup.clean_log:main" 49 | manager = "dar_backup.manager:main" 50 | demo = "dar_backup.demo:main" 51 | installer = "dar_backup.installer:main" 52 | dar-backup-systemd = "dar_backup.dar_backup_systemd:main" 53 | 54 | 55 | [project] 56 | name = "dar-backup" 57 | dynamic = [ 58 | "version", 59 | ] 60 | authors = [ 61 | { name="dar-backup", email="dar-backup@pm.me" }, 62 | ] 63 | description = """A script to do full, differential and incremental backups using dar. 64 | Some files are restored from the backups during verification, after which par2 redundancy files are created. 
65 | The script also has a cleanup feature to remove old backups and par2 files.""" 66 | readme = "README.md" 67 | readme-content-type = "text/markdown" 68 | license = { file="LICENSE", name="GPL-3.0-or-later" } 69 | requires-python = ">=3.9" 70 | classifiers = [ 71 | "Development Status :: 5 - Production/Stable", 72 | "Intended Audience :: End Users/Desktop", 73 | 'Programming Language :: Python :: 3.9', 74 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", 75 | "Operating System :: POSIX :: Linux", 76 | "Topic :: System :: Archiving :: Backup", 77 | ] 78 | 79 | 80 | dependencies = [ 81 | "inputimeout>=1.0.4", 82 | "rich>=13.0.0", 83 | "argcomplete>=3.6.2", 84 | "Jinja2>=3.1.6", 85 | ] 86 | 87 | [project.optional-dependencies] 88 | dev = [ 89 | "pytest", 90 | "wheel>=0.45.1", 91 | "requests>=2.32.2", 92 | "coverage>=7.8.2", 93 | "pytest>=8.4.0", 94 | "pytest-cov>=6.1.1", 95 | "psutil>=7.0.0", 96 | "pytest-timeout>=2.4.0", 97 | "httpcore>=0.17.3", 98 | "h11>=0.16.0", 99 | "zipp>=3.19.1", 100 | "anyio>=4.4.0", 101 | "black>=25.1.0", 102 | "pandas>=2.3.0", 103 | "matplotlib>=3.10.3", 104 | ] 105 | 106 | packaging = [ 107 | "build>=1.2.2", 108 | "hatchling>=1.27.0", 109 | "hatch>=1.14.1", 110 | "twine>=6.1.0", 111 | ] 112 | 113 | [project.urls] 114 | "GPG Public Key" = "https://keys.openpgp.org/search?q=dar-backup@pm.me" 115 | Homepage = "https://github.com/per2jensen/dar-backup/tree/main/v2" 116 | Changelog = "https://github.com/per2jensen/dar-backup/blob/main/v2/Changelog.md" 117 | Issues = "https://github.com/per2jensen" 118 | -------------------------------------------------------------------------------- /v2/tests/test_run_command.py: -------------------------------------------------------------------------------- 1 | # modified: 2021-07-25 to be a pytest test 2 | import re 3 | import os 4 | import sys 5 | import pytest 6 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 7 | 8 | 9 | from tests.envdata import EnvData 10 | from dar_backup.command_runner import CommandRunner 11 | 12 | 13 | import tempfile 14 | from pathlib import Path 15 | 16 | def test_stdout_1MB(setup_environment, env): 17 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 18 | 19 | script = ( 20 | "import sys, os\n" 21 | "sys.stdout.buffer.write(os.urandom(1048576))\n" 22 | "sys.stdout.flush()\n" 23 | ) 24 | with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f: 25 | f.write(script) 26 | script_path = f.name 27 | 28 | command = ["python3", script_path] 29 | process = runner.run(command, text=False) 30 | 31 | assert process.returncode == 0 32 | assert isinstance(process.stdout, bytes) 33 | assert len(process.stdout) == 1048576 34 | 35 | 36 | 37 | def test_command_not_found(setup_environment, env): 38 | """ 39 | Test that run_command correctly handles a missing command. 
40 | """ 41 | try: 42 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 43 | command = ["nonexistent_command"] 44 | result = runner.run(command) 45 | env.logger.info(f"Returncode: {result.returncode}") 46 | env.logger.info(f"Stderr: {result.stderr}") 47 | assert result.returncode == 127, f"returncode was: {result.returncode}, expected return code 127 for command not found" 48 | assert "FileNotFoundError" in result.stderr or "not found" in result.stderr.lower(), "Expected command not found message in stderr" 49 | env.logger.info("Successfully handled missing command") 50 | except Exception as e: 51 | env.logger.error(f"Expected result: Test failed with exception: {e}") 52 | assert True 53 | 54 | 55 | 56 | def test_check_flag_logs_error(setup_environment, env): 57 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 58 | command = ['bash', '-c', 'exit 1'] 59 | result = runner.run(command, check=True) 60 | assert result.returncode == 1 61 | 62 | 63 | 64 | 65 | def test_capture_output_false(setup_environment, env): 66 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 67 | command = ['echo', 'hello'] 68 | result = runner.run(command, capture_output=False) 69 | assert result.returncode == 0 70 | assert result.stdout == '' 71 | assert result.stderr == '' 72 | 73 | 74 | def test_logger_fallback(monkeypatch): 75 | runner = CommandRunner(logger=None, command_logger=None) 76 | assert runner.logger is not None 77 | assert runner.command_logger is not None 78 | 79 | 80 | 81 | def test_timeout_handling(setup_environment, env): 82 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger, default_timeout=1) 83 | command = ['bash', '-c', 'sleep 5'] 84 | result = runner.run(command) 85 | assert result.returncode == -1 86 | assert "timed out" in result.stderr or result.stdout == '' # based on fallback handling 87 | 88 | 89 | @pytest.mark.skip(reason="Binary output mode (text=False) is not supported in CommandRunner") 90 | def test_binary_output_mode(setup_environment, env): 91 | """ 92 | This test is intentionally skipped because CommandRunner is designed for text mode only. 93 | """ 94 | pass 95 | 96 | -------------------------------------------------------------------------------- /v1/test/test-create-catalog.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # test creation of catalog 4 | # - make 5 backup definitions 5 | # - do a backup 6 | # - create the catalog 7 | # - populate the catalog with archive data 8 | # - list catalog 9 | # - check catalog 10 | # - restore files 11 | 12 | TESTRESULT=0 13 | 14 | SCRIPTPATH=$(realpath "$0") 15 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 16 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 17 | 18 | source "$SCRIPTDIRPATH/setup.sh" 19 | source "$TESTDIR/bin/dar-util.sh" 20 | source "$TESTDIR/conf/dar-backup.conf" 21 | 22 | # generate 5 different backups 23 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST2 24 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST3 25 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST4 26 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST5 27 | 28 | # create catalogs 29 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 30 | if [[ $? != "0" ]]; then 31 | echo ERROR catalog was not created, exiting 32 | exit 1 33 | fi 34 | 35 | 36 | # do backups 37 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir 38 | RESULT=$? 
39 | if [[ $RESULT != "0" ]]; then 40 | TESTRESULT=1 41 | fi 42 | 43 | 44 | # populate catalogs with archive data 45 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --local-backup-dir 46 | if [[ $? != "0" ]]; then 47 | echo ERROR some or all archives were not added to catalog, exiting 48 | exit 1 49 | fi 50 | 51 | # list the catalog 52 | while IFS= read -r -d "" file 53 | do 54 | CURRENT_BACKUPDEF=$(basename "$file") 55 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 56 | echo "List catalog \"$CATALOG\" for backup definition \"$CURRENT_BACKUPDEF\"" 57 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/"$CATALOG")" 58 | if [[ $? != "0" ]]; then 59 | echo ERROR catalog --list failed for "\"$CATALOG\"" 60 | TESTRESULT=1 61 | fi 62 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 63 | 64 | 65 | # check catalogs 66 | while IFS= read -r -d "" file 67 | do 68 | CURRENT_BACKUPDEF=$(basename "$file") 69 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 70 | echo check catalog "\"$CATALOG\"" 71 | if [[ -e "$TESTDIR"/archives/"$CATALOG" ]]; then 72 | dar_manager -c --base "$(realpath "$TESTDIR"/archives/"${CATALOG}")" 73 | if [[ $? != "0" ]]; then 74 | echo ERROR catalog DB "\"$CATALOG\"" is not OK 75 | TESTRESULT=1 76 | fi 77 | else 78 | echo "ERROR \"${TESTDIR}/archives/${CATALOG}\" does not exist" 79 | TESTRESULT=1 80 | fi 81 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 82 | 83 | 84 | # restore files to temp dirs from catalogs 85 | while IFS= read -r -d "" file 86 | do 87 | CURRENT_BACKUPDEF=$(basename "$file") 88 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 89 | 90 | TEMPDIR=$(mktemp -d) 91 | echo restoring "\"$CURRENT_BACKUPDEF\"" to "\"$TEMPDIR\"" from catalog "\"$CATALOG\"" 92 | dar_manager --base "$(realpath "$TESTDIR"/archives/"$CATALOG")" -e "-R $TEMPDIR -Oignore-owner 93 | " -r "dirs" 94 | if [[ $? != "0" ]]; then 95 | echo ERROR dar_manager restore failed 96 | TESTRESULT=1 97 | fi 98 | find "$TEMPDIR" -type f 99 | rm -fr "$TEMPDIR" 100 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 101 | 102 | if [[ "$TESTRESULT" == "0" ]]; then 103 | log_success "Test case succeeded" 104 | else 105 | log_fail "Test case failed" 106 | fi 107 | 108 | exit "$TESTRESULT" 109 | -------------------------------------------------------------------------------- /v1/share/README.md: -------------------------------------------------------------------------------- 1 | # Systemd timers and services 2 | 3 | Systemd timers and service files can be found in the share/ directory. 4 | The following is here: 5 | 6 | - FULL, DIFF and INC backups 7 | - Scheduled cleanups of old archives 8 | - Alerts sent on upcoming FULL & DIFF backups. 9 | - Cleanups of old DIFFs and INCs, every day at 21:07 10 | 11 | The files are ready to be copied by the user to "~/.config/systemd/user" 12 | 13 | ## Grandfather, father, son backup strategy 14 | I use a variation of the [grandfather-father-son](https://en.wikipedia.org/wiki/Backup_rotation_scheme#Grandfather-father-son) backup strategy. The setup of backups detailed below is fitting for me, given the current speed of changes to my filesystems. 15 | 16 | It is very easy to change the installed timers into a different variation of the backup strategy; one way to do that is sketched just below. I encourage you to always review the timer setup, as described below, after changes.
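For instance, assuming the user-level units installed as shown in this document, a systemd drop-in lets you change a schedule without touching the installed timer file (the schedule below is only an example):

````
# Example: move DIFF backups to the 2nd day of the month via a drop-in
systemctl --user edit dar-diff-backup.timer
# add in the editor:
#   [Timer]
#   OnCalendar=
#   OnCalendar=*-*-02 19:03:00
systemctl --user daemon-reload
systemctl --user list-timers
````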
17 | 18 | 19 | The timers have been set up like this: 20 | 21 | - FULL backup on December 30 at 10:03:00 22 | - DIFF backups on the first day of a month (including Jan) at 19:03:00 23 | - INC backups starting on the fourth day of a month at 19:03:00, repeating every 3 days 24 | 25 | ## Installation 26 | 27 | - Copy the "share/*.timer" & "share/*.service" files to ~/.config/systemd/user/ 28 | - Install the files in systemd 29 | 30 | ```` 31 | cp /share/*.timer ~/.config/systemd/user/ 32 | cp /share/*.service ~/.config/systemd/user/ 33 | systemctl --user enable dar-backup.timer 34 | systemctl --user start dar-backup.timer 35 | 36 | systemctl --user enable dar-diff-backup.timer 37 | systemctl --user start dar-diff-backup.timer 38 | 39 | systemctl --user enable dar-inc-backup.timer 40 | systemctl --user start dar-inc-backup.timer 41 | 42 | systemctl --user enable alert-upcoming-full-backup.timer 43 | systemctl --user start alert-upcoming-full-backup.timer 44 | 45 | systemctl --user enable alert-upcoming-diff-backup.timer 46 | systemctl --user start alert-upcoming-diff-backup.timer 47 | 48 | systemctl --user enable dar-cleanup.timer 49 | systemctl --user start dar-cleanup.timer 50 | 51 | 52 | systemctl --user daemon-reload 53 | ```` 54 | - Verify your timers are listed, and that the "NEXT" time is correct for each timer 55 | 56 | ```` 57 | systemctl --user list-timers 58 | ```` 59 | 60 | ## View systemd status 61 | Do this to view systemd status for your services 62 | 63 | ```` 64 | systemctl --user status dar-backup.service 65 | systemctl --user status dar-diff-backup.service 66 | systemctl --user status dar-inc-backup.service 67 | ```` 68 | 69 | ## View systemd log 70 | 71 | - View systemd messages for the FULL service 72 | ```` 73 | journalctl --user -u dar-backup.service 74 | ```` 75 | 76 | - View systemd messages for a time period for the FULL service 77 | ```` 78 | journalctl --user -u dar-backup.service --since "2022-04-13 08:00:00" --until "2022-04-13 09:00:00" 79 | ```` 80 | 81 | ## Example: run INC service & tail journal 82 | This example shows how to start the systemd "dar-inc-backup.service" and view the status messages showing up in the systemd journal 83 | ```` 84 | systemctl --user start dar-inc-backup.service 85 | 86 | journalctl --user -n 50 -f -u dar-inc-backup.service 87 | ```` 88 | 89 | 90 | ## Systemd documentation 91 | 92 | - [systemd website](https://systemd.io/) 93 | - [systemd timer](https://www.freedesktop.org/software/systemd/man/systemd.timer.html) 94 | - [systemd service (unit)](https://www.freedesktop.org/software/systemd/man/systemd.unit.html) 95 | 96 | -------------------------------------------------------------------------------- /v1/test/test-populate-single-catalog.sh: -------------------------------------------------------------------------------- 1 | #!
/bin/bash 2 | 3 | # test adding achives to catalog for multiple different backup definitions 4 | 5 | TESTRESULT=0 6 | 7 | SCRIPTPATH=$(realpath "$0") 8 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 9 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 10 | 11 | source "$SCRIPTDIRPATH/setup.sh" 12 | source "$TESTDIR/bin/dar-util.sh" 13 | source "$TESTDIR/conf/dar-backup.conf" 14 | 15 | # generate 5 different backups 16 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST2 17 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST3 18 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST4 19 | cp "$TESTDIR"/backups.d/TEST "$TESTDIR"/backups.d/TEST5 20 | 21 | # create catalogs 22 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 23 | if [[ $? != "0" ]]; then 24 | echo ERROR catalog was not created, exiting 25 | exit 1 26 | fi 27 | 28 | 29 | # do backups 30 | "$TESTDIR/bin/dar-backup.sh" --local-backup-dir 31 | RESULT=$? 32 | if [[ $RESULT != "0" ]]; then 33 | TESTRESULT=1 34 | fi 35 | 36 | 37 | # populate catalogs with archive data, one at a time 38 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def TEST --local-backup-dir 39 | if [[ $? != "0" ]]; then 40 | echo ERROR some or all archives were not added to catalog, exiting 41 | exit 1 42 | fi 43 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def TEST2 --local-backup-dir 44 | if [[ $? != "0" ]]; then 45 | echo ERROR some or all archives were not added to catalog, exiting 46 | exit 1 47 | fi 48 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def TEST3 --local-backup-dir 49 | if [[ $? != "0" ]]; then 50 | echo ERROR some or all archives were not added to catalog, exiting 51 | exit 1 52 | fi 53 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def TEST4 --local-backup-dir 54 | if [[ $? != "0" ]]; then 55 | echo ERROR some or all archives were not added to catalog, exiting 56 | exit 1 57 | fi 58 | "$TESTDIR/bin/manager.sh" --add-dir "$TESTDIR"/archives --backup-def TEST5 --local-backup-dir 59 | if [[ $? != "0" ]]; then 60 | echo ERROR some or all archives were not added to catalog, exiting 61 | exit 1 62 | fi 63 | 64 | 65 | 66 | # list the catalogs 67 | while IFS= read -r -d "" file 68 | do 69 | CURRENT_BACKUPDEF=$(basename "$file") 70 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 71 | echo "List catalog \"$CATALOG\" for backup definition \"$CURRENT_BACKUPDEF\"" 72 | dar_manager -l --base "$(realpath "$TESTDIR"/archives/"$CATALOG")" 73 | if [[ $? != "0" ]]; then 74 | echo ERROR catalog --list for failed "\"$CATALOG\"" 75 | TESTRESULT=1 76 | fi 77 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 78 | 79 | 80 | # check catalogs 81 | while IFS= read -r -d "" file 82 | do 83 | CURRENT_BACKUPDEF=$(basename "$file") 84 | CATALOG=${CURRENT_BACKUPDEF}${CATALOG_SUFFIX} 85 | echo check catalog "\"$CATALOG\"" 86 | if [[ -e "$TESTDIR"/archives/"$CATALOG" ]]; then 87 | dar_manager -c --base "$(realpath "$TESTDIR"/archives/"${CATALOG}")" 88 | if [[ $? 
!= "0" ]]; then 89 | echo ERROR catalog DB "\"$CATALOG\"" is not OK 90 | TESTRESULT=1 91 | fi 92 | else 93 | echo "ERROR \"${TESTDIR}/archives/${CATALOG}\" does not exist" 94 | TESTRESULT=1 95 | fi 96 | done < <(find "${TESTDIR}"/backups.d -type f -print0) 97 | 98 | if [[ "$TESTRESULT" == "0" ]]; then 99 | log_success "Test case succeeded" 100 | else 101 | log_fail "Test case failed" 102 | fi 103 | 104 | exit "$TESTRESULT" 105 | -------------------------------------------------------------------------------- /src/clonepulse/util.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: MIT 2 | 3 | """ 4 | Source code is here: https://github.com/per2jensen/clonepulse 5 | 6 | MIT License 7 | 8 | Copyright (c) 2025 Per Jensen 9 | 10 | Permission is hereby granted, free of charge, to any person obtaining a copy 11 | of this software and associated documentation files (the "Software"), to deal 12 | in the Software without restriction, including without limitation the rights 13 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 14 | copies of the Software, and to permit persons to whom the Software is 15 | furnished to do so, subject to the following conditions: 16 | 17 | The above copyright notice and this permission notice shall be included in all 18 | copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 21 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 22 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 23 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 24 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 25 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 26 | SOFTWARE. 27 | 28 | """ 29 | 30 | import clonepulse.__about__ as about 31 | import os 32 | import re 33 | import sys 34 | 35 | from pathlib import Path 36 | 37 | def get_invocation_command_line() -> str: 38 | """ 39 | Safely retrieves the exact command line used to invoke the current Python process. 40 | 41 | On Unix-like systems, this reads from /proc/[pid]/cmdline to reconstruct the 42 | command with interpreter and arguments. If any error occurs (e.g., file not found, 43 | permission denied, non-Unix platform), it returns a descriptive error message. 44 | 45 | Returns: 46 | str: The full command line string, or an error description if it cannot be retrieved. 
47 | """ 48 | try: 49 | cmdline_path = f"/proc/{os.getpid()}/cmdline" 50 | with open(cmdline_path, "rb") as f: 51 | content = f.read() 52 | if not content: 53 | return "[error: /proc/cmdline is empty]" 54 | return content.replace(b'\x00', b' ').decode().strip() 55 | except Exception as e: 56 | return f"[error: could not read /proc/[pid]/cmdline: {e}]" 57 | 58 | 59 | def show_scriptname() -> str: 60 | """ 61 | Return script name, useful in start banner for example 62 | """ 63 | try: 64 | scriptname = os.path.basename(sys.argv[0]) 65 | except: 66 | scriptname = "unknown" 67 | return scriptname 68 | 69 | 70 | def show_version(): 71 | script_name = os.path.basename(sys.argv[0]) 72 | print(f"{script_name} {about.__version__}") 73 | print(f"{script_name} source code is here: https://github.com/per2jensen/clonepulse") 74 | print(about.__license__) 75 | 76 | 77 | 78 | def extract_version(output): 79 | match = re.search(r'(\d+\.\d+(\.\d+)?)', output) 80 | return match.group(1) if match else "unknown" 81 | 82 | 83 | def expand_path(path: str) -> str: 84 | """ 85 | Expand ~ and environment variables like $HOME in a path. 86 | """ 87 | return os.path.expanduser(os.path.expandvars(path)) 88 | 89 | 90 | def normalize_dir(path: str) -> str: 91 | """ 92 | Strip any trailing slash/backslash but leave root (“/” or “C:\\”) intact. 93 | """ 94 | p = Path(path) 95 | # Path(__str__) drops any trailing separators 96 | normalized = str(p) 97 | return normalized 98 | 99 | 100 | -------------------------------------------------------------------------------- /v2/tests/test_verbose.py: -------------------------------------------------------------------------------- 1 | # modified: 2021-07-25 to be a pytest test 2 | import importlib 3 | import re 4 | import sys 5 | import os 6 | 7 | # Add src directory to path 8 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src"))) 9 | 10 | from tests.envdata import EnvData 11 | from dar_backup.command_runner import CommandRunner 12 | 13 | 14 | 15 | def create_test_files(env: EnvData) -> dict: 16 | env.logger.info("Creating test dummy archive files...") 17 | test_files = { 18 | f'dummy_FULL_.1.dar': 'dummy', 19 | } 20 | for filename, content in test_files.items(): 21 | with open(os.path.join(env.test_dir, 'backups', filename), 'w') as f: 22 | f.write(content) 23 | 24 | return test_files 25 | 26 | def test_verbose(setup_environment, env): 27 | test_files = create_test_files(env) 28 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 29 | 30 | command = ['dar-backup', '--list', '--config-file', env.config_file, '--verbose'] 31 | process = runner.run(command) 32 | if process.returncode != 0: 33 | raise Exception(f"Command failed {command}") 34 | 35 | expected_patterns = [ 36 | 'Script directory:', 37 | 'Backup.d dir:', 38 | 'Backup dir:', 39 | 'Restore dir:', 40 | 'Logfile location:', 41 | '--do-not-compare:' 42 | ] 43 | 44 | for pattern in expected_patterns: 45 | assert re.search(pattern, process.stdout), f"Pattern `{pattern}` not found in output" 46 | 47 | def test_verbose_cleanup(setup_environment, env): 48 | test_files = create_test_files(env) 49 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 50 | 51 | command = ['cleanup', '--list', '--config-file', env.config_file, '--verbose'] 52 | process = runner.run(command) 53 | if process.returncode != 0: 54 | raise Exception(f"Command failed {command}") 55 | stdout, stderr = process.stdout, process.stderr 56 | 57 | expected_patterns = [ 58 | 
'Script directory:', 59 | 'Config file:', 60 | 'Backup dir:', 61 | 'Logfile:', 62 | '--cleanup-specific-archives:', 63 | '--alternate-archive-dir:' 64 | ] 65 | 66 | for pattern in expected_patterns: 67 | assert re.search(pattern, stdout), f"Pattern {pattern} not found in output" 68 | 69 | def test_verbose_error_reporting(setup_environment, env): 70 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 71 | # Patch config file with a successful command 72 | with open(env.config_file, 'a') as f: 73 | f.write('\n[PREREQ]\n') 74 | f.write('PREREQ_01 = ls /tmp\n') 75 | 76 | # Run the command 77 | command = ['dar-backup', '--full-backup', '-d', "example", '--config-file', env.config_file, '--verbose'] 78 | process = runner.run(command) 79 | assert process.returncode == 0 80 | 81 | # Patch the config file with a failing command 82 | with open(env.config_file, 'a') as f: 83 | f.write('PREREQ_02 = command-does-not-exist /tmp\n') 84 | env.logger.info(f"PREREQ_02 which fails has been added to config file: {env.config_file}") 85 | 86 | # Run the command 87 | try: 88 | command = ['dar-backup', '--differential-backup', '-d', "example", '--config-file', env.config_file, '--verbose'] 89 | process = runner.run(command) 90 | assert process.returncode != 0 91 | assert "CalledProcessError(127, 'command-does-not-exist /tmp')" in process.stdout 92 | 93 | except Exception as e: 94 | env.logger.exception("Expected exception: dar-backup must fail when a prereq command fails") 95 | assert True 96 | -------------------------------------------------------------------------------- /v2/tests/test_util.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging 3 | import pytest 4 | from pathlib import Path 5 | from unittest.mock import patch, MagicMock 6 | from dar_backup import util 7 | 8 | 9 | @pytest.fixture 10 | def temp_logdir(tmp_path): 11 | log_dir = tmp_path / "logs" 12 | log_dir.mkdir() 13 | return log_dir 14 | 15 | 16 | def test_setup_logging_creates_file_and_logger(temp_logdir): 17 | logfile = temp_logdir / "test.log" 18 | command_output_file = temp_logdir / "command.log" 19 | 20 | logger = util.setup_logging(logfile, command_output_file) 21 | 22 | assert logfile.exists() 23 | assert command_output_file.exists() 24 | assert isinstance(logger, logging.Logger) 25 | 26 | 27 | def test_setup_logging_stdout_and_file(tmp_path): 28 | logfile = tmp_path / "combo.log" 29 | command_output_file = tmp_path / "command.log" 30 | 31 | # We patch sys.stdout to a dummy, but setup_logging doesn’t support also_stdout anymore 32 | logger = util.setup_logging(logfile, command_output_file) 33 | logger.info("Hello from logger") 34 | 35 | assert logfile.exists() 36 | assert "Hello" in logfile.read_text() 37 | assert command_output_file.exists() 38 | 39 | def list_backups(backup_dir: Path) -> list: 40 | if not backup_dir.exists() or not backup_dir.is_dir(): 41 | print("No backups available.") 42 | return [] 43 | 44 | backups = [f.name for f in backup_dir.iterdir() if f.is_dir()] 45 | if not backups: 46 | print("No backups available.") 47 | else: 48 | for b in sorted(backups): 49 | print(b) 50 | 51 | return sorted(backups) 52 | 53 | 54 | 55 | def test_list_backups_ignores_files(tmp_path, capsys): 56 | test_dir = tmp_path / "backups" 57 | test_dir.mkdir() 58 | 59 | # Create a valid .dar file matching expected pattern 60 | valid_backup = test_dir / "mybackup_FULL_2024-04-01.1.dar" 61 | valid_backup.write_text("dummy") 62 | 63 | # Create a file that 
should be ignored 64 | (test_dir / "note.txt").write_text("Ignore me") 65 | 66 | # Call the function 67 | util.list_backups(str(test_dir)) 68 | 69 | # Capture stdout 70 | out = capsys.readouterr().out 71 | 72 | # Assertions: should include valid backup name, not note.txt 73 | assert "mybackup_FULL_2024-04-01" in out 74 | assert "note.txt" not in out 75 | 76 | 77 | 78 | 79 | from dar_backup.util import get_invocation_command_line 80 | 81 | def test_get_invocation_command_line_positive(monkeypatch): 82 | """ 83 | Positive test: 84 | Simulates reading from /proc/[pid]/cmdline and verifies the reconstructed command line. 85 | """ 86 | fake_cmdline = b"/usr/bin/python3\x00myscript.py\x00--option\x00value" 87 | 88 | def mock_open(*args, **kwargs): 89 | from io import BytesIO 90 | return BytesIO(fake_cmdline) 91 | 92 | monkeypatch.setattr("builtins.open", mock_open) 93 | result = get_invocation_command_line() 94 | 95 | assert isinstance(result, str) 96 | assert "/usr/bin/python3 myscript.py --option value" in result 97 | 98 | def test_get_invocation_command_line_negative(monkeypatch): 99 | """ 100 | Negative test: 101 | Simulates a file read failure (e.g. missing /proc/[pid]/cmdline) and verifies fallback message. 102 | """ 103 | def mock_open(*args, **kwargs): 104 | raise FileNotFoundError("simulated missing /proc file") 105 | 106 | monkeypatch.setattr("builtins.open", mock_open) 107 | result = get_invocation_command_line() 108 | 109 | assert isinstance(result, str) 110 | assert "error" in result.lower() 111 | assert "could not read" in result.lower() 112 | -------------------------------------------------------------------------------- /v2/src/dar_backup/rich_progress.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: GPL-3.0-or-later 3 | 4 | import os 5 | import time 6 | from threading import Event 7 | from rich.console import Console, Group 8 | from rich.live import Live 9 | from rich.text import Text 10 | 11 | def is_terminal(): 12 | return Console().is_terminal 13 | 14 | def tail_log_file(log_path, stop_event, session_marker=None): 15 | """Yields new lines from the log file, starting only after the session_marker is found.""" 16 | last_size = 0 17 | marker_found = session_marker is None 18 | 19 | while not stop_event.is_set(): 20 | if not os.path.exists(log_path): 21 | time.sleep(0.5) 22 | continue 23 | 24 | try: 25 | with open(log_path, "r") as f: 26 | if last_size > os.path.getsize(log_path): 27 | f.seek(0) 28 | else: 29 | f.seek(last_size) 30 | 31 | while not stop_event.is_set(): 32 | line = f.readline() 33 | if not line: 34 | break 35 | 36 | line = line.strip() 37 | last_size = f.tell() 38 | 39 | if not marker_found: 40 | if session_marker in line: 41 | marker_found = True 42 | continue 43 | 44 | yield line 45 | 46 | except Exception as e: 47 | print(f"[!] Error reading log: {e}") 48 | 49 | time.sleep(0.5) 50 | 51 | def get_green_shade(step, max_width): 52 | """Returns a green color from light to dark across the bar.""" 53 | start = 180 54 | end = 20 55 | value = int(start - ((start - end) * (step / max_width))) 56 | return f"rgb(0,{value},0)" 57 | 58 | def show_log_driven_bar(log_path: str, stop_event: Event, session_marker: str, max_width=50): 59 | console = Console() 60 | 61 | if not console.is_terminal: 62 | console.log("[~] Not a terminal — progress bar skipped.") 63 | return 64 | 65 | progress = 0 66 | dir_count = 0 67 | last_dir = "Waiting for directory..." 
68 | 69 | 70 | 71 | with Live(console=console, refresh_per_second=5) as live: 72 | for line in tail_log_file(log_path, stop_event, session_marker): 73 | lowered = line.lower() 74 | 75 | updated = False 76 | 77 | # Update directory name on "Inspecting" 78 | if "inspecting directory" in lowered and "finished" not in lowered: 79 | last_dir = line.split("Inspecting directory")[-1].strip() 80 | updated = True 81 | 82 | # Advance progress on "Finished" 83 | if "finished inspecting directory" in lowered: 84 | dir_count += 1 85 | progress = (progress + 1) % (max_width + 1) 86 | updated = True 87 | 88 | if updated: 89 | bar_text = "" 90 | for i in range(max_width): 91 | if i < progress: 92 | color = get_green_shade(i, max_width) 93 | bar_text += f"[{color}]#[/{color}]" 94 | else: 95 | bar_text += "-" 96 | 97 | bar = Text.from_markup(f"[white][{bar_text}][/white] [dim]Dirs: {dir_count}[/dim]") 98 | dir_display = Text(f"📂 {last_dir}", style="dim") 99 | 100 | live.update(Group(bar, dir_display)) 101 | 102 | if stop_event.is_set(): 103 | break 104 | 105 | # Rich prints a \n here, I will live with it -------------------------------------------------------------------------------- /v2/tests/test_restore.py: -------------------------------------------------------------------------------- 1 | "" 2 | """ 3 | modified: 2021-07-25 to be a pytest test 4 | 5 | see more restore tests/verifications in v2/tests/test_create_full_diff_incr_backup.py 6 | """ 7 | 8 | import os 9 | import re 10 | import shutil 11 | import tempfile 12 | 13 | from tests.envdata import EnvData 14 | from tests.conftest import test_files 15 | from dar_backup.command_runner import CommandRunner 16 | from dar_backup.util import CommandResult 17 | from testdata_verification import verify_restore_contents, verify_backup_contents, run_backup_script 18 | 19 | def test_restoredir_requires_value(setup_environment, env): 20 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 21 | command = ['dar-backup', '--restore', 'dummy_FULL_1970-01-01', '--restore-dir', '--log-stdout', '--log-level', 'debug', '--config-file', env.config_file] 22 | process = runner.run(command) 23 | env.logger.info(f"process.returncode={process.returncode}") 24 | if process.returncode == 0: 25 | raise Exception(f'dar-backup must fail because value to --restore-dir is not given') 26 | else: 27 | stdout, stderr = process.stdout, process.stderr 28 | if not re.search('usage: dar-backup', stderr): 29 | raise Exception(f"Expected error message not found in stderr: {stderr}") 30 | env.logger.info(f"process.returncode={process.returncode} which is expected") 31 | 32 | def test_restore_requires_value(setup_environment, env): 33 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 34 | command = ['dar-backup', '--restore', '--restore-dir', '/tmp/unit-test' , '--log-stdout', '--log-level', 'debug', '--config-file', env.config_file] 35 | process = runner.run(command) 36 | env.logger.info(f"process.returncode={process.returncode}") 37 | if process.returncode == 0: 38 | raise Exception(f'dar-backup must fail because a value to --restore is not given') 39 | else: 40 | stdout, stderr = process.stdout, process.stderr 41 | if not re.search('usage: dar-backup', stderr): 42 | raise Exception(f"Expected error message not found in stderr: {stderr}") 43 | env.logger.info(f"process.returncode={process.returncode} which is expected") 44 | 45 | def test_restore_with_restoredir(setup_environment, env): 46 | runner = CommandRunner(logger=env.logger, 
command_logger=env.command_logger) 47 | try: 48 | run_backup_script("--full-backup", env) 49 | unique_dir = tempfile.mkdtemp(dir='/tmp') 50 | env.logger.info(f"unique_dir={unique_dir}") 51 | command = ['dar-backup', '--restore', f'example_FULL_{env.datestamp}', '--restore-dir', unique_dir, '--log-stdout', '--log-level', 'debug', '--config-file', env.config_file] 52 | process = runner.run(command) 53 | env.logger.info(f"process.returncode={process.returncode}") 54 | if process.returncode != 0: 55 | stdout, stderr = process.stdout, process.stderr 56 | env.logger.error(f"command failed: \nstdout:{stdout}\nstderr:{stderr}") 57 | raise RuntimeError(f"dar-backup --restore failed, stderr: {stderr}") 58 | verify_restore_contents(test_files, f"example_FULL_{env.datestamp}", env, unique_dir) 59 | finally: 60 | shutil.rmtree(unique_dir) 61 | env.logger.info(f"test_restore_with_restoredir(): removed directory {unique_dir}") 62 | 63 | def test_restore_validation(setup_environment, env): 64 | runner = CommandRunner(logger=env.logger, command_logger=env.command_logger) 65 | try: 66 | result: CommandResult = run_backup_script("--full-backup", env) 67 | if "Restoring file: '" not in result.stdout or "' for file comparing" not in result.stdout: 68 | assert False, f"Expected message not found in stdout: {result.stdout}" 69 | finally: 70 | pass 71 | -------------------------------------------------------------------------------- /v1/bin/par2.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | # 3 | # Copyright (C) 2024 Per Jensen 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
17 | # 18 | # Generate par2 repair files of .dar files 19 | # 20 | # 21 | 22 | SCRIPTPATH=$(realpath "$0") 23 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 24 | SCRIPTNAME=$(basename "$0") 25 | 26 | VERSION=@@DEV-VERSION@@ 27 | 28 | ARCHIVE_DIR="" 29 | DAR_ARCHIVE="" 30 | 31 | 32 | # Get the options 33 | while [ -n "$1" ]; do 34 | case "$1" in 35 | --archive-dir) 36 | shift 37 | ARCHIVE_DIR="$1" 38 | ;; 39 | --archive|-a) 40 | shift 41 | DAR_ARCHIVE="$1" 42 | ;; 43 | --help|-h) 44 | echo "$SCRIPTNAME [--help|-h] [--version] --archive-dir <dir> --archive|-a <archive>" 45 | echo " --archive-dir <dir>, the directory holding the dar archive slices" 46 | echo " --archive <archive>, the archive to work on i.e.: \"TEST_FULL_2022-12-28\"" 47 | echo " --version, print version number, license and notice of no warranty" 48 | echo " --help, display this usage notice" 49 | exit 50 | ;; 51 | --version) 52 | echo "$SCRIPTNAME $VERSION" 53 | echo "Licensed under GNU GENERAL PUBLIC LICENSE v3, see \"LICENSE\" file for details" 54 | echo "THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 55 | APPLICABLE LAW, see section 15 and section 16 in the \"LICENSE\" file" 56 | exit 57 | ;; 58 | *) 59 | echo option "\"$1\"" not recognized, exiting 60 | exit 61 | ;; 62 | esac 63 | shift 64 | done 65 | 66 | if [[ $ARCHIVE_DIR == "" ]]; then 67 | echo "ERROR \"ARCHIVE_DIR\" not given, $SCRIPTNAME exiting" 68 | exit 1 69 | fi 70 | if [[ $DAR_ARCHIVE == "" ]]; then 71 | echo "ERROR \"DAR_ARCHIVE\" not given, $SCRIPTNAME exiting" 72 | exit 1 73 | fi 74 | 75 | 76 | if [[ ! -d "$ARCHIVE_DIR" ]]; then 77 | echo "ERROR alternate archive directory: \"$ARCHIVE_DIR\" not found, $SCRIPTNAME exiting" 78 | exit 1 79 | fi 80 | 81 | RESULT=0 82 | while IFS= read -r -d "" file 83 | do 84 | echo "Generate 5% repair data for: \"$file\"" 85 | par2 c -r5 -n1 "$file" > /dev/null 2>&1 86 | if [[ $? -ne "0" ]]; then 87 | RESULT=1 88 | fi 89 | done < <(find "$ARCHIVE_DIR" -type f -name "${DAR_ARCHIVE}.*.dar" -print0) 90 | 91 | 92 | 93 | NO_OF_DAR_SLICES=$(find "$ARCHIVE_DIR" -type f -name "${DAR_ARCHIVE}.*.dar" |wc -l) 94 | NO_OF_PAR2_FILES=$(find "$ARCHIVE_DIR" -type f -name "${DAR_ARCHIVE}.*.dar.par2" |wc -l) 95 | if (( NO_OF_DAR_SLICES == NO_OF_PAR2_FILES )); then 96 | if [[ $RESULT -eq "0" ]]; then 97 | echo "par2 successfully generated repair files" 98 | exit 0 99 | else 100 | echo "par2 reported errors while generating one or more repair files" 101 | exit 1 102 | fi 103 | 104 | fi 105 | 106 | if [[ $RESULT -ne "0" ]]; then 107 | echo "par2 generation of repair files failed" 108 | exit 1 109 | fi 110 | 111 | if (( NO_OF_PAR2_FILES == 0 )); then 112 | echo "no par2 repair files were generated" 113 | exit 1 114 | fi 115 | -------------------------------------------------------------------------------- /v1/test/test-definitions-with-spaces.sh: -------------------------------------------------------------------------------- 1 | #!
/bin/bash 2 | 3 | # run install.sh 4 | # rename the backup definition from TEST --> "A backup definition" 5 | # run dar-backup.sh 6 | # add file GREENLAND.JPEG to include dir and to the exclude dir 7 | # run dar-diff-backup.sh 8 | # list the FULL, DIFF, INC archives 9 | 10 | _RESULT="0" 11 | BACKUP_DEFINITON="A backup definition" 12 | 13 | SCRIPTPATH=$(realpath "$0") 14 | SCRIPTDIRPATH=$(dirname "$SCRIPTPATH") 15 | echo SCRIPTDIRPATH: "$SCRIPTDIRPATH" 16 | 17 | source "$SCRIPTDIRPATH"/setup.sh 18 | 19 | source "$TESTDIR"/conf/dar-backup.conf 20 | 21 | mv "$TESTDIR"/backups.d/TEST "$TESTDIR/backups.d/$BACKUP_DEFINITON" 22 | 23 | # create catalogs 24 | "$TESTDIR/bin/manager.sh" --create-catalog --local-backup-dir 25 | if [[ $? != "0" ]]; then 26 | echo ERROR catalog was not created, exiting 27 | exit 1 28 | fi 29 | 30 | 31 | # run the test 32 | "$TESTDIR"/bin/dar-backup.sh -d "$BACKUP_DEFINITON" --local-backup-dir 33 | RESULT=$? 34 | if [[ $RESULT != "0" ]]; then 35 | _RESULT=1 36 | fi 37 | 38 | dar -l "$MOUNT_POINT"/"$BACKUP_DEFINITON"_FULL_"$DATE" > "$TESTDIR"/FULL-filelist.txt 39 | echo "dar exit code: $?" 40 | 41 | 42 | # alter backup set 43 | cp "$SCRIPTDIRPATH"/GREENLAND.JPEG "$TESTDIR/dirs/include this one/" 44 | cp "$SCRIPTDIRPATH"/GREENLAND.JPEG "$TESTDIR/dirs/exclude this one/" 45 | 46 | # run DIFF backup 47 | "$TESTDIR"/bin/dar-diff-backup.sh -d "$BACKUP_DEFINITON" --local-backup-dir 48 | RESULT=$? 49 | if [[ $RESULT != "0" ]]; then 50 | _RESULT=1 51 | fi 52 | 53 | dar -l "$MOUNT_POINT"/"$BACKUP_DEFINITON"_DIFF_"$DATE" > "$TESTDIR"/DIFF-filelist.txt 54 | echo dar exit code: $? 55 | 56 | 57 | # modify a file backed up in the DIFF 58 | touch "$TESTDIR/dirs/include this one/GREENLAND.JPEG" 59 | 60 | # run INCREMENTAL backup 61 | "$TESTDIR"/bin/dar-inc-backup.sh -d "$BACKUP_DEFINITON" --local-backup-dir 62 | RESULT=$? 63 | if [[ $RESULT != "0" ]]; then 64 | _RESULT=1 65 | fi 66 | 67 | dar -l "$MOUNT_POINT"/"$BACKUP_DEFINITON"_INC_"$DATE" > "$TESTDIR"/INC-filelist.txt 68 | echo dar exit code: $? 69 | 70 | 71 | if [[ $_RESULT != "0" ]]; then 72 | echo "Something went wrong, exiting" 73 | exit 1 74 | fi 75 | 76 | 77 | echo . 78 | echo .. 
79 | echo =========================================== 80 | echo "cat filelists & logfile, then do checks" 81 | echo =========================================== 82 | echo "FULL dar archive:" 83 | cat "$TESTDIR"/FULL-filelist.txt 84 | echo "DIFF dar archive:" 85 | cat "$TESTDIR"/DIFF-filelist.txt 86 | echo "Logfile:" 87 | cat "$TESTDIR"/dar-backup.log 88 | echo RESULTS for FULL backup: 89 | # FULL backup 90 | checkExpectLog "\[Saved\].*?dirs/include this one/Abe.jpg" "$TESTDIR/FULL-filelist.txt" 91 | checkExpectLog "\[Saved\].*?dirs/include this one/Krummi.JPG" "$TESTDIR/FULL-filelist.txt" 92 | checkExpectLog "\[Saved\].*?dirs/compressable/Lorem Ipsum.txt" "$TESTDIR/FULL-filelist.txt" 93 | checkDontFindLog "include this one/GREENLAND.JPEG" "$TESTDIR/FULL-filelist.txt" 94 | checkDontFindLog "exclude this one/In exclude dir.txt" "$TESTDIR/FULL-filelist.txt" 95 | 96 | echo RESULTS for DIFF backup: 97 | # DIFF backup 98 | checkExpectLog "\[Saved\].*?dirs/include this one/GREENLAND.JPEG" "$TESTDIR/DIFF-filelist.txt" 99 | checkDontFindLog "exclude this one/GREENLAND.JPEG" "$TESTDIR/DIFF-filelist.txt" 100 | 101 | echo RESULTS for INCREMENTAL backup: 102 | # INC backup 103 | checkExpectLog "\[Saved\].*?dirs/include this one/GREENLAND.JPEG" "$TESTDIR/INC-filelist.txt" 104 | NO_SAVED=$(grep -c "\[Saved\]" "$TESTDIR"/INC-filelist.txt) 105 | echo "Number of files saved in INCREMENTAL archive: $NO_SAVED" 106 | if [[ $NO_SAVED != "1" ]]; then 107 | echo "more than one file saved in the INCREMENTAL archive" 108 | TESTRESULT=1 109 | fi 110 | 111 | echo TEST RESULT: "$TESTRESULT" 112 | exit "$TESTRESULT" 113 | -------------------------------------------------------------------------------- /v1/test/dirs/compressable/Lorem Ipsum.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | Հայերեն Shqip ‫العربية Български Català 中文简体 Hrvatski Česky Dansk Nederlands English Eesti Filipino Suomi Français ქართული Deutsch Ελληνικά ‫עברית हिन्दी Magyar Indonesia Italiano Latviski Lietuviškai македонски Melayu Norsk Polski Português Româna Pyccкий Српски Slovenčina Slovenščina Español Svenska ไทย Türkçe Українська Tiếng Việt 4 | Lorem Ipsum 5 | "Neque porro quisquam est qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit..." 6 | "There is no one who loves pain itself, who seeks after it and wants to have it, simply because it is pain..." 7 | 8 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce libero magna, ornare commodo elit eu, mattis sagittis augue. Nunc aliquet, est et condimentum blandit, tellus lacus egestas nisl, eleifend egestas risus urna a nisl. Aenean feugiat ante quam, sit amet vehicula orci fringilla vel. Donec vitae dui lacinia, maximus nisl sed, pulvinar diam. Curabitur nulla est, dictum eget ornare ut, bibendum a orci. Phasellus consequat eros ut lacus posuere pretium. Nulla rutrum, magna sit amet eleifend volutpat, massa augue malesuada magna, ut euismod nunc nibh ut risus. Aenean egestas ligula quis ullamcorper facilisis. Aenean rhoncus, erat quis vestibulum facilisis, sem orci iaculis est, a volutpat eros risus sit amet purus. Fusce mi libero, tempor sit amet tincidunt et, vestibulum quis nulla. Morbi at venenatis turpis. Quisque sit amet diam id tortor vestibulum posuere. Nam leo orci, porta at ligula eget, interdum blandit eros. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Integer egestas leo felis, sed volutpat enim pharetra ac. 9 | 10 | Suspendisse potenti. Vivamus posuere tempus vehicula. 
Aliquam vehicula massa dolor, ac imperdiet diam placerat id. Quisque id leo ornare, suscipit mauris in, hendrerit massa. Proin rutrum, sapien at scelerisque euismod, tellus justo vestibulum felis, ut fringilla neque odio a purus. Aliquam fringilla lorem et nisl ultrices, non sodales justo viverra. Aenean consequat tempor metus quis aliquet. Praesent ac rutrum nunc, vel sagittis lectus. Etiam rhoncus sapien at est facilisis vestibulum vitae vitae magna. Cras faucibus molestie ex, eu fermentum est. Sed eleifend metus nec enim pellentesque sollicitudin. Mauris at dui nibh. 11 | 12 | Nam tempor venenatis odio vitae aliquet. Ut aliquet fringilla justo, at suscipit quam tristique quis. Curabitur non libero ut leo convallis aliquam non sit amet dui. Sed ex libero, sodales id leo in, mattis mollis lacus. Nulla efficitur vel mauris non vestibulum. Suspendisse sodales finibus mauris, sed dapibus lectus faucibus vel. Nullam tincidunt interdum sapien fringilla fringilla. Pellentesque aliquet, nisl in dictum congue, tortor risus consectetur nunc, et ultrices mauris tellus tincidunt orci. Nunc lobortis maximus diam, dapibus dictum dui dapibus id. Aliquam vulputate bibendum quam nec blandit. Praesent viverra libero ut accumsan feugiat. Phasellus eu ligula imperdiet, semper ex ut, ornare massa. Nulla sed orci nec diam sodales euismod. Praesent vel eros nec felis porttitor pellentesque. Donec iaculis lacus nunc, id eleifend mi tempor ut. 13 | 14 | Nunc ac imperdiet quam. Etiam eget cursus turpis, sit amet lacinia augue. Nulla facilisi. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Duis hendrerit, est eget imperdiet imperdiet, mauris nunc semper leo, eu molestie quam dui at leo. Aenean facilisis nibh erat, vel aliquet erat pulvinar et. Nam venenatis eros nec lorem ullamcorper lobortis. Donec tincidunt nec quam nec blandit. Integer et facilisis sem, eget pulvinar neque. 15 | 16 | Nulla dignissim venenatis quam vitae scelerisque. Phasellus ullamcorper gravida aliquet. Sed hendrerit, risus id tempor vehicula, turpis quam convallis justo, eget sodales tortor nisi sagittis elit. Curabitur vitae neque in ipsum tincidunt pretium sed et est. Quisque at odio quis magna ultrices pulvinar at et ipsum. Sed venenatis blandit hendrerit. Integer nec lectus consequat, luctus eros at, vehicula arcu. 17 | Generated 5 paragraphs, 503 words, 3388 bytes of Lorem Ipsum 18 | help@lipsum.com 19 | Privacy Policy 20 | -------------------------------------------------------------------------------- /v2/doc/doc.md: -------------------------------------------------------------------------------- 1 | Sure! Here is the complete documentation of the script in Markdown format: 2 | 3 | markdown 4 | 5 | # Backup Script Documentation 6 | 7 | This script allows you to create, verify, and manage backups using `dar` (Disk ARchive). It supports full and differential backups based on configuration snippets. Below you will find detailed documentation on the available options and how they interact. 8 | 9 | ## Index 10 | 11 | 1. [Configuration](#configuration) 12 | 2. [Usage](#usage) 13 | 3. [Options](#options) 14 | 4. [Examples](#examples) 15 | 16 | ## Configuration 17 | 18 | Before using the script, ensure that the configuration file (`backup_script.conf`) is correctly set up. 
This configuration file should contain the following variables: 19 | 20 | ```ini 21 | [DEFAULT] 22 | LOGFILE_LOCATION=/tmp/backup_script.log 23 | BACKUP_DIR=/tmp 24 | TEST_RESTORE_DIR=/tmp/restore-test 25 | BACKUP.D=/path/to/backup/configs 26 | ``` 27 | - LOGFILE_LOCATION: The location of the log file. 28 | - BACKUP_DIR: The directory where backup archives will be stored. 29 | - TEST_RESTORE_DIR: The directory where test restores will be performed. 30 | - BACKUP.D: The directory containing the configuration snippets. 31 | 32 | ## Usage 33 | 34 | The script can be run from the command line with various options to perform different tasks. Below are the available options and their descriptions. 35 | 36 | ### Options 37 | -d, --backup-definition 38 | Specifies the config snippet to use. The script will look for this config snippet in the directory specified by BACKUP.D in the configuration file. 39 | 40 | --list 41 | Lists available backups. By default, it lists both full and differential backups. 42 | 43 | --restore 44 | Restores a specific backup file. You can also specify the directory to restore files to using --restore-dir. 45 | 46 | --restore-dir 47 | Specifies the directory to restore files to. If not provided, the directory specified by TEST_RESTORE_DIR in the configuration file will be used. 48 | 49 | --selection 50 | Specifies selection criteria for restoring specific files. 51 | 52 | 53 | --list-contents 54 | Lists the contents of a specific backup file. 55 | 56 | --differential-backup 57 | Performs a differential backup. If -d is specified, it performs a differential backup for that specific config snippet. Otherwise, it performs differential backups for all config snippets in the BACKUP.D directory. 58 | 59 | ## Examples 60 | ### List Available Backups 61 | 62 | ./backup_script.py --list 63 | 64 | ### Perform a Full Backup 65 | 66 | ./backup_script.py -d example_config_snippet 67 | 68 | ### Perform a Differential Backup 69 | 70 | ./backup_script.py --differential-backup -d example_config_snippet 71 | 72 | ### Restore a Specific Backup 73 | 74 | ./backup_script.py --restore example_FULL_2023-06-23 --restore-dir /path/to/restore/dir 75 | 76 | ### List Contents of a Specific Backup 77 | 78 | ./backup_script.py --list-contents example_FULL_2023-06-23 79 | 80 | ## How Options Interact 81 | 82 | Full Backup: If you specify a backup definition with -d without the --differential-backup option, the script performs a full backup using the specified config snippet. 83 | 84 | 85 | Differential Backup: If you specify the --differential-backup option with -d, the script performs a differential backup based on the latest full backup for the specified config snippet. If no full backup is found, it logs an error and skips the differential backup. 86 | 87 | List Backups: The --list option lists all available backups, both full and differential. If combined with -d, it lists backups only for the specified config snippet. 88 | 89 | Restore Backup: The --restore option restores a specified backup. If combined with --restore-dir, it restores the backup to the specified directory. 90 | 91 | List Contents: The --list-contents option lists the contents of a specified backup file. It can be combined with --selection to list specific contents based on selection criteria. 92 | 93 | This script is designed to provide a flexible and powerful way to manage your backups using dar. By understanding how these options interact, you can tailor the script to meet your specific backup and restore needs.
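A sketch of how these options combine over one backup cycle; the config snippet name, archive date and restore path below are placeholders taken from the examples above:

```bash
# Full backup for one config snippet
./backup_script.py -d example_config_snippet

# Later: differential backup against the latest FULL for the same snippet
./backup_script.py --differential-backup -d example_config_snippet

# Find an archive, inspect its contents, then restore it to a scratch directory
./backup_script.py --list
./backup_script.py --list-contents example_FULL_2023-06-23
./backup_script.py --restore example_FULL_2023-06-23 --restore-dir /tmp/restore-check
```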
--------------------------------------------------------------------------------