├── docs ├── .sphinx │ ├── version │ ├── pa11y.json │ ├── .pymarkdown.json │ ├── metrics │ │ ├── source_metrics.sh │ │ └── build_metrics.py │ └── get_vale_conf.py ├── .custom_wordlist.txt ├── reuse │ ├── substitutions.yaml │ ├── substitutions.txt │ └── links.txt ├── reference │ ├── actions.md │ ├── index.md │ ├── configurations.md │ ├── integrations.md │ ├── backup-integrator-charm-architecture.md │ ├── bacula-fd-charm-architecture.md │ └── backup-server-charm-architecture.md ├── explanation │ ├── index.md │ └── security.md ├── .gitignore ├── changelog.md ├── how-to │ ├── index.md │ ├── upgrade.md │ ├── contribution.md │ ├── integrate-with-backup-integrator-charm.md │ └── use-baculum.md ├── requirements.txt ├── _static │ └── cookie-banner.css ├── _templates │ ├── header.html │ └── footer.html ├── index.md ├── tutorial.md └── Makefile ├── CODEOWNERS ├── .jujuignore ├── trivy.yaml ├── backup_integrator_operator ├── requirements.txt ├── src │ ├── __init__.py │ ├── __main__.py │ └── charm.py ├── scripts │ └── entrypoint └── charmcraft.yaml ├── bacula_fd_operator ├── requirements.txt ├── src │ ├── __init__.py │ ├── __main__.py │ ├── templates │ │ └── bacula-fd.conf.j2 │ ├── bacula.py │ ├── bacula_relation.py │ └── charm.py ├── scripts │ ├── entrypoint │ └── noop ├── charmcraft.yaml └── lib │ └── charms │ └── operator_libs_linux │ └── v0 │ └── systemd.py ├── .vale └── styles │ └── config │ └── vocabularies │ └── local │ └── accept.txt ├── bacula_server_operator ├── requirements.txt ├── src │ ├── __init__.py │ ├── templates │ │ ├── baculum-web-hosts.conf.j2 │ │ ├── bconsole.conf.j2 │ │ ├── bacula-fd.conf.j2 │ │ ├── baculum-web-settings.conf.j2 │ │ ├── baculum-api.conf.j2 │ │ ├── bacula-sd.conf.j2 │ │ └── bacula-dir.conf.j2 │ ├── __main__.py │ └── bacula_relation.py ├── scripts │ └── entrypoint └── charmcraft.yaml ├── terraform ├── .gitignore ├── .tflint.hcl ├── tests │ ├── .tflint.hcl │ ├── main.tftest.hcl │ └── main.tf ├── versions.tf ├── outputs.tf ├── main.tf ├── 
variables.tf └── README.md ├── tests ├── unit │ ├── __init__.py │ ├── bacula_fd │ │ ├── __init__.py │ │ ├── conftest.py │ │ └── test_charm.py │ ├── bacula_server │ │ ├── __init__.py │ │ └── conftest.py │ └── backup_integrator │ │ ├── __init__.py │ │ ├── conftest.py │ │ └── test_charm.py ├── integration │ ├── __init__.py │ ├── test_charm.py │ ├── baculum.py │ └── conftest.py └── conftest.py ├── .gitignore ├── charmed_bacula_server └── snap │ └── hooks │ └── configure ├── .github ├── workflows │ ├── bot_pr_approval.yaml │ ├── auto_update_libs.yaml │ ├── comment.yaml │ ├── issues.yaml │ ├── docs_rtd.yaml │ ├── docs.yaml │ ├── cla-check.yml │ ├── promote_charm.yaml │ ├── close_stale.yaml │ ├── publish_snap.yaml │ ├── test.yaml │ ├── test_terraform_files.yaml │ ├── publish_charms.yaml │ └── test_terraform_module.yaml ├── .jira_sync_config.yaml ├── ISSUE_TEMPLATE │ ├── enhancement_proposal.yml │ └── bug_report.yml └── pull_request_template.yaml ├── .vale.ini ├── .licenserc.yaml ├── .readthedocs.yaml ├── renovate.json ├── pyproject.toml ├── README.md ├── tox.ini └── CONTRIBUTING.md /docs/.sphinx/version: -------------------------------------------------------------------------------- 1 | 1.3.0 2 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @canonical/platform-engineering-apac 2 | -------------------------------------------------------------------------------- /.jujuignore: -------------------------------------------------------------------------------- 1 | /venv 2 | *.py[cod] 3 | *.charm 4 | /.github 5 | -------------------------------------------------------------------------------- /trivy.yaml: -------------------------------------------------------------------------------- 1 | timeout: 20m 2 | scan: 3 | offline-scan: true 4 | -------------------------------------------------------------------------------- 
/backup_integrator_operator/requirements.txt: -------------------------------------------------------------------------------- 1 | ops==3.5.0 2 | pydantic==2.12.5 3 | -------------------------------------------------------------------------------- /bacula_fd_operator/requirements.txt: -------------------------------------------------------------------------------- 1 | ops==3.5.0 2 | pydantic==2.12.5 3 | jinja2==3.1.6 4 | -------------------------------------------------------------------------------- /docs/.custom_wordlist.txt: -------------------------------------------------------------------------------- 1 | Bacula 2 | Baculum 3 | minio 4 | iptables 5 | sudo 6 | conntrack -------------------------------------------------------------------------------- /.vale/styles/config/vocabularies/local/accept.txt: -------------------------------------------------------------------------------- 1 | Bacula 2 | Baculum 3 | minio 4 | iptables 5 | -------------------------------------------------------------------------------- /bacula_server_operator/requirements.txt: -------------------------------------------------------------------------------- 1 | ops==3.5.0 2 | jinja2==3.1.6 3 | requests==2.32.5 4 | pydantic==2.12.5 5 | -------------------------------------------------------------------------------- /terraform/.gitignore: -------------------------------------------------------------------------------- 1 | *.json 2 | **/.terraform/* 3 | *.tfstate 4 | *.tfstate.* 5 | .terraform.lock.hcl 6 | terraform.auto.tfvars 7 | providers.tf 8 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """Backup charm unit tests.""" 5 | -------------------------------------------------------------------------------- /bacula_fd_operator/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """bacula-fd charm module.""" 5 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """Backup charm integration tests.""" 5 | -------------------------------------------------------------------------------- /tests/unit/bacula_fd/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """bacula-fd charm unit tests.""" 5 | -------------------------------------------------------------------------------- /bacula_server_operator/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """bacula-server charm module.""" 5 | -------------------------------------------------------------------------------- /tests/unit/bacula_server/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """bacula-server charm unit tests.""" 5 | -------------------------------------------------------------------------------- /backup_integrator_operator/src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """backup-integrator charm module.""" 5 | -------------------------------------------------------------------------------- /bacula_fd_operator/scripts/entrypoint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2025 Canonical Ltd. 3 | # See LICENSE file for licensing details. 4 | 5 | exec python3 -m src 6 | -------------------------------------------------------------------------------- /tests/unit/backup_integrator/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """backup-integrator charm unit tests.""" 5 | -------------------------------------------------------------------------------- /bacula_server_operator/scripts/entrypoint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2025 Canonical Ltd. 3 | # See LICENSE file for licensing details. 4 | 5 | exec python3 -m src 6 | -------------------------------------------------------------------------------- /terraform/.tflint.hcl: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | rule "terraform_required_version" { 5 | enabled = false 6 | } 7 | -------------------------------------------------------------------------------- /terraform/tests/.tflint.hcl: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | rule "terraform_required_version" { 5 | enabled = false 6 | } 7 | -------------------------------------------------------------------------------- /backup_integrator_operator/scripts/entrypoint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2025 Canonical Ltd. 3 | # See LICENSE file for licensing details. 4 | 5 | 6 | exec python3 -m src 7 | -------------------------------------------------------------------------------- /docs/.sphinx/pa11y.json: -------------------------------------------------------------------------------- 1 | { 2 | "chromeLaunchConfig": { 3 | "args": [ 4 | "--no-sandbox" 5 | ] 6 | }, 7 | "reporter": "cli", 8 | "standard": "WCAG2AA" 9 | } 10 | -------------------------------------------------------------------------------- /bacula_fd_operator/scripts/noop: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2025 Canonical Ltd. 4 | # See LICENSE file for licensing details. 5 | 6 | """A placeholder Python script that does nothing.""" 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv/ 2 | build/ 3 | *.charm 4 | *.snap 5 | .tox/ 6 | .coverage 7 | __pycache__/ 8 | *.py[cod] 9 | .idea 10 | .vscode 11 | .mypy_cache 12 | *.egg-info/ 13 | */*.rock 14 | terraform/tests/.terraform 15 | .logs 16 | -------------------------------------------------------------------------------- /docs/reuse/substitutions.yaml: -------------------------------------------------------------------------------- 1 | # Key/value substitutions to use within the Sphinx doc. 
2 | {version_number: "0.1.0", 3 | formatted_text: "*Multi-line* text\n that uses basic **markup**.", 4 | site_link: "[Website link](https://example.com)"} -------------------------------------------------------------------------------- /charmed_bacula_server/snap/hooks/configure: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2025 Canonical Ltd. 4 | # See LICENSE file for licensing details. 5 | 6 | set -e 7 | 8 | mkdir -p "$SNAP_DATA/apache2/run/" "$SNAP_DATA/apache2/lock" "$SNAP_DATA/apache2/log" 9 | -------------------------------------------------------------------------------- /docs/reuse/substitutions.txt: -------------------------------------------------------------------------------- 1 | .. |version_number| replace:: 0.1.0 2 | 3 | .. |rest_text| replace:: *Multi-line* text 4 | that uses basic **markup**. 5 | 6 | .. |site_link| replace:: Website link 7 | .. _site_link: https://example.com -------------------------------------------------------------------------------- /docs/reference/actions.md: -------------------------------------------------------------------------------- 1 | (reference_actions)= 2 | 3 | # Actions 4 | 5 | ## Bacula server charm 6 | 7 | See [Actions](https://charmhub.io/bacula-server/actions). 8 | 9 | ```{note} 10 | Read more about actions in the Juju docs: {ref}`juju:action` 11 | ``` -------------------------------------------------------------------------------- /terraform/versions.tf: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | terraform { 5 | required_providers { 6 | juju = { 7 | source = "juju/juju" 8 | version = ">= 0.20.0" 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /.github/workflows/bot_pr_approval.yaml: -------------------------------------------------------------------------------- 1 | name: Provide approval for bot PRs 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | bot_pr_approval: 8 | uses: canonical/operator-workflows/.github/workflows/bot_pr_approval.yaml@main 9 | secrets: inherit 10 | 11 | -------------------------------------------------------------------------------- /.github/workflows/auto_update_libs.yaml: -------------------------------------------------------------------------------- 1 | name: Auto-update charm libraries 2 | 3 | on: 4 | schedule: 5 | - cron: "0 1 * * *" 6 | 7 | jobs: 8 | auto-update-libs: 9 | uses: canonical/operator-workflows/.github/workflows/auto_update_charm_libs.yaml@main 10 | secrets: inherit 11 | -------------------------------------------------------------------------------- /.github/workflows/comment.yaml: -------------------------------------------------------------------------------- 1 | name: Comment on the pull request 2 | 3 | on: 4 | workflow_run: 5 | workflows: ["Tests"] 6 | types: 7 | - completed 8 | 9 | jobs: 10 | comment-on-pr: 11 | uses: canonical/operator-workflows/.github/workflows/comment.yaml@main 12 | secrets: inherit 13 | -------------------------------------------------------------------------------- /.github/workflows/issues.yaml: -------------------------------------------------------------------------------- 1 | name: Sync issues to Jira 2 | 3 | on: 4 | issues: 5 | # available via github.event.action 6 | types: [opened, reopened, closed] 7 | 8 | jobs: 9 | issues-to-jira: 10 | uses: canonical/operator-workflows/.github/workflows/jira.yaml@main 11 | secrets: inherit 12 | -------------------------------------------------------------------------------- 
/.github/workflows/docs_rtd.yaml: -------------------------------------------------------------------------------- 1 | name: RTD workflows 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | rtd-docs-checks: 8 | uses: canonical/operator-workflows/.github/workflows/docs_rtd.yaml@main 9 | secrets: inherit 10 | with: 11 | enable-sphinx-python-dependency-build-checks: false 12 | 13 | -------------------------------------------------------------------------------- /.vale.ini: -------------------------------------------------------------------------------- 1 | ; Copyright 2025 Canonical Ltd. 2 | ; See LICENSE file for licensing details. 3 | 4 | StylesPath = .vale/styles 5 | 6 | Packages = https://github.com/canonical/platform-engineering-vale-package/releases/download/latest/pfe-vale.zip 7 | 8 | Vocab = PFE, local 9 | 10 | [*] 11 | BasedOnStyles = PFE 12 | 13 | -------------------------------------------------------------------------------- /terraform/outputs.tf: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | output "app_name" { 5 | description = "Name of the deployed application." 
6 | value = juju_application.charm_name.name 7 | } 8 | 9 | output "endpoints" { 10 | value = { 11 | ingress = "ingress" 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/baculum-web-hosts.conf.j2: -------------------------------------------------------------------------------- 1 | [Main] 2 | auth_type = "basic" 3 | login = "{{ baculum_api.username }}" 4 | password = "{{ baculum_api.password }}" 5 | client_id = "" 6 | client_secret = "" 7 | redirect_uri = "" 8 | scope = "" 9 | protocol = "http" 10 | address = "localhost" 11 | port = "9096" 12 | url_prefix = "" 13 | -------------------------------------------------------------------------------- /bacula_fd_operator/src/__main__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """bacula-fd charm module entrypoint.""" 5 | 6 | # suppress pylint false positive no-member warning 7 | # pylint: disable=no-member 8 | 9 | import ops 10 | 11 | from . import charm 12 | 13 | ops.main.main(charm.BaculaFdCharm) 14 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """Fixtures for charm tests.""" 5 | 6 | 7 | def pytest_addoption(parser): 8 | """Parse additional pytest options. 9 | 10 | Args: 11 | parser: Pytest parser. 12 | """ 13 | parser.addoption("--charm-file", action="append") 14 | -------------------------------------------------------------------------------- /bacula_server_operator/src/__main__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """bacula-server charm module entrypoint.""" 5 | 6 | # suppress pylint false positive no-member warning 7 | # pylint: disable=no-member 8 | 9 | import ops 10 | 11 | from . import charm 12 | 13 | ops.main.main(charm.BaculaServerCharm) 14 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/bconsole.conf.j2: -------------------------------------------------------------------------------- 1 | # 2 | # Bacula User Agent (or Console) Configuration File 3 | # 4 | # Copyright (C) 2000-2022 Kern Sibbald 5 | # License: BSD 2-Clause; see file LICENSE-FOSS 6 | # 7 | 8 | Director { 9 | Name = charm-bacula-dir 10 | DIRport = 9101 11 | address = localhost 12 | Password = "{{ bacula.dir_password }}" 13 | } 14 | -------------------------------------------------------------------------------- /backup_integrator_operator/src/__main__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """backup-integrator charm module entrypoint.""" 5 | 6 | # suppress pylint false positive no-member warning 7 | # pylint: disable=no-member 8 | 9 | import ops 10 | 11 | from . 
import charm 12 | 13 | ops.main.main(charm.BackupIntegratorCharm) 14 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | 8 | jobs: 9 | docs-checks: 10 | uses: canonical/operator-workflows/.github/workflows/docs.yaml@main 11 | secrets: inherit 12 | with: 13 | vale-files: '["README.md", "CONTRIBUTING.md"]' 14 | linkcheck-files: "README.md CONTRIBUTING.md" 15 | -------------------------------------------------------------------------------- /docs/explanation/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description lang=en": "Explanation material explaining key concepts about the backup charms." 5 | --- 6 | 7 | (explanation)= 8 | 9 | # Explanation 10 | 11 | The following guides cover key processes and common tasks for managing and using backup charms. 
12 | 13 | ```{toctree} 14 | :maxdepth: 1 15 | security.md 16 | ``` 17 | -------------------------------------------------------------------------------- /.github/.jira_sync_config.yaml: -------------------------------------------------------------------------------- 1 | # See https://github.com/canonical/gh-jira-sync-bot for config 2 | settings: 3 | jira_project_key: "ISD" 4 | 5 | status_mapping: 6 | opened: Untriaged 7 | closed: done 8 | not_planned: rejected 9 | 10 | add_gh_comment: true 11 | 12 | epic_key: ISD-3981 13 | 14 | label_mapping: 15 | bug: Bug 16 | enhancement: Story 17 | -------------------------------------------------------------------------------- /.github/workflows/cla-check.yml: -------------------------------------------------------------------------------- 1 | # This workflow checks if the contributor has signed the Canonical Contributor Licence Agreement (CLA) 2 | name: Canonical Contributor Licence Agreement check 3 | 4 | on: 5 | pull_request: 6 | branches: [main] 7 | 8 | jobs: 9 | cla-check: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check if CLA signed 13 | uses: canonical/has-signed-canonical-cla@v2 14 | -------------------------------------------------------------------------------- /terraform/tests/main.tftest.hcl: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | variables { 5 | channel = "latest/edge" 6 | # renovate: depName="charm_name" 7 | revision = 1 8 | } 9 | 10 | run "basic_deploy" { 11 | assert { 12 | condition = module.charm_name.app_name == "charm_name" 13 | error_message = "charm_name app_name did not match expected" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | # Environment 2 | *env*/ 3 | .sphinx/venv/ 4 | 5 | # Sphinx 6 | .sphinx/warnings.txt 7 | .sphinx/.wordlist.dic 8 | .sphinx/.doctrees/ 9 | .sphinx/update/ 10 | .sphinx/node_modules/ 11 | 12 | # Vale 13 | .sphinx/styles/* 14 | .sphinx/vale.ini 15 | 16 | # Build outputs 17 | _build 18 | 19 | # Node.js 20 | package*.json 21 | 22 | # Unrelated cache and config files 23 | .DS_Store 24 | __pycache__ 25 | .idea/ 26 | .vscode/ 27 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | (changelog)= 2 | 3 | # Changelog 4 | 5 | All notable changes to this project will be documented in this file. 6 | 7 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 8 | 9 | Each revision is versioned by the date of the revision. 10 | 11 | ## 2025-12-17 12 | 13 | * Moved charm architecture documentation from Explanation to Reference category. 14 | 15 | ## 2025-12-09 16 | 17 | * Added upgrade documentation. 18 | -------------------------------------------------------------------------------- /docs/reference/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description lang=en": "Technical information related to backup charms." 5 | --- 6 | 7 | (reference)= 8 | 9 | # Reference 10 | 11 | The pages in this section contain technical information for topics relevant to backup charms. 
12 | 13 | ```{toctree} 14 | :maxdepth: 1 15 | actions.md 16 | backup-integrator-charm-architecture.md 17 | backup-server-charm-architecture.md 18 | bacula-fd-charm-architecture.md 19 | configurations.md 20 | integrations.md 21 | ``` 22 | -------------------------------------------------------------------------------- /terraform/main.tf: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | resource "juju_application" "charm_name" { 5 | name = var.app_name 6 | model = var.model 7 | 8 | charm { 9 | name = "" 10 | channel = var.channel 11 | revision = var.revision 12 | base = var.base 13 | } 14 | 15 | config = var.config 16 | constraints = var.constraints 17 | units = var.units 18 | storage_directives = var.storage 19 | } 20 | -------------------------------------------------------------------------------- /docs/reference/configurations.md: -------------------------------------------------------------------------------- 1 | (reference_configurations)= 2 | 3 | # Configurations 4 | 5 | ## Bacula server charm 6 | 7 | See [Configurations](https://charmhub.io/bacula-server/configurations). 8 | 9 | ## Bacula-fd charm 10 | 11 | See [Configurations](https://charmhub.io/bacula-fd/configurations). 12 | 13 | ## Backup integrator charm 14 | 15 | See [Configurations](https://charmhub.io/backup-integrator/configurations). 
16 | 17 | ```{note} 18 | Read more about configurations in the Juju docs: {ref}`juju:configuration` 19 | ``` -------------------------------------------------------------------------------- /bacula_fd_operator/src/templates/bacula-fd.conf.j2: -------------------------------------------------------------------------------- 1 | Director { 2 | Name = {{ director_name }} 3 | Password = "{{ director_password }}" 4 | } 5 | 6 | FileDaemon { 7 | Name = {{ name }} 8 | FDport = {{ port }} 9 | WorkingDirectory = /var/lib/bacula 10 | Pid Directory = /run/bacula 11 | Maximum Concurrent Jobs = 20 12 | Plugin Directory = /usr/lib/bacula 13 | FDAddress = {{ host }} 14 | } 15 | 16 | Messages { 17 | Name = Standard 18 | director = {{ director_name }} = all, !skipped, !restored, !verified, !saved 19 | } 20 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/bacula-fd.conf.j2: -------------------------------------------------------------------------------- 1 | Director { 2 | Name = charm-bacula-dir 3 | Password = {{ bacula.fd_password }} 4 | } 5 | 6 | FileDaemon { 7 | Name = charm-bacula-fd 8 | FDport = 9102 9 | WorkingDirectory = /opt/bacula/working 10 | Pid Directory = /opt/bacula/run 11 | Maximum Concurrent Jobs = 20 12 | Plugin Directory = /opt/bacula/plugins 13 | FDAddress = 127.0.0.1 14 | } 15 | 16 | Messages { 17 | Name = Standard 18 | director = client1-dir = all, !skipped, !restored, !verified, !saved 19 | } 20 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/baculum-web-settings.conf.j2: -------------------------------------------------------------------------------- 1 | [baculum] 2 | debug = "0" 3 | lang = "en" 4 | max_jobs = "15000" 5 | size_values_unit = "decimal" 6 | time_in_job_log = "0" 7 | date_time_format = "Y-M-D R" 8 | enable_messages_log = "1" 9 | 10 | [auth_basic] 11 | allow_manage_users = "1" 12 | user_file = 
"/usr/share/baculum/htdocs/protected/Web/Config/baculum.users" 13 | hash_alg = "apr-md5" 14 | 15 | [security] 16 | auth_method = "basic" 17 | def_access = "default_settings" 18 | def_role = "normal" 19 | def_api_host = "Main" 20 | -------------------------------------------------------------------------------- /docs/how-to/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description lang=en": "How-to guides covering backup charm operations lifecycle." 5 | --- 6 | 7 | (how_to)= 8 | 9 | # How-to guides 10 | 11 | The following guides cover key processes and common tasks for managing and using the backup charms. 12 | 13 | ```{toctree} 14 | :maxdepth: 1 15 | Contribute 16 | Integrate with the backup integrator 17 | Use the Baculum web interface 18 | Upgrade 19 | ``` 20 | -------------------------------------------------------------------------------- /tests/unit/backup_integrator/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """Fixtures for charm tests.""" 5 | 6 | import pytest 7 | 8 | import backup_integrator_operator.src.charm 9 | 10 | 11 | @pytest.fixture(autouse=True) 12 | def backup_integrator_charm(monkeypatch, tmp_path): 13 | """Patch _CHARM_OPT_DIR in BackupIntegratorCharm.""" 14 | monkeypatch.setattr( 15 | backup_integrator_operator.src.charm.BackupIntegratorCharm, "_CHARM_OPT_DIR", tmp_path 16 | ) 17 | return backup_integrator_operator.src.charm.BackupIntegratorCharm 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/enhancement_proposal.yml: -------------------------------------------------------------------------------- 1 | name: Enhancement Proposal 2 | description: File an enhancement proposal 3 | labels: ["enhancement"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: > 8 | Thanks for taking the time to fill out this enhancement proposal! Before submitting your issue, please make 9 | sure there isn't already a prior issue concerning this. If there is, please join that discussion instead. 10 | - type: textarea 11 | id: enhancement-proposal 12 | attributes: 13 | label: Enhancement Proposal 14 | description: > 15 | Describe the enhancement you would like to see in as much detail as needed. 
16 | validations: 17 | required: true 18 | -------------------------------------------------------------------------------- /.github/workflows/promote_charm.yaml: -------------------------------------------------------------------------------- 1 | name: Promote charm 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | origin-channel: 7 | type: choice 8 | description: 'Origin Channel' 9 | options: 10 | - latest/edge 11 | destination-channel: 12 | type: choice 13 | description: 'Destination Channel' 14 | options: 15 | - latest/stable 16 | secrets: 17 | CHARMHUB_TOKEN: 18 | required: true 19 | 20 | jobs: 21 | promote-charm: 22 | uses: canonical/operator-workflows/.github/workflows/promote_charm.yaml@main 23 | with: 24 | origin-channel: ${{ github.event.inputs.origin-channel }} 25 | destination-channel: ${{ github.event.inputs.destination-channel }} 26 | secrets: inherit 27 | -------------------------------------------------------------------------------- /.github/workflows/close_stale.yaml: -------------------------------------------------------------------------------- 1 | name: 'Close stale issues and PRs' 2 | on: 3 | schedule: 4 | - cron: '0 0 * * *' 5 | 6 | jobs: 7 | stale: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | actions: write 11 | contents: write 12 | issues: write 13 | pull-requests: write 14 | steps: 15 | - uses: actions/stale@v10.1.1 16 | with: 17 | stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 14 days.' 18 | stale-pr-message: 'This PR is stale because it has been open 90 days with no activity. Remove stale label or comment or this will be closed in 14 days.' 19 | days-before-stale: 90 20 | days-before-close: 14 21 | -------------------------------------------------------------------------------- /terraform/tests/main.tf: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 
2 | # See LICENSE file for licensing details. 3 | 4 | variable "channel" { 5 | description = "The channel to use when deploying a charm." 6 | type = string 7 | default = "latest/edge" 8 | } 9 | 10 | variable "revision" { 11 | description = "Revision number of the charm." 12 | type = number 13 | default = null 14 | } 15 | 16 | terraform { 17 | required_providers { 18 | juju = { 19 | version = "~> 0.20.0" 20 | source = "juju/juju" 21 | } 22 | } 23 | } 24 | 25 | provider "juju" {} 26 | 27 | module "charm_name" { 28 | source = "./.." 29 | app_name = "charm_name" 30 | channel = var.channel 31 | model = "prod-charm_name-example" 32 | revision = var.revision 33 | } 34 | -------------------------------------------------------------------------------- /.licenserc.yaml: -------------------------------------------------------------------------------- 1 | header: 2 | license: 3 | spdx-id: Apache-2.0 4 | copyright-owner: Canonical Ltd. 5 | content: | 6 | Copyright [year] [owner] 7 | See LICENSE file for licensing details. 8 | pattern: | 9 | Copyright \d{4} Canonical Ltd. 10 | See LICENSE file for licensing details. 
11 | paths: 12 | - '**' 13 | paths-ignore: 14 | - '.github/**' 15 | - '**/*.json' 16 | - '**/*.md' 17 | - '**/*.txt' 18 | - '.jujuignore' 19 | - '.gitignore' 20 | - '.licenserc.yaml' 21 | - 'CODEOWNERS' 22 | - 'LICENSE' 23 | - 'trivy.yaml' 24 | - 'pyproject.toml' 25 | - 'zap_rules.tsv' 26 | - '**/*.md.j2' 27 | - '**/lib/**' 28 | - '**/templates/**' 29 | - 'charmed_bacula_server/snap/hooks/**' 30 | - '.readthedocs.yaml' 31 | - 'docs/**' 32 | comment: on-failure 33 | -------------------------------------------------------------------------------- /docs/explanation/security.md: -------------------------------------------------------------------------------- 1 | (explanation_security)= 2 | 3 | # Security overview 4 | 5 | ## Bacula charms 6 | 7 | Please refer to the [Bacula security document](https://www.bacula.org/15.0.x-manuals/en/main/Bacula_Security_Issues.html) 8 | on Bacula security issues. 9 | 10 | In the current version of the Bacula charms (bacula-server, bacula-fd), 11 | the following non-default protections are omitted: 12 | 13 | * bacula-dir and bacula-sd run as root. 14 | * The internal firewall and TCP wrappers are not enabled. 15 | * TLS is not enabled, meaning transmission between bacula-fd and bacula-server 16 | is unencrypted. 17 | * Volume encryption is disabled; backups stored in S3 are unencrypted. 18 | 19 | ## Backup integrator charm 20 | 21 | The backup integrator charm is a workload-less subordinate 22 | charm. There are no security vulnerabilities beyond {ref}`Juju's intrinsic ones `. 
23 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # Canonical theme (still needed for Furo theme and custom templates) 2 | canonical-sphinx>=0.5.1 3 | 4 | # Extensions previously auto-loaded by canonical-sphinx 5 | myst-parser 6 | sphinx-autobuild 7 | sphinx-design 8 | sphinx-notfound-page 9 | sphinx-reredirects 10 | sphinx-tabs 11 | sphinxcontrib-jquery 12 | sphinxext-opengraph 13 | 14 | # Extra extensions, previously bundled as canonical-sphinx-extensions 15 | sphinx-config-options>=0.1.0 16 | sphinx-contributor-listing>=0.1.0 17 | sphinx-filtered-toctree>=0.1.0 18 | sphinx-related-links>=0.1.1 19 | sphinx-roles>=0.1.0 20 | sphinx-terminal>=1.0.2 21 | sphinx-ubuntu-images>=0.1.0 22 | sphinx-youtube-links>=0.1.0 23 | 24 | # Other dependencies 25 | packaging 26 | sphinxcontrib-svg2pdfconverter[CairoSVG] 27 | sphinx-last-updated-by-git 28 | sphinx-sitemap 29 | 30 | # Vale dependencies 31 | rst2html 32 | vale 33 | sphinxcontrib-mermaid 34 | -------------------------------------------------------------------------------- /.github/workflows/publish_snap.yaml: -------------------------------------------------------------------------------- 1 | name: Publish snap 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - main 8 | paths: 9 | - charmed-bacula-server/** 10 | 11 | jobs: 12 | build: 13 | name: Build charmed-bacula-server snap 14 | uses: canonical/data-platform-workflows/.github/workflows/build_snap.yaml@v40.0.2 15 | with: 16 | path-to-snap-project-directory: ./charmed_bacula_server 17 | 18 | release: 19 | name: Release charmed-bacula-server snap 20 | needs: 21 | - build 22 | uses: canonical/data-platform-workflows/.github/workflows/release_snap.yaml@v40.0.2 23 | with: 24 | path-to-snap-project-directory: ./charmed_bacula_server 25 | channel: latest/edge 26 | artifact-prefix: ${{ 
needs.build.outputs.artifact-prefix }} 27 | secrets: 28 | snap-store-token: ${{ secrets.SNAPSTORE_TOKEN }} 29 | permissions: 30 | contents: write # Needed to create git tags 31 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/baculum-api.conf.j2: -------------------------------------------------------------------------------- 1 | [api] 2 | auth_type = "basic" 3 | debug = "0" 4 | lang = "en" 5 | 6 | [db] 7 | enabled = "1" 8 | type = "pgsql" 9 | name = "{{ db.name }}" 10 | login = "{{ db.username }}" 11 | password = "{{ db.password }}" 12 | ip_addr = "{{ db.host }}" 13 | port = "{{ db.port }}" 14 | path = "" 15 | 16 | [bconsole] 17 | enabled = "1" 18 | bin_path = "/opt/bacula/bin/bconsole" 19 | cfg_path = "/opt/bacula/etc/bconsole.conf" 20 | use_sudo = "0" 21 | interpret_bacula_errors = "1" 22 | 23 | [jsontools] 24 | enabled = "1" 25 | use_sudo = "0" 26 | bconfig_dir = "/etc/baculum/Config-api-cache" 27 | bdirjson_path = "/opt/bacula/bin/bdirjson" 28 | dir_cfg_path = "/opt/bacula/etc/bacula-dir.conf" 29 | bsdjson_path = "/opt/bacula/bin/bsdjson" 30 | sd_cfg_path = "/opt/bacula/etc/bacula-sd.conf" 31 | bfdjson_path = "/opt/bacula/bin/bfdjson" 32 | fd_cfg_path = "/opt/bacula/etc/bacula-fd.conf" 33 | bbconsjson_path = "/opt/bacula/bin/bbconsjson" 34 | bcons_cfg_path = "/opt/bacula/etc/bconsole.conf" 35 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | pull_request: 5 | schedule: 6 | - cron: "0 15 * * SAT" 7 | 8 | jobs: 9 | unit-tests: 10 | uses: canonical/operator-workflows/.github/workflows/test.yaml@main 11 | secrets: inherit 12 | with: 13 | self-hosted-runner: true 14 | self-hosted-runner-label: "edge" 15 | integration-tests: 16 | uses: 17 | canonical/operator-workflows/.github/workflows/integration_test.yaml@main 18 | 
secrets: inherit 19 | with: 20 | juju-channel: '3/stable' 21 | provider: 'lxd' 22 | snap-tests: 23 | name: Build charmed-bacula-server snap 24 | uses: 25 | canonical/data-platform-workflows/.github/workflows/build_snap.yaml@v36.0.1 26 | with: 27 | path-to-snap-project-directory: ./charmed_bacula_server 28 | allure-report: 29 | if: ${{ !cancelled() && github.event_name == 'schedule' }} 30 | needs: 31 | - integration-tests 32 | uses: canonical/operator-workflows/.github/workflows/allure_report.yaml@main 33 | -------------------------------------------------------------------------------- /tests/unit/bacula_fd/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """Fixtures for charm tests.""" 5 | 6 | import pytest 7 | 8 | import bacula_fd_operator.src.bacula 9 | import bacula_fd_operator.src.charm 10 | 11 | 12 | @pytest.fixture(autouse=True) 13 | def bacula_fd_charm(monkeypatch, tmp_path): 14 | """Patch the BaculaFdCharm.""" 15 | is_installed = False 16 | 17 | def _install(): 18 | """Mock installation function""" 19 | nonlocal is_installed 20 | is_installed = True 21 | 22 | monkeypatch.setattr(bacula_fd_operator.src.bacula, "is_installed", lambda: is_installed) 23 | monkeypatch.setattr(bacula_fd_operator.src.bacula, "install", _install) 24 | monkeypatch.setattr(bacula_fd_operator.src.bacula, "restart", lambda: None) 25 | monkeypatch.setattr( 26 | bacula_fd_operator.src.bacula, 27 | "BACULA_FD_CONFIG_FILE", 28 | tmp_path / "bacula-fd.conf", 29 | ) 30 | 31 | return bacula_fd_operator.src.charm.BaculaFdCharm 32 | -------------------------------------------------------------------------------- /.github/workflows/test_terraform_files.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | name: Terraform files test 5 | 6 | on: 7 | pull_request: 8 | paths: 9 | - 'terraform/**' 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | validate: 16 | name: Validate terraform configuration files 17 | runs-on: ubuntu-latest 18 | env: 19 | WORKING_DIR: 'terraform' 20 | steps: 21 | - name: Check out code 22 | uses: actions/checkout@v6.0.1 23 | 24 | - name: Setup Terraform 25 | uses: hashicorp/setup-terraform@v3.1.2 26 | 27 | - name: Run terraform fmt 28 | run: terraform fmt -check -recursive 29 | working-directory: ${{env.WORKING_DIR}} 30 | 31 | - name: Setup Tflint 32 | uses: terraform-linters/setup-tflint@v6.2.1 33 | with: 34 | tflint_wrapper_enabled: true 35 | 36 | - name: Run tflint 37 | run: | 38 | tflint --version 39 | tflint --init 40 | tflint -f compact --recursive 41 | working-directory: ${{env.WORKING_DIR}} 42 | -------------------------------------------------------------------------------- /docs/.sphinx/.pymarkdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": { 3 | "selectively_enable_rules": true, 4 | "heading-style": { 5 | "enabled": true, 6 | "style": "atx" 7 | }, 8 | "commands-show-output": { 9 | "enabled": true 10 | }, 11 | "no-missing-space-atx": { 12 | "enabled": true 13 | }, 14 | "blanks-around-headings": { 15 | "enabled": true 16 | }, 17 | "heading-start-left": { 18 | "enabled": true 19 | }, 20 | "no-trailing-punctuation": { 21 | "enabled": true, 22 | "punctuation": ".,;。,;" 23 | }, 24 | "blanks-around-fences": { 25 | "enabled": true, 26 | "list_items": false 27 | }, 28 | "blanks-around-lists": { 29 | "enabled": true 30 | }, 31 | "hr-style": { 32 | "enabled": true 33 | }, 34 | "no-empty-links": { 35 | "enabled": true 36 | }, 37 | "no-alt-text": { 38 | "enabled": true 39 | } 40 | }, 41 | "extensions": { 42 | "front-matter" : { 43 | "enabled" : true 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- 
/.github/pull_request_template.yaml: -------------------------------------------------------------------------------- 1 | Applicable spec: 2 | 3 | ### Overview 4 | 5 | 6 | 7 | ### Rationale 8 | 9 | 10 | 11 | ### Juju Events Changes 12 | 13 | 14 | 15 | ### Module Changes 16 | 17 | 18 | 19 | ### Library Changes 20 | 21 | 22 | 23 | ### Checklist 24 | 25 | - [ ] The [charm style guide](https://documentation.ubuntu.com/juju/3.6/reference/charm/charm-development-best-practices/) was applied 26 | - [ ] The [contributing guide](https://github.com/canonical/is-charms-contributing-guide) was applied 27 | - [ ] The changes are compliant with [ISD054 - Managing Charm Complexity](https://discourse.charmhub.io/t/specification-isd014-managing-charm-complexity/11619) 28 | - [ ] The documentation for charmhub is updated 29 | - [ ] The PR is tagged with appropriate label (`urgent`, `trivial`, `senior-review-required`, `documentation`) 30 | - [ ] The `docs/changelog.md` is updated with user-relevant changes. 31 | 32 | 33 | -------------------------------------------------------------------------------- /docs/how-to/upgrade.md: -------------------------------------------------------------------------------- 1 | (how_to_upgrade)= 2 | 3 | # How to upgrade 4 | 5 | ## Upgrade `backup-integrator` and `bacula-fd` 6 | 7 | The `backup-integrator` and `bacula-fd` are both stateless charms, meaning 8 | they don't store persistent data that could be lost during an upgrade. This makes the upgrade process for both the charms straightforward. 9 | 10 | Upgrade the charms with the `refresh` command: 11 | 12 | ```bash 13 | juju refresh backup-integrator 14 | juju refresh bacula-fd 15 | ``` 16 | 17 | ## Upgrade `bacula-server` 18 | 19 | The `bacula-server` is a stateful charm as it maintains persistent data in its 20 | PostgreSQL database. Before upgrading the `bacula-server` charm, you must back up the PostgreSQL database. 
21 | 22 | Follow the [PostgreSQL documentation](https://canonical-charmed-postgresql.readthedocs-hosted.com/14/how-to/back-up-and-restore/create-a-backup/) 23 | for instructions on how to create a backup of the `postgresql` charm. 24 | 25 | After confirming the PostgreSQL backup is complete, upgrade the `bacula-server` charm: 26 | 27 | ```bash 28 | juju refresh bacula-server 29 | ``` 30 | 31 | ## Verify the upgrade 32 | 33 | After upgrading, verify that the charms are functioning correctly with the `juju status` command. 34 | The upgraded charms must be in active and idle state. 35 | 36 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/bacula-sd.conf.j2: -------------------------------------------------------------------------------- 1 | Storage { 2 | Name = charm-bacula-sd 3 | SDPort = 9103 4 | WorkingDirectory = "/opt/bacula/working" 5 | Pid Directory = "/opt/bacula/working" 6 | Plugin Directory = "/opt/bacula/plugins" 7 | Maximum Concurrent Jobs = 20 8 | SDAddress = {{ bacula.sd_address }} 9 | } 10 | 11 | Cloud { 12 | Name = charm-s3-cloud 13 | Driver = S3 14 | HostName = "{{ s3.address }}" 15 | BucketName = "{{ s3.bucket }}" 16 | AccessKey = "{{ s3.access_key }}" 17 | SecretKey = "{{ s3.secret_key }}" 18 | Protocol = {{ s3.protocol }} 19 | UriStyle = {{ s3.uri_style }} 20 | Truncate Cache = AfterUpload 21 | Upload = EachPart 22 | } 23 | 24 | Device { 25 | Name = charm-s3-storage 26 | Media Type = CloudType 27 | Device Type = Cloud 28 | Cloud = charm-s3-cloud 29 | Archive Device = /opt/bacula/archive 30 | Maximum Part Size = 10MB 31 | LabelMedia = yes 32 | Random Access = yes 33 | AutomaticMount = yes 34 | RemovableMedia = no 35 | AlwaysOpen = no 36 | } 37 | 38 | Director { 39 | Name = charm-bacula-dir 40 | Password = "{{ bacula.sd_password }}" 41 | } 42 | 43 | Messages { 44 | Name = Standard 45 | director = charm-bacula-dir = all 46 | } 47 | 
-------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | jobs: 14 | post_checkout: 15 | - git fetch --unshallow || true 16 | # Cancel building pull requests when there are no changes in the docs directory. 17 | # If there are no changes (git diff exits with 0) we force the command to return with 183. 18 | # This is a special exit code on Read the Docs that will cancel the build immediately. 19 | # https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition 20 | - | 21 | if [ "$READTHEDOCS_VERSION_TYPE" = "external" ] && git diff --quiet origin/main -- 'docs/' '.readthedocs.yaml'; 22 | then 23 | exit 183; 24 | fi 25 | 26 | # Build documentation in the docs/ directory with Sphinx 27 | sphinx: 28 | builder: dirhtml 29 | configuration: docs/conf.py 30 | fail_on_warning: true 31 | 32 | # If using Sphinx, optionally build your docs in additional formats such as PDF 33 | formats: 34 | - pdf 35 | 36 | # Optionally declare the Python requirements required to build your docs 37 | python: 38 | install: 39 | - requirements: docs/requirements.txt 40 | -------------------------------------------------------------------------------- /terraform/variables.tf: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | variable "app_name" { 5 | description = "Name of the application in the Juju model."
6 | type = string 7 | default = "" 8 | } 9 | 10 | variable "base" { 11 | description = "The operating system on which to deploy" 12 | type = string 13 | default = "ubuntu@24.04" 14 | } 15 | 16 | variable "channel" { 17 | description = "The channel to use when deploying a charm." 18 | type = string 19 | default = "latest/stable" 20 | } 21 | 22 | variable "config" { 23 | description = "Application config. Details about available options can be found at https://charmhub.io//configurations." 24 | type = map(string) 25 | default = {} 26 | } 27 | 28 | variable "constraints" { 29 | description = "Juju constraints to apply for this application." 30 | type = string 31 | default = "" 32 | } 33 | 34 | variable "model" { 35 | description = "Reference to a `juju_model`." 36 | type = string 37 | } 38 | 39 | variable "revision" { 40 | description = "Revision number of the charm" 41 | type = number 42 | default = null 43 | } 44 | 45 | variable "storage" { 46 | description = "Map of storage used by the application." 47 | type = map(string) 48 | default = {} 49 | } 50 | 51 | variable "units" { 52 | description = "Number of units to deploy" 53 | type = number 54 | default = 1 55 | } 56 | -------------------------------------------------------------------------------- /docs/how-to/contribution.md: -------------------------------------------------------------------------------- 1 | (how_to_contribute)= 2 | 3 | # How to contribute 4 | 5 | ```{note} 6 | See [CONTRIBUTING.md](https://github.com/canonical/backup-operators/blob/main/CONTRIBUTING.md) 7 | for information on contributing to the source code. 8 | ``` 9 | 10 | Our documentation is stored in the `docs` directory alongside the [source code on GitHub](https://github.com/canonical/backup-operators/). 11 | It is based on the Canonical starter pack 12 | and hosted on [Read the Docs](https://about.readthedocs.com/). In structuring, 13 | the documentation employs the [Diátaxis](https://diataxis.fr/) approach. 
14 | 15 | Click on the "Contribute to this page" icon at the top of each page to propose changes. This button 16 | will bring you directly to the source on GitHub. Similarly, you may click on "Give feedback" to provide 17 | suggestions or feedback about any page in the documentation. 18 | 19 | On GitHub, you may open a pull request with your documentation changes, or you can 20 | [file a bug](https://github.com/canonical/backup-operators/issues) to provide constructive feedback or suggestions. 21 | 22 | For syntax help and guidelines, 23 | refer to the 24 | {ref}`Canonical MyST style guide `. 25 | 26 | To run the documentation locally before submitting your changes: 27 | 28 | ```bash 29 | cd docs 30 | make run 31 | ``` 32 | 33 | ## Automatic checks 34 | 35 | GitHub runs automatic checks on the documentation 36 | to verify spelling, validate links and style guide compliance. 37 | 38 | You can (and should) run the same checks locally: 39 | 40 | ```bash 41 | make spelling 42 | make linkcheck 43 | make vale 44 | make lint-md 45 | ``` 46 | -------------------------------------------------------------------------------- /terraform/README.md: -------------------------------------------------------------------------------- 1 | # Backup operator Terraform module 2 | 3 | This folder contains a base [Terraform][Terraform] module for the backup charm. 4 | 5 | The module uses the [Terraform Juju provider][Terraform Juju provider] to model the charm 6 | deployment onto any Kubernetes environment managed by [Juju][Juju]. 7 | 8 | ## Module structure 9 | 10 | - **main.tf** - Defines the Juju application to be deployed. 11 | - **variables.tf** - Allows customization of the deployment. Also models the charm configuration, 12 | except for exposing the deployment options (Juju model name, channel or application name). 
13 | - **outputs.tf** - Integrates the module with other Terraform modules, primarily 14 | by defining potential integration endpoints (charm integrations), but also by exposing 15 | the Juju application name. 16 | - **versions.tf** - Defines the Terraform provider version. 17 | 18 | ## Using backup operator base module in higher level modules 19 | 20 | If you want to use the `backup` base module as part of your Terraform module, import it 21 | as shown below: 22 | 23 | ```text 24 | data "juju_model" "my_model" { 25 | name = var.model 26 | } 27 | 28 | module "backup" { 29 | source = "git::https://github.com/canonical/backup-operator//terraform" 30 | 31 | model = juju_model.my_model.name 32 | # (Customize configuration variables here if needed) 33 | } 34 | ``` 35 | 36 | Create integrations, for instance: 37 | 38 | ```text 39 | resource "juju_integration" "backup-loki" { 40 | model = juju_model.my_model.name 41 | application { 42 | name = module.backup.app_name 43 | endpoint = module.backup.endpoints.logging 44 | } 45 | application { 46 | name = "loki-k8s" 47 | endpoint = "logging" 48 | } 49 | } 50 | ``` 51 | 52 | The complete list of available integrations can be found [in the Integrations tab][backup-integrations].
53 | 54 | [Terraform]: https://www.terraform.io/ 55 | [Terraform Juju provider]: https://registry.terraform.io/providers/juju/juju/latest 56 | [Juju]: https://juju.is 57 | [backup-integrations]: https://charmhub.io/ 58 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:recommended", 5 | "group:allNonMajor" 6 | ], 7 | "customManagers": [ 8 | { 9 | "customType": "regex", 10 | "managerFilePatterns": [ 11 | "/(^|/)rockcraft.yaml$/" 12 | ], 13 | "description": "Update base image references", 14 | "matchStringsStrategy": "any", 15 | "matchStrings": [ 16 | "# renovate: build-base:\\s+(?[^:]*):(?[^\\s@]*)(@(?sha256:[0-9a-f]*))?", 17 | "# renovate: base:\\s+(?[^:]*):(?[^\\s@]*)(@(?sha256:[0-9a-f]*))?" 18 | ], 19 | "datasourceTemplate": "docker", 20 | "versioningTemplate": "ubuntu" 21 | }, 22 | { 23 | "customType": "regex", 24 | "description": "Update Bacula versions", 25 | "managerFilePatterns": [ 26 | "/(^|/)snapcraft\\.yaml$/" 27 | ], 28 | "matchStrings": [ 29 | "source-tag:\\s*Release-(?\\d+\\.\\d+\\.\\d+)", 30 | "(?:^|\\n|\\r)\\s*version:\\s*(?\\d+\\.\\d+\\.\\d+)" 31 | ], 32 | "datasourceTemplate": "gitlab-tags", 33 | "depNameTemplate": "bacula-community-edition/bacula-community", 34 | "registryUrlTemplate": "https://gitlab.bacula.org", 35 | "extractVersionTemplate": "^Release-(?\\d+\\.\\d+\\.\\d+)$", 36 | "versioningTemplate": "semver" 37 | } 38 | ], 39 | "packageRules": [ 40 | { 41 | "enabled": true, 42 | "matchDatasources": [ 43 | "docker" 44 | ], 45 | "pinDigests": true 46 | }, 47 | { 48 | "matchFileNames": [ 49 | "rockcraft.yaml" 50 | ], 51 | "matchUpdateTypes": [ 52 | "major", 53 | "minor", 54 | "patch" 55 | ], 56 | "enabled": false 57 | }, 58 | { 59 | "matchDatasources": [ 60 | "terraform-provider" 61 | ], 62 | "matchPackageNames": [ 
63 | "juju/juju" 64 | ], 65 | "enabled": true 66 | } 67 | ] 68 | } 69 | -------------------------------------------------------------------------------- /bacula_fd_operator/charmcraft.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | type: charm 5 | subordinate: true 6 | name: bacula-fd 7 | title: Bacula file daemon 8 | summary: Bacula file daemon charm. 9 | links: 10 | documentation: https://discourse.charmhub.io 11 | issues: https://github.com/canonical/is-charms-template-repo/issues 12 | source: https://github.com/canonical/is-charms-template-repo 13 | contact: https://launchpad.net/~canonical-is-devops 14 | 15 | description: | 16 | A subordinate charm for installing and managing Bacula file daemon. 17 | 18 | platforms: 19 | ubuntu@24.04:amd64: 20 | 21 | peers: 22 | bacula-peer: 23 | interface: bacula_peer 24 | 25 | provides: 26 | backup: 27 | interface: backup 28 | limit: 1 29 | 30 | requires: 31 | bacula-dir: 32 | interface: bacula_dir 33 | limit: 1 34 | juju-info: 35 | interface: juju-info 36 | limit: 1 37 | scope: container 38 | 39 | config: 40 | options: 41 | port: 42 | description: | 43 | Listen port for the bacula file daemon. 44 | default: 9102 45 | type: int 46 | schedule: 47 | description: | 48 | Defines when the backup job is to be run. 49 | If not provided, the backup job will only run manually. 50 | The detailed format of the schedule string is defined at https://docs.baculasystems.com/BETechnicalReference/Director/DirectorResourceTypes/ScheduleResource/index.html#director-schedule-run. 51 | 52 | You can provide multiple schedule strings separated by commas. 
53 | For example, a schedule that performs a full backup on Sunday and incremental backups from Monday to Saturday is: 54 | `Level=Full sun at 01:00, Level=Incremental mon-sat at 01:00` 55 | default: "" 56 | type: string 57 | 58 | parts: 59 | charm: 60 | charm-entrypoint: scripts/entrypoint 61 | build-snaps: 62 | - rustup 63 | override-build: | 64 | rustup default stable 65 | craftctl default 66 | build-packages: 67 | - libffi-dev 68 | - libssl-dev 69 | - pkg-config 70 | -------------------------------------------------------------------------------- /docs/_static/cookie-banner.css: -------------------------------------------------------------------------------- 1 | /* Cookie policy styling WILL BE REMOVED when implementation of new theme with vanilla is implemented */ 2 | .cookie-policy { 3 | overflow: auto; 4 | top: 35%; 5 | z-index: 50; 6 | position: fixed; 7 | } 8 | 9 | dialog.cookie-policy { 10 | background-color: var(--color-code-background); 11 | color: var(--color-code-foreground); 12 | height: auto; 13 | max-height: 60vh; 14 | max-width: 40rem; 15 | padding: 0 1rem 0 1rem; 16 | width: auto; 17 | } 18 | 19 | header.p-modal__header { 20 | margin-bottom: .5rem; 21 | } 22 | 23 | header.p-modal__header::after { 24 | background-color: #d9d9d9; 25 | content: ""; 26 | height: 1px; 27 | left: 0; 28 | margin-left: 1rem; 29 | margin-right: 1rem; 30 | position: absolute; 31 | right: 0; 32 | } 33 | 34 | h2#cookie-policy-title.p-modal__title { 35 | align-self: flex-end; 36 | font-size: 1.5rem; 37 | font-style: normal; 38 | font-weight: 275; 39 | line-height: 2rem; 40 | margin: 0 0 1.05rem 0; 41 | padding: 0.45rem 0 0 0; 42 | } 43 | 44 | .cookie-policy p { 45 | font-size: 1rem; 46 | line-height: 1.5rem; 47 | margin-top: 0; 48 | padding-top: .4rem; 49 | } 50 | 51 | .cookie-policy p a { 52 | text-decoration: none; 53 | color: var(--color-link); 54 | } 55 | .cookie-policy button { 56 | border-style: solid; 57 | border-width: 1.5px; 58 | cursor: pointer; 59 | display: 
inline-block; 60 | font-size: 1rem; 61 | font-weight: 400; 62 | justify-content: center; 63 | line-height: 1.5rem; 64 | padding: calc(.4rem - 1px) 1rem; 65 | text-align: center; 66 | text-decoration: none; 67 | transition-duration: .1s; 68 | transition-property: background-color,border-color; 69 | transition-timing-function: cubic-bezier(0.55,0.055,0.675,0.19); 70 | } 71 | 72 | .cookie-policy button { 73 | background-color: #fff; 74 | border-color: rgba(0,0,0,0.56); 75 | color: #000; 76 | } 77 | 78 | .cookie-policy .p-button--positive { 79 | background-color: #0e8420; 80 | border-color: #0e8420; 81 | color: #fff; 82 | } 83 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.bandit] 2 | exclude_dirs = ["/venv/"] 3 | [tool.bandit.assert_used] 4 | skips = ["*/*test.py", "*/test_*.py", "*tests/*.py"] 5 | 6 | # Testing tools configuration 7 | [tool.coverage.run] 8 | branch = true 9 | 10 | # Formatting tools configuration 11 | [tool.black] 12 | line-length = 99 13 | target-version = ["py38"] 14 | 15 | [tool.coverage.report] 16 | show_missing = true 17 | 18 | # Linting tools configuration 19 | [tool.flake8] 20 | max-line-length = 99 21 | max-doc-length = 99 22 | max-complexity = 10 23 | exclude = [".git", "__pycache__", ".tox", "build", "dist", "*.egg_info", "venv"] 24 | select = ["E", "W", "F", "C", "N", "R", "D", "H"] 25 | # Ignore W503, E501 because using black creates errors with this 26 | # Ignore D107 Missing docstring in __init__ 27 | ignore = ["W503", "E501", "D107"] 28 | # D100, D101, D102, D103: Ignore missing docstrings in tests 29 | per-file-ignores = ["tests/*:D100,D101,D102,D103,D104,D205,D212,D415,E501"] 30 | docstring-convention = "google" 31 | 32 | [tool.isort] 33 | line_length = 99 34 | profile = "black" 35 | 36 | [tool.mypy] 37 | check_untyped_defs = true 38 | disallow_untyped_defs = true 39 | explicit_package_bases 
= true 40 | ignore_missing_imports = true 41 | namespace_packages = true 42 | 43 | [[tool.mypy.overrides]] 44 | disallow_untyped_defs = false 45 | module = "tests.*" 46 | 47 | [tool.pylint] 48 | disable = "wrong-import-order" 49 | 50 | [tool.pytest.ini_options] 51 | minversion = "6.0" 52 | log_cli_level = "INFO" 53 | pythonpath = [ 54 | "lib", 55 | "src" 56 | ] 57 | 58 | # Linting tools configuration 59 | [tool.ruff] 60 | line-length = 99 61 | select = ["E", "W", "F", "C", "N", "D", "I001"] 62 | extend-ignore = [ 63 | "D203", 64 | "D204", 65 | "D213", 66 | "D215", 67 | "D400", 68 | "D404", 69 | "D406", 70 | "D407", 71 | "D408", 72 | "D409", 73 | "D413", 74 | ] 75 | ignore = ["E501", "D107"] 76 | extend-exclude = ["__pycache__", "*.egg_info"] 77 | per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} 78 | 79 | [tool.ruff.mccabe] 80 | max-complexity = 10 81 | 82 | [tool.codespell] 83 | skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage" 84 | -------------------------------------------------------------------------------- /bacula_server_operator/charmcraft.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | # This file configures Charmcraft. 4 | # See https://canonical-charmcraft.readthedocs-hosted.com/stable/howto/manage-charmcraft/ 5 | # for guidance. 6 | 7 | type: charm 8 | name: bacula-server 9 | title: Bacula Server 10 | summary: Bacula Server (bacula-dir, bacula-sd, baculum) charm. 
11 | links: 12 | documentation: https://discourse.charmhub.io 13 | issues: https://github.com/canonical/is-charms-template-repo/issues 14 | source: https://github.com/canonical/is-charms-template-repo 15 | contact: https://launchpad.net/~canonical-is-devops 16 | 17 | description: >- 18 | The bacula-server charm helps install and manage Bacula server components, 19 | including the Bacula Director, Bacula Storage Daemon, and Baculum. 20 | 21 | platforms: 22 | ubuntu@24.04:amd64: 23 | 24 | peers: 25 | bacula-peer: 26 | interface: bacula_peer 27 | 28 | provides: 29 | bacula-dir: 30 | interface: bacula_dir 31 | 32 | requires: 33 | postgresql: 34 | interface: postgresql_client 35 | optional: false 36 | limit: 1 37 | s3: 38 | interface: s3 39 | optional: false 40 | limit: 1 41 | 42 | config: 43 | options: 44 | file-retention: 45 | default: "1 year" 46 | type: string 47 | 48 | job-retention: 49 | default: "1 year" 50 | type: string 51 | 52 | volume-retention: 53 | default: "1 year" 54 | type: string 55 | 56 | actions: 57 | create-web-user: 58 | params: 59 | username: 60 | type: string 61 | required: 62 | - username 63 | additionalProperties: false 64 | create-api-user: 65 | params: 66 | username: 67 | type: string 68 | required: 69 | - username 70 | additionalProperties: false 71 | 72 | parts: 73 | charm: 74 | charm-entrypoint: scripts/entrypoint 75 | charm-binary-python-packages: [ "psycopg2-binary" ] 76 | build-snaps: 77 | - rustup 78 | override-build: | 79 | rustup default stable 80 | craftctl default 81 | build-packages: 82 | - libffi-dev 83 | - libssl-dev 84 | - pkg-config 85 | - libpq-dev 86 | -------------------------------------------------------------------------------- /bacula_fd_operator/src/bacula.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """A helper library for managing bacula-fd.""" 5 | 6 | import shutil 7 | from pathlib import Path 8 | 9 | import jinja2 10 | from charms.operator_libs_linux.v0 import apt, systemd 11 | 12 | BACULA_FD_CONFIG_TEMPLATE_FILE = Path(__file__).parent / "templates/bacula-fd.conf.j2" 13 | BACULA_FD_CONFIG_FILE = Path("/etc/bacula/bacula-fd.conf") 14 | 15 | 16 | def is_installed() -> bool: 17 | """Check if bacula-fd is installed. 18 | 19 | Returns: 20 | True if bacula-fd is installed. 21 | """ 22 | return bool(shutil.which("bacula-fd")) 23 | 24 | 25 | def install() -> None: 26 | """Install bacula-fd.""" 27 | apt.add_package(["bacula-fd"], update_cache=True) 28 | 29 | 30 | def restart() -> None: 31 | """Restart bacula-fd service.""" 32 | systemd.service_restart("bacula-fd") 33 | 34 | 35 | def read_config() -> str: 36 | """Read the current bacula-fd configuration file. 37 | 38 | Returns: 39 | The content of the bacula-fd configuration file, empty string if not exists. 40 | """ 41 | if not BACULA_FD_CONFIG_FILE.exists(): 42 | return "" 43 | return BACULA_FD_CONFIG_FILE.read_text(encoding="utf-8") 44 | 45 | 46 | def config_reload( 47 | *, 48 | name: str, 49 | host: str, 50 | port: int, 51 | director_name: str, 52 | director_password: str, 53 | ) -> None: 54 | """Update and reload bacula-fd configuration. 55 | 56 | Args: 57 | name: bacula-fd name. 58 | host: bacula-fd address. 59 | port: bacula-fd port. 60 | director_name: bacula-dir name. 61 | director_password: bacula-dir password. 
62 | """ 63 | # not used for HTML 64 | env = jinja2.Environment() # nosec 65 | template = env.from_string(BACULA_FD_CONFIG_TEMPLATE_FILE.read_text()) 66 | config = template.render( 67 | host=host, 68 | director_name=director_name, 69 | director_password=director_password, 70 | name=name, 71 | port=port, 72 | ) 73 | if config == read_config(): 74 | return 75 | BACULA_FD_CONFIG_FILE.write_text(config, encoding="utf-8") 76 | restart() 77 | -------------------------------------------------------------------------------- /.github/workflows/publish_charms.yaml: -------------------------------------------------------------------------------- 1 | name: Publish charms 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | find-charms: 11 | name: Find Charms 12 | runs-on: ubuntu-latest 13 | outputs: 14 | charm-dirs: ${{ steps.charm-dirs.outputs.charm-dirs }} 15 | steps: 16 | - uses: actions/checkout@v6.0.1 17 | - id: charm-dirs 18 | run: | 19 | echo charm-dirs=`find -name charmcraft.yaml | xargs dirname | jq --raw-input --slurp 'split("\n") | map(select(. != ""))'` >> $GITHUB_OUTPUT 20 | 21 | publish-charm: 22 | needs: [ find-charms ] 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | charm-dir: ${{ fromJSON(needs.find-charms.outputs.charm-dirs) }} 27 | name: Publish Charm (${{ matrix.charm-dir }}) 28 | runs-on: ubuntu-latest 29 | steps: 30 | - uses: actions/checkout@v6.0.1 31 | - name: change directory 32 | run: | 33 | TEMP_DIR=$(mktemp -d) 34 | cp -rp ./${{ matrix.charm-dir }}/. $TEMP_DIR 35 | rm -rf .* * || : 36 | cp -rp $TEMP_DIR/. . 
37 | rm -rf $TEMP_DIR # the publish step's tag-prefix reads steps.charm-name.outputs.charm-name, but no step with that id was defined; derive it from the charm metadata - name: get charm name id: charm-name run: echo charm-name=$(yq '.name' charmcraft.yaml) >> $GITHUB_OUTPUT 38 | - name: setup lxd 39 | uses: canonical/setup-lxd@v0.1.3 40 | - if: github.event_name == 'push' 41 | name: publish charm 42 | uses: canonical/charming-actions/upload-charm@2.7.0 43 | with: 44 | credentials: ${{ secrets.CHARMHUB_TOKEN }} 45 | github-token: ${{ secrets.GITHUB_TOKEN }} 46 | tag-prefix: ${{ steps.charm-name.outputs.charm-name }} 47 | 48 | publish-charm-libs: 49 | name: Release charm libs 50 | runs-on: ubuntu-24.04 51 | needs: [ publish-charm ] 52 | if: github.event_name == 'push' 53 | steps: 54 | - uses: actions/checkout@v6.0.1 55 | - name: change directory 56 | run: | 57 | TEMP_DIR=$(mktemp -d) 58 | cp -rp ./backup_integrator_operator/. $TEMP_DIR 59 | rm -rf .* * || : 60 | cp -rp $TEMP_DIR/. . 61 | rm -rf $TEMP_DIR 62 | - uses: canonical/charming-actions/release-libraries@2.7.0 63 | name: Release libs 64 | with: 65 | credentials: ${{ secrets.CHARMHUB_TOKEN }} 66 | github-token: ${{ secrets.GITHUB_TOKEN }} 67 | -------------------------------------------------------------------------------- /.github/workflows/test_terraform_module.yaml: 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details.
3 | 4 | # Attention: valid for K8s charms only 5 | 6 | name: Terraform module tests 7 | 8 | on: 9 | pull_request: 10 | paths: 11 | - 'terraform/**' 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | test-terraform: 18 | # remove this condition for enabling the workflow 19 | if: false 20 | name: Test Terraform with Juju 21 | runs-on: ubuntu-latest 22 | env: 23 | WORKING_DIR: 'terraform/tests' 24 | steps: 25 | - uses: actions/checkout@v4.2.2 26 | - uses: charmed-kubernetes/actions-operator@main 27 | with: 28 | provider: "k8s" 29 | use-canonical-k8s: true 30 | channel: 1.33-classic/stable 31 | juju-channel: 3.6/stable 32 | - name: Prepare juju tf provider environment 33 | run: | 34 | CONTROLLER=$(juju whoami | yq .Controller) 35 | JUJU_CONTROLLER_ADDRESSES="$(juju show-controller | yq '.[$CONTROLLER]'.details.\"api-endpoints\" | tr -d "[]' "|tr -d '"'|tr -d '\n')" 36 | JUJU_USERNAME="$(cat ~/.local/share/juju/accounts.yaml | yq .controllers.$CONTROLLER.user|tr -d '"')" 37 | JUJU_PASSWORD="$(cat ~/.local/share/juju/accounts.yaml | yq .controllers.$CONTROLLER.password|tr -d '"')" 38 | 39 | echo "JUJU_CONTROLLER_ADDRESSES=$JUJU_CONTROLLER_ADDRESSES" >> "$GITHUB_ENV" 40 | echo "JUJU_USERNAME=$JUJU_USERNAME" >> "$GITHUB_ENV" 41 | echo "JUJU_PASSWORD=$JUJU_PASSWORD" >> "$GITHUB_ENV" 42 | { 43 | echo 'JUJU_CA_CERT<<EOF' 44 | cat ~/.local/share/juju/controllers.yaml | yq .controllers.$CONTROLLER.ca-cert | tr -d '"' 45 | echo 'EOF' 46 | } >> "$GITHUB_ENV" 47 | - uses: hashicorp/setup-terraform@v3.1.2 48 | - run: terraform init 49 | working-directory: ${{env.WORKING_DIR}} 50 | - run: terraform plan -out=tfplan 51 | working-directory: ${{env.WORKING_DIR}} 52 | - run: terraform show tfplan 53 | working-directory: ${{env.WORKING_DIR}} 54 | - run: | 55 | juju add-model prod-chat-example 56 | set -e # Exit on error 57 | terraform test || { echo "Terraform test failed"; exit 1; } 58 | working-directory: ${{env.WORKING_DIR}} 59 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml:
-------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a bug report 3 | labels: ["bug"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: > 8 | Thanks for taking the time to fill out this bug report! Before submitting your issue, please make 9 | sure you are using the latest version of the charm. If not, please switch to this image prior to 10 | posting your report to make sure it's not already solved. 11 | - type: textarea 12 | id: bug-description 13 | attributes: 14 | label: Bug Description 15 | description: > 16 | If applicable, add screenshots to help explain the problem you are facing. 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: reproduction 21 | attributes: 22 | label: To Reproduce 23 | description: > 24 | Please provide a step-by-step instruction of how to reproduce the behavior. 25 | placeholder: | 26 | 1. `juju deploy ...` 27 | 2. `juju relate ...` 28 | 3. `juju status --relations` 29 | validations: 30 | required: true 31 | - type: textarea 32 | id: environment 33 | attributes: 34 | label: Environment 35 | description: > 36 | We need to know a bit more about the context in which you run the charm. 37 | - Are you running Juju locally, on lxd, in multipass or on some other platform? 38 | - What track and channel you deployed the charm from (i.e. `latest/edge` or similar). 39 | - Version of any applicable components, like the juju snap, the model controller, lxd, microk8s, and/or multipass. 40 | validations: 41 | required: true 42 | - type: textarea 43 | id: logs 44 | attributes: 45 | label: Relevant log output 46 | description: > 47 | Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. 48 | Fetch the logs using `juju debug-log --replay` and `kubectl logs ...`. 
Additional details available in the juju docs 49 | at https://documentation.ubuntu.com/juju/3.6/howto/manage-logs/#manage-logs 50 | render: shell 51 | validations: 52 | required: true 53 | - type: textarea 54 | id: additional-context 55 | attributes: 56 | label: Additional context 57 | 58 | -------------------------------------------------------------------------------- /docs/_templates/header.html: -------------------------------------------------------------------------------- 1 | 73 | -------------------------------------------------------------------------------- /docs/.sphinx/metrics/source_metrics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=all 3 | 4 | VENV=".sphinx/venv/bin/activate" 5 | 6 | files=0 7 | words=0 8 | readabilityWords=0 9 | readabilitySentences=0 10 | readabilitySyllables=0 11 | readabilityAverage=0 12 | readable=true 13 | 14 | # measure number of files (.rst and .md), excluding those in .sphinx dir 15 | files=$(find . -type d -path './.sphinx' -prune -o -type f \( -name '*.md' -o -name '*.rst' \) -print | wc -l) 16 | 17 | # calculate metrics only if source files are present 18 | if [ "$files" -eq 0 ]; then 19 | echo "There are no source files to calculate metrics" 20 | else 21 | # measure raw total number of words, excluding those in .sphinx dir 22 | words=$(find . -type d -path './.sphinx' -prune -o \( -name '*.md' -o -name '*.rst' \) -exec cat {} + | wc -w) 23 | 24 | # calculate readability for markdown source files 25 | echo "Activating virtual environment to run vale..." 
26 | source "${VENV}" 27 | 28 | for file in *.md *.rst; do 29 | if [ -f "$file" ]; then 30 | readabilityWords=$(vale ls-metrics "$file" | grep '"words"' | sed 's/[^0-9]*//g') 31 | readabilitySentences=$(vale ls-metrics "$file" | grep '"sentences"' | sed 's/[^0-9]*//g') 32 | readabilitySyllables=$(vale ls-metrics "$file" | grep '"syllables"' | sed 's/[^0-9]*//g') 33 | fi 34 | done 35 | 36 | echo "Deactivating virtual environment..." 37 | deactivate 38 | 39 | # calculate mean number of words 40 | if [ "$files" -ge 1 ]; then 41 | meanval=$((readabilityWords / files)) 42 | else 43 | meanval=$readabilityWords 44 | fi 45 | 46 | readabilityAverage=$(echo "scale=2; 0.39 * ($readabilityWords / $readabilitySentences) + (11.8 * ($readabilitySyllables / $readabilityWords)) - 15.59" | bc) 47 | 48 | # cast average to int for comparison 49 | readabilityAverageInt=$(echo "$readabilityAverage / 1" | bc) 50 | 51 | # value below 8 is considered readable 52 | if [ "$readabilityAverageInt" -lt 8 ]; then 53 | readable=true 54 | else 55 | readable=false 56 | fi 57 | 58 | # summarise latest metrics 59 | echo "Summarising metrics for source files (.md, .rst)..." 60 | echo -e "\ttotal files: $files" 61 | echo -e "\ttotal words (raw): $words" 62 | echo -e "\ttotal words (prose): $readabilityWords" 63 | echo -e "\taverage word count: $meanval" 64 | echo -e "\treadability: $readabilityAverage" 65 | echo -e "\treadable: $readable" 66 | fi 67 | -------------------------------------------------------------------------------- /backup_integrator_operator/charmcraft.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | type: charm 5 | subordinate: true 6 | name: backup-integrator 7 | title: Backup integrator 8 | summary: A integrator charm to require backup on behalf of other charms. 
9 | links: 10 | documentation: https://discourse.charmhub.io 11 | issues: https://github.com/canonical/backup-operators/issues 12 | source: https://github.com/canonical/backup-operators 13 | contact: https://launchpad.net/~canonical-is-devops 14 | 15 | description: | 16 | The backup integrator charm is a subordinate charm that requires 17 | backups on behalf of charms that do not implement the backup relation 18 | themselves. 19 | 20 | requires: 21 | backup: 22 | interface: backup 23 | juju-info: 24 | interface: juju-info 25 | scope: container 26 | 27 | platforms: 28 | ubuntu@24.04:amd64: 29 | 30 | config: 31 | options: 32 | fileset: 33 | description: > 34 | A comma-separated list of files to be backed up. Paths must be absolute. 35 | type: string 36 | run-before-backup: 37 | description: > 38 | The content of a script to run before starting the backup operation. 39 | If this script fails, the backup operation will be canceled. 40 | You should include a shebang in your script. 41 | type: string 42 | run-after-backup: 43 | description: > 44 | The content of a script to run after completing the backup operation. 45 | This script will still run even if the backup operation fails. 46 | You should include a shebang in your script. 47 | type: string 48 | run-before-restore: 49 | description: > 50 | The content of a script to run before starting the restore operation. 51 | If this script fails, the restore operation will be canceled. 52 | You should include a shebang in your script. 53 | type: string 54 | run-after-restore: 55 | description: > 56 | The content of a script to run after completing the restore operation. 57 | This script will still run even if the restore operation fails. 58 | You should include a shebang in your script. 
59 | type: string 60 | 61 | parts: 62 | charm: 63 | charm-entrypoint: scripts/entrypoint 64 | build-snaps: 65 | - rustup 66 | override-build: | 67 | rustup default stable 68 | craftctl default 69 | build-packages: 70 | - libffi-dev 71 | - libssl-dev 72 | - pkg-config 73 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Backup operators 2 | 3 | This repository contains a collection of operators that handle backups 4 | in the Juju ecosystem. Its goal is to provide an easy-to-use, highly 5 | integrated backup solution for charms in Juju. 6 | 7 | This repository contains the source code for the following 8 | backup-related charms: 9 | 10 | 1. `backup-integrator`: An integrator charm that requires backup 11 | relation on behalf of other charms. 12 | 2. `bacula-server`: A machine charm that installs and manages all server 13 | components of the Bacula backup solution, including the Bacula 14 | Director, Bacula Storage Daemon, and Baculum. 15 | 3. `bacula-fd`: A subordinate charm that installs and manages the Bacula 16 | File Daemon, which is the backup agent in the Bacula solution. 17 | 18 | The repository also holds the snapped workloads of the aforementioned 19 | charms: 20 | 21 | 1. `charmed-bacula-server`: A snap containing all server components of 22 | the Bacula backup solution, including the Bacula Director, Bacula 23 | Storage Daemon, and Baculum. 24 | 25 | ## Documentation 26 | 27 | Our documentation is stored in the `docs` directory. 28 | It is based on the Canonical starter pack and hosted on 29 | [Read the Docs](https://about.readthedocs.com/). In structuring, the 30 | documentation employs the [Diátaxis](https://diataxis.fr/) approach. 
31 | 32 | You may open a pull request with your documentation changes, or you can 33 | [file a bug](https://github.com/canonical/backup-operators/issues) to 34 | provide constructive feedback or suggestions. 35 | 36 | To run the documentation locally before submitting your changes: 37 | 38 | ```bash 39 | cd docs 40 | make run 41 | ``` 42 | 43 | GitHub runs automatic checks on the documentation to verify spelling, 44 | validate links and style guide compliance. 45 | 46 | You can (and should) run the same checks locally: 47 | 48 | ```bash 49 | make spelling 50 | make linkcheck 51 | make vale 52 | make lint-md 53 | ``` 54 | 55 | ## Project and community 56 | 57 | The backup operators project is a member of the Ubuntu family. It is an 58 | open source project that warmly welcomes community projects, 59 | contributions, suggestions, fixes and constructive feedback. 60 | 61 | * [Code of conduct](https://ubuntu.com/community/code-of-conduct) 62 | * [Get support](https://discourse.charmhub.io/) 63 | * [Issues](https://github.com/canonical/backup-operators/issues) 64 | * [Matrix](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) 65 | * [Contribute](https://github.com/canonical/backup-operators/blob/main/CONTRIBUTING.md) 66 | -------------------------------------------------------------------------------- /docs/how-to/integrate-with-backup-integrator-charm.md: -------------------------------------------------------------------------------- 1 | (how_to_integrate_with_backup_integrator_charm)= 2 | 3 | # How to integrate with the backup integrator charm 4 | 5 | The backup integrator charm provides backup functionality to charms that 6 | haven't, or cannot, implement the backup relation themselves. 7 | 8 | To use the backup integrator charm, integrate the backup integrator 9 | charm with the backup source using the `juju-info` relation. Then you 10 | can use the `backup` relation endpoint to request backups from 11 | backup-provider charms such as `bacula-fd`. 
Let's 12 | demonstrate by using the Ubuntu charm as an example. 13 | 14 | Deploy the charm and integrate with the backup integrator 15 | and Provider charms: 16 | 17 | ``` 18 | juju deploy ubuntu 19 | 20 | juju integrate ubuntu:juju-info backup-integrator 21 | juju integrate backup-integrator:backup bacula-fd 22 | ``` 23 | 24 | Attach the bacula-fd charm to the principal charm because a 25 | subordinate charm cannot be a principal charm for another 26 | subordinate charm. 27 | 28 | ``` 29 | juju integrate ubuntu:juju-info bacula-fd 30 | ``` 31 | 32 | Integrate bacula-fd charm with the bacula-server charm: 33 | 34 | ``` 35 | juju integrate bacula-fd bacula-server 36 | ``` 37 | 38 | ## Configure the backup integrator charm 39 | 40 | As the backup integrator charm is the requirer of backups, you need to 41 | provide the specification of what to back up and how to back it up to 42 | the backup integrator charm. This is controlled by the `fileset`, 43 | `run-before-backup`, `run-after-backup`, `run-before-restore`, and 44 | `run-after-restore` configuration options on the backup integrator 45 | charm. 46 | 47 | The `fileset` configuration describes what to back up; it's a 48 | comma-separated list of absolute files or directories on the backup 49 | source machine. 50 | 51 | The `run-before-backup`, `run-after-backup`, `run-before-restore`, and 52 | `run-after-restore` configurations describe how to back up and restore. 53 | Each contains the content of a script that will run before or after a 54 | backup or restore. These scripts can be used to prepare backup files and 55 | to restore the service from a backup. 56 | 57 | The following is an example configuration for the backup integrator 58 | charm on an imaginary PostgreSQL charm (not the 59 | real [`postgresql`](https://charmhub.io/postgresql) charm). It uses 60 | `pg_dump` to create a backup file of the database and `psql` to restore 61 | the database from that file during a restoration. 
62 | 63 | 64 | 65 | ```yaml 66 | fileset: /var/backups/postgresql 67 | run-before-backup: | 68 | #!/bin/bash 69 | sudo -u postgres pg_dump -d ubuntu -c -f /var/backups/postgresql/ubuntu.dump 70 | run-after-backup: | 71 | #!/bin/bash 72 | sudo rm -f /var/backups/postgresql/ubuntu.dump 73 | run-before-restore: null 74 | run-after-restore: | 75 | #!/bin/bash 76 | sudo -u postgres psql -d ubuntu -1 -f /var/backups/postgresql/ubuntu.dump 77 | sudo rm -f /var/backups/postgresql/ubuntu.dump 78 | ``` 79 | 80 | 81 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description lang=en": "A collection of charms for backup charms in machine environments." 5 | --- 6 | 7 | 8 | 9 | # Backup Operators 10 | 11 | 12 | 13 | Backup operators are a collection of {ref}`charms ` that provide a highly integrated, low-operations backup solution for charms running in machine environments. 14 | 15 | Backup operators deliver file-level backups using the tried-and-true Bacula system and leverage the `backup` relation to automatically define what and how to back up for all supported backup target charms in the Juju ecosystem. This significantly reduces the operational cost of setting up backups in complex systems. 16 | 17 | For Site Reliability Engineers, backup operators offer a turnkey, out-of-the-box backup solution. 18 | 19 | ## In this documentation 20 | 21 | | | | 22 | |---------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------| 23 | | {ref}`Tutorial `
Get started - a hands-on introduction to using the charm for new users
| {ref}`How-to guides `
Step-by-step guides covering key operations and common tasks | 24 | | {ref}`Reference `
Technical information - specifications, APIs, architecture | {ref}`Explanation `
Concepts - discussion and clarification of key topics | 25 | 26 | ## Contributing to this documentation 27 | 28 | Documentation is an important part of this project, and we take the same open-source approach 29 | to the documentation as the code. As such, we welcome community contributions, suggestions, and 30 | constructive feedback on our documentation. 31 | See {ref}`How to contribute ` for more information. 32 | 33 | 34 | If there's a particular area of documentation that you'd like to see that's missing, please 35 | [file a bug](https://github.com/canonical/backup-operators/issues). 36 | 37 | ## Project and community 38 | 39 | The backup operators are a member of the Ubuntu family. It's an open-source project that warmly welcomes community 40 | projects, contributions, suggestions, fixes, and constructive feedback. 41 | 42 | - [Code of conduct](https://ubuntu.com/community/code-of-conduct) 43 | - [Get support](https://discourse.charmhub.io/) 44 | - [Join our online chat](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) 45 | - [Contribute](https://github.com/canonical/backup-operators/blob/main/CONTRIBUTING.md) 46 | 47 | Thinking about using the backup operators for your next project? 48 | [Get in touch](https://matrix.to/#/#charmhub-charmdev:ubuntu.com)! 49 | 50 | ```{toctree} 51 | :hidden: 52 | :maxdepth: 1 53 | 54 | Tutorial 55 | how-to/index 56 | reference/index 57 | explanation/index 58 | changelog 59 | ``` 60 | -------------------------------------------------------------------------------- /docs/reuse/links.txt: -------------------------------------------------------------------------------- 1 | .. _Canonical Documentation Style Guide: https://docs.ubuntu.com/styleguide/en 2 | .. _Canonical Reference Library: https://library.canonical.com/ 3 | .. _Canonical Sphinx: https://github.com/canonical/canonical-sphinx 4 | .. _change log: https://github.com/canonical/sphinx-docs-starter-pack/wiki/Change-log 5 | .. _Diátaxis: https://diataxis.fr/ 6 | .. 
_Example product documentation: https://canonical-example-product-documentation.readthedocs-hosted.com/ 7 | .. _Example product documentation repository: https://github.com/canonical/example-product-documentation 8 | .. _`file-wide metadata`: https://www.sphinx-doc.org/en/master/usage/restructuredtext/field-lists.html 9 | .. _Five golden rules for compliant alt text: https://abilitynet.org.uk/news-blogs/five-golden-rules-compliant-alt-text 10 | .. _`Furo documentation`: https://pradyunsg.me/furo/quickstart/ 11 | .. _grid tables: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#grid-tables 12 | .. _`Hiding Contents sidebar`: https://pradyunsg.me/furo/customisation/toc/ 13 | .. _How to connect your Read the Docs account to your Git provider: https://docs.readthedocs.com/platform/stable/guides/connecting-git-account.html 14 | .. _How to manually configure a Git repository integration: https://docs.readthedocs.io/en/stable/guides/setup/git-repo-manual.html 15 | .. _How to publish documentation on Read the Docs: https://library.canonical.com/documentation/publish-on-read-the-docs 16 | .. _Level AA conformance: https://www.w3.org/WAI/WCAG2AA-Conformance 17 | .. _list tables: https://docutils.sourceforge.io/docs/ref/rst/directives.html#list-table 18 | .. _manual import: https://readthedocs.com/dashboard/import/manual/ 19 | .. _Markdown: https://commonmark.org/ 20 | .. _MyST: https://myst-parser.readthedocs.io/ 21 | .. _Open Graph: https://ogp.me/ 22 | .. _Pa11y: https://pa11y.org/ 23 | .. _Pa11y readme: https://github.com/pa11y/pa11y#command-line-configuration 24 | .. _Pygments documentation: https://pygments.org/languages/ 25 | .. _Read the Docs at Canonical: https://library.canonical.com/documentation/read-the-docs-at-canonical 26 | .. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html 27 | .. _`Sphinx`: https://www.sphinx-doc.org/ 28 | .. 
_`Sphinx configuration`: https://www.sphinx-doc.org/en/master/usage/configuration.html 29 | .. _Sphinx design: https://sphinx-design.readthedocs.io/en/latest/ 30 | .. _Sphinx documentation starter pack: 31 | .. _Sphinx documentation starter pack repository: https://github.com/canonical/starter-pack 32 | .. _Sphinx documentation starter pack documentation: https://canonical-starter-pack.readthedocs-hosted.com/ 33 | .. _`Sphinx extensions`: https://www.sphinx-doc.org/en/master/usage/extensions/index.html 34 | .. _Sphinx tabs: https://sphinx-tabs.readthedocs.io/en/latest/ 35 | .. _tables: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#table-directives 36 | .. _toctree: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-toctree 37 | .. _Vale: https://vale.sh/ 38 | .. _Vale rules: https://github.com/canonical/documentation-style-guide 39 | .. _Web Content Accessibility Guidelines (WCAG) 2.2: https://www.w3.org/TR/WCAG22/ 40 | 41 | 42 | .. SHORTCUTS 43 | .. |RST| replace:: :abbr:`reST (reStructuredText)` 44 | -------------------------------------------------------------------------------- /docs/.sphinx/metrics/build_metrics.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import argparse 5 | from pathlib import Path 6 | from html.parser import HTMLParser 7 | from urllib.parse import urlsplit 8 | 9 | 10 | class MetricsParser(HTMLParser): 11 | def __init__(self): 12 | super().__init__() 13 | self.int_link_count = 0 14 | self.ext_link_count = 0 15 | self.fragment_count = 0 16 | self.image_count = 0 17 | self.in_object = 0 18 | 19 | @property 20 | def link_count(self): 21 | return self.fragment_count + self.int_link_count + self.ext_link_count 22 | 23 | def read(self, file): 24 | """ 25 | Read *file* (a file-like object with a ``read`` method returning 26 | strings) a chunk at a time, feeding each chunk to the parser. 
27 | """ 28 | # Ensure the parser state is reset before each file (just in case 29 | # there's an erroneous dangling ) 30 | self.reset() 31 | self.in_object = 0 32 | buf = '' 33 | while True: 34 | # Parse 1MB chunks at a time 35 | buf = file.read(1024**2) 36 | if not buf: 37 | break 38 | self.feed(buf) 39 | 40 | def handle_starttag(self, tag, attrs): 41 | """ 42 | Count , , and tags to determine the number of internal 43 | and external links, and the number of images. 44 | """ 45 | attrs = dict(attrs) 46 | if tag == 'a' and 'href' in attrs: 47 | # If there's no href, it's an anchor; if there's no hostname 48 | # (netloc) or path, it's just a fragment link within the page 49 | url = urlsplit(attrs['href']) 50 | if url.netloc: 51 | self.ext_link_count += 1 52 | elif url.path: 53 | self.int_link_count += 1 54 | else: 55 | self.fragment_count += 1 56 | elif tag == 'object': 57 | # tags are a bit complex as they nest to offer fallbacks 58 | # and may contain an fallback. We only want to count the 59 | # outer-most in this case 60 | if self.in_object == 0: 61 | self.image_count += 1 62 | self.in_object += 1 63 | elif tag == 'img' and self.in_object == 0: 64 | self.image_count += 1 65 | 66 | def handle_endtag(self, tag): 67 | if tag == 'object': 68 | # Never let in_object be negative 69 | self.in_object = max(0, self.in_object - 1) 70 | 71 | 72 | def main(args=None): 73 | parser = argparse.ArgumentParser() 74 | parser.add_argument( 75 | 'build_dir', metavar='build-dir', nargs='?', default='.', 76 | help="The directory to scan for HTML files") 77 | config = parser.parse_args(args) 78 | 79 | parser = MetricsParser() 80 | for path in Path(config.build_dir).rglob('*.html'): 81 | with path.open('r', encoding='utf-8', errors='replace') as f: 82 | parser.read(f) 83 | 84 | print('Summarising metrics for build files (.html)...') 85 | print(f'\tlinks: {parser.link_count} (' 86 | f'{parser.fragment_count} #frag…, ' 87 | f'{parser.int_link_count} /int…, ' 88 | 
f'{parser.ext_link_count} https://ext…' 89 | ')') 90 | print(f'\timages: {parser.image_count}') 91 | 92 | 93 | if __name__ == '__main__': 94 | sys.exit(main()) 95 | -------------------------------------------------------------------------------- /docs/reference/integrations.md: -------------------------------------------------------------------------------- 1 | (reference_integrations)= 2 | 3 | # Integrations 4 | 5 | ## Bacula server charm 6 | 7 | The following are relations for the Bacula server charm. 8 | 9 | ### `bacula-dir` 10 | 11 | *Interface*: `bacula_dir` 12 | *Supported charms*: [`bacula-fd`](https://charmhub.io/bacula-fd) 13 | 14 | The `bacula-dir` relation connects the `bacula-server` charm and the 15 | `bacula-fd` charm to exchange `backup-dir` and `bacula-fd` information. 16 | 17 | Example `bacula-dir` integration command: 18 | 19 | ``` 20 | juju integrate bacula-server bacula-fd:bacula-dir 21 | ``` 22 | 23 | ### `postgresql` 24 | 25 | *Interface*: `postgresql_client` 26 | *Supported 27 | charms*: [`postgresql`](https://charmhub.io/postgresql), [`pgbouncer`](https://charmhub.io/pgbouncer) 28 | 29 | The `postgresql` relation provides a PostgreSQL database for the Bacula 30 | server to store backup metadata. 31 | 32 | Example `postgresql` integration command: 33 | 34 | ``` 35 | juju integrate bacula-server postgresql 36 | ``` 37 | 38 | ### `s3` 39 | 40 | *Interface*: `s3` 41 | *Supported charms*: [`s3-integrator`](https://charmhub.io/s3-integrator) 42 | 43 | The `s3` relation provides S3-compatible storage for the Bacula server 44 | to store backup files. 45 | 46 | Example `s3` integration command: 47 | 48 | ``` 49 | juju integrate bacula-server s3-integrator 50 | ``` 51 | 52 | ## Bacula-fd charm 53 | 54 | The following are relations for the Bacula-fd charm. 
55 | 56 | ### `bacula-dir` 57 | 58 | *Interface*: `bacula_dir` 59 | *Supported charms*: [`bacula-server`](https://charmhub.io/bacula-server) 60 | 61 | The `bacula-dir` relation connects the `bacula-server` charm and the 62 | `bacula-fd` charm to exchange `backup-dir` and `bacula-fd` information. 63 | 64 | Example `bacula-dir` integration command: 65 | 66 | ``` 67 | juju integrate bacula-fd bacula-server:bacula-dir 68 | ``` 69 | 70 | ### `backup` 71 | 72 | *Interface*: `backup` 73 | *Supported 74 | charms*: [`backup-integrator`](https://charmhub.io/backup-integrator) 75 | 76 | The Bacula-fd charm implements the provider side of the `backup` 77 | relation and uses it to provide backup services. 78 | 79 | Example `backup` integration command: 80 | 81 | ``` 82 | juju integrate bacula-fd backup-integrator:backup 83 | ``` 84 | 85 | ### `juju-info` 86 | 87 | *Interface*: `juju-info` 88 | *Supported charms*: Any machine charm 89 | 90 | The Bacula-fd charm uses the `juju-info` relation to attach itself to a 91 | principal charm. 92 | 93 | Example `juju-info` integration command: 94 | 95 | ``` 96 | juju integrate bacula-fd ubuntu:juju-info 97 | ``` 98 | 99 | ## Backup integrator charm 100 | 101 | The following are relations for the backup integrator charm. 102 | 103 | ### `backup` 104 | 105 | *Interface*: `backup` 106 | *Supported charms*: [`bacula-fd`](https://charmhub.io/bacula-fd) 107 | 108 | The Backup integrator charm implements the requirer side of the `backup` 109 | relation and uses it to request backup services from providers. 110 | 111 | Example `backup` integration command: 112 | 113 | ``` 114 | juju integrate backup-integrator bacula-fd:backup 115 | ``` 116 | 117 | ### `juju-info` 118 | 119 | *Interface*: `juju-info` 120 | *Supported charms*: Any machine charm 121 | 122 | The Backup integrator charm uses the `juju-info` relation to attach 123 | itself to a principal charm. 
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

"""Library to handle the requirer part of the bacula-dir relation."""

import ops
from pydantic import BaseModel, Field

# Default name of the bacula-dir relation endpoint.
BACULA_DIR_RELATION_NAME = "bacula-dir"


class BaculaDirInfo(BaseModel):
    """Bacula-dir relation model.

    Attributes:
        name: bacula-dir name.
        password: bacula-dir password (the resolved secret content, not the
            secret ID transported over the relation).
    """

    name: str = Field(min_length=1)
    password: str = Field(min_length=1)


class BaculaRequirer:
    """Requirer for the bacula-dir relation.

    Publishes this unit's bacula-fd details to the bacula-dir (provider) side
    and reads back the director's name and password.
    """

    def __init__(
        self, charm: ops.CharmBase, relation_name: str = BACULA_DIR_RELATION_NAME
    ) -> None:
        """Initialize the requirer.

        Args:
            charm: the charm instance.
            relation_name: the bacula-dir relation name.
        """
        self._charm = charm
        self._relation_name = relation_name

    def send_to_bacula_dir(  # pylint: disable=too-many-arguments
        self,
        *,
        name: str,
        fileset: str,
        client_run_before_backup: str,
        client_run_after_backup: str,
        client_run_before_restore: str,
        client_run_after_restore: str,
        port: int = 9102,
        schedule: str | None = None,
    ) -> None:
        """Send bacula-fd information to the bacula-dir.

        Writes the values into this unit's databag of the bacula-dir relation.
        This is a no-op when the relation is not established yet.

        Args:
            name: bacula-fd name.
            fileset: backup fileset.
            client_run_before_backup: backup run-before-backup script.
            client_run_after_backup: backup run-after-backup script.
            client_run_before_restore: backup run-before-restore script.
            client_run_after_restore: backup run-after-restore script.
            port: bacula-fd port.
            schedule: backup schedule.
        """
        relation = self._charm.model.get_relation(self._relation_name)
        if not relation:
            return
        data = relation.data[self._charm.unit]
        # Relation databags only hold string values, hence the str() wrapping
        # below (the run-* arguments may arrive as Path-like objects).
        data["name"] = name
        data["fileset"] = fileset
        data["port"] = str(port)
        data["client-run-before-backup"] = str(client_run_before_backup)
        data["client-run-after-backup"] = str(client_run_after_backup)
        data["client-run-before-restore"] = str(client_run_before_restore)
        data["client-run-after-restore"] = str(client_run_after_restore)
        if schedule:
            data["schedule"] = schedule
        else:
            # Clear a previously published schedule. NOTE(review): this relies
            # on ops translating deletion into writing an empty value, so it
            # does not raise when the key was never set -- confirm against the
            # pinned ops version.
            del data["schedule"]

    def receive_from_bacula_dir(self) -> BaculaDirInfo | None:
        """Receive the bacula-dir information from the bacula-dir.

        The provider publishes the director name and a Juju secret ID in the
        "password" field; the secret is resolved here into the actual password.

        Returns:
            bacula-dir information or None if the relation or relation data doesn't exist.
        """
        relation = self._charm.model.get_relation(self._relation_name)
        if not relation or not relation.app:
            return None
        data = relation.data[relation.app]
        name = data.get("name")
        # "password" carries a Juju secret ID, not the password itself.
        password_secret_id = data.get("password")
        if name and password_secret_id:
            # refresh=True fetches the latest secret revision in case the
            # provider rotated the password. NOTE(review): get_secret raises if
            # the secret is gone or access was revoked; callers appear to rely
            # on that propagating.
            password = self._charm.model.get_secret(id=password_secret_id).get_content(
                refresh=True
            )["password"]
            return BaculaDirInfo(name=name, password=password)
        return None
39 |
40 | {%- if show_copyright %} 41 | 52 | {%- endif %} 53 | 54 | {# mod: removed "Made with" #} 55 | 56 | {%- if last_updated -%} 57 |
58 | {% trans last_updated=last_updated|e -%} 59 | Last updated on {{ last_updated }} 60 | {%- endtrans -%} 61 |
62 | {%- endif %} 63 | 64 | {%- if show_source and has_source and sourcename %} 65 |
66 | Show source 68 |
69 | {%- endif %} 70 |
71 |
72 | {% if has_contributor_listing and display_contributors and pagename and page_source_suffix %} 73 | {% set contributors = get_contributors_for_file(pagename, page_source_suffix) %} 74 | {% if contributors %} 75 | {% if contributors | length > 1 %} 76 | Thanks to the {{ contributors |length }} contributors! 77 | {% else %} 78 | Thanks to our contributor! 79 | {% endif %} 80 |
81 |
    82 | {% for contributor in contributors %} 83 |
  • 84 | {{ contributor[0] }} 85 |
  • 86 | {% endfor %} 87 |
88 | {% endif %} 89 | {% endif %} 90 |
91 | 92 |
Baculum is the web interface for Bacula. The Bacula server charm manages
and installs Baculum along with other Bacula server components, which
you can access using the URL `http://<bacula-server-address>:9095/web/`.
43 | 44 | The Bacula server charm performs backups automatically based on the 45 | schedule configured in each `bacula-fd` charm. To perform an 46 | ad-hoc backup, click the "Details" button for the backup job in the job 47 | list. Then click the "Run job" button on the job details page, and click 48 | the "Run job" button in the popup window. It is advised not to modify 49 | any default settings in the popup window unless you are certain of the 50 | required changes. 51 | 52 | ## Perform a restore 53 | 54 | `http://:9095/web/restore/` provides the restore 55 | wizard, which is an interactive way of setting up a restoration. 56 | 57 | 58 | 59 | ### Step 1 - Select source backup client 60 | 61 | Select the backup client that owns the backup you want to 62 | restore from. 63 | 64 | ### Step 2 - Select backup to restore 65 | 66 | Select the version of the backup you want to restore. 67 | 68 | ### Step 3 - Select files to restore 69 | 70 | Select the files you want to restore. It is advised to 71 | select all files in the backup. 72 | 73 | ### Step 4 - Select destination for restore 74 | 75 | Select the destination for the restore. It's important to 76 | change the "Restore to directory:" setting to "/", which restores files 77 | to their original locations. This is required for the backup charm to 78 | function correctly. 79 | 80 | ### Step 5 - Options for restore 81 | 82 | Select the destination client that the files need to be 83 | restored to by choosing the corresponding restore job. 84 | 85 | 86 | 87 | ### Step 6 - Finish 88 | 89 | 90 | 91 | Click "Run restore" to start the restoration. The restoration time can 92 | vary depending on the backup size and network connectivity. 93 | 94 | 95 | -------------------------------------------------------------------------------- /backup_integrator_operator/src/charm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2025 Canonical Ltd. 
# See LICENSE file for licensing details.

"""backup-integrator charm."""

import logging
import pathlib
import typing

import ops
import pydantic
from charms.backup_integrator.v0.backup import BackupDynamicRequirer

logger = logging.getLogger(__name__)


class BackupIntegratorCharm(ops.CharmBase):
    """Subordinate charm that forwards backup configuration to the backup relation."""

    # Scripts supplied via the run-* config options are persisted under this
    # directory (one sub-directory per unit) so the backup provider can run them.
    _CHARM_OPT_DIR = pathlib.Path("/opt/backup-integrator-charm")

    def __init__(self, *args: typing.Any):
        """Construct.

        Args:
            args: Arguments passed to the CharmBase parent constructor.
        """
        super().__init__(*args)
        self._requirer = BackupDynamicRequirer(charm=self)
        # All events funnel into the same idempotent reconcile handler.
        self.framework.observe(self.on.backup_relation_created, self._reconcile)
        self.framework.observe(self.on.backup_relation_changed, self._reconcile)
        self.framework.observe(self.on.config_changed, self._reconcile)
        # Leadership events are observed because only the leader publishes the
        # relation data (see the is_leader() gate in _reconcile).
        self.framework.observe(self.on.leader_elected, self._reconcile)
        self.framework.observe(self.on.leader_settings_changed, self._reconcile)
        self.framework.observe(self.on.upgrade_charm, self._reconcile)

    def _save_script(self, config_option: str) -> pathlib.Path | None:
        """Save the content of a script configuration.

        Args:
            config_option: The name of the script configuration option.

        Returns:
            The path to the saved script if the configuration value is not empty, otherwise, None.
        """
        content = typing.cast(str | None, self.config.get(config_option))
        if not content:
            return None
        # The path embeds the unit name so units never clobber each other's scripts.
        script_path = (
            self._CHARM_OPT_DIR / self.unit.name.replace("/", "-") / "scripts" / config_option
        )
        script_path.parent.mkdir(parents=True, exist_ok=True)
        script_path.write_text(content, encoding="utf-8")
        # 0o755: the script must be executable to be usable as a backup hook.
        script_path.chmod(0o755)
        return script_path

    def _reconcile(self, _: ops.EventBase) -> None:
        """Reconciles the charm.

        Status transitions: waiting (no backup relation) -> blocked (missing or
        invalid config) -> active (relation data published, or non-leader).
        """
        if self.model.get_relation("backup") is None:
            self.unit.status = ops.WaitingStatus("waiting for backup relation")
            return
        fileset_config = typing.cast(str, self.config.get("fileset", "")).strip()
        if not fileset_config:
            self.unit.status = ops.BlockedStatus("missing fileset config")
            return
        # fileset is a comma-separated list; blank entries are dropped.
        fileset = [file.strip() for file in fileset_config.split(",") if file.strip()]
        if not self.unit.is_leader():
            # NOTE(review): only the leader reaches _save_script below, yet the
            # script path written to the relation embeds the unit name -- verify
            # that non-leader units never need local copies of the scripts.
            self.unit.status = ops.ActiveStatus()
            return
        try:
            self._requirer.require_backup(
                fileset=fileset,
                run_before_backup=self._save_script("run-before-backup"),
                run_after_backup=self._save_script("run-after-backup"),
                run_before_restore=self._save_script("run-before-restore"),
                run_after_restore=self._save_script("run-after-restore"),
            )
        except pydantic.ValidationError as exc:
            logger.exception("invalid charm configuration")
            # Surface the offending field names in the unit status so the
            # operator can tell which config option to fix.
            errors = exc.errors()
            error_fields = [str(e["loc"][0]) for e in errors if e.get("loc")]
            self.unit.status = ops.BlockedStatus(f"invalid config: {', '.join(error_fields)}")
            return
        self.unit.status = ops.ActiveStatus()


if __name__ == "__main__":  # pragma: nocover
    # NOTE(review): ops.main.main is the older entry-point spelling; newer ops
    # releases expose ops.main(...) directly -- confirm before modernizing.
    ops.main.main(BackupIntegratorCharm)
Copyright 2025 Canonical Ltd. 4 | # See LICENSE file for licensing details. 5 | 6 | """Integration tests.""" 7 | 8 | import logging 9 | import time 10 | 11 | import jubilant 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | def wait_job_complete(baculum, job_name, timeout=300) -> dict: 17 | """Wait for a bacula job to complete. 18 | 19 | Args: 20 | baculum: baculum API client. 21 | job_name: bacula job name. 22 | timeout: timeout in seconds. 23 | 24 | Returns: 25 | completed job run object. 26 | 27 | Raises: 28 | TimeoutError: When timeout is reached. 29 | """ 30 | deadline = time.time() + timeout 31 | while time.time() < deadline: 32 | job_run = baculum.list_job_runs(job_name)[0] 33 | logger.info("%s job run status: %s", job_name, job_run) 34 | if job_run["jobstatus"] == "T": 35 | return job_run 36 | time.sleep(1) 37 | raise TimeoutError(f"Timed out waiting for job '{job_name}' completion") 38 | 39 | 40 | def select_table(juju) -> str: 41 | """Select all content from the test postgresql table. 42 | 43 | Args: 44 | juju: jubilant.Juju object. 45 | 46 | Returns: 47 | Table content. 48 | """ 49 | return juju.ssh( 50 | "ubuntu/0", "sudo -u postgres psql -P pager=off -d ubuntu -c 'SELECT * FROM release;'" 51 | ) 52 | 53 | 54 | def list_objects(s3, bucket) -> list[str]: 55 | """List all objects in a s3 bucket. 56 | 57 | Args: 58 | s3: S3 client. 59 | bucket: S3 bucket name. 60 | 61 | Returns: 62 | List of object names. 63 | """ 64 | paginator = s3.get_paginator("list_objects_v2") 65 | pages = paginator.paginate(Bucket=bucket) 66 | objects = [] 67 | for page in pages: 68 | for obj in page.get("Contents", []): 69 | objects.append(obj["Key"]) 70 | return objects 71 | 72 | 73 | def test_list_jobs(baculum): 74 | """ 75 | arrange: deploy and integrate backup charms. 
76 | act: list bacula jobs using baculum API 77 | assert: bacula-server should have the correct number of jobs 78 | """ 79 | assert len(baculum.list_job_names()) == 3 80 | 81 | 82 | def test_connect_client(baculum): 83 | """ 84 | arrange: deploy and integrate backup charms. 85 | act: get client status using baculum API 86 | assert: client should show normal status. 87 | """ 88 | clients = baculum.list_clients() 89 | assert len(clients) == 2 90 | for client in clients: 91 | assert "Daemon started" in baculum.get_client_status(client_id=client["clientid"]) 92 | 93 | 94 | def test_backup_restore_database(juju: jubilant.Juju, baculum, s3): 95 | """ 96 | arrange: deploy and integrate backup charms. 97 | arrange: run a backup and restore. 98 | assert: the backup and restore should work as intended. 99 | """ 100 | assert "Noble Numbat" in select_table(juju) 101 | assert len(list_objects(s3, "bacula")) == 0 102 | 103 | backup_job = [j for j in baculum.list_job_names() if j.endswith("-backup")][0] 104 | logger.info("run backup job: %s", backup_job) 105 | output = baculum.run_backup_job(name=backup_job) 106 | logger.info("run backup job output: %s", output) 107 | backup_job_run = wait_job_complete(baculum, backup_job) 108 | objects = list_objects(s3, "bacula") 109 | logger.info("s3 objects: %s", objects) 110 | assert len(objects) > 1 111 | 112 | output = juju.ssh("ubuntu/0", "sudo -u postgres psql -d ubuntu -c 'TRUNCATE release;'") 113 | logger.info("truncate database table 'release': %s", output) 114 | assert "Noble Numbat" not in select_table(juju) 115 | 116 | restore_job = [j for j in baculum.list_job_names() if j.endswith("-restore")][0] 117 | output = baculum.run_restore_job(name=restore_job, backup_job_id=backup_job_run["jobid"]) 118 | logger.info("run restore job output: %s", output) 119 | wait_job_complete(baculum, restore_job) 120 | assert "Noble Numbat" in select_table(juju) 121 | -------------------------------------------------------------------------------- 
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

"""Fixtures for charm tests."""

import unittest.mock

import pytest

import bacula_server_operator.src.bacula


@pytest.fixture(autouse=True)
def mock_bacula_services(monkeypatch):
    """Patch bacula services."""
    # Every daemon service gets the same treatment: never reload the real
    # process and always report its configuration as valid.
    for service in (
        bacula_server_operator.src.bacula.BaculaFdService,
        bacula_server_operator.src.bacula.BaculaSdService,
        bacula_server_operator.src.bacula.BaculaDirService,
    ):
        monkeypatch.setattr(service, "_reload", unittest.mock.MagicMock())
        monkeypatch.setattr(
            service, "_test_config", unittest.mock.MagicMock(return_value=True)
        )
    # Matching the original patch set, only _reload is stubbed for BaculumService.
    monkeypatch.setattr(
        bacula_server_operator.src.bacula.BaculumService,
        "_reload",
        unittest.mock.MagicMock(),
    )


@pytest.fixture(autouse=True)
def mock_bacula(monkeypatch):
    """Patch bacula install/initialization related functions."""
    # Pretend bacula is already installed and initialized; install/initialize
    # become no-op mocks that the tests can still assert against.
    for attribute, mock in (
        ("is_installed", unittest.mock.MagicMock(return_value=True)),
        ("install", unittest.mock.MagicMock()),
        ("is_initialized", unittest.mock.MagicMock(return_value=True)),
        ("initialize", unittest.mock.MagicMock()),
    ):
        monkeypatch.setattr(bacula_server_operator.src.bacula.Bacula, attribute, mock)


@pytest.fixture(autouse=True)
def baculum_api_htpasswd(monkeypatch):
    """Patch Bacula.update_baculum_api_user."""
    # In-memory stand-in for the API htpasswd file; tests inspect this dict.
    htpasswd = {}

    def update_user(_, username: str, password: str) -> None:
        """Mock update_baculum_api_user function.

        Args:
            username: username
            password: password
        """
        htpasswd[username] = password

    monkeypatch.setattr(
        bacula_server_operator.src.bacula.Bacula,
        "update_baculum_api_user",
        update_user,
    )

    return htpasswd


@pytest.fixture(autouse=True)
def baculum_web_htpasswd(monkeypatch):
    """Patch Bacula.update_baculum_web_user."""
    # In-memory stand-in for the web htpasswd file; tests inspect this dict.
    htpasswd = {}

    def update_user(_, username: str, password: str) -> None:
        """Mock update_baculum_web_user function.

        Args:
            username: username
            password: password
        """
        htpasswd[username] = password

    monkeypatch.setattr(
        bacula_server_operator.src.bacula.Bacula,
        "update_baculum_web_user",
        update_user,
    )

    return htpasswd


@pytest.fixture(autouse=True)
def patch_bacula_snap_path(monkeypatch, tmp_path):
    """Patch charmed-bacula-server snap directory path."""
    monkeypatch.setattr(
        bacula_server_operator.src.bacula,
        "BACULA_SERVER_SNAP_COMMON",
        tmp_path,
    )

    # Pre-create the directory layout the charm expects inside the snap.
    for path in [
        "opt/bacula/etc",
        "usr/share/baculum/htdocs/protected/API/Config",
        "usr/share/baculum/htdocs/protected/Web/Config",
    ]:
        (tmp_path / path).mkdir(parents=True, exist_ok=True)
3 | 4 | """backup-integrator charm unit tests.""" 5 | 6 | from pathlib import Path 7 | 8 | import ops.testing 9 | import pytest 10 | 11 | 12 | @pytest.mark.parametrize("config", [{}, {"run-before-backup": "#!/bin/bash"}]) 13 | def test_no_fileset_config(backup_integrator_charm, config: dict): 14 | """ 15 | arrange: none 16 | act: don't set the fileset charm config 17 | assert: the unit should be in the blocked status 18 | """ 19 | ctx = ops.testing.Context(backup_integrator_charm) 20 | state_in = ops.testing.State( 21 | config=config, 22 | relations=[ 23 | ops.testing.SubordinateRelation(endpoint="juju-info", id=1), 24 | ops.testing.Relation(endpoint="backup", id=2), 25 | ], 26 | ) 27 | 28 | state_out = ctx.run(ctx.on.config_changed(), state_in) 29 | assert state_out.unit_status.name == "blocked" 30 | 31 | 32 | def test_no_backup_relation(backup_integrator_charm) -> None: 33 | """ 34 | arrange: none 35 | act: don't relate the backup relation 36 | assert: the unit should be in the waiting status 37 | """ 38 | ctx = ops.testing.Context(backup_integrator_charm) 39 | state_in = ops.testing.State( 40 | config={"fileset": "/var/backups"}, 41 | relations=[ops.testing.SubordinateRelation(endpoint="juju-info", id=1)], 42 | ) 43 | 44 | state_out = ctx.run(ctx.on.config_changed(), state_in) 45 | assert state_out.unit_status.name == "waiting" 46 | 47 | 48 | @pytest.mark.parametrize( 49 | "config", 50 | [ 51 | {"fileset": "/var/backups"}, 52 | {"fileset": "/var/backups/foo,/var/backups/bar"}, 53 | {"fileset": "/var/backups", "run-before-backup": "run-before-backup"}, 54 | { 55 | "fileset": "/var/backups", 56 | "run-before-backup": "run-before-backup", 57 | "run-after-backup": "run-after-backup", 58 | "run-before-restore": "run-before-restore", 59 | "run-after-restore": "run-after-restore", 60 | }, 61 | ], 62 | ) 63 | def test_update_backup_relation(backup_integrator_charm, config) -> None: 64 | """ 65 | arrange: integrate the charm with a backup provider 66 | act: set 
appropriate backup charm configuration 67 | assert: backup relation should be updated with the configuration value 68 | """ 69 | ctx = ops.testing.Context(backup_integrator_charm) 70 | relation = ops.testing.Relation(endpoint="backup", id=2) 71 | state_in = ops.testing.State( 72 | config=config, 73 | relations=[ops.testing.SubordinateRelation(endpoint="juju-info", id=1), relation], 74 | leader=True, 75 | ) 76 | 77 | state_out = ctx.run(ctx.on.config_changed(), state_in) 78 | relation_data = state_out.get_relation(relation.id).local_app_data 79 | 80 | assert state_out.unit_status.name == "active" 81 | assert relation_data["fileset"] == config.get("fileset") 82 | scripts = ["run-before-backup", "run-after-backup", "run-before-restore", "run-after-restore"] 83 | for script in scripts: 84 | if script not in config: 85 | assert script not in relation_data 86 | else: 87 | assert config[script] == Path(relation_data[script]).read_text(encoding="utf-8") 88 | 89 | 90 | @pytest.mark.parametrize("fileset", ["var/backups", "/var/backups,etc,/var/backups/foobar"]) 91 | def test_invalid_fileset(backup_integrator_charm, fileset: str) -> None: 92 | """ 93 | arrange: none 94 | act: set the fileset charm config with an invalid value 95 | assert: the unit should be in the blocked status 96 | """ 97 | ctx = ops.testing.Context(backup_integrator_charm) 98 | relation = ops.testing.Relation(endpoint="backup", id=2) 99 | state_in = ops.testing.State( 100 | config={"fileset": fileset}, 101 | relations=[ops.testing.SubordinateRelation(endpoint="juju-info", id=1), relation], 102 | leader=True, 103 | ) 104 | 105 | state_out = ctx.run(ctx.on.config_changed(), state_in) 106 | relation_data = state_out.get_relation(relation.id).local_app_data 107 | assert state_out.unit_status.name == "blocked" 108 | assert not relation_data 109 | -------------------------------------------------------------------------------- /tox.ini: 
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

[tox]
skipsdist=True
skip_missing_interpreters = True
envlist = lint, unit, static, coverage-report

[vars]
src_path = {toxinidir}/backup_integrator_operator/src {toxinidir}/bacula_fd_operator/src {toxinidir}/bacula_server_operator/src
tst_path = {toxinidir}/tests/
lib_path = {toxinidir}/backup_integrator_operator/lib/charms/backup_integrator/v0/backup.py
all_path = {[vars]src_path} {[vars]tst_path}

[testenv]
# Fix: PYTHONPATH previously contained a doubled "::" between the
# bacula_fd_operator and bacula_server_operator lib entries; an empty
# PYTHONPATH entry means "current working directory" and was never intended.
setenv =
    PYTHONPATH = {toxinidir}:{toxinidir}/backup_integrator_operator/lib:{toxinidir}/bacula_fd_operator/lib:{toxinidir}/bacula_server_operator/lib
    PYTHONBREAKPOINT=ipdb.set_trace
    PY_COLORS=1
passenv =
    PYTHONPATH
    CHARM_BUILD_DIR
    MODEL_SETTINGS

[testenv:fmt]
description = Apply coding style standards to code
deps =
    black
    isort
commands =
    isort {[vars]all_path}
    black {[vars]all_path}

[testenv:lint]
description = Check code against coding style standards
deps =
    black
    boto3
    ruff
    codespell
    flake8
    flake8-builtins
    flake8-copyright
    flake8-docstrings>=1.6.0
    flake8-docstrings-complete>=1.0.3
    flake8-test-docs>=1.0
    isort
    jubilant
    mypy
    ops[testing]
    pep8-naming
    pydocstyle>=2.10
    pylint
    pyproject-flake8
    pytest
    pytest-asyncio
    pytest-operator
    pytest-playwright
    requests
    types-PyYAML
    types-requests
    -r{toxinidir}/backup_integrator_operator/requirements.txt
    -r{toxinidir}/bacula_fd_operator/requirements.txt
    -r{toxinidir}/bacula_server_operator/requirements.txt
    psycopg2-binary
commands =
    pydocstyle {[vars]src_path}
    # uncomment the following line if this charm owns a lib
    # codespell {[vars]lib_path}
    codespell {toxinidir} --skip {toxinidir}/.git --skip {toxinidir}/.tox \
      --skip {toxinidir}/build --skip {toxinidir}/lib --skip {toxinidir}/venv \
      --skip {toxinidir}/.mypy_cache --skip {toxinidir}/icon.svg
    # pflake8 wrapper supports config from pyproject.toml
    pflake8 {[vars]all_path} --ignore=W503
    isort --check-only --diff {[vars]all_path}
    black --check --diff {[vars]all_path}
    mypy {[vars]src_path}
    pylint {[vars]all_path}

[testenv:unit]
description = Run unit tests
deps =
    coverage[toml]
    pytest
    ops[testing]
    -r{toxinidir}/backup_integrator_operator/requirements.txt
    -r{toxinidir}/bacula_fd_operator/requirements.txt
    -r{toxinidir}/bacula_server_operator/requirements.txt
    psycopg2-binary
commands =
    coverage run --omit="*/lib/**" \
      -m pytest --ignore={[vars]tst_path}integration -v --tb native -s {posargs}
    coverage report

[testenv:coverage-report]
description = Create test coverage report
deps =
    coverage[toml]
    pytest
    -r{toxinidir}/backup_integrator_operator/requirements.txt
    -r{toxinidir}/bacula_fd_operator/requirements.txt
    -r{toxinidir}/bacula_server_operator/requirements.txt
    psycopg2-binary
commands =
    coverage report

[testenv:static]
description = Run static analysis tests
deps =
    bandit[toml]
    -r{toxinidir}/backup_integrator_operator/requirements.txt
    -r{toxinidir}/bacula_fd_operator/requirements.txt
    -r{toxinidir}/bacula_server_operator/requirements.txt
    psycopg2-binary
commands =
    bandit -c {toxinidir}/pyproject.toml -r {[vars]src_path} {[vars]tst_path}

[testenv:integration]
description = Run integration tests
deps =
    allure-pytest>=2.8.18
    boto3
    git+https://github.com/canonical/data-platform-workflows@v24.0.0\#subdirectory=python/pytest_plugins/allure_pytest_collection_report
jubilant 125 | pytest 126 | pytest-jubilant 127 | requests 128 | commands = 129 | pytest -v --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs} 130 | -------------------------------------------------------------------------------- /bacula_server_operator/src/templates/bacula-dir.conf.j2: -------------------------------------------------------------------------------- 1 | Director { 2 | Name = charm-bacula-dir 3 | DIRport = 9101 4 | QueryFile = "/opt/bacula/scripts/query.sql" 5 | WorkingDirectory = "/opt/bacula/working" 6 | PidDirectory = "/opt/bacula/working" 7 | Maximum Concurrent Jobs = 20 8 | Password = {{ bacula.dir_password }} 9 | Messages = charm-daemon-messages 10 | DirAddress = 127.0.0.1 11 | } 12 | 13 | Storage { 14 | Name = charm-s3-storage 15 | Address = {{ bacula.dir_address }} 16 | SDPort = 9103 17 | Password = {{ bacula.sd_password }} 18 | Device = charm-s3-storage 19 | Media Type = CloudType 20 | } 21 | 22 | Pool { 23 | Name = charm-cloud-pool 24 | Pool Type = Backup 25 | Recycle = yes 26 | AutoPrune = yes 27 | Maximum Volume Jobs = 1 28 | Volume Retention = {{ bacula.volume_retention }} 29 | Label Format = "{{ name }}-vol-${Year}${Month:p/2/0/r}${Day:p/2/0/r}-${Job}-${NumVols}" 30 | } 31 | 32 | Schedule { 33 | Name = charm-cloud-upload-schedule 34 | Run = daily at 01:00 35 | } 36 | 37 | Job { 38 | Name = charm-cloud-upload 39 | Type = Admin 40 | Client = charm-bacula-fd 41 | Schedule = charm-cloud-upload-schedule 42 | RunScript { 43 | RunsOnClient = No 44 | RunsWhen = Always 45 | Console = "cloud upload storage=charm-s3-storage allpools" 46 | Console = "cloud truncate storage=charm-s3-storage allpools" 47 | } 48 | Storage = charm-s3-storage 49 | Messages = charm-daemon-messages 50 | Pool = charm-cloud-pool 51 | Fileset = charm-empty-fileset 52 | } 53 | 54 | FileSet { 55 | Name = charm-empty-fileset 56 | Include { File = /dev/null } 57 | } 58 | 59 | Client { 60 | Name = charm-bacula-fd 61 | Address = 127.0.0.1 62 | FDPort = 
9102 63 | Catalog = charm-catalog 64 | Password = "{{ bacula.fd_password }}" 65 | File Retention = {{ bacula.file_retention }} 66 | Job Retention = {{ bacula.job_retention }} 67 | } 68 | 69 | Catalog { 70 | Name = charm-catalog 71 | dbname = "{{ db.name }}" 72 | DB Address = "{{ db.host }}" 73 | DB Port = {{ db.port }} 74 | dbuser = "{{ db.username }}" 75 | dbpassword = "{{ db.password }}" 76 | } 77 | 78 | Messages { 79 | Name = charm-daemon-messages 80 | # mailcommand = "/sbin/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula daemon message\" %r" 81 | # mail = root = all, !skipped 82 | console = all, !skipped, !saved 83 | append = "/opt/bacula/log/bacula.log" = all, !skipped 84 | } 85 | 86 | {% for relation_fd in relation_fd_list %} 87 | FileSet { 88 | Name = "{{ relation_fd.name }}-fileset" 89 | Include { 90 | Options { 91 | Signature = SHA256 92 | Compression = ZSTD 93 | } 94 | {% for file in relation_fd.fileset %} 95 | File = {{ file }} 96 | {% endfor %} 97 | } 98 | } 99 | 100 | Client { 101 | Name = "{{ relation_fd.name }}-fd" 102 | Address = "{{ relation_fd.host }}" 103 | FDPort = {{ relation_fd.port }} 104 | Catalog = charm-catalog 105 | Password = "{{ relation_fd.password }}" 106 | File Retention = {{ bacula.file_retention }} 107 | Job Retention = {{ bacula.job_retention }} 108 | AutoPrune = yes 109 | } 110 | 111 | {% if relation_fd.schedule %} 112 | Schedule { 113 | Name = "{{ relation_fd.name }}-schedule" 114 | {% for schedule in relation_fd.schedule %} 115 | Run = {{ schedule }} 116 | {% endfor %} 117 | } 118 | {% endif %} 119 | 120 | Job { 121 | Name = "{{ relation_fd.name }}-backup" 122 | Type = Backup 123 | Client = "{{ relation_fd.name }}-fd" 124 | FileSet = "{{ relation_fd.name }}-fileset" 125 | Storage = charm-s3-storage 126 | Messages = charm-daemon-messages 127 | Pool = charm-cloud-pool 128 | {% if relation_fd.schedule %} 129 | Schedule = charm-cloud-upload-schedule 130 | {% endif %} 131 | 132 | RunScript { 133 | Command = "{{ 
Basically, the charm passes its configuration values
to the backup relation.
For the 14 | `run-*` configurations, the backup integrator writes the configuration 15 | content to a local file and passes the filename to the backup relation. 16 | 17 | ## High-level overview of backup charms deployment 18 | 19 | Here's a typical backup charm suite deployment in the machine charm 20 | environment. It deploys the bacula-server charm as the backup server, 21 | the bacula-fd charm as the backup agent, and the backup-integrator charm 22 | as the backup relation provider. 23 | 24 | The backup-integrator charm requests backups from the bacula-fd charm on 25 | behalf of the backup source charm `source`. The bacula-fd charm then 26 | integrates with the bacula-server charm to submit the backup files. The 27 | bacula-server charm is integrated with the PostgreSQL charm for storing 28 | backup metadata and with the s3-integrator charm to use S3 storage as 29 | the destination for backup files. 30 | 31 | ```{mermaid} 32 | C4Context 33 | title Container diagram for backup charms 34 | 35 | System_Boundary(backup server, "Backup Server Model") { 36 | Container(s3-integrator, "S3 Integrator", "", "Provides backup destination") 37 | Container(bacula-server, "Bacula Server", "", "Provides backup server") 38 | Container(postgresql, "PostgreSQL", "", "Stores backup metadata") 39 | Rel(s3-integrator, bacula-server, "") 40 | Rel(postgresql, bacula-server, "") 41 | } 42 | 43 | System_Boundary(backup source, "Backup Source Model") { 44 | Container_Boundary(backup-source, "Backup Source") { 45 | Component(source, "", "Backup source principal charm") 46 | Component(backup-integrator, "", "Backup relation integrator") 47 | Component(bacula-fd, "", "Bacula file daemon") 48 | } 49 | Rel(source, backup-integrator, "") 50 | Rel(backup-integrator, bacula-fd, "") 51 | } 52 | Rel(bacula-fd, bacula-server, "") 53 | ``` 54 | 55 | ## Juju events 56 | 57 | For this charm, the following Juju events are observed: 58 | 59 | 1. 
{ref}`backup-relation-changed `, 60 | {ref}`backup-relation-created `: 61 | Monitors changes and creation of the `backup` relation to update 62 | relation data when needed. 63 | 2. {ref}`config-changed `: 64 | Monitors changes to the backup integrator configuration to update the 65 | relation data with the latest configuration values. 66 | 3. `leader-elected`, `leader-settings-changed`: 67 | Monitors changes in the charm’s leadership. Since only the leader 68 | unit can modify application relation data, triggering a relation data 69 | update when leadership changes ensures the relation is updated 70 | regardless of leader status during relation establishment. 71 | 4. {ref}`upgrade-charm `: 72 | Triggered when the charm has been upgraded. This ensures that the new 73 | version of the backup integrator charm can update the relation data 74 | if needed. 75 | 76 | ```{note} 77 | See more in the Juju docs: {ref}`juju:hook` 78 | ``` 79 | 80 | ## Charm code overview 81 | 82 | The `src/__main__.py` file is the default entry point for the backup 83 | integrator charm; it creates an instance of the `BackupIntegratorCharm` 84 | class (imported from the `charm` module), which inherits from 85 | `ops.CharmBase`. `ops.CharmBase` is the base class from which all charms 86 | are derived, provided 87 | by [Ops](https://ops.readthedocs.io/en/latest/index.html) (the Python 88 | framework for developing charms). 89 | 90 | ```{note} 91 | See more in the Juju docs: {ref}`juju:charm` 92 | ``` 93 | 94 | The `__init__` method of `BackupIntegratorCharm` ensures that the charm 95 | observes and handles all events relevant to its operation. 96 | 97 | For example, when a configuration is changed via the CLI: 98 | 99 | 1. The user runs the configuration command: 100 | 101 | ```bash 102 | juju config backup-integrator fileset=/var/backups 103 | ``` 104 | 105 | 2. A `config-changed` event is emitted. 106 | 3. 
In the `__init__` method, the handler for this event is defined as 107 | follows: 108 | 109 | ```python 110 | self.framework.observe(self.on.config_changed, self._reconcile) 111 | ``` 112 | 113 | 4. The `_reconcile` method, in turn, takes the necessary actions, such 114 | as waiting for the backup relation(s) and updating the backup 115 | relation data. 116 | -------------------------------------------------------------------------------- /docs/.sphinx/get_vale_conf.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import os 4 | import shutil 5 | import subprocess 6 | import tempfile 7 | import sys 8 | import logging 9 | import argparse 10 | 11 | # Configure logging 12 | logging.basicConfig( 13 | level=logging.INFO, 14 | format='%(asctime)s - %(levelname)s - %(message)s', 15 | datefmt='%Y-%m-%d %H:%M:%S' 16 | ) 17 | 18 | SPHINX_DIR = os.path.join(os.getcwd(), ".sphinx") 19 | 20 | GITHUB_REPO = "canonical/documentation-style-guide" 21 | GITHUB_CLONE_URL = f"https://github.com/{GITHUB_REPO}.git" 22 | 23 | # Source paths to copy from repo 24 | VALE_FILE_LIST = [ 25 | "styles/Canonical", 26 | "styles/config/vocabularies/Canonical", 27 | "styles/config/dictionaries", 28 | "vale.ini" 29 | ] 30 | 31 | def clone_repo_and_copy_paths(file_source_dest, overwrite=False): 32 | """ 33 | Clone the repository to a temporary directory and copy required files 34 | 35 | Args: 36 | file_source_dest: dictionary of file paths to copy from the repository, 37 | and their destination paths 38 | overwrite: boolean flag to overwrite existing files in the destination 39 | 40 | Returns: 41 | bool: True if all files were copied successfully, False otherwise 42 | """ 43 | 44 | if not file_source_dest: 45 | logging.error("No files to copy") 46 | return False 47 | 48 | # Create temporary directory on disk for cloning 49 | temp_dir = tempfile.mkdtemp() 50 | logging.info("Cloning repository <%s> to temporary directory: %s", 
GITHUB_REPO, temp_dir) 51 | clone_cmd = ["git", "clone", "--depth", "1", GITHUB_CLONE_URL, temp_dir] 52 | 53 | try: 54 | result = subprocess.run( 55 | clone_cmd, 56 | capture_output=True, 57 | text=True, 58 | check=True 59 | ) 60 | logging.debug("Git clone output: %s", result.stdout) 61 | except subprocess.CalledProcessError as e: 62 | logging.error("Git clone failed: %s", e.stderr) 63 | return False 64 | 65 | # Copy files from the cloned repository to the destination paths 66 | is_copy_success = True 67 | for source, dest in file_source_dest.items(): 68 | source_path = os.path.join(temp_dir, source) 69 | 70 | if not os.path.exists(source_path): 71 | is_copy_success = False 72 | logging.error("Source path not found: %s", source_path) 73 | continue 74 | 75 | if not copy_files_to_path(source_path, dest, overwrite): 76 | is_copy_success = False 77 | logging.error("Failed to copy %s to %s", source_path, dest) 78 | 79 | # Clean up temporary directory 80 | logging.info("Cleaning up temporary directory: %s", temp_dir) 81 | shutil.rmtree(temp_dir) 82 | 83 | return is_copy_success 84 | 85 | def copy_files_to_path(source_path, dest_path, overwrite=False): 86 | """ 87 | Copy a file or directory from source to destination 88 | 89 | Args: 90 | source_path: Path to the source file or directory 91 | dest_path: Path to the destination 92 | overwrite: Boolean flag to overwrite existing files in the destination 93 | 94 | Returns: 95 | bool: True if copy was successful, False otherwise 96 | """ 97 | # Skip if source file doesn't exist 98 | if not os.path.exists(source_path): 99 | logging.warning("Source path not found: %s", source_path) 100 | return False 101 | 102 | logging.info("Copying %s to %s", source_path, dest_path) 103 | # Handle existing files 104 | if os.path.exists(dest_path): 105 | if overwrite: 106 | logging.info(" Destination exists, overwriting: %s", dest_path) 107 | if os.path.isdir(dest_path): 108 | shutil.rmtree(dest_path) 109 | else: 110 | os.remove(dest_path) 111 
| else: 112 | logging.info(" Destination exists, skip copying (use overwrite=True to replace): %s", 113 | dest_path) 114 | return True # Skip copying 115 | 116 | # Copy the source to destination 117 | try: 118 | if os.path.isdir(source_path): 119 | # entire directory 120 | shutil.copytree(source_path, dest_path) 121 | else: 122 | # individual files 123 | shutil.copy2(source_path, dest_path) 124 | return True 125 | except (shutil.Error, OSError) as e: 126 | logging.error("Copy failed: %s", e) 127 | return False 128 | 129 | def parse_arguments(): 130 | parser = argparse.ArgumentParser(description="Download Vale configuration files") 131 | parser.add_argument("--no-overwrite", action="store_true", help="Don't overwrite existing files") 132 | return parser.parse_args() 133 | 134 | def main(): 135 | # Define local directory paths 136 | vale_files_dict = {file: os.path.join(SPHINX_DIR, file) for file in VALE_FILE_LIST} 137 | 138 | # Parse command line arguments, default to overwrite_enabled = True 139 | overwrite_enabled = not parse_arguments().no_overwrite 140 | 141 | # Download into /tmp through git clone 142 | if not clone_repo_and_copy_paths(vale_files_dict, overwrite=overwrite_enabled): 143 | logging.error("Failed to download files from repository") 144 | return 1 145 | 146 | logging.info("Download complete") 147 | return 0 148 | 149 | 150 | if __name__ == "__main__": 151 | sys.exit(main()) # Keep return code 152 | -------------------------------------------------------------------------------- /docs/reference/bacula-fd-charm-architecture.md: -------------------------------------------------------------------------------- 1 | (explanation_bacula_fd_charm_architecture)= 2 | 3 | 4 | 5 | # Charm architecture: bacula-fd 6 | 7 | 8 | 9 | The bacula-fd charm is a subordinate charm that installs and 10 | manages the Bacula file daemon on target machines. Bacula-fd (Bacula 11 | File Daemon) is the backup agent that needs to be installed on machines 12 | requiring backups. 
The bacula-fd handles many important duties, such as 13 | uploading backup files and downloading restored backup files. 14 | 15 | ## High-level overview of backup charms deployment 16 | 17 | Here's a typical backup charm suite deployment in the machine charm 18 | environment. This deployment shows the bacula-server charm as the backup server, 19 | the bacula-fd charm as the backup agent, and the backup-integrator charm 20 | as the backup relation provider. 21 | 22 | The backup-integrator charm requests backups from the bacula-fd charm on 23 | behalf of the backup source charm `source`. The bacula-fd charm then 24 | integrates with the bacula-server charm to submit the backup files. The 25 | bacula-server charm is integrated with the PostgreSQL charm for storing 26 | backup metadata and with the s3-integrator charm to use S3 storage as 27 | the destination for backup files. 28 | 29 | ```{mermaid} 30 | C4Context 31 | title Container diagram for backup charms 32 | 33 | System_Boundary(backup server, "Backup Server Model") { 34 | Container(s3-integrator, "S3 Integrator", "", "Provide backup destination") 35 | Container(bacula-server, "Bacula Server", "", "Backup server") 36 | Container(postgresql, "PostgreSQL", "", "Store backup metadata") 37 | Rel(s3-integrator, bacula-server, "") 38 | Rel(postgresql, bacula-server, "") 39 | } 40 | 41 | System_Boundary(backup source, "Backup Source Model") { 42 | Container_Boundary(backup-source, "Backup Source") { 43 | Component(source, "", "Backup source principal charm") 44 | Component(backup-integrator, "", "Backup relation integrator") 45 | Component(bacula-fd, "", "Bacula file daemon") 46 | } 47 | Rel(source, backup-integrator, "") 48 | Rel(backup-integrator, bacula-fd, "") 49 | } 50 | Rel(bacula-fd, bacula-server, "") 51 | ``` 52 | 53 | ## Juju events 54 | 55 | 1. {ref}`config-changed `: 56 | Monitors changes to the bacula-fd configuration to update the 57 | relation data with the latest configuration values. 58 | 2.
`leader-elected`, `leader-settings-changed`: 59 | Monitors changes in the charm’s leadership. Since only the leader 60 | unit can modify application relation data, triggering a relation data 61 | update when leadership changes ensures the relation is updated 62 | regardless of leader status during relation establishment. 63 | 3. {ref}`upgrade-charm `: 64 | Triggered when the charm has been upgraded. This ensures that the new 65 | version of the bacula-fd charm can update the relation data 66 | if needed. 67 | 4. {ref}`bacula-peer-relation-changed `, 68 | {ref}`bacula-peer-relation-created `, 69 | {ref}`bacula-peer-relation-departed `: 70 | Monitors changes, creation, and removal of the `bacula-peer` relation 71 | to update relation data when needed. 72 | 5. {ref}`bacula-dir-relation-changed `, 73 | {ref}`bacula-dir-relation-broken `: 74 | Monitors changes, creation, and removal of the `bacula-dir` relation 75 | to update relation data when needed. 76 | 6. {ref}`backup-relation-changed `, 77 | {ref}`backup-relation-broken `, 78 | {ref}`backup-relation-departed `: 79 | Monitors changes, creation, and removal of the `backup` relation to 80 | update relation data when needed. 81 | 7. {ref}`secret-changed `: 82 | Monitors changes in secrets inside relations to update configuration 83 | when needed. 84 | 85 | ```{note} 86 | See more in the Juju docs: {ref}`juju:hook` 87 | ``` 88 | 89 | ## Charm code overview 90 | 91 | The `src/__main__.py` file is the default entry point for the bacula-fd 92 | charm; it creates an instance of the `BaculaFdCharm` 93 | class (imported from the `charm` module), which inherits from 94 | `ops.CharmBase`. `ops.CharmBase` is the base class from which all charms 95 | are derived, provided 96 | by [Ops](https://ops.readthedocs.io/en/latest/index.html) (the Python 97 | framework for developing charms).
98 | 99 | ```{note} 100 | See more in the Juju docs: {ref}`juju:charm` 101 | ``` 102 | 103 | The `__init__` method of `BaculaFdCharm` ensures that the charm observes 104 | and handles all events relevant to its operation. 105 | 106 | For example, when a configuration is changed via the CLI: 107 | 108 | 1. The user runs the configuration command: 109 | 110 | ```bash 111 | juju config bacula-fd port=8888 112 | ``` 113 | 114 | 2. A `config-changed` event is emitted. 115 | 3. In the `__init__` method, the handler for this event is defined as 116 | follows: 117 | 118 | ```python 119 | self.framework.observe(self.on.config_changed, self._reconcile_event) 120 | ``` 121 | 122 | 4. The `_reconcile_event` method, in turn, takes the necessary actions, 123 | such as waiting for the backup relation(s) and updating the backup 124 | relation data. 125 | -------------------------------------------------------------------------------- /tests/integration/baculum.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 3 | 4 | """A simple Baculum API client for testing.""" 5 | 6 | import typing 7 | 8 | import requests 9 | 10 | 11 | class BaculumApiError(Exception): 12 | """Baculum API error.""" 13 | 14 | def __init__(self, message: str, output: str, errno: int) -> None: 15 | """Initialize BaculumApiError. 16 | 17 | Args: 18 | message: error message. 19 | output: Baculum API output. 20 | errno: Baculum API error number. 21 | """ 22 | super().__init__(f"{message}: {output} (error: {errno})") 23 | self.output = output 24 | self.errno = errno 25 | 26 | 27 | class Baculum: 28 | """Baculum API client.""" 29 | 30 | def __init__(self, base_url: str, username: str, password: str, timeout: int = 60): 31 | """Initialize Baculum API client. 32 | 33 | Args: 34 | base_url: Baculum API base URL. 35 | username: Baculum API username. 36 | password: Baculum API password.
37 | timeout: Baculum API request timeout. 38 | """ 39 | self._base = base_url.rstrip("/") 40 | self._session = requests.Session() 41 | self._session.timeout = timeout 42 | self._session.auth = (username, password) 43 | self._session.headers.update({"Content-Type": "application/json"}) 44 | 45 | def _extract_output(self, operation: str, response: requests.Response) -> typing.Any: 46 | """Extract output from Baculum API response. 47 | 48 | Args: 49 | operation: Baculum API operation name. 50 | response: Baculum API response object. 51 | """ 52 | response.raise_for_status() 53 | result = response.json() 54 | output, error = result["output"], result["error"] 55 | if error: 56 | raise BaculumApiError(f"failed to {operation}", output, error) 57 | return output 58 | 59 | def run_backup_job(self, name: str) -> str: 60 | """Run a full backup job. 61 | 62 | Args: 63 | name: backup job name. 64 | 65 | Returns: 66 | Backup job output. 67 | """ 68 | job = self.get_job(job=name) 69 | payload = { 70 | "name": name, 71 | "level": "F", # Full backup 72 | "client": job["client"], 73 | "storage": job["storage"], 74 | "pool": job["pool"], 75 | "fileset": job["fileset"], 76 | } 77 | response = self._session.post(f"{self._base}/jobs/run", json=payload) 78 | return "\n".join(self._extract_output(f"run backup '{name}'", response)) 79 | 80 | def run_restore_job( 81 | self, 82 | name: str, 83 | backup_job_id: int, 84 | ) -> str: 85 | """Run a restore job. 86 | 87 | Args: 88 | name: restore job name. 89 | backup_job_id: backup job run ID to restore. 90 | 91 | Returns: 92 | Baculum API output. 
93 | """ 94 | job = self.get_job(job=name) 95 | payload = { 96 | "id": backup_job_id, 97 | "restorejob": name, 98 | "client": job["client"], 99 | "fileset": job["fileset"], 100 | "where": "/", 101 | "replace": "always", 102 | "full": True, 103 | } 104 | response = self._session.post(f"{self._base}/jobs/restore", json=payload) 105 | return "\n".join( 106 | self._extract_output(f"restore '{name}' from backup {backup_job_id}", response) 107 | ) 108 | 109 | def list_job_runs(self, name: str) -> list[dict]: 110 | """List job runs. 111 | 112 | Args: 113 | name: job name. 114 | 115 | Returns: 116 | A list of job run objects. 117 | """ 118 | job = self.get_job(job=name) 119 | params = { 120 | "name": name, 121 | "client": job["client"], 122 | } 123 | response = self._session.get(f"{self._base}/jobs", params=params) 124 | return self._extract_output(f"list jobs '{name}'", response) 125 | 126 | def list_job_names(self) -> list[str]: 127 | """List job names. 128 | 129 | Returns: 130 | A list of job names. 131 | """ 132 | response = self._session.get(f"{self._base}/jobs/resnames") 133 | result = self._extract_output("list job names", response) 134 | return list(result.values())[0] 135 | 136 | def get_job(self, job: str) -> dict: 137 | """Get job details. 138 | 139 | Args: 140 | job: job name. 141 | 142 | Returns: 143 | Job details object. 144 | """ 145 | params = {"name": job, "output": "json"} 146 | r = self._session.get(f"{self._base}/jobs/show", params=params) 147 | return self._extract_output(f"show job '{job}' detail", r) 148 | 149 | def list_clients(self) -> list[dict]: 150 | """List clients. 151 | 152 | Returns: 153 | A list of client objects. 154 | """ 155 | response = self._session.get(f"{self._base}/clients") 156 | return self._extract_output("list clients", response) 157 | 158 | def get_client_status(self, client_id: int) -> str: 159 | """Get client status. 160 | 161 | Args: 162 | client_id: client ID. 163 | 164 | Returns: 165 | Client status. 
166 | """ 167 | response = self._session.get(f"{self._base}/clients/{client_id}/status") 168 | return "\n".join(self._extract_output(f"get client (id: {client_id}) status", response)) 169 | -------------------------------------------------------------------------------- /bacula_fd_operator/lib/charms/operator_libs_linux/v0/systemd.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Canonical Ltd. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | """Abstractions for stopping, starting and managing system services via systemd. 17 | 18 | This library assumes that your charm is running on a platform that uses systemd. E.g., 19 | Centos 7 or later, Ubuntu Xenial (16.04) or later. 20 | 21 | For the most part, we transparently provide an interface to a commonly used selection of 22 | systemd commands, with a few shortcuts baked in. For example, service_pause and 23 | service_resume with run the mask/unmask and enable/disable invocations. 
24 | 25 | Example usage: 26 | ```python 27 | from charms.operator_libs_linux.v0.systemd import service_running, service_reload 28 | 29 | # Start a service 30 | if not service_running("mysql"): 31 | success = service_start("mysql") 32 | 33 | # Attempt to reload a service, restarting if necessary 34 | success = service_reload("nginx", restart_on_failure=True) 35 | ``` 36 | 37 | """ 38 | 39 | import logging 40 | import subprocess 41 | 42 | __all__ = [ # Don't export `_systemctl`. (It's not the intended way of using this lib.) 43 | "service_pause", 44 | "service_reload", 45 | "service_restart", 46 | "service_resume", 47 | "service_running", 48 | "service_start", 49 | "service_stop", 50 | "daemon_reload", 51 | ] 52 | 53 | logger = logging.getLogger(__name__) 54 | 55 | # The unique Charmhub library identifier, never change it 56 | LIBID = "045b0d179f6b4514a8bb9b48aee9ebaf" 57 | 58 | # Increment this major API version when introducing breaking changes 59 | LIBAPI = 0 60 | 61 | # Increment this PATCH version before using `charmcraft publish-lib` or reset 62 | # to 0 if you are raising the major API version 63 | LIBPATCH = 3 64 | 65 | 66 | def _popen_kwargs(): 67 | return dict( 68 | stdout=subprocess.PIPE, 69 | stderr=subprocess.STDOUT, 70 | bufsize=1, 71 | universal_newlines=True, 72 | encoding="utf-8", 73 | ) 74 | 75 | 76 | def _systemctl( 77 | sub_cmd: str, service_name: str = None, now: bool = None, quiet: bool = None 78 | ) -> bool: 79 | """Control a system service. 80 | 81 | Args: 82 | sub_cmd: the systemctl subcommand to issue 83 | service_name: the name of the service to perform the action on 84 | now: passes the --now flag to the shell invocation. 85 | quiet: passes the --quiet flag to the shell invocation. 
86 | """ 87 | cmd = ["systemctl", sub_cmd] 88 | 89 | if service_name is not None: 90 | cmd.append(service_name) 91 | if now is not None: 92 | cmd.append("--now") 93 | if quiet is not None: 94 | cmd.append("--quiet") 95 | if sub_cmd != "is-active": 96 | logger.debug("Attempting to {} '{}' with command {}.".format(cmd, service_name, cmd)) 97 | else: 98 | logger.debug("Checking if '{}' is active".format(service_name)) 99 | 100 | proc = subprocess.Popen(cmd, **_popen_kwargs()) 101 | for line in iter(proc.stdout.readline, ""): 102 | logger.debug(line) 103 | 104 | proc.wait() 105 | return proc.returncode == 0 106 | 107 | 108 | def service_running(service_name: str) -> bool: 109 | """Determine whether a system service is running. 110 | 111 | Args: 112 | service_name: the name of the service 113 | """ 114 | return _systemctl("is-active", service_name, quiet=True) 115 | 116 | 117 | def service_start(service_name: str) -> bool: 118 | """Start a system service. 119 | 120 | Args: 121 | service_name: the name of the service to stop 122 | """ 123 | return _systemctl("start", service_name) 124 | 125 | 126 | def service_stop(service_name: str) -> bool: 127 | """Stop a system service. 128 | 129 | Args: 130 | service_name: the name of the service to stop 131 | """ 132 | return _systemctl("stop", service_name) 133 | 134 | 135 | def service_restart(service_name: str) -> bool: 136 | """Restart a system service. 137 | 138 | Args: 139 | service_name: the name of the service to restart 140 | """ 141 | return _systemctl("restart", service_name) 142 | 143 | 144 | def service_reload(service_name: str, restart_on_failure: bool = False) -> bool: 145 | """Reload a system service, optionally falling back to restart if reload fails. 146 | 147 | Args: 148 | service_name: the name of the service to reload 149 | restart_on_failure: boolean indicating whether to fallback to a restart if the 150 | reload fails. 
151 | """ 152 | service_result = _systemctl("reload", service_name) 153 | if not service_result and restart_on_failure: 154 | service_result = _systemctl("restart", service_name) 155 | return service_result 156 | 157 | 158 | def service_pause(service_name: str) -> bool: 159 | """Pause a system service. 160 | 161 | Stop it, and prevent it from starting again at boot. 162 | 163 | Args: 164 | service_name: the name of the service to pause 165 | """ 166 | _systemctl("disable", service_name, now=True) 167 | _systemctl("mask", service_name) 168 | return not service_running(service_name) 169 | 170 | 171 | def service_resume(service_name: str) -> bool: 172 | """Resume a system service. 173 | 174 | Re-enable starting again at boot. Start the service. 175 | 176 | Args: 177 | service_name: the name of the service to resume 178 | """ 179 | _systemctl("unmask", service_name) 180 | _systemctl("enable", service_name, now=True) 181 | return service_running(service_name) 182 | 183 | 184 | def daemon_reload() -> bool: 185 | """Reload systemd manager configuration.""" 186 | return _systemctl("daemon-reload") 187 | -------------------------------------------------------------------------------- /tests/unit/bacula_fd/test_charm.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
3 | 4 | """bacula-fd unit tests.""" 5 | 6 | import textwrap 7 | 8 | import ops.testing 9 | import pytest 10 | 11 | import bacula_fd_operator.src.bacula 12 | 13 | 14 | def test_no_backup_relation(bacula_fd_charm) -> None: 15 | """ 16 | arrange: none 17 | act: run config-changed event hook without backup relation 18 | assert: the charm should be in waiting state 19 | """ 20 | ctx = ops.testing.Context(bacula_fd_charm) 21 | state_in = ops.testing.State( 22 | leader=True, 23 | relations=[ops.testing.PeerRelation(endpoint="bacula-peer")], 24 | ) 25 | state_out = ctx.run(ctx.on.config_changed(), state_in) 26 | assert state_out.unit_status.name == "waiting" 27 | assert state_out.unit_status.message == "waiting for backup relation" 28 | 29 | 30 | def test_no_bacula_dir_relation(bacula_fd_charm) -> None: 31 | """ 32 | arrange: integrate the bacula-fd charm with a backup relation 33 | act: run config-changed event hook without bacula-dir relation 34 | assert: the charm should be in waiting state 35 | """ 36 | ctx = ops.testing.Context(bacula_fd_charm) 37 | state_in = ops.testing.State( 38 | leader=True, 39 | relations=[ 40 | ops.testing.PeerRelation(endpoint="bacula-peer"), 41 | ops.testing.Relation(endpoint="backup", remote_app_data={"fileset": "/var/backups"}), 42 | ], 43 | ) 44 | 45 | state_out = ctx.run(ctx.on.config_changed(), state_in) 46 | assert state_out.unit_status.name == "waiting" 47 | assert state_out.unit_status.message == "waiting for bacula-dir relation" 48 | 49 | 50 | def test_port_config(bacula_fd_charm) -> None: 51 | """ 52 | arrange: integrate the bacula-fd charm with a backup relation and a bacula-dir relation. 53 | act: set the port configuration. 54 | assert: port in bacula-fd.conf and relation should be updated. 
55 | """ 56 | ctx = ops.testing.Context(bacula_fd_charm) 57 | secret = ops.testing.Secret(tracked_content={"password": "foobar"}) 58 | state_in = ops.testing.State( 59 | leader=True, 60 | secrets=[secret], 61 | config={"port": 12345}, 62 | relations=[ 63 | ops.testing.PeerRelation(endpoint="bacula-peer"), 64 | ops.testing.Relation(endpoint="backup", remote_app_data={"fileset": "/var/backups"}), 65 | ops.testing.Relation( 66 | endpoint="bacula-dir", 67 | remote_app_data={"name": "bacula-dir", "password": secret.id}, 68 | id=99, 69 | ), 70 | ], 71 | ) 72 | 73 | state_out = ctx.run(ctx.on.config_changed(), state_in) 74 | assert "FDport = 12345" in bacula_fd_operator.src.bacula.read_config() 75 | assert state_out.get_relation(99).local_unit_data["port"] == "12345" 76 | 77 | 78 | @pytest.mark.parametrize( 79 | "schedule", ["", "Level=Full sun at 01:00,Level=Incremental mon-sat at 01:00"] 80 | ) 81 | def test_schedule_config(bacula_fd_charm, schedule) -> None: 82 | """ 83 | arrange: integrate the bacula-fd charm with a backup relation and a bacula-dir relation. 84 | act: set the schedule configuration. 85 | assert: schedule in relation should be updated. 
86 | """ 87 | ctx = ops.testing.Context(bacula_fd_charm) 88 | secret = ops.testing.Secret(tracked_content={"password": "foobar"}) 89 | backup_relation = ops.testing.Relation( 90 | endpoint="backup", remote_app_data={"fileset": "/var/backups"} 91 | ) 92 | state_in = ops.testing.State( 93 | leader=True, 94 | secrets=[secret], 95 | config={"schedule": schedule}, 96 | relations=[ 97 | ops.testing.PeerRelation(endpoint="bacula-peer"), 98 | backup_relation, 99 | ops.testing.Relation( 100 | endpoint="bacula-dir", 101 | local_unit_data={"schedule": "Level=Full sun at 01:00"}, 102 | remote_app_data={"name": "bacula-dir", "password": secret.id}, 103 | id=99, 104 | ), 105 | ], 106 | ) 107 | 108 | state_out = ctx.run(ctx.on.relation_changed(backup_relation), state_in) 109 | if schedule: 110 | assert state_out.get_relation(99).local_unit_data["schedule"] == schedule 111 | else: 112 | assert "schedule" not in state_out.get_relation(99).local_unit_data 113 | 114 | 115 | def test_bacula_fd_config(bacula_fd_charm) -> None: 116 | """ 117 | arrange: integrate the bacula-fd charm with a backup relation and a bacula-dir relation. 118 | act: run config-changed event hook. 119 | assert: the bacula-fd charm should write the correct bacula-fd configuration file. 
120 | """ 121 | ctx = ops.testing.Context(bacula_fd_charm) 122 | secret = ops.testing.Secret(tracked_content={"password": "foobar"}) 123 | state_in = ops.testing.State( 124 | model=ops.testing.Model(name="test-bacula", uuid="00000000-0000-0000-0000-000000000000"), 125 | leader=True, 126 | secrets=[secret], 127 | relations=[ 128 | ops.testing.PeerRelation(endpoint="bacula-peer"), 129 | ops.testing.Relation(endpoint="backup", remote_app_data={"fileset": "/var/backups"}), 130 | ops.testing.Relation( 131 | endpoint="bacula-dir", 132 | remote_app_data={"name": "bacula-dir", "password": secret.id}, 133 | ), 134 | ], 135 | ) 136 | 137 | state_out = ctx.run(ctx.on.config_changed(), state_in) 138 | assert state_out.unit_status.name == "active" 139 | assert ( 140 | bacula_fd_operator.src.bacula.read_config().strip() 141 | == textwrap.dedent( 142 | """\ 143 | Director { 144 | Name = bacula-dir 145 | Password = "foobar" 146 | } 147 | 148 | FileDaemon { 149 | Name = relation-test-bacula-bacula-fd-0-000000000000-fd 150 | FDport = 9102 151 | WorkingDirectory = /var/lib/bacula 152 | Pid Directory = /run/bacula 153 | Maximum Concurrent Jobs = 20 154 | Plugin Directory = /usr/lib/bacula 155 | FDAddress = 192.0.2.0 156 | } 157 | 158 | Messages { 159 | Name = Standard 160 | director = bacula-dir = all, !skipped, !restored, !verified, !saved 161 | } 162 | """ 163 | ).strip() 164 | ) 165 | -------------------------------------------------------------------------------- /bacula_server_operator/src/bacula_relation.py: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Canonical Ltd. 2 | # See LICENSE file for licensing details. 
"""Library to handle the provider part of the bacula-dir relation."""

import logging
import secrets
import typing
from pathlib import Path

import ops
from pydantic import BaseModel, ConfigDict, Field, field_validator

BACULA_DIR_RELATION_NAME = "bacula-dir"
BACULA_DIR_NAME = "charm-bacula-dir"

logger = logging.getLogger(__name__)


class BaculaFdInfo(BaseModel):
    """Bacula file daemon information model.

    Attributes:
        model_config: Pydantic model configuration.
        name: Bacula file daemon name.
        password: Bacula file daemon password.
        fileset: backup fileset.
        host: Bacula file daemon host.
        port: Bacula file daemon port.
        schedule: backup schedule.
        client_run_before_backup: run script on Bacula file daemon before backup.
        client_run_after_backup: run script on Bacula file daemon after backup.
        client_run_before_restore: run script on Bacula file daemon before restore.
        client_run_after_restore: run script on Bacula file daemon after restore.
    """

    # Relation databag keys are kebab-case; the alias generator maps them
    # onto the snake_case field names of this model.
    model_config = ConfigDict(
        alias_generator=lambda name: name.replace("_", "-"), serialize_by_alias=True
    )
    name: str
    password: str
    fileset: list[Path]
    host: str = Field(..., alias="ingress-address")
    port: int = 9102
    schedule: list[str] = Field(default_factory=list)
    client_run_before_backup: str
    client_run_after_backup: str
    client_run_before_restore: str
    client_run_after_restore: str

    @field_validator("name", mode="before")
    @classmethod
    def _coerce_name(cls, value: str) -> str:
        """Normalize Bacula file daemon name.

        Args:
            value: Bacula file daemon name.

        Returns:
            Normalized Bacula file daemon name (without a trailing "-fd").
        """
        return value.removesuffix("-fd")

    @field_validator("schedule", mode="before")
    @classmethod
    def _coerce_schedule(cls, value: str | None) -> list[str]:
        """Normalize backup schedule.

        Args:
            value: backup schedule input, a comma-separated string or None.

        Returns: normalized schedule.
        """
        if not value:
            return []
        return [p.strip() for p in value.split(",") if p.strip()]

    @field_validator("fileset", mode="before")
    @classmethod
    def _coerce_fileset(cls, value: str) -> list[Path]:
        """Normalize backup fileset.

        Args:
            value: backup fileset input, a comma-separated string or a
                sequence of path-like values.

        Returns: normalized backup fileset.
        """
        if isinstance(value, str):
            parts = [p.strip() for p in value.split(",") if p.strip()]
            return [Path(p) for p in parts]
        return [Path(p) for p in value]

    @field_validator("fileset", mode="after")
    @classmethod
    def _validate_fileset(cls, value: list[Path]) -> list[Path]:
        """Validate backup fileset input.

        Args:
            value: backup fileset input.

        Raises:
            ValueError: if the fileset is empty or any path is malformed
                or relative.

        Returns: validated backup fileset.
        """
        if not value:
            raise ValueError("fileset cannot be empty")
        for path in value:
            str_path = str(path)
            if str_path != str_path.strip():
                raise ValueError("path cannot start or end with whitespaces")
            if "," in str_path:
                # Commas are the fileset serialization separator, so they
                # cannot appear inside a single path.
                raise ValueError("path cannot contain commas")
            if not path.is_absolute():
                raise ValueError("all paths in fileset must be absolute.")
        return value


class BaculaProvider:
    """bacula-dir relation provider."""

    def __init__(
        self, charm: ops.CharmBase, relation_name: str = BACULA_DIR_RELATION_NAME
    ) -> None:
        """Initialize bacula-dir relation provider.

        Args:
            charm: charm instance.
            relation_name: bacula-dir relation name.
        """
        self._charm = charm
        self._relation_name = relation_name

    def send_to_bacula_fd(self) -> None:
        """Send Bacula directory information to the Bacula file daemon.

        Creates (once per relation) a Juju secret holding the shared
        password, grants it to the relation, and publishes the director
        name and the secret id in the application databag. No-op on
        non-leader units, which are not allowed to write application
        relation data.
        """
        if not self._charm.unit.is_leader():
            return
        relations = self._charm.model.relations[self._relation_name]
        for relation in relations:
            data = relation.data[self._charm.app]
            password_secret_id = data.get("password")
            if password_secret_id is None:
                password_secret = self._charm.app.add_secret(
                    content={"password": secrets.token_urlsafe(32)},
                    label=f"relation-{relation.id}",
                )
                password_secret.grant(relation)
                password_secret_id = password_secret.id
            data["name"] = BACULA_DIR_NAME
            data["password"] = typing.cast(str, password_secret_id)

    def receive_from_bacula_fd(self) -> list[BaculaFdInfo]:
        """Receive Bacula file daemon information from relations.

        Relations whose application databag has not been populated yet
        (no password secret published) are skipped instead of raising.

        Returns:
            list of BaculaFdInfo retrieved from relations.
        """
        relations = self._charm.model.relations[self._relation_name]
        info = []
        for relation in relations:
            if relation.app is None:
                continue
            password_secret_id = relation.data[self._charm.app].get("password")
            if password_secret_id is None:
                # send_to_bacula_fd has not run for this relation yet.
                continue
            password = self._charm.model.get_secret(id=password_secret_id).get_content(
                refresh=True
            )["password"]
            for unit in relation.units:
                data = dict(relation.data[unit])
                if "name" not in data:
                    continue
                try:
                    info.append(
                        BaculaFdInfo.model_validate({**data, "password": password})
                    )
                except ValueError as exc:
                    logger.error(
                        "skipping invalid bacula-dir relation (id: %s) from %s: %s",
                        relation.id,
                        relation.app.name,
                        exc,
                    )
                    continue
        return info
(explanation_backup_server_charm_architecture)= 2 | 3 | # Charm architecture: Bacula server charm 4 | 5 | The Bacula Server charm is a machine charm that installs and manages all Bacula server components, including the Bacula Director, the Bacula Storage Daemon, and the Baculum web UI. Together, they provide backup orchestration, backup storage for Bacula backup agents, and a user interface for operators. 6 | 7 | ## High-level overview of backup charms deployment 8 | 9 | Here's a typical backup charm suite deployment in the machine charm 10 | environment. The deployment shows the bacula-server charm as the backup server, 11 | the bacula-fd charm as the backup agent, and the backup-integrator charm 12 | as the backup relation provider. 13 | 14 | The backup-integrator charm requests backups from the bacula-fd charm on 15 | behalf of the backup source charm `source`. The bacula-fd charm then 16 | integrates with the bacula-server charm to submit the backup files. The 17 | bacula-server charm is integrated with the PostgreSQL charm for storing 18 | backup metadata and with the s3-integrator charm to use S3 storage as 19 | the destination for backup files. 
20 | 21 | ```{mermaid} 22 | C4Context 23 | title Container diagram for backup charms 24 | 25 | System_Boundary(backup server, "Backup Server Model") { 26 | Container(s3-integrator, "S3 Integrator", "", "Provide backup destination") 27 | Container(bacula-server, "Bacula Server", "", "Backup server") 28 | Container(postgresql, "PostgreSQL", "", "Store backup metadata") 29 | Rel(s3-integrator, bacula-server, "") 30 | Rel(postgresql, bacula-server, "") 31 | } 32 | 33 | System_Boundary(backup source, "Backup Source Model") { 34 | Container_Boundary(backup-source, "Backup Source") { 35 | Component(source, "", "Backup source principal charm") 36 | Component(backup-integrator, "", "Backup relation integrator") 37 | Component(bacula-fd, "", "Bacula file daemon") 38 | } 39 | Rel(source, backup-integrator, "") 40 | Rel(backup-integrator, bacula-fd, "") 41 | } 42 | Rel(bacula-fd, bacula-server, "") 43 | ``` 44 | 45 | ## Charm architecture 46 | 47 | The following diagram shows the architecture of the Bacula server charm: 48 | 49 | ```{mermaid} 50 | C4Container 51 | title Component diagram for bacula-server charm 52 | 53 | Container_Boundary(backup server, "Backup Server") { 54 | Container(bacula-server-charm, "Bacula server charm", "", "Install and manages charmed-bacula-server") 55 | Container_Boundary(charmed-bacula-server, "charmed-bacula-server snap") { 56 | Container(bacula-dir, "Bacula Director", "", "Backup orchestration") 57 | Container(bacula-sd, "Bacula Storage Daemon", "", "Backup storage") 58 | Container(bacula-fd, "Bacula File Daemon", "", "Placeholder bacula-fd on bacula-server") 59 | Container(baculum, "Baculum Web Interface", "", "Apache web server") 60 | } 61 | Rel(bacula-server-charm, bacula-dir, "config") 62 | Rel(bacula-server-charm, bacula-sd, "config") 63 | Rel(bacula-server-charm, bacula-fd, "config") 64 | Rel(bacula-server-charm, baculum, "config") 65 | UpdateRelStyle(bacula-server-charm, bacula-dir, $offsetY="-15", $offsetX="10") 66 | 
UpdateRelStyle(bacula-server-charm, bacula-sd, $offsetY="-15", $offsetX="-30") 67 | UpdateRelStyle(bacula-server-charm, bacula-fd, $offsetY="-15", $offsetX="-50") 68 | UpdateRelStyle(bacula-server-charm, baculum, $offsetY="-20", $offsetX="-110") 69 | } 70 | ``` 71 | 72 | ## Juju events 73 | 74 | 1. {ref}`config-changed `: 75 | Monitors changes to the backup integrator configuration to update the 76 | relation data with the latest configuration values. 77 | 2. `leader-elected`, `leader-settings-changed`: 78 | Monitors changes in the charm’s leadership. Since only the leader 79 | unit can modify application relation data, triggering a relation data 80 | update when leadership changes ensures the relation is updated 81 | regardless of leader status during relation establishment. 82 | 3. {ref}`upgrade-charm `: 83 | Triggered when the charm has been upgraded. This ensures that the new 84 | version of the backup integrator charm can update the relation data 85 | if needed. 86 | 4. {ref}`bacula-peer-relation-changed `, 87 | {ref}`bacula-peer-relation-created `, 88 | {ref}`bacula-peer-relation-departed `: 89 | Monitors changes, creation, and removal of the `bacula-peer` relation 90 | to update relation data when needed. 91 | 5. {ref}`bacula-dir-relation-changed `, 92 | {ref}`bacula-dir-relation-broken `, 93 | {ref}`bacula-dir-relation-joined `, 94 | {ref}`bacula-dir-relation-departed `: 95 | Monitors changes, creation, and removal of the `bacula-dir` relation 96 | to update relation data when needed. And cleanup resources when 97 | relation is removed. 98 | 6. {ref}`backup-relation-changed `, 99 | {ref}`backup-relation-broken `, 100 | {ref}`backup-relation-departed `: 101 | Monitors changes, creation, and removal of the `backup` relation to 102 | update relation data when needed. 103 | 7. {ref}`secret-changed `, 104 | {ref}`secret-removed `: 105 | Monitors changes or removal of secrets inside relations to update 106 | configuration when needed. 107 | 8. 
{ref}`s3-relation-changed `, 108 | {ref}`s3-relation-broken `: 109 | Monitors changes, creation, and removal of the `s3` relation to 110 | update relation data when needed. 111 | 9. {ref}`postgres-relation-changed `, 112 | {ref}`postgres-relation-broken `: 113 | Monitors changes, creation, and removal of the `postgres` relation to 114 | update relation data when needed. 115 | 116 | ```{note} 117 | See more in the Juju docs: {ref}`juju:hook` 118 | ``` 119 | 120 | ## Charm code overview 121 | 122 | The `src/__main__.py` file is the default entry point for the backup 123 | integrator charm; it creates an instance of the `BaculaServerCharm` 124 | class (imported from the `charm` module), which inherits from 125 | `ops.CharmBase`. `ops.CharmBase` is the base class from which all charms 126 | are derived, provided by [Ops](https://ops.readthedocs.io/en/latest/index.html) 127 | (the Python framework for developing charms). 128 | 129 | ```{note} 130 | See more in the Juju docs: {ref}`juju:charm` 131 | ``` 132 | 133 | The `__init__` method of `BaculaServerCharm` ensures that the charm observes 134 | and handles all events relevant to its operation. 135 | 136 | For example, when a configuration is changed via the CLI: 137 | 138 | 1. The user runs the configuration command: 139 | 140 | ```bash 141 | juju config backup-server volume-retention="2 years" 142 | ``` 143 | 144 | 2. A `config-changed` event is emitted. 145 | 3. In the `__init__` method, the handler for this event is defined as 146 | follows: 147 | 148 | ```python 149 | self.framework.observe(self.on.config_changed, self._reconcile_event) 150 | ``` 151 | 152 | 4. The `__reconcile_event` method, in turn, takes the necessary actions, 153 | such as waiting for the backup relation(s) and updating the backup 154 | relation data. 
155 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Contributing 4 | 5 | This document explains the processes and practices recommended for contributing enhancements to the charm. 6 | 7 | ## Overview 8 | 9 | - Generally, before developing enhancements to this charm, you should consider [opening an issue 10 | ](link to issues page) explaining your use case. 11 | - If you would like to chat with us about your use-cases or proposed implementation, you can reach 12 | us at [Canonical Matrix public channel](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) 13 | or [Discourse](https://discourse.charmhub.io/). 14 | - Familiarizing yourself with the [Juju documentation](https://canonical-juju.readthedocs-hosted.com/en/latest/user/howto/manage-charms/) 15 | will help you a lot when working on new features or bug fixes. 16 | - All enhancements require review before being merged. Code review typically examines 17 | - code quality 18 | - test coverage 19 | - user experience for Juju operators of this charm. 20 | - Once your pull request is approved, we squash and merge your pull request branch onto 21 | the `main` branch. This creates a linear Git commit history. 22 | - For further information on contributing, please refer to our 23 | [Contributing Guide](https://github.com/canonical/is-charms-contributing-guide). 24 | 25 | ## Code of conduct 26 | 27 | When contributing, you must abide by the 28 | [Ubuntu Code of Conduct](https://ubuntu.com/community/ethos/code-of-conduct). 29 | 30 | ## Releases and versions 31 | 32 | This project uses [semantic versioning](https://semver.org/). 33 | 34 | Please ensure that any new feature, fix, or significant change is documented by 35 | adding an entry to the [CHANGELOG.md](./docs/changelog.md) file. 
36 | 37 | To learn more about changelog best practices, visit [Keep a Changelog](https://keepachangelog.com/). 38 | 39 | ## Submissions 40 | 41 | If you want to address an issue or a bug in this project, 42 | notify in advance the people involved to avoid confusion; 43 | also, reference the issue or bug number when you submit the changes. 44 | 45 | - [Fork](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/about-forks) 46 | our [GitHub repository](https://github.com/canonical/backup-operators) 47 | and add the changes to your fork, properly structuring your commits, 48 | providing detailed commit messages and signing your commits. 49 | - Make sure the updated project builds and runs without warnings or errors; 50 | this includes linting, documentation, code and tests. 51 | - Submit the changes as a 52 | [pull request (PR)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork). 53 | 54 | Your changes will be reviewed in due time; if approved, they will be eventually merged. 55 | 56 | ### Describing pull requests 57 | 58 | To be properly considered, reviewed and merged, 59 | your pull request must provide the following details: 60 | 61 | - **Title**: Summarize the change in a short, descriptive title. 62 | 63 | - **Overview**: Describe the problem that your pull request solves. 64 | Mention any new features, bug fixes or refactoring. 65 | 66 | - **Rationale**: Explain why the change is needed. 67 | 68 | - **Juju Events Changes**: Describe any changes made to Juju events, or 69 | "None" if the pull request does not change any Juju events. 70 | 71 | - **Module Changes**: Describe any changes made to the module, or "None" 72 | if your pull request does not change the module. 73 | 74 | - **Library Changes**: Describe any changes made to the library, 75 | or "None" is the library is not affected. 
76 | 77 | - **Checklist**: Complete the following items: 78 | 79 | - The [charm style guide](https://documentation.ubuntu.com/juju/3.6/reference/charm/charm-development-best-practices/) was applied 80 | - The [contributing guide](https://github.com/canonical/is-charms-contributing-guide) was applied 81 | - The changes are compliant with [ISD054 - Managing Charm Complexity](https://discourse.charmhub.io/t/specification-isd014-managing-charm-complexity/11619) 82 | - The documentation is updated 83 | - The PR is tagged with appropriate label (trivial, senior-review-required) 84 | - The changelog has been updated 85 | 86 | ### Signing commits 87 | 88 | To improve contribution tracking, 89 | we use the [Canonical contributor license agreement](https://assets.ubuntu.com/v1/ff2478d1-Canonical-HA-CLA-ANY-I_v1.2.pdf) 90 | (CLA) as a legal sign-off, and we require all commits to have verified signatures. 91 | 92 | #### Canonical contributor agreement 93 | 94 | Canonical welcomes contributions to the charm. Please check out our 95 | [contributor agreement](https://ubuntu.com/legal/contributors) if you're interested in contributing to the solution. 96 | 97 | The CLA sign-off is simple line at the 98 | end of the commit message certifying that you wrote it 99 | or have the right to commit it as an open-source contribution. 100 | 101 | #### Verified signatures on commits 102 | 103 | All commits in a pull request must have cryptographic (verified) signatures. 104 | To add signatures on your commits, follow the 105 | [GitHub documentation](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits). 106 | 107 | ## Develop 108 | 109 | To make contributions to this charm, you'll need a working 110 | [development setup](https://documentation.ubuntu.com/juju/latest/user/howto/manage-your-deployment/manage-your-deployment-environment/). 
111 | 112 | The code for this charm can be downloaded as follows: 113 | 114 | ``` 115 | git clone https://github.com/canonical/ 116 | ``` 117 | 118 | You can create an environment for development with `python3-venv`. 119 | We will also install `tox` inside the virtual environment for testing: 120 | 121 | ```bash 122 | sudo apt install python3-venv 123 | python3 -m venv venv 124 | source venv/bin/activate 125 | pip install tox 126 | ``` 127 | 128 | ### Test 129 | 130 | This project uses `tox` for managing test environments. There are some pre-configured environments 131 | that can be used for linting and formatting code when you're preparing contributions to the charm: 132 | 133 | * ``tox``: Executes all of the basic checks and tests (``lint``, ``unit``, ``static``, and ``coverage-report``). 134 | * ``tox -e fmt``: Runs formatting using ``black`` and ``isort``. 135 | * ``tox -e lint``: Runs a range of static code analysis to check the code. 136 | * ``tox -e static``: Runs other checks such as ``bandit`` for security issues. 137 | 138 | ### Build the rock and charm 139 | 140 | Use [Rockcraft](https://documentation.ubuntu.com/rockcraft/en/latest/) to create an 141 | OCI image for the app, and then upload the image to a MicroK8s registry, 142 | which stores OCI archives so they can be downloaded and deployed. 
143 | 144 | Enable the MicroK8s registry: 145 | 146 | ```bash 147 | microk8s enable registry 148 | ``` 149 | 150 | The following commands pack the OCI image and push it into 151 | the MicroK8s registry: 152 | 153 | ```bash 154 | cd 155 | rockcraft pack 156 | skopeo --insecure-policy copy --dest-tls-verify=false oci-archive:.rock docker://localhost:32000/:latest 157 | ``` 158 | 159 | Build the charm in this git repository using: 160 | 161 | ```shell 162 | charmcraft pack 163 | ``` 164 | 165 | ### Deploy 166 | 167 | ```bash 168 | # Create a model 169 | juju add-model charm-dev 170 | # Enable DEBUG logging 171 | juju model-config logging-config="=INFO;unit=DEBUG" 172 | # Deploy the charm 173 | juju deploy ./.charm 174 | ``` 175 | 176 | 177 | -------------------------------------------------------------------------------- /bacula_fd_operator/src/charm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2025 Canonical Ltd. 4 | # See LICENSE file for licensing details. 5 | 6 | """Bacula file daemon charm the service.""" 7 | 8 | import logging 9 | import typing 10 | from pathlib import Path 11 | 12 | import ops 13 | import pydantic 14 | from charms.backup_integrator.v0 import backup 15 | 16 | from . 
import bacula, bacula_relation

BACKUP_RELATION_NAME = "backup"
BACULA_DIR_RELATION_NAME = "bacula-dir"
PEER_RELATION_NAME = "bacula-peer"
# Fallback run-script target: hooks the backup spec does not define are
# pointed at a do-nothing script shipped with the charm.
NOOP_SCRIPT = str((Path(__file__).parent.parent / "scripts/noop").absolute())


class NotReadyError(Exception):
    """Charm is not ready."""


class UnrecoverableCharmError(Exception):
    """Unrecoverable Charm failure."""


class PeerRelationNotReadyError(NotReadyError):
    """Peer relation is not ready."""


class PeerRelationDataNotReadyError(NotReadyError):
    """Peer relation data is not ready."""


class BackupRelationNotReadyError(NotReadyError):
    """Backup relation is not ready."""


class BackupRelationDataNotReadyError(NotReadyError):
    """Backup relation data is not ready."""


class BaculaDirRelationNotReadyError(NotReadyError):
    """bacula-dir relation not ready."""


class BaculaDirRelationDataNotReadyError(NotReadyError):
    """bacula-dir relation data not ready."""


class InvalidBackupRelationDataError(UnrecoverableCharmError):
    """Invalid backup relation data."""


logger = logging.getLogger(__name__)


class BaculaFdCharm(ops.CharmBase):
    """Bacula file daemon charm the service."""

    def __init__(self, *args: typing.Any):
        """Construct.

        Args:
            args: Arguments passed to the CharmBase parent constructor.
        """
        super().__init__(*args)
        self._backup_provider = backup.BackupProvider(charm=self)
        self._bacula_dir = bacula_relation.BaculaRequirer(charm=self)

        # Every observed event funnels into the same idempotent reconcile
        # handler; the handler re-derives the full desired state each time.
        self.framework.observe(self.on.config_changed, self._reconcile_event)
        self.framework.observe(self.on.upgrade_charm, self._reconcile_event)
        self.framework.observe(self.on.secret_changed, self._reconcile_event)
        self.framework.observe(self.on.leader_elected, self._reconcile_event)
        self.framework.observe(self.on.leader_settings_changed, self._reconcile_event)

        self.framework.observe(self.on.bacula_peer_relation_created, self._reconcile_event)
        self.framework.observe(self.on.bacula_peer_relation_changed, self._reconcile_event)
        self.framework.observe(self.on.bacula_peer_relation_departed, self._reconcile_event)

        self.framework.observe(self._backup_provider.on.backup_required, self._reconcile_event)
        self.framework.observe(self.on.backup_relation_departed, self._reconcile_event)
        self.framework.observe(self.on.backup_relation_broken, self._reconcile_event)

        self.framework.observe(self.on.bacula_dir_relation_changed, self._reconcile_event)
        self.framework.observe(self.on.bacula_dir_relation_broken, self._reconcile_event)

    def _get_peer_data(self) -> dict[str, str] | None:
        """Get data stored in the peer relation and initialize it if not exist.

        Returns:
            peer data stored in the peer relation, None if peer relation doesn't exist yet.
            An empty dict means the leader has not initialized the data yet.
        """
        peer_relation = self.model.get_relation(PEER_RELATION_NAME)
        if not peer_relation:
            return None
        data = peer_relation.data[self.app]
        default_peer_data = {
            # NOTE(review): the default name embeds this unit's name, so the
            # stored value depends on which unit is leader when the databag
            # is first initialized -- confirm this is intended.
            "name": "-".join(
                [
                    "relation",
                    self.model.name,
                    self.unit.name.replace("/", "-"),
                    self.model.uuid.split("-")[-1],
                    "fd",
                ]
            ),
        }
        # Require only that the expected keys are present (subset check):
        # extra keys in the databag must not make follower units report
        # "not initialized" forever.
        if not set(default_peer_data) <= set(data.keys()):
            if self.unit.is_leader():
                for key, value in default_peer_data.items():
                    if key not in data:
                        data[key] = value
                return dict(data)
            # Followers cannot write app data; signal "not initialized".
            return {}
        return dict(data)

    def _load_schedule(self) -> list[str]:
        """Load the backup schedule configuration.

        Returns:
            Backup schedule configuration as a list of non-empty,
            whitespace-trimmed entries.
        """
        schedule_config = typing.cast(str, self.config.get("schedule", "")).split(",")
        return [schedule.strip() for schedule in schedule_config if schedule.strip()]

    def _get_unit_address(self) -> str:
        """Get the address of the unit.

        Raises:
            PeerRelationNotReadyError: if the peer relation does not exist yet.

        Returns:
            The IP address of the unit.
        """
        peer_relation = self.model.get_relation(PEER_RELATION_NAME)
        if not peer_relation:
            raise PeerRelationNotReadyError("waiting for peer relation")
        return peer_relation.data[self.unit]["ingress-address"]

    def _reconcile(self) -> None:
        """Reconcile the charm to its desired state.

        Raises:
            NotReadyError: if a required relation or its data is not ready.
            InvalidBackupRelationDataError: if the backup relation data
                fails validation.
        """
        if not bacula.is_installed():
            self.unit.status = ops.WaitingStatus("installing bacula-fd")
            bacula.install()
        # Final status is set by _reconcile_event after reconciliation
        # completes; do not prematurely report ActiveStatus here.
        peer_data = self._get_peer_data()
        if not peer_data:
            raise PeerRelationDataNotReadyError("waiting for peer data to be initialized")
        name = peer_data["name"]
        backup_relation = self.model.get_relation(BACKUP_RELATION_NAME)
        if not backup_relation:
            raise BackupRelationNotReadyError("waiting for backup relation")
        try:
            backup_spec = self._backup_provider.get_backup_spec(backup_relation)
        except pydantic.ValidationError as exc:
            logger.exception("invalid backup relation data")
            errors = exc.errors()
            error_fields = [str(e["loc"][0]) for e in errors if e.get("loc")]
            raise InvalidBackupRelationDataError(
                f"invalid backup relation data: {', '.join(error_fields)}"
            ) from exc
        if not backup_spec:
            raise BackupRelationDataNotReadyError("waiting for backup relation data")
        bacula_dir = self.model.get_relation(BACULA_DIR_RELATION_NAME)
        if not bacula_dir:
            raise BaculaDirRelationNotReadyError("waiting for bacula-dir relation")
        port = typing.cast(int, self.config.get("port", 9102))
        self.unit.set_ports(port)
        self._bacula_dir.send_to_bacula_dir(
            name=name,
            port=port,
            fileset=",".join(map(str, backup_spec.fileset)),
            schedule=",".join(self._load_schedule()),
            client_run_before_backup=backup_spec.run_before_backup or NOOP_SCRIPT,
            client_run_after_backup=backup_spec.run_after_backup or NOOP_SCRIPT,
            client_run_before_restore=backup_spec.run_before_restore or NOOP_SCRIPT,
            client_run_after_restore=backup_spec.run_after_restore or NOOP_SCRIPT,
        )
        dir_data = self._bacula_dir.receive_from_bacula_dir()
        if not dir_data:
            raise BaculaDirRelationDataNotReadyError("waiting for bacula-dir relation data")
        bacula.config_reload(
            name=name,
            host=self._get_unit_address(),
            port=port,
            director_name=dir_data.name,
            director_password=dir_data.password,
        )

    def _reconcile_event(self, _: ops.EventBase) -> None:
        """Reconcile the charm on any event, translating errors into status."""
        try:
            self._reconcile()
            self.unit.status = ops.ActiveStatus()
        except NotReadyError as exc:
            self.unit.status = ops.WaitingStatus(str(exc))
        except UnrecoverableCharmError as exc:
            self.unit.status = ops.BlockedStatus(str(exc))


if __name__ == "__main__":  # pragma: nocover
    ops.main.main(BaculaFdCharm)
20 | * A Juju controller bootstrapped to LXD, for example: 21 | `juju bootstrap localhost tutorial-controller` 22 | 23 | 24 | ```{note} 25 | You can get a working setup by using a Multipass VM as outlined in 26 | the {ref}`Set up your test environment ` 27 | guide. 28 | ``` 29 | 30 | ## What you'll do 31 | 32 | 1. Deploy the [Bacula server charm](https://charmhub.io/bacula-server). 33 | 2. Deploy and integrate S3 storage. 34 | 3. Deploy and integrate a PostgreSQL database. 35 | 4. Get admin credentials. 36 | 5. Access the Baculum web interface. 37 | 6. Clean up the environment. 38 | 39 | ## Set up the environment 40 | 41 | To work inside the Multipass VM, log in with the following command: 42 | 43 | ```bash 44 | multipass shell my-juju-vm 45 | ``` 46 | 47 | ```{note} 48 | If you're working locally, you don't need to do this step. 49 | ``` 50 | 51 | To manage resources and separate this tutorial's workload from your 52 | usual work, create a new model on the LXD controller with the following 53 | command: 54 | 55 | ``` 56 | juju add-model bacula-tutorial 57 | ``` 58 | 59 | ## Deploy the Bacula server charm 60 | 61 | Start by deploying the Bacula server charm. For this tutorial, deploy 62 | the `bacula-server` charm from the edge channel: 63 | 64 | ``` 65 | juju deploy bacula-server --channel edge 66 | ``` 67 | 68 | ## Deploy and integrate S3 storage 69 | 70 | The Bacula server charm requires S3-compatible storage as the backup 71 | destination. For testing, we'll deploy minio and use the [ 72 | `s3-integrator`](https://charmhub.io/s3-integrator) charm to provide S3 73 | storage. 74 | 75 | ### Deploy minio 76 | 77 | We will use Docker to run minio. Run the following commands **inside the 78 | Multipass VM** to install Docker and start minio. 79 | 80 | Install Docker: 81 | 82 | ``` 83 | sudo apt update && sudo apt install -y docker 84 | ``` 85 | 86 | Let's update Docker's iptables to allow LXD network traffic. This step 87 | is 88 | required after every reboot. 
If `lxdbr0` is not the name of your LXD 89 | bridge, 90 | replace it with the actual name. 91 | 92 | ``` 93 | sudo iptables -I DOCKER-USER -i lxdbr0 -j ACCEPT 94 | sudo iptables -I DOCKER-USER -o lxdbr0 -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT 95 | ``` 96 | 97 | Start the minio container: 98 | 99 | ``` 100 | sudo docker run -d --name minio -p 9000:9000 -p 9001:9001 -e minio_ROOT_USER=minioadmin -e minio_ROOT_PASSWORD=minioadmin minio/minio server /data --console-address ":9001" 101 | ``` 102 | 103 | Create the Bacula bucket: 104 | 105 | ``` 106 | sudo docker exec minio mkdir -m 777 /data/bacula 107 | ``` 108 | 109 | When everything is set up, you should see output similar to the 110 | following from `juju status`: 111 | 112 | ``` 113 | Model Controller Cloud/Region Version SLA Timestamp 114 | bacula-tutorial lxd localhost/localhost 3.6.2 unsupported 17:41:40+08:00 115 | 116 | App Version Status Scale Charm Channel Rev Exposed Message 117 | bacula-server waiting 1 bacula-server latest/edge 6 no waiting for postgresql relation 118 | s3-integrator blocked 1 s3-integrator 1/stable 145 no Missing parameters: ['access-key', 'secret-key'] 119 | 120 | Unit Workload Agent Machine Public address Ports Message 121 | bacula-server/0* waiting idle 0 10.212.71.247 waiting for postgresql relation 122 | s3-integrator/0* blocked idle 1 10.212.71.44 Missing parameters: ['access-key', 'secret-key'] 123 | 124 | Machine State Address Inst id Base AZ Message 125 | 0 started 10.212.71.247 juju-b6e2bb-0 ubuntu@24.04 Running 126 | 1 started 10.212.71.44 juju-b6e2bb-1 ubuntu@22.04 Running 127 | ``` 128 | 129 | The next step is to configure the `s3-integrator` charm. Run the 130 | following commands to configure it. All configuration values are static 131 | except the endpoint, which should be the LXD network gateway address ( 132 | this varies depending on your setup). 
You can infer the gateway address 133 | by substituting the last octet of the unit address shown in the 134 | `juju status` output with `1`. In this example, `10.212.71.247` becomes 135 | `10.212.71.1`, so the gateway IP address is `10.212.71.1`. 136 | 137 | ``` 138 | juju config s3-integrator bucket=bacula endpoint=http://10.212.71.1:9000 s3-uri-style=path 139 | juju run s3-integrator/leader sync-s3-credentials access-key=minioadmin secret-key=minioadmin 140 | ``` 141 | 142 | Now integrate the `bacula-server` charm with the `s3-integrator` charm: 143 | 144 | ``` 145 | juju integrate s3-integrator bacula-server 146 | ``` 147 | 148 | ## Deploy and integrate a PostgreSQL database 149 | 150 | The Bacula server charm also requires a PostgreSQL database to store 151 | backup metadata. We'll use the [ 152 | `postgresql`](https://charmhub.io/postgresql) charm. 153 | 154 | The following commands deploy the `postgresql` charm and integrate it 155 | with the `bacula-server` charm. 156 | 157 | ``` 158 | juju deploy postgresql --channel 14/stable 159 | juju integrate postgresql bacula-server 160 | ``` 161 | 162 | Run `juju status` to see the current status of the deployment. 
The 163 | output should be similar to the following: 164 | 165 | ``` 166 | Model Controller Cloud/Region Version SLA Timestamp 167 | bacula-tutorial lxd localhost/localhost 3.6.2 unsupported 18:01:26+08:00 168 | 169 | App Version Status Scale Charm Channel Rev Exposed Message 170 | bacula-server active 1 bacula-server latest/edge 6 no 171 | postgresql 14.19 active 1 postgresql 14/stable 936 no 172 | s3-integrator active 1 s3-integrator 1/stable 145 no 173 | 174 | Unit Workload Agent Machine Public address Ports Message 175 | bacula-server/0* active idle 0 10.212.71.247 9095-9096,9101,9103/tcp 176 | postgresql/0* active idle 2 10.212.71.237 5432/tcp Primary 177 | s3-integrator/0* active idle 1 10.212.71.44 178 | 179 | Machine State Address Inst id Base AZ Message 180 | 0 started 10.212.71.247 juju-b6e2bb-0 ubuntu@24.04 Running 181 | 1 started 10.212.71.44 juju-b6e2bb-1 ubuntu@22.04 Running 182 | 2 started 10.212.71.237 juju-b6e2bb-2 ubuntu@22.04 Running 183 | ``` 184 | 185 | ## Access the Baculum web interface 186 | 187 | To access the Baculum web interface, first create a Baculum web account 188 | by running the `create-web-user` action on the `bacula-server` leader: 189 | 190 | ``` 191 | juju run bacula-server/leader create-web-user username=admin 192 | ``` 193 | 194 | The username and password are shown in the command output, for example: 195 | 196 | ``` 197 | Running operation 3 with 1 task 198 | - task 4 on unit-bacula-server-0 199 | 200 | Waiting for task 4... 201 | password: Waz4TS5Y4lSJDYF_GHAlTQ 202 | username: admin 203 | ``` 204 | 205 | If you deployed the test environment on your host machine, you can 206 | directly access Baculum at `http://10.212.71.247:9095/web/` using the 207 | username and password you just created. The IP address may vary in your 208 | deployment. 

If you deployed the test environment inside a Multipass VM, use `socat`
to forward external traffic to the `bacula-server` by running the
following command inside the Multipass VM:

```
socat TCP-LISTEN:9095,reuseaddr,fork TCP:10.212.71.247:9095
```

Then access Baculum at `http://<VM-IP>:9095/web/`, where `<VM-IP>` is the
IP address of the Multipass VM.

## Clean up the environment

Congratulations! You have successfully deployed the Bacula server charm,
added S3 storage and a database, and accessed the application.

You can clean up your Juju environment by following this guide:
{ref}`Tear down your test environment `

Clean up the minio Docker image by running the following command:

```
docker rm -f minio
```
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

# Minimal makefile for Sphinx documentation
#
# Add your customisation to `Makefile` instead.

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXDIR = .sphinx
SPHINXOPTS ?= -c . -d $(SPHINXDIR)/.doctrees -j auto
SPHINXBUILD ?= $(VENVDIR)/bin/sphinx-build
SOURCEDIR = .
BUILDDIR = _build
VENVDIR = $(SPHINXDIR)/venv
PA11Y = $(SPHINXDIR)/node_modules/pa11y/bin/pa11y.js --config $(SPHINXDIR)/pa11y.json
VENV = $(VENVDIR)/bin/activate
TARGET = *
METRICSDIR = $(SOURCEDIR)/.sphinx/metrics
REQPDFPACKS = latexmk fonts-freefont-otf texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended texlive-font-utils texlive-lang-cjk texlive-xetex plantuml xindy tex-gyre dvipng
CONFIRM_SUDO ?= N
VALE_CONFIG = $(SPHINXDIR)/vale.ini
VALEDIR = $(SPHINXDIR)/venv/lib/python*/site-packages/vale
VOCAB_CANONICAL = $(SPHINXDIR)/styles/config/vocabularies/Canonical
SPHINX_HOST ?= 127.0.0.1
SPHINX_PORT ?= 8000

# Put it first so that "make" without argument is like "make help".
help:
	@echo
	@echo "-------------------------------------------------------------"
	@echo "* watch, build and serve the documentation: make run"
	@echo "* only build: make html"
	@echo "* only serve: make serve"
	@echo "* clean built doc files: make clean-doc"
	@echo "* clean full environment: make clean"
	@echo "* check links: make linkcheck"
	@echo "* check markdown: make lint-md"
	@echo "* check spelling: make spelling"
	@echo "* check spelling (without building again): make spellcheck"
	@echo "* check inclusive language: make woke"
	@echo "* check accessibility: make pa11y"
	@echo "* check style guide compliance: make vale"
	@echo "* check style guide compliance on target: make vale TARGET=*"
	@echo "* check metrics for documentation: make allmetrics"
	@echo "* other possible targets: make <target-name>"
	@echo "-------------------------------------------------------------"
	@echo

# Fix: the phony list previously used non-breaking hyphens (U+2011) in several
# names (full-help, pa11y-install, vale-install, pdf-prep, pdf-prep-force,
# clean-doc), so those entries never matched the real ASCII-hyphen targets and
# the targets were not actually marked phony. Also declare
# pymarkdownlnt-install, consistent with the other *-install helper targets.
.PHONY: help full-help html epub pdf linkcheck spelling spellcheck woke \
        vale pa11y run serve install pa11y-install pymarkdownlnt-install \
        vale-install pdf-prep pdf-prep-force clean clean-doc allmetrics \
        update lint-md

full-help:
$(VENVDIR)
	@. $(VENV); $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
	@echo "\n\033[1;31mNOTE: This help text shows unsupported targets!\033[0m"
	@echo "Run 'make help' to see supported targets."

# If requirements are updated, venv should be rebuilt and timestamped.
$(VENVDIR):
	@echo "... setting up virtualenv"
	python3 -m venv $(VENVDIR) || { echo "You must install python3-venv before you can build the documentation."; exit 1; }
	. $(VENV); pip install $(PIPOPTS) --require-virtualenv \
		--upgrade -r requirements.txt \
		--log $(VENVDIR)/pip_install.log
	@test ! -f $(VENVDIR)/pip_list.txt || \
		mv $(VENVDIR)/pip_list.txt $(VENVDIR)/pip_list.txt.bak
	@. $(VENV); pip list --local --format=freeze > $(VENVDIR)/pip_list.txt
	@touch $(VENVDIR)

# Install pa11y (accessibility checker) locally under $(SPHINXDIR) via npm.
pa11y-install:
	@command -v $(PA11Y) >/dev/null || { \
		echo "Installing \"pa11y\" from npm..."; echo; \
		mkdir -p $(SPHINXDIR)/node_modules/ ; \
		npm install --prefix $(SPHINXDIR) pa11y; \
	}

# Install the markdown linter into the docs venv if not already present.
pymarkdownlnt-install:
	@. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/pymarkdown || pip install pymarkdownlnt

install: $(VENVDIR)

run: install
	. $(VENV); $(VENVDIR)/bin/sphinx-autobuild -b dirhtml --host $(SPHINX_HOST) --port $(SPHINX_PORT) "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS)

# Does not depend on $(BUILDDIR) to rebuild properly at every run.
html: install
	. $(VENV); $(SPHINXBUILD) --fail-on-warning --keep-going -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS)

epub: install
	. $(VENV); $(SPHINXBUILD) -b epub "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS)

serve: html
	cd "$(BUILDDIR)"; python3 -m http.server --bind $(SPHINX_HOST) $(SPHINX_PORT)

clean: clean-doc
	@test !
-e "$(VENVDIR)" -o -d "$(VENVDIR)" -a "$(abspath $(VENVDIR))" != "$(VENVDIR)"
	rm -rf $(VENVDIR)
	rm -rf $(SPHINXDIR)/node_modules/
	rm -rf $(SPHINXDIR)/styles
	rm -rf $(VALE_CONFIG)

clean-doc:
	git clean -fx "$(BUILDDIR)"
	rm -rf $(SPHINXDIR)/.doctrees

linkcheck: install
	. $(VENV) ; $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) || { grep --color -F "[broken]" "$(BUILDDIR)/output.txt"; exit 1; }
	exit 0

# Fix: quote the -name pattern so the shell cannot glob-expand *.html against
# files in the current directory before find receives it.
pa11y: pa11y-install html
	find $(BUILDDIR) -name '*.html' -print0 | xargs -n 1 -0 $(PA11Y)

lint-md: pymarkdownlnt-install
	@. $(VENV); pymarkdownlnt --config $(SPHINXDIR)/.pymarkdown.json scan --recurse --exclude=./$(SPHINXDIR)/** $(SOURCEDIR)

vale-install: install
	@. $(VENV); test -f $(VALE_CONFIG) || python3 $(SPHINXDIR)/get_vale_conf.py
	@echo '.Name=="Canonical.400-Enforce-inclusive-terms"' > $(SPHINXDIR)/styles/woke.filter
	@echo '.Level=="error" and .Name!="Canonical.500-Repeated-words" and .Name!="Canonical.000-US-spellcheck"' > $(SPHINXDIR)/styles/error.filter
	@echo '.Name=="Canonical.000-US-spellcheck"' > $(SPHINXDIR)/styles/spelling.filter
	@. $(VENV); find $(VALEDIR)/vale_bin -size 195c -exec vale --version \;

woke: vale-install
	@cat $(VOCAB_CANONICAL)/accept.txt > $(VOCAB_CANONICAL)/accept_backup.txt
	@cat $(SOURCEDIR)/.custom_wordlist.txt >> $(VOCAB_CANONICAL)/accept.txt
	@echo "Running Vale acceptable term check against $(TARGET). To change target set TARGET= with make command"
	@.
$(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/woke.filter' --glob='*.{md,rst}' $(TARGET)
	@cat $(VOCAB_CANONICAL)/accept_backup.txt > $(VOCAB_CANONICAL)/accept.txt && rm $(VOCAB_CANONICAL)/accept_backup.txt

vale: vale-install
	@cat $(VOCAB_CANONICAL)/accept.txt > $(VOCAB_CANONICAL)/accept_backup.txt
	@cat $(SOURCEDIR)/.custom_wordlist.txt >> $(VOCAB_CANONICAL)/accept.txt
	@echo "Running Vale against $(TARGET). To change target set TARGET= with make command"
	@. $(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/error.filter' --glob='*.{md,rst}' $(TARGET)
	@cat $(VOCAB_CANONICAL)/accept_backup.txt > $(VOCAB_CANONICAL)/accept.txt && rm $(VOCAB_CANONICAL)/accept_backup.txt

spelling: vale-install
	@cat $(VOCAB_CANONICAL)/accept.txt > $(VOCAB_CANONICAL)/accept_backup.txt
	@cat $(SOURCEDIR)/.custom_wordlist.txt >> $(VOCAB_CANONICAL)/accept.txt
	@echo "Running Vale against $(TARGET). To change target set TARGET= with make command"
	@. $(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/spelling.filter' --glob='*.{md,rst}' $(TARGET)
	@cat $(VOCAB_CANONICAL)/accept_backup.txt > $(VOCAB_CANONICAL)/accept.txt && rm $(VOCAB_CANONICAL)/accept_backup.txt

spellcheck: spelling
	@echo "Please note that the \`make spellcheck\` command is being deprecated in favor of \`make spelling\`"

pdf-prep: install
	@for packageName in $(REQPDFPACKS); do (dpkg-query -W -f='$${Status}' $$packageName 2>/dev/null | \
	grep -c "ok installed" >/dev/null && echo "Package $$packageName is installed") && continue || \
	(echo; echo "PDF generation requires the installation of the following packages: $(REQPDFPACKS)" && \
	echo "" && echo "Run 'sudo make pdf-prep-force' to install these packages" && echo "" && echo \
	"Please be aware these packages will be installed to your system") && exit 1 ; done

# Fix: removed a stray trailing backslash after $(REQPDFPACKS) that continued
# the recipe line into the following (blank) line.
pdf-prep-force:
	apt-get update
	apt-get upgrade -y
	apt-get install --no-install-recommends -y $(REQPDFPACKS)

pdf: pdf-prep
	@. $(VENV); sphinx-build -M latexpdf "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS)
	@rm ./$(BUILDDIR)/latex/front-page-light.pdf || true
	@rm ./$(BUILDDIR)/latex/normal-page-footer.pdf || true
	@find ./$(BUILDDIR)/latex -name "*.pdf" -exec mv -t ./$(BUILDDIR) {} +
	@rm -r $(BUILDDIR)/latex
	@echo
	@echo "Output can be found in ./$(BUILDDIR)"
	@echo

allmetrics: html
	@echo "Recording documentation metrics..."
	@echo "Checking for existence of vale..."
	. $(VENV)
	@. $(VENV); test -d $(VALEDIR) || pip install vale
	@. $(VENV); test -f $(VALE_CONFIG) || python3 $(SPHINXDIR)/get_vale_conf.py
	@. $(VENV); find $(VALEDIR)/vale_bin -size 195c -exec vale --config "$(VALE_CONFIG)" $(TARGET) > /dev/null \;
	@eval '$(METRICSDIR)/source_metrics.sh $(PWD)'
	@.
$(VENV); python3 $(METRICSDIR)/build_metrics.py $(BUILDDIR)

update: install
	@. $(VENV); .sphinx/update_sp.py

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%:
	$(MAKE) --no-print-directory install
	. $(VENV); $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/tests/integration/conftest.py:
--------------------------------------------------------------------------------
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.

# for using fixtures in fixtures
# pylint: disable=unused-argument

"""Fixtures for integration tests."""

import json
import subprocess  # nosec
import textwrap

import boto3
import botocore.config
import jubilant
import pytest

from tests.integration import baculum


def find_charm_file(pytestconfig, name: str) -> str | None:
    """Find charm file from --charm-file input.

    Args:
        pytestconfig: pytest config.
        name: The filename of the charm file.

    Returns:
        The path to the charm file, or None when no --charm-file option
        matches the requested filename.
    """
    charm_files = pytestconfig.getoption("--charm-file")
    if not charm_files:
        return None
    # Match on the filename suffix so callers may pass absolute or relative paths.
    for file in charm_files:
        if file.endswith(name):
            return file
    return None


@pytest.fixture(scope="module", name="backup_integrator_charm_file")
def backup_integrator_charm_file_fixture(pytestconfig) -> str:
    """Get backup-integrator charm file.

    Falls back to packing the charm with charmcraft when no pre-built
    file was supplied via --charm-file.
    """
    file = find_charm_file(pytestconfig, "backup-integrator_ubuntu@24.04-amd64.charm")
    if file:
        return file
    subprocess.check_call(["charmcraft", "pack"], cwd="./backup_integrator_operator/")  # nosec
    return "./backup_integrator_operator/backup-integrator_ubuntu@24.04-amd64.charm"


@pytest.fixture(scope="module", name="bacula_fd_charm_file")
def bacula_fd_charm_file_fixture(pytestconfig) -> str:
    """Get bacula-fd charm file.

    Falls back to packing the charm with charmcraft when no pre-built
    file was supplied via --charm-file.
    """
    file = find_charm_file(pytestconfig, "bacula-fd_ubuntu@24.04-amd64.charm")
    if file:
        return file
    subprocess.check_call(["charmcraft", "pack"], cwd="./bacula_fd_operator/")  # nosec
    return "./bacula_fd_operator/bacula-fd_ubuntu@24.04-amd64.charm"


@pytest.fixture(scope="module", name="bacula_server_charm_file")
def bacula_server_charm_file_fixture(pytestconfig) -> str:
    """Get bacula-server charm file.

    Falls back to packing the charm with charmcraft when no pre-built
    file was supplied via --charm-file.
    """
    file = find_charm_file(pytestconfig, "bacula-server_ubuntu@24.04-amd64.charm")
    if file:
        return file
    subprocess.check_call(["charmcraft", "pack"], cwd="./bacula_server_operator/")  # nosec
    return "./bacula_server_operator/bacula-server_ubuntu@24.04-amd64.charm"


@pytest.fixture(scope="module", name="deploy_minio")
def deploy_minio_fixture(juju: jubilant.Juju):
    """Deploy the minio charm (using any-charm).

    The any-charm payload below downloads the MinIO server binary on install
    and runs it as a systemd service with the static "minioadmin" credentials,
    exposing the S3 endpoint on port 9000 and the console on 9001.
    """
    any_charm = textwrap.dedent(
        '''
        import os
        import subprocess
        import textwrap
        import urllib.request

        import ops

        from any_charm_base import AnyCharmBase

        class AnyCharm(AnyCharmBase):
            def __init__(self, *args):
                super().__init__(*args)
                self.framework.observe(self.on.install, self._on_install)

            def _on_install(self, _):
                self.unit.status = ops.MaintenanceStatus("downloading minio")
                urllib.request.urlretrieve(
                    "https://dl.min.io/server/minio/release/linux-amd64/minio", "/usr/bin/minio"
                )
                os.chmod("/usr/bin/minio", 0o755)
                self.unit.status = ops.MaintenanceStatus("setting up minio")
                service = textwrap.dedent(
                    """
                    [Unit]
                    Description=minio
                    Wants=network-online.target
                    After=network-online.target

                    [Service]
                    Type=simple
                    Environment="MINIO_ROOT_USER=minioadmin"
                    Environment="MINIO_ROOT_PASSWORD=minioadmin"
                    ExecStartPre=/usr/bin/mkdir -p /srv/bacula
                    ExecStart=/usr/bin/minio server --console-address :9001 /srv
                    Restart=on-failure
                    RestartSec=5

                    [Install]
                    WantedBy=multi-user.target
                    """
                )
                with open("/etc/systemd/system/minio.service", "w") as f:
                    f.write(service)
                subprocess.check_call(["systemctl", "daemon-reload"])
                subprocess.check_call(["systemctl", "enable", "--now", "minio"])
                self.unit.set_ports(9000, 9001)
                self.unit.status = ops.ActiveStatus()
        '''
    )
    juju.deploy(
        "any-charm",
        "minio",
        channel="latest/edge",
        config={"src-overwrite": json.dumps({"any_charm.py": any_charm})},
    )


@pytest.fixture(scope="module", name="deploy_charms")
def deploy_charms_fixture(
    juju: jubilant.Juju,
    deploy_minio,
    backup_integrator_charm_file,
    bacula_fd_charm_file,
    bacula_server_charm_file,
):
    """Deploy backup charms.

    Deploys the full test topology (ubuntu machine, backup-integrator,
    bacula-fd, bacula-server, postgresql, s3-integrator) against the MinIO
    instance from deploy_minio, then wires up all relations and waits for
    everything to settle into active status.
    """
    juju.deploy("ubuntu", base="ubuntu@24.04")
    juju.deploy(backup_integrator_charm_file, config={"fileset": "/var/backups/"})
    juju.deploy(
        bacula_fd_charm_file,
        config={"schedule": "Level=Full sun at 01:00, Level=Incremental mon-sat at 01:00"},
    )
    juju.deploy(bacula_server_charm_file)
    juju.deploy("postgresql", "bacula-database", channel="14/stable")
    juju.deploy("s3-integrator")
    # s3-integrator must be idle before it can accept config and run actions.
    juju.wait(lambda status: jubilant.all_agents_idle(status, "s3-integrator"), timeout=7200)
    minio_address = list(juju.status().apps["minio"].units.values())[0].public_address
    juju.config(
        "s3-integrator",
        {
            "endpoint": f"http://{minio_address}:9000",
            "bucket": "bacula",
            "s3-uri-style": "path",
        },
    )
    # Credentials match the MINIO_ROOT_USER/PASSWORD baked into the minio service.
    juju.run(
        unit="s3-integrator/0",
        action="sync-s3-credentials",
        params={"access-key": "minioadmin", "secret-key": "minioadmin"},
    )

    juju.integrate("ubuntu:juju-info", "backup-integrator")
    juju.integrate("ubuntu:juju-info", "bacula-fd")
    juju.integrate("backup-integrator:backup", "bacula-fd")
    juju.integrate("bacula-server", "bacula-database")
    juju.integrate("bacula-server", "s3-integrator")
    juju.integrate("bacula-server", "bacula-fd")

    juju.wait(jubilant.all_active, timeout=7200)


@pytest.fixture(scope="module", name="setup_database")
def setup_database_fixture(juju: jubilant.Juju, deploy_charms):
    """Setup backup source, a simple postgresql server.

    Installs PostgreSQL on the ubuntu unit, seeds an "ubuntu" database with a
    release table, and configures backup-integrator hook scripts that dump the
    database before a backup and restore it after a restore.
    """
    juju.ssh("ubuntu/0", "sudo apt-get install -y postgresql")
    juju.ssh("ubuntu/0", "sudo mkdir -p /var/backups/postgresql")
    # The dump/restore scripts below run as the postgres user.
    juju.ssh("ubuntu/0", "sudo chown postgres /var/backups/postgresql")
    sql = """
CREATE DATABASE ubuntu;

\\c ubuntu

CREATE TABLE IF NOT EXISTS release (
    version TEXT NOT NULL,
    code_name TEXT NOT NULL
);

INSERT INTO "release" (version, code_name) VALUES
('25.10', 'Questing Quokka'),
('25.04', 'Plucky Puffin'),
('24.04', 'Noble Numbat'),
('23.10', 'Mantic Minotaur'),
('23.04', 'Lunar Lobster'),
('22.10', 'Kinetic Kudu'),
('22.04', 'Jammy Jellyfish'),
('21.10', 'Impish Indri'),
('21.04', 'Hirsute Hippo'),
('20.10', 'Groovy Gorilla'),
('20.04', 'Focal Fossa'),
('19.10', 'Eoan Ermine'),
('19.04', 'Disco Dingo'),
('18.10', 'Cosmic Cuttlefish'),
('18.04', 'Bionic Beaver'),
('17.10', 'Artful Aardvark'),
('17.04', 'Zesty Zapus'),
('16.10', 'Yakkety Yak'),
('16.04', 'Xenial Xerus'),
('15.10', 'Wily Werewolf'),
('15.04', 'Vivid Vervet'),
('14.10', 'Utopic Unicorn'),
('14.04', 'Trusty Tahr'),
('13.10', 'Saucy Salamander'),
('13.04', 'Raring Ringtail'),
('12.10', 'Quantal Quetzal'),
('12.04', 'Precise Pangolin'),
('11.10', 'Oneiric Ocelot'),
('11.04', 'Natty Narwhal'),
('10.10', 'Maverick Meerkat'),
('10.04', 'Lucid Lynx'),
('09.10', 'Karmic Koala'),
('09.04', 'Jaunty Jackalope'),
('08.10', 'Intrepid Ibex'),
('08.04', 'Hardy Heron'),
('07.10', 'Gutsy Gibbon'),
('07.04', 'Feisty Fawn'),
('06.10', 'Edgy Eft'),
('06.06', 'Dapper Drake'),
('05.10', 'Breezy Badger'),
('05.04', 'Hoary Hedgehog'),
('04.10', 'Warty Warthog');
"""
    # Pipe the seed SQL into psql on the unit; ON_ERROR_STOP makes failures fatal.
    juju.cli("ssh", "ubuntu/0", "sudo -u postgres psql -v ON_ERROR_STOP=1 postgres", stdin=sql)
    juju.config(
        "backup-integrator",
        {
            "run-before-backup": textwrap.dedent(
                """\
                #!/bin/bash
                sudo -u postgres pg_dump -d ubuntu -c -f /var/backups/postgresql/ubuntu.dump
                """
            ),
            "run-after-backup": textwrap.dedent(
                """\
                #!/bin/bash
                sudo rm -f /var/backups/postgresql/ubuntu.dump
                """
            ),
            "run-after-restore": textwrap.dedent(
                """\
                #!/bin/bash
                sudo -u postgres psql -d ubuntu -1 -f /var/backups/postgresql/ubuntu.dump
                sudo rm -f /var/backups/postgresql/ubuntu.dump
                """
            ),
        },
    )


@pytest.fixture(scope="module", name="baculum")
def baculum_client(juju: jubilant.Juju, setup_database) -> baculum.Baculum:
    """Initialize a Baculum API client.

    Creates a fresh API user on the bacula-server leader via the
    create-api-user action and returns a client bound to the unit's
    public address on port 9096.
    """
    unit_name, _ = list(juju.status().apps["bacula-server"].units.items())[0]
    username = "test-admin"
    password = juju.run(
        unit_name,
        "create-api-user",
        params={"username": username},
        wait=60,
    ).results["password"]
    address = list(juju.status().apps["bacula-server"].units.values())[0].public_address
    return baculum.Baculum(f"http://{address}:9096/api/v2", username=username, password=password)


@pytest.fixture(scope="module", name="s3")
def s3_client(juju: jubilant.Juju, setup_database):
    """Initialize a S3 client.

    Points boto3 at the test MinIO instance using path-style addressing,
    matching the s3-uri-style configured on s3-integrator.
    """
    minio_address = list(juju.status().apps["minio"].units.values())[0].public_address
    return boto3.client(
        "s3",
        endpoint_url=f"http://{minio_address}:9000",
        aws_access_key_id="minioadmin",  # nosec
        aws_secret_access_key="minioadmin",  # nosec
        config=botocore.config.Config(s3={"addressing_style": "path"}),
    )
--------------------------------------------------------------------------------