├── .coveragerc
├── .env-example
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug-report.yml
│   │   └── feature-request.yml
│   ├── PULL_REQUEST_TEMPLATE.md
│   └── workflows
│       ├── build.yml
│       └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pylintrc
├── .readthedocs.yaml
├── .sourcery.yaml
├── .vscode
│   ├── extensions.json
│   └── settings.json
├── .yamllint.yaml
├── AUTHORS.txt
├── LICENSE.txt
├── MANIFEST.in
├── Makefile
├── README.rst
├── codecov.yml
├── contributing.md
├── dbbackup
│   ├── VERSION
│   ├── __init__.py
│   ├── apps.py
│   ├── checks.py
│   ├── db
│   │   ├── __init__.py
│   │   ├── base.py
│   │   ├── exceptions.py
│   │   ├── mongodb.py
│   │   ├── mysql.py
│   │   ├── postgresql.py
│   │   └── sqlite.py
│   ├── log.py
│   ├── management
│   │   ├── __init__.py
│   │   └── commands
│   │       ├── __init__.py
│   │       ├── _base.py
│   │       ├── dbbackup.py
│   │       ├── dbrestore.py
│   │       ├── listbackups.py
│   │       ├── mediabackup.py
│   │       └── mediarestore.py
│   ├── settings.py
│   ├── storage.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── commands
│   │   │   ├── __init__.py
│   │   │   ├── test_base.py
│   │   │   ├── test_dbbackup.py
│   │   │   ├── test_dbrestore.py
│   │   │   ├── test_listbackups.py
│   │   │   └── test_mediabackup.py
│   │   ├── functional
│   │   │   ├── __init__.py
│   │   │   └── test_commands.py
│   │   ├── settings.py
│   │   ├── test_checks.py
│   │   ├── test_connectors
│   │   │   ├── __init__.py
│   │   │   ├── test_base.py
│   │   │   ├── test_mongodb.py
│   │   │   ├── test_mysql.py
│   │   │   ├── test_postgresql.py
│   │   │   └── test_sqlite.py
│   │   ├── test_log.py
│   │   ├── test_storage.py
│   │   ├── test_utils.py
│   │   ├── testapp
│   │   │   ├── __init__.py
│   │   │   ├── blobs
│   │   │   │   ├── gpg
│   │   │   │   │   ├── pubring.gpg
│   │   │   │   │   └── secring.gpg
│   │   │   │   ├── test.gz
│   │   │   │   ├── test.txt.gpg
│   │   │   │   ├── test.txt.gz
│   │   │   │   ├── test.txt.gz.gpg
│   │   │   │   └── test.txt.tar
│   │   │   ├── management
│   │   │   │   ├── __init__.py
│   │   │   │   └── commands
│   │   │   │       ├── __init__.py
│   │   │   │       ├── count.py
│   │   │   │       └── feed.py
│   │   │   ├── migrations
│   │   │   │   ├── 0001_initial.py
│   │   │   │   ├── 0002_textmodel.py
│   │   │   │   └── __init__.py
│   │   │   ├── models.py
│   │   │   ├── urls.py
│   │   │   └── views.py
│   │   └── utils.py
│   └── utils.py
├── docs
│   ├── Makefile
│   ├── changelog.rst
│   ├── commands.rst
│   ├── conf.py
│   ├── configuration.rst
│   ├── contributing.rst
│   ├── databases.rst
│   ├── index.rst
│   ├── installation.rst
│   ├── integration.rst
│   ├── make.bat
│   └── storage.rst
├── functional.sh
├── pyproject.toml
├── requirements.txt
├── requirements
│   ├── build.txt
│   ├── dev.txt
│   ├── docs.txt
│   └── tests.txt
├── runtests.py
├── setup.py
└── tox.ini
/.coveragerc:
--------------------------------------------------------------------------------
1 | # .coveragerc to control coverage.py
2 | [run]
3 | branch = True
4 | source = dbbackup
5 | omit =
6 | dbbackup/tests/*
7 | dbbackup/models.py
8 | dbbackup/views.py
9 | dbbackup/migrations*
10 | dbbackup/management/__init__.py
11 | dbbackup/management/commands/__init__.py
12 |
13 | [report]
14 | # Regexes for lines to exclude from consideration
15 | exclude_lines =
16 | # Have to re-enable the standard pragma
17 | pragma: no cover
18 | noqa:
19 |
20 | # Don't complain about missing debug-only code:
21 | def __repr__
22 | def __str__
23 | if self\.debug
24 |
25 | # Don't complain if tests don't hit defensive assertion code:
26 | raise AssertionError
27 | raise NotImplementedError
28 |
29 | # Don't complain if non-runnable code isn't run:
30 | if 0:
31 | if __name__ == .__main__.:
32 | __all__
33 | import
34 | deprecated_warning
35 | in_development_warning
36 |
37 | ignore_errors = True
38 |
39 | [html]
40 | directory = coverage_html_report
41 |
--------------------------------------------------------------------------------
/.env-example:
--------------------------------------------------------------------------------
1 | # env variables in dbbackup/tests/settings.py
2 | DB_ENGINE
3 | DB_NAME
4 | DB_USER
5 | DB_PASSWORD
6 | DB_HOST
7 | CONNECTOR
8 | DJANGO_LOG_LEVEL=DEBUG
9 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug-report.yml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: Create a report to help us improve.
3 | labels: ["bug"]
4 | body:
5 | - type: textarea
6 | attributes:
7 | label: Current Situation
8 | description: Discuss what the current issue is, how to reproduce, and link to any relevant prior discussion/context.
9 | validations:
10 | required: true
11 | - type: textarea
12 | attributes:
13 | label: Proposed Actions
14 | description: Describe what ought to be done, and why that will address the reasons for action mentioned above.
15 | validations:
16 | required: false
17 | - type: textarea
18 | attributes:
19 | label: System Information
20 | description: Versions for things such as... Django-dbbackup, Python, Django, Operating System, etc.
21 | validations:
22 | required: false
23 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature-request.yml:
--------------------------------------------------------------------------------
1 | name: Feature request
2 | description: Suggest an idea for this project.
3 | labels: ["feature-request"]
4 | body:
5 | - type: textarea
6 | attributes:
7 | label: Feature description
8 | description: Describe what the feature is, why it is needed, and link any relevant prior discussion/context.
9 | validations:
10 | required: true
11 | - type: textarea
12 | attributes:
13 |       label: Alternative options
14 | description: Describe any alternatives to this feature, and why they were not chosen.
15 | validations:
16 | required: false
17 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Description
2 |
3 |
4 |
5 | ## Checklist
6 |
7 | Please update this checklist as you complete each item:
8 |
9 | - [ ] Tests have been developed for bug fixes or new functionality.
10 | - [ ] The changelog has been updated, if necessary.
11 | - [ ] Documentation has been updated, if necessary.
12 | - [ ] GitHub Issues closed by this PR have been linked.
13 |
14 | By submitting this pull request I agree that all contributions comply with this project's open source license(s).
15 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Test
3 |
4 | on:
5 | push:
6 | branches: [master]
7 | pull_request:
8 | branches: [master]
9 |
10 | jobs:
11 | build:
12 | runs-on: ubuntu-22.04
13 | strategy:
14 | matrix:
15 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
16 | steps:
17 | - uses: actions/checkout@v4
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | python -m pip install -r requirements/tests.txt
26 | python -m pip install -r requirements.txt
27 | # TODO: Remove this conditional when Python 3.7 is dropped and Flake8 executes on
28 | # all Python versions in matrix.
29 | - if: matrix.python-version == '3.8'
30 | name: Linting
31 | run: flake8
32 | # Environments are selected using tox-gh-actions configuration in tox.ini.
33 | - name: Test with tox
34 | run: tox -r
35 | - name: Upload coverage
36 | uses: codecov/codecov-action@v4
37 | with:
38 | name: Python ${{ matrix.python-version }} Codecov
39 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Publish PyPI Release (BROKEN)
3 |
4 | on:
5 | release:
6 | types: [published]
7 |
8 | jobs:
9 | release-package:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v4
14 | # - name: Set up Python
15 | # uses: actions/setup-python@v5
16 | # with:
17 | # python-version: "3.x"
18 |
19 | # - name: Install dependencies
20 | # run: |
21 | # python -m pip install -U pip
22 | # python -m pip install -U -r requirements/build.txt
23 | # python -m pip install -U -r requirements.txt
24 |
25 | # - name: Build package
26 | # run: |
27 | # python -m build --sdist --wheel --outdir dist .
28 | # twine check dist/*
29 |
30 | # - name: Upload packages to Jazzband
31 | # uses: pypa/gh-action-pypi-publish@release/v1
32 | # with:
33 | # user: jazzband
34 | # password: ${{ secrets.JAZZBAND_RELEASE_KEY }}
35 | # repository-url: https://jazzband.co/projects/django-dbbackup/upload
36 |
37 | # This workflopw is disabled due to the following error:
38 | #
39 | # ERROR HTTPError: 500 Internal Server Error from
40 | # https://jazzband.co/projects/django-dbbackup/upload
41 | # INTERNAL SERVER ERROR
42 |
43 | # Until we are transfered out of Jazzband, the workaround is releasing manually via personal PyPI accounts.
44 | # The following steps are required to release a new version of the package:
45 | # python -m pip install -U pip && pip install -U -r requirements/build.txt && pip install -U -r requirements.txt
46 | # python -m build --sdist --wheel --outdir dist . && twine check dist/*
47 | # twine upload dist/*
48 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | .env
11 | env/
12 | .venv
13 | venv/
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *,cover
48 | tests/media/
49 | coverage_html_report/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 |
58 | # Sphinx documentation
59 | docs/_build/
60 |
61 | # PyBuilder
62 | target/
63 |
64 | # IDEs
65 | .idea/
66 | *.sw[po]
67 | test-sqlite
68 | venv
69 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | repos:
3 | - repo: https://github.com/pre-commit/pre-commit-hooks
4 | rev: "v5.0.0"
5 | hooks:
6 | - id: check-merge-conflict
7 | - id: end-of-file-fixer
8 | exclude: ^docs/[^/]*\.svg$
9 | - id: requirements-txt-fixer
10 | - id: trailing-whitespace
11 | types: [python]
12 | - id: check-case-conflict
13 | - id: check-json
14 | - id: check-xml
15 | - id: check-toml
16 | - id: check-xml
17 | - id: check-yaml
18 | - id: debug-statements
19 | - id: check-added-large-files
20 | - id: check-symlinks
21 | - id: debug-statements
22 | - id: detect-aws-credentials
23 | args: ["--allow-missing-credentials"]
24 | - id: detect-private-key
25 | exclude: ^examples|(?:tests/ssl)/
26 | - repo: https://github.com/hadialqattan/pycln
27 | rev: v2.5.0
28 | hooks:
29 | - id: pycln
30 | args: ["--all"]
31 | - repo: https://github.com/asottile/yesqa
32 | rev: v1.5.0
33 | hooks:
34 | - id: yesqa
35 | - repo: https://github.com/pycqa/isort
36 | rev: "6.0.1"
37 | hooks:
38 | - id: isort
39 | args: ["--profile", "black"]
40 | - repo: https://github.com/psf/black
41 | rev: "25.1.0"
42 | hooks:
43 | - id: black
44 | - repo: https://github.com/asottile/pyupgrade
45 | rev: "v3.19.1"
46 | hooks:
47 | - id: pyupgrade
48 | args: ["--py37-plus", "--keep-mock"]
49 | - repo: https://github.com/hhatto/autopep8
50 | rev: "v2.3.2"
51 | hooks:
52 | - id: autopep8
53 | - repo: https://github.com/PyCQA/flake8
54 | rev: "7.2.0"
55 | hooks:
56 | - id: flake8
57 | exclude: "^docs/"
58 | - repo: https://github.com/Lucas-C/pre-commit-hooks-markup
59 | rev: "v1.0.1"
60 | hooks:
61 | - id: rst-linter
62 | files: ^[^/]+[.]rst$
63 | - repo: https://github.com/adrienverge/yamllint
64 | rev: "v1.37.1"
65 | hooks:
66 | - id: yamllint
67 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 | # Add to the black list. It should be a base name, not a
3 | # path. You may set this option multiple times.
4 | ignore=test
5 |
6 | # Pickle collected data for later comparisons.
7 | persistent=yes
8 |
9 | [MESSAGES CONTROL]
10 | disable=broad-except, fixme, missing-module-docstring, missing-class-docstring, missing-function-docstring, too-many-arguments, too-few-public-methods, abstract-method
11 |
12 | [TYPECHECK]
13 | # List of members which are set dynamically and missed by pylint inference
14 | # system, and so shouldn't trigger E0201 when accessed. Python regular
15 | # expressions are accepted.
16 | generated-members=async_request,objects
17 |
18 | [VARIABLES]
19 | # Tells wether we should check for unused import in __init__ files.
20 | init-import=no
21 |
22 | # A regular expression matching names used for dummy variables (i.e. not used).
23 | dummy-variables-rgx=_|dummy
24 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.12"
12 | # You can also specify other tool versions:
13 | # nodejs: "20"
14 | # rust: "1.70"
15 | # golang: "1.20"
16 |
17 | # Build documentation in the "docs/" directory with Sphinx
18 | sphinx:
19 | configuration: docs/conf.py
20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
21 | # builder: "dirhtml"
22 | # Fail on all warnings to avoid broken references
23 | # fail_on_warning: true
24 | # Optionally build your docs in additional formats such as PDF and ePub
25 | # formats:
26 | # - pdf
27 | # - epub
28 |
29 | # Optional but recommended, declare the Python requirements required
30 | # to build your documentation
31 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
32 | python:
33 | install:
34 | - requirements: requirements/docs.txt
35 |
--------------------------------------------------------------------------------
/.sourcery.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | refactor:
3 | skip: ["class-extract-method"]
4 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "eamodio.gitlens",
4 | "github.vscode-pull-request-github",
5 | "esbenp.prettier-vscode",
6 | "ms-python.vscode-pylance",
7 | "ms-python.python",
8 | "gruntfuggly.todo-tree",
9 | "sourcery.sourcery"
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "editor.detectIndentation": false,
3 | "editor.formatOnSave": true,
4 | "python.linting.flake8Enabled": true,
5 | "python.linting.pylintEnabled": true,
6 | "python.languageServer": "Pylance",
7 | "python.analysis.typeCheckingMode": "off",
8 | "python.formatting.provider": "black",
9 | "isort.args": [
10 | "--src=${workspaceFolder}"
11 | ],
12 | "terminal.integrated.scrollback": 10000,
13 | "git.autofetch": true,
14 | "prettier.tabWidth": 4,
15 | "prettier.useTabs": true,
16 | "prettier.endOfLine": "auto",
17 | "files.associations": {
18 | "**/requirements/*.txt": "pip-requirements"
19 | },
20 | "[jsonc]": {
21 | "editor.defaultFormatter": "vscode.json-language-features"
22 | },
23 | "[json]": {
24 | "editor.defaultFormatter": "vscode.json-language-features"
25 | },
26 | "[python]": {
27 | "editor.defaultFormatter": "ms-python.python"
28 | },
29 | "html.format.endWithNewline": true,
30 | "files.insertFinalNewline": true
31 | }
32 |
--------------------------------------------------------------------------------
/.yamllint.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | extends: default
3 |
4 | rules:
5 | line-length: disable
6 | comments: disable
7 | truthy: disable
8 |
--------------------------------------------------------------------------------
/AUTHORS.txt:
--------------------------------------------------------------------------------
1 | Primary Authors:
2 | * Mark (Archmonger)
3 | * John Hagen (johnthagen)
4 | * Michael Shepanski
5 | * Anthony Monthe (ZuluPro)
6 | * Benjamin Bach (benjaoming)
7 |
8 | Contributors:
9 | * Hannes Hapke
10 | * Joe Hu
11 | * Marco Braak
12 | * Nathan Duthoit
13 | * Rich Leland
14 | * Toumhi (Bitbucket)
15 | * Tobias McNulty
16 | * Grant McConnaughey
17 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2010, Michael Shepanski
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification,
5 | are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice,
8 | this list of conditions and the following disclaimer.
9 | * Redistributions in binary form must reproduce the above copyright notice,
10 | this list of conditions and the following disclaimer in the documentation
11 | and/or other materials provided with the distribution.
12 | * Neither the name django-dbbackup nor the names of its contributors
13 | may be used to endorse or promote products derived from this software without
14 | specific prior written permission.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
20 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
23 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include requirements *
2 | include requirements.txt
3 | include README.rst
4 | include LICENSE.txt
5 | include dbbackup/VERSION
6 | recursive-include dbbackup/tests/testapp/blobs *
7 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: all test clean docs
2 |
3 | clean:
4 | find . -name "*.pyc" -type f -delete
5 | find . -name "__pycache__" -type d -exec rm -rf {} \;
6 | find . -name "*.egg-info" -type d -exec rm -rf {} \; || true
7 | rm -rf build/ dist/ \
8 | coverage_html_report .coverage \
9 | *.egg
10 |
11 | test:
12 | python runtests.py
13 |
14 | install:
15 | python setup.py install
16 |
17 | build:
18 | python setup.py build
19 |
20 | docs:
21 | cd docs/ && make clean
22 | cd docs/ && make html
23 |
24 | upload:
25 | make clean
26 | python setup.py sdist bdist_wheel
27 | twine upload dist/*
28 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Django Database Backup
2 | ======================
3 |
4 | .. image:: https://github.com/jazzband/django-dbbackup/actions/workflows/build.yml/badge.svg
5 | :target: https://github.com/jazzband/django-dbbackup/actions
6 |
7 | .. image:: https://readthedocs.org/projects/django-dbbackup/badge/?version=stable
8 | :target: https://django-dbbackup.readthedocs.io/
9 | :alt: Documentation Status
10 |
11 | .. image:: https://codecov.io/gh/jazzband/django-dbbackup/branch/master/graph/badge.svg?token=zaYmStcsuX
12 | :target: https://codecov.io/gh/jazzband/django-dbbackup
13 |
14 | .. image:: https://jazzband.co/static/img/badge.svg
15 | :target: https://jazzband.co/
16 | :alt: Jazzband
17 |
18 | This Django application provides management commands to help backup and
19 | restore your project database and media files with various storages such as
20 | Amazon S3, Dropbox, local file storage or any Django storage.
21 |
22 | It is made to:
23 |
24 | - Allow you to secure your backup with GPG signature and encryption
25 | - Archive with compression
26 | - Deal easily with remote archiving
27 | - Keep your development database up to date
28 | - Use Crontab or Celery to setup automated backups
29 | - Manually backup and restore via Django management commands
30 |
31 | Docs
32 | ====
33 |
34 | See our official documentation at `Read The Docs`_.
35 |
36 | Why use DBBackup
37 | ================
38 |
39 | This software doesn't reinvent the wheel; in a few words, it is a pipe between
40 | your Django project and your backup storage. It uses the traditional dump &
41 | restore mechanisms, applies compression and/or encryption, and uses the storage system you desire.
42 |
43 | It gives a simple interface to backup and restore your database or media
44 | files.
45 |
46 | Contributing
47 | ============
48 |
49 | .. image:: https://jazzband.co/static/img/jazzband.svg
50 | :target: https://jazzband.co/
51 | :alt: Jazzband
52 |
53 | This is a `Jazzband <https://jazzband.co/>`_ project. By contributing you agree
54 | to abide by the `Contributor Code of Conduct <https://jazzband.co/about/conduct>`_
55 | and follow the `guidelines <https://jazzband.co/about/guidelines>`_.
56 |
57 | All contributions are very welcome. Propositions, problems, bugs and
58 | enhancements are tracked with the `GitHub issues`_ system, and patches are submitted
59 | via `pull requests`_.
60 |
61 | We use GitHub Actions as our continuous integration tool.
62 |
63 | .. _`Read The Docs`: https://django-dbbackup.readthedocs.org/
64 | .. _`GitHub issues`: https://github.com/jazzband/django-dbbackup/issues
65 | .. _`pull requests`: https://github.com/jazzband/django-dbbackup/pulls
66 | .. _Coveralls: https://coveralls.io/github/jazzband/django-dbbackup
67 |
--------------------------------------------------------------------------------
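
As a quick orientation before the source files below, a minimal configuration and backup/restore round trip looks roughly like this sketch (setting names follow the project's documentation; the storage location is illustrative):

    # settings.py
    INSTALLED_APPS = [
        # ...
        "dbbackup",
    ]
    DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage"
    DBBACKUP_STORAGE_OPTIONS = {"location": "/var/backups"}  # illustrative path

    # Then, from the command line:
    #   python manage.py dbbackup      # dump the database to the configured storage
    #   python manage.py dbrestore     # restore the most recent dump
    #   python manage.py mediabackup   # same idea for media files
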
/codecov.yml:
--------------------------------------------------------------------------------
1 | ---
2 | coverage:
3 | status:
4 | project:
5 | default:
6 | target: 80% # the required coverage value
7 | threshold: 0.3% # the leniency in hitting the target
8 | patch:
9 | default:
10 | target: 80% # the required coverage value
11 | threshold: 0.3% # the leniency in hitting the target
12 |
--------------------------------------------------------------------------------
/contributing.md:
--------------------------------------------------------------------------------
1 | [![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/)
2 |
3 | This is a [Jazzband](https://jazzband.co/) project. By contributing you agree to abide by the [Contributor Code of Conduct](https://jazzband.co/about/conduct) and follow the [guidelines](https://jazzband.co/about/guidelines).
4 |
--------------------------------------------------------------------------------
/dbbackup/VERSION:
--------------------------------------------------------------------------------
1 | 4.3.0
2 |
--------------------------------------------------------------------------------
/dbbackup/__init__.py:
--------------------------------------------------------------------------------
1 | """Management commands to help backup and restore a project database and media"""
2 |
3 | from pathlib import Path
4 |
5 | import django
6 |
7 | src_dir = Path(__file__).parent
8 | with (src_dir / "VERSION").open() as f:
9 | __version__ = f.read().strip()
10 | """The full version, including alpha/beta/rc tags."""
11 |
12 | VERSION = (x, y, z) = __version__.split(".")
13 | VERSION = ".".join(VERSION[:2])
14 | """The X.Y version. Needed for `docs/conf.py`."""
15 |
16 |
17 | if django.VERSION < (3, 2):
18 | default_app_config = "dbbackup.apps.DbbackupConfig"
19 |
--------------------------------------------------------------------------------
/dbbackup/apps.py:
--------------------------------------------------------------------------------
1 | """Apps for DBBackup"""
2 |
3 | from django.apps import AppConfig
4 | from django.utils.translation import gettext_lazy
5 |
6 | from dbbackup import log
7 |
8 |
9 | class DbbackupConfig(AppConfig):
10 | """
11 | Config for DBBackup application.
12 | """
13 |
14 | name = "dbbackup"
15 | label = "dbbackup"
16 | verbose_name = gettext_lazy("Backup and restore")
17 | default_auto_field = "django.db.models.AutoField"
18 |
19 | def ready(self):
20 | log.load()
21 |
--------------------------------------------------------------------------------
/dbbackup/checks.py:
--------------------------------------------------------------------------------
1 | import re
2 | from datetime import datetime
3 |
4 | from django.core.checks import Tags, Warning, register
5 |
6 | from dbbackup import settings
7 |
8 | W001 = Warning(
9 | "Invalid HOSTNAME parameter",
10 | hint="Set a non empty string to this settings.DBBACKUP_HOSTNAME",
11 | id="dbbackup.W001",
12 | )
13 | W002 = Warning(
14 | "Invalid STORAGE parameter",
15 | hint="Set a valid path to a storage in settings.DBBACKUP_STORAGE",
16 | id="dbbackup.W002",
17 | )
18 | W003 = Warning(
19 | "Invalid FILENAME_TEMPLATE parameter",
20 | hint="Include {datetime} to settings.DBBACKUP_FILENAME_TEMPLATE",
21 | id="dbbackup.W003",
22 | )
23 | W004 = Warning(
24 | "Invalid MEDIA_FILENAME_TEMPLATE parameter",
25 | hint="Include {datetime} to settings.DBBACKUP_MEDIA_FILENAME_TEMPLATE",
26 | id="dbbackup.W004",
27 | )
28 | W005 = Warning(
29 | "Invalid DATE_FORMAT parameter",
30 | hint="settings.DBBACKUP_DATE_FORMAT can contain only [A-Za-z0-9%_-]",
31 | id="dbbackup.W005",
32 | )
33 | W006 = Warning(
34 | "FAILURE_RECIPIENTS has been deprecated",
35 | hint="settings.DBBACKUP_FAILURE_RECIPIENTS is replaced by "
36 | "settings.DBBACKUP_ADMINS",
37 | id="dbbackup.W006",
38 | )
39 | W007 = Warning(
40 | "Invalid FILENAME_TEMPLATE parameter",
41 | hint="settings.DBBACKUP_FILENAME_TEMPLATE must not contain slashes ('/'). "
42 | "Did you mean to change the value for 'location'?",
43 | id="dbbackup.W007",
44 | )
45 | W008 = Warning(
46 | "Invalid MEDIA_FILENAME_TEMPLATE parameter",
47 |     hint="settings.DBBACKUP_MEDIA_FILENAME_TEMPLATE must not contain slashes ('/'). "
48 |     "Did you mean to change the value for 'location'?",
49 |     id="dbbackup.W008",
50 | )
51 |
52 |
53 | def check_filename_templates():
54 | return _check_filename_template(
55 | settings.FILENAME_TEMPLATE,
56 | W007,
57 | "db",
58 | ) + _check_filename_template(
59 | settings.MEDIA_FILENAME_TEMPLATE,
60 | W008,
61 | "media",
62 | )
63 |
64 |
65 | def _check_filename_template(filename_template, check_code, content_type) -> list:
66 | if callable(filename_template):
67 | params = {
68 | "servername": "localhost",
69 | "datetime": datetime.now().strftime(settings.DATE_FORMAT),
70 | "databasename": "default",
71 | "extension": "dump",
72 | "content_type": content_type,
73 | }
74 | filename_template = filename_template(params)
75 |
76 | if "/" in filename_template:
77 | return [check_code]
78 | return []
79 |
80 |
81 | @register(Tags.compatibility)
82 | def check_settings(app_configs, **kwargs):
83 | errors = []
84 | if not settings.HOSTNAME:
85 | errors.append(W001)
86 |
87 | if not settings.STORAGE or not isinstance(settings.STORAGE, str):
88 | errors.append(W002)
89 |
90 | if (
91 | not callable(settings.FILENAME_TEMPLATE)
92 | and "{datetime}" not in settings.FILENAME_TEMPLATE
93 | ):
94 | errors.append(W003)
95 |
96 | if (
97 | not callable(settings.MEDIA_FILENAME_TEMPLATE)
98 | and "{datetime}" not in settings.MEDIA_FILENAME_TEMPLATE
99 | ):
100 | errors.append(W004)
101 |
102 | if re.search(r"[^A-Za-z0-9%_-]", settings.DATE_FORMAT):
103 | errors.append(W005)
104 |
105 | if getattr(settings, "FAILURE_RECIPIENTS", None) is not None:
106 | errors.append(W006)
107 |
108 | errors += check_filename_templates()
109 |
110 | return errors
111 |
--------------------------------------------------------------------------------
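
The checks above also accept a callable filename template, which is invoked with the params dict built in `_check_filename_template`. A minimal sketch of such a setting (the function name is hypothetical):

    # settings.py
    def backup_filename(params):
        # available keys: servername, datetime, databasename, extension, content_type
        return "{databasename}-{datetime}.{extension}".format(**params)

    DBBACKUP_FILENAME_TEMPLATE = backup_filename  # must not produce '/' (see W007)
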
/dbbackup/db/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/db/__init__.py
--------------------------------------------------------------------------------
/dbbackup/db/base.py:
--------------------------------------------------------------------------------
1 | """
2 | Base database connectors
3 | """
4 |
5 | import logging
6 | import os
7 | import shlex
8 | from importlib import import_module
9 | from subprocess import Popen
10 | from tempfile import SpooledTemporaryFile
11 |
12 | from django.core.files.base import File
13 |
14 | from dbbackup import settings, utils
15 |
16 | from . import exceptions
17 |
18 | logger = logging.getLogger("dbbackup.command")
19 | logger.setLevel(logging.DEBUG)
20 |
21 |
22 | CONNECTOR_MAPPING = {
23 | "django.db.backends.sqlite3": "dbbackup.db.sqlite.SqliteConnector",
24 | "django.db.backends.mysql": "dbbackup.db.mysql.MysqlDumpConnector",
25 | "django.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpBinaryConnector",
26 | "django.db.backends.postgresql_psycopg2": "dbbackup.db.postgresql.PgDumpBinaryConnector",
27 | "django.db.backends.oracle": None,
28 | "django_mongodb_engine": "dbbackup.db.mongodb.MongoDumpConnector",
29 | "djongo": "dbbackup.db.mongodb.MongoDumpConnector",
30 | "django.contrib.gis.db.backends.postgis": "dbbackup.db.postgresql.PgDumpGisConnector",
31 | "django.contrib.gis.db.backends.mysql": "dbbackup.db.mysql.MysqlDumpConnector",
32 | "django.contrib.gis.db.backends.oracle": None,
33 | "django.contrib.gis.db.backends.spatialite": "dbbackup.db.sqlite.SqliteConnector",
34 | "django_prometheus.db.backends.postgresql": "dbbackup.db.postgresql.PgDumpBinaryConnector",
35 | "django_prometheus.db.backends.sqlite3": "dbbackup.db.sqlite.SqliteConnector",
36 | "django_prometheus.db.backends.mysql": "dbbackup.db.mysql.MysqlDumpConnector",
37 | "django_prometheus.db.backends.postgis": "dbbackup.db.postgresql.PgDumpGisConnector",
38 | "django_s3_sqlite": "dbbackup.db.sqlite.SqliteConnector",
39 | }
40 |
41 | if settings.CUSTOM_CONNECTOR_MAPPING:
42 | CONNECTOR_MAPPING.update(settings.CUSTOM_CONNECTOR_MAPPING)
43 |
44 |
45 | def get_connector(database_name=None):
46 | """
47 | Get a connector from its database key in settings.
48 | """
49 | from django.db import DEFAULT_DB_ALIAS, connections
50 |
51 | # Get DB
52 | database_name = database_name or DEFAULT_DB_ALIAS
53 | connection = connections[database_name]
54 | engine = connection.settings_dict["ENGINE"]
55 | connector_settings = settings.CONNECTORS.get(database_name, {})
56 | connector_path = connector_settings.get("CONNECTOR", CONNECTOR_MAPPING[engine])
57 | connector_module_path = ".".join(connector_path.split(".")[:-1])
58 | module = import_module(connector_module_path)
59 | connector_name = connector_path.split(".")[-1]
60 | connector = getattr(module, connector_name)
61 | return connector(database_name, **connector_settings)
62 |
63 |
64 | class BaseDBConnector:
65 | """
66 |     Base class for creating database connectors. This kind of object creates
67 |     an interaction with the database and allows backup and restore operations.
68 | """
69 |
70 | extension = "dump"
71 | exclude = []
72 |
73 | def __init__(self, database_name=None, **kwargs):
74 | from django.db import DEFAULT_DB_ALIAS, connections
75 |
76 | self.database_name = database_name or DEFAULT_DB_ALIAS
77 | self.connection = connections[self.database_name]
78 | for attr, value in kwargs.items():
79 | setattr(self, attr.lower(), value)
80 |
81 | @property
82 | def settings(self):
83 | """Mix of database and connector settings."""
84 | if not hasattr(self, "_settings"):
85 | sett = self.connection.settings_dict.copy()
86 | sett.update(settings.CONNECTORS.get(self.database_name, {}))
87 | self._settings = sett
88 | return self._settings
89 |
90 | def generate_filename(self, server_name=None):
91 | return utils.filename_generate(self.extension, self.database_name, server_name)
92 |
93 | def create_dump(self):
94 | return self._create_dump()
95 |
96 | def _create_dump(self):
97 | """
98 | Override this method to define dump creation.
99 | """
100 | raise NotImplementedError("_create_dump not implemented")
101 |
102 | def restore_dump(self, dump):
103 | """
104 | :param dump: Dump file
105 | :type dump: file
106 | """
107 | return self._restore_dump(dump)
108 |
109 | def _restore_dump(self, dump):
110 | """
111 |         Override this method to define dump restoration.
112 | :param dump: Dump file
113 | :type dump: file
114 | """
115 | raise NotImplementedError("_restore_dump not implemented")
116 |
117 |
118 | class BaseCommandDBConnector(BaseDBConnector):
119 | """
120 |     Base class for creating database connectors based on command-line tools.
121 | """
122 |
123 | dump_prefix = ""
124 | dump_suffix = ""
125 | restore_prefix = ""
126 | restore_suffix = ""
127 |
128 | use_parent_env = True
129 | env = {}
130 | dump_env = {}
131 | restore_env = {}
132 |
133 | def run_command(self, command, stdin=None, env=None):
134 | """
135 | Launch a shell command line.
136 |
137 | :param command: Command line to launch
138 | :type command: str
139 | :param stdin: Standard input of command
140 | :type stdin: file
141 | :param env: Environment variable used in command
142 | :type env: dict
143 | :return: Standard output of command
144 | :rtype: file
145 | """
146 | logger.debug(command)
147 | cmd = shlex.split(command)
148 | stdout = SpooledTemporaryFile(
149 | max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR
150 | )
151 | stderr = SpooledTemporaryFile(
152 | max_size=settings.TMP_FILE_MAX_SIZE, dir=settings.TMP_DIR
153 | )
154 | full_env = os.environ.copy() if self.use_parent_env else {}
155 | full_env.update(self.env)
156 | full_env.update(env or {})
157 | try:
158 | if isinstance(stdin, File):
159 | process = Popen(
160 | cmd,
161 | stdin=stdin.open("rb"),
162 | stdout=stdout,
163 | stderr=stderr,
164 | env=full_env,
165 | )
166 | else:
167 | process = Popen(
168 | cmd, stdin=stdin, stdout=stdout, stderr=stderr, env=full_env
169 | )
170 | process.wait()
171 | if process.poll():
172 | stderr.seek(0)
173 | raise exceptions.CommandConnectorError(
174 | "Error running: {}\n{}".format(
175 | command, stderr.read().decode("utf-8")
176 | )
177 | )
178 | stdout.seek(0)
179 | stderr.seek(0)
180 | return stdout, stderr
181 | except OSError as err:
182 | raise exceptions.CommandConnectorError(
183 | f"Error running: {command}\n{str(err)}"
184 | )
185 |
--------------------------------------------------------------------------------
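
Connector selection can be overridden per database alias through `settings.CONNECTORS` (documented as the `DBBACKUP_CONNECTORS` Django setting; dbbackup/settings.py itself is not shown in this excerpt). Any extra keys are lowercased into connector attributes by `BaseDBConnector.__init__`. A sketch, with illustrative values:

    # settings.py -- keys other than CONNECTOR become connector attributes (lowercased)
    DBBACKUP_CONNECTORS = {
        "default": {
            "CONNECTOR": "dbbackup.db.postgresql.PgDumpConnector",
            "DUMP_SUFFIX": "--no-owner",  # appended to the dump command line
        }
    }

    # usage
    from dbbackup.db.base import get_connector

    connector = get_connector("default")
    dump_file = connector.create_dump()
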
/dbbackup/db/exceptions.py:
--------------------------------------------------------------------------------
1 | """Exceptions for database connectors."""
2 |
3 |
4 | class ConnectorError(Exception):
5 | """Base connector error"""
6 |
7 |
8 | class DumpError(ConnectorError):
9 | """Error on dump"""
10 |
11 |
12 | class RestoreError(ConnectorError):
13 | """Error on restore"""
14 |
15 |
16 | class CommandConnectorError(ConnectorError):
17 | """Failing command"""
18 |
--------------------------------------------------------------------------------
/dbbackup/db/mongodb.py:
--------------------------------------------------------------------------------
1 | from dbbackup import utils
2 |
3 | from .base import BaseCommandDBConnector
4 |
5 |
6 | class MongoDumpConnector(BaseCommandDBConnector):
7 | """
8 | MongoDB connector, creates dump with ``mongodump`` and restore with
9 | ``mongorestore``.
10 | """
11 |
12 | dump_cmd = "mongodump"
13 | restore_cmd = "mongorestore"
14 | object_check = True
15 | drop = True
16 |
17 | def _create_dump(self):
18 | cmd = f"{self.dump_cmd} --db {self.settings['NAME']}"
19 | host = self.settings.get("HOST") or "localhost"
20 | port = self.settings.get("PORT") or 27017
21 | cmd += f" --host {host}:{port}"
22 | if self.settings.get("USER"):
23 | cmd += f" --username {self.settings['USER']}"
24 | if self.settings.get("PASSWORD"):
25 | cmd += f" --password {utils.get_escaped_command_arg(self.settings['PASSWORD'])}"
26 |
27 | if self.settings.get("AUTH_SOURCE"):
28 | cmd += f" --authenticationDatabase {self.settings['AUTH_SOURCE']}"
29 | for collection in self.exclude:
30 | cmd += f" --excludeCollection {collection}"
31 | cmd += " --archive"
32 | cmd = f"{self.dump_prefix} {cmd} {self.dump_suffix}"
33 | stdout, stderr = self.run_command(cmd, env=self.dump_env)
34 | return stdout
35 |
36 | def _restore_dump(self, dump):
37 | cmd = self.restore_cmd
38 | host = self.settings.get("HOST") or "localhost"
39 | port = self.settings.get("PORT") or 27017
40 | cmd += f" --host {host}:{port}"
41 | if self.settings.get("USER"):
42 | cmd += f" --username {self.settings['USER']}"
43 | if self.settings.get("PASSWORD"):
44 | cmd += f" --password {utils.get_escaped_command_arg(self.settings['PASSWORD'])}"
45 |
46 | if self.settings.get("AUTH_SOURCE"):
47 | cmd += f" --authenticationDatabase {self.settings['AUTH_SOURCE']}"
48 | if self.object_check:
49 | cmd += " --objcheck"
50 | if self.drop:
51 | cmd += " --drop"
52 | cmd += " --archive"
53 | cmd = f"{self.restore_prefix} {cmd} {self.restore_suffix}"
54 | return self.run_command(cmd, stdin=dump, env=self.restore_env)
55 |
--------------------------------------------------------------------------------
/dbbackup/db/mysql.py:
--------------------------------------------------------------------------------
1 | from dbbackup import utils
2 |
3 | from .base import BaseCommandDBConnector
4 |
5 |
6 | class MysqlDumpConnector(BaseCommandDBConnector):
7 | """
8 | MySQL connector, creates dump with ``mysqldump`` and restore with
9 | ``mysql``.
10 | """
11 |
12 | dump_cmd = "mysqldump"
13 | restore_cmd = "mysql"
14 |
15 | def _create_dump(self):
16 | cmd = f"{self.dump_cmd} {self.settings['NAME']} --quick"
17 | if self.settings.get("HOST"):
18 | cmd += f" --host={self.settings['HOST']}"
19 | if self.settings.get("PORT"):
20 | cmd += f" --port={self.settings['PORT']}"
21 | if self.settings.get("USER"):
22 | cmd += f" --user={self.settings['USER']}"
23 | if self.settings.get("PASSWORD"):
24 | cmd += f" --password={utils.get_escaped_command_arg(self.settings['PASSWORD'])}"
25 |
26 | for table in self.exclude:
27 | cmd += f" --ignore-table={self.settings['NAME']}.{table}"
28 | cmd = f"{self.dump_prefix} {cmd} {self.dump_suffix}"
29 | stdout, stderr = self.run_command(cmd, env=self.dump_env)
30 | return stdout
31 |
32 | def _restore_dump(self, dump):
33 | cmd = f"{self.restore_cmd} {self.settings['NAME']}"
34 | if self.settings.get("HOST"):
35 | cmd += f" --host={self.settings['HOST']}"
36 | if self.settings.get("PORT"):
37 | cmd += f" --port={self.settings['PORT']}"
38 | if self.settings.get("USER"):
39 | cmd += f" --user={self.settings['USER']}"
40 | if self.settings.get("PASSWORD"):
41 | cmd += f" --password={utils.get_escaped_command_arg(self.settings['PASSWORD'])}"
42 |
43 | cmd = f"{self.restore_prefix} {cmd} {self.restore_suffix}"
44 | stdout, stderr = self.run_command(cmd, stdin=dump, env=self.restore_env)
45 | return stdout, stderr
46 |
--------------------------------------------------------------------------------
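
Per-database options reach the connector the same way; for the MySQL connector above, `exclude` becomes repeated `--ignore-table` flags. Illustrative values (the table names are hypothetical):

    # settings.py -- EXCLUDE is lowercased into MysqlDumpConnector.exclude
    DBBACKUP_CONNECTORS = {
        "default": {
            "EXCLUDE": ["django_session", "audit_log"],
        }
    }
    # Resulting dump command, roughly:
    #   mysqldump mydb --quick --ignore-table=mydb.django_session --ignore-table=mydb.audit_log
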
/dbbackup/db/postgresql.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import List, Optional
3 | from urllib.parse import quote
4 |
5 | from .base import BaseCommandDBConnector
6 |
7 | logger = logging.getLogger("dbbackup.command")
8 |
9 |
10 | def create_postgres_uri(self):
11 | host = self.settings.get("HOST", "localhost")
12 | dbname = self.settings.get("NAME", "")
13 | user = quote(self.settings.get("USER") or "")
14 | password = self.settings.get("PASSWORD", "")
15 | password = f":{quote(password)}" if password else ""
16 | if not user:
17 | password = ""
18 | else:
19 | host = "@" + host
20 |
21 | port = ":{}".format(self.settings.get("PORT")) if self.settings.get("PORT") else ""
22 | dbname = f"--dbname=postgresql://{user}{password}{host}{port}/{dbname}"
23 | return dbname
24 |
25 |
26 | class PgDumpConnector(BaseCommandDBConnector):
27 | """
28 |     PostgreSQL connector, it uses ``pg_dump`` to create an SQL text file
29 |     and ``psql`` to restore it.
30 | """
31 |
32 | extension = "psql"
33 | dump_cmd = "pg_dump"
34 | restore_cmd = "psql"
35 | single_transaction = True
36 | drop = True
37 | schemas: Optional[List[str]] = []
38 |
39 | def _create_dump(self):
40 | cmd = f"{self.dump_cmd} "
41 | cmd = cmd + create_postgres_uri(self)
42 |
43 | for table in self.exclude:
44 | cmd += f" --exclude-table-data={table}"
45 |
46 | if self.drop:
47 | cmd += " --clean"
48 |
49 | if self.schemas:
50 | # First schema is not prefixed with -n
51 | # when using join function so add it manually.
52 | cmd += " -n " + " -n ".join(self.schemas)
53 |
54 | cmd = f"{self.dump_prefix} {cmd} {self.dump_suffix}"
55 | stdout, stderr = self.run_command(cmd, env=self.dump_env)
56 | return stdout
57 |
58 | def _restore_dump(self, dump):
59 | cmd = f"{self.restore_cmd} "
60 | cmd = cmd + create_postgres_uri(self)
61 |
62 | # without this, psql terminates with an exit value of 0 regardless of errors
63 | cmd += " --set ON_ERROR_STOP=on"
64 |
65 | if self.schemas:
66 | cmd += " -n " + " -n ".join(self.schemas)
67 |
68 | if self.single_transaction:
69 | cmd += " --single-transaction"
70 |
71 | cmd += " {}".format(self.settings["NAME"])
72 | cmd = f"{self.restore_prefix} {cmd} {self.restore_suffix}"
73 | stdout, stderr = self.run_command(cmd, stdin=dump, env=self.restore_env)
74 | return stdout, stderr
75 |
76 |
77 | class PgDumpGisConnector(PgDumpConnector):
78 | """
79 |     PostGIS connector, same as :class:`PgDumpConnector` but enables the
80 |     postgis extension if it is not enabled yet.
81 | """
82 |
83 | psql_cmd = "psql"
84 |
85 | def _enable_postgis(self):
86 | cmd = f'{self.psql_cmd} -c "CREATE EXTENSION IF NOT EXISTS postgis;"'
87 | cmd += " --username={}".format(self.settings["ADMIN_USER"])
88 | cmd += " --no-password"
89 |
90 | if self.settings.get("HOST"):
91 | cmd += " --host={}".format(self.settings["HOST"])
92 |
93 | if self.settings.get("PORT"):
94 | cmd += " --port={}".format(self.settings["PORT"])
95 |
96 | return self.run_command(cmd)
97 |
98 | def _restore_dump(self, dump):
99 | if self.settings.get("ADMIN_USER"):
100 | self._enable_postgis()
101 | return super()._restore_dump(dump)
102 |
103 |
104 | class PgDumpBinaryConnector(PgDumpConnector):
105 | """
106 |     PostgreSQL connector, it uses ``pg_dump`` to create a binary (custom-format)
107 |     dump and ``pg_restore`` to restore it.
108 | """
109 |
110 | extension = "psql.bin"
111 | dump_cmd = "pg_dump"
112 | restore_cmd = "pg_restore"
113 | single_transaction = True
114 | drop = True
115 | if_exists = False
116 | pg_options = None
117 |
118 | def _create_dump(self):
119 | cmd = f"{self.dump_cmd} "
120 | cmd = cmd + create_postgres_uri(self)
121 |
122 | cmd += " --format=custom"
123 | for table in self.exclude:
124 | cmd += f" --exclude-table-data={table}"
125 |
126 | if self.schemas:
127 | cmd += " -n " + " -n ".join(self.schemas)
128 |
129 | cmd = f"{self.dump_prefix} {cmd} {self.dump_suffix}"
130 | stdout, _ = self.run_command(cmd, env=self.dump_env)
131 | return stdout
132 |
133 |     def _restore_dump(self, dump):
134 | """
135 | Restore a PostgreSQL dump using subprocess with argument list.
136 |
137 | Assumes that restore_prefix, restore_cmd, pg_options, and restore_suffix
138 | are either None, strings (single args), or lists of strings.
139 |
140 | Builds the command as a list.
141 | """
142 |
143 | dbname = create_postgres_uri(self)
144 | cmd = []
145 |
146 | # Flatten optional values
147 | if self.restore_prefix:
148 | cmd.extend(
149 | self.restore_prefix
150 | if isinstance(self.restore_prefix, list)
151 | else [self.restore_prefix]
152 | )
153 |
154 | if self.restore_cmd:
155 | cmd.extend(
156 | self.restore_cmd
157 | if isinstance(self.restore_cmd, list)
158 | else [self.restore_cmd]
159 | )
160 |
161 | if self.pg_options:
162 | cmd.extend(
163 | self.pg_options
164 | if isinstance(self.pg_options, list)
165 | else [self.pg_options]
166 | )
167 |
168 | cmd.extend([dbname])
169 |
170 | if self.single_transaction:
171 | cmd.extend(["--single-transaction"])
172 |
173 | if self.drop:
174 | cmd.extend(["--clean"])
175 |
176 | if self.schemas:
177 | for schema in self.schemas:
178 | cmd.extend(["-n", schema])
179 |
180 | if self.if_exists:
181 | cmd.extend(["--if-exists"])
182 |
183 | if self.restore_suffix:
184 | cmd.extend(
185 | self.restore_suffix
186 | if isinstance(self.restore_suffix, list)
187 | else [self.restore_suffix]
188 | )
189 |
190 | cmd_str = " ".join(cmd)
191 |         stdout, _ = self.run_command(cmd_str, stdin=dump, env=self.restore_env)
192 |
193 | return stdout
194 |
--------------------------------------------------------------------------------
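
To make the URI construction above concrete, for illustrative settings the helper yields approximately:

    # create_postgres_uri(self) with settings:
    #   {"NAME": "mydb", "USER": "pguser", "PASSWORD": "s3cret",
    #    "HOST": "db.example.com", "PORT": 5432}
    # returns:
    #   "--dbname=postgresql://pguser:s3cret@db.example.com:5432/mydb"
    #
    # With no USER, the password and the "@" prefix are both dropped:
    #   "--dbname=postgresql://db.example.com:5432/mydb"
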
/dbbackup/db/sqlite.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from io import BytesIO
3 | from shutil import copyfileobj
4 | from tempfile import SpooledTemporaryFile
5 |
6 | from django.db import IntegrityError, OperationalError
7 |
8 | from .base import BaseDBConnector
9 |
10 | DUMP_TABLES = """
11 | SELECT "name", "type", "sql"
12 | FROM "sqlite_master"
13 | WHERE "sql" NOT NULL AND "type" == 'table'
14 | ORDER BY "name"
15 | """
16 | DUMP_ETC = """
17 | SELECT "name", "type", "sql"
18 | FROM "sqlite_master"
19 | WHERE "sql" NOT NULL AND "type" IN ('index', 'trigger', 'view')
20 | """
21 |
22 |
23 | class SqliteConnector(BaseDBConnector):
24 | """
25 |     Create a dump at the SQL layer, as the ``.dump`` command does in sqlite3.
26 |     Restore by evaluating the created SQL.
27 | """
28 |
29 | def _write_dump(self, fileobj):
30 | cursor = self.connection.cursor()
31 | cursor.execute(DUMP_TABLES)
32 | for table_name, _, sql in cursor.fetchall():
33 | if table_name.startswith("sqlite_") or table_name in self.exclude:
34 | continue
35 | if sql.startswith("CREATE TABLE"):
36 | sql = sql.replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS")
37 | # Make SQL commands in 1 line
38 | sql = sql.replace("\n ", "")
39 | sql = sql.replace("\n)", ")")
40 | fileobj.write(f"{sql};\n".encode())
41 |
42 | table_name_ident = table_name.replace('"', '""')
43 | res = cursor.execute(f'PRAGMA table_info("{table_name_ident}")')
44 | column_names = [str(table_info[1]) for table_info in res.fetchall()]
45 | q = """SELECT 'INSERT INTO "{0}" VALUES({1})' FROM "{0}";\n""".format(
46 | table_name_ident,
47 | ",".join(
48 | """'||quote("{}")||'""".format(col.replace('"', '""'))
49 | for col in column_names
50 | ),
51 | )
52 | query_res = cursor.execute(q)
53 | for row in query_res:
54 | fileobj.write(f"{row[0]};\n".encode())
55 | schema_res = cursor.execute(DUMP_ETC)
56 | for name, _, sql in schema_res.fetchall():
57 | if sql.startswith("CREATE INDEX"):
58 | sql = sql.replace("CREATE INDEX", "CREATE INDEX IF NOT EXISTS")
59 | fileobj.write(f"{sql};\n".encode())
60 | cursor.close()
61 |
62 | def create_dump(self):
63 | if not self.connection.is_usable():
64 | self.connection.connect()
65 | dump_file = SpooledTemporaryFile(max_size=10 * 1024 * 1024)
66 | self._write_dump(dump_file)
67 | dump_file.seek(0)
68 | return dump_file
69 |
70 | def restore_dump(self, dump):
71 | if not self.connection.is_usable():
72 | self.connection.connect()
73 | cursor = self.connection.cursor()
74 | sql_command = b""
75 | sql_is_complete = True
76 | for line in dump.readlines():
77 | sql_command = sql_command + line
78 | line_str = line.decode("UTF-8")
79 | if line_str.startswith("INSERT") and not line_str.endswith(");\n"):
80 | sql_is_complete = False
81 | continue
82 | if not sql_is_complete and line_str.endswith(");\n"):
83 | sql_is_complete = True
84 |
85 | if sql_is_complete:
86 | try:
87 | cursor.execute(sql_command.decode("UTF-8"))
88 | except (OperationalError, IntegrityError) as err:
89 | warnings.warn(f"Error in db restore: {err}")
90 | sql_command = b""
91 |
92 |
93 | class SqliteCPConnector(BaseDBConnector):
94 | """
95 |     Create a dump by copying the binary database file.
96 |     Restore by simply copying it back to the original location.
97 | """
98 |
99 | def create_dump(self):
100 | path = self.connection.settings_dict["NAME"]
101 | dump = BytesIO()
102 | with open(path, "rb") as db_file:
103 | copyfileobj(db_file, dump)
104 | dump.seek(0)
105 | return dump
106 |
107 | def restore_dump(self, dump):
108 | path = self.connection.settings_dict["NAME"]
109 | with open(path, "wb") as db_file:
110 | copyfileobj(dump, db_file)
111 |
--------------------------------------------------------------------------------
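
A minimal usage sketch for the SQL-level connector above (the output file name is hypothetical):

    from dbbackup.db.sqlite import SqliteConnector

    connector = SqliteConnector()      # defaults to the "default" database alias
    dump = connector.create_dump()     # SpooledTemporaryFile of SQL statements
    with open("backup.sql", "wb") as fd:
        fd.write(dump.read())
    # Later, replay the statements into the database:
    # connector.restore_dump(open("backup.sql", "rb"))
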
/dbbackup/log.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import django
4 | from django.utils.log import AdminEmailHandler
5 |
6 |
7 | class DbbackupAdminEmailHandler(AdminEmailHandler):
8 | def emit(self, record):
9 | # Monkey patch for old Django versions without send_mail method
10 | if django.VERSION < (1, 8):
11 | from . import utils
12 |
13 | django.core.mail.mail_admins = utils.mail_admins
14 | super().emit(record)
15 |
16 | def send_mail(self, subject, message, *args, **kwargs):
17 | from . import utils
18 |
19 | utils.mail_admins(
20 | subject, message, *args, connection=self.connection(), **kwargs
21 | )
22 |
23 |
24 | class MailEnabledFilter(logging.Filter):
25 | def filter(self, record):
26 | from .settings import SEND_EMAIL
27 |
28 | return SEND_EMAIL
29 |
30 |
31 | def load():
32 | mail_admins_handler = DbbackupAdminEmailHandler(include_html=True)
33 | mail_admins_handler.setLevel(logging.ERROR)
34 | mail_admins_handler.addFilter(MailEnabledFilter())
35 |
36 | logger = logging.getLogger("dbbackup")
37 | logger.setLevel(logging.INFO)
38 | logger.handlers = [mail_admins_handler]
39 |
--------------------------------------------------------------------------------
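
The handler above only emails admins when `MailEnabledFilter` returns a truthy `SEND_EMAIL`. In a project this is typically driven by settings like the following; the exact names come from dbbackup's settings module, which is not shown in this excerpt, so treat them as assumptions:

    # settings.py -- assumed setting names
    DBBACKUP_SEND_EMAIL = True                      # gate checked by MailEnabledFilter
    DBBACKUP_ADMINS = [("Ops", "ops@example.com")]  # recipients for error reports
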
/dbbackup/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/management/__init__.py
--------------------------------------------------------------------------------
/dbbackup/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/management/commands/__init__.py
--------------------------------------------------------------------------------
/dbbackup/management/commands/_base.py:
--------------------------------------------------------------------------------
1 | """
2 | Abstract Command.
3 | """
4 |
5 | import logging
6 | import sys
7 | from optparse import make_option as optparse_make_option
8 | from shutil import copyfileobj
9 |
10 | import django
11 | from django.core.management.base import BaseCommand, CommandError
12 |
13 | from ...storage import StorageError
14 |
15 | USELESS_ARGS = ("callback", "callback_args", "callback_kwargs", "metavar")
16 | TYPES = {
17 | "string": str,
18 | "int": int,
19 | "long": int,
20 | "float": float,
21 | "complex": complex,
22 | "choice": list,
23 | }
24 | LOGGING_VERBOSITY = {
25 | 0: logging.WARN,
26 | 1: logging.INFO,
27 | 2: logging.DEBUG,
28 | 3: logging.DEBUG,
29 | }
30 |
31 |
32 | def make_option(*args, **kwargs):
33 | return args, kwargs
34 |
35 |
36 | class BaseDbBackupCommand(BaseCommand):
37 | """
38 |     Base command class used to create all dbbackup commands.
39 | """
40 |
41 | base_option_list = (
42 | make_option(
43 | "--noinput",
44 | action="store_false",
45 | dest="interactive",
46 | default=True,
47 | help="Tells Django to NOT prompt the user for input of any kind.",
48 | ),
49 | make_option(
50 | "-q",
51 | "--quiet",
52 | action="store_true",
53 | default=False,
54 | help="Tells Django to NOT output other text than errors.",
55 | ),
56 | )
57 | option_list = ()
58 |
59 | verbosity = 1
60 | quiet = False
61 | logger = logging.getLogger("dbbackup.command")
62 |
63 | def __init__(self, *args, **kwargs):
64 | self.option_list = self.base_option_list + self.option_list
65 | if django.VERSION < (1, 10):
66 | options = tuple(
67 | optparse_make_option(*_args, **_kwargs)
68 | for _args, _kwargs in self.option_list
69 | )
70 |
71 | self.option_list = options + BaseCommand.option_list
72 | super().__init__(*args, **kwargs)
73 |
74 | def add_arguments(self, parser):
75 | for args, kwargs in self.option_list:
76 | kwargs = {
77 | k: v
78 | for k, v in kwargs.items()
79 | if not k.startswith("_") and k not in USELESS_ARGS
80 | }
81 | parser.add_argument(*args, **kwargs)
82 |
83 | def _set_logger_level(self):
84 | level = 60 if self.quiet else LOGGING_VERBOSITY[int(self.verbosity)]
85 | self.logger.setLevel(level)
86 |
87 | def _ask_confirmation(self):
88 | answer = input("Are you sure you want to continue? [Y/n] ")
89 | if answer.lower().startswith("n"):
90 | self.logger.info("Quitting")
91 | sys.exit(0)
92 |
93 | def read_from_storage(self, path):
94 | return self.storage.read_file(path)
95 |
96 | def write_to_storage(self, file, path):
97 | self.logger.info("Writing file to %s", path)
98 | self.storage.write_file(file, path)
99 |
100 | def read_local_file(self, path):
101 | """Open file in read mode on local filesystem."""
102 | return open(path, "rb")
103 |
104 | def write_local_file(self, outputfile, path):
105 | """Write file to the desired path."""
106 | self.logger.info("Writing file to %s", path)
107 | outputfile.seek(0)
108 | with open(path, "wb") as fd:
109 | copyfileobj(outputfile, fd)
110 |
111 | def _get_backup_file(self, database=None, servername=None):
112 | if self.path:
113 | input_filename = self.path
114 | input_file = self.read_local_file(self.path)
115 | else:
116 | if self.filename:
117 | input_filename = self.filename
118 | # Fetch the latest backup if filepath not specified
119 | else:
120 | self.logger.info("Finding latest backup")
121 | try:
122 | input_filename = self.storage.get_latest_backup(
123 | encrypted=self.decrypt,
124 | compressed=self.uncompress,
125 | content_type=self.content_type,
126 | database=database,
127 | servername=servername,
128 | )
129 | except StorageError as err:
130 | raise CommandError(err.args[0]) from err
131 | input_file = self.read_from_storage(input_filename)
132 | return input_filename, input_file
133 |
134 | def _cleanup_old_backups(self, database=None, servername=None):
135 | """
136 | Cleanup old backups, keeping the number of backups specified by
137 | DBBACKUP_CLEANUP_KEEP.
138 | """
139 | self.storage.clean_old_backups(
140 | encrypted=self.encrypt,
141 | compressed=self.compress,
142 | content_type=self.content_type,
143 | database=database,
144 | servername=servername,
145 | )
146 |
--------------------------------------------------------------------------------
/dbbackup/management/commands/dbbackup.py:
--------------------------------------------------------------------------------
1 | """
2 | Command for backup database.
3 | """
4 |
5 | from django.core.management.base import CommandError
6 |
7 | from ... import settings, utils
8 | from ...db.base import get_connector
9 | from ...storage import StorageError, get_storage
10 | from ._base import BaseDbBackupCommand, make_option
11 |
12 |
13 | class Command(BaseDbBackupCommand):
14 | help = "Backup a database, encrypt and/or compress."
15 | content_type = "db"
16 |
17 | option_list = BaseDbBackupCommand.option_list + (
18 | make_option(
19 | "-c",
20 | "--clean",
21 | dest="clean",
22 | action="store_true",
23 | default=False,
24 | help="Clean up old backup files",
25 | ),
26 | make_option(
27 | "-d",
28 | "--database",
29 |             help="Database(s) to backup, specified by key and separated by"
30 |             " commas (default: all)",
31 | ),
32 | make_option(
33 | "-s",
34 | "--servername",
35 | help="Specify server name to include in backup filename",
36 | ),
37 | make_option(
38 | "-z",
39 | "--compress",
40 | action="store_true",
41 | default=False,
42 | help="Compress the backup files",
43 | ),
44 | make_option(
45 | "-e",
46 | "--encrypt",
47 | action="store_true",
48 | default=False,
49 | help="Encrypt the backup files",
50 | ),
51 | make_option(
52 | "-o", "--output-filename", default=None, help="Specify filename on storage"
53 | ),
54 | make_option(
55 | "-O",
56 | "--output-path",
57 | default=None,
58 | help="Specify where to store on local filesystem",
59 | ),
60 | make_option(
61 | "-x", "--exclude-tables", default=None, help="Exclude tables from backup"
62 | ),
63 | make_option(
64 | "-n",
65 | "--schema",
66 | action="append",
67 | default=[],
68 | help="Specify schema(s) to backup. Can be used multiple times.",
69 | ),
70 | )
71 |
72 | @utils.email_uncaught_exception
73 | def handle(self, **options):
74 | self.verbosity = options.get("verbosity")
75 | self.quiet = options.get("quiet")
76 | self._set_logger_level()
77 |
78 | self.clean = options.get("clean")
79 |
80 | self.servername = options.get("servername")
81 | self.compress = options.get("compress")
82 | self.encrypt = options.get("encrypt")
83 |
84 | self.filename = options.get("output_filename")
85 | self.path = options.get("output_path")
86 | self.exclude_tables = options.get("exclude_tables")
87 | self.storage = get_storage()
88 | self.schemas = options.get("schema")
89 |
90 | self.database = options.get("database") or ""
91 |
92 | for database_key in self._get_database_keys():
93 | self.connector = get_connector(database_key)
94 | if self.connector and self.exclude_tables:
95 | self.connector.exclude.extend(
96 | list(self.exclude_tables.replace(" ", "").split(","))
97 | )
98 | database = self.connector.settings
99 | try:
100 | self._save_new_backup(database)
101 | if self.clean:
102 | self._cleanup_old_backups(database=database_key)
103 | except StorageError as err:
104 | raise CommandError(err) from err
105 |
106 | def _get_database_keys(self):
107 | return self.database.split(",") if self.database else settings.DATABASES
108 |
109 | def _save_new_backup(self, database):
110 | """
111 | Save a new backup file.
112 | """
113 | self.logger.info("Backing Up Database: %s", database["NAME"])
114 |         # Generate the filename, apply schema selection and create the dump
115 | filename = self.connector.generate_filename(self.servername)
116 |
117 | if self.schemas:
118 | self.connector.schemas = self.schemas
119 |
120 | outputfile = self.connector.create_dump()
121 |
122 |         # Apply transformations: compression and/or encryption
123 | if self.compress:
124 | compressed_file, filename = utils.compress_file(outputfile, filename)
125 | outputfile = compressed_file
126 |
127 | if self.encrypt:
128 | encrypted_file, filename = utils.encrypt_file(outputfile, filename)
129 | outputfile = encrypted_file
130 |
131 | # Set file name
132 | filename = self.filename or filename
133 | self.logger.debug("Backup size: %s", utils.handle_size(outputfile))
134 |
135 | # Store backup
136 | outputfile.seek(0)
137 |
138 | if self.path is None:
139 | self.write_to_storage(outputfile, filename)
140 |
141 | else:
142 | self.write_local_file(outputfile, self.path)
143 |
--------------------------------------------------------------------------------
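
For illustration only, the command above can also be driven programmatically; keyword names passed to Django's `call_command` map to the `dest` values declared in `option_list`, and the "default" alias below is just an example:

    from django.core.management import call_command

    # Back up the "default" database, gzip the dump and prune old backups
    # according to DBBACKUP_CLEANUP_KEEP.
    call_command("dbbackup", database="default", compress=True, clean=True)

    # Store the dump under a fixed name on the backup storage.
    call_command("dbbackup", output_filename="manual-backup.dump")
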
/dbbackup/management/commands/dbrestore.py:
--------------------------------------------------------------------------------
1 | """
2 | Restore database.
3 | """
4 |
5 | from django.conf import settings
6 | from django.core.management.base import CommandError
7 | from django.db import connection
8 |
9 | from ... import utils
10 | from ...db.base import get_connector
11 | from ...storage import StorageError, get_storage
12 | from ._base import BaseDbBackupCommand, make_option
13 |
14 |
15 | class Command(BaseDbBackupCommand):
16 | help = "Restore a database backup from storage, encrypted and/or compressed."
17 | content_type = "db"
18 | no_drop = False
19 | pg_options = ""
20 |
21 | option_list = BaseDbBackupCommand.option_list + (
22 | make_option("-d", "--database", help="Database to restore"),
23 | make_option("-i", "--input-filename", help="Specify filename to backup from"),
24 | make_option(
25 | "-I", "--input-path", help="Specify path on local filesystem to backup from"
26 | ),
27 | make_option(
28 | "-s",
29 | "--servername",
30 | help="If backup file is not specified, filter the "
31 | "existing ones with the given servername",
32 | ),
33 | make_option(
34 | "-c",
35 | "--decrypt",
36 | default=False,
37 | action="store_true",
38 | help="Decrypt data before restoring",
39 | ),
40 | make_option(
41 | "-p", "--passphrase", help="Passphrase for decrypt file", default=None
42 | ),
43 | make_option(
44 | "-z",
45 | "--uncompress",
46 | action="store_true",
47 | default=False,
48 | help="Uncompress gzip data before restoring",
49 | ),
50 | make_option(
51 | "-n",
52 | "--schema",
53 | action="append",
54 | default=[],
55 | help="Specify schema(s) to restore. Can be used multiple times.",
56 | ),
57 | make_option(
58 | "-r",
59 | "--no-drop",
60 | action="store_true",
61 | default=False,
62 | help="Don't clean (drop) the database. This only works with mongodb and postgresql.",
63 | ),
64 | make_option(
65 | "--pg-options",
66 | dest="pg_options",
67 | default="",
68 | help="Additional pg_restore options, e.g. '--if-exists --no-owner'. Use quotes.",
69 | ),
70 | )
71 |
72 | def handle(self, *args, **options):
73 | """Django command handler."""
74 | self.verbosity = int(options.get("verbosity"))
75 | self.quiet = options.get("quiet")
76 | self._set_logger_level()
77 |
78 | try:
79 | connection.close()
80 | self.filename = options.get("input_filename")
81 | self.path = options.get("input_path")
82 | self.servername = options.get("servername")
83 | self.decrypt = options.get("decrypt")
84 | self.uncompress = options.get("uncompress")
85 | self.passphrase = options.get("passphrase")
86 | self.interactive = options.get("interactive")
87 | self.input_database_name = options.get("database")
88 | self.database_name, self.database = self._get_database(
89 | self.input_database_name
90 | )
91 | self.storage = get_storage()
92 | self.no_drop = options.get("no_drop")
93 | self.pg_options = options.get("pg_options", "")
94 | self.schemas = options.get("schema")
95 | self._restore_backup()
96 | except StorageError as err:
97 | raise CommandError(err) from err
98 |
99 | def _get_database(self, database_name: str):
100 | """Get the database to restore."""
101 | if not database_name:
102 | if len(settings.DATABASES) > 1:
103 | errmsg = (
104 | "Because this project contains more than one database, you"
105 | " must specify the --database option."
106 | )
107 | raise CommandError(errmsg)
108 | database_name = list(settings.DATABASES.keys())[0]
109 | if database_name not in settings.DATABASES:
110 | raise CommandError(f"Database {database_name} does not exist.")
111 | return database_name, settings.DATABASES[database_name]
112 |
113 | def _restore_backup(self):
114 | """Restore the specified database."""
115 | input_filename, input_file = self._get_backup_file(
116 | database=self.input_database_name, servername=self.servername
117 | )
118 |
119 | self.logger.info(
120 | "Restoring backup for database '%s' and server '%s'",
121 | self.database_name,
122 | self.servername,
123 | )
124 |
125 | if self.schemas:
126 | self.logger.info(f"Restoring schemas: {self.schemas}")
127 |
128 | self.logger.info(f"Restoring: {input_filename}")
129 |
130 | if self.decrypt:
131 | unencrypted_file, input_filename = utils.unencrypt_file(
132 | input_file, input_filename, self.passphrase
133 | )
134 | input_file.close()
135 | input_file = unencrypted_file
136 | if self.uncompress:
137 | uncompressed_file, input_filename = utils.uncompress_file(
138 | input_file, input_filename
139 | )
140 | input_file.close()
141 | input_file = uncompressed_file
142 |
143 | self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file))
144 | if self.interactive:
145 | self._ask_confirmation()
146 |
147 | input_file.seek(0)
148 | self.connector = get_connector(self.database_name)
149 | if self.schemas:
150 | self.connector.schemas = self.schemas
151 | self.connector.drop = not self.no_drop
152 | self.connector.pg_options = self.pg_options
153 | self.connector.restore_dump(input_file)
154 |
--------------------------------------------------------------------------------
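
A hedged usage sketch for the restore command above; the alias and pg_restore flags are illustrative, and `interactive=False` relies on the `--noinput` option inherited from the base command:

    from django.core.management import call_command

    # Restore the latest matching backup into "default" without prompting,
    # uncompressing gzip data and passing extra flags through to pg_restore.
    call_command(
        "dbrestore",
        database="default",
        uncompress=True,
        interactive=False,
        pg_options="--if-exists --no-owner",
    )
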
/dbbackup/management/commands/listbackups.py:
--------------------------------------------------------------------------------
1 | """
2 | List backups.
3 | """
4 |
5 | from ... import utils
6 | from ...storage import get_storage
7 | from ._base import BaseDbBackupCommand, make_option
8 |
9 | ROW_TEMPLATE = "{name:40} {datetime:20}"
10 | FILTER_KEYS = ("encrypted", "compressed", "content_type", "database")
11 |
12 |
13 | class Command(BaseDbBackupCommand):
14 | option_list = (
15 | make_option("-d", "--database", help="Filter by database name"),
16 | make_option(
17 | "-z",
18 | "--compressed",
19 | help="Exclude non-compressed",
20 | action="store_true",
21 | default=None,
22 | dest="compressed",
23 | ),
24 | make_option(
25 | "-Z",
26 | "--not-compressed",
27 | help="Exclude compressed",
28 | action="store_false",
29 | default=None,
30 | dest="compressed",
31 | ),
32 | make_option(
33 | "-e",
34 | "--encrypted",
35 | help="Exclude non-encrypted",
36 | action="store_true",
37 | default=None,
38 | dest="encrypted",
39 | ),
40 | make_option(
41 | "-E",
42 | "--not-encrypted",
43 | help="Exclude encrypted",
44 | action="store_false",
45 | default=None,
46 | dest="encrypted",
47 | ),
48 | make_option(
49 | "-c", "--content-type", help="Filter by content type 'db' or 'media'"
50 | ),
51 | )
52 |
53 | def handle(self, **options):
54 | self.quiet = options.get("quiet")
55 | self.storage = get_storage()
56 | files_attr = self.get_backup_attrs(options)
57 | if not self.quiet:
58 | title = ROW_TEMPLATE.format(name="Name", datetime="Datetime")
59 | self.stdout.write(title)
60 | for file_attr in files_attr:
61 | row = ROW_TEMPLATE.format(**file_attr)
62 | self.stdout.write(row)
63 |
64 | def get_backup_attrs(self, options):
65 | filters = {k: v for k, v in options.items() if k in FILTER_KEYS}
66 | filenames = self.storage.list_backups(**filters)
67 | return [
68 | {
69 | "datetime": utils.filename_to_date(filename).strftime("%x %X"),
70 | "name": filename,
71 | }
72 | for filename in filenames
73 | ]
74 |
--------------------------------------------------------------------------------
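
For illustration, listings can be narrowed with the flags above; only the keys in FILTER_KEYS are forwarded to the storage query:

    from django.core.management import call_command

    # Print only compressed database backups (output follows ROW_TEMPLATE).
    call_command("listbackups", compressed=True, content_type="db")
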
/dbbackup/management/commands/mediabackup.py:
--------------------------------------------------------------------------------
1 | """
2 | Save media files.
3 | """
4 |
5 | import os
6 | import tarfile
7 |
8 | from django.core.management.base import CommandError
9 |
10 | from ... import utils
11 | from ...storage import StorageError, get_storage, get_storage_class
12 | from ._base import BaseDbBackupCommand, make_option
13 |
14 |
15 | class Command(BaseDbBackupCommand):
16 |     help = """Backup media files: gather them all into a tarball and
17 |     optionally compress and/or encrypt it."""
18 | content_type = "media"
19 |
20 | option_list = BaseDbBackupCommand.option_list + (
21 | make_option(
22 | "-c",
23 | "--clean",
24 | help="Clean up old backup files",
25 | action="store_true",
26 | default=False,
27 | ),
28 | make_option(
29 | "-s",
30 | "--servername",
31 | help="Specify server name to include in backup filename",
32 | ),
33 | make_option(
34 | "-z",
35 | "--compress",
36 | help="Compress the archive",
37 | action="store_true",
38 | default=False,
39 | ),
40 | make_option(
41 | "-e",
42 | "--encrypt",
43 | help="Encrypt the backup files",
44 | action="store_true",
45 | default=False,
46 | ),
47 | make_option(
48 | "-o", "--output-filename", default=None, help="Specify filename on storage"
49 | ),
50 | make_option(
51 | "-O",
52 | "--output-path",
53 | default=None,
54 | help="Specify where to store on local filesystem",
55 | ),
56 | )
57 |
58 | @utils.email_uncaught_exception
59 | def handle(self, **options):
60 | self.verbosity = options.get("verbosity")
61 | self.quiet = options.get("quiet")
62 | self._set_logger_level()
63 |
64 | self.encrypt = options.get("encrypt", False)
65 | self.compress = options.get("compress", False)
66 | self.servername = options.get("servername")
67 |
68 | self.filename = options.get("output_filename")
69 | self.path = options.get("output_path")
70 | try:
71 | self.media_storage = get_storage_class()()
72 | self.storage = get_storage()
73 | self.backup_mediafiles()
74 | if options.get("clean"):
75 | self._cleanup_old_backups(servername=self.servername)
76 |
77 | except StorageError as err:
78 | raise CommandError(err) from err
79 |
80 | def _explore_storage(self):
81 | """Generator of all files contained in media storage."""
82 | path = ""
83 | dirs = [path]
84 | while dirs:
85 | path = dirs.pop()
86 | subdirs, files = self.media_storage.listdir(path)
87 | for media_filename in files:
88 | yield os.path.join(path, media_filename)
89 | dirs.extend([os.path.join(path, subdir) for subdir in subdirs])
90 |
91 | def _create_tar(self, name):
92 | """Create TAR file."""
93 | fileobj = utils.create_spooled_temporary_file()
94 | mode = "w:gz" if self.compress else "w"
95 | tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode)
96 | for media_filename in self._explore_storage():
97 | tarinfo = tarfile.TarInfo(media_filename)
98 | media_file = self.media_storage.open(media_filename)
99 | tarinfo.size = len(media_file)
100 | tar_file.addfile(tarinfo, media_file)
101 | # Close the TAR for writing
102 | tar_file.close()
103 | return fileobj
104 |
105 | def backup_mediafiles(self):
106 | """
107 | Create backup file and write it to storage.
108 | """
109 | # Check for filename option
110 | if self.filename:
111 | filename = self.filename
112 | else:
113 | extension = f"tar{'.gz' if self.compress else ''}"
114 | filename = utils.filename_generate(
115 | extension, servername=self.servername, content_type=self.content_type
116 | )
117 |
118 | tarball = self._create_tar(filename)
119 |         # Apply transformations: encrypt the tarball if requested
120 | if self.encrypt:
121 | encrypted_file = utils.encrypt_file(tarball, filename)
122 | tarball, filename = encrypted_file
123 |
124 | self.logger.debug("Backup size: %s", utils.handle_size(tarball))
125 | # Store backup
126 | tarball.seek(0)
127 | if self.path is None:
128 | self.write_to_storage(tarball, filename)
129 | else:
130 | self.write_local_file(tarball, self.path)
131 |
--------------------------------------------------------------------------------
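
An illustrative invocation of the media backup command above; encryption assumes DBBACKUP_GPG_RECIPIENT is configured:

    from django.core.management import call_command

    # Tar the media storage contents, gzip the archive and encrypt it with GPG.
    call_command("mediabackup", compress=True, encrypt=True)
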
/dbbackup/management/commands/mediarestore.py:
--------------------------------------------------------------------------------
1 | """
2 | Restore media files.
3 | """
4 |
5 | import tarfile
6 |
7 | from ... import utils
8 | from ...storage import get_storage, get_storage_class
9 | from ._base import BaseDbBackupCommand, make_option
10 |
11 |
12 | class Command(BaseDbBackupCommand):
13 | help = """Restore a media backup from storage, encrypted and/or
14 | compressed."""
15 | content_type = "media"
16 |
17 | option_list = (
18 | make_option(
19 | "-i",
20 | "--input-filename",
21 | action="store",
22 | help="Specify filename to backup from",
23 | ),
24 | make_option(
25 | "-I", "--input-path", help="Specify path on local filesystem to backup from"
26 | ),
27 | make_option(
28 | "-s",
29 | "--servername",
30 | help="If backup file is not specified, filter the existing ones with the "
31 | "given servername",
32 | ),
33 | make_option(
34 | "-e",
35 | "--decrypt",
36 | default=False,
37 | action="store_true",
38 | help="Decrypt data before restoring",
39 | ),
40 | make_option(
41 | "-p", "--passphrase", default=None, help="Passphrase for decrypt file"
42 | ),
43 | make_option(
44 | "-z",
45 | "--uncompress",
46 | action="store_true",
47 | help="Uncompress gzip data before restoring",
48 | ),
49 | make_option(
50 | "-r", "--replace", help="Replace existing files", action="store_true"
51 | ),
52 | )
53 |
54 | def handle(self, *args, **options):
55 | """Django command handler."""
56 | self.verbosity = int(options.get("verbosity"))
57 | self.quiet = options.get("quiet")
58 | self._set_logger_level()
59 |
60 | self.servername = options.get("servername")
61 | self.decrypt = options.get("decrypt")
62 | self.uncompress = options.get("uncompress")
63 |
64 | self.filename = options.get("input_filename")
65 | self.path = options.get("input_path")
66 |
67 | self.replace = options.get("replace")
68 | self.passphrase = options.get("passphrase")
69 | self.interactive = options.get("interactive")
70 |
71 | self.storage = get_storage()
72 | self.media_storage = get_storage_class()()
73 | self._restore_backup()
74 |
75 | def _upload_file(self, name, media_file):
76 | if self.media_storage.exists(name):
77 | if not self.replace:
78 | return
79 | self.media_storage.delete(name)
80 | self.logger.info("%s deleted", name)
81 | self.media_storage.save(name, media_file)
82 | self.logger.info("%s uploaded", name)
83 |
84 | def _restore_backup(self):
85 | self.logger.info("Restoring backup for media files")
86 | input_filename, input_file = self._get_backup_file(servername=self.servername)
87 | self.logger.info("Restoring: %s", input_filename)
88 |
89 | if self.decrypt:
90 | unencrypted_file, input_filename = utils.unencrypt_file(
91 | input_file, input_filename, self.passphrase
92 | )
93 | input_file.close()
94 | input_file = unencrypted_file
95 |
96 | self.logger.debug("Backup size: %s", utils.handle_size(input_file))
97 | if self.interactive:
98 | self._ask_confirmation()
99 |
100 | input_file.seek(0)
101 | tar_file = (
102 | tarfile.open(fileobj=input_file, mode="r:gz")
103 | if self.uncompress
104 | else tarfile.open(fileobj=input_file, mode="r:")
105 | )
106 |         # Restore files one by one
107 | for media_file_info in tar_file:
108 | if media_file_info.path == "media":
109 | continue # Don't copy root directory
110 | media_file = tar_file.extractfile(media_file_info)
111 | if media_file is None:
112 | continue # Skip directories
113 | name = media_file_info.path.replace("media/", "")
114 | self._upload_file(name, media_file)
115 |
--------------------------------------------------------------------------------
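
And the matching restore, again as a sketch; `replace=True` lets `_upload_file` overwrite media files that already exist:

    from django.core.management import call_command

    # Restore the latest media tarball, overwriting existing files without prompting.
    call_command("mediarestore", uncompress=True, replace=True, interactive=False)
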
/dbbackup/settings.py:
--------------------------------------------------------------------------------
1 | # DO NOT IMPORT THIS BEFORE django.configure() has been run!
2 |
3 | import socket
4 | import tempfile
5 |
6 | from django.conf import settings
7 |
8 | DATABASES = getattr(settings, "DBBACKUP_DATABASES", list(settings.DATABASES.keys()))
9 |
10 | # Hostname embedded in backup filenames (may be faked via DBBACKUP_HOSTNAME)
11 | HOSTNAME = getattr(settings, "DBBACKUP_HOSTNAME", socket.gethostname())
12 |
13 | # Directory to use for temporary files
14 | TMP_DIR = getattr(settings, "DBBACKUP_TMP_DIR", tempfile.gettempdir())
15 | TMP_FILE_MAX_SIZE = getattr(settings, "DBBACKUP_TMP_FILE_MAX_SIZE", 10 * 1024 * 1024)
16 | TMP_FILE_READ_SIZE = getattr(settings, "DBBACKUP_TMP_FILE_READ_SIZE", 1024 * 1000)
17 |
18 | # Number of old backup files to keep
19 | CLEANUP_KEEP = getattr(settings, "DBBACKUP_CLEANUP_KEEP", 10)
20 | CLEANUP_KEEP_MEDIA = getattr(settings, "DBBACKUP_CLEANUP_KEEP_MEDIA", CLEANUP_KEEP)
21 | CLEANUP_KEEP_FILTER = getattr(settings, "DBBACKUP_CLEANUP_KEEP_FILTER", lambda x: False)
22 |
23 | MEDIA_PATH = getattr(settings, "DBBACKUP_MEDIA_PATH", settings.MEDIA_ROOT)
24 |
25 | DATE_FORMAT = getattr(settings, "DBBACKUP_DATE_FORMAT", "%Y-%m-%d-%H%M%S")
26 | FILENAME_TEMPLATE = getattr(
27 | settings,
28 | "DBBACKUP_FILENAME_TEMPLATE",
29 | "{databasename}-{servername}-{datetime}.{extension}",
30 | )
31 | MEDIA_FILENAME_TEMPLATE = getattr(
32 | settings, "DBBACKUP_MEDIA_FILENAME_TEMPLATE", "{servername}-{datetime}.{extension}"
33 | )
34 |
35 | GPG_ALWAYS_TRUST = getattr(settings, "DBBACKUP_GPG_ALWAYS_TRUST", False)
36 | GPG_RECIPIENT = getattr(settings, "DBBACKUP_GPG_RECIPIENT", None)
37 |
38 | STORAGE = getattr(settings, "DBBACKUP_STORAGE", None)
39 | STORAGE_OPTIONS = getattr(settings, "DBBACKUP_STORAGE_OPTIONS", {})
40 | # https://docs.djangoproject.com/en/5.1/ref/settings/#std-setting-STORAGES
41 | STORAGES_DBBACKUP_ALIAS = "dbbackup"
42 | DJANGO_STORAGES = getattr(settings, "STORAGES", {})
43 | django_dbbackup_storage = DJANGO_STORAGES.get(STORAGES_DBBACKUP_ALIAS, {})
44 |
45 | if not STORAGE:
46 | STORAGE = (
47 | django_dbbackup_storage.get("BACKEND")
48 | or "django.core.files.storage.FileSystemStorage"
49 | )
50 | if not STORAGE_OPTIONS:
51 | STORAGE_OPTIONS = django_dbbackup_storage.get("OPTIONS") or STORAGE_OPTIONS
52 |
53 | CONNECTORS = getattr(settings, "DBBACKUP_CONNECTORS", {})
54 |
55 | CUSTOM_CONNECTOR_MAPPING = getattr(settings, "DBBACKUP_CONNECTOR_MAPPING", {})
56 |
57 | DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
58 |
59 | # Mail
60 | SEND_EMAIL = getattr(settings, "DBBACKUP_SEND_EMAIL", True)
61 | SERVER_EMAIL = getattr(settings, "DBBACKUP_SERVER_EMAIL", settings.SERVER_EMAIL)
62 | FAILURE_RECIPIENTS = getattr(settings, "DBBACKUP_FAILURE_RECIPIENTS", None)
63 | if FAILURE_RECIPIENTS is None:
64 | ADMINS = getattr(settings, "DBBACKUP_ADMIN", settings.ADMINS)
65 | else:
66 | ADMINS = FAILURE_RECIPIENTS
67 | EMAIL_SUBJECT_PREFIX = getattr(settings, "DBBACKUP_EMAIL_SUBJECT_PREFIX", "[dbbackup] ")
68 |
--------------------------------------------------------------------------------
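
The storage resolution above gives DBBACKUP_STORAGE / DBBACKUP_STORAGE_OPTIONS precedence, otherwise falls back to the "dbbackup" alias of Django's STORAGES, and finally to FileSystemStorage. A sketch of the corresponding project settings; the location and recipient values are placeholders:

    # Legacy pair of settings...
    DBBACKUP_STORAGE = "django.core.files.storage.FileSystemStorage"
    DBBACKUP_STORAGE_OPTIONS = {"location": "/var/backups"}

    # ...or, on Django 4.2+, the STORAGES alias read by this module:
    STORAGES = {
        "default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
        "dbbackup": {
            "BACKEND": "django.core.files.storage.FileSystemStorage",
            "OPTIONS": {"location": "/var/backups"},
        },
    }

    DBBACKUP_CLEANUP_KEEP = 10                     # backups kept by --clean
    DBBACKUP_GPG_RECIPIENT = "backup@example.com"  # required for --encrypt
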
/dbbackup/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/commands/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/commands/test_base.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for base command class.
3 | """
4 |
5 | import logging
6 | import os
7 | from io import BytesIO
8 | from unittest.mock import patch
9 |
10 | from django.core.files import File
11 | from django.test import TestCase
12 |
13 | from dbbackup.management.commands._base import BaseDbBackupCommand
14 | from dbbackup.storage import get_storage
15 | from dbbackup.tests.utils import DEV_NULL, HANDLED_FILES
16 |
17 |
18 | class BaseDbBackupCommandSetLoggerLevelTest(TestCase):
19 | def setUp(self):
20 | self.command = BaseDbBackupCommand()
21 |
22 | def test_0_level(self):
23 | self.command.verbosity = 0
24 | self.command._set_logger_level()
25 | self.assertEqual(self.command.logger.level, logging.WARNING)
26 |
27 | def test_1_level(self):
28 | self.command.verbosity = 1
29 | self.command._set_logger_level()
30 | self.assertEqual(self.command.logger.level, logging.INFO)
31 |
32 | def test_2_level(self):
33 | self.command.verbosity = 2
34 | self.command._set_logger_level()
35 | self.assertEqual(self.command.logger.level, logging.DEBUG)
36 |
37 | def test_3_level(self):
38 | self.command.verbosity = 3
39 | self.command._set_logger_level()
40 | self.assertEqual(self.command.logger.level, logging.DEBUG)
41 |
42 | def test_quiet(self):
43 | self.command.quiet = True
44 | self.command._set_logger_level()
45 | self.assertGreater(self.command.logger.level, logging.ERROR)
46 |
47 |
48 | class BaseDbBackupCommandMethodsTest(TestCase):
49 | def setUp(self):
50 | HANDLED_FILES.clean()
51 | self.command = BaseDbBackupCommand()
52 | self.command.storage = get_storage()
53 |
54 | def test_read_from_storage(self):
55 | HANDLED_FILES["written_files"].append(["foo", File(BytesIO(b"bar"))])
56 | file_ = self.command.read_from_storage("foo")
57 | self.assertEqual(file_.read(), b"bar")
58 |
59 | def test_write_to_storage(self):
60 | self.command.write_to_storage(BytesIO(b"foo"), "bar")
61 | self.assertEqual(HANDLED_FILES["written_files"][0][0], "bar")
62 |
63 | def test_read_local_file(self):
64 | # setUp
65 | self.command.path = "/tmp/foo.bak"
66 | open(self.command.path, "w").close()
67 | # Test
68 | self.command.read_local_file(self.command.path)
69 | # tearDown
70 | os.remove(self.command.path)
71 |
72 | def test_write_local_file(self):
73 | fd, path = File(BytesIO(b"foo")), "/tmp/foo.bak"
74 | self.command.write_local_file(fd, path)
75 | self.assertTrue(os.path.exists(path))
76 | # tearDown
77 | os.remove(path)
78 |
79 | def test_ask_confirmation(self):
80 | # Yes
81 | with patch("dbbackup.management.commands._base.input", return_value="y"):
82 | self.command._ask_confirmation()
83 | with patch("dbbackup.management.commands._base.input", return_value="Y"):
84 | self.command._ask_confirmation()
85 | with patch("dbbackup.management.commands._base.input", return_value=""):
86 | self.command._ask_confirmation()
87 | with patch("dbbackup.management.commands._base.input", return_value="foo"):
88 | self.command._ask_confirmation()
89 | # No
90 | with patch("dbbackup.management.commands._base.input", return_value="n"):
91 | with self.assertRaises(SystemExit):
92 | self.command._ask_confirmation()
93 | with patch("dbbackup.management.commands._base.input", return_value="N"):
94 | with self.assertRaises(SystemExit):
95 | self.command._ask_confirmation()
96 | with patch("dbbackup.management.commands._base.input", return_value="No"):
97 | with self.assertRaises(SystemExit):
98 | self.command._ask_confirmation()
99 |
100 |
101 | class BaseDbBackupCommandCleanupOldBackupsTest(TestCase):
102 | def setUp(self):
103 | HANDLED_FILES.clean()
104 | self.command = BaseDbBackupCommand()
105 | self.command.stdout = DEV_NULL
106 | self.command.encrypt = False
107 | self.command.compress = False
108 | self.command.servername = "foo-server"
109 | self.command.storage = get_storage()
110 | HANDLED_FILES["written_files"] = [
111 | (f, None)
112 | for f in [
113 | "fooserver-2015-02-06-042810.tar",
114 | "fooserver-2015-02-07-042810.tar",
115 | "fooserver-2015-02-08-042810.tar",
116 | "foodb-fooserver-2015-02-06-042810.dump",
117 | "foodb-fooserver-2015-02-07-042810.dump",
118 | "foodb-fooserver-2015-02-08-042810.dump",
119 | "bardb-fooserver-2015-02-06-042810.dump",
120 | "bardb-fooserver-2015-02-07-042810.dump",
121 | "bardb-fooserver-2015-02-08-042810.dump",
122 | "hamdb-hamserver-2015-02-06-042810.dump",
123 | "hamdb-hamserver-2015-02-07-042810.dump",
124 | "hamdb-hamserver-2015-02-08-042810.dump",
125 | ]
126 | ]
127 |
128 | @patch("dbbackup.settings.CLEANUP_KEEP", 1)
129 | def test_clean_db(self):
130 | self.command.content_type = "db"
131 | self.command.database = "foodb"
132 | self.command._cleanup_old_backups(database="foodb")
133 | self.assertEqual(2, len(HANDLED_FILES["deleted_files"]))
134 | self.assertNotIn(
135 | "foodb-fooserver-2015-02-08-042810.dump", HANDLED_FILES["deleted_files"]
136 | )
137 |
138 | @patch("dbbackup.settings.CLEANUP_KEEP", 1)
139 | def test_clean_other_db(self):
140 | self.command.content_type = "db"
141 | self.command._cleanup_old_backups(database="bardb")
142 | self.assertEqual(2, len(HANDLED_FILES["deleted_files"]))
143 | self.assertNotIn(
144 | "bardb-fooserver-2015-02-08-042810.dump", HANDLED_FILES["deleted_files"]
145 | )
146 |
147 | @patch("dbbackup.settings.CLEANUP_KEEP", 1)
148 | def test_clean_other_server_db(self):
149 | self.command.content_type = "db"
150 | self.command._cleanup_old_backups(database="bardb")
151 | self.assertEqual(2, len(HANDLED_FILES["deleted_files"]))
152 | self.assertNotIn(
153 | "bardb-fooserver-2015-02-08-042810.dump", HANDLED_FILES["deleted_files"]
154 | )
155 |
156 | @patch("dbbackup.settings.CLEANUP_KEEP_MEDIA", 1)
157 | def test_clean_media(self):
158 | self.command.content_type = "media"
159 | self.command._cleanup_old_backups()
160 | self.assertEqual(2, len(HANDLED_FILES["deleted_files"]))
161 | self.assertNotIn(
162 | "foo-server-2015-02-08-042810.tar", HANDLED_FILES["deleted_files"]
163 | )
164 |
--------------------------------------------------------------------------------
/dbbackup/tests/commands/test_dbbackup.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for dbbackup command.
3 | """
4 |
5 | import os
6 | from unittest.mock import patch
7 |
8 | from django.test import TestCase
9 |
10 | from dbbackup.db.base import get_connector
11 | from dbbackup.management.commands.dbbackup import Command as DbbackupCommand
12 | from dbbackup.storage import get_storage
13 | from dbbackup.tests.utils import DEV_NULL, TEST_DATABASE, add_public_gpg, clean_gpg_keys
14 |
15 |
16 | @patch("dbbackup.settings.GPG_RECIPIENT", "test@test")
17 | @patch("sys.stdout", DEV_NULL)
18 | class DbbackupCommandSaveNewBackupTest(TestCase):
19 | def setUp(self):
20 | self.command = DbbackupCommand()
21 | self.command.servername = "foo-server"
22 | self.command.encrypt = False
23 | self.command.compress = False
24 | self.command.database = TEST_DATABASE["NAME"]
25 | self.command.storage = get_storage()
26 | self.command.connector = get_connector()
27 | self.command.stdout = DEV_NULL
28 | self.command.filename = None
29 | self.command.path = None
30 | self.command.schemas = []
31 |
32 | def tearDown(self):
33 | clean_gpg_keys()
34 |
35 | def test_func(self):
36 | self.command._save_new_backup(TEST_DATABASE)
37 |
38 | def test_compress(self):
39 | self.command.compress = True
40 | self.command._save_new_backup(TEST_DATABASE)
41 |
42 | def test_encrypt(self):
43 | add_public_gpg()
44 | self.command.encrypt = True
45 | self.command._save_new_backup(TEST_DATABASE)
46 |
47 | def test_path(self):
48 | self.command.path = "/tmp/foo.bak"
49 | self.command._save_new_backup(TEST_DATABASE)
50 | self.assertTrue(os.path.exists(self.command.path))
51 | # tearDown
52 | os.remove(self.command.path)
53 |
54 | def test_schema(self):
55 | self.command.schemas = ["public"]
56 | result = self.command._save_new_backup(TEST_DATABASE)
57 |
58 | self.assertIsNone(result)
59 |
60 | @patch("dbbackup.settings.DATABASES", ["db-from-settings"])
61 | def test_get_database_keys(self):
62 | with self.subTest("use --database from CLI"):
63 | self.command.database = "db-from-cli"
64 | self.assertEqual(self.command._get_database_keys(), ["db-from-cli"])
65 |
66 | with self.subTest("fallback to DBBACKUP_DATABASES"):
67 | self.command.database = ""
68 | self.assertEqual(self.command._get_database_keys(), ["db-from-settings"])
69 |
70 |
71 | @patch("dbbackup.settings.GPG_RECIPIENT", "test@test")
72 | @patch("sys.stdout", DEV_NULL)
73 | @patch("dbbackup.db.sqlite.SqliteConnector.create_dump")
74 | @patch("dbbackup.utils.handle_size", return_value=4.2)
75 | class DbbackupCommandSaveNewMongoBackupTest(TestCase):
76 | def setUp(self):
77 | self.command = DbbackupCommand()
78 | self.command.servername = "foo-server"
79 | self.command.encrypt = False
80 | self.command.compress = False
81 | self.command.storage = get_storage()
82 | self.command.stdout = DEV_NULL
83 | self.command.filename = None
84 | self.command.path = None
85 | self.command.connector = get_connector("default")
86 | self.command.schemas = []
87 |
88 | def tearDown(self):
89 | clean_gpg_keys()
90 |
91 | def test_func(self, mock_run_commands, mock_handle_size):
92 | self.command._save_new_backup(TEST_DATABASE)
93 | self.assertTrue(mock_run_commands.called)
94 |
--------------------------------------------------------------------------------
/dbbackup/tests/commands/test_dbrestore.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for dbrestore command.
3 | """
4 |
5 | from shutil import copyfileobj
6 | from tempfile import mktemp
7 | from unittest.mock import patch
8 |
9 | from django.conf import settings
10 | from django.core.files import File
11 | from django.core.management.base import CommandError
12 | from django.test import TestCase
13 |
14 | from dbbackup import utils
15 | from dbbackup.db.base import get_connector
16 | from dbbackup.db.mongodb import MongoDumpConnector
17 | from dbbackup.db.postgresql import PgDumpConnector
18 | from dbbackup.management.commands.dbrestore import Command as DbrestoreCommand
19 | from dbbackup.settings import HOSTNAME
20 | from dbbackup.storage import get_storage
21 | from dbbackup.tests.utils import (
22 | DEV_NULL,
23 | HANDLED_FILES,
24 | TARED_FILE,
25 | TEST_DATABASE,
26 | TEST_MONGODB,
27 | add_private_gpg,
28 | clean_gpg_keys,
29 | get_dump,
30 | get_dump_name,
31 | )
32 |
33 |
34 | @patch("dbbackup.management.commands._base.input", return_value="y")
35 | class DbrestoreCommandRestoreBackupTest(TestCase):
36 | def setUp(self):
37 | self.command = DbrestoreCommand()
38 | self.command.stdout = DEV_NULL
39 | self.command.uncompress = False
40 | self.command.decrypt = False
41 | self.command.backup_extension = "bak"
42 | self.command.filename = "foofile"
43 | self.command.database = TEST_DATABASE
44 | self.command.passphrase = None
45 | self.command.interactive = True
46 | self.command.storage = get_storage()
47 | self.command.servername = HOSTNAME
48 | self.command.input_database_name = None
49 | self.command.database_name = "default"
50 | self.command.connector = get_connector("default")
51 | self.command.schemas = []
52 | HANDLED_FILES.clean()
53 |
54 | def tearDown(self):
55 | clean_gpg_keys()
56 |
57 | def test_no_filename(self, *args):
58 | # Prepare backup
59 | HANDLED_FILES["written_files"].append(
60 | (utils.filename_generate("default"), File(get_dump()))
61 | )
62 | # Check
63 | self.command.path = None
64 | self.command.filename = None
65 | self.command._restore_backup()
66 |
67 | def test_no_backup_found(self, *args):
68 | self.command.path = None
69 | self.command.filename = None
70 | with self.assertRaises(CommandError):
71 | self.command._restore_backup()
72 |
73 | def test_uncompress(self, *args):
74 | self.command.path = None
75 | compressed_file, self.command.filename = utils.compress_file(
76 | get_dump(), get_dump_name()
77 | )
78 | HANDLED_FILES["written_files"].append(
79 | (self.command.filename, File(compressed_file))
80 | )
81 | self.command.uncompress = True
82 | self.command._restore_backup()
83 |
84 | @patch("dbbackup.utils.getpass", return_value=None)
85 | def test_decrypt(self, *args):
86 | self.command.path = None
87 | self.command.decrypt = True
88 | encrypted_file, self.command.filename = utils.encrypt_file(
89 | get_dump(), get_dump_name()
90 | )
91 | HANDLED_FILES["written_files"].append(
92 | (self.command.filename, File(encrypted_file))
93 | )
94 | self.command._restore_backup()
95 |
96 | def test_path(self, *args):
97 | temp_dump = get_dump()
98 | dump_path = mktemp()
99 | with open(dump_path, "wb") as dump:
100 | copyfileobj(temp_dump, dump)
101 | self.command.path = dump.name
102 | self.command._restore_backup()
103 | self.command.decrypt = False
104 | self.command.filepath = get_dump_name()
105 | HANDLED_FILES["written_files"].append((self.command.filepath, get_dump()))
106 | self.command._restore_backup()
107 |
108 | @patch("dbbackup.management.commands.dbrestore.get_connector")
109 | @patch("dbbackup.db.base.BaseDBConnector.restore_dump")
110 | def test_schema(self, mock_restore_dump, mock_get_connector, *args):
111 | """Schema is only used for postgresql."""
112 | mock_get_connector.return_value = PgDumpConnector()
113 | mock_restore_dump.return_value = True
114 |
115 | mock_file = File(get_dump())
116 | HANDLED_FILES["written_files"].append((self.command.filename, mock_file))
117 |
118 | with self.assertLogs("dbbackup.command", "INFO") as cm:
119 | # Without
120 | self.command.path = None
121 | self.command._restore_backup()
122 | self.assertEqual(self.command.connector.schemas, [])
123 |
124 | # With
125 | self.command.path = None
126 | self.command.schemas = ["public"]
127 | self.command._restore_backup()
128 | self.assertEqual(self.command.connector.schemas, ["public"])
129 | self.assertIn(
130 | "INFO:dbbackup.command:Restoring schemas: ['public']",
131 | cm.output,
132 | )
133 |
134 | # With multiple
135 | self.command.path = None
136 | self.command.schemas = ["public", "other"]
137 | self.command._restore_backup()
138 | self.assertEqual(self.command.connector.schemas, ["public", "other"])
139 | self.assertIn(
140 | "INFO:dbbackup.command:Restoring schemas: ['public', 'other']",
141 | cm.output,
142 | )
143 |
144 | mock_get_connector.assert_called_with("default")
145 | mock_restore_dump.assert_called_with(mock_file)
146 |
147 |
148 | class DbrestoreCommandGetDatabaseTest(TestCase):
149 | def setUp(self):
150 | self.command = DbrestoreCommand()
151 |
152 | def test_give_db_name(self):
153 | name, db = self.command._get_database("default")
154 | self.assertEqual(name, "default")
155 | self.assertEqual(db, settings.DATABASES["default"])
156 |
157 | def test_no_given_db(self):
158 | name, db = self.command._get_database(None)
159 | self.assertEqual(name, "default")
160 | self.assertEqual(db, settings.DATABASES["default"])
161 |
162 | @patch("django.conf.settings.DATABASES", {"db1": {}, "db2": {}})
163 | def test_no_given_db_multidb(self):
164 | with self.assertRaises(CommandError):
165 | self.command._get_database({})
166 |
167 |
168 | @patch("dbbackup.management.commands._base.input", return_value="y")
169 | @patch(
170 | "dbbackup.management.commands.dbrestore.get_connector",
171 | return_value=MongoDumpConnector(),
172 | )
173 | @patch("dbbackup.db.mongodb.MongoDumpConnector.restore_dump")
174 | class DbMongoRestoreCommandRestoreBackupTest(TestCase):
175 | def setUp(self):
176 | self.command = DbrestoreCommand()
177 | self.command.stdout = DEV_NULL
178 | self.command.uncompress = False
179 | self.command.decrypt = False
180 | self.command.backup_extension = "bak"
181 | self.command.path = None
182 | self.command.filename = "foofile"
183 | self.command.database = TEST_MONGODB
184 | self.command.passphrase = None
185 | self.command.interactive = True
186 | self.command.storage = get_storage()
187 | self.command.connector = MongoDumpConnector()
188 | self.command.database_name = "mongo"
189 | self.command.input_database_name = None
190 | self.command.servername = HOSTNAME
191 | self.command.schemas = []
192 | HANDLED_FILES.clean()
193 | add_private_gpg()
194 |
195 | def test_mongo_settings_backup_command(self, mock_runcommands, *args):
196 | self.command.storage.file_read = TARED_FILE
197 | self.command.filename = TARED_FILE
198 | HANDLED_FILES["written_files"].append((TARED_FILE, open(TARED_FILE, "rb")))
199 | self.command._restore_backup()
200 | self.assertTrue(mock_runcommands.called)
201 |
--------------------------------------------------------------------------------
/dbbackup/tests/commands/test_listbackups.py:
--------------------------------------------------------------------------------
1 | from io import StringIO
2 | from unittest.mock import patch
3 |
4 | from django.core.management import execute_from_command_line
5 | from django.test import TestCase
6 |
7 | from dbbackup.management.commands.listbackups import Command as ListbackupsCommand
8 | from dbbackup.storage import get_storage
9 | from dbbackup.tests.utils import HANDLED_FILES
10 |
11 |
12 | class ListbackupsCommandTest(TestCase):
13 | def setUp(self):
14 | self.command = ListbackupsCommand()
15 | self.command.storage = get_storage()
16 | HANDLED_FILES["written_files"] = [
17 | (f, None)
18 | for f in [
19 | "2015-02-06-042810.bak",
20 | "2015-02-07-042810.bak",
21 | "2015-02-08-042810.bak",
22 | ]
23 | ]
24 |
25 | def test_get_backup_attrs(self):
26 | options = {}
27 | attrs = self.command.get_backup_attrs(options)
28 | self.assertEqual(len(HANDLED_FILES["written_files"]), len(attrs))
29 |
30 |
31 | class ListbackupsCommandArgComputingTest(TestCase):
32 | def setUp(self):
33 | HANDLED_FILES["written_files"] = [
34 | (f, None)
35 | for f in [
36 | "2015-02-06-042810_foo.db",
37 | "2015-02-06-042810_foo.db.gz",
38 | "2015-02-06-042810_foo.db.gpg",
39 | "2015-02-06-042810_foo.db.gz.gpg",
40 | "2015-02-06-042810_foo.tar",
41 | "2015-02-06-042810_foo.tar.gz",
42 | "2015-02-06-042810_foo.tar.gpg",
43 | "2015-02-06-042810_foo.tar.gz.gpg",
44 | "2015-02-06-042810_bar.db",
45 | "2015-02-06-042810_bar.db.gz",
46 | "2015-02-06-042810_bar.db.gpg",
47 | "2015-02-06-042810_bar.db.gz.gpg",
48 | "2015-02-06-042810_bar.tar",
49 | "2015-02-06-042810_bar.tar.gz",
50 | "2015-02-06-042810_bar.tar.gpg",
51 | "2015-02-06-042810_bar.tar.gz.gpg",
52 | ]
53 | ]
54 |
55 | def test_list(self):
56 | execute_from_command_line(["", "listbackups"])
57 |
58 | def test_filter_encrypted(self):
59 | stdout = StringIO()
60 | with patch("sys.stdout", stdout):
61 | execute_from_command_line(["", "listbackups", "--encrypted", "-q"])
62 | stdout.seek(0)
63 | stdout.readline()
64 | for line in stdout.readlines():
65 | self.assertIn(".gpg", line)
66 |
67 | def test_filter_not_encrypted(self):
68 | stdout = StringIO()
69 | with patch("sys.stdout", stdout):
70 | execute_from_command_line(["", "listbackups", "--not-encrypted", "-q"])
71 | stdout.seek(0)
72 | stdout.readline()
73 | for line in stdout.readlines():
74 | self.assertNotIn(".gpg", line)
75 |
76 | def test_filter_compressed(self):
77 | stdout = StringIO()
78 | with patch("sys.stdout", stdout):
79 | execute_from_command_line(["", "listbackups", "--compressed", "-q"])
80 | stdout.seek(0)
81 | stdout.readline()
82 | for line in stdout.readlines():
83 | self.assertIn(".gz", line)
84 |
85 | def test_filter_not_compressed(self):
86 | stdout = StringIO()
87 | with patch("sys.stdout", stdout):
88 | execute_from_command_line(["", "listbackups", "--not-compressed", "-q"])
89 | stdout.seek(0)
90 | stdout.readline()
91 | for line in stdout.readlines():
92 | self.assertNotIn(".gz", line)
93 |
94 | def test_filter_db(self):
95 | stdout = StringIO()
96 | with patch("sys.stdout", stdout):
97 | execute_from_command_line(["", "listbackups", "--content-type", "db", "-q"])
98 | stdout.seek(0)
99 | stdout.readline()
100 | for line in stdout.readlines():
101 | self.assertIn(".db", line)
102 |
103 | def test_filter_media(self):
104 | stdout = StringIO()
105 | with patch("sys.stdout", stdout):
106 | execute_from_command_line(
107 | ["", "listbackups", "--content-type", "media", "-q"]
108 | )
109 | stdout.seek(0)
110 | stdout.readline()
111 | for line in stdout.readlines():
112 | self.assertIn(".tar", line)
113 |
--------------------------------------------------------------------------------
/dbbackup/tests/commands/test_mediabackup.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for mediabackup command.
3 | """
4 |
5 | import contextlib
6 | import os
7 | import tempfile
8 |
9 | from django.test import TestCase
10 |
11 | from dbbackup.management.commands.mediabackup import Command as DbbackupCommand
12 | from dbbackup.storage import get_storage, get_storage_class
13 | from dbbackup.tests.utils import DEV_NULL, HANDLED_FILES, add_public_gpg
14 |
15 |
16 | class MediabackupBackupMediafilesTest(TestCase):
17 | def setUp(self):
18 | HANDLED_FILES.clean()
19 | self.command = DbbackupCommand()
20 | self.command.servername = "foo-server"
21 | self.command.storage = get_storage()
22 | self.command.stdout = DEV_NULL
23 | self.command.compress = False
24 | self.command.encrypt = False
25 | self.command.path = None
26 | self.command.media_storage = get_storage_class()()
27 | self.command.filename = None
28 |
29 | def tearDown(self):
30 | if self.command.path is not None:
31 | with contextlib.suppress(OSError):
32 | os.remove(self.command.path)
33 |
34 | def test_func(self):
35 | self.command.backup_mediafiles()
36 | self.assertEqual(1, len(HANDLED_FILES["written_files"]))
37 |
38 | def test_compress(self):
39 | self.command.compress = True
40 | self.command.backup_mediafiles()
41 | self.assertEqual(1, len(HANDLED_FILES["written_files"]))
42 | self.assertTrue(HANDLED_FILES["written_files"][0][0].endswith(".gz"))
43 |
44 | def test_encrypt(self):
45 | self.command.encrypt = True
46 | add_public_gpg()
47 | self.command.backup_mediafiles()
48 | self.assertEqual(1, len(HANDLED_FILES["written_files"]))
49 | outputfile = HANDLED_FILES["written_files"][0][1]
50 | outputfile.seek(0)
51 | self.assertTrue(outputfile.read().startswith(b"-----BEGIN PGP MESSAGE-----"))
52 |
53 | def test_compress_and_encrypt(self):
54 | self.command.compress = True
55 | self.command.encrypt = True
56 | add_public_gpg()
57 | self.command.backup_mediafiles()
58 | self.assertEqual(1, len(HANDLED_FILES["written_files"]))
59 | outputfile = HANDLED_FILES["written_files"][0][1]
60 | outputfile.seek(0)
61 | self.assertTrue(outputfile.read().startswith(b"-----BEGIN PGP MESSAGE-----"))
62 |
63 | def test_write_local_file(self):
64 | self.command.path = tempfile.mktemp()
65 | self.command.backup_mediafiles()
66 | self.assertTrue(os.path.exists(self.command.path))
67 | self.assertEqual(0, len(HANDLED_FILES["written_files"]))
68 |
69 | def test_output_filename(self):
70 | self.command.filename = "my_new_name.tar"
71 | self.command.backup_mediafiles()
72 | self.assertEqual(HANDLED_FILES["written_files"][0][0], self.command.filename)
73 |
--------------------------------------------------------------------------------
/dbbackup/tests/functional/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/functional/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Configuration and launcher for dbbackup tests.
3 | """
4 |
5 | import os
6 | import sys
7 | import tempfile
8 |
9 | from dotenv import load_dotenv
10 |
11 | test = len(sys.argv) <= 1 or sys.argv[1] == "test"
12 | if not test:
13 | load_dotenv()
14 |
15 | DEBUG = False
16 |
17 | BASE_DIR = os.path.dirname(os.path.abspath(__file__))
18 | TESTAPP_DIR = os.path.join(BASE_DIR, "testapp/")
19 | BLOB_DIR = os.path.join(TESTAPP_DIR, "blobs/")
20 |
21 | ADMINS = (("ham", "foo@bar"),)
22 | ALLOWED_HOSTS = ["*"]
23 | MIDDLEWARE_CLASSES = ()
24 | ROOT_URLCONF = "dbbackup.tests.testapp.urls"
25 | SECRET_KEY = "it's a secret to everyone"
26 | SITE_ID = 1
27 | MEDIA_ROOT = os.environ.get("MEDIA_ROOT") or tempfile.mkdtemp()
28 | INSTALLED_APPS = (
29 | "dbbackup",
30 | "dbbackup.tests.testapp",
31 | )
32 | DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
33 |
34 | DATABASES = {
35 | "default": {
36 | "ENGINE": os.environ.get("DB_ENGINE", "django.db.backends.sqlite3"),
37 | "NAME": os.environ.get("DB_NAME", ":memory:"),
38 | "USER": os.environ.get("DB_USER"),
39 | "PASSWORD": os.environ.get("DB_PASSWORD"),
40 | "HOST": os.environ.get("DB_HOST"),
41 | }
42 | }
43 | if os.environ.get("CONNECTOR"):
44 | CONNECTOR = {"CONNECTOR": os.environ["CONNECTOR"]}
45 | DBBACKUP_CONNECTORS = {"default": CONNECTOR}
46 |
47 | CACHES = {
48 | "default": {
49 | "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
50 | }
51 | }
52 |
53 | SERVER_EMAIL = "dbbackup@test.org"
54 |
55 | DBBACKUP_GPG_RECIPIENT = "test@test"
56 | DBBACKUP_GPG_ALWAYS_TRUST = True
57 |
58 | DBBACKUP_STORAGE = os.environ.get("STORAGE", "dbbackup.tests.utils.FakeStorage")
59 | DBBACKUP_STORAGE_OPTIONS = dict(
60 | [
61 | keyvalue.split("=")
62 | for keyvalue in os.environ.get("STORAGE_OPTIONS", "").split(",")
63 | if keyvalue
64 | ]
65 | )
66 |
67 | # For testing the new storages setting introduced in Django 4.2
68 | STORAGES = {
69 | "default": {
70 | "BACKEND": "django.core.files.storage.FileSystemStorage",
71 | "OPTIONS": {},
72 | },
73 | "dbbackup": {
74 | "BACKEND": DBBACKUP_STORAGE,
75 | "OPTIONS": DBBACKUP_STORAGE_OPTIONS,
76 | },
77 | }
78 |
79 | LOGGING = {
80 | "version": 1,
81 | "disable_existing_loggers": False,
82 | "root": {"handlers": ["console"], "level": "DEBUG"},
83 | "handlers": {
84 | "console": {
85 | "level": os.getenv("DJANGO_LOG_LEVEL", "INFO"),
86 | "class": "logging.StreamHandler",
87 | "formatter": "simple",
88 | }
89 | },
90 | "formatters": {
91 | "verbose": {
92 | "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
93 | "datefmt": "%d/%b/%Y %H:%M:%S",
94 | },
95 | "simple": {"format": "%(levelname)s %(message)s"},
96 | },
97 | "loggers": {
98 | "django.db.backends": {
99 | # uncomment to see all queries
100 | # 'level': 'DEBUG',
101 | "handlers": ["console"],
102 | }
103 | },
104 | }
105 |
106 | # let there be silence
107 | DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
108 |
--------------------------------------------------------------------------------
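
The test settings above are driven entirely by environment variables (DB_ENGINE, DB_NAME, CONNECTOR, STORAGE, ...). A sketch of pointing them at PostgreSQL before running the suite; the database name, credentials and connector choice are placeholders, and runtests.py is the launcher at the repository root:

    import os
    import subprocess

    os.environ.update(
        {
            "DB_ENGINE": "django.db.backends.postgresql",
            "DB_NAME": "dbbackup_test",
            "DB_USER": "postgres",
            "DB_HOST": "localhost",
            "CONNECTOR": "dbbackup.db.postgresql.PgDumpConnector",
        }
    )
    subprocess.run(["python", "runtests.py"], check=True)
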
/dbbackup/tests/test_checks.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 | from django.test import TestCase
4 |
5 | try:
6 | from dbbackup import checks
7 | from dbbackup.apps import DbbackupConfig
8 | except ImportError:
9 | checks = None
10 |
11 |
12 | def test_func(*args, **kwargs):
13 | return "foo"
14 |
15 |
16 | class ChecksTest(TestCase):
17 | def setUp(self):
18 | if checks is None:
19 |             self.skipTest("The checks framework requires Django 1.7+")
20 |
21 | def test_check(self):
22 | self.assertFalse(checks.check_settings(DbbackupConfig))
23 |
24 | @patch("dbbackup.checks.settings.HOSTNAME", "")
25 | def test_hostname_invalid(self):
26 | expected_errors = [checks.W001]
27 | errors = checks.check_settings(DbbackupConfig)
28 | self.assertEqual(expected_errors, errors)
29 |
30 | @patch("dbbackup.checks.settings.STORAGE", "")
31 | def test_hostname_storage(self):
32 | expected_errors = [checks.W002]
33 | errors = checks.check_settings(DbbackupConfig)
34 | self.assertEqual(expected_errors, errors)
35 |
36 | @patch("dbbackup.checks.settings.FILENAME_TEMPLATE", test_func)
37 | def test_filename_template_is_callable(self):
38 | self.assertFalse(checks.check_settings(DbbackupConfig))
39 |
40 | @patch("dbbackup.checks.settings.FILENAME_TEMPLATE", "{datetime}.bak")
41 | def test_filename_template_is_string(self):
42 | self.assertFalse(checks.check_settings(DbbackupConfig))
43 |
44 | @patch("dbbackup.checks.settings.FILENAME_TEMPLATE", "foo.bak")
45 | def test_filename_template_no_date(self):
46 | expected_errors = [checks.W003]
47 | errors = checks.check_settings(DbbackupConfig)
48 | self.assertEqual(expected_errors, errors)
49 |
50 | @patch("dbbackup.checks.settings.MEDIA_FILENAME_TEMPLATE", test_func)
51 | def test_media_filename_template_is_callable(self):
52 | self.assertFalse(checks.check_settings(DbbackupConfig))
53 |
54 | @patch("dbbackup.checks.settings.MEDIA_FILENAME_TEMPLATE", "{datetime}.bak")
55 | def test_media_filename_template_is_string(self):
56 | self.assertFalse(checks.check_settings(DbbackupConfig))
57 |
58 | @patch("dbbackup.checks.settings.MEDIA_FILENAME_TEMPLATE", "foo.bak")
59 | def test_media_filename_template_no_date(self):
60 | expected_errors = [checks.W004]
61 | errors = checks.check_settings(DbbackupConfig)
62 | self.assertEqual(expected_errors, errors)
63 |
64 | @patch("dbbackup.checks.settings.DATE_FORMAT", "foo@net.pt")
65 | def test_date_format_warning(self):
66 | expected_errors = [checks.W005]
67 | errors = checks.check_settings(DbbackupConfig)
68 | self.assertEqual(expected_errors, errors)
69 |
70 | @patch("dbbackup.checks.settings.FAILURE_RECIPIENTS", "foo@net.pt")
71 | def test_Failure_recipients_warning(self):
72 | expected_errors = [checks.W006]
73 | errors = checks.check_settings(DbbackupConfig)
74 | self.assertEqual(expected_errors, errors)
75 |
76 | @patch("dbbackup.checks.settings.FILENAME_TEMPLATE", "foo/bar-{datetime}.ext")
77 | def test_db_filename_template_with_slash(self):
78 | expected_errors = [checks.W007]
79 | errors = checks.check_settings(DbbackupConfig)
80 | self.assertEqual(expected_errors, errors)
81 |
82 | @patch("dbbackup.checks.settings.FILENAME_TEMPLATE", lambda _: "foo/bar")
83 | def test_db_filename_template_callable_with_slash(self):
84 | expected_errors = [checks.W007]
85 | errors = checks.check_settings(DbbackupConfig)
86 | self.assertEqual(expected_errors, errors)
87 |
88 | @patch("dbbackup.checks.settings.MEDIA_FILENAME_TEMPLATE", "foo/bar-{datetime}.ext")
89 | def test_media_filename_template_with_slash(self):
90 | expected_errors = [checks.W008]
91 | errors = checks.check_settings(DbbackupConfig)
92 | self.assertEqual(expected_errors, errors)
93 |
94 | @patch("dbbackup.checks.settings.MEDIA_FILENAME_TEMPLATE", lambda _: "foo/bar")
95 | def test_media_filename_template_callable_with_slash(self):
96 | expected_errors = [checks.W008]
97 | errors = checks.check_settings(DbbackupConfig)
98 | self.assertEqual(expected_errors, errors)
99 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_connectors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/test_connectors/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/test_connectors/test_base.py:
--------------------------------------------------------------------------------
1 | import os
2 | from tempfile import SpooledTemporaryFile
3 |
4 | from django.test import TestCase
5 |
6 | from dbbackup.db import exceptions
7 | from dbbackup.db.base import BaseCommandDBConnector, BaseDBConnector, get_connector
8 |
9 |
10 | class GetConnectorTest(TestCase):
11 | def test_get_connector(self):
12 | connector = get_connector()
13 | self.assertIsInstance(connector, BaseDBConnector)
14 |
15 |
16 | class BaseDBConnectorTest(TestCase):
17 | def test_init(self):
18 | BaseDBConnector()
19 |
20 | def test_settings(self):
21 | connector = BaseDBConnector()
22 | connector.settings
23 |
24 | def test_generate_filename(self):
25 | connector = BaseDBConnector()
26 | connector.generate_filename()
27 |
28 |
29 | class BaseCommandDBConnectorTest(TestCase):
30 | def test_run_command(self):
31 | connector = BaseCommandDBConnector()
32 | stdout, stderr = connector.run_command("echo 123")
33 | self.assertEqual(stdout.read(), b"123\n")
34 | self.assertEqual(stderr.read(), b"")
35 |
36 | def test_run_command_error(self):
37 | connector = BaseCommandDBConnector()
38 | with self.assertRaises(exceptions.CommandConnectorError):
39 | connector.run_command("echa 123")
40 |
41 | def test_run_command_stdin(self):
42 | connector = BaseCommandDBConnector()
43 | stdin = SpooledTemporaryFile()
44 | stdin.write(b"foo")
45 | stdin.seek(0)
46 | # Run
47 | stdout, stderr = connector.run_command("cat", stdin=stdin)
48 | self.assertEqual(stdout.read(), b"foo")
49 | self.assertFalse(stderr.read())
50 |
51 | def test_run_command_with_env(self):
52 | connector = BaseCommandDBConnector()
53 | # Empty env
54 | stdout, stderr = connector.run_command("env")
55 | self.assertTrue(stdout.read())
56 | # env from self.env
57 | connector.env = {"foo": "bar"}
58 | stdout, stderr = connector.run_command("env")
59 | self.assertIn(b"foo=bar\n", stdout.read())
60 |         # method overrides global env
61 | stdout, stderr = connector.run_command("env", env={"foo": "ham"})
62 | self.assertIn(b"foo=ham\n", stdout.read())
63 | # get a var from parent env
64 | os.environ["bar"] = "foo"
65 | stdout, stderr = connector.run_command("env")
66 | self.assertIn(b"bar=foo\n", stdout.read())
67 |         # Conf overrides parent env
68 | connector.env = {"bar": "bar"}
69 | stdout, stderr = connector.run_command("env")
70 | self.assertIn(b"bar=bar\n", stdout.read())
71 | # method overrides all
72 | stdout, stderr = connector.run_command("env", env={"bar": "ham"})
73 | self.assertIn(b"bar=ham\n", stdout.read())
74 |
75 | def test_run_command_with_parent_env(self):
76 | connector = BaseCommandDBConnector(use_parent_env=False)
77 | # Empty env
78 | stdout, stderr = connector.run_command("env")
79 | self.assertFalse(stdout.read())
80 | # env from self.env
81 | connector.env = {"foo": "bar"}
82 | stdout, stderr = connector.run_command("env")
83 | self.assertEqual(stdout.read(), b"foo=bar\n")
84 |         # method overrides global env
85 | stdout, stderr = connector.run_command("env", env={"foo": "ham"})
86 | self.assertEqual(stdout.read(), b"foo=ham\n")
87 | # no var from parent env
88 | os.environ["bar"] = "foo"
89 | stdout, stderr = connector.run_command("env")
90 | self.assertNotIn(b"bar=foo\n", stdout.read())
91 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_connectors/test_mongodb.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | from unittest.mock import patch
3 |
4 | from django.test import TestCase
5 |
6 | from dbbackup.db.mongodb import MongoDumpConnector
7 |
8 |
9 | @patch(
10 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
11 | return_value=(BytesIO(b"foo"), BytesIO()),
12 | )
13 | class MongoDumpConnectorTest(TestCase):
14 | def test_create_dump(self, mock_dump_cmd):
15 | connector = MongoDumpConnector()
16 | dump = connector.create_dump()
17 | # Test dump
18 | dump_content = dump.read()
19 | self.assertTrue(dump_content)
20 | self.assertEqual(dump_content, b"foo")
21 | # Test cmd
22 | self.assertTrue(mock_dump_cmd.called)
23 |
24 | def test_create_dump_user(self, mock_dump_cmd):
25 | connector = MongoDumpConnector()
26 | # Without
27 | connector.settings.pop("USER", None)
28 | connector.create_dump()
29 |         self.assertNotIn(" --username ", mock_dump_cmd.call_args[0][0])
30 | # With
31 | connector.settings["USER"] = "foo"
32 | connector.create_dump()
33 | self.assertIn(" --username foo", mock_dump_cmd.call_args[0][0])
34 |
35 | def test_create_dump_password(self, mock_dump_cmd):
36 | connector = MongoDumpConnector()
37 | # Without
38 | connector.settings.pop("PASSWORD", None)
39 | connector.create_dump()
40 | self.assertNotIn(" --password ", mock_dump_cmd.call_args[0][0])
41 | # With
42 | connector.settings["PASSWORD"] = "foo"
43 | connector.create_dump()
44 | self.assertIn(" --password foo", mock_dump_cmd.call_args[0][0])
45 |
46 | @patch(
47 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
48 | return_value=(BytesIO(), BytesIO()),
49 | )
50 | def test_restore_dump(self, mock_dump_cmd, mock_restore_cmd):
51 | connector = MongoDumpConnector()
52 | dump = connector.create_dump()
53 | connector.restore_dump(dump)
54 | # Test cmd
55 | self.assertTrue(mock_restore_cmd.called)
56 |
57 | @patch(
58 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
59 | return_value=(BytesIO(), BytesIO()),
60 | )
61 | def test_restore_dump_user(self, mock_dump_cmd, mock_restore_cmd):
62 | connector = MongoDumpConnector()
63 | dump = connector.create_dump()
64 | # Without
65 | connector.settings.pop("USER", None)
66 | connector.restore_dump(dump)
67 | self.assertNotIn(" --username ", mock_restore_cmd.call_args[0][0])
68 | # With
69 | connector.settings["USER"] = "foo"
70 | connector.restore_dump(dump)
71 | self.assertIn(" --username foo", mock_restore_cmd.call_args[0][0])
72 |
73 | @patch(
74 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
75 | return_value=(BytesIO(), BytesIO()),
76 | )
77 | def test_restore_dump_password(self, mock_dump_cmd, mock_restore_cmd):
78 | connector = MongoDumpConnector()
79 | dump = connector.create_dump()
80 | # Without
81 | connector.settings.pop("PASSWORD", None)
82 | connector.restore_dump(dump)
83 | self.assertNotIn(" --password ", mock_restore_cmd.call_args[0][0])
84 | # With
85 | connector.settings["PASSWORD"] = "foo"
86 | connector.restore_dump(dump)
87 | self.assertIn(" --password foo", mock_restore_cmd.call_args[0][0])
88 |
89 | @patch(
90 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
91 | return_value=(BytesIO(), BytesIO()),
92 | )
93 | def test_restore_dump_object_check(self, mock_dump_cmd, mock_restore_cmd):
94 | connector = MongoDumpConnector()
95 | dump = connector.create_dump()
96 | # Without
97 | connector.object_check = False
98 | connector.restore_dump(dump)
99 | self.assertNotIn("--objcheck", mock_restore_cmd.call_args[0][0])
100 | # With
101 | connector.object_check = True
102 | connector.restore_dump(dump)
103 | self.assertIn(" --objcheck", mock_restore_cmd.call_args[0][0])
104 |
105 | @patch(
106 | "dbbackup.db.mongodb.MongoDumpConnector.run_command",
107 | return_value=(BytesIO(), BytesIO()),
108 | )
109 | def test_restore_dump_drop(self, mock_dump_cmd, mock_restore_cmd):
110 | connector = MongoDumpConnector()
111 | dump = connector.create_dump()
112 | # Without
113 | connector.drop = False
114 | connector.restore_dump(dump)
115 | self.assertNotIn("--drop", mock_restore_cmd.call_args[0][0])
116 | # With
117 | connector.drop = True
118 | connector.restore_dump(dump)
119 | self.assertIn(" --drop", mock_restore_cmd.call_args[0][0])
120 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_connectors/test_mysql.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | from unittest.mock import patch
3 |
4 | from django.test import TestCase
5 |
6 | from dbbackup.db.mysql import MysqlDumpConnector
7 |
8 |
9 | @patch(
10 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
11 | return_value=(BytesIO(b"foo"), BytesIO()),
12 | )
13 | class MysqlDumpConnectorTest(TestCase):
14 | def test_create_dump(self, mock_dump_cmd):
15 | connector = MysqlDumpConnector()
16 | dump = connector.create_dump()
17 | # Test dump
18 | dump_content = dump.read()
19 | self.assertTrue(dump_content)
20 | self.assertEqual(dump_content, b"foo")
21 | # Test cmd
22 | self.assertTrue(mock_dump_cmd.called)
23 |
24 | def test_create_dump_host(self, mock_dump_cmd):
25 | connector = MysqlDumpConnector()
26 | # Without
27 | connector.settings.pop("HOST", None)
28 | connector.create_dump()
29 | self.assertNotIn(" --host=", mock_dump_cmd.call_args[0][0])
30 | # With
31 | connector.settings["HOST"] = "foo"
32 | connector.create_dump()
33 | self.assertIn(" --host=foo", mock_dump_cmd.call_args[0][0])
34 |
35 | def test_create_dump_port(self, mock_dump_cmd):
36 | connector = MysqlDumpConnector()
37 | # Without
38 | connector.settings.pop("PORT", None)
39 | connector.create_dump()
40 | self.assertNotIn(" --port=", mock_dump_cmd.call_args[0][0])
41 | # With
42 | connector.settings["PORT"] = 42
43 | connector.create_dump()
44 | self.assertIn(" --port=42", mock_dump_cmd.call_args[0][0])
45 |
46 | def test_create_dump_user(self, mock_dump_cmd):
47 | connector = MysqlDumpConnector()
48 | # Without
49 | connector.settings.pop("USER", None)
50 | connector.create_dump()
51 | self.assertNotIn(" --user=", mock_dump_cmd.call_args[0][0])
52 | # With
53 | connector.settings["USER"] = "foo"
54 | connector.create_dump()
55 | self.assertIn(" --user=foo", mock_dump_cmd.call_args[0][0])
56 |
57 | def test_create_dump_password(self, mock_dump_cmd):
58 | connector = MysqlDumpConnector()
59 | # Without
60 | connector.settings.pop("PASSWORD", None)
61 | connector.create_dump()
62 | self.assertNotIn(" --password=", mock_dump_cmd.call_args[0][0])
63 | # With
64 | connector.settings["PASSWORD"] = "foo"
65 | connector.create_dump()
66 | self.assertIn(" --password=foo", mock_dump_cmd.call_args[0][0])
67 |
68 | def test_create_dump_exclude(self, mock_dump_cmd):
69 | connector = MysqlDumpConnector()
70 | connector.settings["NAME"] = "db"
71 | # Without
72 | connector.create_dump()
73 | self.assertNotIn(" --ignore-table=", mock_dump_cmd.call_args[0][0])
74 | # With
75 | connector.exclude = ("foo",)
76 | connector.create_dump()
77 | self.assertIn(" --ignore-table=db.foo", mock_dump_cmd.call_args[0][0])
78 | # With several
79 | connector.exclude = ("foo", "bar")
80 | connector.create_dump()
81 | self.assertIn(" --ignore-table=db.foo", mock_dump_cmd.call_args[0][0])
82 | self.assertIn(" --ignore-table=db.bar", mock_dump_cmd.call_args[0][0])
83 |
84 | @patch(
85 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
86 | return_value=(BytesIO(), BytesIO()),
87 | )
88 | def test_restore_dump(self, mock_dump_cmd, mock_restore_cmd):
89 | connector = MysqlDumpConnector()
90 | dump = connector.create_dump()
91 | connector.restore_dump(dump)
92 | # Test cmd
93 | self.assertTrue(mock_restore_cmd.called)
94 |
95 | @patch(
96 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
97 | return_value=(BytesIO(), BytesIO()),
98 | )
99 | def test_restore_dump_host(self, mock_dump_cmd, mock_restore_cmd):
100 | connector = MysqlDumpConnector()
101 | dump = connector.create_dump()
102 | # Without
103 | connector.settings.pop("HOST", None)
104 | connector.restore_dump(dump)
105 | self.assertNotIn(" --host=foo", mock_restore_cmd.call_args[0][0])
106 | # With
107 | connector.settings["HOST"] = "foo"
108 | connector.restore_dump(dump)
109 | self.assertIn(" --host=foo", mock_restore_cmd.call_args[0][0])
110 |
111 | @patch(
112 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
113 | return_value=(BytesIO(), BytesIO()),
114 | )
115 | def test_restore_dump_port(self, mock_dump_cmd, mock_restore_cmd):
116 | connector = MysqlDumpConnector()
117 | dump = connector.create_dump()
118 | # Without
119 | connector.settings.pop("PORT", None)
120 | connector.restore_dump(dump)
121 | self.assertNotIn(" --port=", mock_restore_cmd.call_args[0][0])
122 | # With
123 | connector.settings["PORT"] = 42
124 | connector.restore_dump(dump)
125 | self.assertIn(" --port=42", mock_restore_cmd.call_args[0][0])
126 |
127 | @patch(
128 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
129 | return_value=(BytesIO(), BytesIO()),
130 | )
131 | def test_restore_dump_user(self, mock_dump_cmd, mock_restore_cmd):
132 | connector = MysqlDumpConnector()
133 | dump = connector.create_dump()
134 | # Without
135 | connector.settings.pop("USER", None)
136 | connector.restore_dump(dump)
137 | self.assertNotIn(" --user=", mock_restore_cmd.call_args[0][0])
138 | # With
139 | connector.settings["USER"] = "foo"
140 | connector.restore_dump(dump)
141 | self.assertIn(" --user=foo", mock_restore_cmd.call_args[0][0])
142 |
143 | @patch(
144 | "dbbackup.db.mysql.MysqlDumpConnector.run_command",
145 | return_value=(BytesIO(), BytesIO()),
146 | )
147 | def test_restore_dump_password(self, mock_dump_cmd, mock_restore_cmd):
148 | connector = MysqlDumpConnector()
149 | dump = connector.create_dump()
150 | # Without
151 | connector.settings.pop("PASSWORD", None)
152 | connector.restore_dump(dump)
153 | self.assertNotIn(" --password=", mock_restore_cmd.call_args[0][0])
154 | # With
155 | connector.settings["PASSWORD"] = "foo"
156 | connector.restore_dump(dump)
157 | self.assertIn(" --password=foo", mock_restore_cmd.call_args[0][0])
158 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_connectors/test_sqlite.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | from unittest.mock import mock_open, patch
3 |
4 | from django.db import connection
5 | from django.test import TestCase
6 |
7 | from dbbackup.db.sqlite import SqliteConnector, SqliteCPConnector
8 | from dbbackup.tests.testapp.models import CharModel, TextModel
9 |
10 |
11 | class SqliteConnectorTest(TestCase):
12 | def test_write_dump(self):
13 | dump_file = BytesIO()
14 | connector = SqliteConnector()
15 | connector._write_dump(dump_file)
16 | dump_file.seek(0)
17 | for line in dump_file:
18 | self.assertTrue(line.strip().endswith(b";"))
19 |
20 | def test_create_dump(self):
21 | connector = SqliteConnector()
22 | dump = connector.create_dump()
23 | self.assertTrue(dump.read())
24 |
25 | def test_create_dump_with_unicode(self):
26 | CharModel.objects.create(field="\xe9")
27 | connector = SqliteConnector()
28 | dump = connector.create_dump()
29 | self.assertTrue(dump.read())
30 |
31 | def test_create_dump_with_newline(self):
32 | TextModel.objects.create(
33 | field=f'INSERT ({"foo" * 5000}\nbar\n WHERE \nbaz IS\n "great" );\n'
34 | )
35 |
36 | connector = SqliteConnector()
37 | dump = connector.create_dump()
38 | self.assertTrue(dump.read())
39 |
40 | def test_restore_dump(self):
41 | TextModel.objects.create(field="T\nf\nw\nnl")
42 | connector = SqliteConnector()
43 | dump = connector.create_dump()
44 | connector.restore_dump(dump)
45 |
46 | def test_create_dump_with_virtual_tables(self):
47 | with connection.cursor() as c:
48 | c.execute("CREATE VIRTUAL TABLE lookup USING fts5(field)")
49 |
50 | connector = SqliteConnector()
51 | dump = connector.create_dump()
52 | self.assertTrue(dump.read())
53 |
54 |
55 | @patch("dbbackup.db.sqlite.open", mock_open(read_data=b"foo"), create=True)
56 | class SqliteCPConnectorTest(TestCase):
57 | def test_create_dump(self):
58 | connector = SqliteCPConnector()
59 | dump = connector.create_dump()
60 | dump_content = dump.read()
61 | self.assertTrue(dump_content)
62 | self.assertEqual(dump_content, b"foo")
63 |
64 | def test_restore_dump(self):
65 | connector = SqliteCPConnector()
66 | dump = connector.create_dump()
67 | connector.restore_dump(dump)
68 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_log.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from unittest.mock import patch
3 |
4 | import django
5 | from django.core import mail
6 | from django.test import TestCase
7 | from testfixtures import log_capture
8 |
9 | from dbbackup import log
10 |
11 |
12 | class LoggerDefaultTestCase(TestCase):
13 | @log_capture()
14 | def test_root(self, captures):
15 | logger = logging.getLogger()
16 | logger.debug("a noise")
17 | logger.info("a message")
18 | logger.warning("a warning")
19 | logger.error("an error")
20 | logger.critical("a critical error")
21 | captures.check(
22 | ("root", "DEBUG", "a noise"),
23 | ("root", "INFO", "a message"),
24 | ("root", "WARNING", "a warning"),
25 | ("root", "ERROR", "an error"),
26 | ("root", "CRITICAL", "a critical error"),
27 | )
28 |
29 | @log_capture()
30 | def test_django(self, captures):
31 | logger = logging.getLogger("django")
32 | logger.debug("a noise")
33 | logger.info("a message")
34 | logger.warning("a warning")
35 | logger.error("an error")
36 | logger.critical("a critical error")
37 | if django.VERSION < (1, 9):
38 | captures.check(
39 | ("django", "DEBUG", "a noise"),
40 | ("django", "INFO", "a message"),
41 | ("django", "WARNING", "a warning"),
42 | ("django", "ERROR", "an error"),
43 | ("django", "CRITICAL", "a critical error"),
44 | )
45 | else:
46 | captures.check(
47 | ("django", "INFO", "a message"),
48 | ("django", "WARNING", "a warning"),
49 | ("django", "ERROR", "an error"),
50 | ("django", "CRITICAL", "a critical error"),
51 | )
52 |
53 | @log_capture()
54 | def test_dbbackup(self, captures):
55 | logger = logging.getLogger("dbbackup")
56 | logger.debug("a noise")
57 | logger.info("a message")
58 | logger.warning("a warning")
59 | logger.error("an error")
60 | logger.critical("a critical error")
61 | captures.check(
62 | ("dbbackup", "INFO", "a message"),
63 | ("dbbackup", "WARNING", "a warning"),
64 | ("dbbackup", "ERROR", "an error"),
65 | ("dbbackup", "CRITICAL", "a critical error"),
66 | )
67 |
68 | @log_capture()
69 | def test_dbbackup_storage(self, captures):
70 | logger = logging.getLogger("dbbackup.storage")
71 | logger.debug("a noise")
72 | logger.info("a message")
73 | logger.warning("a warning")
74 | logger.error("an error")
75 | logger.critical("a critical error")
76 | captures.check(
77 | ("dbbackup.storage", "INFO", "a message"),
78 | ("dbbackup.storage", "WARNING", "a warning"),
79 | ("dbbackup.storage", "ERROR", "an error"),
80 | ("dbbackup.storage", "CRITICAL", "a critical error"),
81 | )
82 |
83 | @log_capture()
84 | def test_other_module(self, captures):
85 | logger = logging.getLogger("os.path")
86 | logger.debug("a noise")
87 | logger.info("a message")
88 | logger.warning("a warning")
89 | logger.error("an error")
90 | logger.critical("a critical error")
91 | captures.check(
92 | ("os.path", "DEBUG", "a noise"),
93 | ("os.path", "INFO", "a message"),
94 | ("os.path", "WARNING", "a warning"),
95 | ("os.path", "ERROR", "an error"),
96 | ("os.path", "CRITICAL", "a critical error"),
97 | )
98 |
99 |
100 | class DbbackupAdminEmailHandlerTest(TestCase):
101 | def setUp(self):
102 | self.logger = logging.getLogger("dbbackup")
103 |
104 | @patch("dbbackup.settings.SEND_EMAIL", True)
105 | def test_send_mail(self):
106 | # Test mail error
107 | msg = "Super msg"
108 | self.logger.error(msg)
109 | self.assertEqual(mail.outbox[0].subject, "[dbbackup] ERROR: Super msg")
110 | # Test don't mail below
111 | self.logger.warning(msg)
112 | self.assertEqual(len(mail.outbox), 1)
113 |
114 | @patch("dbbackup.settings.SEND_EMAIL", False)
115 | def test_send_mail_is_false(self):
116 | msg = "Super msg"
117 | self.logger.error(msg)
118 | self.assertEqual(len(mail.outbox), 0)
119 |
120 |
121 | class MailEnabledFilterTest(TestCase):
122 | @patch("dbbackup.settings.SEND_EMAIL", True)
123 | def test_filter_is_true(self):
124 | filter_ = log.MailEnabledFilter()
125 | self.assertTrue(filter_.filter("foo"))
126 |
127 | @patch("dbbackup.settings.SEND_EMAIL", False)
128 | def test_filter_is_false(self):
129 | filter_ = log.MailEnabledFilter()
130 | self.assertFalse(filter_.filter("foo"))
131 |
--------------------------------------------------------------------------------
/dbbackup/tests/test_storage.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 | from django.test import TestCase
4 |
5 | from dbbackup import utils
6 | from dbbackup.storage import Storage, get_storage, get_storage_class
7 | from dbbackup.tests.utils import HANDLED_FILES, FakeStorage
8 |
9 | DEFAULT_STORAGE_PATH = "django.core.files.storage.FileSystemStorage"
10 | STORAGE_OPTIONS = {"location": "/tmp"}
11 |
12 |
13 | class Get_StorageTest(TestCase):
14 | @patch("dbbackup.settings.STORAGE", DEFAULT_STORAGE_PATH)
15 | @patch("dbbackup.settings.STORAGE_OPTIONS", STORAGE_OPTIONS)
16 | def test_func(self, *args):
17 | self.assertIsInstance(get_storage(), Storage)
18 |
19 | def test_set_path(self):
20 | fake_storage_path = "dbbackup.tests.utils.FakeStorage"
21 | storage = get_storage(fake_storage_path)
22 | self.assertIsInstance(storage.storage, FakeStorage)
23 |
24 | @patch("dbbackup.settings.STORAGE", DEFAULT_STORAGE_PATH)
25 | def test_set_options(self, *args):
26 | storage = get_storage(options=STORAGE_OPTIONS)
27 | self.assertIn(
28 | storage.storage.__module__,
29 | # TODO: Remove "django.core.files.storage" case when dropping support for Django < 4.2.
30 | ("django.core.files.storage", "django.core.files.storage.filesystem"),
31 | )
32 |
33 | def test_get_storage_class(self):
34 | storage_class = get_storage_class(DEFAULT_STORAGE_PATH)
35 | self.assertIn(
36 | storage_class.__module__,
37 | ("django.core.files.storage", "django.core.files.storage.filesystem"),
38 | )
39 | self.assertIn(storage_class.__name__, ("FileSystemStorage", "DefaultStorage"))
40 |
41 | storage_class = get_storage_class("dbbackup.tests.utils.FakeStorage")
42 | self.assertEqual(storage_class.__module__, "dbbackup.tests.utils")
43 | self.assertEqual(storage_class.__name__, "FakeStorage")
44 |
45 | def test_default_storage_class(self):
46 | storage_class = get_storage_class()
47 | self.assertIn(
48 | storage_class.__module__,
49 | ("django.core.files.storage", "django.core.files.storage.filesystem"),
50 | )
51 | self.assertIn(storage_class.__name__, ("FileSystemStorage", "DefaultStorage"))
52 |
53 | def test_invalid_storage_class_path(self):
54 | with self.assertRaises(ImportError):
55 | get_storage_class("invalid.path.to.StorageClass")
56 |
57 | def test_storages_settings(self):
58 | from .settings import STORAGES
59 |
60 | self.assertIsInstance(STORAGES, dict)
61 | self.assertEqual(
62 | STORAGES["dbbackup"]["BACKEND"], "dbbackup.tests.utils.FakeStorage"
63 | )
64 |
65 | from dbbackup.settings import DJANGO_STORAGES, STORAGE
66 |
67 | self.assertIsInstance(DJANGO_STORAGES, dict)
68 | self.assertEqual(DJANGO_STORAGES, STORAGES)
69 | self.assertEqual(STORAGES["dbbackup"]["BACKEND"], STORAGE)
70 |
71 | storage = get_storage()
72 | self.assertEqual(storage.storage.__class__.__module__, "dbbackup.tests.utils")
73 | self.assertEqual(storage.storage.__class__.__name__, "FakeStorage")
74 |
75 | def test_storages_settings_options(self):
76 | from dbbackup.settings import STORAGE_OPTIONS
77 |
78 | from .settings import STORAGES
79 |
80 | self.assertEqual(STORAGES["dbbackup"]["OPTIONS"], STORAGE_OPTIONS)
81 |
82 |
83 | class StorageTest(TestCase):
84 | def setUp(self):
85 | self.storageCls = Storage
86 | self.storageCls.name = "foo"
87 | self.storage = Storage()
88 |
89 |
90 | class StorageListBackupsTest(TestCase):
91 | def setUp(self):
92 | HANDLED_FILES.clean()
93 | self.storage = get_storage()
94 | # foodb files
95 | HANDLED_FILES["written_files"] += [
96 | (utils.filename_generate(ext, "foodb"), None)
97 | for ext in ("db", "db.gz", "db.gpg", "db.gz.gpg")
98 | ]
99 | HANDLED_FILES["written_files"] += [
100 | (utils.filename_generate(ext, "hamdb", "fooserver"), None)
101 | for ext in ("db", "db.gz", "db.gpg", "db.gz.gpg")
102 | ]
103 | # Media file
104 | HANDLED_FILES["written_files"] += [
105 | (utils.filename_generate(ext, None, None, "media"), None)
106 | for ext in ("tar", "tar.gz", "tar.gpg", "tar.gz.gpg")
107 | ]
108 | HANDLED_FILES["written_files"] += [
109 | (utils.filename_generate(ext, "bardb", "barserver"), None)
110 | for ext in ("db", "db.gz", "db.gpg", "db.gz.gpg")
111 | ]
112 | # barserver files
113 | HANDLED_FILES["written_files"] += [("file_without_date", None)]
114 |
115 | def test_nofilter(self):
116 | files = self.storage.list_backups()
117 | self.assertEqual(len(HANDLED_FILES["written_files"]) - 1, len(files))
118 | for file in files:
119 | self.assertNotEqual("file_without_date", file)
120 |
121 | def test_encrypted(self):
122 | files = self.storage.list_backups(encrypted=True)
123 | for file in files:
124 | self.assertIn(".gpg", file)
125 |
126 | def test_compressed(self):
127 | files = self.storage.list_backups(compressed=True)
128 | for file in files:
129 | self.assertIn(".gz", file)
130 |
131 | def test_not_encrypted(self):
132 | files = self.storage.list_backups(encrypted=False)
133 | for file in files:
134 | self.assertNotIn(".gpg", file)
135 |
136 | def test_not_compressed(self):
137 | files = self.storage.list_backups(compressed=False)
138 | for file in files:
139 | self.assertNotIn(".gz", file)
140 |
141 | def test_content_type_db(self):
142 | files = self.storage.list_backups(content_type="db")
143 | for file in files:
144 | self.assertIn(".db", file)
145 |
146 | def test_database(self):
147 | files = self.storage.list_backups(database="foodb")
148 | for file in files:
149 | self.assertIn("foodb", file)
150 | self.assertNotIn("bardb", file)
151 | self.assertNotIn("hamdb", file)
152 |
153 | def test_servername(self):
154 | files = self.storage.list_backups(servername="fooserver")
155 | for file in files:
156 | self.assertIn("fooserver", file)
157 | self.assertNotIn("barserver", file)
158 | files = self.storage.list_backups(servername="barserver")
159 | for file in files:
160 | self.assertIn("barserver", file)
161 | self.assertNotIn("fooserver", file)
162 |
163 | def test_content_type_media(self):
164 | files = self.storage.list_backups(content_type="media")
165 | for file in files:
166 | self.assertIn(".tar", file)
167 |
168 | # def test_servername(self):
169 | # files = self.storage.list_backups(servername='barserver')
170 | # for file in files:
171 | # self.assertIn('barserver', file)
172 |
173 |
174 | class StorageGetLatestTest(TestCase):
175 | def setUp(self):
176 | self.storage = get_storage()
177 | HANDLED_FILES["written_files"] = [
178 | (f, None)
179 | for f in [
180 | "2015-02-06-042810.bak",
181 | "2015-02-07-042810.bak",
182 | "2015-02-08-042810.bak",
183 | ]
184 | ]
185 |
186 | def tearDown(self):
187 | HANDLED_FILES.clean()
188 |
189 | def test_func(self):
190 | filename = self.storage.get_latest_backup()
191 | self.assertEqual(filename, "2015-02-08-042810.bak")
192 |
193 |
194 | class StorageGetMostRecentTest(TestCase):
195 | def setUp(self):
196 | self.storage = get_storage()
197 | HANDLED_FILES["written_files"] = [
198 | (f, None)
199 | for f in [
200 | "2015-02-06-042810.bak",
201 | "2015-02-07-042810.bak",
202 | "2015-02-08-042810.bak",
203 | ]
204 | ]
205 |
206 | def tearDown(self):
207 | HANDLED_FILES.clean()
208 |
209 | def test_func(self):
210 | filename = self.storage.get_older_backup()
211 | self.assertEqual(filename, "2015-02-06-042810.bak")
212 |
213 |
214 | def keep_only_even_files(filename):
215 | from dbbackup.utils import filename_to_date
216 |
217 | return filename_to_date(filename).day % 2 == 0
218 |
219 |
220 | class StorageCleanOldBackupsTest(TestCase):
221 | def setUp(self):
222 | self.storage = get_storage()
223 | HANDLED_FILES.clean()
224 | HANDLED_FILES["written_files"] = [
225 | (f, None)
226 | for f in [
227 | "2015-02-06-042810.bak",
228 | "2015-02-07-042810.bak",
229 | "2015-02-08-042810.bak",
230 | ]
231 | ]
232 |
233 | def test_func(self):
234 | self.storage.clean_old_backups(keep_number=1)
235 | self.assertEqual(2, len(HANDLED_FILES["deleted_files"]))
236 |
237 | @patch("dbbackup.settings.CLEANUP_KEEP_FILTER", keep_only_even_files)
238 | def test_keep_filter(self):
239 | self.storage.clean_old_backups(keep_number=1)
240 | self.assertListEqual(["2015-02-07-042810.bak"], HANDLED_FILES["deleted_files"])
241 |
--------------------------------------------------------------------------------
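The ``keep_only_even_files`` helper above shows the shape of the cleanup keep-filter hook: it receives a backup filename and returns True for files that must be kept even once ``keep_number`` is exceeded (only ``2015-02-07-042810.bak`` is deleted in the filtered test). A hedged sketch of a project-level filter follows; the ``DBBACKUP_CLEANUP_KEEP_FILTER`` setting name is taken from the configuration docs further down, the 30-day threshold is illustrative only, and it assumes ``filename_to_date`` returns a ``datetime``, as its ``.day`` access in the test suggests:

    # settings.py -- illustrative sketch only
    from datetime import datetime, timedelta

    from dbbackup.utils import filename_to_date


    def keep_recent_backups(filename):
        # Keep anything newer than 30 days, regardless of keep_number.
        return filename_to_date(filename) > datetime.now() - timedelta(days=30)


    DBBACKUP_CLEANUP_KEEP_FILTER = keep_recent_backups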
/dbbackup/tests/testapp/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/gpg/pubring.gpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/gpg/pubring.gpg
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/gpg/secring.gpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/gpg/secring.gpg
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/test.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/test.gz
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/test.txt.gpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/test.txt.gpg
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/test.txt.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/test.txt.gz
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/test.txt.gz.gpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/test.txt.gz.gpg
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/blobs/test.txt.tar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/blobs/test.txt.tar
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/management/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/management/commands/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/management/commands/count.py:
--------------------------------------------------------------------------------
1 | from django.core.management.base import BaseCommand
2 |
3 | from dbbackup.tests.testapp.models import CharModel
4 |
5 |
6 | class Command(BaseCommand):
7 | help = "Count things"
8 |
9 | def handle(self, **options):
10 | self.stdout.write(str(CharModel.objects.count()))
11 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/management/commands/feed.py:
--------------------------------------------------------------------------------
1 | from django.core.management.base import BaseCommand
2 |
3 | from dbbackup.tests.testapp.models import CharModel
4 |
5 |
6 | class Command(BaseCommand):
7 |     help = "Feed the database with CharModel objects"
8 |
9 | def handle(self, **options):
10 | for st in "abcde":
11 | CharModel.objects.create(field=st)
12 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | from django.db import migrations, models
2 |
3 |
4 | class Migration(migrations.Migration):
5 |
6 | initial = True
7 | dependencies = []
8 |
9 | operations = [
10 | migrations.CreateModel(
11 | name="CharModel",
12 | fields=[
13 | (
14 | "id",
15 | models.AutoField(
16 | verbose_name="ID",
17 | serialize=False,
18 | auto_created=True,
19 | primary_key=True,
20 | ),
21 | ),
22 | ("field", models.CharField(max_length=10)),
23 | ],
24 | ),
25 | migrations.CreateModel(
26 | name="FileModel",
27 | fields=[
28 | (
29 | "id",
30 | models.AutoField(
31 | verbose_name="ID",
32 | serialize=False,
33 | auto_created=True,
34 | primary_key=True,
35 | ),
36 | ),
37 | ("field", models.FileField(upload_to=".")),
38 | ],
39 | ),
40 | migrations.CreateModel(
41 | name="ForeignKeyModel",
42 | fields=[
43 | (
44 | "id",
45 | models.AutoField(
46 | verbose_name="ID",
47 | serialize=False,
48 | auto_created=True,
49 | primary_key=True,
50 | ),
51 | ),
52 | (
53 | "field",
54 | models.ForeignKey(to="testapp.CharModel", on_delete=models.CASCADE),
55 | ),
56 | ],
57 | ),
58 | migrations.CreateModel(
59 | name="ManyToManyModel",
60 | fields=[
61 | (
62 | "id",
63 | models.AutoField(
64 | verbose_name="ID",
65 | serialize=False,
66 | auto_created=True,
67 | primary_key=True,
68 | ),
69 | ),
70 | ("field", models.ManyToManyField(to="testapp.CharModel")),
71 | ],
72 | ),
73 | ]
74 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/migrations/0002_textmodel.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 4.0.1 on 2022-04-27 22:36
2 |
3 | from django.db import migrations, models
4 |
5 |
6 | class Migration(migrations.Migration):
7 |
8 | dependencies = [
9 | ("testapp", "0001_initial"),
10 | ]
11 |
12 | operations = [
13 | migrations.CreateModel(
14 | name="TextModel",
15 | fields=[
16 | (
17 | "id",
18 | models.AutoField(
19 | auto_created=True,
20 | primary_key=True,
21 | serialize=False,
22 | verbose_name="ID",
23 | ),
24 | ),
25 | ("field", models.TextField()),
26 | ],
27 | ),
28 | ]
29 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jazzband/django-dbbackup/d77d25ff881bc346461aab894036fb5d72ec98d0/dbbackup/tests/testapp/migrations/__init__.py
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
4 | class CharModel(models.Model):
5 | field = models.CharField(max_length=10)
6 |
7 |
8 | class TextModel(models.Model):
9 | field = models.TextField()
10 |
11 |
12 | class ForeignKeyModel(models.Model):
13 | field = models.ForeignKey(CharModel, on_delete=models.CASCADE)
14 |
15 |
16 | class ManyToManyModel(models.Model):
17 | field = models.ManyToManyField(CharModel)
18 |
19 |
20 | class FileModel(models.Model):
21 | field = models.FileField(upload_to=".")
22 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/urls.py:
--------------------------------------------------------------------------------
1 | urlpatterns = (
2 | # url(r'^admin/', include(admin.site.urls)),
3 | )
4 |
--------------------------------------------------------------------------------
/dbbackup/tests/testapp/views.py:
--------------------------------------------------------------------------------
1 | # Create your views here.
2 |
--------------------------------------------------------------------------------
/dbbackup/tests/utils.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import os
3 | import subprocess
4 |
5 | from django.conf import settings
6 | from django.core.files import File
7 | from django.core.files.storage import Storage
8 | from django.utils import timezone
9 |
10 | from dbbackup.db.base import get_connector
11 |
12 | BASE_FILE = os.path.join(settings.BLOB_DIR, "test.txt")
13 | ENCRYPTED_FILE = os.path.join(settings.BLOB_DIR, "test.txt.gpg")
14 | COMPRESSED_FILE = os.path.join(settings.BLOB_DIR, "test.txt.gz")
15 | TARED_FILE = os.path.join(settings.BLOB_DIR, "test.txt.tar")
16 | ENCRYPTED_COMPRESSED_FILE = os.path.join(settings.BLOB_DIR, "test.txt.gz.gpg")
17 | TEST_DATABASE = {
18 | "ENGINE": "django.db.backends.sqlite3",
19 | "NAME": "/tmp/foo.db",
20 | "USER": "foo",
21 | "PASSWORD": "bar",
22 | "HOST": "foo",
23 | "PORT": 122,
24 | }
25 | TEST_MONGODB = {
26 | "ENGINE": "django_mongodb_engine",
27 | "NAME": "mongo_test",
28 | "USER": "foo",
29 | "PASSWORD": "bar",
30 | "HOST": "foo",
31 | "PORT": 122,
32 | }
33 | TEST_DATABASE = settings.DATABASES["default"]
34 |
35 | GPG_PRIVATE_PATH = os.path.join(settings.BLOB_DIR, "gpg/secring.gpg")
36 | GPG_PUBLIC_PATH = os.path.join(settings.BLOB_DIR, "gpg/pubring.gpg")
37 | GPG_FINGERPRINT = "7438 8D4E 02AF C011 4E2F 1E79 F7D1 BBF0 1F63 FDE9"
38 | DEV_NULL = open(os.devnull, "w")
39 |
40 |
41 | class handled_files(dict):
42 | """
43 |     Dict that gathers information about the fake storage and is cleaned
44 |     between tests. Use the constant instance ``HANDLED_FILES`` and clean
45 |     it before each test.
46 | """
47 |
48 | def __init__(self):
49 | super().__init__()
50 | self.clean()
51 |
52 | def clean(self):
53 | self["written_files"] = []
54 | self["deleted_files"] = []
55 |
56 |
57 | HANDLED_FILES = handled_files()
58 |
59 |
60 | class FakeStorage(Storage):
61 | name = "FakeStorage"
62 |
63 | def exists(self, name):
64 | return name in HANDLED_FILES["written_files"]
65 |
66 | def get_available_name(self, name, max_length=None):
67 | return name[:max_length]
68 |
69 | def get_valid_name(self, name):
70 | return name
71 |
72 | def listdir(self, path):
73 | return ([], [f[0] for f in HANDLED_FILES["written_files"]])
74 |
75 | def accessed_time(self, name):
76 | return timezone.now()
77 |
78 | created_time = modified_time = accessed_time
79 |
80 | def _open(self, name, mode="rb"):
81 | file_ = [f[1] for f in HANDLED_FILES["written_files"] if f[0] == name][0]
82 | file_.seek(0)
83 | return file_
84 |
85 | def _save(self, name, content):
86 | HANDLED_FILES["written_files"].append((name, File(content)))
87 | return name
88 |
89 | def delete(self, name):
90 | HANDLED_FILES["deleted_files"].append(name)
91 |
92 |
93 | def clean_gpg_keys():
94 | with contextlib.suppress(Exception):
95 | cmd = "gpg --batch --yes --delete-key '%s'" % GPG_FINGERPRINT
96 | subprocess.call(cmd, stdout=DEV_NULL, stderr=DEV_NULL)
97 | with contextlib.suppress(Exception):
98 |         cmd = "gpg --batch --yes --delete-secret-key '%s'" % GPG_FINGERPRINT
99 | subprocess.call(cmd, stdout=DEV_NULL, stderr=DEV_NULL)
100 |
101 |
102 | def add_private_gpg():
103 | cmd = f"gpg --import {GPG_PRIVATE_PATH}".split()
104 | subprocess.call(cmd, stdout=DEV_NULL, stderr=DEV_NULL)
105 |
106 |
107 | def add_public_gpg():
108 | cmd = f"gpg --import {GPG_PUBLIC_PATH}".split()
109 | subprocess.call(cmd, stdout=DEV_NULL, stderr=DEV_NULL)
110 |
111 |
112 | def callable_for_filename_template(datetime, **kwargs):
113 | return f"{datetime}_foo"
114 |
115 |
116 | def get_dump(database=TEST_DATABASE):
117 | return get_connector().create_dump()
118 |
119 |
120 | def get_dump_name(database=None):
121 | database = database or TEST_DATABASE
122 | return get_connector().generate_filename()
123 |
--------------------------------------------------------------------------------
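As the ``handled_files`` docstring says, tests point the backup storage at ``FakeStorage`` and reset ``HANDLED_FILES`` between runs. A minimal sketch of that pattern, mirroring ``StorageGetLatestTest`` from ``test_storage.py`` above:

    # Illustrative sketch only; mirrors existing tests.
    from django.test import TestCase

    from dbbackup.storage import get_storage
    from dbbackup.tests.utils import HANDLED_FILES


    class FakeStorageUsageExample(TestCase):
        def setUp(self):
            HANDLED_FILES.clean()  # reset written/deleted bookkeeping
            # Pretend two dated backups were already written to storage.
            HANDLED_FILES["written_files"] = [
                ("2015-02-07-042810.bak", None),
                ("2015-02-08-042810.bak", None),
            ]
            self.storage = get_storage("dbbackup.tests.utils.FakeStorage")

        def test_latest_backup_is_found(self):
            self.assertEqual(
                self.storage.get_latest_backup(), "2015-02-08-042810.bak"
            )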
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
18 |
19 | help:
20 | 	@echo "Please use \`make <target>' where <target> is one of"
21 | @echo " html to make standalone HTML files"
22 | @echo " dirhtml to make HTML files named index.html in directories"
23 | @echo " singlehtml to make a single large HTML file"
24 | @echo " pickle to make pickle files"
25 | @echo " json to make JSON files"
26 | @echo " htmlhelp to make HTML files and a HTML help project"
27 | @echo " qthelp to make HTML files and a qthelp project"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
31 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
32 | @echo " text to make text files"
33 | @echo " man to make manual pages"
34 | @echo " texinfo to make Texinfo files"
35 | @echo " info to make Texinfo files and run them through makeinfo"
36 | @echo " gettext to make PO message catalogs"
37 | @echo " changes to make an overview of all changed/added/deprecated items"
38 | @echo " linkcheck to check all external links for integrity"
39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
40 |
41 | clean:
42 | -rm -rf $(BUILDDIR)/*
43 |
44 | html:
45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
48 |
49 | dirhtml:
50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
51 | @echo
52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
53 |
54 | singlehtml:
55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
56 | @echo
57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
58 |
59 | pickle:
60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
61 | @echo
62 | @echo "Build finished; now you can process the pickle files."
63 |
64 | json:
65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
66 | @echo
67 | @echo "Build finished; now you can process the JSON files."
68 |
69 | htmlhelp:
70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
71 | @echo
72 | @echo "Build finished; now you can run HTML Help Workshop with the" \
73 | ".hhp project file in $(BUILDDIR)/htmlhelp."
74 |
75 | qthelp:
76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
77 | @echo
78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-dbbackup.qhcp"
81 | @echo "To view the help file:"
82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-dbbackup.qhc"
83 |
84 | devhelp:
85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
86 | @echo
87 | @echo "Build finished."
88 | @echo "To view the help file:"
89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/django-dbbackup"
90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-dbbackup"
91 | @echo "# devhelp"
92 |
93 | epub:
94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
95 | @echo
96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
97 |
98 | latex:
99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
100 | @echo
101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
103 | "(use \`make latexpdf' here to do that automatically)."
104 |
105 | latexpdf:
106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
107 | @echo "Running LaTeX files through pdflatex..."
108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
110 |
111 | text:
112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
113 | @echo
114 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
115 |
116 | man:
117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
118 | @echo
119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
120 |
121 | texinfo:
122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
123 | @echo
124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
125 | @echo "Run \`make' in that directory to run these through makeinfo" \
126 | "(use \`make info' here to do that automatically)."
127 |
128 | info:
129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
130 | @echo "Running Texinfo files through makeinfo..."
131 | make -C $(BUILDDIR)/texinfo info
132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
133 |
134 | gettext:
135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
136 | @echo
137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
138 |
139 | changes:
140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
141 | @echo
142 | @echo "The overview file is in $(BUILDDIR)/changes."
143 |
144 | linkcheck:
145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
146 | @echo
147 | @echo "Link check complete; look for any errors in the above output " \
148 | "or in $(BUILDDIR)/linkcheck/output.txt."
149 |
150 | doctest:
151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
152 | @echo "Testing of doctests in the sources finished, look at the " \
153 | "results in $(BUILDDIR)/doctest/output.txt."
154 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | Unreleased
5 | ----------
6 |
7 | * Nothing (yet)!
8 |
9 | 4.3.0 (2025-05-09)
10 | ------------------
11 |
12 | * Add generic `--pg-options` to pass custom options to postgres.
13 | * Add option `--if-exists` for pg_dump command
14 | * Empty string as HOST for postgres unix domain socket connection is now supported.
15 | * Support Python 3.13 and Django 5.2
16 |
17 | 4.2.1 (2024-08-23)
18 | ------------------
19 |
20 | * Add --no-drop option to dbrestore command to prevent dropping tables before restoring data.
21 | * Fix bug where sqlite dbrestore would fail if field data contains the line break character.
22 |
23 | 4.2.0 (2024-08-22)
24 | ------------------
25 |
26 | * Default HOST to localhost for postgres databases.
27 | * Add PostgreSQL Schema support
28 | * Fix restore of database from S3 storage by reintroducing inputfile.seek(0) to utils.uncompress_file
29 | * Add warning for filenames with slashes in them
30 | * Fix bug where dbbackup management command would not respect settings.py:DBBACKUP_DATABASES
31 | * Remove usage of deprecated 'get_storage_class' function in newer Django versions
32 | * Add support for new STORAGES (Django 4.2+) setting under the 'dbbackup' alias
33 |
34 | 4.1.0 (2024-01-14)
35 | ------------------
36 |
37 | * Fix restore fail after editing filename
38 | * Drop python 3.6
39 | * update links
40 | * Update doc for backup directory consistency
41 | * RESTORE_PREFIX for RESTORE_SUFFIX
42 | * Support Django 4.1, 4.2 and Python 3.11
43 | * Support Python 3.12 and Django 5.0
44 |
45 | 4.0.2 (2022-09-27)
46 | ------------------
47 |
48 | * support for prometheus wrapped dbs
49 | * Fix backup of SQLite failing if there are Virtual Tables (e.g. FTS tables).
50 | * Closes #460: python-gnupg version increase breaks unencrypt_file func…
51 |
52 | 4.0.1 (2022-07-09)
53 | ---------------------
54 |
55 | * As of this version, dbbackup is now within Jazzband! This version tests our Jazzband release CI, and adds miscellaneous refactoring/cleanup.
56 | * Fix GitHub Actions configuration
57 | * Enable functional tests in CI
58 | * Update settings.py comment
59 | * Jazzband transfer tasks
60 | * Refactoring and tooling
61 |
62 | 4.0.0b0 (2021-12-19)
63 | --------------------
64 |
65 | * Fix RemovedInDjango41Warning related to default_app_config
66 | * Add authentication database support for MongoDB
67 | * Remove six dependency
68 | * Explicitly support Python 3.6+.
69 | * Drop support for end of life Django versions. Currently support 2.2, 3.2, 4.0.
70 | * Replace ugettext_lazy with gettext_lazy
71 | * Changed logging settings from settings.py to late init
72 | * Fix authentication error when postgres is password protected
73 | * Use exclude-table-data instead of exclude-table
74 | * Add support for exclude tables data in the command interface
75 | * Move author and version information into setup.py to allow building package in isolated environment (e.g. with the ``build`` package).
76 | * Documentation fixes
77 |
78 |
79 | 3.3.0 (2020-04-14)
80 | ------------------
81 |
82 | * Documentation fixes
83 | * "output-filename" in mediabackup command
84 | * Fixes for test infrastructure and mongodb support
85 | * sqlite3: don't throw warnings if table already exists
86 | * Fixes for django3 and updated travis (and File handling)
87 | * Restoring from FTP
88 | * Fixes to run dbbackup management command in Postgres for non-latin Windows.
89 | * Apply changes from pull request 244; Update to include sftp storage
90 | * Quick fix for proper selection of DB name to restore
91 |
--------------------------------------------------------------------------------
/docs/commands.rst:
--------------------------------------------------------------------------------
1 | ========
2 | Commands
3 | ========
4 |
5 | The primary usage of DBBackup is with command line tools. By default,
6 | commands will create backups and upload to your defined storage or download
7 | and restore the latest backup.
8 |
9 | Arguments can be passed to commands to compress/uncompress and encrypt/decrypt.
10 |
11 | dbbackup
12 | ========
13 |
14 |
15 | Backup of database. ::
16 |
17 | $ ./manage.py dbbackup
18 | Backing Up Database: /tmp/tmp.x0kN9sYSqk
19 | Backup size: 3.3 KiB
20 | Writing file to tmp-zuluvm-2016-07-29-100954.dump
21 |
22 | Help
23 | ~~~~
24 |
25 | .. djcommand:: dbbackup.management.commands.dbbackup
26 |
27 |
28 | dbrestore
29 | =========
30 |
31 | Restore a database. ::
32 | $ ./manage.py dbrestore
33 | Restoring backup for database: /tmp/tmp.x0kN9sYSqk
34 | Finding latest backup
35 | Restoring: tmp-zuluvm-2016-07-29-100954.dump
36 | Restore tempfile created: 3.3 KiB
37 |
38 | Help
39 | ~~~~
40 |
41 | .. djcommand:: dbbackup.management.commands.dbrestore
42 |
43 | mediabackup
44 | ===========
45 |
46 | Back up media files: gather them all into a tarball and optionally compress or encrypt it. ::
47 |
48 | $ ./manage.py mediabackup
49 | Backup size: 10.0 KiB
50 | Writing file to zuluvm-2016-07-04-081612.tar
51 |
52 | Help
53 | ~~~~
54 |
55 | .. djcommand:: dbbackup.management.commands.mediabackup
56 |
57 | mediarestore
58 | ============
59 |
60 | Restore media files: extract files from the archive and put them into the media storage. ::
61 |
62 | $ ./manage.py mediarestore
63 | Restoring backup for media files
64 | Finding latest backup
65 | Reading file zuluvm-2016-07-04-082551.tar
66 | Restoring: zuluvm-2016-07-04-082551.tar
67 | Backup size: 10.0 KiB
68 | Are you sure you want to continue? [Y/n]
69 | 2 file(s) restored
70 |
71 | Help
72 | ~~~~
73 |
74 | .. djcommand:: dbbackup.management.commands.mediarestore
75 |
76 | listbackups
77 | ===========
78 |
79 | This command lists backups and can filter them by content type (``'media'`` or
80 | ``'db'``), by compression, or by encryption.
81 |
82 | Help
83 | ~~~~
84 |
85 | .. djcommand:: dbbackup.management.commands.listbackups
86 |
--------------------------------------------------------------------------------
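All of the commands documented above can also be invoked programmatically, for example from a cron job or a custom management command. A minimal sketch using Django's ``call_command``; only the command names shown in this document are assumed, and filter options are left to each command's ``--help``:

    # Illustrative sketch only.
    from django.core.management import call_command

    # Equivalent to `./manage.py dbbackup` followed by `./manage.py listbackups`.
    call_command("dbbackup")
    call_command("listbackups")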
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #
2 | # django-dbbackup documentation build configuration file, created by
3 | # sphinx-quickstart on Sun May 18 13:35:53 2014.
4 | #
5 | # This file is execfile()d with the current directory set to its containing dir.
6 | #
7 | # Note that not all possible configuration values are present in this
8 | # autogenerated file.
9 | #
10 | # All configuration values have a default; values that are commented out
11 | # serve to show the default.
12 |
13 | import os
14 | import sys
15 |
16 | import dbbackup
17 |
18 | # If extensions (or modules to document with autodoc) are in another directory,
19 | # add these directories to sys.path here. If the directory is relative to the
20 | # documentation root, use os.path.abspath to make it absolute, like shown here.
21 | # sys.path.insert(0, os.path.abspath('.'))
22 |
23 | # -- General configuration -----------------------------------------------------
24 |
25 | # If your documentation needs a minimal Sphinx version, state it here.
26 | # needs_sphinx = '1.0'
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be extensions
29 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
30 | extensions = [
31 | "djcommanddoc",
32 | ]
33 |
34 | # Add any paths that contain templates here, relative to this directory.
35 | templates_path = ["_templates"]
36 |
37 | # The suffix of source filenames.
38 | source_suffix = ".rst"
39 |
40 | # The encoding of source files.
41 | # source_encoding = 'utf-8-sig'
42 |
43 | # The master toctree document.
44 | master_doc = "index"
45 |
46 | # General information about the project.
47 | project = "django-dbbackup"
48 | copyright = "Mark Bakhit"
49 |
50 | # basepath
51 | path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
52 |
53 | sys.path = [path] + sys.path
54 | sys.path = [os.path.join(path, "dbbackup")] + sys.path
55 |
56 | os.environ["DJANGO_SETTINGS_MODULE"] = "dbbackup.tests.settings"
57 |
58 |
59 | # The version info for the project you're documenting, acts as replacement for
60 | # |version| and |release|, also used in various other places throughout the
61 | # built documents.
62 | #
63 | # The short X.Y version.
64 | version = dbbackup.VERSION
65 | # The full version, including alpha/beta/rc tags.
66 | release = dbbackup.__version__
67 |
68 | # The language for content autogenerated by Sphinx. Refer to documentation
69 | # for a list of supported languages.
70 | # language = None
71 |
72 | # There are two options for replacing |today|: either, you set today to some
73 | # non-false value, then it is used:
74 | # today = ''
75 | # Else, today_fmt is used as the format for a strftime call.
76 | # today_fmt = '%B %d, %Y'
77 |
78 | # List of patterns, relative to source directory, that match files and
79 | # directories to ignore when looking for source files.
80 | exclude_patterns = ["_build"]
81 |
82 | # The reST default role (used for this markup: `text`) to use for all documents.
83 | # default_role = None
84 |
85 | # If true, '()' will be appended to :func: etc. cross-reference text.
86 | # add_function_parentheses = True
87 |
88 | # If true, the current module name will be prepended to all description
89 | # unit titles (such as .. function::).
90 | # add_module_names = True
91 |
92 | # If true, sectionauthor and moduleauthor directives will be shown in the
93 | # output. They are ignored by default.
94 | # show_authors = False
95 |
96 | # The name of the Pygments (syntax highlighting) style to use.
97 | pygments_style = "sphinx"
98 |
99 | # A list of ignored prefixes for module index sorting.
100 | # modindex_common_prefix = []
101 |
102 |
103 | # -- Options for HTML output ---------------------------------------------------
104 |
105 | html_theme = "sphinx_rtd_theme"
106 |
107 |
108 | # Theme options are theme-specific and customize the look and feel of a theme
109 | # further. For a list of options available for each theme, see the
110 | # documentation.
111 | # html_theme_options = {}
112 |
113 | # Add any paths that contain custom themes here, relative to this directory.
114 | # html_theme_path = []
115 |
116 | # The name for this set of Sphinx documents. If None, it defaults to
117 | # "<project> v<release> documentation".
118 | # html_title = None
119 |
120 | # A shorter title for the navigation bar. Default is the same as html_title.
121 | # html_short_title = None
122 |
123 | # The name of an image file (relative to this directory) to place at the top
124 | # of the sidebar.
125 | # html_logo = None
126 |
127 | # The name of an image file (within the static path) to use as favicon of the
128 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
129 | # pixels large.
130 | # html_favicon = None
131 |
132 | # Add any paths that contain custom static files (such as style sheets) here,
133 | # relative to this directory. They are copied after the builtin static files,
134 | # so a file named "default.css" will overwrite the builtin "default.css".
135 | html_static_path = ["_static"]
136 |
137 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
138 | # using the given strftime format.
139 | # html_last_updated_fmt = '%b %d, %Y'
140 |
141 | # If true, SmartyPants will be used to convert quotes and dashes to
142 | # typographically correct entities.
143 | # html_use_smartypants = True
144 |
145 | # Custom sidebar templates, maps document names to template names.
146 | # html_sidebars = {}
147 |
148 | # Additional templates that should be rendered to pages, maps page names to
149 | # template names.
150 | # html_additional_pages = {}
151 |
152 | # If false, no module index is generated.
153 | # html_domain_indices = True
154 |
155 | # If false, no index is generated.
156 | # html_use_index = True
157 |
158 | # If true, the index is split into individual pages for each letter.
159 | # html_split_index = False
160 |
161 | # If true, links to the reST sources are added to the pages.
162 | # html_show_sourcelink = True
163 |
164 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
165 | # html_show_sphinx = True
166 |
167 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
168 | # html_show_copyright = True
169 |
170 | # If true, an OpenSearch description file will be output, and all pages will
171 | # contain a <link> tag referring to it.  The value of this option must be the
172 | # base URL from which the finished HTML is served.
173 | # html_use_opensearch = ''
174 |
175 | # This is the file name suffix for HTML files (e.g. ".xhtml").
176 | # html_file_suffix = None
177 |
178 | # Output file base name for HTML help builder.
179 | htmlhelp_basename = "django-dbbackupdoc"
180 |
181 |
182 | # -- Options for LaTeX output --------------------------------------------------
183 |
184 | latex_elements = {
185 | # The paper size ('letterpaper' or 'a4paper').
186 | # 'papersize': 'letterpaper',
187 | # The font size ('10pt', '11pt' or '12pt').
188 | # 'pointsize': '10pt',
189 | # Additional stuff for the LaTeX preamble.
190 | # 'preamble': '',
191 | }
192 |
193 | # Grouping the document tree into LaTeX files. List of tuples
194 | # (source start file, target name, title, author, documentclass [howto/manual]).
195 | latex_documents = [
196 | (
197 | "index",
198 | "django-dbbackup.tex",
199 | "django-dbbackup Documentation",
200 | "Michael Shepanski",
201 | "manual",
202 | ),
203 | ]
204 |
205 | # The name of an image file (relative to this directory) to place at the top of
206 | # the title page.
207 | # latex_logo = None
208 |
209 | # For "manual" documents, if this is true, then toplevel headings are parts,
210 | # not chapters.
211 | # latex_use_parts = False
212 |
213 | # If true, show page references after internal links.
214 | # latex_show_pagerefs = False
215 |
216 | # If true, show URL addresses after external links.
217 | # latex_show_urls = False
218 |
219 | # Documents to append as an appendix to all manuals.
220 | # latex_appendices = []
221 |
222 | # If false, no module index is generated.
223 | # latex_domain_indices = True
224 |
225 |
226 | # -- Options for manual page output --------------------------------------------
227 |
228 | # One entry per manual page. List of tuples
229 | # (source start file, name, description, authors, manual section).
230 | man_pages = [
231 | (
232 | "index",
233 | "django-dbbackup",
234 | "django-dbbackup Documentation",
235 | ["Michael Shepanski"],
236 | 1,
237 | )
238 | ]
239 |
240 | # If true, show URL addresses after external links.
241 | # man_show_urls = False
242 |
243 |
244 | # -- Options for Texinfo output ------------------------------------------------
245 |
246 | # Grouping the document tree into Texinfo files. List of tuples
247 | # (source start file, target name, title, author,
248 | # dir menu entry, description, category)
249 | texinfo_documents = [
250 | (
251 | "index",
252 | "django-dbbackup",
253 | "django-dbbackup Documentation",
254 | "Michael Shepanski",
255 | "django-dbbackup",
256 | "One line description of project.",
257 | "Miscellaneous",
258 | ),
259 | ]
260 |
261 | # Documents to append as an appendix to all manuals.
262 | # texinfo_appendices = []
263 |
264 | # If false, no module index is generated.
265 | # texinfo_domain_indices = True
266 |
267 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
268 | # texinfo_show_urls = 'footnote'
269 |
--------------------------------------------------------------------------------
/docs/configuration.rst:
--------------------------------------------------------------------------------
1 | Configuration
2 | =============
3 |
4 | General settings
5 | ----------------
6 |
7 | DBBACKUP_DATABASES
8 | ~~~~~~~~~~~~~~~~~~
9 |
10 | List of key entries for ``settings.DATABASES`` which shall be used to
11 | connect and create database backups.
12 |
13 | Default: ``list(settings.DATABASES.keys())`` (keys of all entries listed)
14 |
15 | DBBACKUP_TMP_DIR
16 | ~~~~~~~~~~~~~~~~
17 |
18 | Directory to be used in local filesystem for temporary files.
19 |
20 | Default: ``tempfile.gettempdir()``
21 |
22 | DBBACKUP_TMP_FILE_MAX_SIZE
23 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
24 |
25 | Maximum size in bytes for file handling in memory before a temporary
26 | file is written in ``DBBACKUP_TMP_DIR``.
27 |
28 | Default: ``10*1024*1024``
29 |
30 |
31 | DBBACKUP_CLEANUP_KEEP and DBBACKUP_CLEANUP_KEEP_MEDIA
32 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
33 |
34 | When issuing ``dbbackup`` and ``mediabackup`` with the ``--clean`` option,
35 | only this number of the most recent backups is kept; older files are removed.
36 |
37 | Default: ``10`` (backups)
38 |
39 |
40 | DBBACKUP_CLEANUP_KEEP_FILTER
41 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
42 |
43 | A callable that takes a filename (of an old backup, to be cleaned) and returns
44 | a boolean indicating whether the backup should be kept (``True``) or deleted
45 | (``False``).
46 |
47 | Default: ``lambda filename: False``
48 |
49 | This can be used to keep monthly backups, for example.
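
A minimal sketch of such a callable (the date parsing below assumes the default
``DBBACKUP_DATE_FORMAT`` and is only illustrative): ::

    import re
    from datetime import datetime

    def keep_monthly(filename):
        # Backups taken on the first day of a month are kept forever.
        match = re.search(r"\d{4}-\d{2}-\d{2}-\d{6}", filename)
        if not match:
            return False
        return datetime.strptime(match.group(), "%Y-%m-%d-%H%M%S").day == 1

    DBBACKUP_CLEANUP_KEEP_FILTER = keep_monthly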
50 |
51 |
52 | DBBACKUP_DATE_FORMAT
53 | ~~~~~~~~~~~~~~~~~~~~
54 |
55 | Date format to use for naming files. It must contain only alphanumerical
56 | characters, ``'_'``, ``'-'`` or ``'%'``.
57 |
58 | Default: ``'%Y-%m-%d-%H%M%S'``
59 |
60 |
61 | DBBACKUP_HOSTNAME
62 | ~~~~~~~~~~~~~~~~~
63 |
64 | Used to identify a backup by a server name in the file name.
65 |
66 | Default: ``socket.gethostname()``
67 |
68 |
69 | DBBACKUP_FILENAME_TEMPLATE
70 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
71 |
72 | The template to use when generating the backup filename. By default this is
73 | ``'{databasename}-{servername}-{datetime}.{extension}'``. This setting can
74 | also be made a function which takes the following keyword arguments:
75 |
76 | ::
77 |
78 | def backup_filename(databasename, servername, datetime, extension, content_type):
79 | pass
80 |
81 | DBBACKUP_FILENAME_TEMPLATE = backup_filename
82 |
83 | This allows you to modify the entire format of the filename. For example, if
84 | you want to take advantage of Amazon S3's automatic expiry feature, you can
85 | prefix your backups differently based on when you want them to expire.
86 |
87 | ``{datetime}`` is rendered with ``DBBACKUP_DATE_FORMAT``.
88 |
89 |
90 | DBBACKUP_MEDIA_FILENAME_TEMPLATE
91 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
92 |
93 | Same as ``DBBACKUP_FILENAME_TEMPLATE`` but for media files backups.
94 |
95 |
96 | Encrypting your backups
97 | -----------------------
98 |
99 | Considering that you might be putting secure data on external servers and
100 | perhaps untrusted servers where it gets forgotten over time, it's always a
101 | good idea to encrypt backups.
102 |
103 | Just remember to keep the encryption keys safe, too!
104 |
105 |
106 | PGP
107 | ~~~
108 |
109 | You can encrypt a backup with the ``--encrypt`` option. The backup is done
110 | using GPG. ::
111 |
112 | python manage.py dbbackup --encrypt
113 |
114 | ...or when restoring from an encrypted backup: ::
115 |
116 | python manage.py dbrestore --decrypt
117 |
118 |
119 | Requirements:
120 |
121 | - Install the Python package python-gnupg:
122 | ``pip install python-gnupg>=0.5.0``.
123 | - You need a GPG key. (`GPG manual`_)
124 | - Set the setting ``DBBACKUP_GPG_RECIPIENT`` to the name of the GPG key.
125 |
126 | .. _`GPG manual`: https://www.gnupg.org/gph/en/manual/c14.html
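
A minimal settings sketch (the recipient name is illustrative): ::

    DBBACKUP_GPG_RECIPIENT = "backups@example.com"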
127 |
128 |
129 | DBBACKUP_GPG_ALWAYS_TRUST
130 | ~~~~~~~~~~~~~~~~~~~~~~~~~
131 |
132 | The encryption of the backup file fails if GPG does not trust the public
133 | encryption key. The solution is to set the option 'trust-model' to 'always'.
134 | By default this value is ``False``. Set this to ``True`` to enable this option.
135 |
136 |
137 | DBBACKUP_GPG_RECIPIENT
138 | ~~~~~~~~~~~~~~~~~~~~~~
139 |
140 | The name of the key that is used for encryption. This setting is only used
141 | when making a backup with the ``--encrypt`` or ``--decrypt`` option.
142 |
143 |
144 | Email configuration
145 | -------------------
146 |
147 | DBBACKUP_SEND_EMAIL
148 | ~~~~~~~~~~~~~~~~~~~
149 |
150 | Controls whether or not django-dbbackup sends an error email when an uncaught
151 | exception is received.
152 |
153 | Default: ``True``
154 |
155 |
156 | DBBACKUP_SERVER_EMAIL
157 | ~~~~~~~~~~~~~~~~~~~~~
158 |
159 | The email address that error messages come from, such as those sent to
160 | ``DBBACKUP_ADMINS``.
161 |
162 | Default: ``django.conf.settings.SERVER_EMAIL``
163 |
164 |
165 | DBBACKUP_ADMINS
166 | ~~~~~~~~~~~~~~~
167 |
168 | A list of all the people who get code error notifications. When ``DEBUG=False``
169 | and an operation raises an exception, DBBackup will email these people with the
170 | full exception information. This should be a list of
171 | ``(Full name, email address)`` tuples, like Django's ``ADMINS`` setting.
172 |
173 | Default: ``django.conf.settings.ADMINS``
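
For example, a minimal sketch: ::

    DBBACKUP_ADMINS = [("Alice Admin", "alice@example.com")]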
174 |
175 | .. warning::
176 |
177 | ``DBBACKUP_FAILURE_RECIPIENTS`` was used previously and is now deprecated.
178 |
179 |
180 |
181 | DBBACKUP_EMAIL_SUBJECT_PREFIX
182 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
183 |
184 | Subject-line prefix for email messages sent by DBBackup.
185 |
186 | Default: ``'[dbbackup] '``
187 |
188 |
189 | Database configuration
190 | ----------------------
191 |
192 | By default, DBBackup uses parameters from ``settings.DATABASES`` but you can
193 | make an independent configuration, see `Database settings`_
194 |
195 |
196 | Storage configuration
197 | ---------------------
198 |
199 | You have to use a storage for your backups, see `Storage settings`_ for more.
200 |
201 | .. _`Database settings`: databases.html
202 | .. _`Storage settings`: storage.html
203 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | Contributing guide
2 | ==================
3 |
4 | DBBackup is free software and all help is welcome. This documentation
5 | aims to help users and developers bring their contributions to this
6 | project.
7 |
8 | Submit a bug, issue or enhancement
9 | ----------------------------------
10 |
11 | All communication is handled through `GitHub issues`_. Do not hesitate to open
12 | an issue if:
13 |
14 | - You have an improvement idea
15 | - You found a bug
16 | - You've got a question
17 | - More generally, something seems wrong to you
18 |
19 | .. _`GitHub issues`: https://github.com/jazzband/django-dbbackup/issues
20 |
21 | Make a patch
22 | ------------
23 |
24 | We use `GitHub pull requests`_ to manage all patches. For smoother handling
25 | of requests, we advise you to:
26 |
27 | #. Fork the project and make a new branch
28 | #. Make your changes with tests if possible and documentation if needed
29 | #. Push changes to your forked repository and test them with Travis
30 | #. If it succeeds, open a pull request
31 | #. Bother us until we give you an answer
32 |
33 | .. note::
34 |
35 | We advise you to run the tests with Python 2 & 3 before pushing, and to try
36 | them on Travis. DBBackup performs a lot of file operations, so breakage
37 | between Python versions is easy to introduce.
38 |
39 | .. _`GitHub pull requests`: https://github.com/jazzband/django-dbbackup/pulls
40 |
41 | Test environment
42 | ----------------
43 |
44 | We provide tools to help developers quickly test and develop DBBackup.
45 | There are two major scripts:
46 |
47 | * ``runtests.py``: Unit tests launcher and equivalent of ``manage.py`` in
48 | the test project.
49 | * ``functional.sh``: Shell script that uses ``runtests.py`` to create a
50 | database backup and restore it, does the same with media files, and checks
51 | that they are restored correctly.
52 |
53 |
54 | ``runtests.py``
55 | ~~~~~~~~~~~~~~~
56 |
57 | You can test code on your local machine with the ``runtests.py`` script: ::
58 |
59 | python runtests.py
60 |
61 | But if arguments are provided, it acts as ``manage.py``, so you can simply
62 | launch other commands for deeper testing, for example: ::
63 |
64 | # Enter in Python shell
65 | python runtests.py shell
66 |
67 | # Launch a particular test module
68 | python runtests.py test dbbackup.tests.test_utils
69 |
70 | All tests are stored in ``dbbackup.tests``.
71 |
72 |
73 | ``functional.sh``
74 | ~~~~~~~~~~~~~~~~~
75 |
76 | It tests, at a higher level, whether the backup/restore mechanism works
77 | correctly. It becomes powerful because of the configuration you can give it.
78 | See the next chapter for an explanation of that configuration.
79 |
80 |
81 | Configuration
82 | ~~~~~~~~~~~~~
83 |
84 | DBBackup contains a test Django project at ``dbbackup.tests`` and its
85 | ``settings`` module. This configuration reads the following
86 | environment variables:
87 |
88 | **DB_ENGINE** - Default: ``django.db.backends.sqlite3``
89 |
90 | Database engine to use. See ``django.db.backends`` for the default backends.
91 |
92 | **DB_NAME** - Default: ``:memory:``
93 |
94 | Database name. Should be set correctly if a db other than sqlite3 is used.
95 |
96 | **DB_USER** - Default: ``None``
97 |
98 | DB Username
99 |
100 | **DB_PASSWORD** - Default: ``None``
101 |
102 | DB Password
103 |
104 | **DB_HOST** - Default: ``None``
105 |
106 | DB Host
107 |
108 |
109 | **MEDIA_ROOT** - Default: ``tempfile.mkdtemp()``
110 |
111 | Django's ``MEDIA_ROOT``; useful if you want to test media backup from the filesystem.
112 |
113 | **STORAGE** - Default: ``dbbackup.tests.utils.FakeStorage``
114 |
115 | Storage used for backups
116 |
117 | **STORAGE_OPTIONS**
118 |
119 | Options for instantiating the chosen storage. It must be formatted as
120 | ``"key1=foo,key2=bar"`` and will be converted into a ``dict``.
121 |
122 | Online CI
123 | ---------
124 |
125 | We use `Travis`_, which tests DBBackup against a matrix of component versions: several versions of Django and several versions of Python, including 2, 3 and PyPy.
126 |
127 | .. image:: https://api.travis-ci.org/jazzband/django-dbbackup.svg
128 | :target: https://travis-ci.org/jazzband/django-dbbackup
129 |
130 | Code coverage is tracked with `Coveralls`_; the project does not yet enforce a minimum coverage limit.
131 |
132 | .. image:: https://coveralls.io/repos/jazzband/django-dbbackup/badge.svg?branch=master&service=github
133 | :target: https://coveralls.io/github/jazzband/django-dbbackup?branch=master
134 |
135 | Code health is checked with `Landscape`_
136 |
137 | .. image:: https://landscape.io/github/jazzband/django-dbbackup/master/landscape.svg?style=flat
138 | :target: https://landscape.io/github/jazzband/django-dbbackup/master
139 | :alt: Code Health
140 |
141 | .. _Travis: https://travis-ci.org/jazzband/django-dbbackup
142 | .. _Coveralls: https://coveralls.io/github/jazzband/django-dbbackup
143 | .. _Landscape: https://landscape.io/github/jazzband/django-dbbackup/
144 |
--------------------------------------------------------------------------------
/docs/databases.rst:
--------------------------------------------------------------------------------
1 | Database settings
2 | =================
3 |
4 | The following databases are supported by this application:
5 |
6 | - SQLite
7 | - MySQL
8 | - PostgreSQL
9 | - MongoDB
10 | - ...and any other that you might implement
11 |
12 | By default, DBBackup will try to use your database settings in ``DATABASES``
13 | to handle the database, but some databases require custom options so you might
14 | want to use different parameters for backups. That's why we included a
15 | ``DBBACKUP_CONNECTORS`` setting; it follows the form of the Django ``DATABASES`` setting: ::
16 |
17 | DBBACKUP_CONNECTORS = {
18 | 'default': {
19 | 'USER': 'backupuser',
20 | 'PASSWORD': 'backuppassword',
21 | 'HOST': 'replica-for-backup'
22 | }
23 | }
24 |
25 | This configuration will allow you to use a replica with a different host and user,
26 | which is a great practice if you don't want to overload your main database.
27 |
28 | DBBackup uses ``Connectors`` for creating and restoring backups; below you'll see
29 | specific parameters for the built-in ones.
30 |
31 | Common
32 | ------
33 |
34 | All connectors have the following parameters:
35 |
36 | CONNECTOR
37 | ~~~~~~~~~
38 |
39 | Absolute dotted path to a connector class. The default depends on the database engine:
40 |
41 | - :class:`dbbackup.db.sqlite.SqliteConnector` for ``'django.db.backends.sqlite3'``
42 | - :class:`dbbackup.db.mysql.MysqlDumpConnector` for ``django.db.backends.mysql``
43 | - :class:`dbbackup.db.postgresql.PgDumpConnector` for ``django.db.backends.postgresql``
44 | - :class:`dbbackup.db.postgresql.PgDumpGisConnector` for ``django.contrib.gis.db.backends.postgis``
45 | - :class:`dbbackup.db.mongodb.MongoDumpConnector` for ``django_mongodb_engine``
46 |
47 | All supported built-in connectors are described in more detail below.
48 |
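To force a specific connector for a given database, set the ``CONNECTOR`` key;
for example, a minimal sketch: ::

    DBBACKUP_CONNECTORS = {
        'default': {
            'CONNECTOR': 'dbbackup.db.postgresql.PgDumpBinaryConnector',
        }
    }
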
49 | The following database wrappers from the ``django-prometheus`` module are supported:
50 |
51 | - :class:`dbbackup.db.postgresql.PgDumpBinaryConnector` for ``django_prometheus.db.backends.postgresql``
52 | - :class:`dbbackup.db.sqlite.SqliteConnector` for ``django_prometheus.db.backends.sqlite3``
53 | - :class:`dbbackup.db.mysql.MysqlDumpConnector` for ``django_prometheus.db.backends.mysql``
54 | - :class:`dbbackup.db.postgresql.PgDumpGisConnector` for ``django_prometheus.db.backends.postgis``
55 |
56 | EXCLUDE
57 | ~~~~~~~
58 |
59 | Tables to exclude from the backup, given as a list. This option may be
60 | unavailable for connectors that make snapshots.
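
For example, a minimal sketch (the table names are illustrative): ::

    DBBACKUP_CONNECTORS = {
        'default': {
            'EXCLUDE': ['django_session', 'audit_log'],
        }
    }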
61 |
62 | EXTENSION
63 | ~~~~~~~~~
64 |
65 | Extension of the backup file name; default is ``'dump'``.
66 |
67 | Command connectors
68 | ------------------
69 |
70 | Some connectors use a command line tool as a dump engine, ``mysqldump`` for
71 | example. These kinds of tools have common attributes:
72 |
73 | DUMP_CMD
74 | ~~~~~~~~
75 |
76 | Path to the command used to create a backup; the default is the appropriate
77 | command expected to be found in your ``PATH``, for example ``'mysqldump'`` for MySQL.
78 |
79 | This setting is useful only for connectors using command line tools (children
80 | of :class:`dbbackup.db.base.BaseCommandDBConnector`).
81 |
82 | RESTORE_CMD
83 | ~~~~~~~~~~~
84 |
85 | Same as ``DUMP_CMD`` but used when restoring.
86 |
87 | DUMP_PREFIX and RESTORE_PREFIX
88 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
89 |
90 | String to include as a prefix of the dump or restore command. A space is added
91 | between the prefix and the launched command.
92 |
93 | DUMP_SUFFIX and RESTORE_SUFFIX
94 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
95 |
96 | String to include as a suffix of the dump or restore command. A space is added
97 | between the launched command and its suffix.
98 |
99 | ENV, DUMP_ENV and RESTORE_ENV
100 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
101 |
102 | Environment variables used while running commands; the default is ``{}``. ``ENV``
103 | is used for every command; ``DUMP_ENV`` and ``RESTORE_ENV`` override the
104 | values defined in ``ENV`` for the dedicated commands.
105 |
106 | USE_PARENT_ENV
107 | ~~~~~~~~~~~~~~
108 |
109 | Specifies whether the connector will use its parent's environment variables. By
110 | default it is ``True``, to keep ``PATH``.
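
For example, a minimal sketch combining these options (the flag and the
environment variable below are illustrative): ::

    DBBACKUP_CONNECTORS = {
        'default': {
            # Appended after the dump command, e.g. an extra mysqldump flag.
            'DUMP_SUFFIX': '--single-transaction',
            # Extra environment variables for every launched command.
            'ENV': {'LANG': 'C.UTF-8'},
        }
    }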
111 |
112 | SQLite
113 | ------
114 |
115 | SQLite uses by default :class:`dbbackup.db.sqlite.SqliteConnector`.
116 |
117 | SqliteConnector
118 | ~~~~~~~~~~~~~~~
119 |
120 | It is written in pure Python and mimics the behavior of the ``.dump`` command
121 | for creating a SQL dump.
122 |
123 | SqliteCPConnector
124 | ~~~~~~~~~~~~~~~~~
125 |
126 | You can also use :class:`dbbackup.db.sqlite.SqliteCPConnector` for making a
127 | simple raw copy of your database file, like a snapshot.
128 |
129 | In-memory databases can't be dumped with it.
130 |
131 | MySQL
132 | -----
133 |
134 | MySQL uses by default :class:`dbbackup.db.mysql.MysqlDumpConnector`. It uses
135 | ``mysqldump`` and ``mysql`` for its job.
136 |
137 | PostgreSQL
138 | ----------
139 |
140 | Postgres uses by default :class:`dbbackup.db.postgresql.PgDumpConnector`, but
141 | we advise you to use :class:`dbbackup.db.postgresql.PgDumpBinaryConnector`. The
142 | first one uses ``pg_dump`` and ``psql`` for its job, creating raw SQL files.
143 |
144 | The second uses ``pg_restore`` with binary dump files.
145 |
146 | They can also use ``psql`` for launching administration commands.
147 |
148 | SINGLE_TRANSACTION
149 | ~~~~~~~~~~~~~~~~~~
150 |
151 | When doing a restore, wrap everything in a single transaction, so errors
152 | cause a rollback.
153 |
154 | This corresponds to ``--single-transaction`` argument of ``psql`` and
155 | ``pg_restore``.
156 |
157 | Default: ``True``
158 |
159 | DROP
160 | ~~~~
161 |
162 | With ``PgDumpConnector``, this includes table-dropping statements in the dump
163 | file. ``PgDumpBinaryConnector`` drops objects at restore time.
164 |
165 | This corresponds to ``--clean`` argument of ``pg_dump`` and ``pg_restore``.
166 |
167 | Default: ``True``
168 |
169 | IF_EXISTS
170 | ~~~~~~~~~
171 |
172 | Use DROP ... IF EXISTS commands to drop objects in ``--clean`` mode of ``pg_dump``.
173 |
174 | Default: ``False``
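
For example, a minimal sketch combining the PostgreSQL connector options
(the values are illustrative): ::

    DBBACKUP_CONNECTORS = {
        'default': {
            'CONNECTOR': 'dbbackup.db.postgresql.PgDumpBinaryConnector',
            'SINGLE_TRANSACTION': False,
            'IF_EXISTS': True,
        }
    }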
175 |
176 | PostGIS
177 | -------
178 |
179 | Set in :class:`dbbackup.db.postgresql.PgDumpGisConnector`, it does the same as
180 | PostgreSQL but launches ``CREATE EXTENSION IF NOT EXISTS postgis;`` before
181 | restoring the database.
182 |
183 | PSQL_CMD
184 | ~~~~~~~~
185 |
186 | Path to the ``psql`` command used for administration tasks, such as enabling
187 | PostGIS; the default is ``psql``.
188 |
189 |
190 | PASSWORD
191 | ~~~~~~~~
192 |
193 | If you set this, the ``PGPASSWORD`` environment variable will be used with
194 | every command. For security reasons, we advise using a ``.pgpass`` file instead.
195 |
196 | ADMIN_USER
197 | ~~~~~~~~~~
198 |
199 | Username used to launch actions with privileges, for example creating an
200 | extension.
201 |
202 | ADMIN_PASSWORD
203 | ~~~~~~~~~~~~~~
204 |
205 | Password used to launch actions with privileges, for example creating an
206 | extension.
207 |
208 | SCHEMAS
209 | ~~~~~~~
210 |
211 | Specify schemas for database dumps by using a pattern-matching option,
212 | including both the selected schema and its contained objects.
213 | If not specified, the default behavior is to dump all non-system schemas in the target database.
214 | This feature is exclusive to PostgreSQL connectors, and users can choose multiple schemas for a customized dump.
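
For example, a minimal sketch (the schema names are illustrative): ::

    DBBACKUP_CONNECTORS = {
        'default': {
            'SCHEMAS': ['public', 'reporting'],
        }
    }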
215 |
216 | MongoDB
217 | -------
218 |
219 | MongoDB uses by default :class:`dbbackup.db.mongodb.MongoDumpConnector`. It
220 | uses ``mongodump`` and ``mongorestore`` for its job.
221 |
222 | For AuthEnabled MongoDB Connection, you need to add one custom option ``AUTH_SOURCE`` in your ``DBBACKUP_CONNECTORS``. ::
223 |
224 | DBBACKUP_CONNECTORS = {
225 | 'default': {
226 | ...
227 | 'AUTH_SOURCE': 'admin',
228 | }
229 | }
230 |
231 | Or in ``DATABASES`` one: ::
232 |
233 | DATABASES = {
234 | 'default': {
235 | ...
236 | 'AUTH_SOURCE': 'admin',
237 | }
238 | }
239 |
240 |
241 | OBJECT_CHECK
242 | ~~~~~~~~~~~~
243 |
244 | Validate documents before inserting them into the database (option ``--objcheck`` on the command line); default is ``True``.
245 |
246 | DROP
247 | ~~~~
248 |
249 | Replace objects that are already in the database (option ``--drop`` on the command line); default is ``True``.
250 |
251 | Custom connector
252 | ----------------
253 |
254 | Creating your own connector is easy: create a child class of
255 | :class:`dbbackup.db.base.BaseDBConnector` and implement ``_create_dump`` and
256 | ``_restore_dump``. If your connector uses a command line tool, inherit from
257 | :class:`dbbackup.db.base.BaseCommandDBConnector` instead.
258 |
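A minimal sketch of such a child class (it assumes the ``run_command`` helper
and the ``settings`` attribute provided by
:class:`dbbackup.db.base.BaseCommandDBConnector`; the commands are illustrative): ::

    from dbbackup.db.base import BaseCommandDBConnector

    class PlainPgConnector(BaseCommandDBConnector):
        """Dump with pg_dump and restore by piping the dump into psql."""

        def _create_dump(self):
            # run_command returns (stdout, stderr) file-like objects.
            stdout, stderr = self.run_command("pg_dump " + self.settings["NAME"])
            return stdout

        def _restore_dump(self, dump):
            return self.run_command("psql " + self.settings["NAME"], stdin=dump)
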
259 | Connecting a Custom connector
260 | -----------------------------
261 |
262 | Here is an example of how to connect a custom connector that you have created, or one that you simply want to reuse: ::
263 |
264 | DBBACKUP_CONNECTOR_MAPPING = {
265 | 'transaction_hooks.backends.postgis': 'dbbackup.db.postgresql.PgDumpGisConnector',
266 | }
267 |
268 | Instead of :class:`dbbackup.db.postgresql.PgDumpGisConnector` you can of course
269 | use the custom connector you have created yourself; ``transaction_hooks.backends.postgis``
270 | is simply the name of the database engine you are using.
271 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. building docs: cd django-dbbackup/docs && make html
2 |
3 | Django Database Backup
4 | ======================
5 |
6 | This Django application provides management commands to help backup and
7 | restore your project database and media files with various storages such as
8 | Amazon S3, DropBox or local file system.
9 |
10 | It is made to:
11 |
12 | - Allow you to secure your backup with GPG signature and encryption
13 | - Archive with compression
14 | - Deal easily with remote archiving
15 | - Keep your development database up to date
16 | - Use Crontab or Celery to setup automated backups
17 |
18 | Contents:
19 |
20 | .. toctree::
21 | :maxdepth: 1
22 |
23 | installation
24 | configuration
25 | databases
26 | storage
27 | commands
28 | integration
29 | contributing
30 | changelog
31 |
32 | Compatibility
33 | -------------
34 |
35 | As we want to ensure a lot of platforms will be able to save data before
36 | upgrading, Django-DBBackup supports PyPy, Python 3.7 and later, and Django
37 | 3.2 and later.
38 |
39 | Other Resources
40 | ===============
41 |
42 | * `GitHub repository`_
43 | * `PyPI project`_
44 | * `Read The Docs`_
45 | * `GitHub issues`_
46 | * `GitHub pull requests`_
47 | * `Coveralls`_
48 |
49 | .. _`GitHub repository`: https://github.com/jazzband/django-dbbackup
50 | .. _`PyPI project`: https://pypi.python.org/pypi/django-dbbackup/
51 | .. _`Read The Docs`: https://django-dbbackup.readthedocs.org/
52 | .. _`GitHub issues`: https://github.com/jazzband/django-dbbackup/issues
53 | .. _`GitHub pull requests`: https://github.com/jazzband/django-dbbackup/pulls
54 | .. _`Coveralls`: https://coveralls.io/github/jazzband/django-dbbackup
55 |
56 | Indices and tables
57 | ==================
58 |
59 | * :ref:`genindex`
60 | * :ref:`modindex`
61 | * :ref:`search`
62 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | Installing on your system
5 | -------------------------
6 |
7 | Getting the latest stable release
8 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
9 |
10 | ::
11 |
12 | pip install django-dbbackup
13 |
14 | Getting the latest release from trunk
15 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
16 |
17 | In general, you should not be downloading and installing stuff
18 | directly from repositories -- especially not if you are backing
19 | up sensitive data.
20 |
21 | Security is important; bypassing PyPI is a bad habit, because it skips the
22 | fragile key-signature authentication that is at least present when using
23 | PyPI.
24 |
25 | ::
26 |
27 | pip install -e git+https://github.com/jazzband/django-dbbackup.git#egg=django-dbbackup
28 |
29 |
30 | Add it in your project
31 | ----------------------
32 |
33 | In your ``settings.py``, make sure you have the following things: ::
34 |
35 | INSTALLED_APPS = (
36 | ...
37 | 'dbbackup', # django-dbbackup
38 | )
39 |
40 | DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage'
41 | DBBACKUP_STORAGE_OPTIONS = {'location': '/my/backup/dir/'}
42 |
43 | Create the backup directory: ::
44 |
45 | mkdir /my/backup/dir/
46 |
47 | .. note::
48 |
49 | This configuration uses filesystem storage, but you can use any storage
50 | supported by Django API. See `Storage settings`_ for more information about it.
51 |
52 | .. _`Storage settings`: storage.html
53 |
54 |
55 | Testing that everything worked
56 | ------------------------------
57 |
58 | Now, you should be able to create your first backup by running: ::
59 |
60 | $ python manage.py dbbackup
61 |
62 | If your database is called ``default``, which is the normal Django behaviour
63 | for a single-database project, you should now see a new file in your backup
64 | directory.
65 |
--------------------------------------------------------------------------------
/docs/integration.rst:
--------------------------------------------------------------------------------
1 | Integration tutorials
2 | =====================
3 |
4 | .. note::
5 |
6 | If you have a custom and/or interesting way of using DBBackup, do not
7 | hesitate to make a pull request.
8 |
9 | Django-cron
10 | -----------
11 |
12 | Example of cron job with `django-cron`_ with file system storage: ::
13 |
14 | import os
15 | from django.core import management
16 | from django.conf import settings
17 | from django_cron import CronJobBase, Schedule
18 |
19 |
20 | class Backup(CronJobBase):
21 | RUN_AT_TIMES = ['6:00', '18:00']
22 | schedule = Schedule(run_at_times=RUN_AT_TIMES)
23 | code = 'my_app.Backup'
24 |
25 | def do(self):
26 | management.call_command('dbbackup')
27 |
28 | .. _`django-cron`: https://github.com/Tivix/django-cron
29 |
30 | Django-crontab
31 | --------------
32 |
33 | Example of cron job with `django-crontab`_ with file system storage:
34 |
35 | In `settings.py`: ::
36 |
37 | CRONTAB_COMMAND_SUFFIX = '2>&1'
38 | CRONJOBS = [
39 | ('0 5 * * *', 'core.backup.backup_job', '>> ' + os.path.join(CORE_DIR, 'backup/backup.log'))
40 | ]
41 |
42 | In `backup.py`: ::
43 |
44 | from datetime import datetime
45 | from django.core import management
46 |
47 | def backup_job():
48 | print("[{}] Backing up database and media files...".format(datetime.now()))
49 | management.call_command('dbbackup', '--clean')
50 | management.call_command('mediabackup', '--clean')
51 | print("[{}] Backup done!".format(datetime.now()))
52 |
53 |
54 | To add the cron job: ::
55 |
56 | python manage.py crontab add
57 |
58 | .. _`django-crontab`: https://github.com/kraiz/django-crontab
59 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. linkcheck to check all external links for integrity
37 | echo. doctest to run all doctests embedded in the documentation if enabled
38 | goto end
39 | )
40 |
41 | if "%1" == "clean" (
42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
43 | del /q /s %BUILDDIR%\*
44 | goto end
45 | )
46 |
47 | if "%1" == "html" (
48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
49 | if errorlevel 1 exit /b 1
50 | echo.
51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
52 | goto end
53 | )
54 |
55 | if "%1" == "dirhtml" (
56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
57 | if errorlevel 1 exit /b 1
58 | echo.
59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
60 | goto end
61 | )
62 |
63 | if "%1" == "singlehtml" (
64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
65 | if errorlevel 1 exit /b 1
66 | echo.
67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
68 | goto end
69 | )
70 |
71 | if "%1" == "pickle" (
72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
73 | if errorlevel 1 exit /b 1
74 | echo.
75 | echo.Build finished; now you can process the pickle files.
76 | goto end
77 | )
78 |
79 | if "%1" == "json" (
80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
81 | if errorlevel 1 exit /b 1
82 | echo.
83 | echo.Build finished; now you can process the JSON files.
84 | goto end
85 | )
86 |
87 | if "%1" == "htmlhelp" (
88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
89 | if errorlevel 1 exit /b 1
90 | echo.
91 | echo.Build finished; now you can run HTML Help Workshop with the ^
92 | .hhp project file in %BUILDDIR%/htmlhelp.
93 | goto end
94 | )
95 |
96 | if "%1" == "qthelp" (
97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
98 | if errorlevel 1 exit /b 1
99 | echo.
100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
101 | .qhcp project file in %BUILDDIR%/qthelp, like this:
102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-dbbackup.qhcp
103 | echo.To view the help file:
104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-dbbackup.ghc
105 | goto end
106 | )
107 |
108 | if "%1" == "devhelp" (
109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
110 | if errorlevel 1 exit /b 1
111 | echo.
112 | echo.Build finished.
113 | goto end
114 | )
115 |
116 | if "%1" == "epub" (
117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
118 | if errorlevel 1 exit /b 1
119 | echo.
120 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
121 | goto end
122 | )
123 |
124 | if "%1" == "latex" (
125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
126 | if errorlevel 1 exit /b 1
127 | echo.
128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
129 | goto end
130 | )
131 |
132 | if "%1" == "text" (
133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
134 | if errorlevel 1 exit /b 1
135 | echo.
136 | echo.Build finished. The text files are in %BUILDDIR%/text.
137 | goto end
138 | )
139 |
140 | if "%1" == "man" (
141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
142 | if errorlevel 1 exit /b 1
143 | echo.
144 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
145 | goto end
146 | )
147 |
148 | if "%1" == "texinfo" (
149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
150 | if errorlevel 1 exit /b 1
151 | echo.
152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
153 | goto end
154 | )
155 |
156 | if "%1" == "gettext" (
157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
158 | if errorlevel 1 exit /b 1
159 | echo.
160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
161 | goto end
162 | )
163 |
164 | if "%1" == "changes" (
165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
166 | if errorlevel 1 exit /b 1
167 | echo.
168 | echo.The overview file is in %BUILDDIR%/changes.
169 | goto end
170 | )
171 |
172 | if "%1" == "linkcheck" (
173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
174 | if errorlevel 1 exit /b 1
175 | echo.
176 | echo.Link check complete; look for any errors in the above output ^
177 | or in %BUILDDIR%/linkcheck/output.txt.
178 | goto end
179 | )
180 |
181 | if "%1" == "doctest" (
182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
183 | if errorlevel 1 exit /b 1
184 | echo.
185 | echo.Testing of doctests in the sources finished, look at the ^
186 | results in %BUILDDIR%/doctest/output.txt.
187 | goto end
188 | )
189 |
190 | :end
191 |
--------------------------------------------------------------------------------
/functional.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | make_db_test () {
4 | $PYTHON runtests.py migrate --noinput || exit 1
5 |
6 | $PYTHON runtests.py feed
7 | $PYTHON runtests.py dbbackup
8 | count1=$($PYTHON runtests.py count)
9 |
10 | $PYTHON runtests.py flush --noinput
11 |
12 | $PYTHON runtests.py dbrestore --noinput
13 | count2=$($PYTHON runtests.py count)
14 | }
15 |
16 | test_db_results () {
17 | if [[ "$count1" -eq "$count2" ]] ; then
18 | echo 'DB test succeeded!'
19 | db_success=0
20 | else
21 | echo 'DB test failed!'
22 | db_success=1
23 | fi
24 | }
25 |
26 | make_media_test () {
27 | echo foo > ${MEDIA_ROOT}foo
28 | mkdir -p ${MEDIA_ROOT}bar
29 | echo ham > ${MEDIA_ROOT}bar/ham
30 |
31 | $PYTHON runtests.py mediabackup
32 |
33 | rm -rf ${MEDIA_ROOT}*
34 | $PYTHON runtests.py mediarestore --noinput
35 | }
36 |
37 | test_media_results () {
38 | media_success=0
39 | [[ -f ${MEDIA_ROOT}foo ]] || media_success=1
40 | [[ "$(cat ${MEDIA_ROOT}foo)" == "foo" ]] || media_success=1
41 | [[ -d ${MEDIA_ROOT}bar ]] || media_success=1
42 | [[ "$(cat ${MEDIA_ROOT}bar/ham)" == "ham" ]] || media_success=1
43 | [[ -f ${MEDIA_ROOT}bar/ham ]] || media_success=1
44 | [[ "$media_success" -eq 0 ]] && echo "Media test succeed!" || echo "Media test failed!"
45 | }
46 |
47 |
48 | main () {
49 | if [[ -z "$DB_ENGINE" ]] || [[ "$DB_ENGINE" = "django.db.backends.sqlite3" ]]; then
50 | if [[ -z "$DB_NAME" ]]; then
51 | export DB_NAME="$(mktemp)"
52 | fi
53 | fi
54 | export PYTHON=${PYTHON:-python}
55 | export STORAGE="${STORAGE:-django.core.files.storage.FileSystemStorage}"
56 | export STORAGE_LOCATION="/tmp/backups/"
57 | export STORAGE_OPTIONS="${STORAGE_OPTIONS:-location=$STORAGE_LOCATION}"
58 | export MEDIA_ROOT="/tmp/media/"
59 |
60 | make_db_test
61 | test_db_results
62 |
63 | mkdir -p $STORAGE_LOCATION
64 | mkdir -p $MEDIA_ROOT
65 | make_media_test
66 | test_media_results
67 |
68 | if [[ -z "$DB_ENGINE" ]] || [[ "$DB_ENGINE" = "django.db.backends.sqlite3" ]]; then
69 | rm "$DB_NAME"
70 | fi
71 | rm -rf "$MEDIA_ROOT"
72 |
73 | return $((db_success + media_success))
74 | }
75 |
76 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
77 | main || exit 1
78 | fi
79 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | target-version = ['py37']
3 | extend-exclude = 'migrations'
4 |
5 | [tool.isort]
6 | profile = 'black'
7 | skip = 'migrations'
8 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | django>=3.2
2 | pytz
3 |
--------------------------------------------------------------------------------
/requirements/build.txt:
--------------------------------------------------------------------------------
1 | build
2 | setuptools
3 | tox>=4.0.0
4 | twine
5 | wheel
6 |
--------------------------------------------------------------------------------
/requirements/dev.txt:
--------------------------------------------------------------------------------
1 | black
2 | flake8
3 | isort
4 | pylint
5 | rope
6 |
--------------------------------------------------------------------------------
/requirements/docs.txt:
--------------------------------------------------------------------------------
1 | .
2 | docutils
3 | python-dotenv
4 | sphinx
5 | sphinx-django-command
6 | sphinx-rtd-theme
7 |
--------------------------------------------------------------------------------
/requirements/tests.txt:
--------------------------------------------------------------------------------
1 | coverage
2 | django-storages
3 | flake8
4 | pep8
5 | psycopg2
6 | pylint
7 | python-dotenv
8 | python-gnupg>=0.5.0
9 | pytz
10 | testfixtures
11 | tox>=4.0.0
12 | tox-gh-actions
13 |
--------------------------------------------------------------------------------
/runtests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | import django
6 | from django.conf import settings
7 | from django.core.management import execute_from_command_line
8 |
9 |
10 | def main(argv=None):
11 | os.environ["DJANGO_SETTINGS_MODULE"] = "dbbackup.tests.settings"
12 | argv = argv or []
13 | if len(argv) <= 1:
14 | from django.test.utils import get_runner
15 |
16 | if django.VERSION >= (1, 7):
17 | django.setup()
18 | TestRunner = get_runner(settings)
19 | test_runner = TestRunner()
20 | result = test_runner.run_tests(["dbbackup.tests"])
21 | return result
22 | execute_from_command_line(argv)
23 |
24 |
25 | if __name__ == "__main__":
26 | sys.exit(bool(main(sys.argv)))
27 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from pathlib import Path
4 |
5 | from setuptools import find_packages, setup
6 |
7 | root_dir = Path(__file__).parent
8 | src_dir = root_dir / "dbbackup"
9 | with (src_dir / "VERSION").open() as f:
10 | version = f.read().strip()
11 |
12 |
13 | def get_requirements():
14 | with (root_dir / "requirements.txt").open() as f:
15 | return f.read().splitlines()
16 |
17 |
18 | def get_test_requirements():
19 | with (root_dir / "requirements" / "tests.txt").open() as f:
20 | return f.read().splitlines()
21 |
22 |
23 | setup(
24 | name="django-dbbackup",
25 | version=version,
26 | description="Management commands to help backup and restore a project database and media.",
27 | author="Archmonger",
28 | author_email="archiethemonger@gmail.com",
29 | long_description=(root_dir / "README.rst").read_text(encoding="utf-8"),
30 | long_description_content_type="text/x-rst",
31 | python_requires=">=3.7",
32 | install_requires=get_requirements(),
33 | tests_require=get_test_requirements(),
34 | include_package_data=True,
35 | zip_safe=False,
36 | license="BSD",
37 | url="https://github.com/jazzband/django-dbbackup",
38 | keywords=[
39 | "django",
40 | "database",
41 | "media",
42 | "backup",
43 | "amazon",
44 | "s3",
45 | "dropbox",
46 | "sqlite",
47 | ],
48 | packages=find_packages(),
49 | classifiers=[
50 | "Development Status :: 5 - Production/Stable",
51 | "Environment :: Web Environment",
52 | "Environment :: Console",
53 | "Framework :: Django :: 3.2",
54 | "Framework :: Django :: 4.2",
55 | "Framework :: Django :: 5.0",
56 | "Framework :: Django :: 5.1",
57 | "Framework :: Django :: 5.2",
58 | "Intended Audience :: Developers",
59 | "Intended Audience :: System Administrators",
60 | "License :: OSI Approved :: BSD License",
61 | "Natural Language :: English",
62 | "Operating System :: OS Independent",
63 | "Programming Language :: Python",
64 | "Programming Language :: Python :: 3.7",
65 | "Programming Language :: Python :: 3.8",
66 | "Programming Language :: Python :: 3.9",
67 | "Programming Language :: Python :: 3.10",
68 | "Programming Language :: Python :: 3.11",
69 | "Programming Language :: Python :: 3.12",
70 | "Programming Language :: Python :: 3.13",
71 | "Topic :: Database",
72 | "Topic :: System :: Archiving",
73 | "Topic :: System :: Archiving :: Backup",
74 | "Topic :: System :: Archiving :: Compression",
75 | ],
76 | )
77 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py{37,38,39,310,311,312,313}-django{32,42,50,51,52,master},lint,docs,functional
3 |
4 | [testenv]
5 | passenv = *
6 | setenv =
7 | PYTHONDONTWRITEBYTECODE=1
8 | deps =
9 | -rrequirements/tests.txt
10 | django32: django>=3.2,<3.3
11 | django42: django>=4.2,<4.3
12 | django50: django>=5.0,<5.1
13 | django51: django>=5.1,<5.2
14 | django52: django>=5.2,<5.3
15 | djangomaster: https://github.com/django/django/archive/master.zip
16 | commands = {posargs:coverage run runtests.py}
17 |
18 | # Configure which test environments are run for each Github Actions Python version.
19 | [gh-actions]
20 | python =
21 | 3.7: py37-django{32},functional
22 | 3.8: py38-django{32,42},functional
23 | 3.9: py39-django{32,42},functional
24 | 3.10: py310-django{32,42,50,51,52},functional
25 | 3.11: py311-django{42,50,51,52},functional
26 | 3.12: py312-django{42,50,51,52},functional
27 | 3.13: py313-django{51,52},functional
28 |
29 | [testenv:lint]
30 | basepython = python
31 | deps =
32 | prospector
33 | commands = prospector dbbackup -0
34 |
35 | [testenv:docs]
36 | basepython = python
37 | allowlist_externals=make
38 | deps = -rrequirements/docs.txt
39 | commands = make docs
40 |
41 | [testenv:functional]
42 | basepython = python
43 | passenv = *
44 | allowlist_externals = bash
45 | deps =
46 | -rrequirements/tests.txt
47 | django
48 | mysqlclient
49 | psycopg2
50 | commands = {posargs:bash -x functional.sh}
51 |
52 |
53 | [testenv:functional-mongodb]
54 | basepython = python
55 | passenv = *
56 | allowlist_externals = bash
57 | deps =
58 | -rrequirements/tests.txt
59 | djongo
60 | commands = {posargs:bash -x functional.sh}
61 |
62 | [flake8]
63 | include = dbbackup
64 | exclude = tests, settings, venv, docs
65 | ignore = E501, E203, W503
66 |
--------------------------------------------------------------------------------