├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── borgapi ├── __init__.py ├── borgapi.py ├── capture.py ├── helpers.py └── options.py ├── install-virtenv.sh ├── pyproject.toml ├── requirements.txt ├── run-tests.sh ├── samples ├── .env.sample ├── exclude_patterns.sample ├── logging.conf.sample └── passphrase.sample ├── setup.py └── test ├── __init__.py ├── borgapi ├── __init__.py ├── test_01_borgapi.py ├── test_02_init.py ├── test_03_create.py ├── test_04_extract.py ├── test_05_rename.py ├── test_06_list.py ├── test_07_diff.py ├── test_08_delete.py ├── test_09_prune.py ├── test_10_info.py ├── test_11_mount.py ├── test_12_key.py ├── test_13_export_tar.py ├── test_14_config.py ├── test_15_benchmark_crud.py ├── test_16_compact.py ├── test_17_progress.py ├── test_18_recreate.py └── test_19_import_tar.py ├── res ├── test_env ├── test_logging.conf └── test_passphrase └── test_00_options.py /.gitignore: -------------------------------------------------------------------------------- 1 | test/temp 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [Unreleased] 8 | 9 | ## [0.7.0] - 2025-01-20 10 | ## Added 11 | - Borg command `recreate` and `import-tar` [#24] 12 | - Async class! Now you can view output logs while the command is running and not only 13 | when it has completed. This makes the `--progress` flag not useless. [#21] 14 | - Support for Python version 3.12 and 3.13 15 | - Add `environ` argument to the `BorgAPI` constructor 16 | - Load default environmental variables to prevent the api from freezing while waiting for 17 | user input. 
- Commands now return a single value
- Command is now added as two separate words instead of one
110 | - Add roadmap items to the README 111 | - Add `config` to the "Command Quirks" section of readme 112 | 113 | ### Fixed 114 | - `__post_init__` was not being called by the `__init__` function, moved logic from `__post_init__` 115 | into `__init__` 116 | 117 | ## [0.2.0] - 2021-05-10 118 | ### Added 119 | - Loading environment variables using 120 | [python-dotenv v0.17.1](https://github.com/theskumar/python-dotenv/releases/tag/v0.17.1) 121 | - Info on how to set and unset environment variables 122 | 123 | ### Changed 124 | - Formated the README to have shorter line lenghts, no changes in display 125 | - Removed the Roadmap item for loading environment variables 126 | 127 | ## [0.1.2-alpha.1] - 2021-05-10 128 | ### Added 129 | - Sample files to help with understanding different setttings 130 | 131 | ### Changed 132 | - Moved the `.env.sample` file into the sample folder with the other added sample files 133 | 134 | ## [0.1.1] - 2021-05-10 135 | ### Added 136 | - Links in the README to the PyPi project and Github repository 137 | 138 | ### Changed 139 | - Homepage url for the project from selfhosted Gitea site to github. 
**This is not a supported use case by the `borg` developers. They only intend for its use via a CLI.**
7 | 8 | ## Installation 9 | ``` 10 | pip install borgapi 11 | ``` 12 | 13 | Requires: 14 | * `borgbackup`: 1.4.0 15 | * `python-dotenv`: 1.0.1 16 | 17 | Supports Python 3.9 to 3.13 18 | 19 | ## Usage 20 | ```python 21 | import borgapi 22 | 23 | api = borgapi.BorgAPI(defaults={}, options={}) 24 | 25 | # Initalize new repository 26 | api.init("foo/bar", make_parent_dirs=True) 27 | 28 | # Create backup 29 | result = api.create("foo/bar::backup", "/home", "/mnt/baz", json=True) 30 | print(result['archive']["name"]) # backup 31 | print(result["repository"]["location"]) # foo/bar 32 | ``` 33 | 34 | ### BorgAPI Init arguments 35 | ```python 36 | class BorgAPI( 37 | defaults: dict = None, 38 | options: dict = None, 39 | log_level: str = "warning", 40 | log_json: bool = False, 41 | environ: dict = None, 42 | ) 43 | ``` 44 | * __defaults__: dictionary that has command names as keys and value that is a dict of 45 | command specific optional arguments 46 | ```python 47 | { 48 | "init": { 49 | "encryption": "repokey-blake2", 50 | "make_parent_dirs": True, 51 | }, 52 | "create": { 53 | "json": True, 54 | }, 55 | } 56 | ``` 57 | * __options__: dictionary that contain the optional arguments (common, exclusion, filesystem, and 58 | archive) used for every command (when valid). Options that aren't valid for a command will get 59 | filterd out. For example, `strip_components` will be passed into the `extract` command but not 60 | the `diff` command. 
61 | ```python 62 | { 63 | "debug": True, 64 | "log_json": True, 65 | "exclue_from": "baz/spam.txt", 66 | "strip_components": 2, 67 | "sort": True, 68 | "json_lines": True, 69 | } 70 | ``` 71 | * __log_level__: default log level, can be overriden for a specific comand by passing in another 72 | level as and keyword argument 73 | * __log_json__: log lines written by logger are formatted as json lines, passed into the 74 | logging setup 75 | * __environ__: dictionary that contains environmental variables that should be set before running 76 | any commands. Useful for setting the passphrase or passcommand for the repository or other 77 | settings like that. See [Environment Variables](#Setting-Environment-Variables) section for 78 | how to set environmental variables after initalization or what the defaults are. 79 | ```python 80 | { 81 | "BORG_CHECK_I_KNOW_WHAT_I_AM_DOING": "YES", 82 | "BORG_PASSCOMMAND": "cat ~/.borg/password", 83 | } 84 | ``` 85 | 86 | ### Setting Environment Variables 87 | You are able to manage the environment variables used by borg to be able to use different settings 88 | for different repositories. 89 | 90 | When initialzing the `BorgAPI` object, you can include a dictionary with the `environ` argument. 91 | 92 | The following are the defaults that BorgAPI will always load so that user input does not hold up 93 | the app from progressing. 94 | ```ini 95 | BORG_EXIT_CODES=modern, 96 | BORG_PASSPHRASE="", 97 | BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=no, 98 | BORG_RELOCATED_REPO_ACCESS_IS_OK=no, 99 | BORG_CHECK_I_KNOW_WHAT_I_AM_DOING=NO, 100 | BORG_DELETE_I_KNOW_WHAT_I_AM_DOING=NO, 101 | ``` 102 | 103 | There are 3 ways you can set the variables after initialization: 104 | 1. `filename`: Path to a file that contains the variables and their values. See the 105 | [python-dotenv README](https://github.com/theskumar/python-dotenv/blob/master/README.md#file-format) 106 | for more information. 107 | 2. 
`dictionary`: Dictionary that contains the variable names as keys with their corresponding 108 | values set. 109 | 3. `**kwargs`: Argument names are the variable names and the values are what will be set. 110 | 111 | ```python 112 | api.set_environ(filename="foo/bar/.env") 113 | api.set_environ(dictionary={"FOO":"BAR", "SPAM":False}) 114 | api.set_environ(FOO="BAR", SPAM=False) 115 | ``` 116 | Only one value will be used if multiple set, `filename` has highest precedence, 117 | followed by `dictionary`, and fallback to `**kwargs`. 118 | 119 | If no values are given for any of the three things (ie. calling with no arguments), then the 120 | default behavior for `load_dotenv` from [python-dotenv](https://github.com/theskumar/python-dotenv) 121 | will be used, which is searching for a ".env" file somewhere above in the current file path. 122 | 123 | [Environment Variables](https://borgbackup.readthedocs.io/en/stable/usage/general.html#environment-variables) 124 | used by `borgbackup`. 125 | 126 | ### Removing Environment Variables 127 | If you want to unset a variable so it doesn't get used for another command you can use the 128 | `unset_environ` method. It'll remove any variables passed in from the current environment. 129 | If no variables are passed in, it'll remove the variables set from the last call to `set_environ`. 130 | 131 | ```python 132 | # Enironment = {} 133 | api.set_environ(dictionary={"FOO":"BAR", "SPAM":False}) 134 | # Enironment = {"FOO": "BAR", "SPAM": "False"} 135 | api.unset_environ("FOO") 136 | # Enironment = {"SPAM": "False"} 137 | api.set_environ(BAZ="HAM") 138 | # Enironment = {"SPAM": "False", "BAZ": "HAM"} 139 | api.unset_environ("OTHER") 140 | # Enironment = {"SPAM": "False", "BAZ": "HAM"} 141 | api.unset_environ() 142 | # Enironment = {"SPAM": "False"} 143 | ``` 144 | 145 | ## Borg Commands 146 | When using a borg command any of the arguments can be set as keyword arguments. 
* import-tar (import_tar)
207 | 208 | * __init__ 209 | * `encryption` is an optional argument that defaults to `repokey` 210 | * __config__ 211 | * `borg config` can only change one key at a time 212 | * `*changes` can either be: 213 | * `NAME` to get the current value of the key 214 | * `(NAME, VALUE)` which will change they key 215 | * Any single string `NAME` values passed to `*change` will be returned as a list with their 216 | values in the order they were passed, tuple changes will not appear in that list 217 | 218 | ### Capturing Output 219 | `borg` commands display information different depending on what is asked for. 220 | For example, `create` with the `--list` option writes the file list to the logger. 221 | When the `--log-json` common flag is included it writes it to stderr. The `--stats` 222 | option writes to the logger, like the `--list` option does, but when `--json` is used, 223 | which outputs the stats info as a json object, it gets written to stdout. 224 | 225 | If either `json` or `log_json` is set, it'll try to convert the tuple output to json. 226 | If it is unable and there is output that is captured it'll return the plaintext value. 227 | If no output is captured, it returns `None` if expecting a string or `{}` (an empty 228 | dictionary) if expection some kind of JSON output. 229 | 230 | If multiple outputs are requested at the same time (like `--stats` and `--list`) the command 231 | will return a dictionary with aptly named keys (`--list` key is "list"). If only one output 232 | is requested than the bare value will be returned, not in a dictionary. 
233 | 234 | #### Command Returns 235 | Commands not listed return no output (None) 236 | - create 237 | - list: `--list`, `--log-json` 238 | - stats: `--stats`, `--json` 239 | - extract 240 | - list: `--list`, `--log-json` 241 | - extract: `--stdout` 242 | - list: 243 | - list: always returns bare value 244 | - `--log-json`, `--json`, `--json-lines` 245 | - diff: 246 | - diff: always returns bare value 247 | - `--log-json`, `--json-lines` 248 | - delete: 249 | - stats: always returns bare value 250 | - `--stats` 251 | - prune: 252 | - list: `--list`, `--log-json` 253 | - stats: `--stats`, `--log-json` 254 | - compact: 255 | - returns bare value, when verbose or info is set 256 | - verbose: `--verbose`, `-v` 257 | - info: `--info` 258 | - info 259 | - always returns bare value 260 | - recreate: 261 | - list: `--list`, `--log-json` 262 | - stats: `--stats` 263 | - import tar 264 | - list: `--list` 265 | - stats: `--stats`, `--json` 266 | - export tar 267 | - list: `--list`, `--log-json` 268 | - tar: filename == "-" 269 | - config 270 | - list: `--list`, `--log-json` 271 | - changes: single values passed into `*changes` 272 | - benchmark crud 273 | - always returns bare value 274 | 275 | ## Roadmap 276 | - Start work on Borg's beta branch again and keeping up with those 277 | 278 | ## Links 279 | * [PyPi Project](https://pypi.org/project/borgapi) 280 | * [Github](https://github.com/spslater/borgapi) 281 | 282 | ## Contributing 283 | Help is greatly appreciated. First check if there are any issues open that relate to what you want 284 | to help with. Also feel free to make a pull request with changes / fixes you make. 
285 | 286 | ## License 287 | [MIT License](https://opensource.org/licenses/MIT) 288 | -------------------------------------------------------------------------------- /borgapi/__init__.py: -------------------------------------------------------------------------------- 1 | """Interface for BorgBackup.""" 2 | 3 | __all__ = [ 4 | "BorgAPI", 5 | "BorgAPIAsync", 6 | "CommonOptions", 7 | "ExclusionOptions", 8 | "ExclusionInput", 9 | "ExclusionOutput", 10 | "FilesystemOptions", 11 | "ArchiveOptions", 12 | "ArchiveInput", 13 | "ArchivePattern", 14 | "ArchiveOutput", 15 | "CommandOptions", 16 | "Json", 17 | "Output", 18 | "Options", 19 | "OutputOptions", 20 | "ListStringIO", 21 | "PersistantHandler", 22 | "BorgLogCapture", 23 | "OutputCapture", 24 | ] 25 | 26 | from .borgapi import BorgAPI as BorgAPI 27 | from .borgapi import BorgAPIAsync as BorgAPIAsync 28 | from .capture import BorgLogCapture as BorgLogCapture 29 | from .capture import ListStringIO as ListStringIO 30 | from .capture import OutputCapture as OutputCapture 31 | from .capture import OutputOptions as OutputOptions 32 | from .capture import PersistantHandler as PersistantHandler 33 | from .helpers import Json as Json 34 | from .helpers import Options as Options 35 | from .helpers import Output as Output 36 | from .options import ArchiveInput as ArchiveInput 37 | from .options import ArchiveOptions as ArchiveOptions 38 | from .options import ArchiveOutput as ArchiveOutput 39 | from .options import ArchivePattern as ArchivePattern 40 | from .options import CommandOptions as CommandOptions 41 | from .options import CommonOptions as CommonOptions 42 | from .options import ExclusionInput as ExclusionInput 43 | from .options import ExclusionOptions as ExclusionOptions 44 | from .options import ExclusionOutput as ExclusionOutput 45 | from .options import FilesystemOptions as FilesystemOptions 46 | -------------------------------------------------------------------------------- /borgapi/borgapi.py: 
-------------------------------------------------------------------------------- 1 | """Run Borg backups.""" 2 | 3 | import functools 4 | import logging 5 | import os 6 | from asyncio import wrap_future 7 | from concurrent.futures import ThreadPoolExecutor 8 | from io import StringIO 9 | from json import decoder, loads 10 | from typing import Callable, Optional, Union 11 | 12 | import borg.archiver 13 | from dotenv import dotenv_values, load_dotenv 14 | 15 | from .capture import LOG_LVL, OutputCapture, OutputOptions 16 | from .helpers import ENVIRONMENT_DEFAULTS, Options, Output 17 | from .options import ( 18 | ArchiveInput, 19 | ArchiveOutput, 20 | ArchivePattern, 21 | CommandOptions, 22 | CommonOptions, 23 | ExclusionInput, 24 | ExclusionOptions, 25 | ExclusionOutput, 26 | FilesystemOptions, 27 | OptionsBase, 28 | ) 29 | 30 | __all__ = ["BorgAPI", "BorgAPIAsync"] 31 | 32 | 33 | class BorgAPIBase: 34 | """Automate borg in code. 35 | 36 | Base class for the wrapper. Contains all the non-Borg command calls. 37 | Should not be called by itself. Only really here for readability purposes. 38 | """ 39 | 40 | def __init__( 41 | self, 42 | defaults: dict = None, 43 | options: dict = None, 44 | log_level: str = LOG_LVL, 45 | log_json: bool = False, 46 | environ: dict = None, 47 | ): 48 | """Set the options to be used across the different command call. 
49 | 50 | :param defaults: options for specific commands to always use, defaults to None 51 | :type defaults: dict, optional 52 | :param options: common flags for all commands, defaults to None 53 | :type options: dict, optional 54 | :param log_level: level to record logged messages at, defaults to LOG_LVL 55 | :type log_level: str, optional 56 | :param log_json: if the output should be in json or string format, defaults to False 57 | :type log_json: bool, optional 58 | :param environ: envirnmental variables to set for borg to use (ie BORG_PASSCOMMAND), 59 | defaults to None 60 | :type environ: dict, optional 61 | """ 62 | self.options = options or {} 63 | self.optionals = CommandOptions(defaults) 64 | self.archiver = borg.archiver.Archiver() 65 | self._previous_dotenv = [] 66 | self._set_environ_defaults() 67 | if environ is not None: 68 | self.set_environ(**environ) 69 | self.log_level = log_level 70 | if log_json: 71 | self.options.set("log_json", log_json) 72 | 73 | self.archiver.log_json = log_json 74 | borg.archiver.setup_logging(level=self.log_level, is_serve=False, json=log_json) 75 | logging.getLogger("borgapi") 76 | self._logger = logging.getLogger(__name__) 77 | 78 | self.output = OutputCapture() 79 | 80 | @staticmethod 81 | def _loads_json_lines(string: Union[str, list]) -> Union[dict, str, None]: 82 | result = None 83 | try: 84 | if type(string) is str: 85 | result = loads(string) 86 | elif type(string) is list: 87 | result = loads(f"[{','.join(string)}]") 88 | except decoder.JSONDecodeError: 89 | if type(string) is str: 90 | clean = f"[{','.join(string.splitlines())}]" 91 | elif type(string) is str: 92 | clean = str(string) 93 | try: 94 | result = loads(clean) 95 | except decoder.JSONDecodeError: 96 | try: 97 | multiline = "[" + string.replace("}{", "},{") + "]" 98 | result = loads(multiline) 99 | except decoder.JSONDecodeError: 100 | result = string or None 101 | return result 102 | 103 | @staticmethod 104 | def _build_result(*results: tuple[str, 
Output], log_json: bool = False) -> Output: 105 | if not results: 106 | return None 107 | if len(results) == 1: 108 | result = results[0][1] 109 | if len(result) == 1: 110 | return result[0] 111 | return result 112 | result = {} 113 | for name, value in results: 114 | result[name] = value 115 | return result 116 | 117 | def _run( 118 | self, 119 | arg_list: list, 120 | func: Callable, 121 | output_options: OutputOptions, 122 | ) -> dict: 123 | self._logger.debug("%s: %s", func.__name__, arg_list) 124 | arg_list.insert(0, "borgapi") 125 | arg_list = [str(arg) for arg in arg_list] 126 | args = self.archiver.get_args(arg_list, os.getenv("SSH_ORIGINAL_COMMAND", None)) 127 | 128 | prev_json = self.archiver.log_json 129 | log_json = getattr(args, "log_json", prev_json) 130 | self.archiver.log_json = log_json 131 | 132 | with self.output(output_options): 133 | try: 134 | func(args) 135 | except Exception as e: 136 | self._logger.error(e) 137 | raise e 138 | else: 139 | capture_result = self.output.getvalues() 140 | 141 | self.archiver.log_json = prev_json 142 | 143 | return capture_result 144 | 145 | def _get_option(self, value: dict, options_class: OptionsBase) -> OptionsBase: 146 | args = {**self.options, **(value or {})} 147 | return options_class(**args) 148 | 149 | def _get_option_list(self, value: dict, options_class: OptionsBase) -> list: 150 | option = self._get_option(value, options_class) 151 | return option.parse() 152 | 153 | def _get_log_level(self, options: dict) -> str: 154 | lvl = self.log_level 155 | if options.get("critical", False): 156 | lvl = "critical" 157 | elif options.get("error", False): 158 | lvl = "error" 159 | elif options.get("warning", False): 160 | lvl = "warning" 161 | elif options.get("info", False) or options.get("verbose", False): 162 | lvl = "info" 163 | elif options.get("debug", False): 164 | lvl = "debug" 165 | 166 | elif self.options.get("critical", False): 167 | lvl = "critical" 168 | elif self.options.get("error", False): 169 | 
lvl = "error" 170 | elif self.options.get("warning", False): 171 | lvl = "warning" 172 | elif self.options.get("info", False) or self.options.get("verbose", False): 173 | lvl = "info" 174 | elif self.options.get("debug", False): 175 | lvl = "debug" 176 | 177 | return lvl 178 | 179 | def _get_basic_results(self, output: dict, opts: OutputOptions) -> dict: 180 | result_list = [] 181 | if opts.stats_show: 182 | if opts.stats_json: 183 | result_list.append(("stats", self._loads_json_lines(output["stdout"]))) 184 | else: 185 | result_list.append(("stats", output["stats"])) 186 | 187 | if opts.list_show: 188 | if opts.list_json: 189 | result_list.append(("list", self._loads_json_lines(output["list"]))) 190 | else: 191 | result_list.append(("list", output["list"])) 192 | 193 | if opts.prog_show: 194 | if opts.prog_json: 195 | result_list.append(("prog", self._loads_json_lines(output["stderr"]))) 196 | else: 197 | result_list.append(("prog", output["stderr"])) 198 | 199 | return result_list 200 | 201 | def _set_environ_defaults(self): 202 | for key, value in ENVIRONMENT_DEFAULTS.items(): 203 | if os.getenv(key) is None: 204 | os.environ[key] = value 205 | 206 | 207 | class BorgAPI(BorgAPIBase): 208 | """Automate borg in code.""" 209 | 210 | def __init__( 211 | self, 212 | defaults: dict = None, 213 | options: dict = None, 214 | log_level: str = LOG_LVL, 215 | log_json: bool = False, 216 | environ: dict = None, 217 | ): 218 | """Set the options to be used across the different command call. 
219 | 220 | :param defaults: options for specific commands to always use, defaults to None 221 | :type defaults: dict, optional 222 | :param options: common flags for all commands, defaults to None 223 | :type options: dict, optional 224 | :param log_level: level to record logged messages at, defaults to LOG_LVL 225 | :type log_level: str, optional 226 | :param log_json: if the output should be in json or string format, defaults to False 227 | :type log_json: bool, optional 228 | """ 229 | super().__init__(defaults, options, log_level, log_json, environ) 230 | 231 | def set_environ( 232 | self, 233 | filename: str = None, 234 | dictionary: dict = None, 235 | **kwargs: Options, 236 | ) -> None: 237 | """Load environment variables from file. 238 | 239 | If nothing is provided, load_dotenv's default value will be used. 240 | 241 | :param filename: path to environment file, defaults to None 242 | :type filename: str, optional 243 | :param dictionary: dictionary of environment variables to load, defaults to None 244 | :type dictionary: dict, optional 245 | :param **kwargs: Environment variables and their values as named args 246 | :type **kwargs: Options 247 | """ 248 | variables = {} 249 | if filename: 250 | self._logger.debug("Loading environment variables from %s", filename) 251 | variables = dotenv_values(filename) 252 | elif dictionary or kwargs: 253 | self._logger.debug("Loading dictionary with data: %s", variables) 254 | variables = dictionary or kwargs 255 | else: 256 | self._logger.debug('Looking for ".env" file to load variables from') 257 | variables = dotenv_values() 258 | 259 | self._previous_dotenv = variables.keys() 260 | 261 | with StringIO() as config: 262 | for key, value in variables.items(): 263 | config.write(f"{key}={value}\n") 264 | config.seek(0) 265 | load_dotenv(stream=config, override=True) 266 | config.close() 267 | 268 | def unset_environ(self, *variable: Optional[str]) -> None: 269 | """Remove variables from the environment. 
270 | 271 | If no variable is provided the values set from the previous call to `set_environ` 272 | will be removed. 273 | 274 | :param *variable: variable names to remove 275 | :type *variable: Optional[str] 276 | """ 277 | variables = [k for k in variable if k in os.environ] or [ 278 | k for k in self._previous_dotenv if k in os.environ 279 | ] 280 | for var in variables: 281 | del os.environ[var] 282 | 283 | def init( 284 | self, 285 | repository: str, 286 | encryption: str = "repokey", 287 | **options: Options, 288 | ) -> Output: 289 | """Initialize an empty repository. 290 | 291 | A repository is a filesystem directory containing the deduplicated data 292 | from zero or more archives. 293 | 294 | :param repository: repository to create 295 | :type repository: str 296 | :param encryption: select encryption key mode; defaults to "repokey" 297 | :type encryption: str, optional 298 | :param **options: optional arguments specific to `init` and common options; defaults to {} 299 | :type **options: Options 300 | :return: Stdout of command, None if no output created, 301 | json dict if json flag used, str otherwise 302 | :rtype: Output 303 | """ 304 | common_options = self._get_option(options, CommonOptions) 305 | init_options = self.optionals.get("init", options) 306 | 307 | arg_list = [] 308 | arg_list.extend(self._get_option_list(options, CommonOptions)) 309 | arg_list.append("init") 310 | arg_list.extend(["--encryption", encryption]) 311 | arg_list.extend(init_options.parse()) 312 | arg_list.append(repository) 313 | 314 | opts = OutputOptions( 315 | log_lvl=self._get_log_level(options), 316 | log_json=common_options.log_json, 317 | prog_show=common_options.progress, 318 | prog_json=common_options.log_json, 319 | ) 320 | output = self._run(arg_list, self.archiver.do_init, output_options=opts) 321 | 322 | result_list = self._get_basic_results(output, opts) 323 | return self._build_result(*result_list, log_json=opts.log_json) 324 | 325 | def create( 326 | self, 327 | 
    def create(
        self,
        archive: str,
        *paths: str,
        **options: Options,
    ) -> Output:
        """Create a backup archive of all files found while recursively traversing specified paths.

        :param archive: name of archive to create (must be also a valid directory name)
        :type archive: str
        :param *paths: paths to archive
        :type *paths: str
        :param **options: optional arguments specific to `create` as well as exclusion,
            filesysem, archive, and common options; defaults to {}
        :type **options: Options
        :return: Stdout of command, None if no output created,
            dict if json flag used, str otherwise
        :rtype: Output
        """
        common_options = self._get_option(options, CommonOptions)
        create_options = self.optionals.get("create", options)

        # Build the argv-style vector handed to borg's archiver:
        # common flags, then the subcommand, then command-specific flags.
        arg_list = []
        arg_list.extend(common_options.parse())
        arg_list.append("create")
        arg_list.extend(create_options.parse())
        arg_list.extend(self._get_option_list(options, ExclusionInput))
        arg_list.extend(self._get_option_list(options, FilesystemOptions))
        arg_list.extend(self._get_option_list(options, ArchiveInput))
        arg_list.append(archive)
        arg_list.extend(paths)

        # --json writes stats to stdout; --log-json routes the file list to
        # stderr instead of the "list" logger, hence the stream choices below.
        opts = OutputOptions(
            log_lvl=self._get_log_level(options),
            log_json=common_options.log_json,
            stats_show=create_options.stats or create_options.json,
            stats_json=create_options.json,
            list_show=create_options.list,
            list_json=common_options.log_json,
            prog_show=common_options.progress,
            prog_json=common_options.log_json,
        )
        output = self._run(arg_list, self.archiver.do_create, output_options=opts)

        result_list = self._get_basic_results(output, opts)
        if opts.list_show:
            if opts.list_json:
                # NOTE(review): assumes _get_basic_results produced ("list", [])
                # because with --log-json the list goes to stderr, leaving the
                # "list" capture empty — .remove raises ValueError otherwise;
                # confirm against OutputCapture behavior.
                result_list.remove(("list", []))
                result_list.append(("list", self._loads_json_lines(output["stderr"])))
        return self._build_result(*result_list, log_json=opts.log_json)
extract( 377 | self, 378 | archive: str, 379 | *paths: Optional[str], 380 | **options: Options, 381 | ) -> Output: 382 | """Extract the contents of an archive. 383 | 384 | :param archive: archive to extract 385 | :type archive: str 386 | :param *paths: paths to archive 387 | :type *paths: Optional[str] 388 | :param **options: optional arguments specific to `extract` as well as exclusion 389 | and common options; defaults to {} 390 | :type **options: Options 391 | :return: Stdout of command, None if no output created, 392 | dict if json flag used, str otherwise 393 | :rtype: Output 394 | """ 395 | common_options = self._get_option(options, CommonOptions) 396 | extract_options = self.optionals.get("extract", options) 397 | 398 | arg_list = [] 399 | arg_list.extend(common_options.parse()) 400 | arg_list.append("extract") 401 | arg_list.extend(extract_options.parse()) 402 | arg_list.extend(self._get_option_list(options, ExclusionOutput)) 403 | arg_list.append(archive) 404 | arg_list.extend(paths) 405 | 406 | opts = OutputOptions( 407 | raw_bytes=extract_options.stdout, 408 | log_lvl=self._get_log_level(options), 409 | log_json=common_options.log_json, 410 | list_show=extract_options.list, 411 | list_json=common_options.log_json, 412 | prog_show=common_options.progress, 413 | prog_json=common_options.log_json, 414 | ) 415 | output = self._run( 416 | arg_list, 417 | self.archiver.do_extract, 418 | output_options=opts, 419 | ) 420 | 421 | result_list = self._get_basic_results(output, opts) 422 | if opts.raw_bytes: 423 | result_list.append(("extract", output["stdout"])) 424 | 425 | return self._build_result(*result_list, log_json=opts.log_json) 426 | 427 | def check(self, *repository_or_archive: str, **options: Options) -> Output: 428 | """Verify the consistency of a repository and the corresponding archives. 
429 | 430 | :param *repository_or_archive: repository or archive to check consistency of 431 | :type *repository_or_archive: str 432 | :param **options: optional arguments specific to `check` as well as archive 433 | and common options; defaults to {} 434 | :type **options: Options 435 | :return: Stdout of command, None if no output created, 436 | dict if json flag used, str otherwise 437 | :rtype: Output 438 | """ 439 | common_options = self._get_option(options, CommonOptions) 440 | check_options = self.optionals.get("check", options) 441 | 442 | arg_list = [] 443 | arg_list.extend(common_options.parse()) 444 | arg_list.append("check") 445 | arg_list.extend(check_options.parse()) 446 | arg_list.extend(self._get_option_list(options, ArchiveOutput)) 447 | arg_list.extend(repository_or_archive) 448 | 449 | opts = OutputOptions( 450 | log_lvl=self._get_log_level(options), 451 | log_json=common_options.log_json, 452 | prog_show=common_options.progress, 453 | prog_json=common_options.log_json, 454 | ) 455 | output = self._run(arg_list, self.archiver.do_check, output_options=opts) 456 | 457 | result_list = self._get_basic_results(output, opts) 458 | return self._build_result(*result_list, log_json=opts.log_json) 459 | 460 | def rename( 461 | self, 462 | archive: str, 463 | newname: str, 464 | **options: Options, 465 | ) -> Output: 466 | """Rename an archive in the repository. 
467 | 468 | :param archive: archive to rename 469 | :type archive: str 470 | :param newname: the new archive name to use 471 | :type newname: str 472 | :param **options: optional arguments specific to `rename` as well as 473 | common options; defaults to {} 474 | :type **options: Options 475 | :return: Stdout of command, None if no output created, 476 | dict if json flag used, str otherwise 477 | :rtype: Output 478 | """ 479 | common_options = self._get_option(options, CommonOptions) 480 | 481 | arg_list = [] 482 | arg_list.extend(common_options.parse()) 483 | arg_list.append("rename") 484 | arg_list.append(archive) 485 | arg_list.append(newname) 486 | 487 | opts = OutputOptions( 488 | log_lvl=self._get_log_level(options), 489 | log_json=common_options.log_json, 490 | prog_show=common_options.progress, 491 | prog_json=common_options.log_json, 492 | ) 493 | output = self._run(arg_list, self.archiver.do_rename, output_options=opts) 494 | 495 | result_list = self._get_basic_results(output, opts) 496 | return self._build_result(*result_list, log_json=opts.log_json) 497 | 498 | def list( 499 | self, 500 | repository_or_archive: str, 501 | *paths: Optional[str], 502 | **options: Options, 503 | ) -> Output: 504 | """List the contents of a repository or an archive. 
505 | 506 | :param repository_or_archive: repository or archive to list contents of 507 | :type repository_or_archive: str 508 | :param *paths: paths to list; patterns are supported 509 | :type *paths: Optional[str] 510 | :param **options: optional arguments specific to `list` as well as exclusion, 511 | archive, and common options; defaults to {} 512 | :type **options: Options 513 | :return: Stdout of command, None if no output created, 514 | dict if json flag used, str otherwise 515 | :rtype: Output 516 | """ 517 | common_options = self._get_option(options, CommonOptions) 518 | list_options = self.optionals.get("list", options) 519 | 520 | arg_list = [] 521 | arg_list.extend(common_options.parse()) 522 | arg_list.append("list") 523 | arg_list.extend(list_options.parse()) 524 | arg_list.extend(self._get_option_list(options, ArchiveOutput)) 525 | arg_list.extend(self._get_option_list(options, ExclusionOptions)) 526 | arg_list.append(repository_or_archive) 527 | arg_list.extend(paths) 528 | 529 | opts = OutputOptions( 530 | log_lvl=self._get_log_level(options), 531 | log_json=common_options.log_json, 532 | list_show=True, 533 | list_json=list_options.json_lines or list_options.json, 534 | prog_show=common_options.progress, 535 | prog_json=common_options.log_json, 536 | ) 537 | output = self._run(arg_list, self.archiver.do_list, output_options=opts) 538 | 539 | result_list = self._get_basic_results(output, opts) 540 | if opts.list_show: 541 | if opts.list_json: 542 | result_list.remove(("list", [])) 543 | result_list.append(("list", self._loads_json_lines(output["stdout"]))) 544 | else: 545 | result_list.remove(("list", "")) 546 | result_list.append(("list", output["stdout"])) 547 | 548 | return self._build_result(*result_list, log_json=opts.log_json) 549 | 550 | def diff( 551 | self, 552 | repo_archive_1: str, 553 | archive_2: str, 554 | *paths: Optional[str], 555 | **options: Options, 556 | ) -> Output: 557 | """Find the differences (file contents, user/group/mode) 
between archives. 558 | 559 | :param repo_archive_1: repository location and ARCHIVE1 name 560 | :type repo_archive_1: str 561 | :param archive_2: ARCHIVE2 name (no repository location allowed) 562 | :type archive_2: str 563 | :param *paths: paths of items inside the archives to compare; patterns are supported 564 | :type *paths: Optional[str] 565 | :param **options: optional arguments specific to `diff` as well as exclusion, and 566 | common options; defaults to {} 567 | :type **options: Options 568 | :return: Stdout of command, None if no output created, 569 | dict if json flag used, str otherwise 570 | :rtype: Output 571 | """ 572 | common_options = self._get_option(options, CommonOptions) 573 | diff_options = self.optionals.get("diff", options) 574 | 575 | arg_list = [] 576 | arg_list.extend(common_options.parse()) 577 | arg_list.append("diff") 578 | arg_list.extend(diff_options.parse()) 579 | arg_list.extend(self._get_option_list(options, ExclusionOptions)) 580 | arg_list.append(repo_archive_1) 581 | arg_list.append(archive_2) 582 | arg_list.extend(paths) 583 | 584 | opts = OutputOptions( 585 | log_lvl=self._get_log_level(options), 586 | log_json=diff_options.json_lines or common_options.log_json, 587 | prog_show=common_options.progress, 588 | prog_json=common_options.log_json, 589 | ) 590 | output = self._run(arg_list, self.archiver.do_diff, output_options=opts) 591 | 592 | result_list = self._get_basic_results(output, opts) 593 | if opts.log_json: 594 | result_list.append(("diff", self._loads_json_lines(output["stdout"]))) 595 | else: 596 | result_list.append(("diff", output["stdout"])) 597 | 598 | return self._build_result(*result_list, log_json=opts.log_json) 599 | 600 | def delete( 601 | self, 602 | repository_or_archive: str, 603 | *archives: Optional[str], 604 | **options: Options, 605 | ) -> Output: 606 | """Delete an archive from the repository or the complete repository. 
607 | 608 | :param repository_or_archive: repository or archive to delete 609 | :type repository_or_archive: str 610 | :param *archives: archives to delete 611 | :type *archives: Optional[str] 612 | :param **options: optional arguments specific to `delete` as well as 613 | archive and common options; defaults to {} 614 | :type **options: Options 615 | :return: Stdout of command, None if no output created, 616 | dict if json flag used, str otherwise 617 | :rtype: Output 618 | """ 619 | common_options = self._get_option(options, CommonOptions) 620 | delete_options = self.optionals.get("delete", options) 621 | 622 | arg_list = [] 623 | arg_list.extend(common_options.parse()) 624 | arg_list.append("delete") 625 | arg_list.extend(delete_options.parse()) 626 | arg_list.extend(self._get_option_list(options, ArchiveOutput)) 627 | arg_list.append(repository_or_archive) 628 | arg_list.extend(archives) 629 | 630 | opts = OutputOptions( 631 | log_lvl=self._get_log_level(options), 632 | log_json=common_options.log_json, 633 | # no json option in this command 634 | stats_show=delete_options.stats, # or delete_options.json, 635 | # stats_json = delete_options.json, 636 | list_show=delete_options.list, 637 | list_json=common_options.log_json, 638 | prog_show=common_options.progress, 639 | prog_json=common_options.log_json, 640 | ) 641 | output = self._run(arg_list, self.archiver.do_delete, output_options=opts) 642 | 643 | result_list = self._get_basic_results(output, opts) 644 | return self._build_result(*result_list, log_json=opts.log_json) 645 | 646 | def prune(self, repository: str, **options: Options) -> Output: 647 | """Prune a repository by deleting all archives not matching the specified retention options. 
648 | 649 | :param repository: repository to prune 650 | :type repository: str 651 | :param **options: optional arguments specific to `prune` as well as archive and 652 | common options; defaults to {} 653 | :type **options: Options 654 | :return: Stdout of command, None if no output created, 655 | dict if json flag used, str otherwise 656 | :rtype: Output 657 | """ 658 | common_options = self._get_option(options, CommonOptions) 659 | prune_options = self.optionals.get("prune", options) 660 | 661 | arg_list = [] 662 | arg_list.extend(common_options.parse()) 663 | arg_list.append("prune") 664 | arg_list.extend(prune_options.parse()) 665 | arg_list.extend(self._get_option_list(options, ArchivePattern)) 666 | arg_list.append(repository) 667 | 668 | opts = OutputOptions( 669 | log_lvl=self._get_log_level(options), 670 | log_json=common_options.log_json, 671 | # no json option for stats 672 | stats_show=prune_options.stats, # or prune_options.json, 673 | # stats_json = prune_options.json, 674 | list_show=prune_options.list, 675 | list_json=common_options.log_json, 676 | prog_show=common_options.progress, 677 | prog_json=common_options.log_json, 678 | ) 679 | output = self._run(arg_list, self.archiver.do_prune, output_options=opts) 680 | 681 | result_list = self._get_basic_results(output, opts) 682 | return self._build_result(*result_list, log_json=opts.log_json) 683 | 684 | def compact(self, repository: str, **options: Options) -> Output: 685 | """Compact frees repository space by compacting segments. 
686 | 687 | :param repository: repository to compact 688 | :type repository: str 689 | :param **options: optional arguments specific to `compact` as well as archive and 690 | common options; defaults to {} 691 | :type **options: Options 692 | :return: Stdout of command, None if no output created, 693 | dict if json flag used, str otherwise 694 | :rtype: Output 695 | """ 696 | common_options = self._get_option(options, CommonOptions) 697 | compact_options = self.optionals.get("compact", options) 698 | 699 | arg_list = [] 700 | arg_list.extend(common_options.parse()) 701 | arg_list.append("compact") 702 | arg_list.extend(compact_options.parse()) 703 | arg_list.append(repository) 704 | 705 | opts = OutputOptions( 706 | log_lvl=self._get_log_level(options), 707 | log_json=common_options.log_json, 708 | repo_show=common_options.verbose, 709 | repo_json=common_options.log_json, 710 | prog_show=common_options.progress, 711 | prog_json=common_options.log_json, 712 | ) 713 | output = self._run(arg_list, self.archiver.do_compact, output_options=opts) 714 | 715 | result_list = self._get_basic_results(output, opts) 716 | if opts.repo_show: 717 | if opts.repo_json: 718 | result_list.append(("compact", self._loads_json_lines(output["repo"]))) 719 | else: 720 | result_list.append(("compact", output["repo"])) 721 | return self._build_result(*result_list, log_json=opts.log_json) 722 | 723 | def info(self, repository_or_archive: str, **options: Options) -> Output: 724 | """Display detailed information about the specified archive or repository. 
725 | 726 | :param repository_or_archive: repository or archive to display information about 727 | :type repository_or_archive: str 728 | :param **options: optional arguments specific to `info` as well as archive and 729 | common options; defaults to {} 730 | :type **options: Options 731 | :return: Stdout of command, None if no output created, 732 | dict if json flag used, str otherwise 733 | :rtype: Output 734 | """ 735 | common_options = self._get_option(options, CommonOptions) 736 | info_options = self.optionals.get("info", options) 737 | 738 | arg_list = [] 739 | arg_list.extend(common_options.parse()) 740 | arg_list.append("info") 741 | arg_list.extend(info_options.parse()) 742 | arg_list.extend(self._get_option_list(options, ArchiveOutput)) 743 | arg_list.append(repository_or_archive) 744 | 745 | opts = OutputOptions( 746 | log_lvl=self._get_log_level(options), 747 | log_json=info_options.json or common_options.log_json, 748 | prog_show=common_options.progress, 749 | prog_json=common_options.log_json, 750 | ) 751 | output = self._run(arg_list, self.archiver.do_info, output_options=opts) 752 | 753 | result_list = self._get_basic_results(output, opts) 754 | if opts.log_json: 755 | result_list.append(("info", self._loads_json_lines(output["stdout"]))) 756 | else: 757 | result_list.append(("info", output["stdout"])) 758 | 759 | return self._build_result(*result_list, log_json=opts.log_json) 760 | 761 | def mount( 762 | self, 763 | repository_or_archive: str, 764 | mountpoint: str, 765 | *paths: Optional[str], 766 | **options: Options, 767 | ) -> Output: 768 | """Mount an archive as a FUSE filesystem. 

        :param repository_or_archive: repository or archive to mount
        :type repository_or_archive: str
        :param mountpoint: where to mount filesystem
        :type mountpoint: str
        :param *paths: paths to extract; patterns are supported
        :type *paths: Optional[str]
        :param **options: optional arguments specific to `mount` as well as exclusion,
            archive, and common options; defaults to {}
        :type **options: Options
        :return: Stdout of command, None if no output created,
            dict if json flag used, str otherwise
        :rtype: Output
        """
        common_options = self._get_option(options, CommonOptions)
        mount_options = self.optionals.get("mount", options)

        arg_list = []
        arg_list.extend(common_options.parse())
        arg_list.append("mount")
        arg_list.extend(mount_options.parse())
        arg_list.extend(self._get_option_list(options, ArchiveOutput))
        arg_list.extend(self._get_option_list(options, ExclusionOutput))
        arg_list.append(repository_or_archive)
        arg_list.append(mountpoint)
        arg_list.extend(paths)

        opts = OutputOptions(
            log_lvl=self._get_log_level(options),
            log_json=common_options.log_json,
            prog_show=common_options.progress,
            prog_json=common_options.log_json,
        )

        # The mount runs in a forked child so the caller is not blocked while
        # the filesystem stays mounted.
        pid = os.fork()
        # child process, this one does the actual mount (in the foreground)
        if pid == 0:
            output = self._run(arg_list, self.archiver.do_mount, output_options=opts)

            # NOTE(review): the child *returns* here rather than exiting, so
            # once the mount ends it continues executing the caller's code —
            # confirm this is the intended lifecycle.
            result_list = self._get_basic_results(output, opts)
            return self._build_result(*result_list, log_json=opts.log_json)

        # Parent process: no borg output exists yet, so report the child's pid
        # (and this process's pid) so the caller can manage the mount.
        result_list = self._get_basic_results({}, opts)
        result_list.append(("mount", {"pid": pid, "cid": os.getpid()}))
        return self._build_result(*result_list, log_json=opts.log_json)

    def umount(self, mountpoint: str, **options: Options) -> Output:
        """Un-mount a FUSE filesystem that was mounted with `mount`.
817 | 818 | :param mountpoint: mountpoint of the filesystem to umount 819 | :type mountpoint: str 820 | :param **options: optional arguments specific to `umount` as well as 821 | common options; defaults to {} 822 | :type **options: Options 823 | :return: Stdout of command, None if no output created, 824 | dict if json flag used, str otherwise 825 | :rtype: Output 826 | """ 827 | common_options = self._get_option(options, CommonOptions) 828 | 829 | arg_list = [] 830 | arg_list.extend(common_options.parse()) 831 | arg_list.append("umount") 832 | arg_list.append(mountpoint) 833 | 834 | opts = OutputOptions( 835 | log_lvl=self._get_log_level(options), 836 | log_json=common_options.log_json, 837 | prog_show=common_options.progress, 838 | prog_json=common_options.log_json, 839 | ) 840 | output = self._run(arg_list, self.archiver.do_umount, output_options=opts) 841 | 842 | result_list = self._get_basic_results(output, opts) 843 | return self._build_result(*result_list, log_json=opts.log_json) 844 | 845 | def key_change_passphrase(self, repository: str, **options: Options) -> Output: 846 | """Change the passphrase protecting the repository encryption. 
847 | 848 | :param repository: repository to modify 849 | :type repository: str 850 | :param **options: optional arguments specific to `key change-passphrase` as well as 851 | common options; defaults to {} 852 | :type **options: Options 853 | :return: Stdout of command, None if no output created, 854 | dict if json flag used, str otherwise 855 | :rtype: Output 856 | """ 857 | common_options = self._get_option(options, CommonOptions) 858 | 859 | arg_list = [] 860 | arg_list.extend(common_options.parse()) 861 | arg_list.extend(["key", "change-passphrase"]) 862 | arg_list.append(repository) 863 | 864 | opts = OutputOptions( 865 | log_lvl=self._get_log_level(options), 866 | log_json=common_options.log_json, 867 | prog_show=common_options.progress, 868 | prog_json=common_options.log_json, 869 | ) 870 | output = self._run(arg_list, self.archiver.do_change_passphrase, output_options=opts) 871 | 872 | result_list = self._get_basic_results(output, opts) 873 | return self._build_result(*result_list, log_json=opts.log_json) 874 | 875 | def key_export( 876 | self, 877 | repository: str, 878 | path: str, 879 | **options: Options, 880 | ) -> Output: 881 | """Copy repository encryption key to another location. 
882 | 883 | :param repository: repository to get key for 884 | :type repository: str 885 | :param path: where to store the backup 886 | :type path: str 887 | :param **options: optional arguments specific to `key export` as well as 888 | common options; defaults to {} 889 | :type **options: Options 890 | :return: Stdout of command, None if no output created, 891 | dict if json flag used, str otherwise 892 | :rtype: Output 893 | """ 894 | common_options = self._get_option(options, CommonOptions) 895 | key_export_options = self.optionals.get("key_export", options) 896 | 897 | arg_list = [] 898 | arg_list.extend(common_options.parse()) 899 | arg_list.extend(["key", "export"]) 900 | arg_list.extend(key_export_options.parse()) 901 | arg_list.append(repository) 902 | arg_list.append(path) 903 | 904 | opts = OutputOptions( 905 | log_lvl=self._get_log_level(options), 906 | log_json=common_options.log_json, 907 | prog_show=common_options.progress, 908 | prog_json=common_options.log_json, 909 | ) 910 | output = self._run(arg_list, self.archiver.do_key_export, output_options=opts) 911 | 912 | result_list = self._get_basic_results(output, opts) 913 | return self._build_result(*result_list, log_json=opts.log_json) 914 | 915 | def key_import( 916 | self, 917 | repository: str, 918 | path: str, 919 | **options: Options, 920 | ) -> Output: 921 | """Restore a key previously backed up with the export command. 
922 | 923 | :param repository: repository to get key for 924 | :type repository: str 925 | :param path: path to the backup (‘-‘ to read from stdin) 926 | :type path: str 927 | :param **options: optional arguments specific to `key import` as well as 928 | common options; defaults to {} 929 | :type **options: Options 930 | :return: Stdout of command, None if no output created, 931 | dict if json flag used, str otherwise 932 | :rtype: Output 933 | """ 934 | common_options = self._get_option(options, CommonOptions) 935 | key_import_options = self.optionals.get("key_import", options) 936 | 937 | arg_list = [] 938 | arg_list.extend(common_options.parse()) 939 | arg_list.extend(["key", "import"]) 940 | arg_list.extend(key_import_options.parse()) 941 | arg_list.append(repository) 942 | arg_list.append(path) 943 | 944 | opts = OutputOptions( 945 | log_lvl=self._get_log_level(options), 946 | log_json=common_options.log_json, 947 | prog_show=common_options.progress, 948 | prog_json=common_options.log_json, 949 | ) 950 | output = self._run(arg_list, self.archiver.do_key_import, output_options=opts) 951 | 952 | result_list = self._get_basic_results(output, opts) 953 | return self._build_result(*result_list, log_json=opts.log_json) 954 | 955 | def upgrade(self, repository: str, **options: Options) -> Output: 956 | """Upgrade an existing, local Borg repository. 
957 | 958 | :param repository: path to the repository to be upgraded 959 | :type repository: str 960 | :param **options: optional arguments specific to `upgrade` as well as 961 | common options; defaults to {} 962 | :type **options: Options 963 | :return: Stdout of command, None if no output created, 964 | dict if json flag used, str otherwise 965 | :rtype: Output 966 | """ 967 | common_options = self._get_option(options, CommonOptions) 968 | upgrade_options = self.optionals.to_list("upgrade", options) 969 | 970 | arg_list = [] 971 | arg_list.extend(common_options.parse()) 972 | arg_list.append("upgrade") 973 | arg_list.extend(upgrade_options.parse()) 974 | arg_list.append(repository) 975 | 976 | opts = OutputOptions( 977 | log_lvl=self._get_log_level(options), 978 | log_json=common_options.log_json, 979 | prog_show=common_options.progress, 980 | prog_json=common_options.log_json, 981 | ) 982 | output = self._run(arg_list, self.archiver.do_upgrade, output_options=opts) 983 | 984 | result_list = self._get_basic_results(output, opts) 985 | return self._build_result(*result_list, log_json=opts.log_json) 986 | 987 | def recreate( 988 | self, 989 | repository_or_archive: str, 990 | *paths: Optional[str], 991 | **options: Options, 992 | ): 993 | """Recreate the contents of existing archives. 
994 | 995 | :param repository_or_archive: repository or archive to recreate 996 | :type repository_or_archive: str 997 | :param *paths: paths to recreate; patterns are supported 998 | :type *paths: Optional[str] 999 | :param **options: optional arguments specific to `recreate` as well as exclusion and 1000 | common options; defaults to {} 1001 | :type **options: Options 1002 | :return: Output of command, None if no output created, 1003 | dict if json flag used, str otherwise 1004 | :rtype: Output 1005 | """ 1006 | common_options = self._get_option(options, CommonOptions) 1007 | recreate_options = self.optionals.get("recreate", options) 1008 | 1009 | arg_list = [] 1010 | arg_list.extend(common_options.parse()) 1011 | arg_list.append("recreate") 1012 | arg_list.extend(recreate_options.parse()) 1013 | arg_list.extend(self._get_option_list(options, ExclusionInput)) 1014 | arg_list.extend(self._get_option_list(options, ArchiveInput)) 1015 | arg_list.append(repository_or_archive) 1016 | arg_list.extend(paths) 1017 | 1018 | opts = OutputOptions( 1019 | log_lvl=self._get_log_level(options), 1020 | log_json=common_options.log_json, 1021 | stats_show=recreate_options.stats, 1022 | stats_json=False, # No json flag 1023 | list_show=recreate_options.list, 1024 | list_json=common_options.log_json, 1025 | prog_show=common_options.progress, 1026 | prog_json=common_options.log_json, 1027 | ) 1028 | output = self._run(arg_list, self.archiver.do_recreate, output_options=opts) 1029 | 1030 | result_list = self._get_basic_results(output, opts) 1031 | return self._build_result(*result_list, log_json=opts.log_json) 1032 | 1033 | def import_tar( 1034 | self, 1035 | archive: str, 1036 | tarfile: str, 1037 | **options: Options, 1038 | ): 1039 | """Create a backup archive from a tarball. 1040 | 1041 | :param archive: name of archive to create (must be also a valid directory name) 1042 | :type archive: str 1043 | :param tarfile: input tar file. “-” to read from stdin instead. 
1044 | :type tarfile: str 1045 | :param **options: optional arguments specific to `import_tar` as well as exclusion and 1046 | common options; defaults to {} 1047 | :type **options: Options 1048 | :return: Output of command, None if no output created, 1049 | dict if json flag used, str otherwise 1050 | :rtype: Output 1051 | """ 1052 | common_options = self._get_option(options, CommonOptions) 1053 | import_tar_options = self.optionals.get("import_tar", options) 1054 | 1055 | arg_list = [] 1056 | arg_list.extend(common_options.parse()) 1057 | arg_list.append("import-tar") 1058 | arg_list.extend(import_tar_options.parse()) 1059 | arg_list.append(archive) 1060 | arg_list.append(tarfile) 1061 | 1062 | opts = OutputOptions( 1063 | log_lvl=self._get_log_level(options), 1064 | log_json=common_options.log_json, 1065 | stats_show=import_tar_options.stats or import_tar_options.json, 1066 | stats_json=import_tar_options.json, 1067 | list_show=import_tar_options.list, 1068 | list_json=common_options.log_json, 1069 | prog_show=common_options.progress, 1070 | prog_json=common_options.log_json, 1071 | ) 1072 | output = self._run(arg_list, self.archiver.do_import_tar, output_options=opts) 1073 | 1074 | result_list = self._get_basic_results(output, opts) 1075 | return self._build_result(*result_list, log_json=opts.log_json) 1076 | 1077 | def export_tar( 1078 | self, 1079 | archive: str, 1080 | file: str, 1081 | *paths: Optional[str], 1082 | **options: Options, 1083 | ) -> Output: 1084 | """Create a tarball from an archive. 1085 | 1086 | :param archive: archive to export 1087 | :type archive: str 1088 | :param file: output tar file. “-” to write to stdout instead. 
1089 | :type file: str 1090 | :param *paths: paths of items inside the archives to compare; patterns are supported 1091 | :type *paths: Optional[str] 1092 | :param **options: optional arguments specific to `export-tar` as well as exclusion and 1093 | common options; defaults to {} 1094 | :type **options: Options 1095 | :return: Stdout of command, None if no output created, 1096 | dict if json flag used, str otherwise 1097 | :rtype: Output 1098 | """ 1099 | common_options = self._get_option(options, CommonOptions) 1100 | export_tar_options = self.optionals.get("export_tar", options) 1101 | 1102 | arg_list = [] 1103 | arg_list.extend(common_options.parse()) 1104 | arg_list.append("export-tar") 1105 | arg_list.extend(export_tar_options.parse()) 1106 | arg_list.extend(self._get_option_list(options, ExclusionOutput)) 1107 | arg_list.append(archive) 1108 | arg_list.append(file) 1109 | arg_list.extend(paths) 1110 | 1111 | opts = OutputOptions( 1112 | log_lvl=self._get_log_level(options), 1113 | log_json=common_options.log_json, 1114 | raw_bytes=(file == "-"), 1115 | list_show=export_tar_options.list, 1116 | list_json=common_options.log_json, 1117 | prog_show=common_options.progress, 1118 | prog_json=common_options.log_json, 1119 | ) 1120 | output = self._run( 1121 | arg_list, 1122 | self.archiver.do_export_tar, 1123 | output_options=opts, 1124 | ) 1125 | 1126 | result_list = self._get_basic_results(output, opts) 1127 | if opts.raw_bytes: 1128 | result_list.append(("tar", output["stdout"])) 1129 | 1130 | return self._build_result(*result_list, log_json=opts.log_json) 1131 | 1132 | def serve(self, **options: Options) -> Output: 1133 | """Start a repository server process. This command is usually not used manually. 
1134 | 1135 | :return: Stdout of command, None if no output created, 1136 | dict if json flag used, str otherwise 1137 | :rtype: Output 1138 | """ 1139 | common_options = self._get_option(options, CommonOptions) 1140 | serve_options = self.optionals.to_list("serve", options) 1141 | 1142 | arg_list = [] 1143 | arg_list.extend(common_options.parse()) 1144 | arg_list.append("serve") 1145 | arg_list.extend(serve_options.parse()) 1146 | 1147 | opts = OutputOptions( 1148 | log_lvl=self._get_log_level(options), 1149 | log_json=common_options.log_json, 1150 | prog_show=common_options.progress, 1151 | prog_json=common_options.log_json, 1152 | ) 1153 | output = self._run(arg_list, self.archiver.do_serve, output_options=opts) 1154 | 1155 | result_list = self._get_basic_results(output, opts) 1156 | return self._build_result(*result_list, log_json=opts.log_json) 1157 | 1158 | def config( 1159 | self, 1160 | repository: str, 1161 | *changes: Union[str, tuple[str, str]], 1162 | **options: Options, 1163 | ) -> Output: 1164 | """Get and set options in a local repository or cache config file. 
1165 | 1166 | :param repository: repository to configure 1167 | :type repository: str 1168 | :param *changes: config key, new value 1169 | :type *changes: Union[str, tuple[str, str]] 1170 | :param **options: optional arguments specific to `config` as well as 1171 | common options; defaults to {} 1172 | :type **options: Options 1173 | :return: Stdout of command, None if no output created, 1174 | dict if json flag used, str otherwise 1175 | :rtype: Output 1176 | """ 1177 | common_options = self._get_option(options, CommonOptions) 1178 | config_options = self.optionals.get("config", options) 1179 | 1180 | arg_list = [] 1181 | arg_list.extend(common_options.parse()) 1182 | arg_list.append("config") 1183 | arg_list.extend(config_options.parse()) 1184 | arg_list.extend(self._get_option_list(options, ExclusionOutput)) 1185 | arg_list.append(repository) 1186 | 1187 | opts = OutputOptions( 1188 | log_lvl=self._get_log_level(options), 1189 | log_json=common_options.log_json, 1190 | list_show=config_options.list, 1191 | list_json=False, 1192 | prog_show=common_options.progress, 1193 | prog_json=common_options.log_json, 1194 | ) 1195 | 1196 | result_list = [] 1197 | if not changes: 1198 | output = self._run(arg_list, self.archiver.do_config, output_options=opts) 1199 | result_list.extend(self._get_basic_results(output, opts)) 1200 | if opts.list_show: 1201 | result_list.remove(("list", "")) 1202 | result_list.append(("list", output["stdout"])) 1203 | 1204 | change_result = [] 1205 | for change in changes: 1206 | new_args = arg_list 1207 | if isinstance(change, tuple): 1208 | new_args.extend([change[0], change[1]]) 1209 | else: 1210 | new_args.extend([change]) 1211 | output = self._run(new_args, self.archiver.do_config, output_options=opts) 1212 | change_result.append(output["stdout"].strip()) 1213 | if change_result: 1214 | result_list.append(("changes", change_result)) 1215 | 1216 | return self._build_result(*result_list, log_json=opts.log_json) 1217 | 1218 | def with_lock( 
1219 | self, 1220 | repository: str, 1221 | command: str, 1222 | *args: Union[str, int], 1223 | **options: Options, 1224 | ) -> Output: 1225 | """Run a user-specified command while the repository lock is held. 1226 | 1227 | :param repository: repository to lock 1228 | :type repository: str 1229 | :param command: command to run 1230 | :type command: str 1231 | :param *args: command arguments 1232 | :type *args: Union[str, int] 1233 | :param **options: optional arguments specific to `config` as well as 1234 | common options; defaults to {} 1235 | :type **options: Options 1236 | :return: Stdout of command, None if no output created, 1237 | dict if json flag used, str otherwise 1238 | :rtype: Output 1239 | """ 1240 | common_options = self._get_option(options, CommonOptions) 1241 | 1242 | arg_list = [] 1243 | arg_list.extend(common_options.parse()) 1244 | arg_list.append("with-lock") 1245 | arg_list.append(repository) 1246 | arg_list.append(command) 1247 | arg_list.extend(args) 1248 | 1249 | opts = OutputOptions( 1250 | log_lvl=self._get_log_level(options), 1251 | log_json=common_options.log_json, 1252 | prog_show=common_options.progress, 1253 | prog_json=common_options.log_json, 1254 | ) 1255 | output = self._run(arg_list, self.archiver.do_with_lock, output_options=opts) 1256 | 1257 | result_list = self._get_basic_results(output, opts) 1258 | return self._build_result(*result_list, log_json=opts.log_json) 1259 | 1260 | def break_lock(self, repository: str, **options: Options) -> Output: 1261 | """Break the repository and cache locks. 
1262 | 1263 | :param repository: repository for which to break the locks 1264 | :type repository: str 1265 | :param **options: optional arguments specific to `config` as well as 1266 | common options; defaults to {} 1267 | :type **options: Options 1268 | :return: Stdout of command, None if no output created, 1269 | dict if json flag used, str otherwise 1270 | :rtype: Output 1271 | """ 1272 | common_options = self._get_option(options, CommonOptions) 1273 | 1274 | arg_list = [] 1275 | arg_list.extend(common_options.parse()) 1276 | arg_list.append("break-lock") 1277 | arg_list.append(repository) 1278 | 1279 | opts = OutputOptions( 1280 | log_lvl=self._get_log_level(options), 1281 | log_json=common_options.log_json, 1282 | prog_show=common_options.progress, 1283 | prog_json=common_options.log_json, 1284 | ) 1285 | output = self._run(arg_list, self.archiver.do_break_lock, output_options=opts) 1286 | 1287 | result_list = self._get_basic_results(output, opts) 1288 | return self._build_result(*result_list, log_json=opts.log_json) 1289 | 1290 | def benchmark_crud( 1291 | self, 1292 | repository: str, 1293 | path: str, 1294 | **options: Options, 1295 | ) -> Output: 1296 | """Benchmark borg CRUD (create, read, update, delete) operations. 
1297 | 1298 | :param repository: repository to use for benchmark (must exist) 1299 | :type repository: str 1300 | :param path: path were to create benchmark input data 1301 | :type path: str 1302 | :param **options: optional arguments specific to `config` as well as 1303 | common options; defaults to {} 1304 | :type **options: Options 1305 | :return: Stdout of command, None if no output created, 1306 | dict if json flag used, str otherwise 1307 | :rtype: Output 1308 | """ 1309 | common_options = self._get_option(options, CommonOptions) 1310 | 1311 | arg_list = [] 1312 | arg_list.extend(common_options.parse()) 1313 | arg_list.extend(["benchmark", "crud"]) 1314 | arg_list.append(repository) 1315 | arg_list.append(path) 1316 | 1317 | opts = OutputOptions( 1318 | log_lvl=self._get_log_level(options), 1319 | log_json=common_options.log_json, 1320 | prog_show=common_options.progress, 1321 | prog_json=common_options.log_json, 1322 | ) 1323 | output = self._run(arg_list, self.archiver.do_benchmark_crud, output_options=opts) 1324 | 1325 | result_list = self._get_basic_results(output, opts) 1326 | result_list.append(("benchmark", output["stdout"])) 1327 | return self._build_result(*result_list, log_json=opts.log_json) 1328 | 1329 | 1330 | class BorgAPIAsync(BorgAPI): 1331 | """Async version of the :class:`BorgAPI`.""" 1332 | 1333 | CMDS = [ 1334 | "set_environ", 1335 | "unset_environ", 1336 | "init", 1337 | "create", 1338 | "extract", 1339 | "check", 1340 | "rename", 1341 | "list", 1342 | "diff", 1343 | "delete", 1344 | "prune", 1345 | "compact", 1346 | "info", 1347 | "mount", 1348 | "umount", 1349 | "key_change_passphrase", 1350 | "key_export", 1351 | "key_import", 1352 | "upgrade", 1353 | "recreate", 1354 | "import_tar", 1355 | "export_tar", 1356 | "serve", 1357 | "config", 1358 | "with_lock", 1359 | "break_lock", 1360 | "benchmark_crud", 1361 | ] 1362 | 1363 | def __init__(self, *args, **kwargs): 1364 | """Turn the commands in `:class:`BorgAPI` into async methods. 
    def _force_async(self, fn):
        """Turn a sync function to async function using threads.

        :param fn: synchronous callable (one of the BorgAPI commands) to wrap
        :return: wrapper that submits ``fn`` to the instance's thread pool and
            returns an awaitable :class:`asyncio.Future`
        """

        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            # Run the blocking borg command on the pool thread so the asyncio
            # event loop stays responsive while output is produced.
            future = self.pool.submit(fn, *args, **kwargs)
            return wrap_future(future)  # make it awaitable

        return wrapper
class ListStringIO(StringIO):
    """Save TextIO to a list of single lines."""

    def __init__(self, initial_value="", newline="\n"):
        r"""Wrap StringIO to gobble written data and save to a list.

        :param initial_value: Initial value of buffer, passed to StringIO, defaults to ''
        :type initial_value: str, optional
        :param newline: What character to use for newlines, passed to StringIO, defaults to '\\n'
        :type newline: str, optional
        """
        super().__init__(initial_value=initial_value, newline=newline)
        self.values = []  # completed (and one possibly-partial trailing) lines
        self.idx = 0  # read cursor for get()

    def write(self, s: str, /) -> int:
        """Gobble written data and save it to a list right away.

        Fix: per the ``io.TextIOBase.write`` contract this now returns the
        number of characters written (previously returned ``None``).

        :param s: data to write to output
        :type s: str
        :return: number of characters written
        :rtype: int
        """
        count = super().write(s)
        self.flush()
        val = self.getvalue()
        # Drain the underlying buffer; `values` is the only store.
        self.seek(0)
        self.truncate()
        # Treat carriage returns (progress output) as line breaks.
        dvals = val.replace("\r", "\n").splitlines(keepends=True)
        vals = []
        for v in dvals:
            nv = v.rstrip()
            if v[-1] == "\n":
                # Keep exactly one trailing newline on complete lines.
                nv = f"{nv}\n"
            if nv:
                vals.append(nv)
        if vals and self.values and self.values[-1][-1] != "\n":
            # Previous write ended mid-line: glue the first new piece onto it.
            self.values[-1] = self.values[-1] + vals[0]
            self.values.extend(vals[1:])
        else:
            self.values.extend(vals)
        return count

    def get(self) -> Optional[str]:
        """Get next line of output data.

        :return: Next line of output, None if end of list
            and no new lines
        :rtype: Optional[str]
        """
        if self.idx >= len(self.values):
            return None
        rec = self.values[self.idx]
        self.idx += 1
        return rec

    def get_all(self) -> list[str]:
        """Get all data that has been written so far.

        :return: all lines written to output split on newlines
        :rtype: list[str]
        """
        return self.values
111 | 112 | :param json: if the output should be saved as a json value 113 | instead of a string, defaults to False 114 | :type json: bool, optional 115 | """ 116 | super().__init__() 117 | self.records = list() 118 | self.idx = 0 119 | self.closed = False 120 | 121 | self.json = json 122 | 123 | fmt = "%(message)s" 124 | formatter = JsonFormatter(fmt) if json else logging.Formatter(fmt) 125 | self.setFormatter(formatter) 126 | self.setLevel("INFO") 127 | 128 | def emit(self, record: logging.LogRecord): 129 | """Log the record to the handlers internal list. 130 | 131 | Implements `logger.Handler.emit`. Should not be manually called. 132 | 133 | :param record: Logging record to be saved to the list. 134 | :type record: logging.LogRecord 135 | """ 136 | try: 137 | formatted = self.format(record) 138 | if not self.json: 139 | formatted = formatted.rstrip() 140 | if formatted: 141 | self.records.append(formatted) 142 | except Exception: 143 | self.handleError(record) 144 | 145 | def get(self) -> Union[str, Json]: 146 | """Retrieve the next record in the list. 147 | 148 | :return: Next item in the list if there is one available, otherwise `None` 149 | :rtype: Union[str, Json, None] 150 | """ 151 | if self.idx >= len(self.records): 152 | return None 153 | rec = self.records[self.idx] 154 | self.idx += 1 155 | return rec 156 | 157 | def get_all(self) -> list[Union[str, Json]]: 158 | """Retrieve full list of records. 159 | 160 | :return: _description_ 161 | :rtype: list[Union[str, Json]] 162 | """ 163 | return self.records 164 | 165 | def get_rest(self) -> list[Union[str, Json]]: 166 | """Retrieve remaining records starting at current index. 167 | 168 | :return: Unretrieved records in the list 169 | :rtype: list[Union[str, Json]] 170 | """ 171 | if self.idx < len(self.records): 172 | return self.records[self.idx :] 173 | return [] 174 | 175 | def __str__(self): 176 | """Join every record saved with newlines. 177 | 178 | :return: String of the records saved. 
class BorgLogCapture:
    """Capture Borg's output to review after a command call."""

    def __init__(self, logger: str, log_json: bool = False):
        """Attach a :class:`PersistantHandler` to the named logger.

        :param logger: name of the logger to get information from
            (e.g. "borg.output.list")
        :type logger: str
        :param log_json: save data as a json instead of a string, defaults to False
        :type log_json: bool, optional
        """
        self.logger = logging.getLogger(logger)
        self.handler = PersistantHandler(log_json)
        self.logger.addHandler(self.handler)

    def get(self) -> Optional[Union[str, Json]]:
        """Get next value in the handler.

        :return: Next value to read from the handler if it exists.
            Otherwise will return None.
        :rtype: Optional[Union[str, Json]]
        """
        return self.handler.get()

    def get_all(self) -> list[Union[str, Json]]:
        """Get every logged record since the handler was attached.

        :return: list of each record entry
        :rtype: list[Union[str, Json]]
        """
        return self.handler.get_all()

    def value(self):
        """Get full data from handler.

        :return: all records — a list in json mode, one joined string otherwise
        :rtype: Union[str, list[Json]]
        """
        return self.handler.value()

    def close(self):
        """Close handler and remove it from logger."""
        self.handler.close()
        self.logger.removeHandler(self.handler)

    def __str__(self):
        """Join all lines together as single string block.

        NOTE(review): assumes records are strings; in json mode records are
        dicts and `join` would raise — confirm this is only used in text mode.

        :return: String of all data logged to handler
        :rtype: str
        """
        return "\n".join(self.get_all())
class OutputCapture:
    """Capture stdout and stderr by redirecting to in-memory streams.

    The instance is reusable: calling it with an :class:`OutputOptions`
    (see `__call__`) sets up fresh buffers for the next borg command, and
    the instance is then used as a context manager around that command.
    The per-call attribute `raw` selects a byte buffer for stdout instead
    of the line-splitting text buffer.
    """

    def __init__(self):
        """Create object to log Borg output."""
        # True only while capture buffers are set up and streams redirected.
        self.ready = False

    def __call__(self, opts: OutputOptions) -> Self:
        """Create handlers to use by a context manager.

        Clears out old handlers from previous calls and creates new
        ones for next Borg command to be used.

        :param opts: Display options
        :type opts: OutputOptions
        :return: After setup, the object needs to be passed to the context manager.
        :rtype: Self
        """
        self.ready = False
        self.opts = opts
        self.raw = self.opts.raw_bytes
        self._init_stdout(self.raw)
        self._init_stderr()

        # Each enabled display option gets its own logger capture.
        self.list_capture = None
        if self.opts.list_show:
            self.list_capture = BorgLogCapture("borg.output.list", self.opts.list_json)

        self.stats_capture = None
        if self.opts.stats_show:
            self.stats_capture = BorgLogCapture("borg.output.stats", self.opts.stats_json)

        self.repo_capture = None
        if self.opts.repo_show:
            self.repo_capture = BorgLogCapture("borg.repository", self.opts.repo_json)

        self.ready = True

        return self

    def _init_stdout(self, raw: bool):
        # Raw mode keeps bytes (e.g. streamed tar data); text mode splits lines.
        self._stdout = TextIOWrapper(BytesIO()) if raw else ListStringIO()
        self.stdout_original = sys.stdout
        sys.stdout = self._stdout

    def _init_stderr(self):
        # stderr is always text; borg writes progress output here.
        self._stderr = ListStringIO()
        self.stderr_original = sys.stderr
        sys.stderr = self._stderr

    def getvalues(self) -> dict:
        """Get the captured values from the redirected stdout and stderr.

        :return: mapping with "stdout" (str, or bytes in raw mode) and
            "stderr" always present, plus "list"/"stats"/"repo" when those
            captures were enabled
        :rtype: dict
        """
        output = {}

        if self.raw:
            stdout_value = self._stdout.buffer.getvalue()
        else:
            stdout_value = "".join(self._stdout.get_all())
        output["stdout"] = stdout_value
        output["stderr"] = "".join(self._stderr.get_all())

        if self.opts.list_show:
            output["list"] = self.list_capture.value()
        if self.opts.stats_show:
            output["stats"] = self.stats_capture.value()
        if self.opts.repo_show:
            output["repo"] = self.repo_capture.value()

        return output

    def close(self):
        """Close the underlying IO streams and reset stdout and stderr."""
        try:
            if not self.raw:
                # NOTE(review): the raw BytesIO wrapper is deliberately left
                # open — presumably so its bytes stay readable after the
                # capture ends; confirm callers read before discarding.
                self._stdout.close()
            self._stderr.close()
            if self.list_capture:
                self.list_capture.close()
            if self.stats_capture:
                self.stats_capture.close()
            if self.repo_capture:
                self.repo_capture.close()
        finally:
            # Always restore the interpreter's real streams, even when a
            # buffer fails to close.
            sys.stdout = self.stdout_original
            sys.stderr = self.stderr_original
            self.ready = False

    def __enter__(self) -> Self:
        """Return the runtime context.

        No additional work needs to be done when entering a context;
        `__call__` already prepared the buffers.

        :return: Get `self` to use in a context
        :rtype: Self
        """
        return self

    def __exit__(
        self,
        exc_type: Optional[type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> bool:
        """Cleanup the capture when finished with a `with` context.

        Returns ``False`` so that any exception raised inside the context
        is NOT suppressed and propagates to the caller.

        :param exc_type: exception type
        :type exc_type: Optional[type[BaseException]]
        :param exc_value: exception that was raised
        :type exc_value: Optional[BaseException]
        :param traceback: traceback of the exception that was raised
        :type traceback: Optional[TracebackType]
        :return: always False, so exceptions propagate
        :rtype: bool
        """
        self.close()
        return False

    def list(self):
        """Get buffer where list information is being logged."""
        return self.list_capture

    def stats(self):
        """Get buffer where stats are being logged."""
        return self.stats_capture

    def repository(self):
        """Get buffer where repository info is being logged."""
        return self.repo_capture

    def progress(self):
        """Get buffer where progress is being logged (borg writes it to stderr)."""
        return self._stderr

    def stdout(self):
        """Get buffer stdout is being logged (raw bytes buffer in raw mode)."""
        return self._stdout.buffer if self.raw else self._stdout

    def stderr(self):
        """Get buffer stderr is being logged."""
        return self._stderr
ENVIRONMENT_DEFAULTS = {
    "BORG_EXIT_CODES": "modern",
    "BORG_PASSPHRASE": "",
    "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK": "no",
    "BORG_RELOCATED_REPO_ACCESS_IS_OK": "no",
    "BORG_CHECK_I_KNOW_WHAT_I_AM_DOING": "NO",
    "BORG_DELETE_I_KNOW_WHAT_I_AM_DOING": "NO",
}


def force(*vals: Any) -> None:
    """Force print to the stdout python started with.

    Writes to ``sys.__stdout__`` so the message bypasses any redirection of
    ``sys.stdout`` (such as the capture used around borg commands), then
    flushes immediately.

    :param *vals: values to print; each is converted with ``str`` and
        joined with single spaces
    """
    message = " ".join(str(v) for v in vals)
    sys.__stdout__.write(message + "\n")
    sys.__stdout__.flush()
@dataclass
class OptionsBase:
    """Holds all the shared methods for the subclasses.

    Every subclass should use this __init__ method because it will only set
    the values that the dataclass supports and ignore the ones not part of
    it. This way the same options dict can be passed to every constructor
    without having to worry about duplicating flags.
    """

    def __init__(self, **kwargs):
        """Set options to be used for the subclasses."""
        known = self._defaults()
        for name, value in kwargs.items():
            # Silently ignore anything that is not a declared field.
            if name in known:
                setattr(self, name, value)

    @staticmethod
    def convert_name(value: str) -> str:
        """Turn a field name into its CLI flag (``some_name`` -> ``--some-name``)."""
        return "--" + value.replace("_", "-")

    def _field_set(self, field: str) -> bool:
        """Return True when `field` was changed from its dataclass default."""
        default = self.__dataclass_fields__.get(field).default
        return getattr(self, field) != default

    def _log_deprecated(self, old_field: str, new_field: str = None) -> None:
        """Emit a deprecation warning when a deprecated field was set."""
        if not self._field_set(old_field):
            return
        if new_field:
            logger.warning(
                "[DEPRECATED] %s, use `%s` instead",
                old_field,
                new_field,
            )
        else:
            logger.warning("[DEPRECATED] %s, not being replaced", old_field)

    @classmethod
    def _defaults(cls) -> Set[str]:
        """Return the set of names of every declared dataclass field."""
        return {field.name for field in cls.__dataclass_fields__.values()}

    @staticmethod
    def _is_list(type_):
        """Return True when the annotation is ``list`` or a ``List[...]`` generic."""
        try:
            return issubclass(type_, list)
        except TypeError:
            # Subscripted generics are not classes; check their origin.
            return issubclass(type_.__origin__, list)

    def parse(self) -> List[Optional[Union[str, int]]]:
        """Turn options into list for argv.

        :return: options for the command line
        :rtype: List[Optional[Union[str, int]]]
        """
        args = []

        for name, field_ in self.__dataclass_fields__.items():
            current = getattr(self, name)
            # Only fields explicitly changed from their defaults become flags.
            if current is None or field_.default == current:
                continue
            flag = self.convert_name(name)
            if field_.type is bool:
                if current is not field_.default:
                    args.append(flag)
            elif field_.type is str or field_.type is int:
                args.extend([flag, current])
            elif self._is_list(field_.type):
                # Repeatable flags: one flag/value pair per list entry.
                for item in current:
                    args.extend([flag, item])
            else:
                raise TypeError(f'Unrecognized flag type for "{name}": {field_.type}')
        return args
@dataclass
class CommonOptions(OptionsBase):
    """Common Options for all Borg commands.

    :param critical: work on log level CRITICAL
    :type critical: bool
    :param error: work on log level ERROR
    :type error: bool
    :param warning: work on log level WARNING
    :type warning: bool
    :param info: work on log level INFO
    :type info: bool
    :param verbose: work on log level INFO
    :type verbose: bool
    :param debug: work on log level DEBUG
    :type debug: bool
    :param debug_topic: enable TOPIC debugging (can be specified multiple times).
        The logger path is borg.debug. if TOPIC is not fully qualified.
    :type debug_topic: List[str]
    :param progress: show progress information
    :type progress: bool
    :param log_json: output one JSON object per log line instead of formatted text.
    :type log_json: bool
    :param lock_wait: wait at most SECONDS for acquiring a repository/cache lock
        (default: 1).
    :type lock_wait: int
    :param bypass_lock: bypass locking mechanism
    :type bypass_lock: bool
    :param show_version: show/log the borg version
    :type show_version: bool
    :param show_rc: show/log the return code (rc)
    :type show_rc: bool
    :param umask: set umask to M (local and remote, default: 0077)
    :type umask: str
    :param remote_path: use PATH as borg executable on the remote (default: "borg")
    :type remote_path: str
    :param remote_ratelimit: set remote network upload rate limit in kiByte/s
        (default: 0=unlimited)
    :type remote_ratelimit: int
    :param consider_part_files: treat part files like normal files (e.g. to
        list/extract them)
    :type consider_part_files: bool
    :param debug_profile: write execution profile in Borg format into FILE. For local
        use a Python-compatible file can be generated by suffixing FILE with ".pyprof"
    :type debug_profile: str
    :param rsh: Use this command to connect to the 'borg serve' process (default: 'ssh')
    :type rsh: str
    """

    critical: bool = False
    error: bool = False
    warning: bool = False
    info: bool = False
    verbose: bool = False
    debug: bool = False
    debug_topic: List[str] = None
    progress: bool = False
    log_json: bool = False
    lock_wait: int = None
    bypass_lock: bool = False
    show_version: bool = False
    show_rc: bool = False
    umask: str = None
    remote_path: str = None
    remote_ratelimit: int = None
    consider_part_files: bool = False
    debug_profile: str = None
    rsh: str = None

    def __init__(self, **kwargs):
        """Set the common options for all commands."""
        super().__init__(**kwargs)

        # Normalize a single topic string to a one-element list so `parse`
        # can emit repeated --debug-topic flags.
        # BUGFIX: this previously did `self.exclude = [self.exclude]` —
        # `exclude` is not a field of this class, so a str `debug_topic`
        # was left unwrapped and would be iterated character-by-character
        # in `parse`.
        if isinstance(self.debug_topic, str):
            self.debug_topic = [self.debug_topic]
        # NOTE(review): the pattern is unanchored at the end and accepts
        # digits 8/9, so e.g. "00777" or "0099" pass validation. Kept as-is
        # to avoid rejecting previously accepted values — TODO confirm
        # against borg's umask handling.
        if self.umask and not re.match(r"^[0-9]{4}", self.umask):
            raise ValueError("umask must be in format 0000 permission code, eg: 0077")
@dataclass
class ExclusionOptions(OptionsBase):
    """Options for excluding various files from backup.

    :param exclude: exclude paths matching PATTERN
    :type exclude: List[str]
    :param exclude_from: read exclude patterns from EXCLUDEFILE, one per line
    :type exclude_from: str
    :param pattern: include/exclude paths matching PATTERN (experimental)
    :type pattern: List[str]
    :param patterns_from: read include/exclude patterns from PATTERNFILE, one per
        line (experimental)
    :type patterns_from: str
    """

    exclude: List[str] = None
    exclude_from: str = None
    pattern: List[str] = None
    patterns_from: str = None

    def __init__(self, **kwargs):
        """Set the exclusion options for many commands."""
        super().__init__(**kwargs)

        # Accept a bare string for the repeatable options and normalize it
        # to a one-element list so `parse` emits repeated flags correctly.
        if isinstance(self.exclude, str):
            self.exclude = [self.exclude]
        if isinstance(self.pattern, str):
            self.pattern = [self.pattern]
@dataclass
class ExclusionInput(ExclusionOptions):
    """Exclusion Options when inputting data to the archive.

    :param exclude_caches: exclude directories that contain a CACHEDIR.TAG file
        (http://www.bford.info/cachedir/spec.html)
    :type exclude_caches: bool
    :param exclude_if_present: exclude directories that are tagged by containing a
        filesystem object with the given NAME
    :type exclude_if_present: List[str]
    :param keep_exclude_tags: if tag objects are specified with --exclude-if-present,
        don't omit the tag objects themselves from the backup archive
    :type keep_exclude_tags: bool
    :param keep_tag_files: alternate to keep_exclude_tags
    :type keep_tag_files: bool
    :param exclude_nodump: exclude files flagged NODUMP
    :type exclude_nodump: bool
    """

    exclude_caches: bool = False
    exclude_if_present: List[str] = None
    keep_exclude_tags: bool = False
    keep_tag_files: bool = False
    exclude_nodump: bool = False

    def __init__(self, **kwargs):
        """Set the exclusion options for input for many commands."""
        super().__init__(**kwargs)

        # Normalize a single tag name to a list for repeated flags.
        if isinstance(self.exclude_if_present, str):
            self.exclude_if_present = [self.exclude_if_present]


@dataclass
class ExclusionOutput(ExclusionOptions):
    """Exclusion Options when outputting data in the archive.

    :param strip_components: Remove the specified number of leading path elements.
        Paths with fewer elements will be silently skipped.
    :type strip_components: int
    :param strip_componts: deprecated misspelling of ``strip_components``,
        kept so existing callers keep working
    :type strip_componts: int
    """

    strip_componts: int = None
    strip_components: int = None

    def __init__(self, **kwargs):
        """Set the exclusion options for output for many commands."""
        super().__init__(**kwargs)

        # BUGFIX: the misspelled field generated the invalid CLI flag
        # `--strip-componts`; borg's actual flag is `--strip-components`.
        # Map the legacy field onto the correct one so `parse` emits the
        # valid flag while old callers remain supported.
        if self.strip_componts is not None and self.strip_components is None:
            self.strip_components = self.strip_componts
            self.strip_componts = None
This might behave different from your expectations, see the docs. 270 | :type one_file_system: bool 271 | :param numeric_owner: only store numeric user and group identifiers 272 | :type numeric_owner: bool 273 | :param noatime: do not store atime into archive 274 | :type noatime: bool 275 | :param noctime: do not store ctime into archive 276 | :type noctime: bool 277 | :param nobirthtime: do not store birthtime (creation date) into archive 278 | :type nobirthtime: bool 279 | :param nobsdflags: do not read and store bsdflags (e.g. NODUMP, IMMUTABLE) into archive 280 | :type nobsdflags: bool 281 | :param noacls: do not read and store ACLs into archive 282 | :type noacls: bool 283 | :param noxattrs: do not read and store xattrs into archive 284 | :type noxattrs: bool 285 | :param ignore_inode: ignore inode data in the file metadata cache used to detect 286 | unchanged files. 287 | :type ignore_inode: bool 288 | :param files_cache: operate files cache in MODE. default: ctime,size,inode 289 | :type files_cache: str 290 | :param read_special: open and read block and char device files as well as FIFOs as if they were 291 | regular files. Also follows symlinks pointing to these kinds of files. 
292 | :type read_special: bool 293 | """ 294 | 295 | one_file_system: bool = False 296 | numeric_owner: bool = False 297 | noatime: bool = False 298 | noctime: bool = False 299 | nobirthtime: bool = False 300 | nobsdflags: bool = False 301 | noacls: bool = False 302 | noxattrs: bool = False 303 | ignore_inode: bool = False 304 | files_cache: str = None 305 | read_special: bool = False 306 | 307 | def __init__(self, **kwargs): 308 | """Set the filesystem options for many commands.""" 309 | super().__init__(**kwargs) 310 | 311 | 312 | @dataclass 313 | class ArchiveOptions(OptionsBase): 314 | """Options related to the archive.""" 315 | 316 | def __init__(self, **kwargs): 317 | """Set the archive options for many commands.""" 318 | super().__init__(**kwargs) 319 | 320 | 321 | @dataclass 322 | class ArchiveInput(ArchiveOptions): 323 | """Archive Options when inputing data to the archive. 324 | 325 | :param comment: add a comment text to the archive 326 | :type comment: str 327 | :param timestamp: manually specify the archive creation date/time 328 | (UTC, yyyy-mm-ddThh:mm:ss format). Alternatively, give a reference file/directory. 329 | :type timestamp: str 330 | :param checkpoint_interval: write checkpoint every SECONDS seconds (Default: 1800) 331 | :type checkpoint_interval: int 332 | :param chunker_params: specify the chunker parameters (CHUNK_MIN_EXP, CHUNK_MAX_EXP, 333 | HASH_MASK_BITS, HASH_WINDOW_SIZE). default: 19,23,21,4095 334 | :type chunker_params: str 335 | :param compression: select compression algorithm, see the output of the “borg help compression” 336 | command for details. 
@dataclass
class ArchiveInput(ArchiveOptions):
    """Archive Options when inputting data to the archive.

    :param comment: add a comment text to the archive
    :type comment: str
    :param timestamp: manually specify the archive creation date/time
        (UTC, yyyy-mm-ddThh:mm:ss format). Alternatively, give a reference
        file/directory.
    :type timestamp: str
    :param checkpoint_interval: write checkpoint every SECONDS seconds
        (Default: 1800)
    :type checkpoint_interval: int
    :param chunker_params: specify the chunker parameters (CHUNK_MIN_EXP,
        CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE).
        default: 19,23,21,4095
    :type chunker_params: str
    :param compression: select compression algorithm, see the output of the
        "borg help compression" command for details.
    :type compression: str
    """

    comment: str = None
    timestamp: str = None
    checkpoint_interval: int = None
    chunker_params: str = None
    compression: str = None

    def __init__(self, **kwargs):
        """Set the input options for archives for many commands."""
        super().__init__(**kwargs)


@dataclass
class ArchivePattern(ArchiveOptions):
    """Archive Options for selecting archives by name.

    :param prefix: only consider archive names starting with this prefix.
    :type prefix: str
    :param glob_archives: only consider archive names matching the glob.
        sh: rules apply, see "borg help patterns". --prefix and --glob-archives
        are mutually exclusive.
    :type glob_archives: str
    """

    prefix: str = None
    glob_archives: str = None

    def __init__(self, **kwargs):
        """Set the output pattern options for archives for many commands."""
        super().__init__(**kwargs)


@dataclass
class ArchiveOutput(ArchivePattern):
    """Archive options when filtering output.

    :param sort_by: Comma-separated list of sorting keys; valid keys are:
        timestamp, name, id; default is: timestamp
    :type sort_by: str
    :param first: consider first N archives after other filters were applied
    :type first: int
    :param last: consider last N archives after other filters were applied
    :type last: int
    """

    sort_by: str = None
    first: int = None
    last: int = None

    def __init__(self, **kwargs):
        """Set the output options for archives for many commands."""
        super().__init__(**kwargs)
@dataclass
class InitOptional(OptionsBase):
    """Init command options.

    :param append_only: create an append-only mode repository
    :type append_only: bool
    :param storage_quota: set storage quota of the new repository
        (e.g. 5G, 1.5T); borg default: no quota
    :type storage_quota: str
    :param make_parent_dirs: create the parent directories of the repository
        directory, if they are missing
    :type make_parent_dirs: bool
    """

    append_only: bool = False
    storage_quota: str = None
    make_parent_dirs: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `init` command."""
        super().__init__(**kwargs)
@dataclass
class CreateOptional(OptionsBase):
    """Create command options.

    :param dry_run: do not create a backup archive
    :type dry_run: bool
    :param stats: print statistics for the created archive
    :type stats: bool
    :param list: output verbose list of items (files, dirs, ...)
    :type list: bool
    :param filter: only display items with the given status characters
    :type filter: str
    :param json: output stats as JSON. Implies `stats`
    :type json: bool
    :param no_cache_sync: experimental: do not synchronize the cache.
        Implies not using the files cache
    :type no_cache_sync: bool
    :param no_files_cache: do not load/update the file metadata cache
        used to detect unchanged files
    :type no_files_cache: bool
    :param stdin_name: use NAME in archive for stdin data
        borg default: "stdin"
    :type stdin_name: str
    :param stdin_user: set user USER in archive for stdin data
        borg default: "root"
    :type stdin_user: str
    :param stdin_group: set group GROUP in archive for stdin data
        borg default: "root"
    :type stdin_group: str
    :param stdin_mode: set mode to M in archive for stdin data
        borg default: 0660
    :type stdin_mode: str
    """

    # NOTE: `list`, `filter`, and `json` deliberately shadow builtins so each
    # field name maps 1:1 onto the borg CLI flag of the same name.
    dry_run: bool = False
    stats: bool = False
    list: bool = False
    filter: str = None
    json: bool = False
    no_cache_sync: bool = False
    no_files_cache: bool = False
    stdin_name: str = None
    stdin_user: str = None
    stdin_group: str = None
    stdin_mode: str = None

    def __init__(self, **kwargs):
        """Set the options for the `create` command."""
        super().__init__(**kwargs)
@dataclass
class ExtractOptional(OptionsBase):
    """Extract command options.

    :param list: output verbose list of items (files, dirs, ...)
    :type list: bool
    :param dry_run: do not actually change any files
    :type dry_run: bool
    :param numeric_owner: only obey numeric user and group identifiers
    :type numeric_owner: bool
    :param nobsdflags: do not extract/set bsdflags (e.g. NODUMP, IMMUTABLE)
    :type nobsdflags: bool
    :param noacls: do not extract/set ACLs
    :type noacls: bool
    :param noxattrs: do not extract/set xattrs
    :type noxattrs: bool
    :param stdout: write all extracted data to stdout
    :type stdout: bool
    :param sparse: create holes in output sparse file from all-zero chunks
    :type sparse: bool
    """

    list: bool = False
    dry_run: bool = False
    numeric_owner: bool = False
    nobsdflags: bool = False
    noacls: bool = False
    noxattrs: bool = False
    stdout: bool = False
    sparse: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `extract` command."""
        super().__init__(**kwargs)


@dataclass
class CheckOptional(OptionsBase):
    """Check command options.

    :param repository_only: only perform repository checks
    :type repository_only: bool
    :param archives_only: only perform archives checks
    :type archives_only: bool
    :param verify_data: perform cryptographic archive data integrity
        verification; conflicts with `repository_only`
    :type verify_data: bool
    :param repair: attempt to repair any inconsistencies found
    :type repair: bool
    :param save_space: work slower, but using less space
    :type save_space: bool
    """

    repository_only: bool = False
    archives_only: bool = False
    verify_data: bool = False
    repair: bool = False
    save_space: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `check` command."""
        super().__init__(**kwargs)
@dataclass
class ListOptional(OptionsBase):
    """List command options.

    :param short: only print file/directory names, nothing else
    :type short: bool
    :param format: specify format for file listing
        borg default: "{mode} {user:6} {group:6} {size:8d} {mtime} {path}{extra}{NL}"
    :type format: str
    :param json: only valid for listing repository contents. Format output as JSON.
        The form of `format` is ignored, but keys used in it are added to the JSON output.
        Some keys are always present. Note: JSON can only represent text.
        A "barchive" key is therefore not available.
    :type json: bool
    :param json_lines: only valid for listing archive contents. Format output as JSON lines.
        The form of `format` is ignored, but keys used in it are added to the JSON output.
        Some keys are always present. Note: JSON can only represent text.
        A "bpath" key is therefore not available.
    :type json_lines: bool
    """

    short: bool = False
    format: str = None
    json: bool = False
    json_lines: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `list` command."""
        super().__init__(**kwargs)
@dataclass
class DiffOptional(OptionsBase):
    """Diff command options.

    :param numeric_owner: only consider numeric user and group identifiers
        (deprecated upstream in favor of `numeric_ids`)
    :type numeric_owner: bool
    :param same_chunker_params: override check of chunker parameters
    :type same_chunker_params: bool
    :param sort: sort the output lines by file path
    :type sort: bool
    :param json_lines: format output as JSON lines
    :type json_lines: bool
    """

    numeric_owner: bool = False
    same_chunker_params: bool = False
    sort: bool = False
    json_lines: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `diff` command."""
        super().__init__(**kwargs)

        # Warn callers that borg renamed this flag; see OptionsBase helper.
        self._log_deprecated("numeric_owner", "numeric_ids")


@dataclass
class DeleteOptional(OptionsBase):
    """Delete command options.

    :param dry_run: do not change repository
    :type dry_run: bool
    :param list: output verbose list of archives
    :type list: bool
    :param stats: print statistics for the deleted archive
    :type stats: bool
    :param cache_only: delete only the local cache for the given repository
    :type cache_only: bool
    :param force: force deletion of corrupted archives
    :type force: bool
    :param keep_security_info: keep the local security info when deleting a repository
    :type keep_security_info: bool
    :param save_space: work slower, but using less space
    :type save_space: bool
    :param checkpoint_interval: write checkpoint every SECONDS seconds
        (1800 matches the borg default)
    :type checkpoint_interval: int
    """

    dry_run: bool = False
    list: bool = False
    stats: bool = False
    cache_only: bool = False
    force: bool = False
    keep_security_info: bool = False
    save_space: bool = False
    checkpoint_interval: int = 1800

    def __init__(self, **kwargs):
        """Set the options for the `delete` command."""
        super().__init__(**kwargs)
@dataclass
class PruneOptional(OptionsBase):
    """Prune command options.

    :param dry_run: do not change repository
    :type dry_run: bool
    :param force: force pruning of corrupted archives
    :type force: bool
    :param stats: print statistics for the deleted archive
    :type stats: bool
    :param list: output verbose list of archives it keeps/prunes
    :type list: bool
    :param keep_within: keep all archives within this time interval
    :type keep_within: str
    :param keep_last: number of secondly archives to keep (alias of `keep_secondly`)
    :type keep_last: int
    :param keep_secondly: number of secondly archives to keep
    :type keep_secondly: int
    :param keep_minutely: number of minutely archives to keep
    :type keep_minutely: int
    :param keep_hourly: number of hourly archives to keep
    :type keep_hourly: int
    :param keep_daily: number of daily archives to keep
    :type keep_daily: int
    :param keep_weekly: number of weekly archives to keep
    :type keep_weekly: int
    :param keep_monthly: number of monthly archives to keep
    :type keep_monthly: int
    :param keep_yearly: number of yearly archives to keep
    :type keep_yearly: int
    :param save_space: work slower, but using less space
    :type save_space: bool
    """

    dry_run: bool = False
    force: bool = False
    stats: bool = False
    list: bool = False
    keep_within: str = None
    keep_last: int = None
    keep_secondly: int = None
    keep_minutely: int = None
    keep_hourly: int = None
    keep_daily: int = None
    keep_weekly: int = None
    keep_monthly: int = None
    keep_yearly: int = None
    save_space: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `prune` command."""
        super().__init__(**kwargs)
@dataclass
class CompactOptional(OptionsBase):
    """Compact command options.

    :param cleanup_commits: cleanup commit-only 17-byte segment files
    :type cleanup_commits: bool
    :param threshold: set minimum threshold for saved space in PERCENT
        (10 matches the borg default)
    :type threshold: int
    """

    cleanup_commits: bool = False
    threshold: int = 10

    def __init__(self, **kwargs):
        """Set the options for the `compact` command."""
        super().__init__(**kwargs)


@dataclass
class InfoOptional(OptionsBase):
    """Info command options.

    :param json: format output as JSON
    :type json: bool
    """

    json: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `info` command."""
        super().__init__(**kwargs)


@dataclass
class MountOptional(OptionsBase):
    """Mount command options.

    :param foreground: stay in foreground, do not daemonize.
        Defaults to True here (borg's own CLI default is to daemonize).
    :type foreground: bool
    :param o: extra mount options
    :type o: str
    """

    foreground: bool = True
    o: str = None

    def __init__(self, **kwargs):
        """Set the options for the `mount` command."""
        super().__init__(**kwargs)


@dataclass
class KeyExportOptional(OptionsBase):
    """Key Export command options.

    :param paper: create an export suitable for printing and later type-in
    :type paper: bool
    :param qr_html: create an html file suitable for printing and later type-in or qr scan
    :type qr_html: bool
    """

    paper: bool = False
    qr_html: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `key export` command."""
        super().__init__(**kwargs)
@dataclass
class KeyImportOptional(OptionsBase):
    """Key Import command options.

    :param paper: interactively import from a backup done with `paper`
    :type paper: bool
    """

    paper: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `key import` command."""
        super().__init__(**kwargs)


@dataclass
class UpgradeOptional(OptionsBase):
    """Upgrade command options.

    :param dry_run: do not change repository
    :type dry_run: bool
    :param inplace: rewrite repository in place, with no chance of going
        back to older versions of the repository
    :type inplace: bool
    :param force: force upgrade
    :type force: bool
    :param tam: enable manifest authentication (in key and cache)
    :type tam: bool
    :param disable_tam: disable manifest authentication (in key and cache)
    :type disable_tam: bool
    """

    dry_run: bool = False
    inplace: bool = False
    force: bool = False
    tam: bool = False
    disable_tam: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `upgrade` command."""
        super().__init__(**kwargs)
@dataclass
class RecreateOptional(OptionsBase):
    """Recreate command options.

    :param list: output verbose list of items (files, dirs, ...)
    :type list: bool
    :param filter: only display items with the given status characters
        (listed in borg create --help)
    :type filter: str
    :param dry_run: do not change anything
    :type dry_run: bool
    :param stats: print statistics at end
    :type stats: bool
    :param target: create a new archive with the name ARCHIVE, do not replace existing archive
        (only applies for a single archive)
    :type target: str
    :param recompress: recompress data chunks according to MODE and --compression. Possible modes
        are `if-different`, `always`, `never`. If no MODE is given, if-different will be used.
    :type recompress: str
    """

    list: bool = False
    filter: str = None
    dry_run: bool = False
    stats: bool = False

    # Custom Archive Options
    target: str = None
    recompress: str = None

    def __init__(self, **kwargs):
        """Set the options for the `recreate` command."""
        super().__init__(**kwargs)


@dataclass
class ImportTarOptional(OptionsBase):
    """Import Tar command options.

    :param tar_filter: filter program to pipe data through
    :type tar_filter: str
    :param stats: print statistics for the created archive
    :type stats: bool
    :param list: output verbose list of items (files, dirs, ...)
    :type list: bool
    :param filter: only display items with the given status characters
    :type filter: str
    :param json: output stats as JSON. Implies `stats`
    :type json: bool
    :param ignore_zeros: ignore zero-filled blocks in the input tarball
    :type ignore_zeros: bool
    """

    tar_filter: str = None
    stats: bool = False
    list: bool = False
    filter: str = None
    json: bool = False
    ignore_zeros: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `import-tar` command."""
        super().__init__(**kwargs)


@dataclass
class ExportTarOptional(OptionsBase):
    """Export Tar command options.

    :param tar_filter: filter program to pipe data through
    :type tar_filter: str
    :param list: output verbose list of items (files, dirs, ...)
    :type list: bool
    """

    tar_filter: str = None
    list: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `export-tar` command."""
        super().__init__(**kwargs)
@dataclass
class ServeOptional(OptionsBase):
    """Serve command options.

    :param restrict_to_path: restrict repository access to PATH.
        Can be specified multiple times to allow the client access to several directories.
        Access to all sub-directories is granted implicitly;
        PATH doesn't need to directly point to a repository
    :type restrict_to_path: str
    :param restrict_to_repository: restrict repository access.
        Only the repository located at PATH (no sub-directories are considered) is accessible.
        Can be specified multiple times to allow the client access to several repositories.
        Unlike `restrict_to_path` sub-directories are not accessible;
        PATH needs to directly point at a repository location.
        PATH may be an empty directory or the last element of PATH may not exist,
        in which case the client may initialize a repository there
    :type restrict_to_repository: str
    :param append_only: only allow appending to repository segment files
    :type append_only: bool
    :param storage_quota: Override storage quota of the repository (e.g. 5G, 1.5T).
        When a new repository is initialized, sets the storage quota on the new repository as well.
        borg default: no quota
    :type storage_quota: str
    """

    restrict_to_path: str = None
    restrict_to_repository: str = None
    append_only: bool = False
    storage_quota: str = None

    def __init__(self, **kwargs):
        """Set the options for the `serve` command."""
        super().__init__(**kwargs)
@dataclass
class ConfigOptional(OptionsBase):
    """Config command options.

    :param cache: get and set values from the repo cache
    :type cache: bool
    :param delete: delete the key from the config file
    :type delete: bool
    :param list: list the configuration of the repo
    :type list: bool
    """

    cache: bool = False
    delete: bool = False
    list: bool = False

    def __init__(self, **kwargs):
        """Set the options for the `config` command."""
        super().__init__(**kwargs)


class CommandOptions:
    """Optional Arguments for the different commands."""

    # Registry mapping an api command name to the dataclass that models
    # that command's optional flags.
    optional_classes = {
        "init": InitOptional,
        "create": CreateOptional,
        "extract": ExtractOptional,
        "check": CheckOptional,
        "list": ListOptional,
        "diff": DiffOptional,
        "delete": DeleteOptional,
        "prune": PruneOptional,
        "compact": CompactOptional,
        "info": InfoOptional,
        "mount": MountOptional,
        "key_export": KeyExportOptional,
        "key_import": KeyImportOptional,
        "upgrade": UpgradeOptional,
        "recreate": RecreateOptional,
        "import_tar": ImportTarOptional,
        "export_tar": ExportTarOptional,
        "serve": ServeOptional,
        "config": ConfigOptional,
    }

    def __init__(self, defaults: dict = None):
        """Set the defaults used for all commands.

        :param defaults: Specific flags to use for all commands, defaults to None
        :type defaults: dict, optional
        """
        self.defaults = defaults or {}

    @classmethod
    def _get_optional(cls, command: str) -> type[OptionsBase]:
        """Look up the options dataclass registered for `command`.

        Returns the class itself (not an instance); `get` instantiates it.

        :param command: command name to look up
        :type command: str
        :raises ValueError: if `command` is not in `optional_classes`
        :return: the dataclass modeling the command's optional flags
        :rtype: type[OptionsBase]
        """
        try:
            return cls.optional_classes[command]
        except KeyError as e:
            raise ValueError(
                f"Command `{command}` does not have any optional arguments or does not exist."
            ) from e

    def get(self, command: str, values: dict) -> OptionsBase:
        """Return OptionsBase with flags set for `command`.

        Per-call `values` take precedence over the instance-wide defaults.

        :param command: command being called
        :type command: str
        :param values: dictionary with values for flags
        :type values: dict
        :return: instance of command dataclass
        :rtype: OptionsBase
        """
        optionals = {**self.defaults.get(command, {}), **(values or {})}
        return self._get_optional(command)(**optionals)

    def to_list(self, command: str, values: dict) -> list:
        """Parse args list for command.

        :param command: command name
        :type command: str
        :param values: options flags
        :type values: dict
        :return: list of converted flags
        :rtype: list
        """
        return self.get(command, values).parse()
#!/usr/bin/env bash
# Recreate the pyenv virtualenvs used for testing against every supported
# Python version, installing ruff and the project requirements in each.

if ! command -v pyenv &> /dev/null; then
    echo "pyenv not installed" && exit 1
fi

eval "$(pyenv init -)" >/dev/null 2>&1
eval "$(pyenv init --path)"
eval "$(pyenv virtualenv-init -)"

# Upgrade tooling and project requirements inside the named virtualenv.
function update() {
    pyenv activate "$1"
    python -m pip install --upgrade pip
    python -m pip install ruff~=0.9.1
    if [ -e "requirements.txt" ]; then
        python -m pip install -r requirements.txt --upgrade
    fi
    pyenv deactivate
}

# Recreate one virtualenv: $1 = interpreter version, $2 = env name.
# BUG FIX: the original used `test -n $(pyenv versions | grep ...)` — with the
# substitution unquoted, an empty result leaves `test` with the single operand
# "-n", which is truthy, so the uninstall branch ran even when the env did not
# exist. Quoting the substitution makes the existence check work.
function recreate() {
    local python_version="$1"
    local env_name="$2"
    if test -n "$(pyenv versions | grep "$env_name")"; then
        pyenv uninstall "$env_name"
    fi
    pyenv virtualenv "$python_version" "$env_name"
    update "$env_name"
}

recreate "3.9.21" "borglatest-3.9"
recreate "3.10.16" "borglatest-3.10"
recreate "3.11.2" "borglatest-3.11"
recreate "3.12.8" "borglatest-3.12"
recreate "3.13.1" "borglatest-3.13"
| issues = "https://github.com/spslater/borgapi/issues" 40 | changelog = "https://github.com/spslater/borgapi/blob/master/CHANGELOG.md" 41 | 42 | [tool.ruff] 43 | line-length = 100 44 | indent-width = 4 45 | target-version = "py312" 46 | 47 | [tool.ruff.lint] 48 | select = ["E", "F", "I", "N", "D"] 49 | # A regular expression matching the name of dummy variables (i.e. expected to not be used). 50 | dummy-variable-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused" 51 | 52 | 53 | [tool.ruff.lint.pydocstyle] 54 | # waiting for `sphinx` convention to be available 55 | convention = "pep257" 56 | 57 | [tool.ruff.lint.pylint] 58 | # Maximum number of arguments for function / method. 59 | max-args = 5 60 | # Maximum number of boolean expressions in an if statement (see R0916). 61 | max-bool-expr = 5 62 | # Maximum number of branch for function / method body. 63 | max-branches = 12 64 | # Maximum number of locals for function / method body. 65 | max-locals = 15 66 | # Maximum number of public methods for a class (see R0904). 67 | max-public-methods = 20 68 | # Maximum number of return / yield for function / method body. 69 | max-returns = 6 70 | # Maximum number of statements in function / method body. 
#!/usr/bin/env bash
# Run the unittest suite under every supported Python version, appending
# results to results.log. Pass -v for verbose unittest output.

versions=(
    "borglatest-3.9"
    "borglatest-3.10"
    "borglatest-3.11"
    "borglatest-3.12"
    "borglatest-3.13"
)

eval "$(pyenv init -)" >/dev/null 2>&1
eval "$(pyenv init --path)"
eval "$(pyenv virtualenv-init -)"

echo "Automate tests for supported Python versions" > "results.log"

for version in "${versions[@]}"; do
    pyenv activate "$version"
    # BUG FIX: the original `1>&2 2>/dev/null` redirected stdout to the
    # *original* stderr before silencing stderr, so pip's normal output still
    # reached the terminal. `>/dev/null 2>&1` silences both streams.
    pip install -r requirements.txt >/dev/null 2>&1
    # Use a literal format string; passing $version as the format would
    # misbehave if the name ever contained a '%'.
    printf "%s: " "$version" 2>&1 | tee -a "results.log"
    if test "$1" = "-v"; then
        python -m unittest discover -v 2>&1 | tee -a "results.log"
    else
        python -m unittest discover 2>&1 | tee -a "results.log"
    fi
    pyenv deactivate
done
# ruff: noqa: N802
"""Test BorgAPI module."""

import unittest
from configparser import ConfigParser
from os import getenv, makedirs, remove
from os.path import exists, join
from shutil import rmtree

from dotenv import load_dotenv

from borgapi import BorgAPI, BorgAPIAsync


class BorgapiTests(unittest.TestCase):
    """Test for the borgbackup api."""

    @staticmethod
    def assertFileExists(path, msg=None):
        """Assert that a path exists."""
        if not exists(path):
            raise AssertionError(msg or f"{path} does not exist")

    @staticmethod
    def assertFileNotExists(path, msg=None):
        """Assert that a path does not exist."""
        if exists(path):
            raise AssertionError(msg or f"{path} does exist")

    @staticmethod
    def assertKeyExists(key, dictionary, msg=None):
        """Assert a key exists in a dictionary."""
        if key not in dictionary:
            raise AssertionError(msg or f"{key} does not exist in dictionary")

    @staticmethod
    def assertKeyNotExists(key, dictionary, msg=None):
        """Assert a key does not exist in a dictionary."""
        if key in dictionary:
            raise AssertionError(msg or f"{key} exists in dictionary")

    @staticmethod
    def assertType(obj, type_, msg=None):
        """Assert an object is an instance of type."""
        if not isinstance(obj, type_):
            raise AssertionError(msg or f"{obj} is not type {type_}, it is {type(obj)}")

    @staticmethod
    def assertAnyType(obj, *types, msg=None):
        """Assert an object is an instance of any of the given types."""
        if not any([isinstance(obj, t) for t in types]):
            raise AssertionError(msg or f"{obj} is not any of {types}; it is {type(obj)}")

    @staticmethod
    def assertSubclass(obj, class_, msg=None):
        """Assert an object is a subclass of class."""
        if not issubclass(obj, class_):
            raise AssertionError(msg or f"{obj} is not a subtype of {class_}")

    @staticmethod
    def assertNone(obj, msg=None):
        """Assert an object is None."""
        if obj is not None:
            raise AssertionError(msg or f"Value is not None: {obj}")

    @staticmethod
    def assertNotNone(obj, msg=None):
        """Assert an object is not None."""
        if obj is None:
            raise AssertionError(msg or "Value is None")

    @staticmethod
    def _try_pass(error, func, *args, **kwargs):
        # Run func, swallowing only the expected error (e.g. cleanup of a
        # directory that may not exist).
        try:
            func(*args, **kwargs)
        except error:
            pass

    @staticmethod
    def _make_clean(directory):
        # Ensure directory exists and is empty.
        try:
            makedirs(directory)
        except FileExistsError:
            rmtree(directory)
            makedirs(directory)

    @staticmethod
    def _read_config(string=None, filename=None):
        """Convert config string into dictionary."""
        config = ConfigParser()
        if filename:
            with open(filename, "r") as fp:
                config.read_file(fp)
        else:
            config.read_string(string)
        return config

    @staticmethod
    def _display(header, output, single=True):
        """Display captured output (only when BORGAPI_TEST_OUTPUT_DISPLAY is set)."""
        if getenv("BORGAPI_TEST_OUTPUT_DISPLAY"):
            print(header)
            if single:
                print(output)
            else:
                for name, value in output.items():
                    print(f"~~~~~~~~~~ {name} ~~~~~~~~~~")
                    print(value)
            print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
            print()

    @classmethod
    def setUpClass(cls):
        """Init environment for borg use."""
        cls.temp = "test/temp"
        cls.data = join(cls.temp, "data")
        cls.repo = join(cls.temp, "repo")
        cls.logs = join(cls.temp, "logs")

        cls.archive = f"{cls.repo}::1"

        cls._try_pass(FileNotFoundError, rmtree, cls.data)
        cls._try_pass(FileNotFoundError, rmtree, cls.repo)
        if not getenv("BORGAPI_TEST_KEEP_LOGS"):
            cls._try_pass(FileNotFoundError, rmtree, cls.logs)

        cls.file_1 = join(cls.data, "file_1.txt")
        cls.file_1_text = "Hello World"
        cls.file_2 = join(cls.data, "file_2.txt")
        cls.file_2_text = "Goodbye Fools"
        cls.file_3 = join(cls.data, "file_3.txt")
        cls.file_3_text = "New File Added"

        cls._try_pass(FileExistsError, makedirs, cls.data)
        cls._try_pass(FileExistsError, makedirs, cls.repo)
        cls._try_pass(FileExistsError, makedirs, cls.logs)
        load_dotenv("test/res/test_env")

    @classmethod
    def tearDownClass(cls):
        """Remove temp directory."""
        if not getenv("BORGAPI_TEST_KEEP_LOGS") and not getenv("BORGAPI_TEST_KEEP_TEMP"):
            cls._try_pass(FileNotFoundError, rmtree, cls.temp)
            cls._try_pass(OSError, rmtree, cls.temp)

    def _setUp(self):
        # Shared (sync/async) fixture setup: recreate the working dirs and
        # the two seed files; remove the third file left by earlier tests.
        self._try_pass(FileExistsError, makedirs, self.temp)
        self._try_pass(FileExistsError, makedirs, self.data)
        self._try_pass(FileExistsError, makedirs, self.repo)
        self._try_pass(FileExistsError, makedirs, self.logs)

        with open(self.file_1, "w") as fp:
            fp.write(self.file_1_text)
        with open(self.file_2, "w") as fp:
            fp.write(self.file_2_text)

        self._try_pass(FileNotFoundError, remove, self.file_3)

    def setUp(self):
        """Init files data."""
        self._setUp()

        self.api = BorgAPI()
        self.api.init(self.repo)

    def tearDown(self):
        """Reset mess made."""
        if not getenv("BORGAPI_TEST_KEEP_TEMP"):
            self._try_pass(FileNotFoundError, rmtree, self.data)
            self._try_pass(OSError, rmtree, self.repo)
            self._try_pass(FileNotFoundError, rmtree, self.repo)
            if not getenv("BORGAPI_TEST_KEEP_LOGS"):
                self._try_pass(FileNotFoundError, rmtree, self.logs)
                self._try_pass(OSError, rmtree, self.temp)
                self._try_pass(FileNotFoundError, rmtree, self.temp)

    def _create_default(self):
        # Convenience: create the default archive from the seed data.
        self.api.create(self.archive, self.data)


class BorgapiAsyncTests(unittest.IsolatedAsyncioTestCase, BorgapiTests):
    """Test for the borgbackup api with async methods."""

    def setUp(self):
        """Init files data."""
        self._setUp()

    async def asyncSetUp(self):
        """Init files data for async use."""
        self.api = BorgAPIAsync()
        await self.api.init(self.repo)

    async def _create_default(self):
        # Convenience: create the default archive from the seed data.
        await self.api.create(self.archive, self.data)
"""Test borgapi module."""

import logging
import unittest
from os import getenv

from . import BorgapiAsyncTests, BorgapiTests


class SingleTests(BorgapiTests):
    """Simple command and class-method tests."""

    def test_01_borgapi_logger(self):
        """Verify loggers are setup correctly for borgapi."""
        known_loggers = logging.root.manager.loggerDict
        self.assertIn("borgapi", known_loggers, "borgapi logger not present")
        self.assertIn("borg", known_loggers, "borg logger not present")

    def test_02_set_environ(self):
        """Set new env variable."""
        key = "TEST_VARIABLE"

        # From a dotenv-style file on disk.
        with open(self.file_3, "w") as fh:
            fh.write(f"{key}={self.file_3_text}")
        self.api.set_environ(filename=self.file_3)
        self.assertEqual(getenv(key), self.file_3_text)

        # From an explicit dictionary.
        self.api.set_environ(dictionary={key: self.file_1_text})
        self.assertEqual(getenv(key), self.file_1_text)

        # From keyword arguments.
        self.api.set_environ(**{key: self.file_2_text})
        self.assertEqual(getenv(key), self.file_2_text)

    def test_03_unset_environ(self):
        """Remove env variable."""
        key = "TEST_VARIABLE"

        self.api.set_environ(**{key: self.file_1_text})
        self.assertEqual(getenv(key), self.file_1_text)
        self.api.unset_environ(key)
        self.assertFalse(getenv(key))

        # With no argument, unset clears what the last file load set.
        with open(self.file_3, "w") as fh:
            fh.write(f"{key}={self.file_3_text}")
        self.api.set_environ(filename=self.file_3)
        self.api.unset_environ()
        self.assertFalse(getenv(key))

    @unittest.skip("WIP: Don't know what locking would be used for")
    def test_04_lock(self):
        """Don't know what locking would be used for, so don't know how to test."""


class SingleAsyncTests(BorgapiAsyncTests):
    """Simple command and class-method tests (async API)."""

    async def test_01_set_environ(self):
        """Set new env variable."""
        key = "TEST_VARIABLE"

        # From a dotenv-style file on disk.
        with open(self.file_3, "w") as fh:
            fh.write(f"{key}={self.file_3_text}")
        await self.api.set_environ(filename=self.file_3)
        self.assertEqual(getenv(key), self.file_3_text)

        # From an explicit dictionary.
        await self.api.set_environ(dictionary={key: self.file_1_text})
        self.assertEqual(getenv(key), self.file_1_text)

        # From keyword arguments.
        await self.api.set_environ(**{key: self.file_2_text})
        self.assertEqual(getenv(key), self.file_2_text)

    async def test_02_unset_environ(self):
        """Remove env variable."""
        key = "TEST_VARIABLE"

        await self.api.set_environ(**{key: self.file_1_text})
        self.assertEqual(getenv(key), self.file_1_text)
        await self.api.unset_environ(key)
        self.assertFalse(getenv(key))

        # With no argument, unset clears what the last file load set.
        with open(self.file_3, "w") as fh:
            fh.write(f"{key}={self.file_3_text}")
        await self.api.set_environ(filename=self.file_3)
        await self.api.unset_environ()
        self.assertFalse(getenv(key))


# -------------------------------------------------------------------------- #
# test/borgapi/test_02_init.py
# -------------------------------------------------------------------------- #

"""Test init command."""

from os import urandom
from os.path import join

from borg.repository import Repository

from . import BorgapiAsyncTests, BorgapiTests
class InitTests(BorgapiTests):
    """Init command tests."""

    def setUp(self):
        """Prepare data for init tests."""
        super().setUp()
        # Base setUp already created a repo; wipe it so each test inits fresh.
        self._make_clean(self.repo)

    def test_01_basic(self):
        """Initialize a new repository."""
        self.api.init(self.repo)
        self.assertFileExists(join(self.repo, "README"))

    def test_02_already_exists(self):
        """Initialize a repo in a directory where one already exists."""
        self.api.init(self.repo)
        with self.assertRaises(
            Repository.AlreadyExists,
            msg="Duplicate repositroy overwrites old repo",
        ):
            self.api.init(self.repo)

    def test_03_path_exists(self):
        """Initialize a repo in a directory where other data exists."""
        self.api.init(self.repo)
        with self.assertRaises(
            Repository.PathAlreadyExists,
            msg="Repositroy overwrites directory with other data",
        ):
            self.api.init(self.data)

    def test_04_make_parent(self):
        """Init a repo where parents don't exist with different flags."""
        deep_repo = join(self.repo, "make/parents")

        with self.assertRaises(
            Repository.ParentPathDoesNotExist,
            msg="Repository made with missing directories",
        ):
            self.api.init(deep_repo)
        self.api.init(deep_repo, make_parent_dirs=True)
        listing = self.api.list(deep_repo, json=True)
        self.assertKeyExists("repository", listing, "Repository not initalzied")

    def test_05_storage_quota(self):
        """Limit the size of the repo."""
        self.api.init(self.repo, storage_quota="10M")
        # Write 10 MiB of incompressible data so the quota must trip.
        with open(self.file_3, "wb") as fh:
            fh.write(urandom(10 * 1024 * 1024))
        with self.assertRaises(
            Repository.StorageQuotaExceeded,
            msg="Stored more than quota allowed",
        ):
            self.api.create(self.archive, self.data)

    def test_06_append_only(self):
        """Repo in append only mode."""
        self.api.init(self.repo, append_only=True)
        raw = self.api.config(self.repo, list=True)
        config = self._read_config(raw)
        self.assertEqual(
            config["repository"]["append_only"],
            "1",
            "Repo not in append_only mode",
        )


class InitAsyncTests(BorgapiAsyncTests):
    """Init command tests (async API)."""

    def setUp(self):
        """Prepare data for async init tests."""
        super().setUp()

    async def asyncSetUp(self):
        """Prepare async data for async init tests."""
        await super().asyncSetUp()
        # The repo wipe happens here (not in setUp) so it follows the async init.
        self._make_clean(self.repo)

    async def test_01_basic(self):
        """Initialize a new repository."""
        await self.api.init(self.repo)
        self.assertFileExists(join(self.repo, "README"))

    async def test_02_already_exists(self):
        """Initialize a repo in a directory where one already exists."""
        await self.api.init(self.repo)
        with self.assertRaises(
            Repository.AlreadyExists,
            msg="Duplicate repositroy overwrites old repo",
        ):
            await self.api.init(self.repo)

    async def test_03_path_exists(self):
        """Initialize a repo in a directory where other data exists."""
        await self.api.init(self.repo)
        with self.assertRaises(
            Repository.PathAlreadyExists,
            msg="Repositroy overwrites directory with other data",
        ):
            await self.api.init(self.data)

    async def test_04_make_parent(self):
        """Init a repo where parents don't exist with different flags."""
        deep_repo = join(self.repo, "make/parents")

        with self.assertRaises(
            Repository.ParentPathDoesNotExist,
            msg="Repository made with missing directories",
        ):
            await self.api.init(deep_repo)
        await self.api.init(deep_repo, make_parent_dirs=True)
        listing = await self.api.list(deep_repo, json=True)
        self.assertKeyExists("repository", listing, "Repository not initalzied")

    async def test_05_storage_quota(self):
        """Limit the size of the repo."""
        await self.api.init(self.repo, storage_quota="10M")
        # Write 10 MiB of incompressible data so the quota must trip.
        with open(self.file_3, "wb") as fh:
            fh.write(urandom(10 * 1024 * 1024))
        with self.assertRaises(
            Repository.StorageQuotaExceeded,
            msg="Stored more than quota allowed",
        ):
            await self.api.create(self.archive, self.data)

    async def test_06_append_only(self):
        """Repo in append only mode."""
        await self.api.init(self.repo, append_only=True)
        raw = await self.api.config(self.repo, list=True)
        config = self._read_config(raw)
        self.assertEqual(
            config["repository"]["append_only"],
            "1",
            "Repo not in append_only mode",
        )


# -------------------------------------------------------------------------- #
# test/borgapi/test_03_create.py
# -------------------------------------------------------------------------- #

"""Test create command."""

import sys
from io import BytesIO, TextIOWrapper

from borg.archive import Archive

from . import BorgapiAsyncTests, BorgapiTests
class CreateTests(BorgapiTests):
    """Create command tests."""

    def test_01_basic(self):
        """Create new archive."""
        self.api.create(self.archive, self.data)
        listing = self.api.list(self.repo, json=True)
        self.assertEqual(len(listing["archives"]), 1, "Archive not saved")
        self.assertEqual(
            listing["archives"][0]["name"],
            "1",
            "Archive name does not match set name",
        )

    def test_02_second(self):
        """Create second archive after data modification."""
        self.api.create(self.archive, self.data)
        with open(self.file_3, "w+") as fh:
            fh.write("New Data")
        self.api.create(f"{self.repo}::2", self.data)

        listing = self.api.list(self.repo, json=True)
        self.assertEqual(len(listing["archives"]), 2, "Multiple archives not saved")
        for index, expected in enumerate(("1", "2")):
            self.assertEqual(
                listing["archives"][index]["name"],
                expected,
                "Archive name does not match set name",
            )

    def test_03_already_exists(self):
        """Create an archive with an existing name."""
        self.api.create(self.archive, self.data)
        with self.assertRaises(Archive.AlreadyExists):
            self.api.create(self.archive, self.data)

    def test_04_stdin(self):
        """Read input from stdin and save to archive."""
        fake_stdin = TextIOWrapper(BytesIO(bytes(self.file_3_text, "utf-8")))
        sys.stdin = fake_stdin
        name = "file_3_stdin.txt"
        mode = "0777"  # "-rwxrwxrwx"

        try:
            self.api.create(
                self.archive,
                "-",
                stdin_name=name,
                stdin_mode=mode,
            )
        finally:
            # Always restore the real stdin, even if create blows up.
            fake_stdin.close()
            sys.stdin = sys.__stdin__

        entry = self.api.list(self.archive, json_lines=True)
        self.assertEqual(entry["path"], name, "Unexpected file name")
        self.assertEqual(entry["mode"], "-rwxrwxrwx", "Unexpected file mode")

    # NOTE(review): numbering collides with test_04_stdin above; names kept
    # as-is because test method names are the public test IDs.
    def test_04_output_string(self):
        """Create string info."""
        result = self.api.create(self.archive, self.data, stats=True, list=True)
        self._display("create string info", result, False)
        self.assertType(result["stats"], str)
        self.assertType(result["list"], str)

    def test_05_output_json(self):
        """Create json info."""
        result = self.api.create(
            self.archive,
            self.data,
            json=True,
            log_json=True,
            list=True,
        )
        self._display("create string info", result, False)
        self.assertType(result["stats"], dict)
        self.assertAnyType(result["list"], list, dict)

    def test_06_output_mixed_1(self):
        """Create mixed output (stats json, list string)."""
        result = self.api.create(self.archive, self.data, json=True, list=True)
        self._display("create mixed output (stats json, list string)", result, False)
        self.assertType(result["stats"], dict)
        self.assertType(result["list"], str)

    def test_07_output_mixed_2(self):
        """Create mixed output (stats string, list json)."""
        result = self.api.create(
            self.archive,
            self.data,
            stats=True,
            log_json=True,
            list=True,
        )
        self._display("create mixed output (stats string, list json)", result, False)
        self.assertType(result["stats"], str)
        self.assertAnyType(result["list"], list, dict)

    def test_08_list_string(self):
        """Create list string."""
        result = self.api.create(self.archive, self.data, list=True)
        self._display("create list string", result)
        self.assertType(result, str)

    def test_09_stats_json(self):
        """Create stats json."""
        result = self.api.create(self.archive, self.data, json=True)
        self._display("create stats json", result)
        self.assertType(result, dict)

    def test_10_list_json(self):
        """Create list json."""
        result = self.api.create(
            self.archive,
            self.data,
            log_json=True,
            list=True,
        )
        self._display("create list json", result)
        self.assertAnyType(result, list, dict)

    def test_11_stats_string(self):
        """Create stats string."""
        result = self.api.create(self.archive, self.data, stats=True)
        self._display("create stats string", result)
        self.assertType(result, str)


class CreateAsyncTests(BorgapiAsyncTests):
    """Create command tests (async API)."""

    async def test_01_basic(self):
        """Create new archive."""
        await self.api.create(self.archive, self.data)
        listing = await self.api.list(self.repo, json=True)
        self.assertEqual(len(listing["archives"]), 1, "Archive not saved")
        self.assertEqual(
            listing["archives"][0]["name"],
            "1",
            "Archive name does not match set name",
        )

    async def test_02_second(self):
        """Create second archive after data modification."""
        await self.api.create(self.archive, self.data)
        with open(self.file_3, "w+") as fh:
            fh.write("New Data")
        await self.api.create(f"{self.repo}::2", self.data)

        listing = await self.api.list(self.repo, json=True)
        self.assertEqual(len(listing["archives"]), 2, "Multiple archives not saved")
        for index, expected in enumerate(("1", "2")):
            self.assertEqual(
                listing["archives"][index]["name"],
                expected,
                "Archive name does not match set name",
            )

    async def test_03_already_exists(self):
        """Create an archive with an existing name."""
        await self.api.create(self.archive, self.data)
        with self.assertRaises(Archive.AlreadyExists):
            await self.api.create(self.archive, self.data)

    async def test_04_stdin(self):
        """Read input from stdin and save to archive."""
        fake_stdin = TextIOWrapper(BytesIO(bytes(self.file_3_text, "utf-8")))
        sys.stdin = fake_stdin
        name = "file_3_stdin.txt"
        mode = "0777"  # "-rwxrwxrwx"

        try:
            await self.api.create(
                self.archive,
                "-",
                stdin_name=name,
                stdin_mode=mode,
            )
        finally:
            # Always restore the real stdin, even if create blows up.
            fake_stdin.close()
            sys.stdin = sys.__stdin__

        entry = await self.api.list(self.archive, json_lines=True)
        self.assertEqual(entry["path"], name, "Unexpected file name")
        self.assertEqual(entry["mode"], "-rwxrwxrwx", "Unexpected file mode")

    # NOTE(review): numbering collides with test_04_stdin above; names kept
    # as-is because test method names are the public test IDs.
    async def test_04_output_string(self):
        """Create string info."""
        result = await self.api.create(self.archive, self.data, stats=True, list=True)
        self._display("create string info", result, False)
        self.assertType(result["stats"], str)
        self.assertType(result["list"], str)

    async def test_05_output_json(self):
        """Create json info."""
        result = await self.api.create(
            self.archive,
            self.data,
            json=True,
            log_json=True,
            list=True,
        )
        self._display("create string info", result, False)
        self.assertType(result["stats"], dict)
        self.assertAnyType(result["list"], list, dict)

    async def test_06_output_mixed_1(self):
        """Create mixed output (stats json, list string)."""
        result = await self.api.create(self.archive, self.data, json=True, list=True)
        self._display("create mixed output (stats json, list string)", result, False)
        self.assertType(result["stats"], dict)
        self.assertType(result["list"], str)

    async def test_07_output_mixed_2(self):
        """Create mixed output (stats string, list json)."""
        result = await self.api.create(
            self.archive,
            self.data,
            stats=True,
            log_json=True,
            list=True,
        )
        self._display("create mixed output (stats string, list json)", result, False)
        self.assertType(result["stats"], str)
        self.assertAnyType(result["list"], list, dict)

    async def test_08_list_string(self):
        """Create list string."""
        result = await self.api.create(self.archive, self.data, list=True)
        self._display("create list string", result)
        self.assertType(result, str)

    async def test_09_stats_json(self):
        """Create stats json."""
        result = await self.api.create(self.archive, self.data, json=True)
        self._display("create stats json", result)
        self.assertType(result, dict)

    async def test_10_list_json(self):
        """Create list json."""
        result = await self.api.create(
            self.archive,
            self.data,
            log_json=True,
            list=True,
        )
        self._display("create list json", result)
        self.assertAnyType(result, list, dict)

    async def test_11_stats_string(self):
        """Create stats string."""
        result = await self.api.create(self.archive, self.data, stats=True)
        self._display("create stats string", result)
        self.assertType(result, str)


# -------------------------------------------------------------------------- #
# test/borgapi/test_04_extract.py
# -------------------------------------------------------------------------- #

"""Test extract command."""

from os import remove

from . import BorgapiAsyncTests, BorgapiTests
class ExtractTests(BorgapiTests):
    """Extract command tests."""

    def setUp(self):
        """Prepare data for extract tests."""
        super().setUp()
        self._create_default()

    def test_01_basic(self):
        """Extract file."""
        remove(self.file_1)
        self.assertFileNotExists(self.file_1)

        self.api.extract(self.archive, self.file_1)
        self.assertFileExists(self.file_1)

    def test_02_not_exist(self):
        """Extract path that does not exist."""
        with self.assertLogs("borg", "WARNING") as captured:
            self.api.extract(self.archive, self.file_3)
        self.assertRegex(
            captured.records[0].getMessage(),
            r".*?file_3.*never",
            "Warning not logged for bad path",
        )

    def test_03_stdout(self):
        """Capture extracted file contents from stdout."""
        result = self.api.extract(self.archive, self.file_1, stdout=True)
        self.assertEqual(
            result,
            bytes(self.file_1_text, "utf-8"),
            "Extracted file text does not match",
        )

    def test_04_output_string(self):
        """List to log."""
        result = self.api.extract(self.archive, list=True, dry_run=True)
        self._display("extract 1", result)
        self.assertType(result, str)

    def test_05_output_json(self):
        """List to json."""
        result = self.api.extract(self.archive, log_json=True, list=True, dry_run=True)
        self._display("extract 2", result)
        self.assertAnyType(result, list, dict)


class ExtractAsyncTests(BorgapiAsyncTests):
    """Extract command tests (async API)."""

    async def asyncSetUp(self):
        """Prepare async data for async extract tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_basic(self):
        """Extract file."""
        remove(self.file_1)
        self.assertFileNotExists(self.file_1)

        await self.api.extract(self.archive, self.file_1)
        self.assertFileExists(self.file_1)

    async def test_02_not_exist(self):
        """Extract path that does not exist."""
        with self.assertLogs("borg", "WARNING") as captured:
            await self.api.extract(self.archive, self.file_3)
        self.assertRegex(
            captured.records[0].getMessage(),
            r".*?file_3.*never",
            "Warning not logged for bad path",
        )

    async def test_03_stdout(self):
        """Capture extracted file contents from stdout."""
        result = await self.api.extract(self.archive, self.file_1, stdout=True)
        self.assertEqual(
            result,
            bytes(self.file_1_text, "utf-8"),
            "Extracted file text does not match",
        )

    async def test_04_output_string(self):
        """List to log."""
        result = await self.api.extract(self.archive, list=True, dry_run=True)
        self._display("extract 1", result)
        self.assertType(result, str)

    async def test_05_output_json(self):
        """List to json."""
        result = await self.api.extract(self.archive, log_json=True, list=True, dry_run=True)
        self._display("extract 2", result)
        self.assertAnyType(result, list, dict)


# -------------------------------------------------------------------------- #
# test/borgapi/test_05_rename.py
# -------------------------------------------------------------------------- #

"""Test rename command."""

from borg.archive import Archive

from . import BorgapiAsyncTests, BorgapiTests


class RenameTests(BorgapiTests):
    """Rename command tests."""

    def setUp(self):
        """Prepare data for rename tests."""
        super().setUp()
        self._create_default()

    def test_01_basic(self):
        """Rename an archive."""
        before = self.api.list(self.repo, json=True)["archives"][0]["name"]
        self.api.rename(self.archive, "2")
        after = self.api.list(self.repo, json=True)["archives"][0]["name"]
        self.assertNotEqual(after, before, "Name change did not occur")
        self.assertEqual(after, "2", "Name did not change to expected output")

    def test_02_no_exist(self):
        """Rename nonexistant archive."""
        with self.assertRaises(
            Archive.DoesNotExist,
            msg="Renamed archive that does not exist",
        ):
            self.api.rename(f"{self.repo}::2", "3")


class RenameAsyncTests(BorgapiAsyncTests):
    """Rename command tests (async API)."""

    async def asyncSetUp(self):
        """Prepare async data for async rename tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_basic(self):
        """Rename an archive."""
        listing = await self.api.list(self.repo, json=True)
        before = listing["archives"][0]["name"]
        await self.api.rename(self.archive, "2")
        listing = await self.api.list(self.repo, json=True)
        after = listing["archives"][0]["name"]
        self.assertNotEqual(after, before, "Name change did not occur")
        self.assertEqual(after, "2", "Name did not change to expected output")

    async def test_02_no_exist(self):
        """Rename nonexistant archive."""
        with self.assertRaises(
            Archive.DoesNotExist,
            msg="Renamed archive that does not exist",
        ):
            await self.api.rename(f"{self.repo}::2", "3")
"""Test list command."""

from . import BorgapiAsyncTests, BorgapiTests


class ListTests(BorgapiTests):
    """List command tests."""

    def setUp(self):
        """Prepare data for list tests."""
        super().setUp()
        self._create_default()

    def test_01_basic(self):
        """List repo archives and archive files."""
        result = self.api.list(self.repo, json=True)
        self.assertEqual(len(result["archives"]), 1, "Unexpected number of archives returned")

        result = self.api.list(self.archive, json_lines=True)
        self.assertEqual(len(result), 3, "Unexpected number of files returned")

    def test_02_repo_basic(self):
        """List repo."""
        result = self.api.list(self.repo)
        self._display("list repo", result)
        self.assertType(result, str)

    def test_03_repo_short(self):
        """List repo short."""
        result = self.api.list(self.repo, short=True)
        self._display("list repo short", result)
        self.assertType(result, str)

    def test_04_repo_json(self):
        """List repo json."""
        result = self.api.list(self.repo, json=True)
        self._display("list repo json", result)
        self.assertAnyType(result, list, dict)
        result = self.api.list(self.repo, log_json=True)
        self._display("list repo log json", result)
        self.assertAnyType(result, str)

    def test_05_archive_basic(self):
        """List archive."""
        result = self.api.list(self.archive)
        self._display("list archive", result)
        self.assertType(result, str)

    def test_06_archive_short(self):
        """List archive short."""
        result = self.api.list(self.archive, short=True)
        self._display("list archive short", result)
        self.assertType(result, str)

    def test_07_archive_json(self):
        """List archive json."""
        result = self.api.list(self.archive, json_lines=True)
        self._display("list archive json", result)
        self.assertAnyType(result, list, dict)


class ListAsyncTests(BorgapiAsyncTests):
    """List command tests (async API)."""

    async def asyncSetUp(self):
        """Prepare async data for async list tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_basic(self):
        """List repo archives and archive files."""
        result = await self.api.list(self.repo, json=True)
        self.assertEqual(len(result["archives"]), 1, "Unexpected number of archives returned")

        result = await self.api.list(self.archive, json_lines=True)
        self.assertEqual(len(result), 3, "Unexpected number of files returned")

    async def test_02_repo_basic(self):
        """List repo."""
        result = await self.api.list(self.repo)
        self._display("list repo", result)
        self.assertType(result, str)

    async def test_03_repo_short(self):
        """List repo short."""
        result = await self.api.list(self.repo, short=True)
        self._display("list repo short", result)
        self.assertType(result, str)

    async def test_04_repo_json(self):
        """List repo json."""
        result = await self.api.list(self.repo, json=True)
        self._display("list repo json", result)
        self.assertAnyType(result, list, dict)
        result = await self.api.list(self.repo, log_json=True)
        self._display("list repo log json", result)
        self.assertAnyType(result, str)

    async def test_05_archive_basic(self):
        """List archive."""
        result = await self.api.list(self.archive)
        self._display("list archive", result)
        self.assertType(result, str)

    async def test_06_archive_short(self):
        """List archive short."""
        result = await self.api.list(self.archive, short=True)
        self._display("list archive short", result)
        self.assertType(result, str)

    async def test_07_archive_json(self):
        """List archive json."""
        result = await self.api.list(self.archive, json_lines=True)
        self._display("list archive json", result)
        self.assertAnyType(result, list, dict)


# -------------------------------------------------------------------------- #
# test/borgapi/test_07_diff.py
# -------------------------------------------------------------------------- #

"""Test diff command."""

from . import BorgapiAsyncTests, BorgapiTests


class DiffTests(BorgapiTests):
    """Diff command tests."""

    def setUp(self):
        """Prepare data for diff tests."""
        super().setUp()
        self._create_default()

    def test_01_add_file(self):
        """Diff new file."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        result = self.api.diff(self.archive, "2", json_lines=True)
        self.assertType(result, list)
        self.assertGreaterEqual(len(result), 2)
        change_types = {entry["changes"][0]["type"] for entry in result}
        self.assertIn("added", change_types, "New file not listed as added")

    def test_02_modify_file(self):
        """Diff modified file."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        with open(self.file_2, "w") as fh:
            fh.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        result = self.api.diff(self.archive, "2", json_lines=True, sort=True)
        self.assertType(result, list)
        modded = next((entry for entry in result if entry["path"] == self.file_2), None)
        if modded is None:
            raise AssertionError("File expected to change, but did not")
        self.assertEqual(modded["changes"][0]["type"], "modified", "Unexpected change type")

    def test_03_output(self):
        """Diff string."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        result = self.api.diff(self.archive, "2")
        self._display("diff sting", result)
        self.assertType(result, str)

    def test_04_output_json(self):
        """Diff json."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        result = self.api.diff(self.archive, "2", log_json=True)
        self._display("diff log json", result)
        self.assertAnyType(result, str)


class DiffAsyncTests(BorgapiAsyncTests):
    """Diff command tests (async API)."""

    async def asyncSetUp(self):
        """Prepare async data for async diff tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_add_file(self):
        """Diff new file."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        result = await self.api.diff(self.archive, "2", json_lines=True)
        self.assertType(result, list)
        self.assertGreaterEqual(len(result), 2)
        change_types = {entry["changes"][0]["type"] for entry in result}
        self.assertIn("added", change_types, "New file not listed as added")

    async def test_02_modify_file(self):
        """Diff modified file."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        with open(self.file_2, "w") as fh:
            fh.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        result = await self.api.diff(self.archive, "2", json_lines=True, sort=True)
        self.assertType(result, list)
        modded = next((entry for entry in result if entry["path"] == self.file_2), None)
        if modded is None:
            raise AssertionError("File expected to change, but did not")
        self.assertEqual(modded["changes"][0]["type"], "modified", "Unexpected change type")

    async def test_03_output(self):
        """Diff string."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        result = await self.api.diff(self.archive, "2")
        self._display("diff sting", result)
        self.assertType(result, str)

    async def test_04_output_json(self):
        """Diff json."""
        with open(self.file_3, "w") as fh:
            fh.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        result = await self.api.diff(self.archive, "2", log_json=True)
        self._display("diff log json", result)
        self.assertAnyType(result, str)


# -------------------------------------------------------------------------- #
# test/borgapi/test_08_delete.py
# -------------------------------------------------------------------------- #

"""Test delete command."""

import unittest

from borg.archive import Archive
from borg.repository import Repository

from . import BorgapiAsyncTests, BorgapiTests
import BorgapiAsyncTests, BorgapiTests 9 | 10 | 11 | class DeleteTests(BorgapiTests): 12 | """Delete command tests.""" 13 | 14 | def test_01_repository(self): 15 | """Delete repository.""" 16 | self._create_default() 17 | self.api.delete(self.repo) 18 | self.assertRaises( 19 | Repository.DoesNotExist, 20 | self.api.list, 21 | self.repo, 22 | msg="Deleted repository still exists", 23 | ) 24 | 25 | # pylint: disable=invalid-name 26 | def test_02_repository_not_exist(self): 27 | """Delete repository that doesn't exist.""" 28 | self._make_clean(self.repo) 29 | self.assertRaises( 30 | Repository.InvalidRepository, 31 | self.api.delete, 32 | self.repo, 33 | msg="Deleted nonexistant repository", 34 | ) 35 | 36 | def test_03_archive(self): 37 | """Delete archive.""" 38 | self._create_default() 39 | self.api.delete(self.archive) 40 | self.assertRaises( 41 | Archive.DoesNotExist, 42 | self.api.list, 43 | self.archive, 44 | msg="Deleted archive still exists", 45 | ) 46 | 47 | def test_04_archive_not_exist(self): 48 | """Delete archvie that doesn't exist.""" 49 | with self.assertLogs("borg", "WARNING") as logger: 50 | self.api.delete(self.archive) 51 | 52 | message = logger.records[0].getMessage() 53 | self.assertRegex( 54 | message, 55 | r".*?1.*not found", 56 | "Warning not logged for bad archive name", 57 | ) 58 | 59 | def test_05_stats_string(self): 60 | """Archvie stats string.""" 61 | self._create_default() 62 | output = self.api.delete(self.archive, stats=True) 63 | self._display("delete 1", output) 64 | self.assertType(output, str) 65 | 66 | @unittest.skip("delete has no json option for stats") 67 | def test_06_stats_json(self): 68 | """Archvie stats json.""" 69 | self._create_default() 70 | output = self.api.delete(self.archive, stats=True, log_json=True) 71 | self._display("delete 2", output) 72 | self.assertType(output, list, dict) 73 | 74 | 75 | class DeleteAsyncTests(BorgapiAsyncTests): 76 | """Delete command tests.""" 77 | 78 | async def test_01_repository(self): 
79 | """Delete repository.""" 80 | self._create_default() 81 | await self.api.delete(self.repo) 82 | with self.assertRaises( 83 | Repository.DoesNotExist, 84 | msg="Deleted repository still exists", 85 | ): 86 | await self.api.list(self.repo) 87 | 88 | # pylint: disable=invalid-name 89 | async def test_02_repository_not_exist(self): 90 | """Delete repository that doesn't exist.""" 91 | self._make_clean(self.repo) 92 | with self.assertRaises( 93 | Repository.InvalidRepository, 94 | msg="Deleted nonexistant repository", 95 | ): 96 | await self.api.delete(self.repo) 97 | 98 | async def test_03_archive(self): 99 | """Delete archive.""" 100 | self._create_default() 101 | await self.api.delete(self.archive) 102 | with self.assertRaises( 103 | Archive.DoesNotExist, 104 | msg="Deleted archive still exists", 105 | ): 106 | await self.api.list(self.archive) 107 | 108 | async def test_04_archive_not_exist(self): 109 | """Delete archvie that doesn't exist.""" 110 | with self.assertLogs("borg", "WARNING") as logger: 111 | await self.api.delete(self.archive) 112 | 113 | message = logger.records[0].getMessage() 114 | self.assertRegex( 115 | message, 116 | r".*?1.*not found", 117 | "Warning not logged for bad archive name", 118 | ) 119 | 120 | async def test_05_stats_string(self): 121 | """Archvie stats string.""" 122 | self._create_default() 123 | output = await self.api.delete(self.archive, stats=True) 124 | self._display("delete 1", output) 125 | self.assertType(output, str) 126 | 127 | @unittest.skip("delete has no json option for stats") 128 | async def test_06_stats_json(self): 129 | """Archvie stats json.""" 130 | self._create_default() 131 | output = await self.api.delete(self.archive, stats=True, log_json=True) 132 | self._display("delete 2", output) 133 | self.assertType(output, list, dict) 134 | -------------------------------------------------------------------------------- /test/borgapi/test_09_prune.py: 
"""Test prune command."""

import asyncio
from os import remove
from time import sleep

from . import BorgapiAsyncTests, BorgapiTests


class PruneTests(BorgapiTests):
    """Prune command tests."""

    def setUp(self):
        """Create five archives, one second apart, to give prune some history."""
        super().setUp()
        self._create_default()
        sleep(1)
        with open(self.file_3, "w") as fp:
            fp.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        sleep(1)
        remove(self.file_1)
        self.api.create(f"{self.repo}::3", self.data)
        sleep(1)
        with open(self.file_2, "w") as fp:
            fp.write(self.file_1_text)
        self.api.create(f"{self.repo}::4", self.data)
        sleep(1)
        remove(self.file_2)
        self.api.create(f"{self.repo}::5", self.data)
        sleep(1)

    # pylint: disable=invalid-sequence-index
    def test_01_basic(self):
        """Prune archives down to the last three."""
        self.api.prune(self.repo, keep_last="3")
        output = self.api.list(self.repo, json=True)
        num_archives = len(output["archives"])
        self.assertEqual(num_archives, 3, "Unexpected number of archives pruned")

    def test_02_output_list(self):
        """Prune list output is a string."""
        output = self.api.prune(self.repo, keep_last="3", dry_run=True, list=True)
        self._display("prune list", output)
        self.assertType(output, str)

    def test_03_output_stats(self):
        """Prune stats output is a string."""
        output = self.api.prune(self.repo, keep_last="3", dry_run=True, stats=True)
        self._display("prune stats", output)
        self.assertType(output, str)


class PruneAsyncTests(BorgapiAsyncTests):
    """Prune command tests (async API)."""

    async def asyncSetUp(self):
        """Create five archives, one second apart, to give prune some history."""
        await super().asyncSetUp()
        await self._create_default()
        # Use asyncio.sleep instead of time.sleep so the event loop is not
        # blocked while waiting between archive timestamps.
        await asyncio.sleep(1)
        with open(self.file_3, "w") as fp:
            fp.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        await asyncio.sleep(1)
        remove(self.file_1)
        await self.api.create(f"{self.repo}::3", self.data)
        await asyncio.sleep(1)
        with open(self.file_2, "w") as fp:
            fp.write(self.file_1_text)
        await self.api.create(f"{self.repo}::4", self.data)
        await asyncio.sleep(1)
        remove(self.file_2)
        await self.api.create(f"{self.repo}::5", self.data)
        await asyncio.sleep(1)

    # pylint: disable=invalid-sequence-index
    async def test_01_basic(self):
        """Prune archives down to the last three."""
        await self.api.prune(self.repo, keep_last="3")
        output = await self.api.list(self.repo, json=True)
        num_archives = len(output["archives"])
        self.assertEqual(num_archives, 3, "Unexpected number of archives pruned")

    async def test_02_output_list(self):
        """Prune list output is a string."""
        output = await self.api.prune(self.repo, keep_last="3", dry_run=True, list=True)
        self._display("prune list", output)
        self.assertType(output, str)

    async def test_03_output_stats(self):
        """Prune stats output is a string."""
        output = await self.api.prune(self.repo, keep_last="3", dry_run=True, stats=True)
        self._display("prune stats", output)
        self.assertType(output, str)
"""Test info command."""

from . import BorgapiAsyncTests, BorgapiTests


class InfoTests(BorgapiTests):
    """Info command tests."""

    def setUp(self):
        """Prepare data for info tests."""
        super().setUp()
        self._create_default()

    def test_01_repository(self):
        """Repository info."""
        # Repository-level info carries cache data but no archive listing.
        result = self.api.info(self.repo, json=True)
        self.assertKeyExists("cache", result)
        self.assertKeyNotExists("archives", result)

    def test_02_archive(self):
        """Archive info."""
        # Archive-level info carries both cache data and archive details.
        result = self.api.info(self.archive, json=True)
        self.assertKeyExists("cache", result)
        self.assertKeyExists("archives", result)

    def test_03_repo_string(self):
        """Repo output string."""
        result = self.api.info(self.repo)
        self._display("info repo string", result)
        self.assertType(result, str)

    def test_04_repo_json(self):
        """Repo output json."""
        result = self.api.info(self.repo, json=True)
        self._display("info repo json", result)
        self.assertAnyType(result, list, dict)

        # log_json only affects log formatting, so the payload stays a string.
        result = self.api.info(self.repo, log_json=True)
        self._display("info repo log json", result)
        self.assertType(result, str)

    def test_05_archive_string(self):
        """Archive output string."""
        result = self.api.info(self.archive)
        self._display("info archive string", result)
        self.assertType(result, str)

    def test_06_archive_json(self):
        """Archive output json."""
        result = self.api.info(self.archive, json=True)
        self._display("info archive json", result)
        self.assertAnyType(result, list, dict)


class InfoAsyncTests(BorgapiAsyncTests):
    """Info command tests."""

    async def asyncSetUp(self):
        """Prepare async data for info tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_repository(self):
        """Repository info."""
        result = await self.api.info(self.repo, json=True)
        self.assertKeyExists("cache", result)
        self.assertKeyNotExists("archives", result)

    async def test_02_archive(self):
        """Archive info."""
        result = await self.api.info(self.archive, json=True)
        self.assertKeyExists("cache", result)
        self.assertKeyExists("archives", result)

    async def test_03_repo_string(self):
        """Repo output string."""
        result = await self.api.info(self.repo)
        self._display("info repo string", result)
        self.assertType(result, str)

    async def test_04_repo_json(self):
        """Repo output json."""
        result = await self.api.info(self.repo, json=True)
        self._display("info repo json", result)
        self.assertAnyType(result, list, dict)

        result = await self.api.info(self.repo, log_json=True)
        self._display("info repo log json", result)
        self.assertType(result, str)

    async def test_05_archive_string(self):
        """Archive output string."""
        result = await self.api.info(self.archive)
        self._display("info archive string", result)
        self.assertType(result, str)

    async def test_06_archive_json(self):
        """Archive output json."""
        result = await self.api.info(self.archive, json=True)
        self._display("info archive json", result)
        self.assertAnyType(result, list, dict)
"""Test mount and unmount commands."""

from os import getenv
from os.path import join
from shutil import rmtree
from time import sleep

from . import BorgapiAsyncTests, BorgapiTests


class MountTests(BorgapiTests):
    """Mount and Unmount command tests."""

    @classmethod
    def setUpClass(cls):
        """Prepare class data for mount tests."""
        super().setUpClass()
        cls.mountpoint = join(cls.temp, "mount")
        # Paths where file_1 should appear once the repo/archive is mounted.
        cls.repo_file = join(cls.mountpoint, "1", cls.file_1)
        cls.archive_file = join(cls.mountpoint, cls.file_1)

    def setUp(self):
        """Prepare data for mount tests."""
        if getenv("BORGAPI_TEST_MOUNT_SKIP"):
            self.skipTest("llfuse not setup")
        super().setUp()
        self._create_default()
        self._make_clean(self.mountpoint)

    def tearDown(self):
        """Remove data created for mount tests."""
        if not getenv("BORGAPI_TEST_KEEP_TEMP"):
            rmtree(self.mountpoint)
        super().tearDown()

    def test_01_repository(self):
        """Mount and unmount a repository."""
        result = self.api.mount(self.repo, self.mountpoint)
        # Give the FUSE mount time to appear before inspecting it.
        sleep(5)
        self.assertTrue(result["pid"] != 0)
        self.assertFileExists(self.repo_file)
        self.api.umount(self.mountpoint)
        self.assertFileNotExists(self.repo_file)
        sleep(3)

    def test_02_archive(self):
        """Mount and unmount a archive."""
        result = self.api.mount(self.archive, self.mountpoint)
        sleep(5)
        self.assertTrue(result["pid"] != 0)
        self.assertFileExists(self.archive_file)
        self.api.umount(self.mountpoint)
        self.assertFileNotExists(self.archive_file)
        sleep(3)


class MountAsyncTests(BorgapiAsyncTests):
    """Mount and Unmount command tests."""

    @classmethod
    def setUpClass(cls):
        """Prepare class data for mount tests."""
        super().setUpClass()
        cls.mountpoint = join(cls.temp, "mount")
        cls.repo_file = join(cls.mountpoint, "1", cls.file_1)
        cls.archive_file = join(cls.mountpoint, cls.file_1)

    async def asyncSetUp(self):
        """Prepare async data for mount tests."""
        if getenv("BORGAPI_TEST_MOUNT_SKIP"):
            self.skipTest("llfuse not setup")
        await super().asyncSetUp()
        await self._create_default()
        self._make_clean(self.mountpoint)

    def tearDown(self):
        """Remove data created for async mount tests."""
        if not getenv("BORGAPI_TEST_KEEP_TEMP"):
            rmtree(self.mountpoint)
        super().tearDown()

    async def test_01_repository(self):
        """Mount and unmount a repository."""
        result = await self.api.mount(self.repo, self.mountpoint)
        sleep(5)
        self.assertTrue(result["pid"] != 0)
        self.assertFileExists(self.repo_file)
        await self.api.umount(self.mountpoint)
        self.assertFileNotExists(self.repo_file)
        sleep(3)

    async def test_02_archive(self):
        """Mount and unmount a archive."""
        result = await self.api.mount(self.archive, self.mountpoint)
        sleep(5)
        self.assertTrue(result["pid"] != 0)
        self.assertFileExists(self.archive_file)
        await self.api.umount(self.mountpoint)
        self.assertFileNotExists(self.archive_file)
        sleep(3)
"""Test key commands."""

from os.path import join
from shutil import rmtree

from . import BorgapiAsyncTests, BorgapiTests


class KeyTests(BorgapiTests):
    """Key command tests."""

    @classmethod
    def setUpClass(cls):
        """Prepare class data for key tests."""
        super().setUpClass()
        cls.export_dir = join(cls.temp, "export")
        cls.key_file = join(cls.export_dir, "key.txt")

    def setUp(self):
        """Prepare data for key tests."""
        super().setUp()
        self._make_clean(self.export_dir)
        self._create_default()

    def tearDown(self):
        """Remove data created for key tests."""
        rmtree(self.export_dir)
        super().tearDown()

    def test_01_change_passphrase(self):
        """Change key passphrase."""
        repo_config_file = join(self.repo, "config")
        repo_config = self._read_config(filename=repo_config_file)

        original_value = repo_config["repository"]["key"]

        # BORG_NEW_PASSPHRASE supplies the new passphrase non-interactively;
        # unset it afterwards so later tests are unaffected.
        self.api.set_environ(dictionary={"BORG_NEW_PASSPHRASE": "newpass"})
        self.api.key_change_passphrase(self.repo)
        self.api.unset_environ("BORG_NEW_PASSPHRASE")

        repo_config = self._read_config(filename=repo_config_file)
        key_change_value = repo_config["repository"]["key"]

        self.assertNotEqual(
            key_change_value,
            original_value,
            "Changed key matches original",
        )

    def test_02_export(self):
        """Export repo encryption key."""
        self.api.key_export(self.repo, self.key_file)
        self.assertFileExists(
            self.key_file,
            "Repo key not exported to expected location",
        )

    def test_03_export_paper(self):
        """Export repo encryption key in printable (paper) format."""
        self.api.key_export(self.repo, self.key_file, paper=True)
        self.assertFileExists(
            self.key_file,
            "Repo key not exported to expected location",
        )

    def test_04_import(self):
        """Import original key to repository."""
        self.api.key_export(self.repo, self.key_file)

        repo_config_file = join(self.repo, "config")
        repo_config = self._read_config(filename=repo_config_file)
        original_value = repo_config["repository"]["key"]

        self.api.key_import(self.repo, self.key_file)

        repo_config = self._read_config(filename=repo_config_file)
        restored_value = repo_config["repository"]["key"]

        self.assertEqual(
            restored_value,
            original_value,
            "Restored key does not match original",
        )


class KeyAsyncTests(BorgapiAsyncTests):
    """Key command tests (async API)."""

    @classmethod
    def setUpClass(cls):
        """Prepare class data for async key tests."""
        super().setUpClass()
        cls.export_dir = join(cls.temp, "export")
        cls.key_file = join(cls.export_dir, "key.txt")

    async def asyncSetUp(self):
        """Prepare async data for async key tests."""
        await super().asyncSetUp()
        self._make_clean(self.export_dir)
        await self._create_default()

    def tearDown(self):
        """Remove data created for async key tests."""
        rmtree(self.export_dir)
        super().tearDown()

    async def test_01_change_passphrase(self):
        """Change key passphrase."""
        repo_config_file = join(self.repo, "config")
        repo_config = self._read_config(filename=repo_config_file)

        original_value = repo_config["repository"]["key"]

        await self.api.set_environ(dictionary={"BORG_NEW_PASSPHRASE": "newpass"})
        await self.api.key_change_passphrase(self.repo)
        await self.api.unset_environ("BORG_NEW_PASSPHRASE")

        repo_config = self._read_config(filename=repo_config_file)
        key_change_value = repo_config["repository"]["key"]

        self.assertNotEqual(
            key_change_value,
            original_value,
            "Changed key matches original",
        )

    async def test_02_export(self):
        """Export repo encryption key."""
        await self.api.key_export(self.repo, self.key_file)
        self.assertFileExists(
            self.key_file,
            "Repo key not exported to expected location",
        )

    async def test_03_export_paper(self):
        """Export repo encryption key in printable (paper) format."""
        await self.api.key_export(self.repo, self.key_file, paper=True)
        self.assertFileExists(
            self.key_file,
            "Repo key not exported to expected location",
        )

    async def test_04_import(self):
        """Import original key to repository."""
        await self.api.key_export(self.repo, self.key_file)

        repo_config_file = join(self.repo, "config")
        repo_config = self._read_config(filename=repo_config_file)
        original_value = repo_config["repository"]["key"]

        await self.api.key_import(self.repo, self.key_file)

        repo_config = self._read_config(filename=repo_config_file)
        restored_value = repo_config["repository"]["key"]

        self.assertEqual(
            restored_value,
            original_value,
            "Restored key does not match original",
        )
class ExportTarTests(BorgapiTests):
    """Export Tar command tests."""

    def setUp(self):
        """Build a default archive plus a clean export directory."""
        super().setUp()
        self._create_default()

        self.export_dir = join(self.temp, "export")
        self._make_clean(self.export_dir)
        self.tar_file = join(self.export_dir, "export.tar")

    def tearDown(self):
        """Remove the export directory created for these tests."""
        rmtree(self.export_dir)
        super().tearDown()

    def test_01_basic(self):
        """Export tar file."""
        self.api.export_tar(self.archive, self.tar_file)
        self.assertFileExists(self.tar_file, "Tar file not exported")

    def test_02_stdout(self):
        """Export tar stdout."""
        # "-" routes the tar stream to stdout, captured as raw bytes.
        result = self.api.export_tar(self.archive, "-")
        self._display("export tar 2", result)
        self.assertType(result, bytes)

    def test_03_output_json(self):
        """Export tar output."""
        result = self.api.export_tar(self.archive, self.tar_file, list=True)
        self._display("export tar 1", result)
        self.assertType(result, str)
"""Test config command."""

from . import BorgapiAsyncTests, BorgapiTests


class ConfigTests(BorgapiTests):
    """Config command tests."""

    def setUp(self):
        """Prepare data for config tests."""
        super().setUp()
        self._create_default()

    def test_01_list(self):
        """List config values for repo."""
        listing = self.api.config(self.repo, list=True)
        self._display("config list", listing)
        self.assertType(listing, str)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["append_only"], "0", "Unexpected config value"
        )

    def test_02_value(self):
        """List config value."""
        value = self.api.config(self.repo, "additional_free_space")
        self._display("config value", value)
        self.assertType(value, str)
        self.assertEqual(value, "0", "Unexpected config value")

    def test_03_change(self):
        """Change config values in repo."""
        self.api.config(self.repo, ("append_only", "1"))
        listing = self.api.config(self.repo, list=True)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["append_only"], "1", "Unexpected config value"
        )

    def test_04_delete(self):
        """Delete config value from repo."""
        self.api.config(self.repo, "additional_free_space", delete=True)
        listing = self.api.config(self.repo, list=True)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["additional_free_space"],
            "False",
            "Unexpected config value",
        )


class ConfigAsyncTests(BorgapiAsyncTests):
    """Config command tests."""

    async def asyncSetUp(self):
        """Prepare async data for async config tests."""
        await super().asyncSetUp()
        await self._create_default()

    async def test_01_list(self):
        """List config values for repo."""
        listing = await self.api.config(self.repo, list=True)
        self._display("config list", listing)
        self.assertType(listing, str)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["append_only"], "0", "Unexpected config value"
        )

    async def test_02_value(self):
        """List config value."""
        value = await self.api.config(self.repo, "additional_free_space")
        self._display("config value", value)
        self.assertType(value, str)
        self.assertEqual(value, "0", "Unexpected config value")

    async def test_03_change(self):
        """Change config values in repo."""
        await self.api.config(self.repo, ("append_only", "1"))
        listing = await self.api.config(self.repo, list=True)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["append_only"], "1", "Unexpected config value"
        )

    async def test_04_delete(self):
        """Delete config value from repo."""
        await self.api.config(self.repo, "additional_free_space", delete=True)
        listing = await self.api.config(self.repo, list=True)
        parsed = self._read_config(listing)
        self.assertEqual(
            parsed["repository"]["additional_free_space"],
            "False",
            "Unexpected config value",
        )
"""Test benchmark crud command."""

import unittest
from os import getenv
from os.path import join
from shutil import rmtree

from . import BorgapiAsyncTests, BorgapiTests


class BenchmarkCrudTests(BorgapiTests):
    """Benchmark Crud command tests."""

    def setUp(self):
        """Prepare data for benchmark crud tests."""
        # Benchmarks are slow; allow skipping them via environment variable.
        if getenv("BORGAPI_TEST_BENCHMARK_SKIP"):
            self.skipTest("Gotta go fast (only use for quick testing, not release)")
        super().setUp()

    @unittest.skip("WIP: Keeps failing in Dropbox folder, but not regular folder")
    def test_01_output(self):
        """Benchmark CRUD operations."""
        benchmark_dir = join(self.temp, "benchmark")
        self._make_clean(benchmark_dir)
        output = self.api.benchmark_crud(self.repo, benchmark_dir)
        self._display("benchmark crud", output)
        self.assertType(output, str)
        rmtree(benchmark_dir)


class BenchmarkCrudAsyncTests(BorgapiAsyncTests):
    """Benchmark Crud command tests."""

    async def asyncSetUp(self):
        """Prepare async data for async benchmark crud tests."""
        if getenv("BORGAPI_TEST_BENCHMARK_SKIP"):
            self.skipTest("Gotta go fast (only use for quick testing, not release)")
        await super().asyncSetUp()

    @unittest.skip("WIP: Keeps failing in Dropbox folder, but not regular folder")
    async def test_01_output(self):
        """Benchmark CRUD operations."""
        benchmark_dir = join(self.temp, "benchmark")
        self._make_clean(benchmark_dir)
        output = await self.api.benchmark_crud(self.repo, benchmark_dir)
        self._display("benchmark crud", output)
        self.assertType(output, str)
        rmtree(benchmark_dir)
"""Test compact command."""

from . import BorgapiAsyncTests, BorgapiTests


class CompactTests(BorgapiTests):
    """Compact command tests."""

    def setUp(self):
        """Create two archives then delete one so compact has work to do."""
        super().setUp()
        self._create_default()
        with open(self.file_3, "w") as fp:
            fp.write(self.file_3_text)
        self.api.create(f"{self.repo}::2", self.data)
        self.api.delete(self.archive)

    def test_01_output(self):
        """Compact with no verbosity returns nothing."""
        output = self.api.compact(self.repo)
        self._display("compact string", output)
        self.assertNone(output)

    def test_02_output_verbose(self):
        """Compact verbose output is a string."""
        output = self.api.compact(self.repo, verbose=True)
        self._display("compact string verbose", output)
        self.assertType(output, str)

    def test_03_output_json(self):
        """Compact json output."""
        output = self.api.compact(self.repo, verbose=True, log_json=True)
        self._display("compact log json", output)
        self.assertAnyType(output, list, dict)


class CompactAsyncTests(BorgapiAsyncTests):
    """Compact command tests (async API)."""

    async def asyncSetUp(self):
        """Prepare async data for async compact tests."""
        await super().asyncSetUp()
        await self._create_default()
        with open(self.file_3, "w") as fp:
            fp.write(self.file_3_text)
        await self.api.create(f"{self.repo}::2", self.data)
        await self.api.delete(self.archive)

    async def test_01_output(self):
        """Compact with no verbosity returns nothing."""
        output = await self.api.compact(self.repo)
        self._display("compact string", output)
        self.assertNone(output)

    async def test_02_output_verbose(self):
        """Compact verbose output is a string."""
        output = await self.api.compact(self.repo, verbose=True)
        self._display("compact string verbose", output)
        self.assertType(output, str)

    async def test_03_output_json(self):
        """Compact json output."""
        output = await self.api.compact(self.repo, verbose=True, log_json=True)
        self._display("compact log json", output)
        self.assertAnyType(output, list, dict)
"""Test recreate command."""

from . import BorgapiAsyncTests, BorgapiTests


class RecreateTests(BorgapiTests):
    """Recreate command tests."""

    def test_01_basic(self):
        """Recreate archive with a different compression."""
        self.api.create(self.archive, self.data, compression="lz4")
        self.api.recreate(self.archive, recompress="always", compression="zlib,9", target="2")
        output = self.api.list(self.repo, json=True)
        num_archives = len(output["archives"])
        self.assertEqual(num_archives, 2, "Archive not recreated")

    def test_02_comment(self):
        """Change archive comment."""
        self.api.create(self.archive, self.data, comment="first")
        self.api.recreate(self.archive, comment="second")
        output = self.api.info(self.archive, json=True)
        comment = output["archives"][0]["comment"]
        self.assertEqual(comment, "second", "Archive comment not updated")

    def test_03_remove_file(self):
        """Remove an excluded file from every archive in the repository."""
        self.api.create(self.archive, self.data)
        with open(self.file_3, "w+") as fp:
            fp.write("New Data")
        archive_2 = f"{self.repo}::2"
        self.api.create(archive_2, self.data)
        self.api.recreate(self.repo, exclude=self.file_2)
        first = self.api.list(self.archive, json_lines=True)
        infirst = [v for v in first if v["path"] == self.file_2]
        self.assertEqual(len(first), 2, "Incorrect number of files in first archive.")
        self.assertEqual(len(infirst), 0, "Path removed from first archive.")
        second = self.api.list(archive_2, json_lines=True)
        # BUG FIX: previously filtered `first` again, so the second-archive
        # exclusion was never actually checked; filter `second` instead.
        insecond = [v for v in second if v["path"] == self.file_2]
        self.assertEqual(len(second), 3, "Incorrect number of files in second archive.")
        self.assertEqual(len(insecond), 0, "Path removed from second archive.")


class RecreateAsyncTests(BorgapiAsyncTests):
    """Recreate command tests (async API)."""

    async def test_01_basic(self):
        """Recreate archive with a different compression."""
        await self.api.create(self.archive, self.data, compression="lz4")
        await self.api.recreate(self.archive, recompress="always", compression="zlib,9", target="2")
        output = await self.api.list(self.repo, json=True)
        num_archives = len(output["archives"])
        self.assertEqual(num_archives, 2, "Archive not recreated")

    async def test_02_comment(self):
        """Change archive comment."""
        await self.api.create(self.archive, self.data, comment="first")
        await self.api.recreate(self.archive, comment="second")
        output = await self.api.info(self.archive, json=True)
        comment = output["archives"][0]["comment"]
        self.assertEqual(comment, "second", "Archive comment not updated")

    async def test_03_remove_file(self):
        """Remove an excluded file from every archive in the repository."""
        await self.api.create(self.archive, self.data)
        with open(self.file_3, "w+") as fp:
            fp.write("New Data")
        archive_2 = f"{self.repo}::2"
        await self.api.create(archive_2, self.data)
        await self.api.recreate(self.repo, exclude=self.file_2)
        first = await self.api.list(self.archive, json_lines=True)
        infirst = [v for v in first if v["path"] == self.file_2]
        self.assertEqual(len(first), 2, "Incorrect number of files in first archive.")
        self.assertEqual(len(infirst), 0, "Path removed from first archive.")
        second = await self.api.list(archive_2, json_lines=True)
        # BUG FIX: same copy-paste error as the sync version — filter `second`.
        insecond = [v for v in second if v["path"] == self.file_2]
        self.assertEqual(len(second), 3, "Incorrect number of files in second archive.")
        self.assertEqual(len(insecond), 0, "Path removed from second archive.")
--------------------------------------------------------------------------------
1 | """Test import tar command."""
2 | 
3 | import sys
4 | from io import BytesIO, TextIOWrapper
5 | from os import getenv
6 | from os.path import join
7 | from shutil import rmtree
8 | 
9 | from borg.archive import Archive
10 | 
11 | from . import BorgapiAsyncTests, BorgapiTests
12 | 
13 | 
14 | class ImportTarTests(BorgapiTests):
15 |     """Import Tar command tests."""
16 | 
17 |     def setUp(self):
18 |         """Prepare data for import tar tests."""
19 |         super().setUp()
20 |         self._create_default()
21 | 
22 |         self.export_dir = join(self.temp, "export")
23 |         self._make_clean(self.export_dir)
24 |         self.tar_file = join(self.export_dir, "export.tar")
25 | 
26 |     def tearDown(self):
27 |         """Remove data created for import tar tests."""
28 |         if not getenv("BORGAPI_TEST_KEEP_TEMP"):
29 |             rmtree(self.export_dir)
30 |         super().tearDown()
31 | 
32 |     def test_01_basic(self):
33 |         """Import tar file."""
34 |         archive_2 = f"{self.repo}::2"
35 |         self.api.export_tar(self.archive, self.tar_file)
36 |         self.assertRaises(Archive.DoesNotExist, self.api.info, archive_2)
37 |         self.api.import_tar(archive_2, self.tar_file)
38 |         output = self.api.info(archive_2, json=True)
39 |         name = output["archives"][0]["name"]
40 |         self.assertEqual(name, "2", "Archive not imported.")
41 | 
42 |     def test_02_stdin(self):
43 |         """Import tar file from stdin."""
44 |         archive_2 = f"{self.repo}::2"
45 |         self.api.export_tar(self.archive, self.tar_file)
46 |         self.assertRaises(Archive.DoesNotExist, self.api.info, archive_2)
47 |         with open(self.tar_file, "rb") as fp:
48 |             tar_data = fp.read()
49 |         temp_stdin = TextIOWrapper(BytesIO(tar_data))
50 |         sys.stdin = temp_stdin
51 |         try:
52 |             self.api.import_tar(archive_2, "-")
53 |         finally:
54 |             temp_stdin.close()
55 |             sys.stdin = sys.__stdin__
56 |         output = self.api.info(archive_2, json=True)
57 |         name = output["archives"][0]["name"]
58 |         self.assertEqual(name, "2", "Archive not imported.")
59 | 
60 | 
61 | class 
ImportTarAsyncTests(BorgapiAsyncTests):
62 |     """Import Tar command tests."""
63 | 
64 |     async def asyncSetUp(self):
65 |         """Prepare async data for async import tar tests."""
66 |         await super().asyncSetUp()
67 |         await self._create_default()
68 | 
69 |         self.export_dir = join(self.temp, "export")
70 |         self._make_clean(self.export_dir)
71 |         self.tar_file = join(self.export_dir, "export.tar")
72 | 
73 |     def tearDown(self):
74 |         """Remove async data created for async import tar tests."""
75 |         if not getenv("BORGAPI_TEST_KEEP_TEMP"):
76 |             rmtree(self.export_dir)
77 |         super().tearDown()
78 | 
79 |     async def test_01_basic(self):
80 |         """Import async tar file."""
81 |         archive_2 = f"{self.repo}::2"
82 |         await self.api.export_tar(self.archive, self.tar_file)
83 |         with self.assertRaises(Archive.DoesNotExist):
84 |             await self.api.info(archive_2)
85 |         await self.api.import_tar(archive_2, self.tar_file)
86 |         output = await self.api.info(archive_2, json=True)
87 |         name = output["archives"][0]["name"]
88 |         self.assertEqual(name, "2", "Archive not imported.")
89 | 
90 |     async def test_02_stdin(self):
91 |         """Import async tar file from stdin."""
92 |         archive_2 = f"{self.repo}::2"
93 |         await self.api.export_tar(self.archive, self.tar_file)
94 |         with self.assertRaises(Archive.DoesNotExist):
95 |             await self.api.info(archive_2)
96 |         with open(self.tar_file, "rb") as fp:
97 |             tar_data = fp.read()
98 |         temp_stdin = TextIOWrapper(BytesIO(tar_data))
99 |         sys.stdin = temp_stdin
100 |         try:
101 |             await self.api.import_tar(archive_2, "-")
102 |         finally:
103 |             temp_stdin.close()
104 |             sys.stdin = sys.__stdin__
105 |         output = await self.api.info(archive_2, json=True)
106 |         name = output["archives"][0]["name"]
107 |         self.assertEqual(name, "2", "Archive not imported.")
108 | 
--------------------------------------------------------------------------------
/test/res/test_env:
--------------------------------------------------------------------------------
1 | BORG_PASSCOMMAND="cat test/res/test_passphrase"
2 | BORG_LOGGING_CONF="test/res/test_logging.conf" 3 | BORG_CHECK_I_KNOW_WHAT_I_AM_DOING=YES 4 | BORG_DELETE_I_KNOW_WHAT_I_AM_DOING=YES -------------------------------------------------------------------------------- /test/res/test_logging.conf: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root,borg 3 | 4 | [logger_root] 5 | level=NOTSET 6 | handlers=root 7 | 8 | [logger_borg] 9 | level=NOTSET 10 | handlers=borg 11 | propogate=1 12 | qualname=borg 13 | 14 | [handlers] 15 | keys=root,borg 16 | 17 | [handler_root] 18 | class=FileHandler 19 | level=NOTSET 20 | formatter=output 21 | args=('test/temp/logs/output.log',) 22 | 23 | [handler_borg] 24 | class=FileHandler 25 | level=NOTSET 26 | formatter=output 27 | args=('test/temp/logs/borg.log',) 28 | 29 | [formatters] 30 | keys=output 31 | 32 | [formatter_output] 33 | format=%(name)s [%(levelname)s] %(message)s 34 | datefmt=%Y-%m-%d %H:%M:%S 35 | class=logging.Formatter 36 | -------------------------------------------------------------------------------- /test/res/test_passphrase: -------------------------------------------------------------------------------- 1 | secret -------------------------------------------------------------------------------- /test/test_00_options.py: -------------------------------------------------------------------------------- 1 | """Test the Options module.""" 2 | 3 | import unittest 4 | 5 | from borgapi import CommonOptions, ExclusionOptions 6 | from borgapi.options import OptionsBase 7 | 8 | 9 | class OptionsTests(unittest.TestCase): 10 | """Test the Options Dataclasses.""" 11 | 12 | def test_convert(self): 13 | """Convert the name adds the dashes to the front and replaces underscores with dashes.""" 14 | name = "test_name" 15 | converted = OptionsBase.convert_name(name) 16 | self.assertEqual( 17 | converted, 18 | "--test-name", 19 | "Name conversion does not produce expected ouput", 20 | ) 21 | 22 | def test_defaults(self): 23 | 
"""Defaults returns all the dataclass fields.""" 24 | common = CommonOptions._defaults() 25 | exclusion = ExclusionOptions._defaults() 26 | self.assertEqual( 27 | len(common), 28 | len(CommonOptions.__dataclass_fields__.keys()), 29 | "Number of Common Options does not match expected number", 30 | ) 31 | self.assertEqual( 32 | len(exclusion), 33 | len(ExclusionOptions.__dataclass_fields__.keys()), 34 | "Number of Exclusion Options does not match expected number", 35 | ) 36 | 37 | def test_parse(self): 38 | """Parsing produces formatted args list from class instance.""" 39 | expected_args = ["--warning", "--progress", "--log-json"] 40 | common_args = CommonOptions(warning=True, progress=True, log_json=True).parse() 41 | self.assertListEqual( 42 | common_args, 43 | expected_args, 44 | "Parsing boolean flags does not produce expected list output", 45 | ) 46 | 47 | expected_args = [ 48 | "--exclude", 49 | "foo", 50 | "--exclude", 51 | "bar", 52 | "--pattern", 53 | "baz", 54 | "--patterns-from", 55 | "spam/milk", 56 | ] 57 | exclusion_args = ExclusionOptions( 58 | exclude=["foo", "bar"], 59 | pattern="baz", 60 | patterns_from="spam/milk", 61 | ).parse() 62 | self.assertListEqual( 63 | exclusion_args, 64 | expected_args, 65 | "Parsing string flags does not produce expected output", 66 | ) 67 | 68 | 69 | if __name__ == "__main__": 70 | unittest.main() 71 | --------------------------------------------------------------------------------