├── .github └── workflows │ └── tests.yml ├── .gitignore ├── GETTING-STARTED.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── TESTING.md ├── actions_builtin.md ├── debian ├── changelog ├── compat ├── control ├── copyright ├── rules └── source │ ├── format │ └── options ├── make-test-chroot ├── run_coverage ├── run_type_checker ├── setup.py ├── tests ├── test_action.py ├── test_ansible_parameters.py ├── test_apt.py ├── test_blockinfile.py ├── test_command.py ├── test_conditionals.py ├── test_copy.py ├── test_facts.py ├── test_file.py ├── test_file_mixin.py ├── test_fileasset.py ├── test_modechange.py ├── test_pipeline.py ├── test_privs.py ├── test_role.py ├── test_runner_script.py ├── test_systemd.py ├── test_user.py └── test_zipapp.py ├── transilience ├── __init__.py ├── actions │ ├── __init__.py │ ├── action.py │ ├── apt.py │ ├── blockinfile.py │ ├── command.py │ ├── common.py │ ├── copy.py │ ├── facts │ │ ├── __init__.py │ │ ├── facts.py │ │ └── platform.py │ ├── file.py │ ├── git.py │ ├── misc.py │ ├── namespace.py │ ├── systemd.py │ └── user │ │ ├── TODO.py │ │ ├── __init__.py │ │ ├── action.py │ │ ├── backend.py │ │ ├── freebsd.py │ │ └── linux.py ├── ansible │ ├── __init__.py │ ├── conditionals.py │ ├── exceptions.py │ ├── parameters.py │ ├── role.py │ └── tasks.py ├── chroot.py ├── cmd │ ├── __init__.py │ └── doc.py ├── device.py ├── fileasset.py ├── hosts.py ├── playbook.py ├── role.py ├── runner.py ├── system │ ├── __init__.py │ ├── local.py │ ├── mitogen.py │ ├── pipeline.py │ └── system.py ├── template.py ├── unittest.py └── utils │ ├── __init__.py │ └── modechange.py └── update_docs /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | tests: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - name: Setup Python 11 | uses: actions/setup-python@v2 12 | - name: Install dependencies and project 13 | run: | 14 | sudo apt-get update 15 | sudo apt-get -y install systemd-container btrfs-progs eatmydata debootstrap python3-pip python3-nose2 16 | sudo python3 -m pip install . 17 | - name: Setup btrfs for tests 18 | run: | 19 | fallocate -l 1.5G testfile 20 | /usr/sbin/mkfs.btrfs testfile 21 | mkdir test_chroots 22 | sudo mount -o loop testfile test_chroots/ 23 | - name: Create master chroot 24 | run: sudo ./make-test-chroot buster 25 | - name: Run tests 26 | run: sudo nose2-3 --verbose 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # vim swapfiles 2 | *.swp 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | 8 | # Distribution / packaging 9 | .Python 10 | build/ 11 | develop-eggs/ 12 | dist/ 13 | sdist/ 14 | *.egg-info/ 15 | *.egg 16 | MANIFEST 17 | 18 | # Unit test / coverage reports 19 | htmlcov/ 20 | .coverage 21 | .coverage.* 22 | 23 | # mypy 24 | .mypy_cache/ 25 | 26 | /test_chroots/ 27 | -------------------------------------------------------------------------------- /GETTING-STARTED.md: -------------------------------------------------------------------------------- 1 | # Getting started with Transilience 2 | 3 | This is all still a prototype, and is subject to change as experiments 4 | continue. However, to play with it, here's how to setup a simple playbook. 5 | 6 | Playbooks are Python scripts. Here's the basic boilerplate: 7 | 8 | ```py 9 | #!/usr/bin/python3 10 | 11 | from dataclasses import dataclass 12 | import sys 13 | from transilience import Playbook, Host 14 | 15 | @dataclass 16 | class Server(Host): 17 | # Host vars go here 18 | ... 19 | 20 | 21 | class Play(Playbook): 22 | """ 23 | Name of this playbook 24 | """ 25 | 26 | def hosts(self): 27 | # See https://mitogen.networkgenomics.com/api.html#connection-methods 28 | # "ssh" is the name of the Router method: in this case `Router.ssh()` 29 | # All arguments after "ssh" are forwarded to `Router.ssh()` 30 | yield Server(name="server", args={ 31 | "type": "Mitogen", 32 | "method": "ssh", 33 | "hostname": "server.example.org", 34 | "username": "root", 35 | }) 36 | # Alternatively, you can execute on the local system, without Mitogen 37 | # yield Server(name="local", type="Local") 38 | 39 | def start(self, host: Host): 40 | 41 | # Add roles and start sending actions to be executed. All arguments after 42 | # the role name are forwarded to the Role constructor 43 | self.add_role("mail_aliases", aliases={ 44 | "transilience": "enrico", 45 | }) 46 | 47 | 48 | if __name__ == "__main__": 49 | sys.exit(Play().main()) 50 | ``` 51 | 52 | The `Playbook` class adds a basic command line interface: 53 | 54 | ``` 55 | $ ./provision --help 56 | usage: provision [-h] [-v] [--debug] 57 | 58 | Name of this playbook 59 | 60 | optional arguments: 61 | -h, --help show this help message and exit 62 | -v, --verbose verbose output 63 | --debug verbose output 64 | -C, --check do not perform changes, but check if changes would be needed 65 | ``` 66 | 67 | Roles are loaded as normal Python `roles.` modules, which are expected to 68 | contain a class called `Role`: 69 | 70 | ``` 71 | $ mkdir roles 72 | $ edit roles/mail_aliases.py 73 | ``` 74 | 75 | ```py 76 | from __future__ import annotations 77 | from typing import Dict 78 | from dataclasses import dataclass, field 79 | from transilience import role 80 | from transilience.actions import builtin 81 | 82 | 83 | @dataclass 84 | class Role(role.Role): 85 | # Role-level variables 86 | aliases: Dict[str, str] = field(default_factory=dict) 87 | 88 | def start(self): 89 | # Role-level variables are automatically exported to templates 90 | aliases = self.render_string("""{% for name, dest in aliases.items() %} 91 | {{name}}: {{dest}} 92 | {% endfor %}""")[ 93 | 94 | self.task(builtin.blockinfile( 95 | path="/etc/aliases", 96 | block=aliases, 97 | ), name="configure /etc/aliases", 98 | notify=RereadAliases, 99 | ) 100 | 101 | 102 | @dataclass 103 | class RereadAliases(role.Role): 104 | def start(self): 105 | self.task(builtin.command(argv=["newaliases"])) 106 | ``` 107 | 108 | Finally, run the playbook: 109 | 110 | ``` 111 | $ ./provision 112 | 2021-06-14 18:23:16 server: [changed 0.003s] mail_aliases configure /etc/aliases 113 | 2021-06-14 18:23:17 server: [changed 0.203s] RereadAliases Run newaliases 114 | 2021-06-18 15:57:53 server: 2 total actions: 0 unchanged, 2 changed, 0 skipped, 0 failed, 0 not executed. 115 | ``` 116 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | include LICENSE 3 | recursive-include tests *.py 4 | 5 | exclude make-test-chroot 6 | exclude run_coverage 7 | exclude run_type_checker 8 | exclude update_docs 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Transilience 2 | 3 | Python provisioning library. 4 | 5 | Ansible-like modules. Declarative actions. Generate actions with Python. No 6 | templatized YAML. Mitogen-based connections. 7 | 8 | Early stage proof of concept prototype. 9 | 10 | If you want to try playing with Transilience, see [GETTING-STARTED.md](GETTING-STARTED.md) for a 11 | guide on setting up a simple playbook. 12 | 13 | ## Usage 14 | 15 | ```py 16 | import mitogen 17 | from transilience.system import Mitogen 18 | from transilience import actions 19 | 20 | # Mitogen setup 21 | broker = mitogen.master.Broker() 22 | router = mitogen.master.Router(cls.broker) 23 | 24 | # Access the system 'workdir' as a 'local' connection. 25 | # You can use any connection method from 26 | # https://mitogen.networkgenomics.com/api.html#connection-methods 27 | # and arguments to the Mitogen constructor will be forwarded to it 28 | system = Mitogen("workdir", "local", router=cls.router) 29 | 30 | # Run a playbook 31 | system.system.run_actions([ 32 | actions.File( 33 | name="Create test dir", 34 | path="/tmp/test", 35 | state="directory", 36 | ), 37 | actions.File( 38 | name="Create test file", 39 | path="/tmp/test/testfile", 40 | state="touch", 41 | ), 42 | ]) 43 | ``` 44 | 45 | ## Requirements 46 | 47 | On the local system: 48 | 49 | * Required: Python version 3.7 or later. 50 | * Optional: [Mitogen](https://mitogen.networkgenomics.com/) to connect to 51 | remote systems; [Jinja2](https://jinja.palletsprojects.com/en/3.0.x/) for 52 | ansible-like templates. 53 | 54 | On remote systems: 55 | 56 | * Python 3 (tested on 3.7) 57 | 58 | ## Existing actions 59 | 60 | See [actions_builtin.md](actions_builtin.md). 61 | 62 | 63 | ## Design 64 | 65 | The basic ideas of Transilience: 66 | 67 | * Provisioning building blocks that you can reuse freely and follow a 68 | well-known API 69 | * A way to run them anywhere Mythogen can reach 70 | * Logic coded in straightforward Python instead of templated YAML 71 | 72 | In other words: 73 | 74 | * `transilience.actions` is a collection of idempotent, reusable provisioning 75 | macros in the style of Ansible tasks. They can be used without transilience. 76 | * `transilience.system` contains executors that can run actions anywhere 77 | [Mitogen](https://mitogen.networkgenomics.com/api.html#connection-methods) 78 | can reach 79 | * For provisioning, one can write a simple Python script that feeds Actions to 80 | local or remote systems. If an action depends on the results of previous 81 | actions, the logic can be coded in simple Python. 82 | 83 | 84 | ## Adding actions 85 | 86 | Actions are subclasses of `transilience.action.Action`, which is a 87 | [dataclass](https://docs.python.org/3/library/dataclasses.html) with an extra 88 | `run()` method. 89 | 90 | The `__post_init__` constructor can do preprocessing client-side. 91 | 92 | `run()` is the main function executed on the remote side. 93 | 94 | dataclass attributes are transmitted as they are on the remote side, filled 95 | further as the action is performed, and then sent back. See [Mitogen RPC serialization rules](https://mitogen.networkgenomics.com/getting_started.html#rpc-serialization-rules) 96 | for what types can be used. 97 | 98 | 99 | ## Why the name 100 | 101 | > **Transilience**: n. *A leap across or from one thing to another* 102 | > [1913 Webster] 103 | 104 | Set in the Hainish Cycle world from Ursula Le Guin novels, Transilience appears 105 | in the novels "A Fisherman of the Inland Sea", and "The Shobies' Story". 106 | 107 | 108 | ## Copyright 109 | 110 | Transilience is licensed under the GNU General Public License v3.0 or later. 111 | 112 | See [LICENSE](LICENSE) for the full text. 113 | 114 | Backends for the `user` action are adapted from 115 | [Ansible](https://github.com/ansible/ansible)'s sources. 116 | -------------------------------------------------------------------------------- /TESTING.md: -------------------------------------------------------------------------------- 1 | # Running unit tests 2 | 3 | Some of Transilience actions, like apt or systemd, need a containerized system 4 | to be tested. 5 | 6 | To run the tests, I built a simple and very fast system of ephemeral containers based on 7 | [systemd-nspawn](https://www.enricozini.org/blog/2021/debian/exploring-nspawn-for-cis/) 8 | and [btrfs snapshots](https://www.enricozini.org/blog/2021/debian/nspawn-runner-btrfs/), 9 | based on my work on [nspawn-runner](https://github.com/Truelite/nspawn-runner). 10 | 11 | ## Prerequisites 12 | 13 | ``` 14 | apt install systemd-container btrfs-progs eatmydata debootstrap 15 | ``` 16 | 17 | The `test_chroots/` directory needs to be on a `btrfs` filesystem. If you are 18 | using another filesystem, you can create one of about 1.5Gb, and mount it on 19 | `test_chroots`. 20 | 21 | You can even create one on a file: 22 | 23 | ``` 24 | $ fallocate -l 1.5G testfile 25 | $ /usr/sbin/mkfs.btrfs testfile 26 | $ sudo mount -o loop testfile test_chroots/ 27 | ``` 28 | 29 | Once you have `test_chroots/` on btrfs, you can use `make-test-chroot` to 30 | create the master chroot for the container: 31 | 32 | ``` 33 | sudo ./make-test-chroot buster 34 | ``` 35 | 36 | Note: this uses `eatmydata` to speed up debootstrap: you'll need the packages 37 | `btrfs-progs` and `eatmydata` installed, or you can remove the 'eatmydata' call 38 | from `make-test-chroot`. 39 | 40 | ## Running tests 41 | 42 | To start and stop the nspawn containers, the unit tests need to be run as root 43 | with `sudo`. The test suite drops root as soon as possible (see 44 | `unittest.ProcessPrivs`) and changes to `$SUDO_UID` and `$SUDO_GID`. 45 | 46 | They will temporarily regain root for as short as possible to start the 47 | container, stop it, and open a Mitogen connection to it. Look for `privs.root` 48 | in the code to see where this happens. 49 | 50 | To run the test, once `test_chroots` is set up, use `sudo` 51 | [`nose2`](https://docs.nose2.io/): 52 | 53 | ``` 54 | sudo nose2-3 55 | ``` 56 | -------------------------------------------------------------------------------- /actions_builtin.md: -------------------------------------------------------------------------------- 1 | # transilience.actions.builtin 2 | 3 | Documentation of the actions provided in module `transilience.actions.builtin`. 4 | 5 | 6 | ## apt 7 | 8 | Same as Ansible's 9 | [builtin.apt](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/apt_module.html). 10 | 11 | `force_apt_get` is ignored: `apt-get` is always used. 12 | 13 | Not yet implemented: 14 | 15 | * force 16 | * update_cache_retries 17 | * update_cache_retry_max_delay 18 | 19 | Parameters: 20 | 21 | * allow_unauthenticated [`bool`] = `False` 22 | * autoclean [`bool`] = `False` 23 | * autoremove [`bool`] = `False` 24 | * cache_valid_time [`int`] = `0` 25 | * deb [`List[str]`] 26 | * default_release [`Optional[str]`] = `None` 27 | * dpkg_options [`List[str]`] 28 | * fail_on_autoremove [`bool`] = `False` 29 | * force_apt_get [`bool`] = `False` 30 | * install_recommends [`Optional[bool]`] = `None` 31 | * name [`List[str]`] 32 | * only_upgrade [`bool`] = `False` 33 | * policy_rc_d [`Optional[int]`] = `None` 34 | * purge [`bool`] = `False` 35 | * state [`str`] = `'present'` 36 | * update_cache [`bool`] = `False` 37 | * upgrade [`str`] = `'no'` 38 | 39 | ## blockinfile 40 | 41 | Same as Ansible's 42 | [builtin.blockinfile](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/blockinfile_module.html). 43 | 44 | Not yet implemented: 45 | 46 | * backup 47 | * unsafe_writes 48 | * validate 49 | 50 | Parameters: 51 | 52 | * block [`Union[str, bytes]`] = `''` 53 | * create [`bool`] = `False` 54 | * group [`Union[str, int, None]`] = `None`: set group, as gid or group name 55 | * insertafter [`Optional[str]`] = `None` 56 | * insertbefore [`Optional[str]`] = `None` 57 | * marker [`str`] = `'# {mark} ANSIBLE MANAGED BLOCK'` 58 | * marker_begin [`str`] = `'BEGIN'` 59 | * marker_end [`str`] = `'END'` 60 | * mode [`Union[str, int, None]`] = `None`: set mode, as octal or any expression `chmod` can use 61 | * owner [`Union[str, int, None]`] = `None`: set owner, as uid or user name 62 | * path [`str`] = `''` 63 | * state [`Optional[str]`] = `None` 64 | 65 | ## command 66 | 67 | Same as Ansible's 68 | [builtin.command](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/command_module.html). 69 | 70 | Not yet implemented: 71 | 72 | * strip_empty_ends 73 | 74 | Parameters: 75 | 76 | * argv [`List[str]`] 77 | * chdir [`Optional[str]`] = `None` 78 | * cmd [`Optional[str]`] = `None` 79 | * creates [`Optional[str]`] = `None` 80 | * removes [`Optional[str]`] = `None` 81 | * stderr [`Optional[bytes]`] = `None` 82 | * stdin [`Union[str, bytes, None]`] = `None` 83 | * stdin_add_newline [`bool`] = `True` 84 | * stdout [`Optional[bytes]`] = `None` 85 | 86 | ## copy 87 | 88 | Same as Ansible's 89 | [builtin.copy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html). 90 | 91 | Not yet implemented: 92 | 93 | * backup 94 | * decrypt 95 | * directory_mode 96 | * force 97 | * local_follow 98 | * remote_src 99 | * unsafe_writes 100 | * validate 101 | * src as directory 102 | 103 | Parameters: 104 | 105 | * checksum [`Optional[str]`] = `None` 106 | * content [`Union[str, bytes, None]`] = `None` 107 | * dest [`str`] = `''` 108 | * follow [`bool`] = `True` 109 | * group [`Union[str, int, None]`] = `None`: set group, as gid or group name 110 | * mode [`Union[str, int, None]`] = `None`: set mode, as octal or any expression `chmod` can use 111 | * owner [`Union[str, int, None]`] = `None`: set owner, as uid or user name 112 | * src [`Optional[str]`] = `None` 113 | 114 | ## fail 115 | 116 | Fail with a custom message 117 | 118 | Same as Ansible's 119 | [builtin.fail](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/fail_module.html). 120 | 121 | Parameters: 122 | 123 | * msg [`str`] = `'Failed as requested from task'` 124 | 125 | ## file 126 | 127 | Same as Ansible's 128 | [builtin.file](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html). 129 | 130 | Not yet implemented: 131 | 132 | * access_time 133 | * modification_time 134 | * modification_time_format 135 | * unsafe_writes 136 | 137 | Parameters: 138 | 139 | * follow [`bool`] = `True`: set attributes of symlink destinations instead of the symlinks themselves 140 | * force [`bool`] = `False` 141 | * group [`Union[str, int, None]`] = `None`: set group, as gid or group name 142 | * mode [`Union[str, int, None]`] = `None`: set mode, as octal or any expression `chmod` can use 143 | * owner [`Union[str, int, None]`] = `None`: set owner, as uid or user name 144 | * path [`Optional[str]`] = `None`: Path to the file or directory being managed 145 | * recurse [`bool`] = `False`: Recursively apply attributes (only used with state=directory) 146 | * src [`Optional[str]`] = `None`: target of the link or hard link 147 | * state [`str`] = `'file'`: Valid: file, directory, link, hard, touch, absent 148 | 149 | ## noop 150 | 151 | Do nothing, successfully. 152 | 153 | Parameters: 154 | 155 | * changed [`bool`] = `False`: Set to True to pretend the action performed changes 156 | 157 | ## systemd 158 | 159 | Same as Ansible's 160 | [builtin.systemd](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/systemd_module.html) 161 | 162 | Parameters: 163 | 164 | * daemon_reexec [`bool`] = `False` 165 | * daemon_reload [`bool`] = `False` 166 | * enabled [`Optional[bool]`] = `None` 167 | * force [`bool`] = `False` 168 | * masked [`Optional[bool]`] = `None` 169 | * no_block [`bool`] = `False` 170 | * scope [`str`] = `'system'` 171 | * state [`Optional[str]`] = `None` 172 | * unit [`Optional[str]`] = `None` 173 | 174 | ## user 175 | 176 | Same as Ansible's 177 | [builtin.user](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/user_module.html) 178 | 179 | Parameters: 180 | 181 | * append [`bool`] = `False` 182 | * authorization [`Optional[str]`] = `None` 183 | * comment [`Optional[str]`] = `None` 184 | * create_home [`bool`] = `True` 185 | * expires [`Optional[float]`] = `None` 186 | * force [`bool`] = `False` 187 | * generate_ssh_key [`bool`] = `False` 188 | * group [`Optional[str]`] = `None` 189 | * groups [`List[str]`] 190 | * hidden [`Optional[bool]`] = `None` 191 | * home [`Optional[str]`] = `None` 192 | * local [`bool`] = `False` 193 | * login_class [`Optional[str]`] = `None` 194 | * move_home [`bool`] = `False` 195 | * name [`Optional[str]`] = `None` 196 | * non_unique [`bool`] = `False` 197 | * password [`Optional[str]`] = `None` 198 | * password_expire_max [`Optional[int]`] = `None` 199 | * password_expire_min [`Optional[int]`] = `None` 200 | * password_lock [`Optional[bool]`] = `None` 201 | * profile [`Optional[str]`] = `None` 202 | * remove [`bool`] = `False` 203 | * role [`Optional[str]`] = `None` 204 | * seuser [`Optional[str]`] = `None` 205 | * shell [`Optional[str]`] = `None` 206 | * skeleton [`Optional[str]`] = `None` 207 | * ssh_key_bits [`Optional[int]`] = `None` 208 | * ssh_key_comment [`Optional[str]`] = `None` 209 | * ssh_key_file [`Optional[str]`] = `None` 210 | * ssh_key_fingerprint [`Optional[str]`] = `None` 211 | * ssh_key_passphrase [`Optional[str]`] = `None` 212 | * ssh_key_pubkey [`Optional[str]`] = `None` 213 | * ssh_key_type [`str`] = `'rsa'` 214 | * state [`str`] = `'present'` 215 | * system [`bool`] = `False` 216 | * uid [`Optional[int]`] = `None` 217 | * update_password [`str`] = `'always'` 218 | 219 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | transilience (0.1.0-1) UNRELEASED; urgency=low 2 | 3 | * Initial packaging 4 | 5 | -- Enrico Zini Fri, 25 Feb 2022 13:54:18 +0100 6 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 13 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: transilience 2 | Section: python 3 | Priority: optional 4 | Maintainer: Enrico Zini 5 | Build-Depends: debhelper (>= 13), dh-python, 6 | python3-all, python3-yaml 7 | Standards-Version: 4.2.0 8 | 9 | Package: python3-transilience 10 | Architecture: all 11 | Depends: ${misc:Depends}, ${python3:Depends}, 12 | python3-yaml, python3-jinja2, python3-mitogen 13 | Recommends: ${python3:Recommends} 14 | Suggests: ${python3:Suggests} 15 | Description: Python provisioning library with ansible-like semantics 16 | Python provisioning library. 17 | . 18 | Ansible-like modules. Declarative actions. Generate actions with Python. No 19 | templatized YAML. Mitogen-based connections. 20 | . 21 | Early stage proof of concept prototype. 22 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: transilience 3 | Upstream-Contact: Enrico Zini 4 | Source: https://github.com/spanezz/transilience 5 | 6 | Files: * 7 | Copyright: 2020-2022 Enrico Zini 8 | License: GPL-3+ 9 | 10 | Files: debian/* 11 | Copyright: 2022, Enrico Zini 12 | License: GPL-3+ 13 | 14 | License: GPL-3+ 15 | The full text of the GPL version 3 is distributed in 16 | /usr/share/common-licenses/GPL-3 on Debian systems. 17 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #! /usr/bin/make -f 2 | 3 | export PYBUILD_NAME=transilience 4 | %: 5 | dh $@ --with python3 --buildsystem=pybuild 6 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /debian/source/options: -------------------------------------------------------------------------------- 1 | extend-diff-ignore="^[^/]+.egg-info/" 2 | -------------------------------------------------------------------------------- /make-test-chroot: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | NAME=${1:?"Usage: $0 chroot_name"} 4 | TEST_ROOT="$(dirname $0)/test_chroots" 5 | 6 | make_test_root() { 7 | test -d "$TEST_ROOT" || mkdir -p "$TEST_ROOT" 8 | 9 | test -e "$TEST_ROOT/CACHEDIR.TAG" || cat << EOF > "$TEST_ROOT/CACHEDIR.TAG" 10 | Signature: 8a477f597d28d172789f06886806bc55 11 | # chroots used for testing transilience, can be regenerated with make-test-chroot 12 | EOF 13 | } 14 | 15 | do_buster() { 16 | local name="$1" 17 | local root="$TEST_ROOT/$name" 18 | 19 | make_test_root 20 | 21 | sudo btrfs subvolume create "$root" 22 | sudo eatmydata debootstrap --variant=minbase --include=python3,dbus,systemd buster "$root" 23 | } 24 | 25 | if [ -d "$TEST_ROOT/$NAME" ] 26 | then 27 | echo "$TEST_ROOT/$NAME already exists" >&2 28 | exit 1 29 | fi 30 | 31 | do_$NAME $NAME 32 | -------------------------------------------------------------------------------- /run_coverage: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -x -e 4 | 5 | cd $(dirname "$0") 6 | 7 | sudo nose2-3 --with-coverage --coverage-report=html --verbose "$@" 8 | 9 | echo sensible-browser file://$(readlink -f htmlcov/index.html) 10 | -------------------------------------------------------------------------------- /run_type_checker: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -x -e 4 | 5 | cd $(dirname "$0") 6 | 7 | mypy transilience tests "$@" 8 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | 3 | from setuptools import find_packages, setup 4 | 5 | here = pathlib.Path(__file__).parent.resolve() 6 | long_description = (here / "README.md").read_text(encoding="utf-8") 7 | 8 | setup( 9 | name="transilience", 10 | version="0.1.0.dev0", 11 | description="A provisioning library", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://github.com/spanezz/transilience/", 15 | author="Enrico Zini", 16 | author_email="enrico@enricozini.org", 17 | classifiers=[ 18 | "Development Status :: 3 - Alpha", 19 | "Intended Audience :: Developers", 20 | "Intended Audience :: System Administrators", 21 | "Topic :: System :: Systems Administration", 22 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", 23 | "Programming Language :: Python :: 3", 24 | "Programming Language :: Python :: 3.7", 25 | "Programming Language :: Python :: 3.8", 26 | "Programming Language :: Python :: 3.9", 27 | "Programming Language :: Python :: 3 :: Only", 28 | ], 29 | packages=find_packages(where="."), 30 | python_requires=">=3.7, <4", 31 | install_requires=[ 32 | # "coloredlogs", # Optional 33 | # "yapf", # Optional 34 | "jinja2", 35 | "mitogen", 36 | "PyYAML", 37 | ], 38 | extras_require={ 39 | "device": ["parted"], 40 | }, 41 | ) 42 | -------------------------------------------------------------------------------- /tests/test_action.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from dataclasses import dataclass 3 | import unittest 4 | import json 5 | from transilience.actions import Action, builtin 6 | 7 | 8 | class TestAction(unittest.TestCase): 9 | def test_serialize_json(self): 10 | act = builtin.copy(content="'\"♥\x00".encode(), dest="/tmp/test") 11 | encoded = json.dumps(act.serialize_for_json()) 12 | 13 | dec = Action.deserialize_from_json(json.loads(encoded)) 14 | 15 | self.assertEqual(dec.content, act.content) 16 | -------------------------------------------------------------------------------- /tests/test_ansible_parameters.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Dict, Any, ContextManager 3 | from contextlib import contextmanager 4 | from unittest import TestCase 5 | import tempfile 6 | import os 7 | from transilience.ansible import parameters 8 | from transilience.template import EngineFilesystem 9 | 10 | 11 | class MockRole: 12 | def __init__(self, **role_vars: Dict[str, Any]): 13 | self.vars = role_vars 14 | for k, v in role_vars.items(): 15 | setattr(self, k, v) 16 | self.template_engine = EngineFilesystem() 17 | self.lookup_file_path = None 18 | 19 | def render_string(self, value: str) -> str: 20 | return self.template_engine.render_string(value, self.vars) 21 | 22 | def render_file(self, path: str) -> str: 23 | return self.template_engine.render_file(path, self.vars) 24 | 25 | def lookup_file(self, path: str) -> str: 26 | return f"LOOKUP:{path}" 27 | 28 | @contextmanager 29 | def template(self, contents: str) -> ContextManager[str]: 30 | old_engine = self.template_engine 31 | with tempfile.TemporaryDirectory() as workdir: 32 | tpl_dir = os.path.join(workdir, "templates") 33 | os.makedirs(tpl_dir) 34 | tpl_file = os.path.join(tpl_dir, "tmp.html") 35 | with open(tpl_file, "wt") as fd: 36 | fd.write(contents) 37 | try: 38 | self.template_engine = EngineFilesystem([workdir]) 39 | self.lookup_file_path = workdir 40 | yield "tmp.html" 41 | finally: 42 | self.template_engine = old_engine 43 | self.lookup_file_path = None 44 | 45 | 46 | class TestParameters(TestCase): 47 | def test_any(self): 48 | role = MockRole() 49 | P = parameters.ParameterAny 50 | 51 | for value in (None, "string", 123, 0o123, [1, 2, 3], {"a": "b"}): 52 | p = P(value) 53 | self.assertEqual(repr(p), repr(value)) 54 | self.assertEqual(p.get_value(None), value) 55 | self.assertEqual(list(p.list_role_vars(role)), []) 56 | 57 | def test_octal(self): 58 | role = MockRole() 59 | P = parameters.ParameterOctal 60 | 61 | p = P(None) 62 | self.assertEqual(repr(p), "None") 63 | self.assertEqual(p.get_value(None), None) 64 | self.assertEqual(list(p.list_role_vars(role)), []) 65 | 66 | p = P("ugo+rx") 67 | self.assertEqual(repr(p), "'ugo+rx'") 68 | self.assertEqual(p.get_value(None), "ugo+rx") 69 | self.assertEqual(list(p.list_role_vars(role)), []) 70 | 71 | p = P(0o755) 72 | self.assertEqual(repr(p), "0o755") 73 | self.assertEqual(p.get_value(None), 0o755) 74 | self.assertEqual(list(p.list_role_vars(role)), []) 75 | 76 | def test_templated_string_list(self): 77 | role = MockRole(b="rendered") 78 | P = parameters.ParameterTemplatedStringList 79 | 80 | p = P("a,{{b}},c") 81 | self.assertEqual(repr(p), "self.render_string('a,{{b}},c').split(',')") 82 | self.assertEqual(p.get_value(role), ["a", "rendered", "c"]) 83 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 84 | 85 | def test_var_reference_string_list(self): 86 | role = MockRole(varname="a,b,c") 87 | P = parameters.ParameterVarReferenceStringList 88 | 89 | p = P("varname") 90 | self.assertEqual(repr(p), "self.varname.split(',')") 91 | self.assertEqual(p.get_value(role), ["a", "b", "c"]) 92 | self.assertEqual(set(p.list_role_vars(role)), {"varname"}) 93 | 94 | def test_template_path(self): 95 | role = MockRole(b="rendered") 96 | P = parameters.ParameterTemplatePath 97 | 98 | with role.template("test:{{b}}") as fname: 99 | p = P(fname) 100 | self.assertEqual(repr(p), f"self.render_file('templates/{fname}')") 101 | self.assertEqual(p.get_value(role), "test:rendered") 102 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 103 | 104 | def test_var_reference(self): 105 | role = MockRole(varname="a,b,c") 106 | P = parameters.ParameterVarReference 107 | 108 | p = P("varname") 109 | self.assertEqual(repr(p), "self.varname") 110 | self.assertEqual(p.get_value(role), "a,b,c") 111 | self.assertEqual(set(p.list_role_vars(role)), {"varname"}) 112 | 113 | def test_template_string(self): 114 | role = MockRole(b="rendered") 115 | P = parameters.ParameterTemplateString 116 | 117 | p = P("a,{{b}},c") 118 | self.assertEqual(repr(p), "self.render_string('a,{{b}},c')") 119 | self.assertEqual(p.get_value(role), "a,rendered,c") 120 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 121 | 122 | def test_parameter_list(self): 123 | role = MockRole(b="rendered") 124 | p = parameters.ParameterList([ 125 | parameters.ParameterAny("foo"), 126 | parameters.ParameterOctal(0o644), 127 | parameters.ParameterTemplatedStringList("a,{{b}}"), 128 | parameters.ParameterVarReference("b"), 129 | parameters.ParameterList([ 130 | parameters.ParameterAny("bar"), 131 | parameters.ParameterAny(32), 132 | parameters.ParameterAny(False), 133 | ]), 134 | ]) 135 | 136 | self.assertEqual( 137 | repr(p), "['foo', 0o644, self.render_string('a,{{b}}').split(','), self.b, ['bar', 32, False]]") 138 | self.assertEqual(p.get_value(role), ['foo', 0o644, ['a', 'rendered'], 'rendered', ['bar', 32, False]]) 139 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 140 | 141 | def test_parameter_dict(self): 142 | role = MockRole(b="rendered") 143 | p = parameters.ParameterDict({ 144 | "a": parameters.ParameterAny("foo"), 145 | "b": parameters.ParameterOctal(0o644), 146 | "c": parameters.ParameterTemplatedStringList("a,{{b}}"), 147 | "d": parameters.ParameterVarReference("b"), 148 | "e": parameters.ParameterList([ 149 | parameters.ParameterAny("bar"), 150 | parameters.ParameterAny(32), 151 | parameters.ParameterAny(False), 152 | ]), 153 | }) 154 | 155 | self.assertEqual( 156 | repr(p), 157 | "{'a': 'foo', 'b': 0o644, 'c': self.render_string('a,{{b}}').split(',')," 158 | " 'd': self.b, 'e': ['bar', 32, False]}") 159 | self.assertEqual(p.get_value(role), { 160 | 'a': 'foo', 161 | 'b': 0o644, 162 | 'c': ['a', 'rendered'], 163 | 'd': 'rendered', 164 | 'e': ['bar', 32, False], 165 | }) 166 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 167 | 168 | def test_var_file_reference(self): 169 | role = MockRole(b="filename") 170 | P = parameters.ParameterVarFileReference 171 | 172 | p = P("b") 173 | self.assertEqual(repr(p), "self.lookup_file(os.path.join('files', self.b))") 174 | self.assertEqual(p.get_value(role), "LOOKUP:files/filename") 175 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 176 | 177 | def test_templated_file_reference(self): 178 | role = MockRole(b="filename") 179 | P = parameters.ParameterTemplatedFileReference 180 | 181 | p = P("{{b}}") 182 | self.assertEqual(repr(p), "self.lookup_file(os.path.join('files', self.render_string('{{b}}')))") 183 | self.assertEqual(p.get_value(role), "LOOKUP:files/filename") 184 | self.assertEqual(set(p.list_role_vars(role)), {"b"}) 185 | 186 | def test_file_reference(self): 187 | role = MockRole() 188 | P = parameters.ParameterFileReference 189 | 190 | p = P("filename") 191 | self.assertEqual(repr(p), "self.lookup_file(os.path.join('files', 'filename'))") 192 | self.assertEqual(p.get_value(role), "LOOKUP:files/filename") 193 | self.assertEqual(set(p.list_role_vars(role)), set()) 194 | -------------------------------------------------------------------------------- /tests/test_command.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import tempfile 3 | import unittest 4 | from unittest import mock 5 | import shlex 6 | import os 7 | from transilience.actions import builtin, ResultState 8 | 9 | 10 | class TestCommand(unittest.TestCase): 11 | def assertRun(self, changed=True, **kwargs): 12 | with mock.patch("subprocess.run") as subprocess_run: 13 | act = builtin.command(check=True, **kwargs) 14 | act.action_run(None) 15 | if changed: 16 | self.assertEqual(act.result.state, ResultState.CHANGED) 17 | else: 18 | self.assertEqual(act.result.state, ResultState.NOOP) 19 | self.assertFalse(subprocess_run.called) 20 | 21 | act = builtin.command(**kwargs) 22 | act.action_run(None) 23 | if changed: 24 | self.assertEqual(act.result.state, ResultState.CHANGED) 25 | else: 26 | self.assertEqual(act.result.state, ResultState.NOOP) 27 | return act 28 | 29 | def test_basic(self): 30 | with tempfile.TemporaryDirectory() as workdir: 31 | payload = "♥ test content" 32 | testfile = os.path.join(workdir, "testfile") 33 | 34 | self.assertRun(argv=["touch", testfile]) 35 | self.assertTrue(os.path.exists(testfile)) 36 | 37 | self.assertRun(cmd="rm " + shlex.quote(testfile)) 38 | self.assertFalse(os.path.exists(testfile)) 39 | 40 | orig_cwd = os.getcwd() 41 | self.assertRun(argv=["touch", "testfile"], chdir=workdir) 42 | self.assertTrue(os.path.exists(testfile)) 43 | self.assertEqual(os.getcwd(), orig_cwd) 44 | 45 | self.assertRun(argv=["dd", "if=/dev/stdin", "of=" + testfile], stdin=payload) 46 | with open(testfile, "rt") as fd: 47 | self.assertEqual(fd.read(), payload + "\n") 48 | 49 | self.assertRun(argv=["dd", "if=/dev/stdin", "of=" + testfile], stdin=payload.encode()) 50 | with open(testfile, "rt") as fd: 51 | self.assertEqual(fd.read(), payload) 52 | 53 | self.assertRun(argv=["dd", "if=/dev/stdin", "of=" + testfile], stdin=payload, stdin_add_newline=False) 54 | with open(testfile, "rt") as fd: 55 | self.assertEqual(fd.read(), payload) 56 | 57 | def test_noop(self): 58 | with tempfile.TemporaryDirectory() as workdir: 59 | testfile = os.path.join(workdir, "testfile") 60 | 61 | self.assertRun(argv=["touch", testfile], creates=testfile) 62 | self.assertTrue(os.path.exists(testfile)) 63 | 64 | self.assertRun(argv=["touch", testfile], creates=testfile, changed=False) 65 | self.assertTrue(os.path.exists(testfile)) 66 | 67 | self.assertRun(argv=["rm", testfile], removes=testfile) 68 | self.assertFalse(os.path.exists(testfile)) 69 | 70 | self.assertRun(argv=["rm", testfile], removes=testfile, changed=False) 71 | self.assertFalse(os.path.exists(testfile)) 72 | 73 | def test_noop_relative(self): 74 | with tempfile.TemporaryDirectory() as workdir: 75 | testfile = os.path.join(workdir, "testfile") 76 | 77 | self.assertRun(argv=["touch", "testfile"], chdir=workdir, creates="testfile") 78 | self.assertTrue(os.path.exists(testfile)) 79 | 80 | self.assertRun(argv=["touch", "testfile"], chdir=workdir, creates="testfile", changed=False) 81 | self.assertTrue(os.path.exists(testfile)) 82 | 83 | self.assertRun(argv=["rm", "testfile"], chdir=workdir, removes="testfile") 84 | self.assertFalse(os.path.exists(testfile)) 85 | 86 | self.assertRun(argv=["rm", "testfile"], chdir=workdir, removes="testfile", changed=False) 87 | self.assertFalse(os.path.exists(testfile)) 88 | 89 | def test_output(self): 90 | payload = "♥ test content" 91 | 92 | res = self.assertRun(argv=["echo", payload]) 93 | self.assertEqual(res.stdout, (payload + "\n").encode()) 94 | self.assertEqual(res.stderr, b"") 95 | 96 | res = self.assertRun(argv=["dd", "if=/dev/stdin", "of=/dev/stderr", "status=none"], stdin=payload) 97 | self.assertEqual(res.stdout, b"") 98 | self.assertEqual(res.stderr, (payload + "\n").encode()) 99 | -------------------------------------------------------------------------------- /tests/test_conditionals.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from unittest import TestCase 3 | from transilience.ansible.conditionals import Conditional 4 | from transilience import template 5 | 6 | 7 | class TestConditionals(TestCase): 8 | def setUp(self): 9 | super().setUp() 10 | self.engine = template.EngineFilesystem() 11 | 12 | def test_var(self): 13 | c = Conditional(self.engine, "varname") 14 | self.assertEqual(c.list_role_vars(), set(("varname",))) 15 | self.assertEqual(c.evaluate({"varname": 3}), 3) 16 | self.assertIsNone(c.evaluate({"varname": None})) 17 | self.assertEqual(c.get_python_code(), "self.varname") 18 | 19 | def test_defined(self): 20 | c = Conditional(self.engine, "varname is defined") 21 | self.assertEqual(c.list_role_vars(), set(("varname",))) 22 | self.assertFalse(c.evaluate({})) 23 | self.assertFalse(c.evaluate({"varname": None})) 24 | self.assertTrue(c.evaluate({"varname": True})) 25 | self.assertTrue(c.evaluate({"varname": 0})) 26 | self.assertEqual(c.get_python_code(), "self.varname is not None") 27 | 28 | c = Conditional(self.engine, "varname is not defined") 29 | self.assertEqual(c.list_role_vars(), set(("varname",))) 30 | self.assertTrue(c.evaluate({})) 31 | self.assertTrue(c.evaluate({"varname": None})) 32 | self.assertFalse(c.evaluate({"varname": True})) 33 | self.assertFalse(c.evaluate({"varname": False})) 34 | self.assertFalse(c.evaluate({"varname": 0})) 35 | self.assertEqual(c.get_python_code(), "self.varname is None") 36 | 37 | c = Conditional(self.engine, "varname is not defined or not varname") 38 | self.assertEqual(c.list_role_vars(), set(("varname",))) 39 | self.assertTrue(c.evaluate({})) 40 | self.assertTrue(c.evaluate({"varname": None})) 41 | self.assertTrue(c.evaluate({"varname": False})) 42 | self.assertTrue(c.evaluate({"varname": 0})) 43 | self.assertFalse(c.evaluate({"varname": True})) 44 | self.assertEqual(c.get_python_code(), "(self.varname is None or not self.varname)") 45 | 46 | def test_undefined(self): 47 | c = Conditional(self.engine, "varname is undefined") 48 | self.assertEqual(c.list_role_vars(), set(("varname",))) 49 | self.assertTrue(c.evaluate({})) 50 | self.assertTrue(c.evaluate({"varname": None})) 51 | self.assertFalse(c.evaluate({"varname": True})) 52 | self.assertFalse(c.evaluate({"varname": 0})) 53 | self.assertEqual(c.get_python_code(), "self.varname is None") 54 | 55 | c = Conditional(self.engine, "varname is not undefined") 56 | self.assertEqual(c.list_role_vars(), set(("varname",))) 57 | self.assertFalse(c.evaluate({})) 58 | self.assertFalse(c.evaluate({"varname": None})) 59 | self.assertTrue(c.evaluate({"varname": True})) 60 | self.assertTrue(c.evaluate({"varname": False})) 61 | self.assertTrue(c.evaluate({"varname": 0})) 62 | self.assertEqual(c.get_python_code(), "self.varname is not None") 63 | 64 | c = Conditional(self.engine, "varname is not undefined and varname") 65 | self.assertEqual(c.list_role_vars(), set(("varname",))) 66 | self.assertFalse(c.evaluate({})) 67 | self.assertFalse(c.evaluate({"varname": None})) 68 | self.assertFalse(c.evaluate({"varname": False})) 69 | self.assertFalse(c.evaluate({"varname": 0})) 70 | self.assertTrue(c.evaluate({"varname": True})) 71 | self.assertEqual(c.get_python_code(), "(self.varname is not None and self.varname)") 72 | -------------------------------------------------------------------------------- /tests/test_copy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import tempfile 3 | import unittest 4 | import stat 5 | import os 6 | from transilience.unittest import FileModeMixin, ActionTestMixin, LocalTestMixin, LocalMitogenTestMixin 7 | from transilience.actions import builtin 8 | 9 | 10 | class CopyTests(FileModeMixin, ActionTestMixin): 11 | def run_copy(self, changed=True, **kwargs): 12 | # Try check mode 13 | with self.assertUnchanged(kwargs["dest"]): 14 | self.run_action(builtin.copy(check=True, **kwargs), changed=changed) 15 | 16 | # Try real mode 17 | self.run_action(builtin.copy(**kwargs), changed=changed) 18 | 19 | def test_create_src(self): 20 | with tempfile.TemporaryDirectory() as workdir: 21 | payload = "♥ test content" 22 | srcfile = os.path.join(workdir, "source") 23 | with open(srcfile, "wt") as fd: 24 | fd.write(payload) 25 | 26 | dstfile = os.path.join(workdir, "destination") 27 | 28 | self.system.share_file_prefix(workdir) 29 | self.run_copy( 30 | src=srcfile, 31 | dest=dstfile, 32 | mode=0o640, 33 | ) 34 | 35 | with open(dstfile, "rt") as fd: 36 | self.assertEqual(fd.read(), payload) 37 | 38 | st = os.stat(dstfile) 39 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 40 | 41 | def test_create_src_noop(self): 42 | with tempfile.TemporaryDirectory() as workdir: 43 | payload = "♥ test content" 44 | srcfile = os.path.join(workdir, "source") 45 | with open(srcfile, "wt") as fd: 46 | fd.write(payload) 47 | 48 | dstfile = os.path.join(workdir, "destination") 49 | with open(dstfile, "wt") as fd: 50 | fd.write(payload) 51 | os.fchmod(fd.fileno(), 0o640) 52 | 53 | self.system.share_file_prefix(workdir) 54 | self.run_copy( 55 | src=srcfile, 56 | dest=dstfile, 57 | mode=0o640, 58 | changed=False) 59 | 60 | with open(dstfile, "rt") as fd: 61 | self.assertEqual(fd.read(), payload) 62 | 63 | st = os.stat(dstfile) 64 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 65 | 66 | def test_create_src_perms_only(self): 67 | with tempfile.TemporaryDirectory() as workdir: 68 | payload = "♥ test content" 69 | srcfile = os.path.join(workdir, "source") 70 | with open(srcfile, "wt") as fd: 71 | fd.write(payload) 72 | 73 | dstfile = os.path.join(workdir, "destination") 74 | with open(dstfile, "wt") as fd: 75 | fd.write(payload) 76 | os.fchmod(fd.fileno(), 0o600) 77 | 78 | self.system.share_file_prefix(workdir) 79 | self.run_copy( 80 | src=srcfile, 81 | dest=dstfile, 82 | mode=0o640, 83 | ) 84 | 85 | with open(dstfile, "rt") as fd: 86 | self.assertEqual(fd.read(), payload) 87 | 88 | st = os.stat(dstfile) 89 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 90 | 91 | def test_create_content(self): 92 | with tempfile.TemporaryDirectory() as workdir: 93 | payload = "♥ test content" 94 | dstfile = os.path.join(workdir, "destination") 95 | 96 | self.run_copy( 97 | content=payload, 98 | dest=dstfile, 99 | mode=0o640, 100 | ) 101 | 102 | with open(dstfile, "rt") as fd: 103 | self.assertEqual(fd.read(), payload) 104 | 105 | st = os.stat(dstfile) 106 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 107 | 108 | def test_create_content_noop(self): 109 | with tempfile.TemporaryDirectory() as workdir: 110 | payload = "♥ test content" 111 | 112 | dstfile = os.path.join(workdir, "destination") 113 | with open(dstfile, "wt") as fd: 114 | fd.write(payload) 115 | os.fchmod(fd.fileno(), 0o640) 116 | 117 | self.run_copy( 118 | content=payload, 119 | dest=dstfile, 120 | mode=0o640, 121 | changed=False) 122 | 123 | with open(dstfile, "rt") as fd: 124 | self.assertEqual(fd.read(), payload) 125 | 126 | st = os.stat(dstfile) 127 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 128 | 129 | def test_create_content_perms_only(self): 130 | with tempfile.TemporaryDirectory() as workdir: 131 | payload = "♥ test content" 132 | srcfile = os.path.join(workdir, "source") 133 | with open(srcfile, "wt") as fd: 134 | fd.write(payload) 135 | 136 | dstfile = os.path.join(workdir, "destination") 137 | with open(dstfile, "wt") as fd: 138 | fd.write(payload) 139 | os.fchmod(fd.fileno(), 0o600) 140 | 141 | self.system.share_file_prefix(workdir) 142 | self.run_copy( 143 | content=payload, 144 | dest=dstfile, 145 | mode=0o640, 146 | ) 147 | 148 | with open(dstfile, "rt") as fd: 149 | self.assertEqual(fd.read(), payload) 150 | 151 | st = os.stat(dstfile) 152 | self.assertEqual(stat.S_IMODE(st.st_mode), 0o640) 153 | 154 | 155 | class TestCopyLocal(CopyTests, LocalTestMixin, unittest.TestCase): 156 | pass 157 | 158 | 159 | class TestCopyMitogen(CopyTests, LocalMitogenTestMixin, unittest.TestCase): 160 | pass 161 | -------------------------------------------------------------------------------- /tests/test_facts.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import platform 4 | from transilience.unittest import LocalTestMixin 5 | from transilience.actions import facts, ResultState 6 | 7 | 8 | class TestFacts(LocalTestMixin, unittest.TestCase): 9 | def load_facts(self, facts_cls): 10 | res = list(self.system.run_actions([facts_cls()])) 11 | self.assertEqual(len(res), 1) 12 | self.assertIsInstance(res[0], facts_cls) 13 | self.assertEqual(res[0].result.state, ResultState.NOOP) 14 | return res[0] 15 | 16 | def test_platform(self): 17 | res = self.load_facts(facts.Platform) 18 | self.assertEqual(res.ansible_system, platform.system()) 19 | -------------------------------------------------------------------------------- /tests/test_file_mixin.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, Union 3 | import contextlib 4 | import unittest 5 | from unittest import mock 6 | from transilience.actions.common import FileAction 7 | from transilience.unittest import FileModeMixin 8 | from transilience import system 9 | 10 | 11 | class ComputedPermsAction(FileAction): 12 | pass 13 | 14 | 15 | class TestComputeFsPerms(FileModeMixin, unittest.TestCase): 16 | @contextlib.contextmanager 17 | def umask(self, umask: Optional[int]): 18 | if umask is None: 19 | yield 20 | else: 21 | with mock.patch("os.umask", return_value=umask): 22 | yield 23 | 24 | def assertComputedPerms( 25 | self, 26 | mode: Union[None, int, str], 27 | orig: Optional[int], 28 | expected: int, 29 | is_dir: bool = False, 30 | umask: Optional[int] = None): 31 | with self.umask(umask): 32 | act = ComputedPermsAction(mode=mode) 33 | act.action_run(system.Local()) 34 | computed = act._compute_fs_perms(orig, is_dir=is_dir) 35 | self.assertFileModeEqual(computed, expected) 36 | 37 | def test_none(self): 38 | self.assertComputedPerms(mode=None, orig=None, expected=0o644, umask=0o022) 39 | self.assertComputedPerms(mode=None, orig=None, expected=0o755, umask=0o022, is_dir=True) 40 | self.assertComputedPerms(mode=None, orig=0o644, expected=None) 41 | 42 | def test_int(self): 43 | self.assertComputedPerms(mode=0o644, orig=None, expected=0o644) 44 | self.assertComputedPerms(mode=0o644, orig=0o644, expected=None) 45 | 46 | def test_str(self): 47 | self.assertComputedPerms(mode="u=rw,g=r,o=r", orig=None, expected=0o644) 48 | self.assertComputedPerms(mode="u=rw,g=r,o=r", orig=0o644, expected=None) 49 | 50 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=None, expected=0o644) 51 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=0o644, expected=None) 52 | 53 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=None, is_dir=True, expected=0o755) 54 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=0o644, is_dir=True, expected=0o755) 55 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=0o744, is_dir=True, expected=0o755) 56 | self.assertComputedPerms(mode="u=rwX,g=rX,o=rX", orig=0o755, is_dir=True, expected=None) 57 | 58 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=None, expected=0o664) 59 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=0o664, expected=None) 60 | 61 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=None, is_dir=True, expected=0o775) 62 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=0o664, is_dir=True, expected=0o775) 63 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=0o744, is_dir=True, expected=0o775) 64 | self.assertComputedPerms(mode="ug=rwX,o=rX", orig=0o775, is_dir=True, expected=None) 65 | 66 | self.assertComputedPerms(mode="u=rwX,g=rX,o=", orig=None, expected=0o640) 67 | self.assertComputedPerms(mode="u=rwX,g=rX,o=", orig=0o640, expected=None) 68 | self.assertComputedPerms(mode="u=rwX,g=rX,o=", orig=0o222, expected=0o640) 69 | self.assertComputedPerms(mode="u=rwX,g=rX,o=", orig=None, is_dir=True, expected=0o750) 70 | self.assertComputedPerms(mode="u=rwX,g=rX,o=", orig=0o222, is_dir=True, expected=0o750) 71 | 72 | self.assertComputedPerms(mode="u=rwx,g=rxs,o=", orig=None, expected=0o2750) 73 | self.assertComputedPerms(mode="u=rwx,g=rxs,o=", orig=0o2750, expected=None) 74 | self.assertComputedPerms(mode="u=rwx,g=rxs,o=", orig=0o222, expected=0o2750) 75 | self.assertComputedPerms(mode="u=rwx,g=rxs,o=", orig=None, is_dir=True, expected=0o2750) 76 | self.assertComputedPerms(mode="u=rwx,g=rxs,o=", orig=0o222, is_dir=True, expected=0o2750) 77 | 78 | def test_equal_x(self): 79 | # Ported from coreutils's test suite 80 | self.assertComputedPerms(mode="a=r,=x", orig=0o644, umask=0o005, expected=0o110) 81 | self.assertComputedPerms(mode="a=r,=xX", orig=0o644, umask=0o005, expected=0o110) 82 | self.assertComputedPerms(mode="a=r,=Xx", orig=0o644, umask=0o005, expected=0o110) 83 | self.assertComputedPerms(mode="a=r,=x,=X", orig=0o644, umask=0o005, expected=0o110) 84 | self.assertComputedPerms(mode="a=r,=X,=x", orig=0o644, umask=0o005, expected=0o110) 85 | 86 | def test_equals(self): 87 | # Ported from coreutils's test suite 88 | expected = { 89 | "u": 0o700, 90 | "g": 0o070, 91 | "o": 0o007, 92 | } 93 | for src in "ugo": 94 | for dest in "ugo": 95 | if src == dest: 96 | continue 97 | self.assertComputedPerms(mode=f"a=,{src}=rwx,{dest}={src},{src}=", orig=0o644, expected=expected[dest]) 98 | -------------------------------------------------------------------------------- /tests/test_fileasset.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import tempfile 4 | import zipfile 5 | import os 6 | import io 7 | from transilience.fileasset import FileAsset, LocalFileAsset, ZipFileAsset 8 | 9 | 10 | class TestFileAssets(unittest.TestCase): 11 | def assertRead(self, fa: FileAsset, expected: bytes): 12 | with fa.open() as fd: 13 | self.assertEqual(fd.read(), expected) 14 | 15 | with io.BytesIO() as fd: 16 | fa.copy_to(fd) 17 | self.assertEqual(fd.getvalue(), expected) 18 | 19 | def test_local_small(self): 20 | with tempfile.NamedTemporaryFile("w+b") as tf: 21 | test_content = "test content ♥".encode() 22 | tf.write(test_content) 23 | tf.flush() 24 | 25 | a = LocalFileAsset(tf.name) 26 | self.assertEqual(a.sha1sum(), "e5a07c60318532612d09da40e729bccf71018ed7") 27 | self.assertEqual(a.cached, test_content) 28 | self.assertRead(a, test_content) 29 | 30 | a1 = FileAsset.deserialize(a.serialize()) 31 | self.assertEqual(a1.cached, a.cached) 32 | self.assertEqual(a1.path, a.path) 33 | 34 | def test_local_big(self): 35 | with tempfile.NamedTemporaryFile("w+b") as tf: 36 | # One megabyte file asset 37 | os.ftruncate(tf.fileno(), 1024*1024) 38 | 39 | a = LocalFileAsset(tf.name) 40 | self.assertEqual(a.sha1sum(), "3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3") 41 | self.assertIsNone(a.cached) 42 | self.assertRead(a, bytes(1024*1024)) 43 | 44 | a1 = FileAsset.deserialize(a.serialize()) 45 | self.assertEqual(a1.cached, a.cached) 46 | self.assertEqual(a1.path, a.path) 47 | 48 | def test_zip(self): 49 | test_content = "test content ♥".encode() 50 | with tempfile.NamedTemporaryFile("w+b") as tf: 51 | with zipfile.ZipFile(tf, mode='w') as zf: 52 | zf.writestr("dir/testfile", test_content) 53 | 54 | a = ZipFileAsset(tf.name, "dir/testfile") 55 | self.assertEqual(a.sha1sum(), "e5a07c60318532612d09da40e729bccf71018ed7") 56 | self.assertEqual(a.cached, test_content) 57 | self.assertRead(a, test_content) 58 | 59 | a1 = FileAsset.deserialize(a.serialize()) 60 | self.assertEqual(a1.cached, a.cached) 61 | self.assertEqual(a1.path, a.path) 62 | 63 | def test_zip_big(self): 64 | with tempfile.NamedTemporaryFile("w+b") as tf: 65 | with zipfile.ZipFile(tf, mode='w') as zf: 66 | zf.writestr("dir/testfile", bytes(1024*1024)) 67 | 68 | a = ZipFileAsset(tf.name, "dir/testfile") 69 | self.assertEqual(a.sha1sum(), "3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3") 70 | self.assertIsNone(a.cached) 71 | self.assertRead(a, bytes(1024*1024)) 72 | 73 | a1 = FileAsset.deserialize(a.serialize()) 74 | self.assertEqual(a1.cached, a.cached) 75 | self.assertEqual(a1.path, a.path) 76 | -------------------------------------------------------------------------------- /tests/test_modechange.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import stat 4 | from transilience.utils import modechange 5 | 6 | 7 | class TestModeChange(unittest.TestCase): 8 | def test_compile_group(self): 9 | mc = modechange.ModeChange.compile_group("=644") 10 | self.assertEqual(mc.op, "=") 11 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 12 | self.assertEqual(mc.affected, modechange.CHMOD_MODE_BITS) 13 | self.assertEqual(mc.value, 0o644) 14 | self.assertEqual(mc.mentioned, modechange.CHMOD_MODE_BITS) 15 | 16 | mc = modechange.ModeChange.compile_group("u=rw") 17 | self.assertEqual(mc.op, "=") 18 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 19 | self.assertEqual(mc.affected, stat.S_ISUID | stat.S_IRWXU) 20 | self.assertEqual(mc.value, 21 | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | 22 | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) 23 | self.assertEqual(mc.mentioned, stat.S_ISUID | stat.S_IRWXU) 24 | 25 | mc = modechange.ModeChange.compile_group("u=rX") 26 | self.assertEqual(mc.op, "=") 27 | self.assertEqual(mc.flag, modechange.MODE_X_IF_ANY_X) 28 | self.assertEqual(mc.affected, stat.S_ISUID | stat.S_IRWXU) 29 | self.assertEqual(mc.value, 30 | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) 31 | self.assertEqual(mc.mentioned, stat.S_ISUID | stat.S_IRWXU) 32 | self.assertEqual(modechange.ModeChange.adjust(0o640, False, 000, [mc]), (0o440, mc.affected)) 33 | 34 | mc = modechange.ModeChange.compile_group("g=r") 35 | self.assertEqual(mc.op, "=") 36 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 37 | self.assertEqual(mc.affected, stat.S_ISGID | stat.S_IRWXG) 38 | self.assertEqual(mc.value, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) 39 | self.assertEqual(mc.mentioned, stat.S_ISGID | stat.S_IRWXG) 40 | 41 | mc = modechange.ModeChange.compile_group("g+r") 42 | self.assertEqual(mc.op, "+") 43 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 44 | self.assertEqual(mc.affected, stat.S_ISGID | stat.S_IRWXG) 45 | self.assertEqual(mc.value, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) 46 | self.assertEqual(mc.mentioned, stat.S_ISGID | stat.S_IRWXG) 47 | 48 | mc = modechange.ModeChange.compile_group("g+w") 49 | self.assertEqual(mc.op, "+") 50 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 51 | self.assertEqual(mc.affected, stat.S_ISGID | stat.S_IRWXG) 52 | self.assertEqual(mc.value, stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) 53 | self.assertEqual(mc.mentioned, stat.S_ISGID | stat.S_IRWXG) 54 | self.assertEqual(modechange.ModeChange.adjust(0o440, False, 000, [mc]), (0o460, 0o020)) 55 | 56 | mc = modechange.ModeChange.compile_group("o=r") 57 | self.assertEqual(mc.op, "=") 58 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 59 | self.assertEqual(mc.affected, stat.S_ISVTX | stat.S_IRWXO) 60 | self.assertEqual(mc.value, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) 61 | self.assertEqual(mc.mentioned, stat.S_ISVTX | stat.S_IRWXO) 62 | 63 | mc = modechange.ModeChange.compile_group("o=") 64 | self.assertEqual(mc.op, "=") 65 | self.assertEqual(mc.flag, modechange.MODE_ORDINARY_CHANGE) 66 | self.assertEqual(mc.affected, stat.S_ISVTX | stat.S_IRWXO) 67 | self.assertEqual(mc.value, 0) 68 | self.assertEqual(mc.mentioned, stat.S_ISVTX | stat.S_IRWXO) 69 | -------------------------------------------------------------------------------- /tests/test_pipeline.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import uuid 4 | from transilience.actions import ResultState 5 | from transilience.actions.misc import Noop, Fail 6 | from transilience.unittest import LocalTestMixin 7 | from transilience.system import PipelineInfo 8 | 9 | 10 | class TestPipeline(LocalTestMixin, unittest.TestCase): 11 | def setUp(self): 12 | super().setUp() 13 | self.pipeline_id = str(uuid.uuid4()) 14 | 15 | def assertNoop(self, expected: str, changed: bool = False, when=None): 16 | pipeline_info = PipelineInfo(id=self.pipeline_id, when=when if when is not None else {}) 17 | act = self.system.execute_pipelined(Noop(changed=changed), pipeline_info) 18 | self.assertEqual(act.result.state, expected) 19 | return act 20 | 21 | def assertFail(self): 22 | pipeline_info = PipelineInfo(id=self.pipeline_id) 23 | act = self.system.execute_pipelined(Fail(msg="test"), pipeline_info) 24 | self.assertEqual(act.result.state, ResultState.FAILED) 25 | self.assertTrue(self.system.pipelines[self.pipeline_id].failed) 26 | self.assertNoop(ResultState.SKIPPED) 27 | 28 | def test_fail(self): 29 | self.assertNoop(ResultState.NOOP) 30 | self.assertNoop(ResultState.NOOP) 31 | self.assertNoop(ResultState.NOOP) 32 | self.assertFail() 33 | self.assertNoop(ResultState.SKIPPED) 34 | self.assertNoop(ResultState.SKIPPED) 35 | self.assertNoop(ResultState.SKIPPED) 36 | self.system.pipeline_clear_failed(self.pipeline_id) 37 | self.assertNoop(ResultState.NOOP) 38 | self.assertNoop(ResultState.NOOP) 39 | self.assertNoop(ResultState.NOOP) 40 | 41 | def test_when(self): 42 | n1 = self.assertNoop(ResultState.NOOP, changed=False) 43 | n2 = self.assertNoop(ResultState.CHANGED, changed=True) 44 | n3 = self.assertNoop(ResultState.SKIPPED, when={n1.uuid: [ResultState.CHANGED]}, changed=True) 45 | self.assertNoop(ResultState.CHANGED, when={n1.uuid: [ResultState.NOOP]}, changed=True) 46 | self.assertNoop(ResultState.CHANGED, when={n2.uuid: [ResultState.CHANGED]}, changed=True) 47 | self.assertNoop(ResultState.SKIPPED, when={n2.uuid: [ResultState.NOOP]}, changed=True) 48 | self.assertNoop(ResultState.CHANGED, when={n3.uuid: [ResultState.SKIPPED]}, changed=True) 49 | self.assertNoop(ResultState.CHANGED, when={n3.uuid: [ResultState.CHANGED, ResultState.SKIPPED]}, changed=True) 50 | self.assertNoop(ResultState.SKIPPED, when={n3.uuid: [ResultState.NOOP, ResultState.CHANGED]}, changed=True) 51 | 52 | self.system.pipeline_close(self.pipeline_id) 53 | self.assertNoop(ResultState.SKIPPED, when={n2.uuid: [ResultState.CHANGED]}, changed=True) 54 | -------------------------------------------------------------------------------- /tests/test_privs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import os 4 | from transilience.unittest import privs 5 | 6 | 7 | class TestPrivs(unittest.TestCase): 8 | def assertUnprivileged(self): 9 | uid, euid, suid = os.getresuid() 10 | self.assertEqual(uid, privs.user_uid) 11 | self.assertEqual(euid, privs.user_uid) 12 | self.assertEqual(suid, 0) 13 | 14 | gid, egid, sgid = os.getresgid() 15 | self.assertEqual(gid, privs.user_gid) 16 | self.assertEqual(egid, privs.user_gid) 17 | self.assertEqual(sgid, 0) 18 | 19 | def assertPrivileged(self): 20 | uid, euid, suid = os.getresuid() 21 | self.assertEqual(uid, 0) 22 | self.assertEqual(euid, 0) 23 | self.assertEqual(suid, privs.user_uid) 24 | 25 | gid, egid, sgid = os.getresgid() 26 | self.assertEqual(gid, 0) 27 | self.assertEqual(egid, 0) 28 | self.assertEqual(sgid, privs.user_gid) 29 | 30 | def test_default(self): 31 | self.assertTrue(privs.dropped) 32 | self.assertUnprivileged() 33 | 34 | def test_root(self): 35 | self.assertTrue(privs.dropped) 36 | self.assertUnprivileged() 37 | with privs.root(): 38 | self.assertFalse(privs.dropped) 39 | self.assertPrivileged() 40 | self.assertTrue(privs.dropped) 41 | self.assertUnprivileged() 42 | -------------------------------------------------------------------------------- /tests/test_role.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Union, Type 3 | from dataclasses import dataclass 4 | import unittest 5 | import tempfile 6 | import os 7 | from transilience.actions import ResultState, builtin 8 | from transilience.actions.misc import Noop 9 | from transilience.actions.facts import Facts 10 | from transilience.system import PipelineInfo 11 | from transilience.role import Role, with_facts 12 | from transilience.runner import PendingAction 13 | 14 | 15 | class MockRunner: 16 | def __init__(self): 17 | self.pending = [] 18 | self.template_engine = None 19 | 20 | def add_role(self, role_cls: Union[str, Type[Role]], **kw): 21 | name = role_cls.__name__ 22 | kw.setdefault("role_name", name) 23 | role = role_cls(**kw) 24 | role.name = name 25 | role.set_runner(self) 26 | start = getattr(role, "start", None) 27 | if start is not None: 28 | start() 29 | return role 30 | 31 | def add_pending_action(self, pending: PendingAction, pipeline_info: PipelineInfo): 32 | self.pending.append((pending, pipeline_info)) 33 | 34 | 35 | class TestRole(unittest.TestCase): 36 | def test_add_simple(self): 37 | class TestRole(Role): 38 | def start(self): 39 | self.add(builtin.noop()) 40 | 41 | runner = MockRunner() 42 | r = runner.add_role(TestRole) 43 | self.assertEqual(len(runner.pending), 1) 44 | pa, pi = runner.pending[0] 45 | self.assertIsInstance(pa.action, Noop) 46 | self.assertIsNone(pa.name) 47 | self.assertEqual(pa.notify, []) 48 | self.assertEqual(pa.then, []) 49 | self.assertEqual(pi.id, r.uuid) 50 | 51 | def test_add_named(self): 52 | class TestRole(Role): 53 | def start(self): 54 | self.add(builtin.noop(), name="test") 55 | 56 | runner = MockRunner() 57 | r = runner.add_role(TestRole) 58 | self.assertEqual(len(runner.pending), 1) 59 | pa, pi = runner.pending[0] 60 | self.assertIsInstance(pa.action, Noop) 61 | self.assertEqual(pa.name, "test") 62 | self.assertEqual(pa.notify, []) 63 | self.assertEqual(pa.then, []) 64 | self.assertEqual(pi.id, r.uuid) 65 | 66 | def test_add_notify(self): 67 | class TestRole(Role): 68 | def start(self): 69 | self.add(builtin.noop(), notify=[TestRole]) 70 | 71 | runner = MockRunner() 72 | r = runner.add_role(TestRole) 73 | self.assertEqual(len(runner.pending), 1) 74 | pa, pi = runner.pending[0] 75 | self.assertIsInstance(pa.action, Noop) 76 | self.assertIsNone(pa.name) 77 | self.assertEqual(pa.notify, [TestRole]) 78 | self.assertEqual(pa.then, []) 79 | self.assertEqual(pi.id, r.uuid) 80 | 81 | def test_add_notify_with(self): 82 | class TestRole(Role): 83 | def start(self): 84 | with self.notify(TestRole): 85 | self.add(builtin.noop(), notify=[Role]) 86 | 87 | runner = MockRunner() 88 | r = runner.add_role(TestRole) 89 | self.assertEqual(len(runner.pending), 1) 90 | pa, pi = runner.pending[0] 91 | self.assertIsInstance(pa.action, Noop) 92 | self.assertIsNone(pa.name) 93 | self.assertEqual(pa.notify, [TestRole, Role]) 94 | self.assertEqual(pa.then, []) 95 | self.assertEqual(pi.id, r.uuid) 96 | 97 | def test_add_when(self): 98 | class TestRole(Role): 99 | def start(self): 100 | a = self.add(builtin.noop()) 101 | self.add(builtin.noop(), when={a: ResultState.CHANGED}) 102 | 103 | runner = MockRunner() 104 | runner.add_role(TestRole) 105 | self.assertEqual(len(runner.pending), 2) 106 | pa, pi = runner.pending[1] 107 | self.assertIsInstance(pa.action, Noop) 108 | self.assertIsNone(pa.name) 109 | self.assertEqual(pa.notify, []) 110 | self.assertEqual(pa.then, []) 111 | self.assertEqual(pi.when, {runner.pending[0][0].action.uuid: [ResultState.CHANGED]}) 112 | 113 | def test_add_when_with(self): 114 | class TestRole(Role): 115 | def start(self): 116 | a = self.add(builtin.noop()) 117 | with self.when({a: ResultState.CHANGED}): 118 | self.add(builtin.noop()) 119 | 120 | runner = MockRunner() 121 | runner.add_role(TestRole) 122 | self.assertEqual(len(runner.pending), 2) 123 | pa, pi = runner.pending[1] 124 | self.assertIsInstance(pa.action, Noop) 125 | self.assertIsNone(pa.name) 126 | self.assertEqual(pa.notify, []) 127 | self.assertEqual(pa.then, []) 128 | self.assertEqual(pi.when, {runner.pending[0][0].action.uuid: [ResultState.CHANGED]}) 129 | 130 | def test_template_paths(self): 131 | with tempfile.TemporaryDirectory() as workdir: 132 | template_dir = os.path.join(workdir, "roles", "test", "templates") 133 | os.makedirs(template_dir) 134 | 135 | with open(os.path.join(template_dir, "tpl.html"), "wt") as fd: 136 | fd.write("Test: {{testvar}}") 137 | 138 | role = Role(role_name="test", role_assets_root=os.path.join(workdir, "roles", "test")) 139 | 140 | self.assertEqual(role.render_file("templates/tpl.html", testvar=42), "Test: 42") 141 | self.assertEqual(role.template_engine.list_file_template_vars("templates/tpl.html"), {"testvar"}) 142 | 143 | 144 | class TestFacts(unittest.TestCase): 145 | def test_inherit(self): 146 | @dataclass 147 | class F1(Facts): 148 | value1: int = 1 149 | 150 | @dataclass 151 | class F2(Facts): 152 | value2: int = 2 153 | 154 | @with_facts(F1) 155 | class Role1(Role): 156 | value3: int = 3 157 | 158 | @with_facts(F2) 159 | class Role2(Role1): 160 | value4: int = 4 161 | 162 | self.assertEqual(Role2._facts, (F1, F2)) 163 | r = Role2(role_name="test") 164 | self.assertEqual(r.value1, 1) 165 | self.assertEqual(r.value2, 2) 166 | self.assertEqual(r.value3, 3) 167 | self.assertEqual(r.value4, 4) 168 | 169 | def test_inherit1(self): 170 | @dataclass 171 | class F1(Facts): 172 | value1: int = 1 173 | 174 | @with_facts(F1) 175 | class Role1(Role): 176 | value3: int = 3 177 | 178 | @dataclass 179 | class Role2(Role1): 180 | value4: int = 4 181 | 182 | self.assertEqual(Role2._facts, (F1,)) 183 | r = Role2(role_name="test") 184 | self.assertEqual(r.value1, 1) 185 | self.assertEqual(r.value3, 3) 186 | self.assertEqual(r.value4, 4) 187 | 188 | def test_inherit_unique(self): 189 | @dataclass 190 | class F1(Facts): 191 | value1: int = 1 192 | 193 | @dataclass 194 | class F2(Facts): 195 | value2: int = 2 196 | 197 | @with_facts([F1, F2]) 198 | class Role1(Role): 199 | value3: int = 3 200 | 201 | @with_facts([F2, F1]) 202 | class Role2(Role1): 203 | value4: int = 4 204 | 205 | self.assertEqual(Role2._facts, (F1, F2)) 206 | r = Role2(role_name="test") 207 | self.assertEqual(r.value1, 1) 208 | self.assertEqual(r.value2, 2) 209 | self.assertEqual(r.value3, 3) 210 | self.assertEqual(r.value4, 4) 211 | -------------------------------------------------------------------------------- /tests/test_runner_script.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import tempfile 3 | import unittest 4 | import os 5 | from transilience.runner import Script 6 | from transilience.unittest import LocalTestMixin, LocalMitogenTestMixin 7 | 8 | 9 | class ScriptTests: 10 | def test_sequence(self): 11 | script = Script(system=self.system) 12 | with tempfile.TemporaryDirectory() as workdir: 13 | testdir = os.path.join(workdir, "testdir") 14 | script.builtin.file(state="directory", path=testdir) 15 | 16 | testfile = os.path.join(testdir, "testfile") 17 | script.builtin.file(state="touch", path=testfile) 18 | 19 | test_payload = "test payload ♥" 20 | script.builtin.copy(dest=testfile, content=test_payload) 21 | 22 | with open(testfile, "rt") as fd: 23 | self.assertEqual(fd.read(), test_payload) 24 | 25 | def test_error(self): 26 | script = Script() 27 | with tempfile.TemporaryDirectory() as workdir: 28 | testfile = os.path.join(workdir, "testfile") 29 | with self.assertRaises(Exception): 30 | script.builtin.file(state="file", path=testfile) 31 | 32 | 33 | class TestScriptLocal(ScriptTests, LocalTestMixin, unittest.TestCase): 34 | pass 35 | 36 | 37 | class TestScriptMitogen(ScriptTests, LocalMitogenTestMixin, unittest.TestCase): 38 | pass 39 | -------------------------------------------------------------------------------- /tests/test_systemd.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, List, Dict 3 | import unittest 4 | from unittest import mock 5 | import inspect 6 | import shlex 7 | import uuid 8 | from transilience.unittest import ActionTestMixin, LocalTestMixin, ChrootTestMixin 9 | from transilience.actions import builtin 10 | 11 | 12 | class TestSystemd(ActionTestMixin, LocalTestMixin, unittest.TestCase): 13 | def assertSystemd( 14 | self, 15 | file_state: Optional[str] = None, 16 | active_state: Optional[str] = None, 17 | called: Optional[List[str]] = None, 18 | changed=True, 19 | **kwargs): 20 | unit_info: Dict[str, str] = {} 21 | if file_state is not None: 22 | unit_info["UnitFileState"] = file_state 23 | if active_state is not None: 24 | unit_info["ActiveState"] = active_state 25 | if called is None: 26 | called = [] 27 | actual_called = [] 28 | 29 | def collect(args, **kw): 30 | self.assertIn("systemctl", args[0]) 31 | actual_called.append(" ".join(shlex.quote(a) for a in args[1:])) 32 | 33 | with mock.patch("transilience.actions.systemd.Systemd.get_unit_info", return_value=unit_info): 34 | # Try check mode first 35 | with mock.patch("subprocess.run", collect): 36 | orig = builtin.systemd(check=True, **kwargs) 37 | # Test generating a summary 38 | orig.action_summary() 39 | self.run_action(orig, changed=changed) 40 | self.assertEqual(actual_called, []) 41 | 42 | with mock.patch("subprocess.run", collect): 43 | orig = builtin.systemd(**kwargs) 44 | # Test generating a summary 45 | orig.action_summary() 46 | return self.run_action(orig, changed=changed), actual_called 47 | 48 | def test_daemon_reload(self): 49 | act, called = self.assertSystemd(daemon_reload=True, changed=True) 50 | self.assertEqual(called, ["daemon-reload"]) 51 | 52 | def test_daemon_reexec(self): 53 | act, called = self.assertSystemd(daemon_reexec=True, changed=True) 54 | self.assertEqual(called, ["daemon-reexec"]) 55 | 56 | def test_enable(self): 57 | act, called = self.assertSystemd( 58 | unit="test", file_state="disabled", active_state="inactive", enabled=False, changed=False) 59 | self.assertEqual(called, []) 60 | 61 | act, called = self.assertSystemd( 62 | unit="test", file_state="disabled", active_state="inactive", enabled=True, changed=True) 63 | self.assertEqual(called, ["enable test"]) 64 | 65 | act, called = self.assertSystemd( 66 | unit="test", file_state="enabled", active_state="inactive", enabled=True, changed=False) 67 | self.assertEqual(called, []) 68 | 69 | act, called = self.assertSystemd( 70 | unit="test", file_state="enabled", active_state="inactive", enabled=False, changed=True) 71 | self.assertEqual(called, ["disable test"]) 72 | 73 | def test_mask(self): 74 | act, called = self.assertSystemd( 75 | unit="test", file_state="disabled", active_state="inactive", masked=False, changed=False) 76 | self.assertEqual(called, []) 77 | 78 | act, called = self.assertSystemd( 79 | unit="test", file_state="disabled", active_state="inactive", masked=True, changed=True) 80 | self.assertEqual(called, ["mask test"]) 81 | 82 | act, called = self.assertSystemd( 83 | unit="test", file_state="masked", active_state="inactive", masked=True, changed=False) 84 | self.assertEqual(called, []) 85 | 86 | act, called = self.assertSystemd( 87 | unit="test", file_state="masked", active_state="inactive", masked=False, changed=True) 88 | self.assertEqual(called, ["unmask test"]) 89 | 90 | def test_start(self): 91 | act, called = self.assertSystemd( 92 | unit="test", file_state="enabled", active_state="inactive", state="stopped", changed=False) 93 | self.assertEqual(called, []) 94 | 95 | act, called = self.assertSystemd( 96 | unit="test", file_state="enabled", active_state="inactive", state="started", changed=True) 97 | self.assertEqual(called, ["start test"]) 98 | 99 | act, called = self.assertSystemd( 100 | unit="test", file_state="enabled", active_state="active", state="started", changed=False) 101 | self.assertEqual(called, []) 102 | 103 | act, called = self.assertSystemd( 104 | unit="test", file_state="enabled", active_state="active", state="stopped", changed=True) 105 | self.assertEqual(called, ["stop test"]) 106 | 107 | def test_reload(self): 108 | act, called = self.assertSystemd( 109 | unit="test", file_state="enabled", active_state="active", state="restarted", changed=True) 110 | self.assertEqual(called, ["restart test"]) 111 | 112 | act, called = self.assertSystemd( 113 | unit="test", file_state="enabled", active_state="inactive", state="restarted", changed=True) 114 | self.assertEqual(called, ["start test"]) 115 | 116 | act, called = self.assertSystemd( 117 | unit="test", file_state="enabled", active_state="active", state="reloaded", changed=True) 118 | self.assertEqual(called, ["reload test"]) 119 | 120 | act, called = self.assertSystemd( 121 | unit="test", file_state="enabled", active_state="inactive", state="reloaded", changed=True) 122 | self.assertEqual(called, ["start test"]) 123 | 124 | 125 | class TestSystemdReal(ActionTestMixin, ChrootTestMixin, unittest.TestCase): 126 | def assertSystemd(self, changed=True, **kwargs): 127 | orig = builtin.systemd(**kwargs) 128 | return self.run_action(orig, changed=changed) 129 | 130 | def setUp(self): 131 | self.unit_name = str(uuid.uuid4()) 132 | 133 | self.run_action(builtin.copy( 134 | dest=f"/usr/lib/systemd/system/{self.unit_name}.service", 135 | content=inspect.cleandoc(f""" 136 | [Unit] 137 | Description=Test Unit {self.unit_name} 138 | [Service] 139 | Type=simple 140 | ExecStart=/usr/bin/sleep 6h 141 | ExecReload=/bin/true 142 | [Install] 143 | WantedBy=multi-user.target 144 | """) 145 | ) 146 | ) 147 | 148 | self.run_action(builtin.systemd(daemon_reload=True), changed=True) 149 | 150 | def test_daemon_reload(self): 151 | self.assertSystemd(daemon_reload=True, changed=True) 152 | 153 | def test_daemon_reexec(self): 154 | self.assertSystemd(daemon_reexec=True, changed=True) 155 | 156 | def test_enable(self): 157 | self.assertSystemd(unit=self.unit_name, enabled=False, changed=False) 158 | self.assertSystemd(unit=self.unit_name, enabled=True, changed=True) 159 | self.assertSystemd(unit=self.unit_name, enabled=True, changed=False) 160 | self.assertSystemd(unit=self.unit_name, enabled=False, changed=True) 161 | self.assertSystemd(unit=self.unit_name, enabled=False, changed=False) 162 | 163 | def test_mask(self): 164 | self.assertSystemd(unit=self.unit_name, masked=False, changed=False) 165 | self.assertSystemd(unit=self.unit_name, masked=True, changed=True) 166 | self.assertSystemd(unit=self.unit_name, masked=True, changed=False) 167 | self.assertSystemd(unit=self.unit_name, masked=False, changed=True) 168 | self.assertSystemd(unit=self.unit_name, masked=False, changed=False) 169 | 170 | def test_start(self): 171 | self.assertSystemd(unit=self.unit_name, state="stopped", changed=False) 172 | self.assertSystemd(unit=self.unit_name, state="started", changed=True) 173 | self.assertSystemd(unit=self.unit_name, state="started", changed=False) 174 | self.assertSystemd(unit=self.unit_name, state="stopped", changed=True) 175 | 176 | def test_reload(self): 177 | self.assertSystemd(unit=self.unit_name, state="started", changed=True) 178 | self.assertSystemd(unit=self.unit_name, state="restarted", changed=True) 179 | self.assertSystemd(unit=self.unit_name, state="restarted", changed=True) 180 | self.assertSystemd(unit=self.unit_name, state="reloaded", changed=True) 181 | self.assertSystemd(unit=self.unit_name, state="reloaded", changed=True) 182 | -------------------------------------------------------------------------------- /tests/test_user.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | from transilience.unittest import ActionTestMixin, ChrootTestMixin 4 | from transilience.actions import builtin 5 | 6 | 7 | user_sequence = 0 8 | 9 | 10 | class TestUser(ActionTestMixin, ChrootTestMixin, unittest.TestCase): 11 | def setUp(self): 12 | global user_sequence 13 | self.user_name = f"user{user_sequence}" 14 | user_sequence += 1 15 | 16 | def assertUser(self, changed=True, **kwargs): 17 | kwargs.setdefault("name", self.user_name) 18 | 19 | # Quick and dirty way of testing check mode: if it performs actions, 20 | # the next call to the action should report not changed. 21 | # 22 | # There can be more serious tests after a refactoring of the User 23 | # action implementations 24 | orig = builtin.user(check=True, **kwargs) 25 | self.run_action(orig, changed=changed) 26 | 27 | orig = builtin.user(**kwargs) 28 | return self.run_action(orig, changed=changed) 29 | 30 | def test_create(self): 31 | act = self.assertUser(state="present") 32 | self.assertIsNotNone(act.uid) 33 | self.assertIsNotNone(act.group) 34 | self.assertIsNotNone(act.comment) 35 | self.assertIsNotNone(act.home) 36 | self.assertIsNotNone(act.shell) 37 | 38 | act = self.assertUser(state="present", changed=False) 39 | 40 | def test_remove(self): 41 | self.assertUser(state="present") 42 | self.assertUser(state="absent") 43 | self.assertUser(state="absent", changed=False) 44 | -------------------------------------------------------------------------------- /tests/test_zipapp.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import unittest 3 | import tempfile 4 | import os 5 | import yaml 6 | from transilience.unittest import LocalTestMixin, LocalMitogenTestMixin 7 | from transilience.role import Loader 8 | 9 | 10 | class ZipappTests: 11 | @classmethod 12 | def setUpClass(cls): 13 | super().setUpClass() 14 | cls.zipfile = tempfile.NamedTemporaryFile(mode="w+b", suffix=".zip") 15 | import zipfile 16 | with zipfile.PyZipFile(cls.zipfile, mode='w', optimize=2) as zf: 17 | # Create a directory entry. There seems to be nothing to do this in 18 | # zipfile's standard API, so I looked into zipfile sources to see 19 | # what it does in ZipInfo.from_file and ZipFile.write() 20 | role_info = zipfile.ZipInfo("roles/") 21 | role_info.external_attr = 0o700 << 16 # Unix attributes 22 | role_info.file_size = 0 23 | role_info.external_attr |= 0x10 # MS-DOS directory flag 24 | role_info.compress_size = 0 25 | role_info.CRC = 0 26 | zf.writestr(role_info, b"") 27 | 28 | role = [ 29 | { 30 | "name": "test task", 31 | "copy": { 32 | "src": "testfile", 33 | "dest": "{{workdir}}/testfile", 34 | }, 35 | } 36 | ] 37 | zf.writestr("roles/test/tasks/main.yaml", yaml.dump(role)) 38 | zf.writestr("roles/test/files/testfile", "♥") 39 | 40 | role = [ 41 | "from __future__ import annotations", 42 | "from transilience import actions, role", 43 | "from transilience.actions import builtin", 44 | "import os", 45 | "", 46 | "@role.with_facts([actions.facts.Platform])", 47 | "class Role(role.Role):", 48 | " workdir: str = None", 49 | " def all_facts_available(self):", 50 | " self.add(builtin.copy(", 51 | " src=self.lookup_file('files/testfile'),", 52 | " dest=os.path.join(self.workdir, 'testfile'),", 53 | " ))", 54 | ] 55 | zf.writestr("roles/test1.py", "\n".join(role)) 56 | zf.writestr("roles/test1/files/testfile", "♥") 57 | 58 | @classmethod 59 | def tearDownClass(cls): 60 | cls.zipfile.close() 61 | super().tearDownClass() 62 | 63 | def test_load_yaml(self): 64 | loader = Loader.create_from_path(self.zipfile.name) 65 | self.assertIsNotNone(loader) 66 | role_cls = loader.load("test") 67 | with tempfile.TemporaryDirectory() as workdir: 68 | self.run_role(role_cls, workdir=workdir) 69 | 70 | testfile = os.path.join(workdir, "testfile") 71 | with open(testfile, "rt") as fd: 72 | self.assertEqual(fd.read(), "♥") 73 | 74 | def test_load_module(self): 75 | loader = Loader.create_from_path(self.zipfile.name) 76 | self.assertIsNotNone(loader) 77 | role_cls = loader.load("test1") 78 | with tempfile.TemporaryDirectory() as workdir: 79 | self.run_role(role_cls, workdir=workdir) 80 | 81 | testfile = os.path.join(workdir, "testfile") 82 | with open(testfile, "rt") as fd: 83 | self.assertEqual(fd.read(), "♥") 84 | 85 | 86 | class TestLocal(ZipappTests, LocalTestMixin, unittest.TestCase): 87 | pass 88 | 89 | 90 | class TestMitogen(ZipappTests, LocalMitogenTestMixin, unittest.TestCase): 91 | pass 92 | -------------------------------------------------------------------------------- /transilience/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from . import actions 3 | from . import system 4 | from . import utils 5 | from .playbook import Playbook 6 | from .hosts import Host 7 | 8 | __all__ = ["actions", "system", "utils", "Playbook", "Host"] 9 | -------------------------------------------------------------------------------- /transilience/actions/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from .action import Action, ResultState 3 | from .namespace import Namespace, builtin 4 | 5 | # Import action modules so they can register with the builtin namespace 6 | from . import facts 7 | from . import misc # noqa 8 | from . import file # noqa 9 | from . import copy # noqa 10 | from . import blockinfile # noqa 11 | from . import apt # noqa 12 | from . import command # noqa 13 | from . import systemd # noqa 14 | from . import user # noqa 15 | from . import git # noqa 16 | 17 | __all__ = ["Action", "ResultState", "Namespace", "builtin", "facts"] 18 | -------------------------------------------------------------------------------- /transilience/actions/blockinfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, Union, List 3 | from dataclasses import dataclass 4 | import re 5 | from .common import FileAction 6 | from . import builtin 7 | 8 | if TYPE_CHECKING: 9 | import transilience.system 10 | 11 | 12 | @builtin.action(name="blockinfile") 13 | @dataclass 14 | class BlockInFile(FileAction): 15 | """ 16 | Same as Ansible's 17 | [builtin.blockinfile](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/blockinfile_module.html). 18 | 19 | Not yet implemented: 20 | 21 | * backup 22 | * unsafe_writes 23 | * validate 24 | """ 25 | path: str = "" 26 | block: Union[str, bytes] = "" 27 | insertafter: Optional[str] = None 28 | insertbefore: Optional[str] = None 29 | marker: str = "# {mark} ANSIBLE MANAGED BLOCK" 30 | marker_begin: str = "BEGIN" 31 | marker_end: str = "END" 32 | create: bool = False 33 | state: Optional[str] = None 34 | 35 | def __post_init__(self): 36 | super().__post_init__() 37 | if self.path == "": 38 | raise TypeError(f"{self.__class__}.path cannot be empty") 39 | 40 | if self.insertbefore is not None and self.insertafter is not None: 41 | raise ValueError(f"{self.__class__}: insertbefore and insertafter cannot both be set") 42 | 43 | if self.block == "": 44 | if self.state == "present": 45 | raise ValueError(f"{self.__class__}: then the block is empty, state bust be absent") 46 | elif self.state is None: 47 | self.state = "absent" 48 | else: 49 | if self.state is None: 50 | self.state = "present" 51 | 52 | def action_summary(self): 53 | return f"Edit block in {self.path!r}" 54 | 55 | def edit_lines(self, lines: List[bytes]): 56 | # Compute markers 57 | marker_begin = self.marker.format(mark=self.marker_begin).encode() 58 | marker_end = self.marker.format(mark=self.marker_end).encode() 59 | 60 | # Compute insert position 61 | if self.insertbefore is None: 62 | if self.insertafter in (None, "EOF"): 63 | pos = "EOF" 64 | insertre = None 65 | else: 66 | pos = "AFTER" 67 | insertre = re.compile(self.insertafter.encode(errors='surrogate_or_strict')) 68 | else: 69 | if self.insertbefore == "BOF": 70 | pos = "BOF" 71 | insertre = None 72 | else: 73 | pos = "BEFORE" 74 | insertre = re.compile(self.insertbefore.encode(errors='surrogate_or_strict')) 75 | 76 | # Block to insert/replace 77 | if self.block and self.state == "present": 78 | if isinstance(self.block, str): 79 | block = self.block.encode() 80 | else: 81 | block = self.block 82 | 83 | blocklines = [marker_begin + b"\n"] 84 | for line in block.splitlines(): 85 | if not line.endswith(b"\n"): 86 | line += b"\n" 87 | blocklines.append(line) 88 | blocklines.append(marker_end + b"\n") 89 | else: 90 | blocklines = [] 91 | 92 | # Look for the last matching block in the file, and for the last line 93 | # matching insertre 94 | line_begin = None 95 | last_block = None 96 | insertre_pos = None 97 | for lineno, line in enumerate(lines): 98 | # print("SCAN", lineno, line, line_begin, last_block, insertre_pos) 99 | if line_begin is None: 100 | if line.rstrip() == marker_begin: 101 | line_begin = lineno 102 | else: 103 | if line.rstrip() == marker_end: 104 | last_block = (line_begin, lineno) 105 | line_begin = None 106 | 107 | if insertre is not None and insertre.search(line): 108 | insertre_pos = lineno 109 | if line_begin is not None: 110 | last_block = (line_begin, lineno + 1) 111 | 112 | # Do the edit 113 | # print("EDIT", last_block, pos, insertre, blocklines, lines) 114 | if last_block is None: 115 | if pos == "EOF": 116 | lines += blocklines 117 | elif pos == "BOF": 118 | lines[0:0] = blocklines 119 | elif pos == "BEFORE": 120 | lines[insertre_pos:insertre_pos] = blocklines 121 | elif pos == "AFTER": 122 | lines[insertre_pos + 1:insertre_pos + 1] = blocklines 123 | else: 124 | lines[last_block[0]:last_block[1] + 1] = blocklines 125 | 126 | def action_run(self, system: transilience.system.System): 127 | super().action_run(system) 128 | path = self.get_path_object(self.path, follow=True) 129 | lines: List[bytes] 130 | if path is None: 131 | if not self.create: 132 | return 133 | dest = self.path 134 | lines = [] 135 | else: 136 | dest = path.path 137 | # Read the original contents of the file 138 | with open(dest, "rb") as fd: 139 | lines = fd.readlines() 140 | 141 | orig_lines = list(lines) 142 | self.edit_lines(lines) 143 | 144 | # If file exists, and contents would not be changed, don't write it 145 | if orig_lines == lines: 146 | self.set_path_object_permissions(path) 147 | return 148 | 149 | # Write out the new contents 150 | if self.check: 151 | self.set_changed() 152 | else: 153 | with self.write_file_atomically(dest, "wb") as fd: 154 | for line in lines: 155 | fd.write(line) 156 | -------------------------------------------------------------------------------- /transilience/actions/command.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, Union, List, Dict, Any, cast 3 | from dataclasses import dataclass, field 4 | import subprocess 5 | import glob 6 | import os 7 | import shlex 8 | from .action import Action 9 | from . import builtin 10 | 11 | if TYPE_CHECKING: 12 | import transilience.system 13 | 14 | 15 | @builtin.action(name="command") 16 | @dataclass 17 | class Command(Action): 18 | """ 19 | Same as Ansible's 20 | [builtin.command](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/command_module.html). 21 | 22 | Not yet implemented: 23 | 24 | * strip_empty_ends 25 | """ 26 | argv: List[str] = field(default_factory=list) 27 | chdir: Optional[str] = None 28 | cmd: Optional[str] = None 29 | creates: Optional[str] = None 30 | removes: Optional[str] = None 31 | stdin: Union[str, bytes, None] = None 32 | stdin_add_newline: bool = True 33 | 34 | # stdout and stderr filled on execution 35 | stdout: Optional[bytes] = None 36 | stderr: Optional[bytes] = None 37 | 38 | def __post_init__(self): 39 | super().__post_init__() 40 | if not self.argv and not self.cmd: 41 | raise TypeError(f"{self.__class__}: one of args and cmd needs to be set") 42 | 43 | def summary(self): 44 | if self.cmd: 45 | return f"Run {self.cmd!r}" 46 | else: 47 | return "Run " + " ".join(shlex.quote(x) for x in self.argv) 48 | 49 | def action_run(self, system: transilience.system.System): 50 | super().action_run(system) 51 | 52 | if self.creates: 53 | if self.chdir: 54 | creates = os.path.join(self.chdir, self.creates) 55 | else: 56 | creates = self.creates 57 | if glob.glob(creates): 58 | return 59 | 60 | if self.removes: 61 | if self.chdir: 62 | removes = os.path.join(self.chdir, self.removes) 63 | else: 64 | removes = self.removes 65 | if not glob.glob(removes): 66 | return 67 | 68 | kwargs: Dict[str, Any] = { 69 | "capture_output": True, 70 | "check": True, 71 | } 72 | if self.chdir: 73 | kwargs["cwd"] = self.chdir 74 | 75 | if self.stdin is not None: 76 | if isinstance(self.stdin, bytes): 77 | stdin = self.stdin 78 | else: 79 | if self.stdin_add_newline: 80 | stdin = (self.stdin + "\n").encode() 81 | else: 82 | stdin = self.stdin.encode() 83 | kwargs["input"] = stdin 84 | 85 | if self.argv: 86 | args = self.argv 87 | else: 88 | # We can cast, because __post_init__ makes sure self.cmd is not 89 | # None if self.argv is None 90 | args = shlex.split(cast(str, self.cmd)) 91 | 92 | self.set_changed() 93 | if not self.check: 94 | res = subprocess.run(args, **kwargs) 95 | self.stdout = res.stdout 96 | self.stderr = res.stderr 97 | -------------------------------------------------------------------------------- /transilience/actions/copy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, Union, List 3 | from dataclasses import dataclass 4 | import hashlib 5 | import os 6 | from ..fileasset import FileAsset, LocalFileAsset 7 | from .action import local_file 8 | from .common import FileAction 9 | from . import builtin 10 | 11 | if TYPE_CHECKING: 12 | import transilience.system 13 | 14 | 15 | @builtin.action(name="copy") 16 | @dataclass 17 | class Copy(FileAction): 18 | """ 19 | Same as Ansible's 20 | [builtin.copy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html). 21 | 22 | Not yet implemented: 23 | 24 | * backup 25 | * decrypt 26 | * directory_mode 27 | * force 28 | * local_follow 29 | * remote_src 30 | * unsafe_writes 31 | * validate 32 | * src as directory 33 | """ 34 | dest: str = "" 35 | src: Union[None, str, FileAsset] = local_file(None, "local file to be copied") 36 | content: Union[str, bytes, None] = None 37 | checksum: Optional[str] = None 38 | follow: bool = True 39 | 40 | def __post_init__(self): 41 | super().__post_init__() 42 | if self.dest == "": 43 | raise TypeError(f"{self.__class__}.dest cannot be empty") 44 | 45 | # If we are given a source file, compute its checksum 46 | if self.src is not None: 47 | if self.content is not None: 48 | raise ValueError(f"{self.__class__}: src and content cannot both be set") 49 | 50 | # Make sure src, if present, is a FileAsset 51 | if isinstance(self.src, str): 52 | self.src = LocalFileAsset(os.path.abspath(self.src)) 53 | 54 | if self.checksum is None: 55 | self.checksum = self.src.sha1sum() 56 | elif self.content is not None: 57 | if self.checksum is None: 58 | h = hashlib.sha1() 59 | if isinstance(self.content, str): 60 | h.update(self.content.encode()) 61 | else: 62 | h.update(self.content) 63 | self.checksum = h.hexdigest() 64 | else: 65 | raise ValueError(f"{self.__class__}: one of src and content needs to be set") 66 | 67 | def action_summary(self): 68 | if self.content is not None: 69 | return f"Replace contents of {self.dest!r}" 70 | else: 71 | return f"Copy {self.src!r} to {self.dest!r}" 72 | 73 | def list_local_files_needed(self) -> List[str]: 74 | res = super().list_local_files_needed() 75 | if self.src is not None: 76 | res.append(self.src) 77 | return res 78 | 79 | def write_content(self): 80 | """ 81 | Write destination file from self.content 82 | """ 83 | path = self.get_path_object(self.dest) 84 | if path is not None: 85 | # If file exists, checksum it, and if the hashes are the same, don't transfer 86 | checksum = path.sha1sum() 87 | if checksum == self.checksum: 88 | self.set_path_object_permissions(path) 89 | return 90 | dest = path.path 91 | else: 92 | dest = self.dest 93 | 94 | if isinstance(self.content, str): 95 | content = self.content.encode() 96 | else: 97 | content = self.content 98 | 99 | if self.check: 100 | self.set_changed() 101 | return 102 | 103 | with self.write_file_atomically(dest, "wb") as fd: 104 | fd.write(content) 105 | 106 | def write_src(self, system: transilience.system.System): 107 | """ 108 | Write destination file from a streamed self.src 109 | """ 110 | path = self.get_path_object(self.dest) 111 | if path is not None: 112 | # If file exists, checksum it, and if the hashes are the same, don't transfer 113 | checksum = path.sha1sum() 114 | if checksum == self.checksum: 115 | self.set_path_object_permissions(path) 116 | return 117 | dest = path.path 118 | else: 119 | dest = self.dest 120 | 121 | if self.check: 122 | self.set_changed() 123 | return 124 | 125 | with self.write_file_atomically(dest, "w+b") as fd: 126 | self.src.copy_to(fd) 127 | fd.seek(0) 128 | checksum = FileAsset.compute_file_sha1sum(fd) 129 | if checksum != self.checksum: 130 | raise RuntimeError(f"{self.dest!r} has SHA1 {checksum!r} after receiving it," 131 | f"but 'checksum' value is {self.checksum!r}") 132 | 133 | def action_run(self, system: transilience.system.System): 134 | super().action_run(system) 135 | if self.content is not None: 136 | self.write_content() 137 | else: 138 | self.write_src(system) 139 | -------------------------------------------------------------------------------- /transilience/actions/facts/__init__.py: -------------------------------------------------------------------------------- 1 | from .platform import Platform 2 | from .facts import Facts 3 | 4 | __all__ = ["Platform", "Facts"] 5 | -------------------------------------------------------------------------------- /transilience/actions/facts/facts.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, Dict, Any 3 | from dataclasses import dataclass 4 | from ..action import Action 5 | 6 | 7 | @dataclass 8 | class Facts(Action): 9 | """ 10 | Collect facts about the system 11 | """ 12 | pass 13 | -------------------------------------------------------------------------------- /transilience/actions/facts/platform.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional 3 | from dataclasses import dataclass 4 | import subprocess 5 | import platform 6 | import socket 7 | import shutil 8 | import re 9 | from .facts import Facts 10 | 11 | if TYPE_CHECKING: 12 | import transilience.system 13 | 14 | 15 | # i86pc is a Solaris and derivatives-ism 16 | SOLARIS_I86_RE_PATTERN = r'i([3456]86|86pc)' 17 | solaris_i86_re = re.compile(SOLARIS_I86_RE_PATTERN) 18 | 19 | 20 | # From ansible/module_utils/facts/system/platform.py 21 | @dataclass 22 | class Platform(Facts): 23 | """ 24 | Facts from the platform module 25 | """ 26 | ansible_system: Optional[str] = None 27 | ansible_kernel: Optional[str] = None 28 | ansible_kernel: Optional[str] = None 29 | ansible_kernel_version: Optional[str] = None 30 | ansible_machine: Optional[str] = None 31 | ansible_python_version: Optional[str] = None 32 | ansible_fqdn: Optional[str] = None 33 | ansible_hostname: Optional[str] = None 34 | ansible_nodename: Optional[str] = None 35 | ansible_domain: Optional[str] = None 36 | ansible_userspace_bits: Optional[str] = None 37 | ansible_architecture: Optional[str] = None 38 | ansible_userspace_architecture: Optional[str] = None 39 | ansible_machine_id: Optional[str] = None 40 | 41 | def action_summary(self): 42 | return "gather platform facts" 43 | 44 | def action_run(self, system: transilience.system.System): 45 | super().action_run(system) 46 | # platform.system() can be Linux, Darwin, Java, or Windows 47 | self.ansible_system = platform.system() 48 | self.ansible_kernel = platform.release() 49 | self.ansible_kernel_version = platform.version() 50 | self.ansible_machine = platform.machine() 51 | 52 | self.ansible_python_version = platform.python_version() 53 | 54 | self.ansible_fqdn = socket.getfqdn() 55 | self.ansible_hostname = platform.node().split('.')[0] 56 | self.ansible_nodename = platform.node() 57 | 58 | self.ansible_domain = '.'.join(self.ansible_fqdn.split('.')[1:]) 59 | 60 | arch_bits = platform.architecture()[0] 61 | 62 | self.ansible_userspace_bits = arch_bits.replace('bit', '') 63 | if self.ansible_machine == 'x86_64': 64 | self.ansible_architecture = self.ansible_machine 65 | if self.ansible_userspace_bits == '64': 66 | self.ansible_userspace_architecture = 'x86_64' 67 | elif self.ansible_userspace_bits == '32': 68 | self.ansible_userspace_architecture = 'i386' 69 | elif solaris_i86_re.search(self.ansible_machine): 70 | self.ansible_architecture = 'i386' 71 | if self.ansible_userspace_bits == '64': 72 | self.ansible_userspace_architecture = 'x86_64' 73 | elif self.ansible_userspace_bits == '32': 74 | self.ansible_userspace_architecture = 'i386' 75 | else: 76 | self.ansible_architecture = self.ansible_machine 77 | 78 | if self.ansible_system == 'AIX': 79 | # Attempt to use getconf to figure out architecture 80 | # fall back to bootinfo if needed 81 | getconf_bin = shutil.which('getconf') 82 | if getconf_bin: 83 | res = subprocess.run([getconf_bin, "MACHINE_ARCHITECTURE"], capture_output=True, text=True) 84 | if res.returncode == 0: 85 | data = res.stdout.splitlines() 86 | self.ansible_architecture = data[0] 87 | else: 88 | bootinfo_bin = shutil.which('bootinfo') 89 | if bootinfo_bin is not None: 90 | res = subprocess.run([bootinfo_bin, '-p'], capture_output=True, text=True) 91 | if res.returncode == 0: 92 | data = res.stdout.splitlines() 93 | self.ansible_architecture = data[0] 94 | elif self.ansible_system == 'OpenBSD': 95 | self.ansible_architecture = platform.uname()[5] 96 | 97 | machine_id = None 98 | for path in ("/var/lib/dbus/machine-id", "/etc/machine-id"): 99 | try: 100 | with open(path, "rt") as fd: 101 | machine_id = next(fd).strip() 102 | break 103 | except FileNotFoundError: 104 | pass 105 | 106 | if machine_id: 107 | self.ansible_machine_id = machine_id 108 | -------------------------------------------------------------------------------- /transilience/actions/file.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional 3 | from dataclasses import dataclass 4 | import tempfile 5 | import shutil 6 | import os 7 | from .common import FileAction, PathObject 8 | from . import builtin 9 | from .action import scalar 10 | 11 | if TYPE_CHECKING: 12 | import transilience.system 13 | 14 | 15 | @builtin.action(name="file") 16 | @dataclass 17 | class File(FileAction): 18 | """ 19 | Same as Ansible's 20 | [builtin.file](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html). 21 | 22 | Not yet implemented: 23 | 24 | * access_time 25 | * modification_time 26 | * modification_time_format 27 | * unsafe_writes 28 | """ 29 | path: Optional[str] = scalar(None, "Path to the file or directory being managed") 30 | state: str = scalar("file", "Valid: file, directory, link, hard, touch, absent") 31 | recurse: bool = scalar(False, "Recursively apply attributes (only used with state=directory)") 32 | src: Optional[str] = scalar(None, "target of the link or hard link") 33 | follow: bool = scalar(True, "set attributes of symlink destinations instead of the symlinks themselves") 34 | force: bool = False 35 | 36 | def __post_init__(self): 37 | super().__post_init__() 38 | if self.path is None: 39 | raise TypeError(f"{self.__class__}.path cannot be None") 40 | if self.recurse is True and self.state != "directory": 41 | raise ValueError(f"{self.__class__}.recurse only makes sense when state=directory") 42 | if self.state in ("link", "hard") and self.src is None: 43 | raise ValueError(f"{self.__class__} needs src when state {self.state}") 44 | 45 | def action_summary(self): 46 | if self.state == "file": 47 | return f"Set permissions/attributes of file {self.path!r}" 48 | elif self.state == "directory": 49 | return f"Setup directory {self.path!r}" 50 | elif self.state == "link": 51 | return f"Setup symlink {self.path!r}" 52 | elif self.state == "hard": 53 | return f"Setup hard link {self.path!r}" 54 | elif self.state == "touch": 55 | return f"Create file {self.path!r}" 56 | elif self.state == "absent": 57 | return f"Remove path {self.path!r}" 58 | else: 59 | return f"{self.__class__}: unknown state {self.state!r}" 60 | 61 | def do_file(self): 62 | path = self.get_path_object(self.path) 63 | if path is None: 64 | raise RuntimeError("f{path} does not exist") 65 | if path.isdir(): 66 | raise RuntimeError("f{path} is a directory") 67 | if path.islink(): 68 | raise RuntimeError("f{path} is a symlink") 69 | self.set_path_object_permissions(path) 70 | 71 | def _set_tree_perms(self, path: PathObject): 72 | for root, dirs, files in path.walk(): 73 | for fn in dirs: 74 | self.set_path_object_permissions( 75 | PathObject(os.path.join(root, fn), follow=False), record=False) 76 | for fn in files: 77 | self.set_path_object_permissions( 78 | PathObject(os.path.join(root, fn), follow=False), record=False) 79 | 80 | def _mkpath(self, path: str): 81 | parent = os.path.dirname(path) 82 | if not os.path.isdir(parent): 83 | self._mkpath(parent) 84 | 85 | self.log.info("%s: creating directory", path) 86 | if not self.check: 87 | os.mkdir(path) 88 | self.set_changed() 89 | 90 | patho = self.get_path_object(path) 91 | self.set_path_object_permissions(patho, record=False) 92 | 93 | def do_directory(self): 94 | path = self.get_path_object(self.path) 95 | if path is None: 96 | # TODO: review 97 | self._mkpath(self.path) 98 | self.set_changed() 99 | elif path.isdir(): 100 | if self.recurse: 101 | self._set_tree_perms(path) 102 | self.set_path_object_permissions(path) 103 | else: 104 | raise RuntimeError("f{path} exists and is not a directory") 105 | 106 | def do_link(self): 107 | path = self.get_path_object(self.path, follow=False) 108 | 109 | if path is not None: 110 | # Don't replace a non-link unless force is True 111 | if not path.islink() and not self.force: 112 | raise RuntimeError(f"{path} already exists, is not a link, and force is False") 113 | 114 | if path.isdir(): 115 | target = os.path.join(path.path, self.src) 116 | else: 117 | target = os.path.join(os.path.dirname(path.path), self.src) 118 | else: 119 | target = os.path.join(os.path.dirname(self.path), self.src) 120 | 121 | target_po = self.get_path_object(target, follow=False) 122 | if target_po is None and not self.force: 123 | raise RuntimeError(f"{target!r} does not exists, and force is False") 124 | 125 | if path is None: 126 | if not self.check: 127 | os.symlink(target, self.path) 128 | elif path.islink(): 129 | orig = os.readlink(self.path) 130 | if orig == target: 131 | return 132 | if not self.check: 133 | os.symlink(target, self.path) 134 | elif path.isdir(): 135 | # tempfile.mktemp is deprecated, but I cannot find a better way to 136 | # atomically create a symlink with a nonconflicting name. 137 | if not self.check: 138 | tmp = tempfile.mktemp(prefix=self.path) 139 | os.symlink(target, tmp) 140 | try: 141 | os.rmdir(path.path) 142 | os.rename(tmp, self.path) 143 | except Exception: 144 | os.unlink(tmp) 145 | raise 146 | else: 147 | # tempfile.mktemp is deprecated, but I cannot find a better way to 148 | # atomically create a symlink with a nonconflicting name 149 | if not self.check: 150 | tmp = tempfile.mktemp(prefix=self.path) 151 | os.symlink(target, tmp) 152 | try: 153 | os.rename(tmp, self.path) 154 | except Exception: 155 | os.unlink(tmp) 156 | raise 157 | 158 | self.set_changed() 159 | path = self.get_path_object(self.path, follow=False) 160 | self.set_path_object_permissions(path) 161 | 162 | def do_hard(self): 163 | path = self.get_path_object(self.path, follow=False) 164 | 165 | target_po = self.get_path_object(self.src, follow=False) 166 | if target_po is None: 167 | raise RuntimeError(f"{self.src!r} does not exist") 168 | 169 | if path is None: 170 | if not self.check: 171 | os.link(self.src, self.path) 172 | elif path.islink(): 173 | # tempfile.mktemp is deprecated, but I cannot find a better way to 174 | # atomically create a symlink with a nonconflicting name. 175 | if not self.check: 176 | tmp = tempfile.mktemp(prefix=self.path) 177 | os.link(self.src, tmp) 178 | try: 179 | os.unlink(self.path) 180 | os.rename(tmp, self.path) 181 | except Exception: 182 | os.unlink(tmp) 183 | raise 184 | elif path.isdir(): 185 | # tempfile.mktemp is deprecated, but I cannot find a better way to 186 | # atomically create a symlink with a nonconflicting name. 187 | if not self.check: 188 | tmp = tempfile.mktemp(prefix=self.path) 189 | os.link(self.src, tmp) 190 | try: 191 | os.rmdir(path.path) 192 | os.rename(tmp, self.path) 193 | except Exception: 194 | os.unlink(tmp) 195 | raise 196 | else: 197 | target = self.get_path_object(self.src, follow=False) 198 | # noop if it's a link to the same target 199 | if (target.st.st_dev, target.st.st_ino) == (path.st.st_dev, path.st.st_ino): 200 | return 201 | # tempfile.mktemp is deprecated, but I cannot find a better way to 202 | # atomically create a symlink with a nonconflicting name 203 | if not self.check: 204 | tmp = tempfile.mktemp(prefix=self.path) 205 | os.link(self.src, tmp) 206 | try: 207 | os.rename(tmp, self.path) 208 | except Exception: 209 | os.unlink(tmp) 210 | raise 211 | 212 | self.set_changed() 213 | path = self.get_path_object(self.path, follow=False) 214 | self.set_path_object_permissions(path) 215 | 216 | def do_touch(self): 217 | with self.create_file_if_missing(self.path) as fd: 218 | pass 219 | 220 | if fd is None: 221 | # The file already exists 222 | path = self.get_path_object(self.path) 223 | self.set_path_object_permissions(path) 224 | 225 | def do_absent(self): 226 | path = self.get_path_object(self.path) 227 | if path is None: 228 | return 229 | 230 | if path.isdir(): 231 | self.set_changed() 232 | if not self.check: 233 | shutil.rmtree(self.path, ignore_errors=False) 234 | self.log.info("%s: removed directory recursively") 235 | else: 236 | if not self.check: 237 | os.unlink(self.path) 238 | self.set_changed() 239 | self.log.info("%s: removed") 240 | 241 | def action_run(self, system: transilience.system.System): 242 | super().action_run(system) 243 | 244 | meth = getattr(self, f"do_{self.state}", None) 245 | if meth is None: 246 | raise NotImplementedError(f"File state {self.state!r} is not implemented") 247 | return meth() 248 | -------------------------------------------------------------------------------- /transilience/actions/misc.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING 3 | from dataclasses import dataclass 4 | from .action import Action, scalar 5 | from . import builtin 6 | 7 | if TYPE_CHECKING: 8 | import transilience.system 9 | 10 | 11 | @builtin.action(name="noop") 12 | @dataclass 13 | class Noop(Action): 14 | """ 15 | Do nothing, successfully. 16 | """ 17 | changed: bool = scalar(False, "Set to True to pretend the action performed changes") 18 | 19 | def action_summary(self): 20 | return "Do nothing" 21 | 22 | def action_run(self, system: transilience.system.System): 23 | super().action_run(system) 24 | if self.changed: 25 | self.set_changed() 26 | 27 | 28 | @builtin.action(name="fail") 29 | @dataclass 30 | class Fail(Action): 31 | """ 32 | Fail with a custom message 33 | 34 | Same as Ansible's 35 | [builtin.fail](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/fail_module.html). 36 | """ 37 | msg: str = "Failed as requested from task" 38 | 39 | def action_summary(self): 40 | return f"Fail: {self.msg}" 41 | 42 | def action_run(self, system: transilience.system.System): 43 | super().action_run(system) 44 | raise RuntimeError(self.msg) 45 | -------------------------------------------------------------------------------- /transilience/actions/namespace.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, Callable 3 | 4 | if TYPE_CHECKING: 5 | from .action import Action 6 | 7 | 8 | class Namespace: 9 | """ 10 | Registry for a group of actions 11 | """ 12 | 13 | def __init__(self, name: str): 14 | self.name = name 15 | 16 | def __str__(self): 17 | return self.name 18 | 19 | def __repr__(self): 20 | return f"Action namespace {self.name!r}" 21 | 22 | def action( 23 | self, 24 | factory: Optional[Callable[..., Action]] = None, 25 | *, 26 | name=None): 27 | if factory is None: 28 | def decorator(factory: Callable[..., Action]): 29 | nonlocal name 30 | if name is None: 31 | name = factory.__name__ 32 | setattr(self, name, factory) 33 | return factory 34 | return decorator 35 | else: 36 | name = factory.__name__ 37 | setattr(self, name, factory) 38 | return factory 39 | 40 | 41 | # Instantiate the builtin namespace right away, so that builtin modules can 42 | # register with it 43 | builtin = Namespace("builtin") 44 | -------------------------------------------------------------------------------- /transilience/actions/systemd.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, Dict 3 | from dataclasses import dataclass 4 | import subprocess 5 | import shutil 6 | import shlex 7 | import os 8 | from .action import Action 9 | from . import builtin 10 | 11 | if TYPE_CHECKING: 12 | import transilience.system 13 | 14 | 15 | @builtin.action(name="systemd") 16 | @dataclass 17 | class Systemd(Action): 18 | """ 19 | Same as Ansible's 20 | [builtin.systemd](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/systemd_module.html) 21 | """ 22 | scope: str = "system" 23 | no_block: bool = False 24 | force: bool = False 25 | daemon_reexec: bool = False 26 | daemon_reload: bool = False 27 | unit: Optional[str] = None 28 | enabled: Optional[bool] = None 29 | masked: Optional[bool] = None 30 | state: Optional[str] = None 31 | 32 | def action_summary(self): 33 | summary = "" 34 | 35 | if self.unit is not None: 36 | verbs = [] 37 | if self.masked is not None: 38 | verbs.append("mask") 39 | 40 | if self.enabled is not None: 41 | verbs.append("enable") 42 | 43 | if self.state == "started": 44 | verbs.append("start") 45 | elif self.state == "stopped": 46 | verbs.append("stop") 47 | elif self.state == "reloaded": 48 | verbs.append("reload") 49 | elif self.state == "restarted": 50 | verbs.append("restart") 51 | 52 | if verbs: 53 | summary = ", ".join(verbs) + " " + self.unit 54 | 55 | verbs = [] 56 | if self.daemon_reload: 57 | verbs.append("reload") 58 | if self.daemon_reexec: 59 | verbs.append("restart") 60 | 61 | if verbs: 62 | if summary: 63 | summary += " and " 64 | summary += ", ".join(verbs) + " systemd" 65 | 66 | if not summary: 67 | summary += "systemd action with nothing to do" 68 | 69 | if self.scope != "system": 70 | summary += f" [{self.scope} scope]" 71 | 72 | return summary 73 | 74 | def run_systemctl(self, *args, allow_check=False, **kw): 75 | """ 76 | Run systemctl with logging and common subprocess args 77 | 78 | If allow_check is True, run the command also in check mode 79 | """ 80 | kw.setdefault("env", self._default_env) 81 | kw.setdefault("check", True) 82 | kw.setdefault("capture_output", True) 83 | systemctl_cmd = self._default_systemctl_cmd + list(args) 84 | formatted_cmd = " ".join(shlex.quote(x) for x in systemctl_cmd) 85 | self.log.info("running %s", formatted_cmd) 86 | if self.check and not allow_check: 87 | return 88 | try: 89 | return subprocess.run(systemctl_cmd, **kw) 90 | except subprocess.CalledProcessError as e: 91 | self.log.error("%s: exited with code %d and stderr %r", formatted_cmd, e.returncode, e.stderr) 92 | raise 93 | 94 | def get_unit_info(self) -> Dict[str. str]: 95 | """ 96 | Fetch the current status of the unit 97 | 98 | Documentation of UnitFileState values can be found in man systemctl(1) 99 | """ 100 | res = self.run_systemctl("show", self.unit, "--no-page", check=False, text=True, allow_check=True) 101 | 102 | unit_info: Dict[str, str] = {} 103 | if res.returncode == 0: 104 | for line in res.stdout.splitlines(): 105 | k, v = line.strip().split("=", 1) 106 | unit_info[k] = v 107 | 108 | return unit_info 109 | 110 | def action_run(self, system: transilience.system.System): 111 | super().action_run(system) 112 | self._default_env = dict(os.environ) 113 | if "XDG_RUNTIME_DIR" not in self._default_env: 114 | self._default_env["XDG_RUNTIME_DIR"] = f"/run/user/{os.geteuid()}" 115 | 116 | systemctl = shutil.which("systemctl") 117 | if systemctl is None: 118 | raise RuntimeError("systemctl not found") 119 | 120 | self._default_systemctl_cmd = [systemctl] 121 | 122 | if self.scope != "system": 123 | self._default_systemctl_cmd.append(f"--{self.scope}") 124 | 125 | if self.no_block: 126 | self._default_systemctl_cmd.append("--no-block") 127 | 128 | if self.force: 129 | self._default_systemctl_cmd.append("--force") 130 | 131 | if self.daemon_reload: 132 | self.run_systemctl("daemon-reload") 133 | self.set_changed() 134 | 135 | if self.daemon_reexec: 136 | self.run_systemctl("daemon-reexec") 137 | self.set_changed() 138 | 139 | if self.unit is not None: 140 | # Documentation of UnitFileState values can be found in man systemctl(1) 141 | unit_info = self.get_unit_info() 142 | 143 | if self.masked is not None: 144 | orig_masked = unit_info.get("UnitFileState") == "masked" 145 | if self.masked != orig_masked: 146 | self.run_systemctl("mask" if self.masked else "unmask", self.unit) 147 | self.set_changed() 148 | 149 | if self.enabled is not None: 150 | orig_enabled = unit_info.get("UnitFileState") in ( 151 | "enabled", "enabled-runtime", "alias", "static", 152 | "indirect", "generated", "transient") 153 | if self.enabled != orig_enabled: 154 | self.run_systemctl("enable" if self.enabled else "disable", self.unit) 155 | self.set_changed() 156 | 157 | if self.state is not None: 158 | action = None 159 | cur_state = unit_info.get("ActiveState") 160 | # self.log.info("ActiveState pre: %r", cur_state) 161 | if cur_state is not None: 162 | if self.state == "started": 163 | if cur_state not in ("active", "activating"): 164 | action = "start" 165 | elif self.state == "stopped": 166 | if cur_state in ("active", "activating", "deactivating"): 167 | action = "stop" 168 | elif self.state == "reloaded": 169 | if cur_state not in ("active", "activating"): 170 | action = "start" 171 | else: 172 | action = "reload" 173 | elif self.state == "restarted": 174 | if cur_state not in ("active", "activating"): 175 | action = "start" 176 | else: 177 | action = "restart" 178 | 179 | if action is not None: 180 | self.run_systemctl(action, self.unit) 181 | self.set_changed() 182 | -------------------------------------------------------------------------------- /transilience/actions/user/__init__.py: -------------------------------------------------------------------------------- 1 | from . import action # noqa 2 | -------------------------------------------------------------------------------- /transilience/actions/user/action.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Optional, List 3 | from dataclasses import dataclass, field 4 | import platform 5 | import shlex 6 | import os 7 | from .. import builtin 8 | from ..action import Action 9 | 10 | if TYPE_CHECKING: 11 | import transilience.system 12 | from . import backend 13 | 14 | 15 | @builtin.action(name="user") 16 | @dataclass 17 | class User(Action): 18 | """ 19 | Same as Ansible's 20 | [builtin.user](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/user_module.html) 21 | """ 22 | name: Optional[str] = None 23 | state: str = "present" 24 | uid: Optional[int] = None 25 | hidden: Optional[bool] = None 26 | non_unique: bool = False 27 | seuser: Optional[str] = None 28 | group: Optional[str] = None 29 | groups: List[str] = field(default_factory=list) 30 | comment: Optional[str] = None 31 | shell: Optional[str] = None 32 | password: Optional[str] = None 33 | password_expire_max: Optional[int] = None 34 | password_expire_min: Optional[int] = None 35 | password_lock: Optional[bool] = None 36 | force: bool = False 37 | remove: bool = False 38 | create_home: bool = True 39 | move_home: bool = False 40 | skeleton: Optional[str] = None 41 | system: bool = False 42 | login_class: Optional[str] = None 43 | append: bool = False 44 | generate_ssh_key: bool = False 45 | ssh_key_bits: Optional[int] = None 46 | ssh_key_comment: Optional[str] = None 47 | ssh_key_file: Optional[str] = None 48 | ssh_key_passphrase: Optional[str] = None 49 | ssh_key_type: str = "rsa" 50 | ssh_key_fingerprint: Optional[str] = None # Result only 51 | ssh_key_pubkey: Optional[str] = None # Result only 52 | authorization: Optional[str] = None 53 | home: Optional[str] = None 54 | expires: Optional[float] = None 55 | local: bool = False 56 | profile: Optional[str] = None 57 | role: Optional[str] = None 58 | update_password: str = "always" 59 | 60 | def __post_init__(self): 61 | super().__post_init__() 62 | if self.name is None: 63 | raise TypeError(f"{self.__class__}.name cannot be None") 64 | 65 | # FIXME: not documented? 66 | # self.umask = module.params['umask'] 67 | # if self.umask is not None and self.local: 68 | # module.fail_json(msg="'umask' can not be used with 'local'") 69 | 70 | # if self.expires is not None: 71 | # try: 72 | # self.expires = time.gmtime(module.params['expires']) 73 | # except Exception as e: 74 | # module.fail_json(msg="Invalid value for 'expires' %s: %s" % (self.expires, to_native(e))) 75 | 76 | if self.ssh_key_file is None: 77 | self.ssh_key_file = os.path.join(".ssh", f"id_{self.ssh_key_type}") 78 | 79 | if not self.groups and self.append: 80 | raise ValueError("'append' is set, but no 'groups' are specified. Use 'groups' for appending new groups") 81 | 82 | def action_summary(self): 83 | if self.state == 'absent': 84 | return f"Remove user {self.name!r}" 85 | else: 86 | return f"Create user {self.name!r}" 87 | 88 | def get_backend(self) -> backend.Generic: 89 | system = platform.system() 90 | if system == "Linux": 91 | distribution: Optional[str] 92 | try: 93 | with open("/etc/os-release", "rt") as fd: 94 | for line in fd: 95 | k, v = line.split("=", 1) 96 | if k == "ID": 97 | distribution = shlex.split(v)[0] 98 | break 99 | else: 100 | distribution = None 101 | except FileNotFoundError: 102 | distribution = None 103 | 104 | if distribution == "alpine": 105 | from . import linux 106 | return linux.Alpine(self) 107 | else: 108 | from . import backend 109 | return backend.Generic(self) 110 | if system in ('FreeBSD', 'DragonFly'): 111 | from . import freebsd 112 | return freebsd.Generic(self) 113 | else: 114 | raise NotImplementedError(f"User backend for {system!r} platform is not available") 115 | 116 | def run_change_command(self, cmd: List[str], input: Optional[bytes] = None): 117 | if not self.check: 118 | self.run_command(cmd, input=input) 119 | self.set_changed() 120 | 121 | def action_run(self, system: transilience.system.System): 122 | super().action_run(system) 123 | 124 | backend = self.get_backend() 125 | 126 | backend.check_password_encrypted() 127 | 128 | if self.state == 'absent': 129 | backend.do_absent() 130 | elif self.state == 'present': 131 | backend.do_present() 132 | 133 | if backend.user_exists() and self.state == 'present': 134 | backend.do_update() 135 | 136 | # deal with password expire max 137 | if self.password_expire_max: 138 | if backend.user_exists(): 139 | backend.set_password_expire_max() 140 | 141 | # deal with password expire min 142 | if self.password_expire_min: 143 | if backend.user_exists(): 144 | backend.set_password_expire_min() 145 | -------------------------------------------------------------------------------- /transilience/actions/user/freebsd.py: -------------------------------------------------------------------------------- 1 | # Implementation adapter from Ansible's user module, which is Copyright: © 2012, 2 | # Stephen Fromm , and licensed under the GNU General Public 3 | # License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) 4 | from __future__ import annotations 5 | import time 6 | import os 7 | from . import backend 8 | 9 | 10 | class User(backend.User): 11 | """ 12 | This is a FreeBSD User manipulation class - it uses the pw command 13 | to manipulate the user database, followed by the chpass command 14 | to change the password. 15 | """ 16 | def get_shadowfile(self): 17 | return '/etc/master.passwd' 18 | 19 | def get_shadowfile_expire_index(self): 20 | return 6 21 | 22 | def get_date_format(self): 23 | return '%d-%b-%Y' 24 | 25 | def _handle_lock(self): 26 | info = self.user_info() 27 | if self.action.password_lock and not info.pw_passwd.startswith('*LOCKED*'): 28 | cmd = [self.action.find_command('pw'), 'lock', self.action.name] 29 | if self.action.uid is not None and info.pw_uid != self.action.uid: 30 | cmd.append('-u') 31 | cmd.append(str(self.action.uid)) 32 | self.action.run_change_command(cmd) 33 | elif self.action.password_lock is False and info.pw_passwd.startswith('*LOCKED*'): 34 | cmd = [self.action.find_command('pw'), 'unlock', self.action.name] 35 | if self.action.uid is not None and info.pw_uid != self.action.uid: 36 | cmd.append('-u') 37 | cmd.append(str(self.action.uid)) 38 | self.action.run_change_command(cmd) 39 | 40 | def remove_user(self): 41 | cmd = [self.action.find_command("pw"), 'userdel', '-n', self.action.name] 42 | if self.action.remove: 43 | cmd.append('-r') 44 | self.action.run_change_command(cmd) 45 | 46 | def create_user(self): 47 | cmd = [self.action.find_command("pw"), 'useradd', '-n', self.action.name] 48 | 49 | if self.action.uid is not None: 50 | cmd.append('-u') 51 | cmd.append(str(self.action.uid)) 52 | if self.action.non_unique: 53 | cmd.append('-o') 54 | 55 | if self.action.comment is not None: 56 | cmd.append('-c') 57 | cmd.append(self.action.comment) 58 | 59 | if self.action.home is not None: 60 | cmd.append('-d') 61 | cmd.append(self.action.home) 62 | 63 | if self.action.group is not None: 64 | if not self.group_exists(self.action.group): 65 | raise RuntimeError(f"Group {self.action.group!r} does not exist") 66 | cmd.append('-g') 67 | cmd.append(self.action.group) 68 | 69 | if self.action.groups is not None: 70 | groups = self.get_groups_set() 71 | cmd.append('-G') 72 | cmd.append(','.join(groups)) 73 | 74 | if self.action.create_home: 75 | cmd.append('-m') 76 | 77 | if self.action.skeleton is not None: 78 | cmd.append('-k') 79 | cmd.append(self.action.skeleton) 80 | 81 | # if self.umask is not None: 82 | # cmd.append('-K') 83 | # cmd.append('UMASK=' + self.umask) 84 | 85 | if self.action.shell is not None: 86 | cmd.append('-s') 87 | cmd.append(self.action.shell) 88 | 89 | if self.action.login_class is not None: 90 | cmd.append('-L') 91 | cmd.append(self.action.login_class) 92 | 93 | if self.action.expires is not None: 94 | cmd.append('-e') 95 | if self.action.expires < 0: 96 | cmd.append('0') 97 | else: 98 | cmd.append(str(self.action.expires)) 99 | 100 | # system cannot be handled currently - should we error if its requested? 101 | # create the user 102 | self.action.run_change_command(cmd) 103 | 104 | # we have to set the password in a second command 105 | if self.action.password is not None: 106 | cmd = [self.action.find_command('chpass'), '-p', self.action.password, self.action.name] 107 | self.action.run_change_command(cmd) 108 | 109 | # we have to lock/unlock the password in a distinct command 110 | self._handle_lock() 111 | 112 | def modify_user(self): 113 | cmd = [self.action.find_command("pw"), 'usermod', '-n', self.action.name] 114 | cmd_len = len(cmd) 115 | info = self.user_info() 116 | 117 | if self.action.uid is not None and info.pw_uid != self.action.uid: 118 | cmd.append('-u') 119 | cmd.append(str(self.action.uid)) 120 | 121 | if self.action.non_unique: 122 | cmd.append('-o') 123 | 124 | if self.action.comment is not None and info.pw_gecos != self.action.comment: 125 | cmd.append('-c') 126 | cmd.append(self.action.comment) 127 | 128 | if self.action.home is not None: 129 | if ((info.pw_dir != self.action.home and self.action.move_home) 130 | or (not os.path.exists(self.action.home) and self.action.create_home)): 131 | cmd.append('-m') 132 | if info.pw_dir != self.action.home: 133 | cmd.append('-d') 134 | cmd.append(self.action.home) 135 | 136 | if self.action.skeleton is not None: 137 | cmd.append('-k') 138 | cmd.append(self.action.skeleton) 139 | 140 | # if self.umask is not None: 141 | # cmd.append('-K') 142 | # cmd.append('UMASK=' + self.umask) 143 | 144 | if self.action.group is not None: 145 | if not self.group_exists(self.action.group): 146 | raise RuntimeError(f"Group {self.action.group!r} does not exist") 147 | ginfo = self.action.group_info(self.action.group) 148 | if info.pw_gid != ginfo.gr_gid: 149 | cmd.append('-g') 150 | cmd.append(self.action.group) 151 | 152 | if self.action.shell is not None and info.pw_shell != self.action.shell: 153 | cmd.append('-s') 154 | cmd.append(self.action.shell) 155 | 156 | if self.action.login_class is not None: 157 | # find current login class 158 | user_login_class = None 159 | shadowfile = self.get_shadowfile() 160 | if os.path.exists(shadowfile) and os.access(shadowfile, os.R_OK): 161 | with open(shadowfile, 'rt') as fd: 162 | match = self.action.name + ":" 163 | for line in fd: 164 | if line.startswith(match): 165 | user_login_class = line.split(':')[4] 166 | 167 | # act only if login_class change 168 | if self.action.login_class != user_login_class: 169 | cmd.append('-L') 170 | cmd.append(self.action.login_class) 171 | 172 | if self.action.groups is not None: 173 | current_groups = self.user_group_membership() 174 | groups = self.get_groups_set() 175 | 176 | group_diff = set(current_groups).symmetric_difference(groups) 177 | groups_need_mod = False 178 | 179 | if group_diff: 180 | if self.action.append: 181 | for g in groups: 182 | if g in group_diff: 183 | groups_need_mod = True 184 | break 185 | else: 186 | groups_need_mod = True 187 | 188 | if groups_need_mod: 189 | cmd.append('-G') 190 | new_groups = groups 191 | if self.action.append: 192 | new_groups = groups | set(current_groups) 193 | cmd.append(','.join(new_groups)) 194 | 195 | if self.action.expires is not None: 196 | current_expires = int(self.user_password()[1]) 197 | 198 | # If expiration is negative or zero and the current expiration is greater than zero, disable expiration. 199 | # In OpenBSD, setting expiration to zero disables expiration. It does not expire the account. 200 | if self.action.expires <= 0: 201 | if current_expires > 0: 202 | cmd.append('-e') 203 | cmd.append('0') 204 | else: 205 | # Convert days since Epoch to seconds since Epoch as struct_time 206 | current_expire_date = time.gmtime(current_expires) 207 | 208 | # Current expires is negative or we compare year, month, and day only 209 | if current_expires <= 0 or current_expire_date[:3] != time.gmtime(self.action.expires)[:3]: 210 | cmd.append('-e') 211 | cmd.append(str(self.action.expires)) 212 | 213 | # modify the user if cmd will do anything 214 | if cmd_len != len(cmd): 215 | self.action.run_change_command(cmd) 216 | 217 | # we have to set the password in a second command 218 | if (self.action.update_password == 'always' and self.action.password is not None and 219 | info.pw_passwd.lstrip('*LOCKED*') != self.action.password.lstrip('*LOCKED*')): 220 | cmd = [self.action.find_command('chpass'), '-p', self.action.password, self.action.name] 221 | self.action.run_change_command(cmd) 222 | 223 | # we have to lock/unlock the password in a distinct command 224 | self._handle_lock() 225 | -------------------------------------------------------------------------------- /transilience/actions/user/linux.py: -------------------------------------------------------------------------------- 1 | # Implementation adapter from Ansible's user module, which is Copyright: © 2012, 2 | # Stephen Fromm , and licensed under the GNU General Public 3 | # License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) 4 | from __future__ import annotations 5 | from . import backend 6 | 7 | 8 | class Alpine(backend.User): 9 | """ 10 | This is the class for use on systems that have adduser, deluser, and 11 | delgroup commands 12 | """ 13 | # That was the original comment on Ansible, but then it was only 14 | # instantiated on Alpine, and indeed on Debian, the arguments passed to 15 | # adduser are wrong. Linux systems will go with backend.User's basic 16 | # implementation, except I guess, Alpine 17 | 18 | def create_user(self): 19 | cmd = [self.action.find_command('adduser')] 20 | 21 | cmd.append('-D') 22 | 23 | if self.action.uid is not None: 24 | cmd.append('-u') 25 | cmd.append(self.action.uid) 26 | 27 | if self.action.group is not None: 28 | if not self.group_exists(self.action.group): 29 | raise RuntimeError(f"Group {self.action.group!r} does not exist") 30 | cmd.append('-G') 31 | cmd.append(self.action.group) 32 | 33 | if self.action.comment is not None: 34 | cmd.append('-g') 35 | cmd.append(self.action.comment) 36 | 37 | if self.action.home is not None: 38 | cmd.append('-h') 39 | cmd.append(self.action.home) 40 | 41 | if self.action.shell is not None: 42 | cmd.append('-s') 43 | cmd.append(self.action.shell) 44 | 45 | if not self.action.create_home: 46 | cmd.append('-H') 47 | 48 | if self.action.skeleton is not None: 49 | cmd.append('-k') 50 | cmd.append(self.action.skeleton) 51 | 52 | # if self.umask is not None: 53 | # cmd.append('-K') 54 | # cmd.append('UMASK=' + self.umask) 55 | 56 | if self.action.system: 57 | cmd.append('-S') 58 | 59 | cmd.append(self.action.name) 60 | 61 | self.action.run_change_command(cmd) 62 | 63 | if self.action.password is not None: 64 | cmd = [self.action.find_command("chpasswd")] 65 | cmd.append('--encrypted') 66 | data = f'{self.action.name}:{self.action.password}' 67 | self.action.run_change_command(cmd, input=data.encode()) 68 | 69 | # Add to additional groups 70 | if self.action.groups: 71 | adduser_cmd = self.action.find_command("adduser") 72 | for group in self.get_groups_set(): 73 | self.action.run_change_command([adduser_cmd, self.action.name, group]) 74 | 75 | def remove_user(self): 76 | cmd = [self.action.find_command('deluser'), self.action.name] 77 | if self.action.remove: 78 | cmd.append('--remove-home') 79 | self.action.run_change_command(cmd) 80 | 81 | def modify_user(self): 82 | current_groups = self.user_group_membership() 83 | groups = [] 84 | info = self.user_info() 85 | add_cmd_bin = self.action.find_command('adduser') 86 | remove_cmd_bin = self.action.find_command('delgroup') 87 | 88 | # Manage group membership 89 | if self.action.groups: 90 | groups = self.get_groups_set() 91 | group_diff = current_groups.symmetric_difference(groups) 92 | 93 | if group_diff: 94 | for g in groups: 95 | if g in group_diff: 96 | self.action.run_change_command([add_cmd_bin, self.action.name, g]) 97 | 98 | for g in group_diff: 99 | if g not in groups and not self.action.append: 100 | self.action.run_change_command([remove_cmd_bin, self.action.name, g]) 101 | 102 | # Manage password 103 | if (self.action.update_password == 'always' 104 | and self.action.password is not None 105 | and info[1] != self.action.password): 106 | cmd = [self.action.find_command('chpasswd'), '--encrypted'] 107 | data = f'{self.action.name}:{self.action.password}' 108 | self.action.run_change_command(cmd, input=data) 109 | -------------------------------------------------------------------------------- /transilience/ansible/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Type, Dict, Any 3 | import zipfile 4 | import os 5 | import yaml 6 | from .exceptions import RoleNotFoundError 7 | from .role import AnsibleRoleFilesystem, AnsibleRoleZip 8 | 9 | if TYPE_CHECKING: 10 | from ..role import Role 11 | YamlDict = Dict[str, Any] 12 | 13 | # Currently supported: 14 | # - actions in Transilience's builtin.* namespace 15 | # - arguments not supported by the Transilience action are detected and raise an exception 16 | # - template action (without block_start_string, block_end_string, 17 | # lstrip_blocks, newline_sequence, output_encoding, trim_blocks, validate, 18 | # variable_end_string, variable_start_string) 19 | # - jinja templates in string parameters, even when present inside lists and 20 | # dicts and nested lists and dicts 21 | # - variables from facts provided by transilience.actions.facts.Platform 22 | # - variables used in templates used in jitsi templates, both in strings and 23 | # in files 24 | # - notify/handlers if defined inside thet same role (cannot notify 25 | # handlers from other roles) 26 | # - when: expressions with: 27 | # - variable references 28 | # - is defined 29 | # - is undefined 30 | # - not 31 | # - and 32 | # - or 33 | 34 | 35 | class RoleLoader: 36 | def __init__(self, name: str): 37 | self.name = name 38 | 39 | def load_parsed_tasks(self, tasks: YamlDict): 40 | for task_info in tasks: 41 | self.ansible_role.add_task(task_info) 42 | 43 | def load_parsed_handlers(self, handlers: YamlDict): 44 | for info in handlers: 45 | h = self.ansible_role.create_handler_role(info["name"]) 46 | h.add_task(info) 47 | self.ansible_role.handlers[info["name"]] = h 48 | 49 | def load(self): 50 | self.load_handlers() 51 | self.load_tasks() 52 | 53 | def get_role_class(self) -> Type[Role]: 54 | return self.ansible_role.get_role_class() 55 | 56 | def get_python_code(self) -> str: 57 | lines = self.ansible_role.get_python_code_module() 58 | 59 | code = "\n".join(lines) 60 | try: 61 | from yapf.yapflib import yapf_api 62 | except ModuleNotFoundError: 63 | return code 64 | code, changed = yapf_api.FormatCode(code) 65 | return code 66 | 67 | 68 | class FilesystemRoleLoader(RoleLoader): 69 | def __init__(self, name: str, roles_root: str = "roles"): 70 | super().__init__(name) 71 | self.root = os.path.join(roles_root, name) 72 | self.ansible_role = AnsibleRoleFilesystem(name=name, root=self.root) 73 | 74 | def load_tasks(self): 75 | tasks_file = os.path.join(self.root, "tasks", "main.yaml") 76 | 77 | try: 78 | with open(tasks_file, "rt") as fd: 79 | tasks = yaml.load(fd, Loader=yaml.CLoader) 80 | except FileNotFoundError: 81 | raise RoleNotFoundError(self.name) 82 | 83 | self.load_parsed_tasks(tasks) 84 | 85 | def load_handlers(self): 86 | handlers_file = os.path.join(self.root, "handlers", "main.yaml") 87 | 88 | try: 89 | with open(handlers_file, "rt") as fd: 90 | handlers = yaml.load(fd, Loader=yaml.CLoader) 91 | except FileNotFoundError: 92 | return 93 | 94 | self.load_parsed_handlers(handlers) 95 | 96 | 97 | class ZipRoleLoader(RoleLoader): 98 | """ 99 | Load Ansible roles from zip files. 100 | 101 | From Python 3.9 we can replace this with importlib.resources, and have a 102 | generic loader for both data in zipfiles and data bundled with modules. 103 | Before Python 3.9, it is hard to deal with resources that are directory 104 | trees. 105 | """ 106 | def __init__(self, name: str, path: str): 107 | super().__init__(name) 108 | self.zipfile = zipfile.ZipFile(path, "r") 109 | self.ansible_role = AnsibleRoleZip(name=name, archive=self.zipfile, root=os.path.join("roles", self.name)) 110 | 111 | def load_tasks(self): 112 | try: 113 | with self.zipfile.open(os.path.join("roles", self.name, "tasks", "main.yaml"), "r") as fd: 114 | tasks = yaml.load(fd, Loader=yaml.CLoader) 115 | except KeyError: 116 | raise RoleNotFoundError(self.name) 117 | 118 | self.load_parsed_tasks(tasks) 119 | 120 | def load_handlers(self): 121 | try: 122 | with self.zipfile.open(os.path.join("roles", self.name, "handlers", "main.yaml"), "r") as fd: 123 | handlers = yaml.load(fd, Loader=yaml.CLoader) 124 | except KeyError: 125 | return 126 | 127 | self.load_parsed_handlers(handlers) 128 | -------------------------------------------------------------------------------- /transilience/ansible/conditionals.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Sequence, Callable, Dict, Any, Set 3 | import jinja2.parser 4 | import jinja2.meta 5 | import jinja2.visitor 6 | from jinja2 import nodes 7 | 8 | if TYPE_CHECKING: 9 | from .. import template 10 | 11 | 12 | class FindVars(jinja2.visitor.NodeVisitor): 13 | def __init__(self): 14 | self.found: Set[str] = set() 15 | 16 | def visit_Name(self, node): 17 | if node.ctx == "load": 18 | self.found.add(node.name) 19 | 20 | 21 | def to_python_code(node: nodes.Node) -> str: 22 | if isinstance(node, nodes.Name): 23 | if node.ctx == "load": 24 | return f"self.{node.name}" 25 | else: 26 | raise NotImplementedError(f"jinja2 Name nodes with ctx={node.ctx!r} are not supported: {node!r}") 27 | elif isinstance(node, nodes.Test): 28 | if node.name == "defined": 29 | return f"{to_python_code(node.node)} is not None" 30 | elif node.name == "undefined": 31 | return f"{to_python_code(node.node)} is None" 32 | else: 33 | raise NotImplementedError(f"jinja2 Test nodes with name={node.name!r} are not supported: {node!r}") 34 | elif isinstance(node, nodes.Not): 35 | if isinstance(node.node, nodes.Test): 36 | # Special case match well-known structures for more idiomatic Python 37 | if node.node.name == "defined": 38 | return f"{to_python_code(node.node.node)} is None" 39 | elif node.node.name == "undefined": 40 | return f"{to_python_code(node.node.node)} is not None" 41 | elif isinstance(node.node, nodes.Name): 42 | return f"not {to_python_code(node.node)}" 43 | return f"not ({to_python_code(node.node)})" 44 | elif isinstance(node, nodes.Or): 45 | return f"({to_python_code(node.left)} or {to_python_code(node.right)})" 46 | elif isinstance(node, nodes.And): 47 | return f"({to_python_code(node.left)} and {to_python_code(node.right)})" 48 | else: 49 | raise NotImplementedError(f"jinja2 {node.__class__} nodes are not supported: {node!r}") 50 | 51 | 52 | class Conditional: 53 | """ 54 | An Ansible conditional expression 55 | """ 56 | def __init__(self, engine: template.Engine, body: str): 57 | # Original unparsed expression 58 | self.body: str = body 59 | # Expression compiled to a callable 60 | self.expression: Callable = engine.env.compile_expression(body) 61 | parser = jinja2.parser.Parser(engine.env, body, state='variable') 62 | self.jinja2_ast: nodes.Node = parser.parse_expression() 63 | 64 | def to_jsonable(self) -> Dict[str, Any]: 65 | return { 66 | "node": "conditional", 67 | "body": self.body, 68 | } 69 | 70 | def list_role_vars(self) -> Sequence[str]: 71 | fv = FindVars() 72 | fv.visit(self.jinja2_ast) 73 | return fv.found 74 | 75 | def evaluate(self, ctx: Dict[str, Any]): 76 | ctx = {name: val for name, val in ctx.items() if val is not None} 77 | return self.expression(**ctx) 78 | 79 | def get_python_code(self) -> str: 80 | return to_python_code(self.jinja2_ast) 81 | -------------------------------------------------------------------------------- /transilience/ansible/exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | 4 | class RoleNotFoundError(Exception): 5 | pass 6 | 7 | 8 | class RoleNotLoadedError(Exception): 9 | pass 10 | -------------------------------------------------------------------------------- /transilience/ansible/parameters.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Any, Optional, List, Dict, Sequence 3 | import os 4 | import re 5 | 6 | if TYPE_CHECKING: 7 | from dataclasses import Field 8 | from ..role import Role 9 | 10 | 11 | re_template_start = re.compile(r"{{|{%|{#") 12 | re_single_var = re.compile(r"^{{\s*(\w*)\s*}}$") 13 | 14 | 15 | class Parameter: 16 | def list_role_vars(self, role: Role) -> Sequence[str]: 17 | """ 18 | List the name of template variables used by this parameter 19 | """ 20 | return () 21 | 22 | @classmethod 23 | def create(cls, f: Optional[Field], value: Any): 24 | if isinstance(value, str): 25 | # Hook for templated strings 26 | # 27 | # For reference, Jinja2 template detection in Ansible is in 28 | # template/__init__.py look for Templar.is_possibly_template, 29 | # Templar.is_template, and is_template 30 | if re_template_start.search(value): 31 | mo = re_single_var.match(value) 32 | if f is not None and f.metadata.get("type") == "local_file": 33 | if mo: 34 | return ParameterVarFileReference(value) 35 | else: 36 | return ParameterTemplatedFileReference(value) 37 | elif f is not None and f.type == "List[str]": 38 | if mo: 39 | return ParameterVarReferenceStringList(mo.group(1)) 40 | else: 41 | return ParameterTemplatedStringList(value) 42 | else: 43 | if mo: 44 | return ParameterVarReference(mo.group(1)) 45 | else: 46 | return ParameterTemplateString(value) 47 | elif f is not None and f.metadata.get("type") == "local_file": 48 | return ParameterFileReference(value) 49 | elif f is not None and f.type == "List[str]": 50 | return ParameterAny(value.split(',')) 51 | else: 52 | return ParameterAny(value) 53 | elif isinstance(value, int): 54 | if f.metadata.get("octal"): 55 | return ParameterOctal(value) 56 | else: 57 | return ParameterAny(value) 58 | elif isinstance(value, list): 59 | elements = [] 60 | for val in value: 61 | elements.append(cls.create(None, val)) 62 | return ParameterList(elements) 63 | elif isinstance(value, dict): 64 | elements = {} 65 | for name, val in value.items(): 66 | elements[name] = cls.create(None, val) 67 | return ParameterDict(elements) 68 | else: 69 | return ParameterAny(value) 70 | 71 | 72 | class ParameterList(Parameter): 73 | def __init__(self, parameters: List[Parameter]): 74 | self.parameters = parameters 75 | 76 | def list_role_vars(self, role: Role) -> Sequence[str]: 77 | for p in self.parameters: 78 | yield from p.list_role_vars(role) 79 | 80 | def get_value(self, role: Role): 81 | return [p.get_value(role) for p in self.parameters] 82 | 83 | def __repr__(self): 84 | return f"[{', '.join(repr(p) for p in self.parameters)}]" 85 | 86 | def to_jsonable(self) -> Dict[str, Any]: 87 | return { 88 | "node": "parameter", 89 | "type": "list", 90 | "value": [p.to_jsonable() for p in self.parameters], 91 | } 92 | 93 | 94 | class ParameterDict(Parameter): 95 | def __init__(self, parameters: Dict[str, Parameter]): 96 | self.parameters = parameters 97 | 98 | def list_role_vars(self, role: Role) -> Sequence[str]: 99 | for p in self.parameters.values(): 100 | yield from p.list_role_vars(role) 101 | 102 | def get_value(self, role: Role): 103 | return {name: p.get_value(role) for name, p in self.parameters.items()} 104 | 105 | def __repr__(self): 106 | parts = [] 107 | for name, p in self.parameters.items(): 108 | parts.append(f"{name!r}: {p!r}") 109 | return "{" + ', '.join(parts) + "}" 110 | 111 | def to_jsonable(self) -> Dict[str, Any]: 112 | return { 113 | "node": "parameter", 114 | "type": "dict", 115 | "value": [{name, p.to_jsonable()} for name, p in self.parameters.items()], 116 | } 117 | 118 | 119 | class ParameterAny(Parameter): 120 | def __init__(self, value: Any): 121 | self.value = value 122 | 123 | def get_value(self, role: Role): 124 | return self.value 125 | 126 | def __repr__(self): 127 | return repr(self.value) 128 | 129 | def to_jsonable(self) -> Dict[str, Any]: 130 | return { 131 | "node": "parameter", 132 | "type": "scalar", 133 | "value": self.value, 134 | } 135 | 136 | 137 | class ParameterOctal(ParameterAny): 138 | def __repr__(self): 139 | if isinstance(self.value, int): 140 | return f"0o{self.value:o}" 141 | else: 142 | return super().__repr__() 143 | 144 | def to_jsonable(self) -> Dict[str, Any]: 145 | return { 146 | "node": "parameter", 147 | "type": "octal", 148 | "value": "0o{self.value:o}" if isinstance(self.value, int) else self.value 149 | } 150 | 151 | 152 | class ParameterTemplatedStringList(ParameterAny): 153 | def list_role_vars(self, role: Role) -> Sequence[str]: 154 | return role.template_engine.list_string_template_vars(self.value) 155 | 156 | def __repr__(self): 157 | return f"self.render_string({self.value!r}).split(',')" 158 | 159 | def get_value(self, role: Role): 160 | return role.render_string(self.value).split(',') 161 | 162 | def to_jsonable(self) -> Dict[str, Any]: 163 | return { 164 | "node": "parameter", 165 | "type": "templated_string_list", 166 | "value": self.value 167 | } 168 | 169 | 170 | class ParameterVarReferenceStringList(ParameterAny): 171 | def list_role_vars(self, role: Role) -> Sequence[str]: 172 | yield self.value 173 | 174 | def __repr__(self): 175 | return f"self.{self.value}.split(',')" 176 | 177 | def get_value(self, role: Role): 178 | return getattr(role, self.value).split(',') 179 | 180 | def to_jsonable(self) -> Dict[str, Any]: 181 | return { 182 | "node": "parameter", 183 | "type": "var_reference_string_list", 184 | "value": self.value 185 | } 186 | 187 | 188 | class ParameterTemplatePath(ParameterAny): 189 | def list_role_vars(self, role: Role) -> Sequence[str]: 190 | yield from role.template_engine.list_file_template_vars(os.path.join("templates", self.value)) 191 | 192 | def __repr__(self): 193 | path = os.path.join("templates", self.value) 194 | return f"self.render_file({path!r})" 195 | 196 | def get_value(self, role: Role): 197 | path = os.path.join("templates", self.value) 198 | return role.render_file(path) 199 | 200 | def to_jsonable(self) -> Dict[str, Any]: 201 | return { 202 | "node": "parameter", 203 | "type": "template_path", 204 | "value": self.value 205 | } 206 | 207 | 208 | class ParameterVarReference(ParameterAny): 209 | def list_role_vars(self, role: Role) -> Sequence[str]: 210 | yield self.value 211 | 212 | def __repr__(self): 213 | return f"self.{self.value}" 214 | 215 | def get_value(self, role: Role): 216 | return getattr(role, self.value) 217 | 218 | def to_jsonable(self) -> Dict[str, Any]: 219 | return { 220 | "node": "parameter", 221 | "type": "var_reference", 222 | "value": self.value 223 | } 224 | 225 | 226 | class ParameterTemplateString(ParameterAny): 227 | def list_role_vars(self, role: Role) -> Sequence[str]: 228 | return role.template_engine.list_string_template_vars(self.value) 229 | 230 | def __repr__(self): 231 | return f"self.render_string({self.value!r})" 232 | 233 | def get_value(self, role: Role): 234 | return role.render_string(self.value) 235 | 236 | def to_jsonable(self) -> Dict[str, Any]: 237 | return { 238 | "node": "parameter", 239 | "type": "template_string", 240 | "value": self.value 241 | } 242 | 243 | 244 | class ParameterVarFileReference(ParameterAny): 245 | def list_role_vars(self, role: Role) -> Sequence[str]: 246 | yield self.value 247 | 248 | def __repr__(self): 249 | return f"self.lookup_file(os.path.join('files', self.{self.value}))" 250 | 251 | def get_value(self, role: Role): 252 | return role.lookup_file(os.path.join("files", getattr(role, self.value))) 253 | 254 | def to_jsonable(self) -> Dict[str, Any]: 255 | return { 256 | "node": "parameter", 257 | "type": "var_file_reference", 258 | "value": self.value 259 | } 260 | 261 | 262 | class ParameterTemplatedFileReference(ParameterAny): 263 | def list_role_vars(self, role: Role) -> Sequence[str]: 264 | return role.template_engine.list_string_template_vars(self.value) 265 | 266 | def __repr__(self): 267 | return f"self.lookup_file(os.path.join('files', self.render_string({self.value!r})))" 268 | 269 | def get_value(self, role: Role): 270 | return role.lookup_file(os.path.join("files", role.render_string(self.value))) 271 | 272 | def to_jsonable(self) -> Dict[str, Any]: 273 | return { 274 | "node": "parameter", 275 | "type": "templated_file_reference", 276 | "value": self.value 277 | } 278 | 279 | 280 | class ParameterFileReference(ParameterAny): 281 | def __repr__(self): 282 | return f"self.lookup_file(os.path.join('files', {self.value!r}))" 283 | 284 | def get_value(self, role: Role): 285 | return role.lookup_file(os.path.join("files", self.value)) 286 | 287 | def to_jsonable(self) -> Dict[str, Any]: 288 | return { 289 | "node": "parameter", 290 | "type": "file_reference", 291 | "value": self.value 292 | } 293 | -------------------------------------------------------------------------------- /transilience/ansible/role.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Type, Dict, Any, List, Optional, Set, Sequence 3 | from dataclasses import fields, field, make_dataclass 4 | import zipfile 5 | import shlex 6 | import re 7 | from ..actions import facts, builtin 8 | from ..role import Role, with_facts 9 | from .. import template 10 | from .tasks import Task, TaskTemplate 11 | from .conditionals import Conditional 12 | from .exceptions import RoleNotLoadedError 13 | 14 | if TYPE_CHECKING: 15 | YamlDict = Dict[str, Any] 16 | 17 | 18 | class AnsibleRole: 19 | def __init__(self, name: str, uses_facts: bool = True): 20 | self.name = name 21 | self.uses_facts = uses_facts 22 | self.tasks: List[Task] = [] 23 | self.handlers: Dict[str, "AnsibleRole"] = {} 24 | self.template_engine: template.Engine 25 | 26 | def add_task(self, task_info: YamlDict): 27 | candidates = [] 28 | 29 | for key in task_info.keys(): 30 | if key in ("name", "args", "notify", "when"): 31 | continue 32 | candidates.append(key) 33 | 34 | if len(candidates) != 1: 35 | raise RoleNotLoadedError(f"could not find a known module in task {task_info!r}") 36 | 37 | modname = candidates[0] 38 | if modname.startswith("ansible.builtin."): 39 | name = modname[16:] 40 | else: 41 | name = modname 42 | 43 | args: YamlDict 44 | if isinstance(task_info[name], dict): 45 | args = task_info[name] 46 | else: 47 | args = task_info.get("args", {}) 48 | # Fixups for command: in Ansible it can be a simple string instead 49 | # of a dict 50 | if name == "command": 51 | args["argv"] = shlex.split(task_info[name]) 52 | else: 53 | raise RoleNotLoadedError(f"ansible module argument for {modname} is not a dict") 54 | 55 | if name == "template": 56 | task = TaskTemplate(args, task_info) 57 | else: 58 | action_cls = getattr(builtin, name, None) 59 | if action_cls is None: 60 | raise RoleNotLoadedError(f"Action builtin.{name} not available in Transilience") 61 | 62 | transilience_name = f"builtin.{name}" 63 | 64 | task = Task(action_cls, args, task_info, transilience_name) 65 | 66 | notify = task_info.get("notify") 67 | if notify is not None: 68 | if isinstance(notify, str): 69 | notify = [notify] 70 | for name in notify: 71 | h = self.handlers[name] 72 | task.notify.append(h) 73 | 74 | when = task_info.get("when") 75 | if when is not None: 76 | if not isinstance(when, list): 77 | when = [when] 78 | for expr in when: 79 | cond = Conditional(self.template_engine, expr) 80 | task.conditionals.append(cond) 81 | 82 | self.tasks.append(task) 83 | 84 | def to_jsonable(self) -> Dict[str, Any]: 85 | return { 86 | "node": "role", 87 | "name": self.name, 88 | "python_name": self.get_python_name(), 89 | "uses_facts": self.uses_facts, 90 | "tasks": [t.to_jsonable() for t in self.tasks], 91 | "handlers": [h.to_jsonable() for h in self.handlers.values()], 92 | } 93 | 94 | def list_role_vars(self) -> Sequence[str]: 95 | role_vars: Set[str] = set() 96 | for task in self.tasks: 97 | role_vars.update(task.list_role_vars(self)) 98 | role_vars -= {f.name for f in fields(facts.Platform)} 99 | return role_vars 100 | 101 | def get_role_class_fields(self): 102 | fields = [] 103 | for name in sorted(self.list_role_vars()): 104 | fields.append((name, Any, field(default=None))) 105 | return fields 106 | 107 | def get_role_class_namespace(self): 108 | # If we have handlers, instantiate role classes for them 109 | handler_classes = {} 110 | for name, role_builder in self.handlers.items(): 111 | handler_classes[name] = role_builder.get_role_class() 112 | 113 | # Create all the functions to start actions in the role 114 | start_funcs = [] 115 | for role_action in self.tasks: 116 | start_funcs.append(role_action.get_start_func(handlers=handler_classes)) 117 | 118 | # Function that calls all the 'Action start' functions 119 | def role_main(self): 120 | for func in start_funcs: 121 | func(self) 122 | 123 | namespace = {} 124 | if self.uses_facts: 125 | namespace["start"] = lambda host: None 126 | namespace["all_facts_available"] = role_main 127 | else: 128 | namespace["start"] = role_main 129 | return namespace 130 | 131 | def get_role_class(self) -> Type[Role]: 132 | fields = self.get_role_class_fields() 133 | namespace = self.get_role_class_namespace() 134 | if self.uses_facts: 135 | role_cls = make_dataclass(self.name, fields, bases=(Role,), namespace=namespace) 136 | role_cls = with_facts(facts.Platform)(role_cls) 137 | else: 138 | role_cls = make_dataclass(self.name, fields, bases=(Role,), namespace=namespace) 139 | 140 | return role_cls 141 | 142 | def get_python_code_module(self) -> List[str]: 143 | lines = [ 144 | "from __future__ import annotations", 145 | "from typing import Any", 146 | "import os", 147 | "from transilience import role", 148 | "from transilience.actions import builtin, facts", 149 | "", 150 | ] 151 | 152 | handlers: Dict[str, str] = {} 153 | for name, handler in self.handlers.items(): 154 | lines += handler.get_python_code_role() 155 | lines.append("") 156 | handlers[name] = handler.get_python_name() 157 | 158 | lines += self.get_python_code_role("Role", handlers=handlers) 159 | 160 | return lines 161 | 162 | def get_python_name(self) -> str: 163 | name_components = re.sub(r"[^A-Za-z]+", " ", self.name).split() 164 | return "".join(c.capitalize() for c in name_components) 165 | 166 | def get_python_code_role(self, name=None, handlers: Optional[Dict[str, str]] = None) -> List[str]: 167 | if handlers is None: 168 | handlers = {} 169 | 170 | lines = [] 171 | if self.uses_facts: 172 | lines.append("@role.with_facts([facts.Platform])") 173 | 174 | if name is None: 175 | name = self.get_python_name() 176 | 177 | lines.append(f"class {name}(role.Role):") 178 | 179 | role_vars = self.list_role_vars() 180 | 181 | if role_vars: 182 | lines.append(" # Role variables used by templates") 183 | for name in sorted(role_vars): 184 | lines.append(f" {name}: Any = None") 185 | lines.append("") 186 | 187 | if self.uses_facts: 188 | lines.append(" def all_facts_available(self):") 189 | else: 190 | lines.append(" def start(self):") 191 | 192 | for task in self.tasks: 193 | for line in task.get_python(handlers=handlers): 194 | lines.append(" " * 8 + line) 195 | 196 | return lines 197 | 198 | 199 | class AnsibleRoleFilesystem(AnsibleRole): 200 | def __init__(self, name: str, root: str, uses_facts: bool = True): 201 | super().__init__(name, uses_facts=uses_facts) 202 | self.root = root 203 | self.template_engine: template.Engine = template.EngineFilesystem([self.root]) 204 | 205 | def create_handler_role(self, name: str) -> "AnsibleRoleFilesystem": 206 | return AnsibleRoleFilesystem(name, root=self.root, uses_facts=False) 207 | 208 | 209 | class AnsibleRoleZip(AnsibleRole): 210 | def __init__(self, name: str, archive: zipfile.ZipFile, root: str, uses_facts: bool = True): 211 | super().__init__(name, uses_facts=uses_facts) 212 | self.root = root 213 | self.archive = archive 214 | self.template_engine: template.Engine = template.EngineZip(archive=archive, root=root) 215 | 216 | def get_role_class_fields(self): 217 | fields = super().get_role_class_fields() 218 | fields.append(("role_assets_zipfile", str, self.archive.filename)) 219 | return fields 220 | 221 | def create_handler_role(self, name: str) -> "AnsibleRoleZip": 222 | return AnsibleRoleZip(name, archive=self.archive, root=self.root, uses_facts=False) 223 | -------------------------------------------------------------------------------- /transilience/ansible/tasks.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Type, Dict, Any, Optional, Callable, Sequence, List 3 | from dataclasses import fields 4 | from ..actions import builtin 5 | from ..role import Role 6 | from .parameters import Parameter, ParameterTemplatePath 7 | from .exceptions import RoleNotLoadedError 8 | 9 | if TYPE_CHECKING: 10 | from dataclasses import Field 11 | from ..actions import Action 12 | from .role import AnsibleRole 13 | from .conditionals import Conditional 14 | YamlDict = Dict[str, Any] 15 | 16 | 17 | class Task: 18 | """ 19 | Information extracted from a task in an Ansible playbook 20 | """ 21 | def __init__(self, action_cls: Type[Action], args: YamlDict, task_info: YamlDict, transilience_name: str): 22 | self.action_cls = action_cls 23 | self.parameters: Dict[str, Parameter] = {} 24 | self.task_info = task_info 25 | self.transilience_name = transilience_name 26 | # List of python names of handler roles notified by this task 27 | self.notify: List[AnsibleRole] = [] 28 | self.conditionals: List[Conditional] = [] 29 | 30 | # Build parameter list 31 | for f in fields(self.action_cls): 32 | value = args.pop(f.name, None) 33 | if value is None: 34 | continue 35 | self.add_parameter(f, value) 36 | 37 | if args: 38 | raise RoleNotLoadedError(f"Task {task_info!r} has unrecognized parameters {args!r}") 39 | 40 | def add_parameter(self, f: Field, value: Any): 41 | self.parameters[f.name] = Parameter.create(f, value) 42 | 43 | def list_role_vars(self, role: Role) -> Sequence[str]: 44 | """ 45 | List the names of role variables used by this task 46 | """ 47 | for p in self.parameters.values(): 48 | yield from p.list_role_vars(role) 49 | for c in self.conditionals: 50 | yield from c.list_role_vars() 51 | 52 | def to_jsonable(self) -> Dict[str, Any]: 53 | return { 54 | "node": "task", 55 | "action": self.transilience_name, 56 | "parameters": {name: p.to_jsonable() for name, p in self.parameters.items()}, 57 | "ansible_yaml": self.task_info, 58 | "notify": [h.get_python_name() for h in self.notify], 59 | "conditionals": [c.to_jsonable() for c in self.conditionals], 60 | } 61 | 62 | def get_start_func(self, handlers: Optional[Dict[str, Callable[[], None]]] = None): 63 | # If this task calls handlers, fetch the corresponding handler classes 64 | notify = self.task_info.get("notify") 65 | if not notify: 66 | notify_classes = None 67 | else: 68 | notify_classes = [] 69 | if isinstance(notify, str): 70 | notify = [notify] 71 | for name in notify: 72 | notify_classes.append(handlers[name]) 73 | 74 | def starter(role: Role): 75 | args = {name: p.get_value(role) for name, p in self.parameters.items()} 76 | role.add(self.action_cls(**args), name=self.task_info.get("name"), notify=notify_classes) 77 | return starter 78 | 79 | def get_python(self, handlers: Optional[Dict[str, str]] = None) -> List[str]: 80 | if handlers is None: 81 | handlers = {} 82 | 83 | fmt_args = [] 84 | for name, parm in self.parameters.items(): 85 | fmt_args.append(f"{name}={parm!r}") 86 | act_args = ", ".join(fmt_args) 87 | 88 | add_args = [ 89 | f"{self.transilience_name}({act_args})", 90 | f"name={self.task_info['name']!r}" 91 | ] 92 | 93 | if len(self.notify) == 1: 94 | add_args.append(f"notify={self.notify[0].get_python_name()}") 95 | elif len(self.notify) > 1: 96 | add_args.append(f"notify=[{', '.join(n.get_python_name() for n in self.notify)}]") 97 | 98 | lines = [] 99 | if self.conditionals: 100 | if len(self.conditionals) > 1: 101 | lines.append(f"if {' and '.join(c.get_python_code() for c in self.conditionals)}:") 102 | else: 103 | lines.append(f"if {self.conditionals[0].get_python_code()}:") 104 | lines.append(f" self.add({', '.join(add_args)})") 105 | else: 106 | lines.append(f"self.add({', '.join(add_args)})") 107 | 108 | return lines 109 | 110 | 111 | class TaskTemplate(Task): 112 | """ 113 | Task that maps ansible.builtin.template module to a Transilince 114 | builtin.copy action, plus template rendering on the Role's side 115 | """ 116 | def __init__(self, args: YamlDict, task_info: YamlDict): 117 | super().__init__(builtin.copy, args, task_info, "builtin.copy") 118 | 119 | def add_parameter(self, f: Field, value: Any): 120 | if f.name == "src": 121 | self.parameters["content"] = ParameterTemplatePath(value) 122 | else: 123 | super().add_parameter(f, value) 124 | -------------------------------------------------------------------------------- /transilience/cmd/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spanezz/transilience/b5c884bd3e161d70960f787e4a177f94cfd414d8/transilience/cmd/__init__.py -------------------------------------------------------------------------------- /transilience/cmd/doc.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Type, Dict, TextIO 3 | import dataclasses 4 | import contextlib 5 | import importlib 6 | import argparse 7 | import inspect 8 | import sys 9 | from transilience.actions import Action 10 | 11 | 12 | def document(module: str, to_document: Dict[str, Type[Action]], file=TextIO): 13 | print(f"""# {module} 14 | 15 | Documentation of the actions provided in module `{module}`. 16 | 17 | """, file=file) 18 | 19 | for name, action in sorted(to_document.items()): 20 | doc = inspect.getdoc(action) 21 | print(f"""## {name} 22 | 23 | {doc} 24 | 25 | Parameters: 26 | """, file=file) 27 | 28 | for field in sorted(dataclasses.fields(action), key=lambda x: x.name): 29 | if field.name in ("uuid", "result"): 30 | continue 31 | field_doc = f"* {field.name} [`{field.type}`]" 32 | if field.default is not dataclasses.MISSING: 33 | field_doc += f" = `{field.default!r}`" 34 | fdoc = field.metadata.get("doc") 35 | if fdoc is not None: 36 | field_doc += ": " + fdoc 37 | print(field_doc, file=file) 38 | 39 | print(file=file) 40 | 41 | 42 | 43 | @contextlib.contextmanager 44 | def output(args): 45 | if args.output: 46 | with open(args.output, "wt") as fd: 47 | yield fd 48 | else: 49 | yield sys.stdout 50 | 51 | 52 | def main(): 53 | parser = argparse.ArgumentParser(description="Generate documentation about transilience actions") 54 | parser.add_argument("-o", "--output", action="store", help="output file (default: stdout)") 55 | parser.add_argument("module", action="store", nargs="?", default="transilience.actions.builtin", 56 | help="module containing the actions to document (default: %(default)s)") 57 | args = parser.parse_args() 58 | 59 | try: 60 | mod = importlib.import_module(args.module) 61 | except ModuleNotFoundError: 62 | modname, _, member = args.module.rpartition(".") 63 | mod = importlib.import_module(modname) 64 | mod = getattr(mod, member) 65 | 66 | to_document: Dict[str, Type[Action]] = {} 67 | for name, value in inspect.getmembers(mod): 68 | if not isinstance(value, type): 69 | continue 70 | if not issubclass(value, Action): 71 | continue 72 | to_document[name] = value 73 | 74 | with output(args) as fd: 75 | document(args.module, to_document, fd) 76 | 77 | 78 | if __name__ == "__main__": 79 | sys.exit(main()) 80 | -------------------------------------------------------------------------------- /transilience/device.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Iterator, Dict 3 | from .utils import run 4 | from .system import Chroot 5 | import re 6 | import os 7 | import logging 8 | import json 9 | import contextlib 10 | import tempfile 11 | 12 | if TYPE_CHECKING: 13 | import parted 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | class BlockDevice: 19 | """ 20 | Information and access to a generic block device 21 | """ 22 | def __init__(self, path: str): 23 | self.path = path 24 | self.refresh() 25 | 26 | 27 | class Disk(BlockDevice): 28 | """ 29 | Information and access to a block device for a whole disk 30 | """ 31 | @contextlib.contextmanager 32 | def parted_device(self) -> Iterator[parted.Device]: 33 | try: 34 | import parted 35 | except ModuleNotFoundError: 36 | raise NotImplementedError("Install pyparted (python3-parted in debian) to do partitioning work") 37 | 38 | device = parted.getDevice(self.path) 39 | try: 40 | yield device 41 | finally: 42 | device.close 43 | 44 | 45 | class DiskImage(Disk): 46 | def refresh(self): 47 | pass 48 | 49 | @contextlib.contextmanager 50 | def partitions(self) -> Dict[str, "Partition"]: 51 | """ 52 | Context manager that create loop devices to access partitions inside the 53 | image, and shuts them down at the end 54 | """ 55 | res = run(("kpartx", "-avs", self.path), text=True, capture_output=True) 56 | devs = {} 57 | re_mapping = re.compile(r"^add map (\S+)") 58 | for line in res.stdout.splitlines(): 59 | mo = re_mapping.match(line) 60 | if not mo: 61 | log.error("Unrecognised kpartx output line: %r", line) 62 | continue 63 | dev = Partition(os.path.join("/dev/mapper", mo.group(1))) 64 | devs[dev.label] = dev 65 | 66 | try: 67 | yield devs 68 | finally: 69 | res = run(("kpartx", "-ds", self.path)) 70 | 71 | 72 | class Partition(BlockDevice): 73 | """ 74 | Information and access to a block device for a disk partition 75 | """ 76 | def __init__(self, path: str): 77 | self.path = path 78 | self.refresh() 79 | 80 | def refresh(self): 81 | """ 82 | Update device information from lsblk 83 | """ 84 | info = run(("lsblk", "--json", "--output-all", "--bytes", self.path), capture_output=True) 85 | info = json.loads(info.stdout)["blockdevices"][0] 86 | self.label = info.get("label") 87 | self.fstype = info.get("fstype") 88 | 89 | @contextlib.contextmanager 90 | def ext4_dir_index_workaround(self) -> Iterator[None]: 91 | """ 92 | Temporarily disable dir_index of the ext4 filesystem to work around the 93 | issue at https://lkml.org/lkml/2018/12/27/155. 94 | 95 | See https://www.enricozini.org/blog/2019/himblick/ext4-and-32bit-arm-on-64bit-amd64/ 96 | """ 97 | if self.fstype != "ext4": 98 | yield 99 | return 100 | 101 | log.info("%s: disabling dir_index to workaround https://lkml.org/lkml/2018/12/27/155", self.path) 102 | run(("tune2fs", "-O", "^dir_index", self.path)) 103 | 104 | try: 105 | yield 106 | finally: 107 | log.info("%s: reenabling dir_index", self.path) 108 | run(("tune2fs", "-O", "dir_index", self.path)) 109 | log.info("%s: running e2fsck to reindex directories", self.path) 110 | run(("e2fsck", "-fy", self.path)) 111 | 112 | @contextlib.contextmanager 113 | def mount(self, path, *args) -> Iterator[None]: 114 | """ 115 | Mount this device on the given path for the duration of the context 116 | manager 117 | """ 118 | run(("mount", self.path, path, *args)) 119 | try: 120 | yield 121 | finally: 122 | run(("umount", path)) 123 | 124 | 125 | class RaspiImage(DiskImage): 126 | """ 127 | Access a Raspberry Pi OS disk image 128 | """ 129 | @contextlib.contextmanager 130 | def mount(self) -> Chroot: 131 | """ 132 | Context manager that mounts the raspbian system in a temporary directory 133 | and unmounts it on exit. 134 | 135 | It produces the path to the mounted filesystem 136 | """ 137 | with tempfile.TemporaryDirectory() as root: 138 | with self.partitions() as devs: 139 | boot = devs["boot"] 140 | rootfs = devs["rootfs"] 141 | with rootfs.ext4_dir_index_workaround(): 142 | with rootfs.mount(root): 143 | with boot.mount(os.path.join(root, "boot")): 144 | yield Chroot(root) 145 | -------------------------------------------------------------------------------- /transilience/fileasset.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Dict, Any, Optional, BinaryIO, ContextManager 3 | import contextlib 4 | import hashlib 5 | import zipfile 6 | import shutil 7 | 8 | 9 | class FileAsset: 10 | """ 11 | Generic interface for local file assets used by actions 12 | """ 13 | def __init__(self): 14 | # Cached contents of the file, if it's small 15 | self.cached: Optional[bytes] = None 16 | 17 | def serialize(self) -> Dict[str, Any]: 18 | res = {} 19 | if self.cached is not None: 20 | res["cached"] = self.cached 21 | return res 22 | 23 | @contextlib.contextmanager 24 | def open(self) -> ContextManager[BinaryIO]: 25 | raise NotImplementedError(f"{self.__class__}.open is not implemented") 26 | 27 | def copy_to(self, dst: BinaryIO): 28 | with self.open() as src: 29 | shutil.copyfileobj(src, dst) 30 | 31 | def sha1sum(self) -> str: 32 | """ 33 | Return the sha1sum of the file contents. 34 | 35 | If the file is small, cache its contents 36 | """ 37 | h = hashlib.sha1() 38 | size = 0 39 | to_cache = [] 40 | with self.open() as fd: 41 | while True: 42 | buf = fd.read(40960) 43 | if not buf: 44 | break 45 | size += len(buf) 46 | if size > 16384: 47 | to_cache = None 48 | else: 49 | to_cache.append(buf) 50 | h.update(buf) 51 | 52 | if to_cache is not None: 53 | self.cached = b"".join(to_cache) 54 | 55 | return h.hexdigest() 56 | 57 | @classmethod 58 | def compute_file_sha1sum(self, fd: BinaryIO) -> str: 59 | h = hashlib.sha1() 60 | while True: 61 | buf = fd.read(40960) 62 | if not buf: 63 | break 64 | h.update(buf) 65 | return h.hexdigest() 66 | 67 | @classmethod 68 | def deserialize(cls, data: Dict[str, Any]) -> "FileAsset": 69 | t = data.get("type") 70 | cached = data.get("cached") 71 | if t == "local": 72 | res = LocalFileAsset(data["path"]) 73 | res.cached = cached 74 | return res 75 | elif t == "zip": 76 | res = ZipFileAsset(data["archive"], data["path"]) 77 | res.cached = cached 78 | return res 79 | else: 80 | raise ValueError(f"Unknown file asset type {t!r}") 81 | 82 | 83 | class LocalFileAsset(FileAsset): 84 | """ 85 | FileAsset referring to a local file 86 | """ 87 | def __init__(self, path: str): 88 | super().__init__() 89 | self.path = path 90 | 91 | def serialize(self) -> Dict[str, Any]: 92 | res = super().serialize() 93 | res["type"] = "local" 94 | res["path"] = self.path 95 | return res 96 | 97 | @contextlib.contextmanager 98 | def open(self) -> ContextManager[BinaryIO]: 99 | with open(self.path, "rb") as fd: 100 | yield fd 101 | 102 | 103 | class ZipFileAsset(FileAsset): 104 | """ 105 | FileAsset referencing a file inside a zipfile 106 | """ 107 | def __init__(self, archive: str, path: str): 108 | super().__init__() 109 | self.archive = archive 110 | self.path = path 111 | 112 | def serialize(self) -> Dict[str, Any]: 113 | res = super().serialize() 114 | res["type"] = "zip" 115 | res["archive"] = self.archive 116 | res["path"] = self.path 117 | return res 118 | 119 | @contextlib.contextmanager 120 | def open(self) -> ContextManager[BinaryIO]: 121 | with zipfile.ZipFile(self.archive, "r") as zf: 122 | with zf.open(self.path) as fd: 123 | yield fd 124 | -------------------------------------------------------------------------------- /transilience/hosts.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Dict, Any 3 | from dataclasses import dataclass, field 4 | import transilience.system 5 | 6 | 7 | if TYPE_CHECKING: 8 | from transilience.system import System 9 | 10 | 11 | @dataclass 12 | class Host: 13 | """ 14 | A host to be provisioned. 15 | 16 | Hosts can be grouped by using a common plain dataclass as a mixin. 17 | """ 18 | name: str 19 | type: str = "Mitogen" 20 | args: Dict[str, Any] = field(default_factory=dict) 21 | 22 | def _make_system(self) -> System: 23 | cls = getattr(transilience.system, self.type) 24 | return cls(self.name, **self.args) 25 | -------------------------------------------------------------------------------- /transilience/playbook.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Sequence, Union, Type, Optional 3 | import threading 4 | import argparse 5 | import tempfile 6 | import inspect 7 | import logging 8 | import shutil 9 | import json 10 | import sys 11 | import os 12 | try: 13 | import coloredlogs 14 | except ModuleNotFoundError: 15 | coloredlogs = None 16 | from .runner import Runner 17 | from .role import Role, Loader 18 | from .hosts import Host 19 | 20 | 21 | class Playbook: 22 | def __init__(self): 23 | self.progress = logging.getLogger("progress") 24 | self.run_context = threading.local() 25 | self.role_loader: Optional[Loader] = None 26 | 27 | def setup_logging(self): 28 | FORMAT = "%(asctime)-15s %(levelname)s %(name)s %(message)s" 29 | PROGRESS_FORMAT = "%(asctime)-15s %(message)s" 30 | if self.args.debug: 31 | log_level = logging.DEBUG 32 | elif self.args.verbose: 33 | log_level = logging.INFO 34 | else: 35 | log_level = logging.WARN 36 | 37 | progress_formatter = None 38 | if coloredlogs is not None: 39 | coloredlogs.install(level=log_level, fmt=FORMAT, stream=sys.stderr) 40 | if log_level > logging.INFO: 41 | progress_formatter = coloredlogs.ColoredFormatter(fmt=PROGRESS_FORMAT) 42 | else: 43 | logging.basicConfig(level=log_level, stream=sys.stderr, format=FORMAT) 44 | if log_level > logging.INFO: 45 | progress_formatter = logging.Formatter(fmt=PROGRESS_FORMAT) 46 | 47 | handler = logging.StreamHandler(stream=sys.stderr) 48 | handler.setFormatter(progress_formatter) 49 | self.progress.addHandler(handler) 50 | self.progress.setLevel(logging.INFO) 51 | self.progress.propagate = False 52 | 53 | def make_argparser(self): 54 | description = inspect.getdoc(self) 55 | if not description: 56 | description = "Provision systems" 57 | 58 | parser = argparse.ArgumentParser(description=description) 59 | parser.add_argument("-v", "--verbose", action="store_true", 60 | help="verbose output") 61 | parser.add_argument("--debug", action="store_true", 62 | help="verbose output") 63 | parser.add_argument("-C", "--check", action="store_true", 64 | help="do not perform changes, but check if changes would be needed") 65 | parser.add_argument("--local", action="store", 66 | help="run the playbook for the given host name, on the local system") 67 | group = parser.add_mutually_exclusive_group() 68 | group.add_argument("--ansible-to-python", action="store", metavar="role", 69 | help="print the given Ansible role as Transilience Python code") 70 | group.add_argument("--ansible-to-ast", action="store", metavar="role", 71 | help="print the AST of the given Ansible role as understood by Transilience") 72 | group.add_argument("--zipapp", action="store", metavar="file.pyz", 73 | help="bundle this playbook in a self-contained executable python zipapp") 74 | 75 | return parser 76 | 77 | def hosts(self) -> Sequence[Host]: 78 | """ 79 | Generate a sequence with all the systems on which the playbook needs to run 80 | """ 81 | return () 82 | 83 | def thread_main(self, host: Host, check_mode: bool): 84 | """ 85 | Main entry point for per-host threads 86 | """ 87 | self.run_context.host = host 88 | self.run_context.runner = Runner(host, check_mode=check_mode) 89 | self.start(host) 90 | self.run_context.runner.main() 91 | 92 | def add_role(self, role_cls: Union[str, Type[Role]], **kw): 93 | """ 94 | Add a role to this thread's runner 95 | """ 96 | if not hasattr(self.run_context, "runner"): 97 | raise RuntimeError(f"{self.__class__.__name__}.add_role cannot be called outside of a host thread") 98 | if isinstance(role_cls, str): 99 | if self.role_loader is None: 100 | raise RuntimeError("could not find a way to load roles") 101 | role_cls = self.role_loader.load(role_cls) 102 | self.run_context.runner.add_role(role_cls, **kw) 103 | 104 | def start(self, host: Host): 105 | """ 106 | Start the playbook on the given runner. 107 | 108 | This method is called once for each system returned by systems() 109 | """ 110 | raise NotImplementedError(f"{self.__class__.__name__}.start is not implemented") 111 | 112 | def _load_ansible(self, name: str): 113 | """ 114 | Return an ansible.RoleLoader for a role with the given name 115 | """ 116 | from .ansible import FilesystemRoleLoader 117 | loader = FilesystemRoleLoader(name) 118 | loader.load() 119 | return loader 120 | 121 | def role_to_python(self, name: str, file=None): 122 | """ 123 | Print the Python code generated from the given Ansible role 124 | """ 125 | loader = self._load_ansible(name) 126 | print(loader.get_python_code(), file=file) 127 | 128 | def role_to_ast(self, name: str, file=None): 129 | """ 130 | Print the Python code generated from the given Ansible role 131 | """ 132 | if file is None: 133 | file = sys.stdout 134 | 135 | if not hasattr(file, "fileno"): 136 | indent = None 137 | elif os.isatty(file.fileno()): 138 | indent = 2 139 | else: 140 | indent = None 141 | 142 | loader = self._load_ansible(name) 143 | json.dump(loader.ansible_role.to_jsonable(), file, indent=indent) 144 | 145 | def zipapp(self, target: str, interpreter=None): 146 | """ 147 | Bundle this playbook into a self-contained zipapp 148 | """ 149 | import zipapp 150 | import jinja2 151 | import transilience 152 | if interpreter is None: 153 | interpreter = sys.executable 154 | 155 | if getattr(transilience.__loader__, "archive", None): 156 | # Recursively iterating module directories requires Python 3.9+ 157 | raise NotImplementedError("Cannot currently create a zipapp from a zipapp") 158 | 159 | with tempfile.TemporaryDirectory() as workdir: 160 | # Copy transilience 161 | shutil.copytree(os.path.dirname(__file__), os.path.join(workdir, "transilience")) 162 | # Copy jinja2 163 | shutil.copytree(os.path.dirname(jinja2.__file__), os.path.join(workdir, "jinja2")) 164 | # Copy argv[0] as __main__.py 165 | shutil.copy(sys.argv[0], os.path.join(workdir, "__main__.py")) 166 | # Copy argv[0]/roles 167 | # TODO: use self.role_loader to copy the role_dir 168 | role_dir = os.path.join(os.path.dirname(sys.argv[0]), "roles") 169 | if os.path.isdir(role_dir): 170 | shutil.copytree(role_dir, os.path.join(workdir, "roles")) 171 | # TODO: If roles/__init__.py does not exist, add it? 172 | # TODO: If roles/*/__init__.py does not exist, add it? 173 | # Turn everything into a zipapp 174 | zipapp.create_archive(workdir, target, interpreter=interpreter, compressed=True) 175 | 176 | def provision(self, hosts: Sequence[Host], check_mode: bool): 177 | """ 178 | Run provisioning on the given hosts 179 | """ 180 | if len(hosts) == 1: 181 | self.thread_main(hosts[0], check_mode) 182 | else: 183 | # Start all the runners in separate threads 184 | threads = [] 185 | for host in hosts: 186 | t = threading.Thread(target=self.thread_main, args=(host, check_mode)) 187 | threads.append(t) 188 | t.start() 189 | 190 | # Wait for all threads to complete 191 | for t in threads: 192 | t.join() 193 | 194 | def main(self): 195 | parser = self.make_argparser() 196 | self.args = parser.parse_args() 197 | self.setup_logging() 198 | 199 | if self.args.ansible_to_python: 200 | self.role_to_python(self.args.ansible_to_python) 201 | return 202 | 203 | if self.args.ansible_to_ast: 204 | self.role_to_ast(self.args.ansible_to_ast) 205 | return 206 | 207 | self.role_loader = Loader.create() 208 | 209 | if self.args.zipapp: 210 | self.zipapp(target=self.args.zipapp) 211 | return 212 | 213 | if self.args.local: 214 | hosts = [h for h in self.hosts() if h.name == self.args.local] 215 | if not hosts: 216 | raise RuntimeError(f"No host found matching {self.args.local}") 217 | for h in hosts: 218 | h.type = "Local" 219 | h.args = {} 220 | else: 221 | hosts = list(self.hosts()) 222 | 223 | self.provision(hosts, check_mode=self.args.check) 224 | -------------------------------------------------------------------------------- /transilience/system/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from .system import System, PipelineInfo 3 | from .mitogen import Mitogen 4 | from .local import Local 5 | 6 | __all__ = ["System", "PipelineInfo", "Mitogen", "Local"] 7 | -------------------------------------------------------------------------------- /transilience/system/local.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Sequence, Generator, Optional 3 | import collections 4 | import uuid 5 | from .. import actions 6 | from .system import System, PipelineInfo 7 | from .pipeline import LocalPipelineMixin 8 | 9 | 10 | class LocalExecuteMixin: 11 | """ 12 | System implementation to execute actions locally 13 | """ 14 | def execute(self, action: actions.Action) -> actions.Action: 15 | with action.result.collect(): 16 | action.action_run(self) 17 | return action 18 | 19 | 20 | class Local(LocalExecuteMixin, LocalPipelineMixin, System): 21 | """ 22 | Work on the local system 23 | """ 24 | def __init__(self, name: Optional[str] = None): 25 | if name is None: 26 | name = "local" 27 | super().__init__(name) 28 | self.pending_actions = collections.deque() 29 | 30 | def run_actions(self, action_list: Sequence[actions.Action]) -> Generator[actions.Action, None, None]: 31 | """ 32 | Run a sequence of provisioning actions in the chroot 33 | """ 34 | pipeline = PipelineInfo(str(uuid.uuid4())) 35 | for act in action_list: 36 | yield self.execute_pipelined(act, pipeline) 37 | 38 | def send_pipelined(self, action: actions.Action, pipeline_info: PipelineInfo): 39 | """ 40 | Execute this action as part of a pipeline 41 | """ 42 | self.pending_actions.append((action, pipeline_info)) 43 | 44 | def receive_pipelined(self) -> Generator[actions.Action, None, None]: 45 | """ 46 | Receive results of the actions that have been sent so far. 47 | 48 | It is ok to enqueue new actions while this method runs 49 | """ 50 | while self.pending_actions: 51 | yield self.execute_pipelined(*self.pending_actions.popleft()) 52 | -------------------------------------------------------------------------------- /transilience/system/mitogen.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Dict, Optional, Sequence, Generator, Any, BinaryIO, ContextManager 3 | import collections 4 | import contextlib 5 | import threading 6 | import logging 7 | import uuid 8 | import io 9 | try: 10 | import mitogen 11 | import mitogen.core 12 | import mitogen.master 13 | import mitogen.service 14 | import mitogen.parent 15 | except ModuleNotFoundError: 16 | mitogen = None 17 | from .. import actions 18 | from ..fileasset import FileAsset, LocalFileAsset, ZipFileAsset 19 | from .system import System, PipelineInfo 20 | from .pipeline import LocalPipelineMixin 21 | from .local import LocalExecuteMixin 22 | 23 | log = logging.getLogger(__name__) 24 | 25 | _this_system_lock = threading.Lock() 26 | _this_system = None 27 | 28 | 29 | if mitogen is None: 30 | 31 | class Mitogen(System): 32 | def __init__(self, *args, **kw): 33 | raise NotImplementedError("the mitogen python module is not installed on this system") 34 | 35 | else: 36 | class MitogenCachedFileAsset(FileAsset): 37 | def __init__(self, cached: bytes, serialized: Dict[str, Any]): 38 | super().__init__() 39 | self.cached = cached 40 | self.serialized = serialized 41 | 42 | def serialize(self) -> Dict[str, Any]: 43 | return self.serialized 44 | 45 | @contextlib.contextmanager 46 | def open(self) -> ContextManager[BinaryIO]: 47 | with io.BytesIO(self.cached) as buf: 48 | yield buf 49 | 50 | def copy_to(self, dst: BinaryIO): 51 | dst.write(self.cached) 52 | 53 | class MitogenFileAsset(FileAsset): 54 | def __init__(self, local_mitogen: "LocalMitogen", remote_path: str): 55 | super().__init__() 56 | self.local_mitogen = local_mitogen 57 | self.remote_path = remote_path 58 | 59 | def serialize(self) -> Dict[str, Any]: 60 | res = super().serialize() 61 | res["type"] = "local" 62 | res["path"] = self.remote_path 63 | return res 64 | 65 | @contextlib.contextmanager 66 | def open(self) -> ContextManager[BinaryIO]: 67 | with io.BytesIO() as buf: 68 | self.copy_to(buf) 69 | buf.seek(0) 70 | yield buf 71 | 72 | def copy_to(self, dst: BinaryIO): 73 | ok, metadata = mitogen.service.FileService.get( 74 | context=self.local_mitogen.parent_context, 75 | path=self.remote_path, 76 | out_fp=dst, 77 | ) 78 | if not ok: 79 | raise IOError(f'Transfer of {self.path!r} was interrupted') 80 | 81 | class LocalMitogen(LocalExecuteMixin, LocalPipelineMixin, System): 82 | def __init__(self, parent_context: mitogen.core.Context, router: mitogen.core.Router): 83 | super().__init__("local_mitogen") 84 | self.parent_context = parent_context 85 | self.router = router 86 | 87 | def remap_file_asset(self, asset: FileAsset): 88 | if asset.cached is not None: 89 | return MitogenCachedFileAsset(asset.cached, asset.serialize()) 90 | elif isinstance(asset, LocalFileAsset): 91 | return MitogenFileAsset(self, asset.path) 92 | # elif isinstance(asset, ZipFileAsset): 93 | # return MitogenZipFileAsset(self, asset.archive, asset.path) 94 | else: 95 | raise NotImplementedError(f"Unable to handle File asset of type {asset.__class__!r}") 96 | 97 | class Mitogen(System): 98 | """ 99 | Access a system via Mitogen 100 | """ 101 | internal_broker = None 102 | internal_router = None 103 | 104 | def __init__(self, name: str, method: str, router: Optional[mitogen.master.Router] = None, **kw): 105 | super().__init__(name) 106 | if router is None: 107 | if self.internal_router is None: 108 | self.internal_broker = mitogen.master.Broker() 109 | self.internal_router = mitogen.master.Router(self.internal_broker) 110 | router = self.internal_router 111 | self.router = router 112 | self.file_service = mitogen.service.FileService(router) 113 | self.pool = mitogen.service.Pool(router=self.router, services=[self.file_service]) 114 | 115 | meth = getattr(self.router, method, None) 116 | if meth is None: 117 | raise KeyError(f"conncetion method {method!r} not available in mitogen") 118 | 119 | kw.setdefault("python_path", "/usr/bin/python3") 120 | self.context = meth(remote_name=name, **kw) 121 | 122 | self.pending_actions = collections.deque() 123 | 124 | def close(self): 125 | self.context.shutdown(wait=True) 126 | 127 | def share_file(self, pathname: str): 128 | self.file_service.register(pathname) 129 | 130 | def share_file_prefix(self, pathname: str): 131 | self.file_service.register_prefix(pathname) 132 | 133 | def execute(self, action: actions.Action) -> actions.Action: 134 | res = self.context.call(self._remote_run_actions, self.router.myself(), action.serialize()) 135 | return actions.Action.deserialize(res) 136 | 137 | def send_pipelined(self, action: actions.Action, pipeline_info: PipelineInfo): 138 | """ 139 | Execute this action as part of a pipeline 140 | """ 141 | serialized = action.serialize() 142 | serialized["__pipeline__"] = pipeline_info.serialize() 143 | self.pending_actions.append( 144 | self.context.call_async(self._remote_run_actions, self.router.myself(), serialized) 145 | ) 146 | 147 | def receive_pipelined(self) -> Generator[actions.Action, None, None]: 148 | """ 149 | Receive results of the actions that have been sent so far. 150 | 151 | It is ok to enqueue new actions while this method runs 152 | """ 153 | while self.pending_actions: 154 | yield actions.Action.deserialize(self.pending_actions.popleft().get().unpickle()) 155 | 156 | def pipeline_clear_failed(self, pipeline_id: str): 157 | self.context.call_no_reply(self._pipeline_clear_failed, pipeline_id) 158 | 159 | def pipeline_close(self, pipeline_id: str): 160 | self.context.call_no_reply(self._pipeline_close, pipeline_id) 161 | 162 | def run_actions(self, action_list: Sequence[actions.Action]) -> Generator[actions.Action, None, None]: 163 | """ 164 | Run a sequence of provisioning actions in the chroot 165 | """ 166 | pipeline = PipelineInfo(str(uuid.uuid4())) 167 | for act in action_list: 168 | self.send_pipelined(act, pipeline) 169 | yield from self.receive_pipelined() 170 | 171 | @classmethod 172 | def _pipeline_clear_failed(cls, pipeline_id: str): 173 | global _this_system, _this_system_lock 174 | with _this_system_lock: 175 | if _this_system is None: 176 | return 177 | system = _this_system 178 | system.pipeline_clear_failed(pipeline_id) 179 | 180 | @classmethod 181 | def _pipeline_close(self, pipeline_id: str): 182 | global _this_system, _this_system_lock 183 | with _this_system_lock: 184 | if _this_system is None: 185 | return 186 | system = _this_system 187 | system.pipeline_close(pipeline_id) 188 | 189 | @classmethod 190 | @mitogen.core.takes_router 191 | def _remote_run_actions( 192 | self, 193 | context: mitogen.core.Context, 194 | action: actions.Action, 195 | router: mitogen.core.Router = None) -> Dict[str, Any]: 196 | 197 | global _this_system, _this_system_lock 198 | with _this_system_lock: 199 | if _this_system is None: 200 | _this_system = LocalMitogen(parent_context=context, router=router) 201 | system = _this_system 202 | 203 | pipeline_info = action.pop("__pipeline__", None) 204 | 205 | # Convert LocalFileAsset to something that fetches via Mitogen 206 | file_assets = action.get("__file_assets__", None) 207 | if file_assets is None: 208 | file_assets = [] 209 | 210 | action = actions.Action.deserialize(action) 211 | for name in file_assets: 212 | setattr(action, name, 213 | system.remap_file_asset( 214 | getattr(action, name))) 215 | 216 | if pipeline_info is None: 217 | action = system.execute(action) 218 | else: 219 | pipeline = PipelineInfo.deserialize(pipeline_info) 220 | action = system.execute_pipelined(action, pipeline) 221 | return action.serialize() 222 | -------------------------------------------------------------------------------- /transilience/system/pipeline.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Dict 3 | from ..actions import ResultState 4 | 5 | 6 | if TYPE_CHECKING: 7 | from ..actions import Action 8 | from .system import PipelineInfo 9 | 10 | 11 | class Pipeline: 12 | """ 13 | State about a pipeline 14 | """ 15 | def __init__(self, id: str): 16 | self.id = id 17 | self.failed = False 18 | self.states: Dict[str, str] = {} 19 | 20 | 21 | class LocalPipelineMixin: 22 | """ 23 | Common functions to execute actions locally as part of a pipeline 24 | """ 25 | def __init__(self, *args, **kw): 26 | super().__init__(*args, **kw) 27 | self.pipelines: Dict[str, Pipeline] = {} 28 | 29 | def get_pipeline(self, pipeline_id: str) -> Pipeline: 30 | """ 31 | Create or retrieve a pipeline object for the given pipeline id 32 | """ 33 | res = self.pipelines.get(pipeline_id) 34 | if res is None: 35 | res = Pipeline(pipeline_id) 36 | self.pipelines[pipeline_id] = res 37 | return res 38 | 39 | def pipeline_clear_failed(self, pipeline_id: str): 40 | """ 41 | Clear the 'failed' status of a pipeline. 42 | 43 | After this method runs, actions will start being executed again even if 44 | an action previously failed 45 | """ 46 | pipeline = self.get_pipeline(pipeline_id) 47 | pipeline.failed = False 48 | 49 | def pipeline_close(self, pipeline_id: str): 50 | """ 51 | Dicard state about a pipeline. 52 | 53 | Call this method to cleanup internal state when a pipeline is done 54 | executing 55 | """ 56 | self.pipelines.pop(pipeline_id, None) 57 | 58 | def execute_pipelined(self, action: Action, pipeline_info: PipelineInfo) -> Action: 59 | """ 60 | Execute the action locally, returning its result immediately. 61 | 62 | It keeps pipeline metadata into account, and it can choose to skip the 63 | action or fail it instead of running it. 64 | """ 65 | pipeline = self.get_pipeline(pipeline_info.id) 66 | 67 | # Skip if a previous action failed 68 | if pipeline.failed: 69 | with action.result.collect(): 70 | action.action_run_pipeline_failed(self) 71 | pipeline.states[action.uuid] = action.result.state 72 | return action 73 | 74 | # Check "when" conditions 75 | for act_uuid, states in pipeline_info.when.items(): 76 | state = pipeline.states.get(act_uuid) 77 | if state is None or state not in states: 78 | with action.result.collect(): 79 | action.action_run_pipeline_skipped(self, "pipeline condition not met") 80 | pipeline.states[action.uuid] = action.result.state 81 | return action 82 | 83 | # Execute 84 | try: 85 | act = self.execute(action) 86 | except Exception: 87 | # Ignore any exception here. They'll be recorded in the action 88 | # result, and we later check that to fail the pipeline if they 89 | # happened 90 | act = action 91 | 92 | if act.result.state == ResultState.FAILED: 93 | pipeline.failed = True 94 | pipeline.states[act.uuid] = act.result.state 95 | return act 96 | -------------------------------------------------------------------------------- /transilience/system/system.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import TYPE_CHECKING, Type, Dict, Any, Callable, List 3 | from dataclasses import dataclass, field, asdict 4 | import threading 5 | 6 | if TYPE_CHECKING: 7 | from ..actions import Action 8 | 9 | 10 | @dataclass 11 | class PipelineInfo: 12 | """ 13 | Metadata to control the pipelined execution of an action 14 | """ 15 | id: str 16 | # Execute only when the state of all the given actions previous executed in 17 | # the same pipeline (identified by uuid) is one of those listed 18 | when: Dict[str, List[str]] = field(default_factory=dict) 19 | 20 | def serialize(self) -> Dict[str, Any]: 21 | """ 22 | Serialize this pipeline metadata as a dict 23 | """ 24 | return asdict(self) 25 | 26 | @classmethod 27 | def deserialize(cls, serialized: Dict[str, Any]) -> "PipelineInfo": 28 | """ 29 | Deserialize pipeline metadata form a dict 30 | """ 31 | return cls(**serialized) 32 | 33 | 34 | class System: 35 | """ 36 | Access a system to be provisioned 37 | """ 38 | def __init__(self, name: str): 39 | self.name = name 40 | # Objects that can be registered by actions as caches 41 | self.caches: Dict[Type[Action], Any] = {} 42 | self.caches_lock = threading.Lock() 43 | 44 | def close(self): 45 | """ 46 | Close the connection to this system 47 | """ 48 | pass 49 | 50 | def get_action_cache(self, action: Type[Action], default_factory: Callable[[], Any]): 51 | """ 52 | Lookup the registered cache for this action. 53 | 54 | If not found, creates it as the result of default_factory 55 | """ 56 | with self.caches_lock: 57 | res = self.caches.get(action) 58 | if res is None: 59 | res = default_factory() 60 | self.caches[action] = res 61 | return res 62 | 63 | def share_file(self, pathname: str): 64 | """ 65 | Register a pathname as exportable to children 66 | """ 67 | pass 68 | 69 | def share_file_prefix(self, pathname: str): 70 | """ 71 | Register a pathname prefix as exportable to children 72 | """ 73 | pass 74 | 75 | def execute(self, action: Action) -> Action: 76 | """ 77 | Execute an action immediately. 78 | 79 | For remote systems, this may have serious latency issues, since it 80 | requires a full round trip for each action that gets executed 81 | """ 82 | raise NotImplementedError(f"{self.__class__}.execute is not implemented") 83 | -------------------------------------------------------------------------------- /transilience/template.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, List, Dict, Any, Sequence, Union 3 | import zipfile 4 | import os 5 | import jinja2 6 | import jinja2.meta 7 | 8 | 9 | def finalize_value(val): 10 | """ 11 | Jinja2 finalize hook that renders None as the empty string 12 | """ 13 | # See: http://jinja.pocoo.org/docs/2.10/api/ under "finalize" 14 | # and https://stackoverflow.com/questions/11146619/suppress-none-output-as-string-in-jinja2 15 | if val is None: 16 | return "" 17 | else: 18 | return val 19 | 20 | 21 | class Engine: 22 | """ 23 | Jinja2 machinery tuned to render text templates 24 | """ 25 | def __init__(self, loader: jinja2.BaseLoader): 26 | self.env = jinja2.Environment( 27 | autoescape=False, 28 | trim_blocks=True, 29 | keep_trailing_newline=True, 30 | finalize=finalize_value, 31 | loader=loader) 32 | 33 | def render_string(self, template: str, ctx: Dict[str, Any]) -> str: 34 | """ 35 | Render a template from a string 36 | """ 37 | tpl = self.env.from_string(template) 38 | return tpl.render(**ctx) 39 | 40 | def render_file(self, path: str, ctx: Dict[str, Any]) -> str: 41 | """ 42 | Render a template from a file, relative to template_paths 43 | """ 44 | tpl = self.env.get_template(path) 45 | return tpl.render(**ctx) 46 | 47 | def list_string_template_vars(self, template: str) -> Sequence[str]: 48 | """ 49 | List the template variables used by this template string 50 | """ 51 | ast = self.env.parse(template) 52 | return jinja2.meta.find_undeclared_variables(ast) 53 | 54 | def list_file_template_vars(self, path: str) -> Sequence[str]: 55 | """ 56 | List the template variables used by this template string 57 | """ 58 | tpl = self.env.get_template(path) 59 | with open(tpl.filename, "rt") as fd: 60 | ast = self.env.parse(fd.read(), tpl.name, tpl.filename) 61 | return jinja2.meta.find_undeclared_variables(ast) 62 | 63 | 64 | class EngineFilesystem(Engine): 65 | def __init__(self, template_paths: Optional[List[str]] = None): 66 | if template_paths is None: 67 | template_paths = ["."] 68 | loader = jinja2.FileSystemLoader(template_paths) 69 | super().__init__(loader) 70 | 71 | 72 | class ZipLoader(jinja2.BaseLoader): 73 | def __init__(self, archive: zipfile.ZipFile, root: str): 74 | self.zipfile = archive 75 | self.root = root 76 | 77 | def get_source(self, environment: jinja2.Environment, template: str): 78 | path = os.path.join(self.root, template) 79 | with self.zipfile.open(path, "r") as fd: 80 | source = fd.read().decode() 81 | return source, None, lambda: True 82 | 83 | 84 | class EngineZip(Engine): 85 | def __init__(self, archive: Union[str, zipfile.ZipFile], root: str): 86 | if isinstance(archive, str): 87 | archive = zipfile.ZipFile(archive, "r") 88 | self.loader = ZipLoader(archive, root) 89 | super().__init__(self.loader) 90 | 91 | def list_file_template_vars(self, path: str) -> Sequence[str]: 92 | """ 93 | List the template variables used by this template string 94 | """ 95 | source = self.loader.get_source(self.env, path) 96 | ast = self.env.parse(source, os.path.basename(path), path) 97 | return jinja2.meta.find_undeclared_variables(ast) 98 | -------------------------------------------------------------------------------- /transilience/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Sequence, Optional 3 | import logging 4 | import contextlib 5 | import subprocess 6 | import shlex 7 | import os 8 | import tempfile 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | 13 | def run(cmd: Sequence[str], check: bool = True, **kw) -> subprocess.CompletedProcess: 14 | """ 15 | Logging wrapper to subprocess.run. 16 | 17 | Also, default check to True. 18 | """ 19 | log.info("Run %s", " ".join(shlex.quote(x) for x in cmd)) 20 | return subprocess.run(cmd, check=check, **kw) 21 | 22 | 23 | @contextlib.contextmanager 24 | def atomic_writer( 25 | fname: str, 26 | mode: str = "w+b", 27 | chmod: Optional[int] = 0o664, 28 | sync: bool = True, 29 | use_umask: bool = False, 30 | **kw): 31 | """ 32 | open/tempfile wrapper to atomically write to a file, by writing its 33 | contents to a temporary file in the same directory, and renaming it at the 34 | end of the block if no exception has been raised. 35 | 36 | :arg fname: name of the file to create 37 | :arg mode: passed to mkstemp/open 38 | :arg chmod: permissions of the resulting file 39 | :arg sync: if True, call fdatasync before renaming 40 | :arg use_umask: if True, apply umask to chmod 41 | 42 | All the other arguments are passed to open 43 | """ 44 | 45 | if use_umask: 46 | cur_umask = os.umask(0) 47 | os.umask(cur_umask) 48 | chmod &= ~cur_umask 49 | 50 | dirname = os.path.dirname(fname) 51 | if not os.path.isdir(dirname): 52 | os.makedirs(dirname) 53 | 54 | fd, abspath = tempfile.mkstemp(dir=dirname, text="b" not in mode, prefix=fname) 55 | outfd = open(fd, mode, closefd=True, **kw) 56 | try: 57 | yield outfd 58 | outfd.flush() 59 | if sync: 60 | os.fdatasync(fd) 61 | if chmod is not None: 62 | os.fchmod(fd, chmod) 63 | os.rename(abspath, fname) 64 | except Exception: 65 | os.unlink(abspath) 66 | raise 67 | finally: 68 | outfd.close() 69 | -------------------------------------------------------------------------------- /update_docs: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | python3 -m transilience.cmd.doc -o actions_builtin.md 3 | --------------------------------------------------------------------------------