├── lib ├── charms │ ├── __init__.py │ └── archive_auth_mirror │ │ ├── __init__.py │ │ ├── repository.py │ │ └── setup.py └── archive_auth_mirror │ ├── __init__.py │ ├── scripts │ ├── __init__.py │ ├── manage_user.py │ ├── reprepro_sign_helper.py │ └── mirror_archive.py │ ├── script.py │ ├── cron.py │ ├── ssh.py │ ├── lock.py │ ├── rsync.py │ ├── reprepro.py │ ├── gpg.py │ ├── mirror.py │ └── utils.py ├── unit_tests ├── __init__.py ├── fakes.py ├── test_lock.py ├── test_cron.py ├── test_manage_user.py ├── test_ssh.py ├── test_reprepro.py ├── test_rsync.py ├── test_utils.py ├── test_mirror.py ├── test_repository.py ├── test_setup.py └── test_gpg.py ├── wheelhouse.txt ├── .gitignore ├── requirements.txt ├── tests ├── 00-setup ├── 10-deploy └── 20-multi ├── interfaces └── ssh-peers │ ├── interface.yaml │ └── peers.py ├── .travis.yml ├── templates ├── script.j2 ├── reprepro-updates.j2 ├── reprepro-distributions.j2 └── nginx-static.j2 ├── layer.yaml ├── tox.ini ├── metadata.yaml ├── README.md ├── release-charm ├── Makefile ├── config.yaml ├── reactive └── archive_auth_mirror.py └── icon.svg /lib/charms/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /unit_tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /wheelhouse.txt: -------------------------------------------------------------------------------- 1 | gnupg 2 | -------------------------------------------------------------------------------- /lib/archive_auth_mirror/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/archive_auth_mirror/scripts/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/charms/archive_auth_mirror/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.pyo 3 | __pycache__ 4 | .tox 5 | build 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | fixtures 2 | charmhelpers 3 | charms.reactive 4 | charm-test 5 | gnupg 6 | -------------------------------------------------------------------------------- /tests/00-setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo add-apt-repository -yu ppa:juju/stable 4 | sudo apt install -y amulet python-requests 5 | -------------------------------------------------------------------------------- /interfaces/ssh-peers/interface.yaml: -------------------------------------------------------------------------------- 1 | name: ssh-peers 2 | summary: A peer interface for exchanging ssh keys among units. 
3 | version: 1 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | dist: trusty 3 | sudo: required 4 | python: 5 | - "3.5" 6 | install: pip install flake8 tox 7 | script: tox 8 | addons: 9 | apt: 10 | packages: 11 | - gnupg 12 | - apache2-utils 13 | -------------------------------------------------------------------------------- /templates/script.j2: -------------------------------------------------------------------------------- 1 | #!{{ interpreter }} 2 | 3 | import sys 4 | from os import path 5 | 6 | sys.path.append(path.join(path.dirname(__file__), 'lib')) 7 | 8 | from archive_auth_mirror.scripts import {{ script_module }} 9 | 10 | {{ script_module }}.main() 11 | 12 | -------------------------------------------------------------------------------- /templates/reprepro-updates.j2: -------------------------------------------------------------------------------- 1 | {% for mirror in mirrors %}Name: update-repo-{{ mirror.local_suite }} 2 | Method: {{ mirror.url }} 3 | Suite: {{ mirror.remote_suite }} 4 | Components: {{ mirror.components }} 5 | Architectures: {{ mirror.archs }} 6 | VerifyRelease: {{ mirror.key }} 7 | 8 | {% endfor %} 9 | -------------------------------------------------------------------------------- /unit_tests/fakes.py: -------------------------------------------------------------------------------- 1 | """Fakes for unittests.""" 2 | 3 | 4 | class FakeHookEnv: 5 | """A fake hookenv object.""" 6 | 7 | def __init__(self, config=None): 8 | self._config = config or {} 9 | 10 | def config(self): 11 | return self._config 12 | 13 | def unit_public_ip(self): 14 | return '1.2.3.4' 15 | -------------------------------------------------------------------------------- /layer.yaml: -------------------------------------------------------------------------------- 1 | includes: 2 | - 'layer:basic' 3 | - 
'layer:leadership' 4 | - 'layer:nginx' 5 | - 'layer:nagios' 6 | - 'interface:basic-auth-check' 7 | - 'interface:ssh-peers' 8 | repo: https://github.com/CanonicalLtd/archive-auth-mirror 9 | options: 10 | basic: 11 | packages: 12 | - gnupg 13 | - reprepro 14 | - apache2-utils 15 | use_venv: true 16 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py35, lint 3 | skipsdist = True 4 | 5 | [testenv] 6 | deps = 7 | -r{toxinidir}/requirements.txt 8 | setenv = 9 | PYTHONPATH = {toxinidir}/lib:{toxinidir}/reactive 10 | commands = 11 | {envpython} -m unittest discover unit_tests {posargs} 12 | 13 | [testenv:lint] 14 | deps = 15 | flake8 16 | commands = 17 | {envbindir}/flake8 --exclude build,.tox . 18 | -------------------------------------------------------------------------------- /templates/reprepro-distributions.j2: -------------------------------------------------------------------------------- 1 | {% for mirror in mirrors %}Codename: {{ mirror.local_suite }} 2 | Suite: {{ mirror.local_suite }} 3 | {%- if mirror.version %} 4 | Version: {{ mirror.version }} 5 | {%- endif %} 6 | Label: {{ mirror.origin }} 7 | Origin: {{ mirror.origin }} 8 | Components: {{ mirror.components }} 9 | Architectures: {{ mirror.archs }} 10 | SignWith: ! {{ sign_script }} 11 | Update: update-repo-{{ mirror.local_suite }} 12 | 13 | {% endfor %} 14 | -------------------------------------------------------------------------------- /metadata.yaml: -------------------------------------------------------------------------------- 1 | name: archive-auth-mirror 2 | summary: Sync an Ubuntu repository and provide an authenticate mirror 3 | maintainer: Canonical Landscape Team 4 | description: This charm provides an HTTPS mirror with BasicAuth of an Ubuntu archive. 
def setup_logger(level=logging.DEBUG, echo=False):
    """Configure and return the logger for the running script.

    The logger is named after the script (basename of sys.argv[0]) and
    always sends records to syslog via /dev/log with the LOG_DAEMON
    facility.

    If echo is True, logging is also written to stderr.
    """
    script_name = os.path.basename(sys.argv[0])
    logger = logging.getLogger(script_name)
    logger.setLevel(level)
    handlers = [
        SysLogHandler(address='/dev/log', facility=SysLogHandler.LOG_DAEMON)]
    if echo:
        handlers.append(logging.StreamHandler())
    for handler in handlers:
        logger.addHandler(handler)
    return logger
def create_key(path):
    """Generate a new passphrase-less RSA ssh key at the given path."""
    command = ['ssh-keygen', '-q', '-f', str(path), '-t', 'rsa', '-N', '']
    subprocess.check_call(command)


def add_authorized_key(public_key, authorized_keys_path):
    """Append the public key to the given authorized_keys file.

    The containing directory is created (mode 0700) when missing, and the
    key is not added again if it is already present in the file.
    """
    key_dir = authorized_keys_path.parent
    if not key_dir.exists():
        key_dir.mkdir(0o700)
    if authorized_keys_path.exists():
        existing_keys = authorized_keys_path.read_text().splitlines()
    else:
        existing_keys = []
    key = public_key.strip()
    if key not in existing_keys:
        with authorized_keys_path.open('a') as fh:
            fh.write(key + '\n')
def rsync(host, path, rsh=None, delete=False):
    """Copy a filesystem tree to a remote host using rsync.

    The tree is synced to the same absolute location on the remote host.

    :param host: the target host, in any form rsync accepts.
    :param path: a pathlib.Path instance for the local tree to copy.
    :param rsh: optional remote-shell command, passed to rsync as --rsh.
    :param delete: if True, files not found in the source path are removed
        from the destination.
    :raises subprocess.CalledProcessError: if the rsync command fails.
    """
    # Trailing slash so rsync copies the *contents* of the directory rather
    # than creating a nested directory on the remote side.
    source = str(path.absolute()) + '/'
    command = ['/usr/bin/rsync', '-a']
    if rsh is not None:
        command.extend(['--rsh', rsh])
    if delete:
        command.append('--delete')
    command.extend([source, '{}:{}'.format(host, source)])
    # check_output (rather than check_call) so the command's output is
    # attached to the CalledProcessError for callers to log.
    subprocess.check_output(command)


def rsync_multi(hosts, path, logger, rsh=None, delete=False):
    """Copy a filesystem tree using rsync to multiple hosts.

    If a call to rsync to a host fails, the error is logged via the provided
    logger and the next host is attempted.
    """
    for host in hosts:
        try:
            logger.info('rsyncing {} to {}'.format(path, host))
            rsync(host, path, rsh=rsh, delete=delete)
        except subprocess.CalledProcessError as error:
            logger.error(
                'rsync to {} failed: {}'.format(host, error.output))
class Reprepro:
    """Wrapper to execute reprepro commands."""

    def __init__(self, logger, binary='/usr/bin/reprepro'):
        # logger: used to report command output and failures.
        # binary: path to the reprepro executable (overridable for tests).
        self._logger = logger
        self._binary = binary

    def execute(self, *args):
        """Execute the specified reprepro command.

        The positional args are appended to a base command carrying the
        basedir/confdir/outdir/gnupghome options derived from get_paths().

        stdout is streamed to the logger at INFO level as lines are
        produced; on a non-zero exit status stderr lines are logged at
        ERROR level and subprocess.CalledProcessError is raised.

        NOTE(review): stderr is only drained after stdout hits EOF; a very
        chatty stderr could in principle fill the pipe buffer and stall the
        child -- confirm reprepro's stderr output stays small.
        """
        command = self._get_command(args)

        self._logger.debug('running "{}"'.format(' '.join(command)))
        with Popen(command, stdout=PIPE, stderr=PIPE) as process:
            # Stream stdout line by line so long mirror runs show progress.
            for line in iter(process.stdout.readline, b''):
                self._logger.info(' ' + line.strip().decode('utf8'))

            return_code = process.wait()
            if return_code:
                for line in process.stderr.readlines():
                    self._logger.error(' ' + line.strip().decode('utf8'))
                raise CalledProcessError(return_code, command)

    def _get_command(self, args):
        """Return the full reprepro command line for the given args."""
        paths = get_paths()
        command = [
            self._binary,
            '--basedir', str(paths['reprepro']),
            '--confdir', str(paths['reprepro-conf']),
            '--outdir', str(paths['static'] / 'ubuntu'),
            '--gnupghome', str(paths['gnupghome'])]
        command.extend(args)
        return command
class InstallCrontabTest(CharmTest):
    """Tests for cron.install_crontab, run against a fake filesystem root."""

    def test_install_crontab(self):
        """install_crontab creates a crontab file"""
        root_dir = Path(self.fakes.fs.root.path)
        # Pre-create etc/cron.d under the fake root -- presumably where
        # get_paths resolves the 'cron' path; verify against utils.get_paths.
        (root_dir / 'etc/cron.d').mkdir(parents=True)
        paths = get_paths(root_dir=root_dir)
        install_crontab(paths=paths)

        with paths['cron'].open() as fh:
            content = fh.read()

        # The rendered crontab must invoke the mirror-archive script.
        script = paths['bin'] / 'mirror-archive'
        self.assertIn(str(script), content)
def configure_reprepro(mirrors, sign_key_fingerprint, sign_key_passphrase):
    """Create reprepro configuration files.

    The provided mirrors is a sequence of mirror.Mirror named tuples.

    :param mirrors: mirrors to render into the distributions/updates files.
    :param sign_key_fingerprint: id of the GPG key used to sign the archive;
        stored in the charm config for the signing helper.
    :param sign_key_passphrase: passphrase for the sign key; written to the
        sign-passphrase file read by reprepro-sign-helper.
    """
    paths = get_paths()
    # Explicitly pass owner and group for tests, otherwise root would be used.
    owner = group = getpass.getuser()
    # Render distributions file.
    target = str(paths['reprepro-conf'] / 'distributions')
    context = {
        'mirrors': mirrors,
        'sign_script': paths['bin'] / 'reprepro-sign-helper',
    }
    render(_DISTRIBUTIONS, target, context, owner=owner, group=group)
    # Render updates file.
    target = str(paths['reprepro-conf'] / 'updates')
    context = {'mirrors': mirrors}
    render(_UPDATES, target, context, owner=owner, group=group)
    # Update configuration.
    update_config(
        config_path=paths['config'],
        suites=[mirror.local_suite for mirror in mirrors],
        sign_key_id=sign_key_fingerprint)
    # Save the sign passphrase for the signing helper script.
    with paths['sign-passphrase'].open('w') as fh:
        fh.write(sign_key_passphrase)


def disable_mirroring(get_paths=get_paths):
    """Disable mirroring.

    The config file is renamed with a .disabled suffix rather than deleted,
    so mirroring can be re-enabled later.  get_paths is injectable for tests.
    """
    config = get_paths()['config']
    if config.exists():
        config.replace(config.with_suffix('.disabled'))


# Template names for the reprepro configuration files.
_DISTRIBUTIONS = 'reprepro-distributions.j2'
_UPDATES = 'reprepro-updates.j2'
class KeyRing(object):
    """The key ring is used to import GPG keys and return fingerprints."""

    def __init__(self):
        # NOTE(review): the 'homedir' keyword suggests the gnupg fork that
        # accepts it (rather than python-gnupg's 'gnupghome') -- confirm
        # against the pinned gnupg package.
        homedir = str(get_paths()['gnupghome'])
        self.gpg = gnupg.GPG(homedir=homedir)

    def import_key(self, key):
        """Import the given GPG key and return its fingerprint."""
        imported = self.gpg.import_keys(key)
        # Only return the last 8 chars of the fingerprint, since that's the
        # format used by reprepro.
        return imported.results[0]['fingerprint'][-8:]


def export_public_key(key_id, path, gnupghome=None):
    """Export a public key in ASCII format to the specified path.

    :param key_id: the id of the key to export.
    :param path: a pathlib.Path the ASCII key material is written to.
    :param gnupghome: optional GnuPG home dir; defaults to the charm's.
    """
    if not gnupghome:
        gnupghome = str(get_paths()['gnupghome'])
    gpg = gnupg.GPG(homedir=gnupghome)
    material = gpg.export_keys(key_id)
    with path.open('w') as fh:
        fh.write(material)


def inline_sign(key_id, unsigned_file, inline_sign_file, paths=None):
    """Create an inline-signed file."""
    _sign(
        key_id, unsigned_file, inline_sign_file, paths,
        clearsign=True, detach=False)


def detach_sign(key_id, unsigned_file, detach_sign_file, paths=None):
    """Create a detached signature for a file."""
    _sign(
        key_id, unsigned_file, detach_sign_file, paths,
        clearsign=False, detach=True)


def _sign(key_id, in_file, out_file, paths, **sign_options):
    """Sign in_file with key_id, writing the result to out_file.

    The sign passphrase is read from the charm's sign-passphrase file;
    sign_options are forwarded to gnupg (clearsign/detach flags).
    """
    if paths is None:
        paths = get_paths()
    gpg = gnupg.GPG(homedir=str(paths['gnupghome']))

    passphrase = paths['sign-passphrase'].read_text().strip()

    with in_file.open() as in_fh:
        sign = gpg.sign(in_fh, default_key=key_id, passphrase=passphrase,
                        **sign_options)
    out_file.write_text(str(sign))
def get_parser():
    """Build the argument parser for the user-management script."""
    parser = argparse.ArgumentParser(description=__doc__)
    subparsers = parser.add_subparsers(
        dest='action', help='action to perform')
    # An action is mandatory (argparse leaves subcommands optional on
    # Python 3 unless told otherwise).
    subparsers.required = True

    # "add" creates a user, or updates the password of an existing one.
    add_parser = subparsers.add_parser(
        'add',
        help='add a user. If the user exists, their password is updated')
    for arg_name, arg_help in (
            ('user', 'the username to add'),
            ('password', 'the password for the user')):
        add_parser.add_argument(arg_name, help=arg_help)

    # "list" takes no further arguments.
    subparsers.add_parser('list', help='list users')

    # "remove" deletes an existing user.
    remove_parser = subparsers.add_parser('remove', help='remove a user')
    remove_parser.add_argument('user', help='the user to remove')

    return parser
sign.') 17 | parser.add_argument( 18 | 'inline_sign_file', help='Name of the inline-signed file') 19 | parser.add_argument( 20 | 'detach_sign_file', help='Name of the detached signature file') 21 | return parser.parse_args(args=args) 22 | 23 | 24 | def patch_release_file(path, packages_require_auth): 25 | """Insert some custom fields in the Release file.""" 26 | patch_path = path.with_suffix(".patched") 27 | with path.open() as file: 28 | with patch_path.open("x") as outfile: 29 | for line in file: 30 | if line.startswith('Codename:'): 31 | line = line.rstrip().split('-')[0] + '\n' 32 | elif packages_require_auth and line.startswith("MD5Sum:"): 33 | outfile.write("Packages-Require-Authorization: yes\n") 34 | outfile.write(line) 35 | patch_path.rename(path) 36 | 37 | 38 | def main(): 39 | logger = setup_logger() 40 | config = get_config() 41 | if not config: 42 | logger.error('no config file found') 43 | sys.exit(1) 44 | sign_key = config['sign-key-id'] 45 | 46 | args = parse_args() 47 | unsigned_file = Path(args.unsigned_file) 48 | patch_release_file( 49 | unsigned_file, config.get('packages-require-auth', False)) 50 | if args.inline_sign_file: 51 | inline_sign(sign_key, unsigned_file, Path(args.inline_sign_file)) 52 | if args.detach_sign_file: 53 | detach_sign(sign_key, unsigned_file, Path(args.detach_sign_file)) 54 | -------------------------------------------------------------------------------- /lib/archive_auth_mirror/mirror.py: -------------------------------------------------------------------------------- 1 | """Utilities and objects for working with Debian mirrors.""" 2 | 3 | from collections import ( 4 | Mapping, 5 | namedtuple, 6 | ) 7 | 8 | import yaml 9 | 10 | 11 | def from_config(keyring, mirrors, origin): 12 | """Return a list of mirrors as defined in the given config values. 13 | 14 | Raise a ValueError if config values are not valid. 
15 | """ 16 | try: 17 | entries = yaml.safe_load(mirrors) 18 | except Exception as err: 19 | raise ValueError('cannot YAML decode mirrors value: {}'.format(err)) 20 | if not isinstance(entries, (list, tuple)): 21 | raise ValueError('mirrors value is not a list') 22 | results = [] 23 | for entry in entries: 24 | if not isinstance(entry, Mapping): 25 | raise ValueError('mirrors value is not a list of maps') 26 | try: 27 | debline = entry['deb-line'] 28 | pubkey = entry['pub-key'] 29 | except KeyError as err: 30 | raise ValueError('mirrors value is missing keys: {}'.format(err)) 31 | try: 32 | url, remote_suite, components = debline.split(' ', maxsplit=2) 33 | except (TypeError, ValueError): 34 | raise ValueError('invalid debline {!r}'.format(debline)) 35 | try: 36 | key = keyring.import_key(pubkey) 37 | except Exception as err: 38 | raise ValueError( 39 | 'cannot import GPG public key {!r}: {}'.format(pubkey, err)) 40 | results.append(Mirror( 41 | url=url, 42 | remote_suite=remote_suite, 43 | components=components, 44 | key=key, 45 | archs=entry.get('archs', 'source i386 amd64'), 46 | version=entry.get('version', ''), 47 | origin=origin, 48 | local_suite=entry.get('suite') or remote_suite, 49 | )) 50 | return tuple(results) 51 | 52 | 53 | # Mirror represents a debian mirror. 54 | Mirror = namedtuple( 55 | 'Mirror', 56 | 'url remote_suite components key archs version origin local_suite') 57 | -------------------------------------------------------------------------------- /interfaces/ssh-peers/peers.py: -------------------------------------------------------------------------------- 1 | from charmhelpers.core import hookenv 2 | from charms.reactive import ( 3 | flags, 4 | hook, 5 | RelationBase, 6 | scopes, 7 | ) 8 | 9 | 10 | class SshPeers(RelationBase): 11 | """Allow public ssh keys to be propagated among units. 
12 | 13 | When a new peer connects, a new secret SSH key should be created, 14 | and public key should be propagated to the peer using 15 | set_local_public_key() 16 | 17 | The peer will then be notified about the public key and can add it 18 | to its authorized_keys file. 19 | """ 20 | 21 | scope = scopes.UNIT 22 | 23 | class states(flags.StateList): 24 | connected = flags.State('{relation_name}.connected') 25 | local_public_key = flags.State('{relation_name}.local-public-key') 26 | new_remote_public_key = flags.State( 27 | '{relation_name}.new-remote-public-key') 28 | 29 | @hook('{peers:ssh-peers}-relation-{joined}') 30 | def joined(self): 31 | self.set_state(self.states.connected) 32 | 33 | @hook('{peers:ssh-peers}-relation-{changed,joined}') 34 | def changed(self): 35 | previous_remote_public_key = self.get_local('remote-public-ssh-key') 36 | new_remote_public_key = self.get_remote('public-ssh-key') 37 | if not new_remote_public_key: 38 | hookenv.log("No remote public key set") 39 | self.remove_state(self.states.new_remote_public_key) 40 | return 41 | 42 | if new_remote_public_key != previous_remote_public_key: 43 | hookenv.log("New remote public key") 44 | self.set_local('remote-public-ssh-key', new_remote_public_key) 45 | self.set_state(self.states.new_remote_public_key) 46 | else: 47 | hookenv.log("Remote public key is still the same") 48 | 49 | @hook('{peers:ssh-peers}-relation-{departed}') 50 | def departed(self): 51 | # Remove the state that our relationship is now connected to our 52 | # principal layer(s) 53 | self.remove_state(self.states.connected) 54 | 55 | def set_local_public_key(self, public_key): 56 | relation_info = {'public-ssh-key': public_key} 57 | self.set_state(self.states.local_public_key) 58 | self.set_remote(**relation_info) 59 | -------------------------------------------------------------------------------- /unit_tests/test_ssh.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 
from pathlib import Path 3 | import shutil 4 | import stat 5 | import tempfile 6 | 7 | from archive_auth_mirror.ssh import ( 8 | add_authorized_key, 9 | create_key, 10 | ) 11 | 12 | 13 | class CreateKeyTest(unittest.TestCase): 14 | 15 | def setUp(self): 16 | self.tempdir = Path(tempfile.mkdtemp()) 17 | self.addCleanup(shutil.rmtree, str(self.tempdir)) 18 | 19 | def test_create_key(self): 20 | """create_key() creates a secret and public key. 21 | 22 | The secret key can only be read by the owner. 23 | """ 24 | ssh_key_path = self.tempdir / 'ssh-key' 25 | create_key(ssh_key_path) 26 | public_key_path = ssh_key_path.with_suffix('.pub') 27 | self.assertEqual(0o644, stat.S_IMODE(public_key_path.stat().st_mode)) 28 | self.assertEqual(0o600, stat.S_IMODE(ssh_key_path.stat().st_mode)) 29 | self.assertIn('BEGIN RSA PRIVATE KEY', ssh_key_path.read_text()) 30 | 31 | 32 | class AddAuthorizedKey(unittest.TestCase): 33 | 34 | def setUp(self): 35 | self.tempdir = Path(tempfile.mkdtemp()) 36 | self.addCleanup(shutil.rmtree, str(self.tempdir)) 37 | 38 | def test_add_authorized_key(self): 39 | """add_authorized_key() adds the given key.""" 40 | authorized_keys_path = self.tempdir / 'authorized-keys' 41 | add_authorized_key('key 1', authorized_keys_path) 42 | self.assertEqual( 43 | ['key 1'], authorized_keys_path.read_text().splitlines()) 44 | 45 | def test_add_authorized_key_whitespace(self): 46 | """Whitespace gets stripped before adding the key.""" 47 | authorized_keys_path = self.tempdir / 'authorized-keys' 48 | add_authorized_key('\nkey 1\n', authorized_keys_path) 49 | self.assertEqual( 50 | ['key 1'], authorized_keys_path.read_text().splitlines()) 51 | 52 | def test_add_authorized_key_multiple(self): 53 | """Existing keys are preserved.""" 54 | authorized_keys_path = self.tempdir / 'authorized-keys' 55 | add_authorized_key('key 1', authorized_keys_path) 56 | add_authorized_key('key 2', authorized_keys_path) 57 | self.assertEqual( 58 | ['key 1', 'key 2'], 
authorized_keys_path.read_text().splitlines()) 59 | 60 | def test_add_authorized_key_duplicate(self): 61 | """If the key already exists in the, it's not added again.""" 62 | authorized_keys_path = self.tempdir / 'authorized-keys' 63 | add_authorized_key('key 1', authorized_keys_path) 64 | add_authorized_key('key 1', authorized_keys_path) 65 | self.assertEqual( 66 | ['key 1'], authorized_keys_path.read_text().splitlines()) 67 | -------------------------------------------------------------------------------- /lib/archive_auth_mirror/scripts/mirror_archive.py: -------------------------------------------------------------------------------- 1 | """Mirror and update a repository.""" 2 | 3 | import functools 4 | import subprocess 5 | import sys 6 | 7 | from ..utils import get_paths, get_config 8 | from ..lock import LockFile, AlreadyLocked 9 | from ..reprepro import Reprepro 10 | from ..rsync import rsync_multi 11 | from ..script import setup_logger 12 | 13 | 14 | def main(): 15 | logger = setup_logger(echo=True) 16 | paths = get_paths() 17 | config = get_config() 18 | suites = config['suites'] 19 | other_units = config.get('ssh-peers', {}).keys() 20 | # Using StrictHostKeyChecking=no isn't ideal, but we don't yet 21 | # popolate known_hosts with the right keys. But we trust the 22 | # network, and we don't push any sensitive data. 
23 | rsh = 'ssh -o StrictHostKeyChecking=no -i {}'.format(paths['ssh-key']) 24 | remote_sync = functools.partial(rsync_multi, other_units, rsh=rsh) 25 | lockfile = LockFile(paths['lockfile']) 26 | 27 | try: 28 | lockfile.lock() 29 | except AlreadyLocked: 30 | logger.error('another process is already running, exiting') 31 | sys.exit(1) 32 | 33 | logger.info('starting mirroring') 34 | 35 | reprepro = Reprepro(logger) 36 | try: 37 | logger.info('fetching new pool packages') 38 | reprepro.execute( 39 | '--show-percent', '--export=never', '--keepunreferencedfiles', 40 | 'update', *suites) 41 | 42 | logger.info('rsyncing new pool packages to peer units') 43 | remote_sync(paths['static'] / 'ubuntu', logger) 44 | 45 | logger.info('generating new dists directory') 46 | reprepro.execute('export', *suites) 47 | 48 | logger.info('rsyncing new dists directory to peer units') 49 | remote_sync(paths['static'] / 'ubuntu' / 'dists', logger) 50 | 51 | logger.info('deleting old pool packages') 52 | reprepro.execute('deleteunreferenced') 53 | 54 | logger.info('deleting old pool packages on peer units') 55 | remote_sync(paths['static'] / 'ubuntu', logger, delete=True) 56 | 57 | logger.info('rsyncing reprepro dir to peer units') 58 | # Push only the reprepro db and lists, not the conf dir. Each 59 | # unit should generate the conf dir themselves, so we don't push 60 | # it, since it contains the private signing key, which is 61 | # sensitive data. 
62 | remote_sync(paths['reprepro'] / 'db', logger, delete=True) 63 | remote_sync(paths['reprepro'] / 'lists', logger, delete=True) 64 | 65 | logger.info('mirroring completed') 66 | except subprocess.CalledProcessError: 67 | logger.error('mirroring failed') 68 | sys.exit(1) 69 | finally: 70 | lockfile.release() 71 | -------------------------------------------------------------------------------- /lib/archive_auth_mirror/utils.py: -------------------------------------------------------------------------------- 1 | """Miscellaneous helper functions.""" 2 | 3 | from pathlib import Path 4 | 5 | import yaml 6 | 7 | 8 | def get_paths(root_dir=None): 9 | """Return path for the service tree. 10 | 11 | The filesystem tree for the service is as follows: 12 | 13 | /srv/archive-auth-mirror 14 | ├── basic-auth -- the file containing BasicAuth username/passwords 15 | ├── bin 16 | │ └── mirror-archive -- the mirroring script 17 | ├── config.yaml -- the script configuration file 18 | ├── mirror-archive.lock -- lockfile for the mirror-archive script 19 | ├── reprepro 20 | │ └── conf -- reprepro configuration files 21 | │ └── .gnupg -- GPG config for reprepro 22 | ├── sign-passphrase -- contains the passphrase for the GPG sign key 23 | ├── ssh-key -- the ssh key used by rsync 24 | └── static -- the root of the virtualhost, contains the repository 25 | """ 26 | if root_dir is None: 27 | root_dir = Path('/') 28 | base_dir = root_dir / 'srv/archive-auth-mirror' 29 | reprepro_dir = base_dir / 'reprepro' 30 | return { 31 | 'base': base_dir, 32 | 'cron': root_dir / 'etc/cron.d/archive-auth-mirror', 33 | 'bin': base_dir / 'bin', 34 | 'config': base_dir / 'config.yaml', 35 | 'static': base_dir / 'static', 36 | 'basic-auth': base_dir / 'basic-auth', 37 | 'sign-passphrase': base_dir / 'sign-passphrase', 38 | 'ssh-key': base_dir / 'ssh-key', 39 | 'authorized-keys': root_dir / 'root' / '.ssh' / 'authorized_keys', 40 | 'lockfile': base_dir / 'mirror-archive.lock', 41 | 'reprepro': reprepro_dir, 
def get_config(config_path=None):
    """Return a dict with the service configuration.

    If config_path is None, the default service config path is used.
    An empty dict is returned when the file does not exist.
    """
    if config_path is None:
        config_path = get_paths()['config']
    if not config_path.exists():
        return {}
    with config_path.open() as fh:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can construct arbitrary Python objects from the stream.
        return yaml.safe_load(fh)


def update_config(
        config_path=None, suites=(), sign_key_id=None, new_ssh_peers=None,
        packages_require_auth=None):
    """Update the config with the given parameters.

    If config_path is None, the default service config path is used.
    """
    if config_path is None:
        # Resolve the default path here too: previously a None config_path
        # was only resolved inside get_config(), so the final open() below
        # failed with AttributeError.
        config_path = get_paths()['config']
    config = get_config(config_path=config_path)
    if suites:
        config['suites'] = suites
    if sign_key_id is not None:
        config['sign-key-id'] = sign_key_id
    if new_ssh_peers is not None:
        # Merge new peers into the existing mapping rather than replacing it.
        ssh_peers = config.get('ssh-peers', {})
        ssh_peers.update(new_ssh_peers)
        config['ssh-peers'] = ssh_peers
    if packages_require_auth is not None:
        config['packages-require-auth'] = packages_require_auth
    with config_path.open('w') as config_file:
        yaml.dump(config, config_file)
/srv/archive-auth-mirror/reprepro' 25 | ' --confdir /srv/archive-auth-mirror/reprepro/conf' 26 | ' --outdir /srv/archive-auth-mirror/static/ubuntu' 27 | ' --gnupghome /srv/archive-auth-mirror/reprepro/.gnupg' 28 | ' export ubuntu"\n' 29 | # output from the process 30 | ' --basedir /srv/archive-auth-mirror/reprepro ' 31 | '--confdir /srv/archive-auth-mirror/reprepro/conf ' 32 | '--outdir /srv/archive-auth-mirror/static/ubuntu ' 33 | '--gnupghome /srv/archive-auth-mirror/reprepro/.gnupg ' 34 | 'export ubuntu\n', 35 | self.logger.output) 36 | 37 | def test_execute_fail(self): 38 | """If the command fails, an exception is raised.""" 39 | reprepro = Reprepro(logging.getLogger(''), binary='/bin/false') 40 | with self.assertRaises(CalledProcessError) as context_manager: 41 | reprepro.execute('export', 'ubuntu') 42 | self.assertEqual(1, context_manager.exception.returncode) 43 | self.assertEqual( 44 | ['/bin/false', '--basedir', '/srv/archive-auth-mirror/reprepro', 45 | '--confdir', '/srv/archive-auth-mirror/reprepro/conf', 46 | '--outdir', '/srv/archive-auth-mirror/static/ubuntu', 47 | '--gnupghome', '/srv/archive-auth-mirror/reprepro/.gnupg', 48 | 'export', 'ubuntu'], 49 | context_manager.exception.cmd) 50 | 51 | def test_execute_log_error(self): 52 | """If the command fails, stderr is logged.""" 53 | fd, name = tempfile.mkstemp() 54 | os.close(fd) 55 | binary = Path(name) 56 | self.addCleanup(binary.unlink) 57 | binary.write_text( 58 | textwrap.dedent( 59 | '''#!/bin/sh 60 | echo fail >&2 61 | exit 1 62 | ''')) 63 | binary.chmod(0o700) 64 | 65 | reprepro = Reprepro(logging.getLogger(''), binary=str(binary)) 66 | with self.assertRaises(CalledProcessError): 67 | reprepro.execute('export', 'ubuntu') 68 | self.assertIn('fail\n', self.logger.output) 69 | -------------------------------------------------------------------------------- /lib/charms/archive_auth_mirror/setup.py: -------------------------------------------------------------------------------- 1 | """Service 
installation and configuration functions.""" 2 | 3 | from pathlib import Path 4 | 5 | from charmhelpers.core import hookenv, host 6 | from charmhelpers.core.templating import render 7 | 8 | from archive_auth_mirror.utils import get_paths 9 | 10 | 11 | REQUIRED_OPTIONS = frozenset(['mirrors', 'repository-origin', 'sign-gpg-key']) 12 | SCRIPTS = ('mirror-archive', 'manage-user', 'reprepro-sign-helper') 13 | 14 | 15 | def get_virtualhost_name(hookenv=hookenv): 16 | """Return the configured service URL or the unit address.""" 17 | service_url = hookenv.config().get('service-url') 18 | return service_url or hookenv.unit_public_ip() 19 | 20 | 21 | def get_virtualhost_config( 22 | auth_backends, resource_name, auth_cache_enabled, auth_cache_duration, 23 | auth_cache_inactivity, hookenv=hookenv): 24 | """Return the configuration for the static virtuahost.""" 25 | paths = get_paths() 26 | domain = get_virtualhost_name(hookenv=hookenv) 27 | return { 28 | 'domain': domain, 29 | 'document_root': str(paths['static']), 30 | 'auth_backends': auth_backends or [], 31 | 'resource_name': resource_name, 32 | 'auth_cache_enabled': auth_cache_enabled, 33 | 'auth_cache_duration': auth_cache_duration, 34 | 'auth_cache_inactivity': auth_cache_inactivity, 35 | 'basic_auth_file': str(paths['basic-auth'])} 36 | 37 | 38 | def install_resources(root_dir=None): 39 | """Create tree structure and install resources from the charm.""" 40 | paths = get_paths(root_dir=root_dir) 41 | for name in ('bin', 'reprepro-conf', 'static'): 42 | host.mkdir(str(paths[name]), perms=0o755) 43 | 44 | # the gpg directory should only be readable by root 45 | host.mkdir(str(paths['gnupghome']), perms=0o700) 46 | 47 | # create an empty basic-auth password file. 
It will be updated by a script 48 | # run as root, but it must be readable by the web server 49 | host.write_file( 50 | str(paths['basic-auth']), b'', group='www-data', perms=0o640) 51 | # create an empty sign passphrase file, only readable by root 52 | host.write_file(str(paths['sign-passphrase']), b'', perms=0o600) 53 | 54 | # install scripts 55 | for script in SCRIPTS: 56 | create_script_file(script, paths['bin']) 57 | # symlink the lib libary to make it available to scripts too 58 | (paths['bin'] / 'lib').symlink_to(Path.cwd() / 'lib') 59 | 60 | 61 | def create_script_file(name, bindir): 62 | """Write a python script file from the template.""" 63 | context = { 64 | 'interpreter': Path.cwd().parent / '.venv/bin/python3', 65 | 'script_module': name.replace('-', '_')} 66 | render('script.j2', str(bindir / name), context, perms=0o755) 67 | 68 | 69 | def missing_options(config): 70 | """Return a list of missing required option names if any. 71 | 72 | Return an empty list otherwise. 73 | """ 74 | return [opt for opt in REQUIRED_OPTIONS if config.get(opt) in ('', None)] 75 | -------------------------------------------------------------------------------- /config.yaml: -------------------------------------------------------------------------------- 1 | options: 2 | service-url: 3 | type: string 4 | description: "The URL of the service." 5 | default: "" 6 | resource-name: 7 | type: string 8 | description: "The name of the resource associated with this service" 9 | default: esm 10 | repository-origin: 11 | type: string 12 | description: "The value of the 'Origin' field of the repository." 13 | default: "Ubuntu" 14 | mirrors: 15 | type: string 16 | description: | 17 | A YAML describing one or more repositories to mirror. 18 | The YAML is structured as a list of maps, with each map including 19 | the "deb-line" and "pub-key" mandatory fields, and the "archs" and 20 | "version" optional fields. 
For instance: 21 | 22 | - deb-line: http[s]://[:@]/ 23 | pub-key: GPG public key for validating the repository 24 | archs: source i386 amd64 25 | version: 18.10 26 | suite: xenial-updates 27 | - deb-line: ... 28 | pub-key: ... 29 | 30 | The archs field defaults to "source i386 amd64". 31 | The version field, if not specified, is omitted. 32 | The suite is the local suite, in essence the dists subdirectory where the 33 | Release file can be found locally. The suite must uniquely identify a 34 | mirror. If not specified, it defaults to the value of the remote suite 35 | from the deb-line. In any case, the codename in the Release file is 36 | generated from the suite prefix, so that, for instance, "trusty", 37 | "trusty-updates" and "trusty-foo-bar" will all have "trusty" as their 38 | codename. 39 | 40 | The charm is blocked until this config value is provided. 41 | default: "" 42 | sign-gpg-key: 43 | type: string 44 | description: "GPG private key for signing the archive." 45 | default: "" 46 | sign-gpg-passphrase: 47 | type: string 48 | description: "Passphrase for the sign-gpg-key." 49 | default: "" 50 | auth-cache-enabled: 51 | type: boolean 52 | description: | 53 | Whether authorization caching is enabled. 54 | When enabled, both successful and unauthorized responses from the 55 | auth backend are cached. The cache expires or is deleted based on 56 | the "auth-cache-duration" and "auth-cache-inactivity" options below. 57 | Even when this option is set to true, caching is really only enabled 58 | when this application is related to an auth backend, like 59 | esm-auth-server. 60 | default: false 61 | auth-cache-duration: 62 | type: string 63 | description: Cache duration before expiration, like "15m" or "1h". 64 | default: 1h 65 | auth-cache-inactivity: 66 | type: string 67 | description: Cache inactivity before invalidation, like "15m" or "1h". 
68 | default: 10m 69 | packages-require-auth: 70 | type: boolean 71 | description: | 72 | Whether the Packages-Require-Authorization field must be included in the 73 | Release file. This field provides a hint that package downloads will 74 | require authorization, hence allowing clients to prevent use of this 75 | repository if authorization has not been provided, avoiding problems with 76 | failing downloads. 77 | default: false 78 | -------------------------------------------------------------------------------- /unit_tests/test_rsync.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import logging 3 | from subprocess import CalledProcessError 4 | from unittest import TestCase, mock 5 | 6 | from fixtures import TestWithFixtures, LoggerFixture 7 | 8 | from archive_auth_mirror.rsync import rsync, rsync_multi 9 | 10 | 11 | class RsyncTest(TestCase): 12 | 13 | @mock.patch('subprocess.check_output') 14 | def test_rsync(self, mock_check_output): 15 | """The rsync copies a filesytem tree using rsync.""" 16 | rsync('1.2.3.4', Path('/foo/bar')) 17 | mock_check_output.assert_called_with( 18 | ['/usr/bin/rsync', '-a', '/foo/bar/', '1.2.3.4:/foo/bar/']) 19 | 20 | @mock.patch('subprocess.check_output') 21 | def test_rsync_delete(self, mock_check_output): 22 | """If the delete flag is True, the --delete option is passed.""" 23 | rsync('1.2.3.4', Path('/foo/bar'), delete=True) 24 | mock_check_output.assert_called_with( 25 | ['/usr/bin/rsync', '-a', '--delete', '/foo/bar/', 26 | '1.2.3.4:/foo/bar/']) 27 | 28 | @mock.patch('subprocess.check_output') 29 | def test_rsync_rsh(self, mock_check_output): 30 | """If the rsh flag is not None, the --rsh option is passed.""" 31 | rsync('1.2.3.4', Path('/foo/bar'), rsh='ssh -i my-identity') 32 | mock_check_output.assert_called_with( 33 | ['/usr/bin/rsync', '-a', '--rsh', 'ssh -i my-identity', 34 | '/foo/bar/', '1.2.3.4:/foo/bar/']) 35 | 36 | 37 | class 
RsyncMultiTest(TestWithFixtures): 38 | 39 | def setUp(self): 40 | super().setUp() 41 | self.logger = self.useFixture(LoggerFixture()) 42 | 43 | @mock.patch('subprocess.check_output') 44 | def test_each_host(self, mock_check_output): 45 | """The rsync call is performed for each host.""" 46 | rsync_multi( 47 | ['1.2.3.4', '5.6.7.8'], Path('/foo/bar'), logging.getLogger()) 48 | mock_check_output.assert_has_calls( 49 | [mock.call( 50 | ['/usr/bin/rsync', '-a', '/foo/bar/', '1.2.3.4:/foo/bar/']), 51 | mock.call( 52 | ['/usr/bin/rsync', '-a', '/foo/bar/', '5.6.7.8:/foo/bar/'])]) 53 | 54 | @mock.patch('subprocess.check_output') 55 | def test_log_failures(self, mock_check_output): 56 | """If copy to a host fails, it's logged.""" 57 | 58 | def check_output(cmd): 59 | if cmd[-1].startswith('1.2.3.4:'): 60 | raise CalledProcessError(1, cmd, output='something failed') 61 | 62 | mock_check_output.side_effect = check_output 63 | rsync_multi( 64 | ['1.2.3.4', '5.6.7.8'], Path('/foo/bar'), logging.getLogger()) 65 | self.assertIn( 66 | 'rsync to 1.2.3.4 failed: something failed\n', self.logger.output) 67 | # rsync to the second host is exectuted too 68 | mock_check_output.assert_any_call( 69 | ['/usr/bin/rsync', '-a', '/foo/bar/', '5.6.7.8:/foo/bar/']) 70 | 71 | @mock.patch('subprocess.check_output') 72 | def test_rsh(self, mock_check_output): 73 | """The rsh flag is passed through.""" 74 | rsync_multi( 75 | ['1.2.3.4'], Path('/foo/bar'), logging.getLogger(), 76 | rsh='ssh -i my-identity') 77 | mock_check_output.assert_called_with( 78 | ['/usr/bin/rsync', '-a', '--rsh', 'ssh -i my-identity', 79 | '/foo/bar/', '1.2.3.4:/foo/bar/']) 80 | -------------------------------------------------------------------------------- /unit_tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from unittest import TestCase 3 | 4 | import yaml 5 | 6 | from charmtest import CharmTest 7 | 8 | from archive_auth_mirror.utils 
import get_paths, get_config, update_config 9 | 10 | 11 | class GetPathsTest(TestCase): 12 | 13 | def test_get_paths(self): 14 | """get_paths returns service paths.""" 15 | paths = get_paths() 16 | self.assertEqual( 17 | {'base': Path('/srv/archive-auth-mirror'), 18 | 'cron': Path('/etc/cron.d/archive-auth-mirror'), 19 | 'bin': Path('/srv/archive-auth-mirror/bin'), 20 | 'config': Path('/srv/archive-auth-mirror/config.yaml'), 21 | 'static': Path('/srv/archive-auth-mirror/static'), 22 | 'basic-auth': Path('/srv/archive-auth-mirror/basic-auth'), 23 | 'sign-passphrase': Path( 24 | '/srv/archive-auth-mirror/sign-passphrase'), 25 | 'ssh-key': Path('/srv/archive-auth-mirror/ssh-key'), 26 | 'authorized-keys': Path('/root/.ssh/authorized_keys'), 27 | 'lockfile': Path('/srv/archive-auth-mirror/mirror-archive.lock'), 28 | 'reprepro': Path('/srv/archive-auth-mirror/reprepro'), 29 | 'reprepro-conf': Path('/srv/archive-auth-mirror/reprepro/conf'), 30 | 'gnupghome': Path('/srv/archive-auth-mirror/reprepro/.gnupg')}, 31 | paths) 32 | 33 | 34 | class GetConfigTest(CharmTest): 35 | 36 | def test_config_not_found(self): 37 | """If the config file is not found, get_config returns {}.""" 38 | self.assertEqual({}, get_config(config_path=Path('/not/here'))) 39 | 40 | def test_load_config(self): 41 | """If the file is found, it's content is returned as YAML.""" 42 | tempdir = Path(self.fakes.fs.root.path) 43 | config_path = tempdir / 'config.yaml' 44 | 45 | config = {'value1': 30, 'value2': 'foo'} 46 | with config_path.open('w') as fh: 47 | yaml.dump(config, stream=fh) 48 | self.assertEqual(config, get_config(config_path=config_path)) 49 | 50 | 51 | class UpdateConfigTest(CharmTest): 52 | 53 | def setUp(self): 54 | super().setUp() 55 | self.config_path = Path(self.fakes.fs.root.path) / 'config.yaml' 56 | 57 | def test_no_config(self): 58 | """update_config creates the config file if not present.""" 59 | update_config(config_path=self.config_path, suites=['bionic']) 60 | 
self.assertTrue(self.config_path.exists()) 61 | config = get_config(config_path=self.config_path) 62 | self.assertEqual(config, {'suites': ['bionic']}) 63 | 64 | def test_update_existing(self): 65 | """update_config updates the config file if it exists.""" 66 | update_config( 67 | config_path=self.config_path, suites=['xenial', 'bionic']) 68 | update_config( 69 | config_path=self.config_path, 70 | sign_key_id='AABBCC', 71 | packages_require_auth=True, 72 | ) 73 | config = get_config(config_path=self.config_path) 74 | self.assertEqual(config, { 75 | 'packages-require-auth': True, 76 | 'sign-key-id': 'AABBCC', 77 | 'suites': ['xenial', 'bionic'], 78 | }) 79 | 80 | def test_update_ssh_peers(self): 81 | """update_config adds new ssh-peers.""" 82 | update_config( 83 | config_path=self.config_path, new_ssh_peers={'1.2.3.4': 'aabb'}) 84 | update_config( 85 | config_path=self.config_path, new_ssh_peers={'5.6.7.8': 'ccdd'}) 86 | self.assertEqual( 87 | {'ssh-peers': { 88 | '1.2.3.4': 'aabb', 89 | '5.6.7.8': 'ccdd'}}, 90 | get_config(config_path=self.config_path)) 91 | -------------------------------------------------------------------------------- /unit_tests/test_mirror.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import yaml 4 | 5 | from archive_auth_mirror.mirror import ( 6 | from_config, 7 | Mirror, 8 | ) 9 | 10 | 11 | class KeyRing(object): 12 | """A key ring used for tests.""" 13 | 14 | def __init__(self, bad=False): 15 | self._bad = bad 16 | 17 | def import_key(self, key): 18 | if self._bad: 19 | raise TypeError('bad wolf') 20 | return key + '-fingerprint' 21 | 22 | 23 | class TestFromConfig(unittest.TestCase): 24 | 25 | tests = [{ 26 | 'about': 'invalid yaml', 27 | 'mirrors': ':', 28 | 'want_error': 'cannot YAML decode mirrors value: ', 29 | }, { 30 | 'about': 'invalid mirrors', 31 | 'mirrors': '42', 32 | 'want_error': 'mirrors value is not a list', 33 | }, { 34 | 'about': 'invalid entry in mirrors', 
35 | 'mirrors': '["bad wolf"]', 36 | 'want_error': 'mirrors value is not a list of maps', 37 | }, { 38 | 'about': 'missing deb-line', 39 | 'mirrors': yaml.safe_dump([{ 40 | 'pub-key': 'pub', 41 | }]), 42 | 'want_error': "mirrors value is missing keys: 'deb-line'", 43 | }, { 44 | 'about': 'missing pub-key', 45 | 'mirrors': yaml.safe_dump([{ 46 | 'deb-line': 'https://user:pass@1.2.3.4/ubuntu bionic main misc', 47 | }]), 48 | 'want_error': "mirrors value is missing keys: 'pub-key'", 49 | }, { 50 | 'about': 'invalid deb-line', 51 | 'mirrors': yaml.safe_dump([{ 52 | 'deb-line': 'https://user:pass@1.2.3.4/ubuntu', 53 | 'pub-key': 'pub', 54 | }]), 55 | 'want_error': "invalid debline 'https://user:pass@1.2.3.4/ubuntu'", 56 | }, { 57 | 'about': 'invalid GPG key', 58 | 'keyring': KeyRing(bad=True), 59 | 'mirrors': yaml.safe_dump([{ 60 | 'deb-line': 'https://user:pass@1.2.3.4/ubuntu bionic main misc', 61 | 'pub-key': 'pub', 62 | }]), 63 | 'want_error': "cannot import GPG public key 'pub': bad wolf", 64 | }, { 65 | 'about': 'success', 66 | 'keyring': KeyRing(), 67 | 'mirrors': yaml.safe_dump([{ 68 | 'deb-line': 'https://user:pass@1.2.3.4/ubuntu bionic main misc', 69 | 'pub-key': 'pub1', 70 | 'version': '18.10', 71 | 'suite': 'bionic-updates', 72 | }, { 73 | 'deb-line': 'https://user:pass@4.3.2.1/ubuntu xenial main', 74 | 'pub-key': 'pub2', 75 | 'archs': 'i386', 76 | }]), 77 | 'want_results': ( 78 | Mirror( 79 | url='https://user:pass@1.2.3.4/ubuntu', 80 | remote_suite='bionic', 81 | components='main misc', 82 | key='pub1-fingerprint', 83 | archs='source i386 amd64', 84 | version='18.10', 85 | origin='Ubuntu', 86 | local_suite='bionic-updates', 87 | ), 88 | Mirror( 89 | url='https://user:pass@4.3.2.1/ubuntu', 90 | remote_suite='xenial', 91 | components='main', 92 | key='pub2-fingerprint', 93 | archs='i386', 94 | version='', 95 | origin='Ubuntu', 96 | local_suite='xenial', 97 | ), 98 | ), 99 | }, { 100 | 'about': 'success with single mirror and different origin', 101 | 
'keyring': KeyRing(), 102 | 'mirrors': yaml.safe_dump([{ 103 | 'deb-line': 'https://user:pass@1.2.3.4/ubuntu bionic main misc', 104 | 'pub-key': 'pub1', 105 | 'version': '18.10', 106 | }]), 107 | 'origin': 'Gallifrey', 108 | 'want_results': ( 109 | Mirror( 110 | url='https://user:pass@1.2.3.4/ubuntu', 111 | remote_suite='bionic', 112 | components='main misc', 113 | key='pub1-fingerprint', 114 | archs='source i386 amd64', 115 | version='18.10', 116 | origin='Gallifrey', 117 | local_suite='bionic', 118 | ), 119 | ), 120 | }] 121 | 122 | def test_from_config(self): 123 | """Mirrors are generated from the config content.""" 124 | for test in self.tests: 125 | with self.subTest(test['about']): 126 | self.check( 127 | test.get('keyring'), 128 | test['mirrors'], 129 | test.get('origin', 'Ubuntu'), 130 | test.get('want_results'), 131 | test.get('want_error'), 132 | ) 133 | 134 | def check(self, keyring, mirrors, origin, want_results, want_error): 135 | if want_error: 136 | with self.assertRaises(ValueError) as ctx: 137 | from_config(keyring, mirrors, origin) 138 | self.assertIn(want_error, str(ctx.exception)) 139 | return 140 | results = from_config(keyring, mirrors, origin) 141 | self.assertEqual(results, want_results) 142 | -------------------------------------------------------------------------------- /reactive/archive_auth_mirror.py: -------------------------------------------------------------------------------- 1 | """Install and configure archive-auth-mirror to mirror an Ubuntu repository.""" 2 | 3 | from charmhelpers.core import hookenv 4 | from charms.layer.nginx import configure_site 5 | from charms.reactive import ( 6 | clear_flag, 7 | only_once, 8 | set_flag, 9 | when, 10 | when_not, 11 | ) 12 | 13 | from charms.archive_auth_mirror import ( 14 | repository, 15 | setup, 16 | ) 17 | from archive_auth_mirror import ( 18 | cron, 19 | gpg, 20 | mirror, 21 | ssh, 22 | utils, 23 | ) 24 | 25 | 26 | # Define options requiring an nginx reconfiguration. 
# Configuration options whose change requires re-rendering the nginx
# virtualhost configuration.
NGINX_OPTIONS = (
    'auth-cache-duration',
    'auth-cache-enabled',
    'auth-cache-inactivity',
    'resource-name',
)


def charm_flag(flag):
    """Return the namespaced reactive flag name for this charm."""
    return 'archive-auth-mirror.' + flag


@when_not(charm_flag('installed'))
def install():
    """Install filesystem resources on the unit, once."""
    setup.install_resources()
    set_flag(charm_flag('installed'))


@when(charm_flag('installed'))
@only_once
def create_ssh_key():
    """Generate the unit SSH key, unless one already exists."""
    key_path = utils.get_paths()['ssh-key']
    if key_path.exists():
        # only_once doesn't protect the handler from running if the line in
        # source code changes (so it can run again in an upgrade-charm hook);
        # an existing key must not be clobbered in that case.
        return
    ssh.create_key(key_path)


@when('basic-auth-check.joined')
def reset_static_service(basic_auth_check):
    """Force reconfiguration of the static serve when a backend joins."""
    clear_flag(charm_flag('static-serve.configured'))


@when(charm_flag('installed'), 'nginx.available', 'basic-auth-check.changed')
@when_not(charm_flag('static-serve.configured'))
def configure_static_service(basic_auth_check):
    """Configure the static serve using backends from the auth relation."""
    _configure_static_serve(auth_backends=basic_auth_check.backends())
    set_flag(charm_flag('static-serve.configured'))


@when(charm_flag('installed'), 'nginx.available')
@when_not(charm_flag('static-serve.configured'))
def configure_static_service_no_basic_auth_check():
    """Configure the static serve when no auth backend is related."""
    _configure_static_serve(auth_backends=[])
    set_flag(charm_flag('static-serve.configured'))


@when(charm_flag('installed'))
@when('nginx.available', 'website.available')
def configure_website(website):
    """Advertise the configured port on the website relation."""
    website.configure(port=hookenv.config()['port'])


@when_not('ssh-peers.local-public-key')
@when('ssh-peers.connected')
def set_ssh_key(ssh_keys):
    """Publish this unit's public SSH key to its peers."""
    pub_key_path = str(utils.get_paths()['ssh-key']) + '.pub'
    with open(pub_key_path) as pub_key_file:
        ssh_keys.set_local_public_key(pub_key_file.read())


@when('ssh-peers.new-remote-public-key')
def add_authorized_key(ssh_keys):
    """Authorize a peer's SSH key and record the peer in the config."""
    peer_key = ssh_keys.get_remote('public-ssh-key')
    hookenv.log("Adding key: " + peer_key)
    paths = utils.get_paths()
    ssh.add_authorized_key(peer_key, paths['authorized-keys'])
    utils.update_config(
        config_path=paths['config'],
        new_ssh_peers={ssh_keys.get_remote('private-address'): peer_key})
    ssh_keys.remove_state(ssh_keys.states.new_remote_public_key)


def _nginx_options_changed(cfg):
    """Report whether any nginx-related config option changed."""
    return any(cfg.changed(option) for option in NGINX_OPTIONS)


@when(charm_flag('static-serve.configured'), 'config.changed')
@when('basic-auth-check.available')
def config_changed_basic_auth(basic_auth_check):
    """Re-render the static serve on config change (auth backends present)."""
    if _nginx_options_changed(hookenv.config()):
        _configure_static_serve(auth_backends=basic_auth_check.backends())


@when(charm_flag('static-serve.configured'), 'config.changed')
@when_not('basic-auth-check.available')
def config_changed_no_basic_auth():
    """Re-render the static serve on config change (no auth backends)."""
    if _nginx_options_changed(hookenv.config()):
        _configure_static_serve(auth_backends=[])


@when(charm_flag('static-serve.configured'), 'basic-auth-check.changed')
def config_basic_auth_check_changed(basic_auth_check):
    """Re-render the static serve when the auth backends change."""
    _configure_static_serve(auth_backends=basic_auth_check.backends())


@when_not('basic-auth-check.available')
@when('basic-auth-check.changed')
def config_basic_auth_check_removed(basic_auth_check):
    """Drop auth backends from the static serve when the relation goes away."""
    _configure_static_serve(auth_backends=[])


@when(charm_flag('installed'), 'config.changed')
def config_set():
    """Validate charm config and (re)configure repository mirroring."""
    config = hookenv.config()
    missing_options = setup.missing_options(config)
    if missing_options:
        hookenv.status_set(
            'blocked',
            'Mirroring is disabled as some configuration options are missing: '
            '{}'.format(', '.join(missing_options)))
        return

    # Configure mirroring.
    keyring = gpg.KeyRing()
    mirrors = mirror.from_config(
        keyring, config['mirrors'], config['repository-origin'].strip())
    sign_key_fingerprint = keyring.import_key(config['sign-gpg-key'])
    sign_key_passphrase = config.get('sign-gpg-passphrase', '').strip()
    repository.configure_reprepro(
        mirrors, sign_key_fingerprint, sign_key_passphrase)
    # Export the public key used to sign the repository.
    _export_sign_key(sign_key_fingerprint)
    hookenv.status_set('active', 'Mirroring configured')
    # Update scripts config.
    utils.update_config(
        config_path=utils.get_paths()['config'],
        packages_require_auth=config['packages-require-auth'])


@when_not('config.set.mirrors', 'config.set.sign-gpg-key')
def config_not_set():
    """Disable mirroring while required config options are unset."""
    repository.disable_mirroring()
    hookenv.status_set(
        'blocked', 'Not all required configs set, mirroring is disabled')


@when_not(charm_flag('job.enabled'))
@when('leadership.is_leader')
def install_cron():
    """Enable the mirroring cron job on the leader unit."""
    cron.install_crontab()
    set_flag(charm_flag('job.enabled'))


@when_not('leadership.is_leader')
@when(charm_flag('job.enabled'))
def remove_cron():
    """Disable the mirroring cron job when leadership is lost."""
    cron.remove_crontab()
    clear_flag(charm_flag('job.enabled'))


def _configure_static_serve(auth_backends=None):
    """Render the nginx configuration for the static file serve."""
    cfg = hookenv.config()
    vhost_config = setup.get_virtualhost_config(
        auth_backends, cfg['resource-name'], cfg['auth-cache-enabled'],
        cfg['auth-cache-duration'], cfg['auth-cache-inactivity'])
    configure_site('archive-auth-mirror', 'nginx-static.j2', **vhost_config)


def _export_sign_key(key_id):
    """Export the repository signing public key under the static serve."""
    target = utils.get_paths()['static'] / 'key.asc'
    gpg.export_public_key(key_id, target)
import tempfile
import textwrap
from unittest import mock
from pathlib import Path

import yaml

from charmtest import CharmTest

from archive_auth_mirror.utils import get_paths
from archive_auth_mirror.scripts.reprepro_sign_helper import patch_release_file
from archive_auth_mirror.mirror import Mirror
from charms.archive_auth_mirror.repository import (
    configure_reprepro,
    disable_mirroring,
)


def make_reprepro_files(root_dir, mirrors):
    """A tiny wrapper around configure_reprepro with testing paths.

    Patch get_paths so all files are written under root_dir, and return
    the resulting paths mapping.
    """
    paths = get_paths(root_dir=Path(root_dir))
    with mock.patch(
        'charms.archive_auth_mirror.repository.get_paths', return_value=paths
    ):
        configure_reprepro(mirrors, sign_key_fingerprint, sign_key_passphrase)
    return paths


class ConfigureRepreproTest(CharmTest):

    def test_configuration_files(self):
        """configure_reprepro writes reprepro config files."""
        paths = make_reprepro_files(self.fakes.fs.root.path, mirrors)
        self.assertEqual(
            textwrap.dedent(
                '''\
                Codename: xenial-updates
                Suite: xenial-updates
                Version: 18.10
                Label: Ubuntu
                Origin: Ubuntu
                Components: main universe
                Architectures: source i386 amd64
                SignWith: ! {bin}/reprepro-sign-helper
                Update: update-repo-xenial-updates

                Codename: sid-security
                Suite: sid-security
                Label: Debian
                Origin: Debian
                Components: multiverse
                Architectures: source
                SignWith: ! {bin}/reprepro-sign-helper
                Update: update-repo-sid-security

                '''.format(**paths)),
            (paths['reprepro-conf'] / 'distributions').read_text())
        self.assertEqual(
            textwrap.dedent(
                '''\
                Name: update-repo-xenial-updates
                Method: https://user:pass@example.com/ubuntu
                Suite: xenial
                Components: main universe
                Architectures: source i386 amd64
                VerifyRelease: finger

                Name: update-repo-sid-security
                Method: https://user:pass@1.2.3.4/debian
                Suite: sid
                Components: multiverse
                Architectures: source
                VerifyRelease: finger

                '''),
            (paths['reprepro-conf'] / 'updates').read_text())
        # safe_load instead of the deprecated plain yaml.load (which needs an
        # explicit Loader since PyYAML 5.1 and is unsafe by default before).
        self.assertEqual(
            yaml.safe_load(paths['config'].read_text()), {
                'sign-key-id': 'finger',
                'suites': ['xenial-updates', 'sid-security'],
            })
        with paths['sign-passphrase'].open() as f:
            self.assertEqual(f.read(), 'secret')
class PatchReleaseFileTest(CharmTest):

    def make_release_file(self, codename):
        """Write a minimal Release file for tests and return its path."""
        with tempfile.NamedTemporaryFile('w', delete=False) as release:
            release.write('Codename: {}\n'.format(codename))
            release.write('Origin: anOrigin\n')
            release.write('MD5Sum: aSum\n')
        path = Path(release.name)
        self.addCleanup(path.unlink)
        return path

    def test_with_authorization(self):
        """The auth field is inserted before MD5Sum when auth is required."""
        release_path = self.make_release_file('xenial')
        patch_release_file(release_path, True)
        self.assertEqual(
            release_path.read_text(),
            'Codename: xenial\n'
            'Origin: anOrigin\n'
            'Packages-Require-Authorization: yes\n'
            'MD5Sum: aSum\n'
        )

    def test_without_authorization(self):
        """No auth field is added when authorization is not required."""
        release_path = self.make_release_file('trusty')
        patch_release_file(release_path, False)
        self.assertEqual(
            release_path.read_text(),
            'Codename: trusty\n'
            'Origin: anOrigin\n'
            'MD5Sum: aSum\n'
        )

    def test_with_two_words_suite(self):
        """Only the first dash-separated word of the codename is kept."""
        release_path = self.make_release_file('xenial-updates')
        patch_release_file(release_path, True)
        self.assertEqual(
            release_path.read_text(),
            'Codename: xenial\n'
            'Origin: anOrigin\n'
            'Packages-Require-Authorization: yes\n'
            'MD5Sum: aSum\n'
        )

    def test_with_three_words_suite(self):
        """A codename with multiple dashes is also reduced to its first word."""
        release_path = self.make_release_file('bionic-foo-bar')
        patch_release_file(release_path, False)
        self.assertEqual(
            release_path.read_text(),
            'Codename: bionic\n'
            'Origin: anOrigin\n'
            'MD5Sum: aSum\n'
        )


class DisableMirroringTest(CharmTest):

    def test_disable_mirroring(self):
        """disable_mirroring renames the script config file."""
        paths = make_reprepro_files(self.fakes.fs.root.path, mirrors)
        config = paths['config']
        self.assertTrue(config.exists())
        orig_content = config.read_text()

        disable_mirroring(get_paths=lambda: paths)

        self.assertFalse(config.exists())
        # The file is moved to .disabled
        disabled = config.with_suffix('.disabled')
        self.assertTrue(disabled.exists())
        self.assertEqual(orig_content, disabled.read_text())

    def test_disable_not_enabled(self):
        """Disabling mirror when not configured is a no-op."""
        paths = get_paths(root_dir=Path(self.fakes.fs.root.path))
        config = paths['config']
        disable_mirroring(get_paths=lambda: paths)
        self.assertFalse(config.exists())
        self.assertFalse(config.with_suffix('.disabled').exists())

    def test_disable_twice(self):
        """disable_mirroring can be called multiple times."""
        paths = get_paths(root_dir=Path(self.fakes.fs.root.path))
        config = paths['config']
        disable_mirroring(get_paths=lambda: paths)
        disable_mirroring(get_paths=lambda: paths)
        self.assertFalse(config.exists())
        self.assertFalse(config.with_suffix('.disabled').exists())


# Shared fixtures: two mirrors covering both the versioned/renamed-suite and
# the minimal configuration cases.
mirrors = (
    Mirror(
        url='https://user:pass@example.com/ubuntu',
        remote_suite='xenial',
        components='main universe',
        key='finger',
        archs='source i386 amd64',
        version='18.10',
        origin='Ubuntu',
        local_suite='xenial-updates',
    ),
    Mirror(
        url='https://user:pass@1.2.3.4/debian',
        remote_suite='sid',
        components='multiverse',
        key='finger',
        archs='source',
        version='',
        origin='Debian',
        local_suite='sid-security',
    ),
)
sign_key_fingerprint, sign_key_passphrase = 'finger', 'secret'
from unittest import TestCase, mock
import os
from pathlib import Path

from testtools.matchers import (
    FileContains,
    DirContains,
    Contains,
)

from charmtest import CharmTest

from charms.archive_auth_mirror.setup import (
    create_script_file,
    get_virtualhost_name,
    get_virtualhost_config,
    install_resources,
    missing_options,
)

from fakes import FakeHookEnv


class GetVirtualhostNameTest(TestCase):

    def test_get_no_config(self):
        """If the 'service-url' config is not set, the unit IP is returned."""
        fake_env = FakeHookEnv()
        self.assertEqual('1.2.3.4', get_virtualhost_name(hookenv=fake_env))

    def test_get_with_config(self):
        """If the 'service-url' config is set, it's used as virtualhost."""
        fake_env = FakeHookEnv(config={'service-url': 'example.com'})
        self.assertEqual(
            'example.com', get_virtualhost_name(hookenv=fake_env))


class GetVirtualhostConfigTest(CharmTest):

    def _expected_config(self, **overrides):
        """Return the baseline expected vhost config, with overrides applied."""
        expected = {
            'domain': '1.2.3.4',
            'document_root': '/srv/archive-auth-mirror/static',
            'auth_backends': [],
            'auth_cache_enabled': False,
            'auth_cache_duration': "",
            'auth_cache_inactivity': "",
            'basic_auth_file': '/srv/archive-auth-mirror/basic-auth',
            'resource_name': 'esm',
        }
        expected.update(overrides)
        return expected

    def test_virtualhost_config(self):
        """get_virtualhost_config returns the config for the virtualhost."""
        config = get_virtualhost_config(
            [], 'esm', False, "", "", hookenv=FakeHookEnv())
        self.assertEqual(self._expected_config(), config)

    def test_virtualhost_config_auth_backends(self):
        """If backends are passed, they're included in the vhost config."""
        backends = [('1.2.3.4', '8080'), ('5.6.7.8', '9090')]
        config = get_virtualhost_config(
            backends, 'fips', False, "", "", hookenv=FakeHookEnv())
        self.assertEqual(
            self._expected_config(
                auth_backends=backends, resource_name='fips'),
            config)

    def test_virtualhost_config_auth_cache_time(self):
        """If caching time is passed, it's included in the vhost config."""
        config = get_virtualhost_config(
            [], 'esm-apps', True, "1h", "5m", hookenv=FakeHookEnv())
        self.assertEqual(
            self._expected_config(
                auth_cache_enabled=True,
                auth_cache_duration="1h",
                auth_cache_inactivity="5m",
                resource_name='esm-apps'),
            config)
class InstallResourcesTests(CharmTest):

    def setUp(self):
        super().setUp()
        self.root_dir = self.fakes.fs.root
        os.makedirs(self.root_dir.join('etc/cron.d'))

        # Stub out ownership-related syscalls so the tests can run
        # unprivileged.
        patcher_chown = mock.patch('os.chown')
        patcher_chown.start()
        self.addCleanup(patcher_chown.stop)

        patcher_pwnam = mock.patch('pwd.getpwnam')
        mock_pwnam = patcher_pwnam.start()
        mock_pwnam.return_value.pw_uid = 0
        self.addCleanup(patcher_pwnam.stop)

        patcher_grnam = mock.patch('grp.getgrnam')
        mock_grnam = patcher_grnam.start()

        def getgrnam(group):
            # Give www-data a distinctive gid so group ownership can be
            # asserted in the tests below.
            gr_gid = 123 if group == 'www-data' else 0
            return mock.MagicMock(gr_gid=gr_gid)

        mock_grnam.side_effect = getgrnam
        self.addCleanup(patcher_grnam.stop)

        patcher_fchown = mock.patch('os.fchown')
        self.mock_fchown = patcher_fchown.start()
        self.addCleanup(patcher_fchown.stop)

    def test_tree(self):
        """The install_resources function creates the filesystem structure."""
        install_resources(root_dir=Path(self.root_dir.path))
        paths = ['basic-auth', 'bin', 'static', 'reprepro', 'sign-passphrase']
        self.assertThat(
            self.root_dir.join('srv/archive-auth-mirror'), DirContains(paths))

    def test_resources(self):
        """Resources from the charm are copied to the service tree."""
        install_resources(root_dir=Path(self.root_dir.path))
        self.assertThat(
            self.root_dir.join('srv/archive-auth-mirror/bin/mirror-archive'),
            FileContains(matcher=Contains("import mirror_archive")))
        self.assertThat(
            self.root_dir.join('srv/archive-auth-mirror/bin/manage-user'),
            FileContains(matcher=Contains("import manage_user")))
        sign_script_path = 'srv/archive-auth-mirror/bin/reprepro-sign-helper'
        self.assertThat(
            self.root_dir.join(sign_script_path),
            FileContains(matcher=Contains("import reprepro_sign_helper")))
        # The basic-auth file must not be world-readable (mode 640).
        auth_file = self.root_dir.join('srv/archive-auth-mirror/basic-auth')
        self.assertEqual(0o100640, os.stat(auth_file).st_mode)

    def test_basic_auth_file_owner(self):
        """The basic-auth file is group-owned by www-data."""
        install_resources(root_dir=Path(self.root_dir.path))
        # the file ownership is changed to the gid for www-data
        self.mock_fchown.assert_any_call(mock.ANY, 0, 123)


class CreateScriptFileTest(CharmTest):

    # Renamed from test_crate_script_file: fixed the "crate" typo so the
    # test name matches the function under test.
    def test_create_script_file(self):
        """create_script_file renders a python script file."""
        bindir = Path(self.fakes.fs.root.path)
        create_script_file('foo', bindir)
        script = bindir / 'foo'
        content = script.read_text()

        shebang = '#!{}/.venv/bin/python3\n'.format(Path.cwd().parent)
        self.assertTrue(content.startswith(shebang))
        self.assertIn('from archive_auth_mirror.scripts import foo', content)
        self.assertIn('foo.main()', content)
        self.assertEqual(0o100755, script.stat().st_mode)


class MissingOptionsTest(TestCase):

    def test_all_options(self):
        """If all required options are set, an empty list is returned."""
        config = {
            'mirrors': 'some mirrors',
            'repository-origin': 'Ubuntu',
            'sign-gpg-key': 'mykey',
        }
        self.assertEqual(missing_options(config), [])

    def test_option_not_present(self):
        """Names of options not included are returned."""
        # no repository-origin
        config = {
            'mirrors': 'some mirrors',
            'sign-gpg-key': 'mykey',
        }
        self.assertEqual(missing_options(config), ['repository-origin'])

    def test_option_none(self):
        """Names of None options are returned."""
        # mirrors and sign-gpg-key are set but None
        config = {
            'mirrors': None,
            'repository-origin': 'Ubuntu',
            'sign-gpg-key': None,
        }
        missing = sorted(missing_options(config))
        self.assertEqual(list(missing), ['mirrors', 'sign-gpg-key'])

    def test_option_empty_string(self):
        """Names of empty options are returned."""
        # sign-gpg-key is set but empty
        config = {
            'mirrors': 'some mirrors',
            'repository-origin': 'Ubuntu',
            'sign-gpg-key': '',
        }
        self.assertEqual(missing_options(config), ['sign-gpg-key'])
217 | } 218 | self.assertEqual(missing_options(config), ['sign-gpg-key']) 219 | -------------------------------------------------------------------------------- /unit_tests/test_gpg.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from unittest import mock 3 | 4 | from charmtest import CharmTest 5 | 6 | from archive_auth_mirror.utils import get_paths 7 | from archive_auth_mirror.gpg import ( 8 | detach_sign, 9 | export_public_key, 10 | KeyRing, 11 | inline_sign, 12 | ) 13 | 14 | 15 | PUBLIC_KEY_FINGERPRINT = 'E275C4776E00A6BAED081A97DE2922ADDC4EDFF2' 16 | PUBLIC_KEY_MATERIAL = ''' 17 | -----BEGIN PGP PUBLIC KEY BLOCK----- 18 | 19 | mQENBFjYx6EBCADPQeJWE7247I7YVHQPgYsK9OtuHJmRGsJ6YUNOk+p3pX+r9r4d 20 | CwBHoQcRJu+WAucsC09ow4o8CxwsdFdQ2RgFSioyGivoTR5Qwf3DUPBwgWJGRKLU 21 | ajrrhL/t5SaYgIppZRSvTy9tt/SEa0LdUsdK2hLMWL6UDalJgdzKOTerE9MLuyVK 22 | ygW4qld7gLYCyadOS3rhO5/+ygzy13jAMCpIGF0j0VVqmImJEtd4x0d2RPfOe2jO 23 | xVVCdGicH7YPorycKgRY1jVU5bqb+4nvfd2Efx5zGAwENeF04mTEflC/zYBO1HaW 24 | sHU2z2YIoyxCoxWQtUGVOCD5Q61Rhhst6LkDABEBAAG0ClNhbXBsZSBrZXmJATcE 25 | EwEIACEFAljYx6ECGwMFCwkIBwIGFQgJCgsCBBYCAwECHgECF4AACgkQ3ikirdxO 26 | 3/KKOgf/fFJ62OBid/L47g4YN6TbIS9fQvzmU/ouveRJvRNGXJEr1ZxA+uGj4S0v 27 | vfzmIBWmaQ89pd3hCwrhWq0enX1e7LChv1WBI76j1PSeSBYdJ7xp6W0Di05nQG9l 28 | 156FckZgYCAPE0cDDnFleL0F/6VkqXxWjq7H5nsBxP7VxuwHBYUdrzMztYkXLOf6 29 | AKmVHRD6E3kdNSMnIGNTPa+VAVRiyqOq0/Jtbb82tbdme8Ra31ts2ZtzJ5CURWRd 30 | Wv0xp5cTkxLhC5LyZk2j93kQCxhV09aJqijTicVtdz+Bxd4PLbkYN4omBUE0r9HR 31 | WoWaHrg8YaU9w+hl3hBRiqpCD9/cDLkBDQRY2MehAQgAz+kIRwGd1m8wmTAmlqMo 32 | BO1iY8eJ6bckD54Uik9Bx3rcJNacxk5kLNi/ZWHaqtnm7DiIQiLbHymi0vqjF4IB 33 | 8ZfnFh1veJ2PTwC7oXqSejyTcv4uIjGjLipcaIRTjNIG+jevxyLMpbskAZDLaSHv 34 | HP50vrBe1r1VPzOuE0JZS8wSccoaYRR+O6qKWwTaIZv5pz8JiGHP8D+9lJyyGKqp 35 | zGyi1tPuXcUkfjrqlUHdq/rCXxORXtUM30zEKyFyid/gmyuS6Q9hIU5AaNgYITYj 36 | jBFRaO9JiNpVerHfsLfhWw1bx/gsRzDAg93V+DhnkVT4eClL2weasRw+4U5V1Pas 37 | 
pQARAQABiQEfBBgBCAAJBQJY2MehAhsMAAoJEN4pIq3cTt/ypakH/0yBmWgaA+bP 38 | 5ArgP//KkWRnmIeeO+gp/Rp4XXb18V6h8DmLiY+nMHRdYslm8SXBFjOlfbdRKIdX 39 | FxnhkRt8I6WSgsYk/90Abqr2cdblnKz7igFN7jdTvUI7CYlEIr+DR9sAKIaW8lmP 40 | wL+5P/+r3g8M1BJ9lyDDdTPgI6PIqABpQIRex+FZETwP0fdIuC6AGskv0lNxN7OK 41 | jQugeKt5fEyoxL/ncV1HBW2rHZgS6caLZ9rrA2bhngftH57DL9prQT+L6Is+QJar 42 | X7j0r703KfTUogVbFc10dgSHEGvgIiKiuqM4dSqvmH8gIGu5b1lG7CrqWh9lHisf 43 | E2Te61Dxbmc= 44 | =RGEC 45 | -----END PGP PUBLIC KEY BLOCK----- 46 | ''' 47 | 48 | SECRET_KEY_FINGERPRINT = '838F411018AB1C7408B394F8964458199BF027F5' 49 | SECRET_KEY_MATERIAL = ''' 50 | -----BEGIN PGP PRIVATE KEY BLOCK----- 51 | 52 | lQOYBFjYyAkBCACoPWNx6eoSl9dzaZyXwyj2lgP/oEJs11nvhLL6MGTbBOE40ucT 53 | qtMXr2X96F/Cs3E82l4T3aaBqPSMHYFAkcmtUj+gfJUZlccJvYhZhOxKIyNfzrtf 54 | k/aI8Jv4kFs4QmZo20S7VgJD1Oe6nVFPpnixW4zncFBqgbQHqpgkoIIBy3FlG99q 55 | iZ/1ikwC+SG41lFfF8LdljDj//6YJ6KKToju4BqwH3t1z49T7okc+UDZqkHRErMU 56 | a2wJvyBXZFn+UwhJbVFdTS6+lkaESyfC/zBd+yrBqBZvCgaJCGd/W1aRLzEO5lHy 57 | TDv4J1qXBhPZcFe6/DV3rWtshHAowhLc/S9BABEBAAEAB/sF0NVytyTVzrTucuys 58 | XF06pPXvbMFPFOSxgHNVbb3WymTku3msdt/ENlZ+v/0rdFuKQHw3EJb0bXxCqbRx 59 | oHiJysmTSSs4TrKqNgiUG6G1cGCFK9bTV4CSvEqP/aGBoN38avQFy9PZN29pRo3s 60 | hHMwolsNFxdYjzJDf5Sx03CbEjh2mj3mYYoTNgaGWhGZZQ073aVSKq6qWPQ38PdA 61 | C2CvSLVi8zbihu6Sp1DMT1BDSTrOWXS54xwkxvp+ZbJbDxPoCrKEZ9wngRJdiRZj 62 | Ni7elFFQNvZSFd5zOjEjQzFUwneg6Ef+Q0PzZj0GusFn3GtwKSrfgUxLNqKDRSEe 63 | RL8JBADIvvvWC2gDAdBelvpbtX0XX0mx53qco8LPiS/l0Ph+7VPXO8HpFw7OVsHs 64 | fCrOTCoE2BdEEuvcNEeMs2OlqrZ15b5SSuSUraKwuPndH3aBp667YIqLJehlSsCc 65 | D3U4dgNLjGS7LtMpDuYyT5MMr9V9i75w0BUJ3TfKtFUgDgYclQQA1ovxT6SwcwBC 66 | AivNDy8s3BsqFUbJ9L++yV7zTwAdoGPQGa7xu4f2P2zxdk6kr54c2Ef+3x5x1DYR 67 | VpQa1c1RxStJAf+NNPJ4/t8ZBs4u5TRie2j+hZLmBMjqIitT1FKtSxxAgzWvoOBO 68 | YvxizBtxdfHtXYX1ZvuBG+OLW6B7MP0D/RVHSvbCGKV09XT1yuLl9hy7/LVRpuXh 69 | /M9kxmn8UIhz34wWC6bT2Sk9656jfAzuYF/dAulD55ROD+3Nhxvh+/H0/MNoek7T 70 | cMz8EZEFgIHs1Wi9g3BUqmFt7XziuQY1zqUdEUWI+aj5e187Vr3LTtrJnKHvuUp7 71 | 
VrsqbsGkr9KsSrq0ClNlY3JldCBrZXmJATcEEwEIACEFAljYyAkCGwMFCwkIBwIG 72 | FQgJCgsCBBYCAwECHgECF4AACgkQlkRYGZvwJ/XwPgf+MPyNpoCZS9JFqfVvnbW7 73 | MgHPijd1bxKbfzBb0I9Fr3DzDugxC11oDd+/Amo4iSWVaKJprYD+FaEmlZT3ku2i 74 | B+AchezOpq1Kprcw9srZVjUSFdgA0SR8AX4m0vNQnRZo0Y3rb731cHuae5wvF7+/ 75 | j7r1tKJhr3QFM5HAH1pbyq+0oI3QEQ7fLx+2CcAjOkhkqrKVVHeO+zbgMcxBOFKn 76 | 6w9FpH6+JEMo969i1wQUjwCAxMvOF5vK1aL0XLHJMfMJ/7mjDcxGeCu2deVtWLH/ 77 | EXed7tmrQ4rPSfcdZJ33AHmAlBnIzNkg5qGYgI1X1JfIh0tQPQwpJ717LqlDfUaL 78 | 7Z0DmARY2MgJAQgAr7AZ73tV/wTzWDoQcvxru3dKtsGLjbza9WV6tQqEcqVNMDKX 79 | 0/7sU9g+Kr/+sK8S4iha/i0okljLn0ulewm5Osfxf3MwZR/YJTy+DVM7S6dnqWsa 80 | W1TeT126cN61xPsmFb+/6Lb4XSTDdyP3KSIe341ymCGX+n10pgbC1y6oYQyR0ll2 81 | 3B9e2e7VsX7ymeBs7BjCEOE2Cr2cXTKjZ9+StUo9sczykUYAG3ZlC9CEv0enWj9E 82 | clqQY3BWwOE+ACzfPLPIFeyJknl2qfT7VWa9m6OEfSj0Ju1bj3RiWbk1vvP/g8Jc 83 | o466brsd1Pe2Fzx5JWydRuTel29e9YawKu8itwARAQABAAf6A25tE0BVbaWrGw4H 84 | RDep4v9xdiBRmXMW07QnsWGDNLoFx+s1C+7urrSKgks9rjW9KK2hGVXIdRNG5tWT 85 | ZdPKylsdXF/jkhYNIq0NCTWW8uNSIvz0htQhg2tROOMgqbg+Bi64kNMCBs+xAaKy 86 | MRt5fuREWLRPQ5Uvsg5vv8Qphbuwzi8gkAjuuc/kyQl4s8BWYFMjGFboHmlLUpU2 87 | F63+/3wwQ1L6q6zNh8pksNNmeUmpK/oQnju0R+5XQnkHxJjnAx6q4fRvk8+xVNuB 88 | zF4vzbTwN9S/cQocI8Gywx2tCuI/j8T68t5JiwxlmCr3IGIfkIPdHv3Ufr4FblA7 89 | Ys8oAQQAxCLocMo6edjfGyE7NsThDobtxGW5uLxr/ozxReH/bXqpT9myxFphEZk+ 90 | PFboGqLTysjTJHMQdOE44GDRAqz7W6DHwQiRPl/Ig8E6j/cuCu0LRYrDdBh7CIng 91 | LdXcel16651Hd4qZYeksnNzjlB8ad0X5VisxXgw2XW263ZgBWhUEAOVPdHlv/r4O 92 | IzWT0C+WY75DQ+gsWpkK1hHWjG4lDs9VMPq92pTiMxcwonZ2fVrGBpHXLM5WbM37 93 | StMDNJVmQmRnlgbtfi/y1lH/wrAxbKnnPw+XzaVMiUnZNhnzxkzeajBL+MBT6UN0 94 | XmAOXd+zFTe+w8AjHGehHpJDdq7EBTibA/4ubUnuEvaSXikFyNu1+WccXTuxvk11 95 | W9kgSNVEO3cYH9oiWdESBHbfKAqTcBoEcMqQ0t5d6EyzxjofnK8+O3YNKQjKazf0 96 | HrMsyjRyhN2yd1KqVGSwGeKEZQ/rv935ZkIfCLtB8Lmr45QimP5Xte9kjfxoC7Cr 97 | LaVKT4brGcAdjzmpiQEfBBgBCAAJBQJY2MgJAhsMAAoJEJZEWBmb8Cf1mTYIAJNE 98 | /rj8C6iAB8n/ZfgiHB9HMNQ6YodekxEbxMxaDqPbtit8R94tMRtRsKlUcw7EvtAB 99 | 
cvHbrBJv7AOEWrZuWtRUgEu+Dq1vFp9RsCX/FaIxqrBYh/g88q/lSQp/zpSZFWd5 100 | 94XvcVQ1lDvJ1ROiJoEKT3Y/sm1Gl4nVwOY+np8o8exFTSMlKVZcX2/gbmb4msW5 101 | ZXs5iVb+hp17IXF3xemTmi/6pDvKz1VGWSWRk0N8iR420KDklqFfTf4swFMwZOL+ 102 | c+m2MjhoHDHuTpoNw2sHbayvmo4vVF9fH1n/DUIjBbaspXZptpUqb/jt56MRtQo6 103 | FUB99LPi8uvx+QjcHLg= 104 | =Hw32 105 | -----END PGP PRIVATE KEY BLOCK----- 106 | ''' 107 | 108 | 109 | def make_keyring(homedir): 110 | """Create and return a keyring using the given home directory.""" 111 | paths = {'gnupghome': homedir} 112 | with mock.patch('archive_auth_mirror.gpg.get_paths', return_value=paths): 113 | return KeyRing() 114 | 115 | 116 | class KeyRingTest(CharmTest): 117 | 118 | def setUp(self): 119 | super().setUp() 120 | self.keyring = make_keyring(self.fakes.fs.root.path) 121 | 122 | def test_import_key(self): 123 | """import_key imports the key and returns its fingerprint.""" 124 | fingerprint = self.keyring.import_key(PUBLIC_KEY_MATERIAL) 125 | self.assertEqual(fingerprint, PUBLIC_KEY_FINGERPRINT[-8:]) 126 | 127 | 128 | class ExportPublicKeyTest(CharmTest): 129 | 130 | def test_export_public_key(self): 131 | """export_public_key exports the specified public key.""" 132 | gnupghome = self.fakes.fs.root.path 133 | public_key_file = Path(gnupghome) / 'public.asc' 134 | keyring = make_keyring(gnupghome) 135 | fingerprint = keyring.import_key(SECRET_KEY_MATERIAL) 136 | export_public_key(fingerprint, public_key_file, gnupghome=gnupghome) 137 | material = public_key_file.read_text() 138 | self.assertTrue( 139 | material.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----')) 140 | self.assertTrue( 141 | material.endswith('-----END PGP PUBLIC KEY BLOCK-----\n')) 142 | 143 | 144 | class InlineSignTest(CharmTest): 145 | 146 | def test_inline_sign(self): 147 | """inline_sign creates an inline signature for a file.""" 148 | paths = get_paths(root_dir=Path(self.fakes.fs.root.path)) 149 | paths['gnupghome'].mkdir(parents=True) 150 | paths['sign-passphrase'].write_text('') 151 | 
keyring = make_keyring(paths['gnupghome']) 152 | fingerprint = keyring.import_key(SECRET_KEY_MATERIAL) 153 | 154 | unsigned_file = Path(self.fakes.fs.root.join('unsigned')) 155 | unsigned_file.write_text('some text to sign') 156 | inline_sign_file = Path(self.fakes.fs.root.join('signed')) 157 | 158 | inline_sign(fingerprint, unsigned_file, inline_sign_file, paths=paths) 159 | 160 | signed_content = inline_sign_file.read_text() 161 | self.assertIn('some text to sign', signed_content) 162 | self.assertIn('-----BEGIN PGP SIGNATURE-----', signed_content) 163 | 164 | 165 | class DetachSignTest(CharmTest): 166 | 167 | def test_detach_sign(self): 168 | """detach_sign creates a detached signature for a file.""" 169 | paths = get_paths(root_dir=Path(self.fakes.fs.root.path)) 170 | paths['gnupghome'].mkdir(parents=True) 171 | paths['sign-passphrase'].write_text('') 172 | keyring = make_keyring(paths['gnupghome']) 173 | fingerprint = keyring.import_key(SECRET_KEY_MATERIAL) 174 | 175 | unsigned_file = Path(self.fakes.fs.root.join('unsigned')) 176 | unsigned_file.write_text('some text to sign') 177 | detach_sign_file = Path(self.fakes.fs.root.join('signature')) 178 | 179 | detach_sign(fingerprint, unsigned_file, detach_sign_file, paths=paths) 180 | 181 | signature = detach_sign_file.read_text() 182 | self.assertTrue(signature.startswith('-----BEGIN PGP SIGNATURE-----')) 183 | self.assertTrue(signature.endswith('-----END PGP SIGNATURE-----\n')) 184 | -------------------------------------------------------------------------------- /icon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 19 | 21 | 31 | 33 | 37 | 41 | 42 | 46 | 51 | 57 | 62 | 67 | 73 | 74 | 78 | 83 | 89 | 94 | 99 | 105 | 106 | 109 | 114 | 120 | 121 | 122 | 126 | 130 | 131 | 132 | 158 | 161 | 165 | 169 | 173 | 177 | 178 | 180 | 181 | 183 | image/svg+xml 184 | 186 | 187 | 188 | 189 | 190 | 196 | 202 | 203 | 208 | 279 | 280 | 
--------------------------------------------------------------------------------