├── .gitignore ├── debian ├── docs ├── source │ └── format ├── clean ├── dirs ├── gbp.conf ├── rules ├── tests │ └── control ├── auto-upgrade-testing.postinst ├── copyright ├── control └── changelog ├── upgrade_testing ├── __init__.py ├── data │ ├── control │ ├── changelog │ └── upgrade ├── selftests │ ├── __init__.py │ ├── test_configspec.py │ └── test_provisionconfig.py ├── configspec │ ├── _utils.py │ ├── __init__.py │ ├── _filecopy.py │ └── _config.py ├── provisioning │ ├── backends │ │ ├── __init__.py │ │ ├── _base.py │ │ ├── _lxc.py │ │ ├── _ssh.py │ │ └── _qemu.py │ ├── __init__.py │ ├── _util.py │ ├── executors.py │ └── _provisionconfig.py ├── preparation │ ├── _testbed.py │ ├── __init__.py │ └── _hostprep.py └── command_line.py ├── .flake8 ├── pyproject.toml ├── tox.ini ├── .pre-commit-config.yaml ├── .github └── workflows │ └── ci.yaml ├── setup.py └── README.rst /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | -------------------------------------------------------------------------------- /debian/docs: -------------------------------------------------------------------------------- 1 | README.rst 2 | -------------------------------------------------------------------------------- /upgrade_testing/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (native) 2 | -------------------------------------------------------------------------------- /debian/clean: -------------------------------------------------------------------------------- 1 | upgrade_testing.egg-info/ 2 | -------------------------------------------------------------------------------- /debian/dirs: -------------------------------------------------------------------------------- 1 | var/cache/auto-upgrade-testing 2 | -------------------------------------------------------------------------------- /debian/gbp.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | debian-branch = main 3 | debian-tag = %(version)s 4 | -------------------------------------------------------------------------------- /upgrade_testing/data/control: -------------------------------------------------------------------------------- 1 | Tests: upgrade 2 | Restrictions: allow-stderr 3 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | %: 4 | dh $@ --with python3 --buildsystem=pybuild 5 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E999, W503 3 | exclude = venv_tests/ 4 | max-complexity = 7 5 | max-line-length = 80 6 | -------------------------------------------------------------------------------- /debian/tests/control: -------------------------------------------------------------------------------- 1 | Test-Command: auto-upgrade-testing --help 2 | Restrictions: superficial 3 | Depends: 4 | @, 5 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 
| line-length = 79 3 | 4 | [tool.isort] 5 | profile = "black" 6 | line_length = 79 7 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = pre-commit 3 | skipsdist = true 4 | 5 | [testenv:pre-commit] 6 | deps = pre-commit==2.20.0 7 | commands = pre-commit run --all-files --show-diff-on-failure 8 | -------------------------------------------------------------------------------- /upgrade_testing/data/changelog: -------------------------------------------------------------------------------- 1 | auto-upgrade-testing (0.1-1) UNRELEASED; urgency=medium 2 | 3 | * Initial release. 4 | 5 | -- First Name Tue, 27 Oct 2015 13:57:34 -0400 6 | -------------------------------------------------------------------------------- /debian/auto-upgrade-testing.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e 4 | 5 | case "$1" in 6 | configure) 7 | chmod 1777 /var/cache/auto-upgrade-testing 8 | ;; 9 | esac 10 | 11 | #DEBHELPER# 12 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/ambv/black 3 | rev: 22.8.0 4 | hooks: 5 | - id: black 6 | - repo: https://github.com/pycqa/isort 7 | rev: 5.12.0 8 | hooks: 9 | - id: isort 10 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: ci 3 | 4 | on: [push, pull_request] 5 | 6 | jobs: 7 | tox: 8 | runs-on: ubuntu-20.04 9 | steps: 10 | - name: Synchronize the package index (apt update) 11 | run: sudo apt-get -qy update 12 | - name: Install dependencies 13 | run: sudo DEBIAN_FRONTEND=noninteractive apt-get -qy install tox 14 | - name: Git checkout 15 | uses: actions/checkout@v2 16 | - name: Run tox 17 | run: tox 18 | -------------------------------------------------------------------------------- /upgrade_testing/selftests/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | -------------------------------------------------------------------------------- /upgrade_testing/configspec/_utils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import pkg_resources 20 | 21 | 22 | def get_file_data_location(): 23 | import upgrade_testing 24 | 25 | return pkg_resources.resource_filename(upgrade_testing.__name__, "data") 26 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/backends/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | from upgrade_testing.provisioning.backends._lxc import LXCBackend 20 | from upgrade_testing.provisioning.backends._qemu import QemuBackend 21 | 22 | __all__ = ["LXCBackend", "QemuBackend"] 23 | -------------------------------------------------------------------------------- /upgrade_testing/preparation/_testbed.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | 20 | def get_testbed_storage_location(): 21 | # Any changes to this location will need to be updated in the autopkgtest 22 | # script (TMP_LOCATION var). 
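# Note (editor comment, grounded in the 0.2 entry of debian/changelog below): /var/tmp is used rather than /tmp because /tmp is cleared on reboot.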
23 | return "/var/tmp/ubuntu-upgrade-testing" 24 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: auto-upgrade-testing 3 | Source: https://github.com/canonical/auto-upgrade-testing 4 | 5 | Files: * 6 | Copyright: 2015-2022 Canonical Ltd. 7 | License: GPL-3.0 8 | This program is free software: you can redistribute it and/or modify 9 | it under the terms of the GNU General Public License as published by 10 | the Free Software Foundation, version 3 of the License. 11 | . 12 | This package is distributed in the hope that it will be useful, 13 | but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | GNU General Public License for more details. 16 | . 17 | You should have received a copy of the GNU General Public License 18 | along with this program. If not, see . 19 | . 20 | On Debian systems, the complete text of the GNU General 21 | Public License version 3 can be found in "/usr/share/common-licenses/GPL-3". 22 | -------------------------------------------------------------------------------- /upgrade_testing/preparation/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | from upgrade_testing.preparation._hostprep import prepare_test_environment 20 | from upgrade_testing.preparation._testbed import get_testbed_storage_location 21 | 22 | __all__ = [ 23 | "get_testbed_storage_location", 24 | "prepare_test_environment", 25 | ] 26 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | 19 | from upgrade_testing.provisioning._provisionconfig import ( 20 | ProvisionSpecification, 21 | ) 22 | from upgrade_testing.provisioning._util import run_command_with_logged_output 23 | 24 | __all__ = ["ProvisionSpecification", "run_command_with_logged_output"] 25 | -------------------------------------------------------------------------------- /upgrade_testing/configspec/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | 20 | from upgrade_testing.configspec._config import definition_reader 21 | from upgrade_testing.configspec._filecopy import test_source_retriever 22 | from upgrade_testing.configspec._utils import get_file_data_location 23 | 24 | __all__ = [ 25 | "definition_reader", 26 | "get_file_data_location", 27 | "test_source_retriever", 28 | ] 29 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: auto-upgrade-testing 2 | Section: devel 3 | Priority: optional 4 | Maintainer: Ubuntu Developers 5 | Build-Depends: 6 | debhelper-compat (= 13), 7 | dh-python, 8 | python3, 9 | python3-flake8, 10 | python3-lxc, 11 | python3-paramiko, 12 | python3-pexpect, 13 | python3-retrying, 14 | python3-setuptools, 15 | python3-yaml, 16 | Standards-Version: 4.6.1 17 | Homepage: https://launchpad.net/auto-upgrade-testing 18 | Vcs-Git: https://github.com/canonical/auto-upgrade-testing.git 19 | Vcs-Browser: https://github.com/canonical/auto-upgrade-testing 20 | X-Python3-Version: >= 3.6 21 | Rules-Requires-Root: no 22 | 23 | Package: auto-upgrade-testing 24 | Architecture: all 25 | Depends: 26 | autopkgtest (>= 3), 27 | lxc-templates, 28 | python3-junitparser, 29 | python3-lxc, 30 | python3-paramiko, 31 | python3-pexpect, 32 | python3-pkg-resources, 33 | python3-retrying, 34 | python3-yaml, 35 | ${misc:Depends}, 36 | ${python3:Depends}, 37 | Description: Test release upgrades in a virtual environment 38 | A tool to do QA for release upgrades in ubuntu that performs upgrades 39 | in a virtual environment. 40 | -------------------------------------------------------------------------------- /upgrade_testing/selftests/test_configspec.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 
9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import unittest 20 | 21 | from upgrade_testing.configspec import _config as _c 22 | 23 | 24 | class HelperMethodTestCases(unittest.TestCase): 25 | def test_load_configdef_raises_ValueError_on_non_yaml_filename(self): 26 | self.assertRaises(ValueError, _c._load_configdef, "test.txt") 27 | 28 | def test_read_yaml_config_raises_on_nonexistant_file(self): 29 | self.assertRaises(FileNotFoundError, _c._read_yaml_config, "test.txt") 30 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # 4 | # Ubuntu System Tests 5 | # Copyright (C) 2014 Canonical 6 | # 7 | # This program is free software: you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation, either version 3 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License 18 | # along with this program. If not, see . 19 | # 20 | 21 | import sys 22 | 23 | from setuptools import find_packages, setup 24 | 25 | assert sys.version_info >= (3,), "Python 3 is required" 26 | 27 | 28 | VERSION = "1.0" 29 | 30 | 31 | setup( 32 | name="upgrade-testing", 33 | version=VERSION, 34 | description="Test framework for testing system upgrades.", 35 | url="https://launchpad.net/auto-upgrade-testing", 36 | license="GPLv3", 37 | packages=find_packages(), 38 | package_data={"upgrade_testing": ["data/*"]}, 39 | entry_points={ 40 | "console_scripts": [ 41 | "auto-upgrade-testing = upgrade_testing.command_line:main" 42 | ] 43 | }, 44 | ) 45 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/_util.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | 19 | import logging 20 | import subprocess 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | def run_command_with_logged_output(command, shell=False): 26 | """Run provided command while outputting stdout & stderr in 'real time'. 27 | 28 | :returns: Returncode of command that was run. 29 | 30 | """ 31 | logger.debug("Running command: {}".format(command)) 32 | with subprocess.Popen( 33 | command, 34 | stdout=subprocess.PIPE, 35 | stderr=subprocess.STDOUT, 36 | bufsize=1, 37 | universal_newlines=True, 38 | shell=shell, 39 | ) as proc: 40 | for line in proc.stdout: 41 | logger.info(line.strip("\n")) 42 | proc.wait() 43 | return proc.returncode 44 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Upgrade Testing 3 | ================= 4 | 5 | Developers 6 | ========== 7 | 8 | Testing + pep8 9 | -------------- 10 | 11 | When making changes it is worthwhile checking them against the project's flake8 12 | checker (which has custom ignores). 13 | 14 | This can be achieved by running the script `.project-flake8.sh` found in the 15 | project root. 16 | This script is also run during package build. 17 | 18 | Testfile Spec 19 | ============= 20 | 21 | Provisioning Backends 22 | ===================== 23 | 24 | Different provisioning needs have different options that can either be stored in 25 | the test spec itself or in a separate provisioning file. 26 | 27 | The following sections detail the available settings, both required and optional, for each supported backend. 28 | 29 | LXC 30 | --- 31 | 32 | Virtual Machine 33 | --------------- 34 | 35 | Output directory 36 | ================ 37 | 38 | Each test script run will have a unique directory prepared for it in the main 39 | artifacts directory. 40 | The path to this directory will be stored in the env-var TESTRUN_RESULTS_DIR. 41 | The naming convention of this directory is: '{post|pre}_{script_name}' 42 | (post/pre depending on whether it's run before or after the upgrade.) 43 | 44 | This directory will be a sub-directory within the full suite directory. This 45 | directory path is stored in TEST_RESULTS_DIR. 46 | 47 | For instance, if you're running a pre-upgrade script named setup_background and 48 | a post-upgrade test script named test_background_exists, and each script outputs details to a file "${TESTRUN_RESULTS_DIR}/output.log", the directory structure will look like this:: 49 | 50 | -- $TEST_RESULTS_DIR/ 51 | ---- pre_setup_background/ # Known during the script run as $TESTRUN_RESULTS_DIR 52 | -------- output.log 53 | ---- post_test_background_exists/ # Known during the script run as $TESTRUN_RESULTS_DIR 54 | -------- output.log 55 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/backends/_base.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see <http://www.gnu.org/licenses/>. 17 | # 18 | 19 | 20 | class ProviderBackend: 21 | """Abstract baseclass for all provision backends.""" 22 | 23 | def __init__(self, **args): 24 | raise NotImplementedError( 25 | "Cannot be instantiated, please use an established backend" 26 | ) 27 | 28 | def available(self): 29 | """Return true if there is an instance of this backend that can satisfy the 30 | required settings.""" 31 | raise NotImplementedError() 32 | 33 | def create(self, adt_base_path): 34 | """Creates an instance of this backend adhering to the provided args. 35 | 36 | :param adt_base_path: string containing the base path to the version of 37 | adt to use. 38 | :raises ValueError: if an instance already exists that matches these 39 | requirements. 40 | 41 | """ 42 | raise NotImplementedError() 43 | 44 | def get_adt_run_args(self, **kwargs): 45 | """Return a list containing required args to pass to autopkgtest.""" 46 | raise NotImplementedError() 47 | 48 | def set_verbose(self, verbose): 49 | self.verbose = verbose 50 | 51 | @property 52 | def name(self): 53 | raise NotImplementedError() 54 | -------------------------------------------------------------------------------- /upgrade_testing/configspec/_filecopy.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see <http://www.gnu.org/licenses/>. 17 | # 18 | 19 | 20 | import logging 21 | import os 22 | import shutil 23 | import subprocess 24 | 25 | logger = logging.getLogger(__name__) 26 | 27 | 28 | def test_source_retriever(source_location, dest_dir): 29 | """Given a location path for the tests, retrieve a local copy. 30 | 31 | This allows us to copy across what we need to the testbed. 32 | 33 | :param source_location: string for location to retrieve the test directory 34 | from. 35 | :param dest_dir: where to move the files to. This could be a temp 36 | directory that gets cleaned up after a run, but that's not the 37 | responsibility of this method. 38 | Currently supported URI types: 39 | - 'file://' for local file locations 40 | - 'lp:' for launchpad bzr branch locations.
41 | 42 | :returns: string containing directory path to copy across 43 | 44 | """ 45 | if source_location.startswith("file://"): 46 | return _local_file_retrieval(source_location, dest_dir) 47 | elif source_location.startswith("lp:"): 48 | return _bzr_file_retrieval(source_location, dest_dir) 49 | else: 50 | raise ValueError("Unknown file protocol") 51 | 52 | 53 | def _local_file_retrieval(source, dest_dir): 54 | source_path = os.path.abspath(source.replace("file://", "")) 55 | shutil.copytree(source_path, dest_dir) 56 | return dest_dir 57 | 58 | 59 | def _bzr_file_retrieval(source, dest_dir): 60 | bzr_cmd = ["bzr", "export", dest_dir, source] 61 | try: 62 | subprocess.check_output(bzr_cmd) 63 | except subprocess.CalledProcessError: 64 | logger.error("Failed to export path: {}".format(source)) 65 | raise ValueError( 66 | "Unable to export from provided source: {}".format(source) 67 | ) 68 | 69 | return dest_dir 70 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | auto-upgrade-testing (0.3.1) kinetic; urgency=medium 2 | 3 | [ Brian Murray ] 4 | * debian/control: Add a dependency on python3-junitparser. 5 | 6 | [ Paride Legovini ] 7 | * d/tests: replace broken `upgrade` test with simple `--help` call 8 | 9 | -- Paride Legovini Fri, 16 Sep 2022 11:57:02 +0200 10 | 11 | auto-upgrade-testing (0.3) kinetic; urgency=medium 12 | 13 | * d/a-u-t.postinst: set sticky bit to /var/cache/auto-upgrade-testing 14 | * d/copyright: update copyright years 15 | * d/gbp.conf: add git-buildpackage config file 16 | * debian/*: wrap-and-sort -bast (cosmetic) 17 | * pyproject.toml: configure black and isort 18 | * pre-commit: run black and isort 19 | * tox: run the pre-commit checks 20 | * CI: run tox in a GitHub Actions workflow 21 | * Linting: apply black and isort 22 | 23 | -- Paride Legovini Tue, 06 Sep 2022 17:23:06 +0200 24 | 25 | auto-upgrade-testing (0.2) kinetic; urgency=medium 26 | 27 | * New version 0.2. 28 | * Main functional changes since 0.1: 29 | - virt-qemu: bump the reboot timeout to 300s 30 | - upgrade: allow LTS-to-LTS upgrades if available 31 | - fixed arguments of autopkgtest following migration from adt 32 | - Rename adt-* to autopkgtest-* 33 | - qemu backend: honor DEFAULT_CPU and DEFAULT_RAM 34 | - d/t/upgrade: the switches -p and -d for do-release-upgrade are mutually 35 | exclusive so only use -d 36 | - d/t/upgrade: drop support for deprecated/abandoned backends 37 | - d/t/upgrade: use the dist-upgrader from -proposed since that 38 | has fixes that affect upgrading e.g. LP: #1796193 39 | - Adding keep-overlay option to persist the resulting image file in 40 | order to be able to run system tests after the upgrade is done. 
41 | - moved storage from /tmp (which gets clear on reboot) to /var/tmp 42 | * Main packaging changes: 43 | - d/control: Build-Depend on python3 instead of python3-all-dev 44 | - d/control: bump Standards-Version to 4.6.1, no changes needed 45 | - d/control: bump X-Python3-Version to >= 3.6 (Bionic's) 46 | - d/control: bump dh compat level to 13 (via debhelper-compat) 47 | - d/control: replace Vcs-Bzr with Vcs-Git 48 | - d/control: specify Rules-Requires-Root: no 49 | - d/control: update package long description 50 | - d/rules: drop dh_override_auto_build 51 | - d/clean: remove upgrade_testing.egg-info/ 52 | - d/copyright: switch to secure (https) URL for Format 53 | - d/copyright: update Source field with new repository location 54 | - d/s/format: switch to native packaging 55 | 56 | -- Paride Legovini Fri, 02 Sep 2022 14:12:01 +0200 57 | 58 | auto-upgrade-testing (0.1-1) trusty; urgency=medium 59 | 60 | * Initial release. (LP: #1546699) 61 | 62 | -- Max Brustkern Fri, 24 Jun 2016 13:32:34 -0400 63 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/backends/_lxc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import logging 20 | import os 21 | 22 | import lxc 23 | 24 | from upgrade_testing.provisioning._util import run_command_with_logged_output 25 | from upgrade_testing.provisioning.backends._base import ProviderBackend 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | class LXCBackend(ProviderBackend): 31 | def __init__(self, release, distribution, arch): 32 | """Provide backend capabilities as requested in the provision spec. 33 | 34 | :param provision_spec: ProvisionSpecification object containing backend 35 | details. 36 | 37 | """ 38 | self.release = release 39 | self.distro = distribution 40 | self.arch = arch 41 | 42 | def available(self): 43 | """Return true if an lxc container exists that matches the provided 44 | args. 
45 | 46 | """ 47 | container_name = self._get_container_name() 48 | logger.info("Checking for {}".format(container_name)) 49 | return container_name in lxc.list_containers() 50 | 51 | def _get_container_name(self): 52 | return "autopkgtest-{}-{}".format(self.release, self.arch) 53 | 54 | def create(self, adt_base_path): 55 | """Create an lxc container.""" 56 | 57 | logger.info("Creating lxc container for run.") 58 | 59 | cmd = [ 60 | os.path.join(adt_base_path, "autopkgtest-build-lxc"), 61 | self.distro, 62 | self.release, 63 | self.arch, 64 | ] 65 | retcode = run_command_with_logged_output(cmd) 66 | if retcode != 0: 67 | raise RuntimeError("Failed to create lxc container.") 68 | 69 | logger.info("Container created.") 70 | 71 | def get_adt_run_args(self, **kwargs): 72 | return ["lxc", "-s", self._get_container_name()] 73 | 74 | @property 75 | def name(self): 76 | return "lxc" 77 | 78 | def __repr__(self): 79 | return "{classname}(release={release}, arch={arch})".format( 80 | classname=self.__class__.__name__, 81 | release=self.release, 82 | arch=self.arch, 83 | ) 84 | -------------------------------------------------------------------------------- /upgrade_testing/selftests/test_provisionconfig.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 
18 | # 19 | 20 | import unittest 21 | 22 | from upgrade_testing.provisioning import _provisionconfig as _p 23 | 24 | 25 | class ReplacePlaceholdersTestCases(unittest.TestCase): 26 | def test_returns_untouched_string_when_no_tokens_involved(self): 27 | noplaceholder_string = "This is a normal string, no placeholders" 28 | self.assertEqual( 29 | noplaceholder_string, 30 | _p._replace_placeholders(noplaceholder_string, {}), 31 | ) 32 | 33 | def test_replaces_simple_token(self): 34 | base_string = "$FOO" 35 | expected_string = "texthaschanged" 36 | _token_lookup = dict(FOO=lambda: expected_string) 37 | self.assertEqual( 38 | _p._replace_placeholders(base_string, _token_lookup), 39 | expected_string, 40 | ) 41 | 42 | def test_replaces_simple_token_while_leaving_other_text_alone(self): 43 | base_string = "nottoken $FOO nottoken" 44 | expected_string = "texthaschanged" 45 | _token_lookup = dict(FOO=lambda: expected_string) 46 | self.assertEqual( 47 | _p._replace_placeholders(base_string, _token_lookup), 48 | "nottoken {} nottoken".format(expected_string), 49 | ) 50 | 51 | def test_leaves_non_tokens_alone(self): 52 | """FOO isn't a valid token it's lacking a '$' and must be left alone.""" 53 | base_string = "FOO" 54 | _token_lookup = dict(FOO=lambda: "FAIL") 55 | self.assertEqual( 56 | _p._replace_placeholders(base_string, _token_lookup), base_string 57 | ) 58 | 59 | def test_replaces_multiple_tokens_within_a_string(self): 60 | base_string = "$FOO and $BAR" 61 | expected_string = "123 and abc" 62 | _token_lookup = dict(FOO=lambda: "123", BAR=lambda: "abc") 63 | self.assertEqual( 64 | _p._replace_placeholders(base_string, _token_lookup), 65 | expected_string, 66 | ) 67 | 68 | def test_does_not_replace_unknown_token(self): 69 | base_string = "$FOO and $BAR" 70 | _token_lookup = dict(FOO=lambda: "123") 71 | self.assertEqual( 72 | _p._replace_placeholders(base_string, _token_lookup), 73 | "123 and $BAR", 74 | ) 75 | 76 | def test_replaces_superstr_first(self): 77 | """If there are tokens with similar names the right tokens must be 78 | replaced. 79 | 80 | """ 81 | base_string = "$FOOBAR and $FOO and $BAR" 82 | _token_lookup = dict( 83 | FOO=lambda: "foo", BAR=lambda: "bar", FOOBAR=lambda: "baz" 84 | ) 85 | self.assertEqual( 86 | _p._replace_placeholders(base_string, _token_lookup), 87 | "baz and foo and bar", 88 | ) 89 | 90 | def test_confirms_full_token_word(self): 91 | """Similar tokens must not be confused. i.e. 
FOOA is different to FOOB.""" 92 | base_string = "$FOOA and $FOOB" 93 | _token_lookup = dict( 94 | FOO=lambda: "FAIL", 95 | FOOA=lambda: "A", 96 | FOOB=lambda: "B", 97 | ) 98 | self.assertEqual( 99 | _p._replace_placeholders(base_string, _token_lookup), "A and B" 100 | ) 101 | 102 | 103 | class RenderBuildArgsTestCase(unittest.TestCase): 104 | def test_raises_ValueError_if_not_passed_list_of_strings(self): 105 | self.assertRaises(ValueError, _p._render_build_args, [1], "") 106 | 107 | def test_raises_TypeError_if_not_passed_list(self): 108 | self.assertRaises(TypeError, _p._render_build_args, "", "") 109 | 110 | def test_returns_empty_list_when_empty_list_passed_in(self): 111 | self.assertEqual(_p._render_build_args([], ""), []) 112 | 113 | def test_returns_unmodified_list_string(self): 114 | build_args = ["no tokens here"] 115 | self.assertEqual(_p._render_build_args(build_args, ""), build_args) 116 | 117 | def test_returns_list_string_with_tokens_modified(self): 118 | build_args = ["$PROFILE_PATH here", "$PROFILE_PATH there"] 119 | self.assertEqual( 120 | _p._render_build_args(build_args, "/tmp"), 121 | ["/tmp here", "/tmp there"], 122 | ) 123 | 124 | def test_returns_a_list_of_equal_elements_of_that_passed_in(self): 125 | build_args = ["1", "2", "3"] 126 | self.assertEqual( 127 | len(_p._render_build_args(build_args, "")), len(build_args) 128 | ) 129 | 130 | 131 | class QemuProvisionSpecificationTestCases(unittest.TestCase): 132 | def test_stores_passed_specification_details(self): 133 | """QemuProvisionSpecification must store the passed details regarding 134 | the qemu image and run details. 135 | 136 | """ 137 | spec = dict( 138 | releases=["release 1", "release 2"], 139 | arch="test arch", 140 | image_name="image name", 141 | build_args=["$PROFILE_PATH"], 142 | ) 143 | spec_path = "/test/path/test.yaml" 144 | 145 | qemu_spec = _p.QemuProvisionSpecification(spec, spec_path) 146 | 147 | self.assertEqual(qemu_spec.releases, spec["releases"]) 148 | self.assertEqual(qemu_spec.arch, spec["arch"]) 149 | self.assertEqual(qemu_spec.image_name, spec["image_name"]) 150 | self.assertEqual(qemu_spec.build_args, ["/test/path"]) 151 | self.assertEqual(qemu_spec.initial_state, "release 1") 152 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/executors.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2017 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | 19 | 20 | import os 21 | import sys 22 | import time 23 | from abc import ABCMeta, abstractmethod 24 | 25 | import paramiko 26 | 27 | TIMEOUT_CMD = 60 28 | TIMEOUT_CONNECT = 120 29 | TIMEOUT_WAIT_FOR_DEVICE = 120 30 | 31 | 32 | class Result: 33 | """Result of command with status and output properties.""" 34 | 35 | def __init__(self): 36 | self.status = None 37 | self.output = "" 38 | 39 | 40 | class SSHClient: 41 | """This class manages the paramiko ssh client""" 42 | 43 | def __init__(self): 44 | """The ssh can be initialized either through a password or with a 45 | private key file and the passphrase 46 | :param hostname: The hostname to connect 47 | :param user: The remote user in the host 48 | :param keyfile: The private key used to connect to the remote host 49 | :param password: The password used to connect to the remote host 50 | :param timeout: An optional timeout (in seconds) for the TCP connect 51 | """ 52 | self.client = paramiko.SSHClient() 53 | self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 54 | 55 | def connect(self, hostname, user, password, port, timeout=60): 56 | """Connect to remote host.""" 57 | timeout = float(timeout) 58 | self.client.connect( 59 | hostname, 60 | username=user, 61 | password=password, 62 | port=port, 63 | timeout=timeout, 64 | banner_timeout=timeout, 65 | ) 66 | 67 | def close(self): 68 | """Close the connection""" 69 | self.client.close() 70 | 71 | def run(self, cmd, timeout=TIMEOUT_CMD, log_stdout=True): 72 | """Run a command in the remote host. 73 | :param cmd: Command to run. 74 | :param timeout: Period to wait before raising TimeoutError. 75 | :param log_stdout: Whether to log output to stdout. 76 | :return: Result object containing command output and status code. 77 | """ 78 | channel = self.client.get_transport().open_session() 79 | channel.set_combine_stderr(True) 80 | end = time.time() + timeout 81 | result = Result() 82 | channel.exec_command(cmd) 83 | while not channel.exit_status_ready() or ( 84 | channel.exit_status_ready() and channel.recv_ready() 85 | ): 86 | if channel.recv_ready(): 87 | self._process_output( 88 | result, log_stdout, channel.recv(1024).decode() 89 | ) 90 | elif time.time() > end: 91 | print( 92 | "Timeout waiting {} seconds for command to " 93 | "complete: {}".format(timeout, cmd) 94 | ) 95 | raise TimeoutError 96 | time.sleep(0.2) 97 | # Add a new line after command has completed to ensure output 98 | # is separated. 
99 | self._process_output(result, log_stdout, "\n") 100 | result.status = channel.recv_exit_status() 101 | return result 102 | 103 | def _process_output(self, result, log_stdout, content): 104 | """Save output to result and print to stdout if required.""" 105 | result.output += content 106 | if log_stdout: 107 | sys.stdout.write(content) 108 | sys.stdout.flush() 109 | 110 | def put(self, local_path, remote_path): 111 | """Copy a file through sftp from the local_path to the remote_path""" 112 | if not os.path.isfile(local_path): 113 | raise RuntimeError("File to copy does not exist") 114 | 115 | with self.client.open_sftp() as sftp: 116 | sftp.put(local_path, remote_path) 117 | 118 | def get(self, remote_path, local_path): 119 | """Copy a file through sftp from the remote_path to the local_path""" 120 | with self.client.open_sftp() as sftp: 121 | sftp.get(remote_path, local_path) 122 | 123 | if not os.path.isfile(local_path): 124 | raise RuntimeError("File couldn't be copied") 125 | 126 | 127 | class Executor: 128 | __metaclass__ = ABCMeta 129 | """Base class for all target executors.""" 130 | 131 | @abstractmethod 132 | def connect(self, username, password, port, host=None, timeout=None): 133 | pass 134 | 135 | @abstractmethod 136 | def close(self): 137 | pass 138 | 139 | @abstractmethod 140 | def run(self, cmd, timeout=None, log_stdout=True): 141 | pass 142 | 143 | @abstractmethod 144 | def run_sudo(self, cmd, timeout=None, log_stdout=True): 145 | pass 146 | 147 | def reboot(self): 148 | result = self.run_sudo("shutdown -r now") 149 | if result.status > 0: 150 | raise PermissionError("Reboot failed, check password.") 151 | 152 | def shutdown(self): 153 | result = self.run_sudo("shutdown now") 154 | if result.status > 0: 155 | raise PermissionError("Shutdown failed, check password.") 156 | 157 | @abstractmethod 158 | def wait_for_device(self, timeout=None): 159 | pass 160 | 161 | @abstractmethod 162 | def put(self, localpath, remotepath): 163 | pass 164 | 165 | @abstractmethod 166 | def get(self, remotepath, localpath): 167 | pass 168 | 169 | def _get_sudo_command(self, cmd): 170 | command = "sudo {}".format(cmd) 171 | if self.password: 172 | command = "echo {} | sudo -S {}".format(self.password, cmd) 173 | return command 174 | 175 | 176 | class SSHExecutor(Executor): 177 | def __init__(self): 178 | self.ssh_client = SSHClient() 179 | 180 | def connect( 181 | self, 182 | username, 183 | password, 184 | port, 185 | host="localhost", 186 | timeout=TIMEOUT_CONNECT, 187 | ): 188 | self.password = password 189 | count = max(1, timeout) 190 | for attempt in range(count): 191 | try: 192 | self.ssh_client.connect( 193 | host, username, password, port, timeout 194 | ) 195 | except TypeError: 196 | # This can happen when target not yet running so just try again 197 | time.sleep(1) 198 | except paramiko.ssh_exception.AuthenticationException: 199 | raise 200 | else: 201 | return 202 | raise RuntimeError("Could not connect to target.") 203 | 204 | def close(self): 205 | self.ssh_client.close() 206 | 207 | def _run(self, cmd, timeout=TIMEOUT_CMD, log_stdout=True): 208 | return self.ssh_client.run(cmd, timeout, log_stdout) 209 | 210 | def run(self, cmd, timeout=TIMEOUT_CMD, log_stdout=True): 211 | return self._run(cmd, timeout, log_stdout) 212 | 213 | def run_sudo(self, cmd, timeout=TIMEOUT_CMD, log_stdout=True): 214 | return self._run(self._get_sudo_command(cmd), timeout, log_stdout) 215 | 216 | def put(self, localpath, remotepath): 217 | self.ssh_client.put(localpath, remotepath) 218 | 219 | def 
get(self, remotepath, localpath): 220 | self.ssh_client.get(remotepath, localpath) 221 | -------------------------------------------------------------------------------- /upgrade_testing/preparation/_hostprep.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import json 20 | import logging 21 | import os 22 | import shutil 23 | import tempfile 24 | from collections import namedtuple 25 | from contextlib import contextmanager 26 | from distutils.spawn import find_executable 27 | from textwrap import dedent 28 | 29 | from upgrade_testing.configspec import ( 30 | get_file_data_location, 31 | test_source_retriever, 32 | ) 33 | from upgrade_testing.preparation._testbed import get_testbed_storage_location 34 | from upgrade_testing.provisioning import run_command_with_logged_output 35 | 36 | DEFAULT_GIT_URL = "git://anonscm.debian.org/autopkgtest/autopkgtest.git" 37 | 38 | logger = logging.getLogger(__name__) 39 | 40 | 41 | # Definition for the tempfiles that are created for a run and cleaned up 42 | # afterwards. 43 | TestrunTempFiles = namedtuple( 44 | "TestrunTempFiles", 45 | [ 46 | "adt_base_path", 47 | "adt_cmd", 48 | "run_config_file", 49 | "testrun_tmp_dir", 50 | "unbuilt_dir", 51 | "scripts", 52 | ], 53 | ) 54 | 55 | 56 | @contextmanager 57 | def prepare_test_environment(testsuite): 58 | """Return a TestrunTempFiles instance that is cleaned up once out of scope. 59 | 60 | Creates a temp directory an populates it with the required data structure 61 | to copy across to the testbed. 62 | Namely: 63 | - Test run config details 64 | - 'Dummy' debian/autopkgtest details for this run. 65 | 66 | :param testsuite: TestSpecification instance. 67 | 68 | """ 69 | 70 | try: 71 | temp_dir = tempfile.mkdtemp() 72 | run_config_path = _write_run_config(testsuite, temp_dir) 73 | unbuilt_dir = _create_autopkg_details(temp_dir) 74 | logger.info("Unbuilt dir: {}".format(unbuilt_dir)) 75 | 76 | scripts_path = os.path.join(temp_dir, "scripts") 77 | _copy_script_files(testsuite.scripts_location, scripts_path) 78 | 79 | if hasattr(testsuite, "scripts_data"): 80 | data_path = os.path.join(temp_dir, "scripts_data.json") 81 | with open(data_path, "w") as f: 82 | json.dump(testsuite.scripts_data, f) 83 | 84 | adt_base_path, adt_cmd = _get_adt_path(temp_dir) 85 | 86 | yield TestrunTempFiles( 87 | adt_base_path=adt_base_path, 88 | adt_cmd=adt_cmd, 89 | run_config_file=run_config_path, 90 | # Should we create a dir so that it won't interfer? 
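# Note (editor comment): unbuilt_dir and testrun_tmp_dir currently both point at the same temp_dir.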
91 | unbuilt_dir=temp_dir, 92 | testrun_tmp_dir=temp_dir, 93 | scripts=scripts_path, 94 | ) 95 | finally: 96 | _cleanup_dir(temp_dir) 97 | 98 | 99 | def _copy_script_files(script_location, script_destination): 100 | return test_source_retriever(script_location, script_destination) 101 | 102 | 103 | def _cleanup_dir(dir): 104 | shutil.rmtree(dir) 105 | 106 | 107 | def _write_run_config(testsuite, temp_dir): 108 | """Write a config file for this run of testing. 109 | 110 | Populates a config file with the details from the test config spec as well 111 | as the dynamic details produced each run (temp dir etc.). 112 | 113 | """ 114 | run_config_file = tempfile.mkstemp(dir=temp_dir)[1] 115 | with open(run_config_file, "w") as f: 116 | config_string = dedent( 117 | """\ 118 | # Auto Upgrade Test Configuration 119 | PRE_TEST_LOCATION="{testbed_location}/scripts" 120 | POST_TEST_LOCATION="{testbed_location}/scripts" 121 | """.format( 122 | testbed_location=get_testbed_storage_location() 123 | ) 124 | ) 125 | f.write(config_string) 126 | pre_tests = " ".join(testsuite.pre_upgrade_scripts.executables) 127 | post_tests = " ".join(testsuite.post_upgrade_tests.executables) 128 | f.write('PRE_TESTS_TO_RUN="{}"\n'.format(pre_tests)) 129 | f.write('POST_TESTS_TO_RUN="{}"\n'.format(post_tests)) 130 | # Need to store the expected pristine system and the post-upgrade 131 | # system 132 | # Note: This will only support one upgrade, for first -> final 133 | f.write( 134 | 'INITIAL_SYSTEM_STATE="{}"\n'.format( 135 | testsuite.provisioning.initial_state 136 | ) 137 | ) 138 | f.write( 139 | 'POST_SYSTEM_STATE="{}"\n'.format( 140 | testsuite.provisioning.final_state 141 | ) 142 | ) 143 | f.write( 144 | "RUNNING_BACKEND={}\n".format(testsuite.provisioning.backend_name) 145 | ) 146 | f.write( 147 | "DO_RELEASE_UPGRADE_PROMPT={}\n".format( 148 | testsuite.provisioning.do_release_upgrade_prompt 149 | ) 150 | ) 151 | return run_config_file 152 | 153 | 154 | def _create_autopkg_details(temp_dir): 155 | """Create a 'dummy' debian dir structure for autopkg testing. 156 | 157 | Given a temp dir build the required dir tree and populate it with the 158 | needed files. 159 | 160 | The test file that is executed is already populated and part of this 161 | project. 
162 | 163 | """ 164 | dir_tree = os.path.join(temp_dir, "debian") 165 | test_dir_tree = os.path.join(dir_tree, "tests") 166 | os.makedirs(test_dir_tree) 167 | 168 | source_dir = get_file_data_location() 169 | 170 | def _copy_file(dest, name): 171 | """Copy a file from the source data dir to dest.""" 172 | src = os.path.join(source_dir, name) 173 | dst = os.path.join(dest, name) 174 | shutil.copyfile(src, dst) 175 | 176 | _copy_file(test_dir_tree, "control") 177 | _copy_file(test_dir_tree, "upgrade") 178 | _copy_file(dir_tree, "changelog") 179 | 180 | # Main control file can be empty 181 | dummy_control = os.path.join(dir_tree, "control") 182 | open(dummy_control, "a").close() 183 | 184 | return dir_tree 185 | 186 | 187 | def _get_adt_path(tmp_dir): 188 | # Check if we need to get a git version of autopkgtest 189 | # (If environment variables are set or a local version can't be found) 190 | git_url = os.environ.get("AUTOPKGTEST_GIT_REPO", None) 191 | git_hash = os.environ.get("AUTOPKGTEST_GIT_HASH", None) 192 | local_adt = _get_local_adt() 193 | if git_url or git_hash or local_adt is None: 194 | git_url = git_url or DEFAULT_GIT_URL 195 | logger.info("Fetching autopkgtest from git url: %s", git_url) 196 | git_trunk_path = os.path.join(tmp_dir, "local_autopkgtest") 197 | git_command = ["git", "clone", git_url, git_trunk_path] 198 | retval = run_command_with_logged_output(git_command) 199 | if retval != 0: 200 | raise ChildProcessError( 201 | "{} exited with status {}".format(git_command, retval) 202 | ) 203 | if git_hash: 204 | logger.info("Checking out specific git hash: %s", git_hash) 205 | git_hash_command = [ 206 | "git", 207 | "--git-dir", 208 | os.path.join(git_trunk_path, ".git"), 209 | "--work-tree", 210 | git_trunk_path, 211 | "checkout", 212 | git_hash, 213 | ] 214 | run_command_with_logged_output(git_hash_command) 215 | adt_path = os.path.join(git_trunk_path, "tools") 216 | adt_cmd = os.path.join(git_trunk_path, "run-from-checkout") 217 | else: 218 | logger.info("Using installed autopkgtest:") 219 | run_command_with_logged_output(["dpkg-query", "-W", "autopkgtest"]) 220 | adt_path, adt_cmd = local_adt 221 | adt_cmd = os.path.join(adt_path, adt_cmd) 222 | return (adt_path, adt_cmd) 223 | 224 | 225 | def _get_local_adt(): 226 | path = find_executable("autopkgtest") 227 | if path: 228 | return path.rsplit("/", 1) 229 | return None 230 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/_provisionconfig.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | 19 | import logging 20 | import os 21 | import re 22 | 23 | from upgrade_testing.provisioning import backends 24 | 25 | logger = logging.getLogger(__name__) 26 | 27 | 28 | class ProvisionSpecification: 29 | def __init__(self): 30 | raise NotImplementedError() 31 | 32 | @property 33 | def system_states(self): 34 | # Note: Rename from releases 35 | raise NotImplementedError() 36 | 37 | @property 38 | def initial_state(self): 39 | """Return the string indicating the required initial system state.""" 40 | raise NotImplementedError() 41 | 42 | @property 43 | def final_state(self): 44 | """Return the string indicating the required final system state.""" 45 | raise NotImplementedError() 46 | 47 | @property 48 | def backend_name(self): 49 | """Return the name of the provision backend.""" 50 | return self.backend.name 51 | 52 | def backend_available(self): 53 | """Return True if the provisioning backend is available.""" 54 | return self.backend.available() 55 | 56 | def create(self, adt_base_path): 57 | """Provision the stored backend.""" 58 | return self.backend.create(adt_base_path) 59 | 60 | def close(self): 61 | return self.backend.close() if hasattr(self.backend, "close") else None 62 | 63 | def get_adt_run_args(self, **kwargs): 64 | """Return list with the adt args for this provisioning backend.""" 65 | raise NotImplementedError() 66 | 67 | def set_verbose(self, verbose): 68 | self.backend.set_verbose(verbose) 69 | 70 | @staticmethod 71 | def from_testspec(spec, spec_path): 72 | backend_name = spec["provisioning"]["backend"] 73 | spec_type = get_specification_type(backend_name) 74 | return spec_type(spec["provisioning"], spec_path) 75 | 76 | @staticmethod 77 | def from_provisionspec(spec, spec_path): 78 | # A provision spec is almost the same as a testdef provision spec 79 | # except it doesn't have the parent stanza. 
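# (i.e. the backend name is read from spec["backend"] here, whereas from_testspec above reads spec["provisioning"]["backend"])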
80 | backend_name = spec["backend"] 81 | spec_type = get_specification_type(backend_name) 82 | return spec_type(spec, spec_path) 83 | 84 | 85 | def get_specification_type(spec_name): 86 | __spec_map = dict( 87 | lxc=LXCProvisionSpecification, 88 | qemu=QemuProvisionSpecification, 89 | ) 90 | try: 91 | return __spec_map[spec_name] 92 | except KeyError: 93 | logger.error("Unknown spec name: {}".format(spec_name)) 94 | raise 95 | 96 | 97 | class LXCProvisionSpecification(ProvisionSpecification): 98 | def __init__(self, provision_config, provision_path): 99 | # Defaults to ubuntu 100 | self.distribution = provision_config.get("distribution", "ubuntu") 101 | self.releases = provision_config["releases"] 102 | self.arch = provision_config["arch"] 103 | self.do_release_upgrade_prompt = provision_config.get( 104 | "do_release_upgrade_prompt", "" 105 | ) 106 | self._provisionconfig_path = provision_path 107 | 108 | self.backend = backends.LXCBackend( 109 | self.initial_state, self.distribution, self.arch 110 | ) 111 | 112 | @property 113 | def system_states(self): 114 | # Note: Rename from releases 115 | return self.releases 116 | 117 | @property 118 | def initial_state(self): 119 | """Return the string indicating the required initial system state.""" 120 | return self.releases[0] 121 | 122 | @property 123 | def final_state(self): 124 | """Return the string indicating the required final system state.""" 125 | return self.releases[-1] 126 | 127 | def get_adt_run_args(self, **kwargs): 128 | """Return list with the adt args for this provisioning backend.""" 129 | return self.backend.get_adt_run_args(**kwargs) 130 | 131 | def __repr__(self): 132 | return "{classname}(backend={backend}, distribution={dist}, releases={releases})".format( # NOQA 133 | classname=self.__class__.__name__, 134 | backend=self.backend, 135 | dist=self.distribution, 136 | releases=self.releases, 137 | ) 138 | 139 | 140 | class QemuProvisionSpecification(ProvisionSpecification): 141 | def __init__(self, provision_config, provision_path): 142 | self._provisionconfig_path = provision_path 143 | 144 | self.releases = provision_config["releases"] 145 | self.arch = provision_config.get("arch", "amd64") 146 | self.do_release_upgrade_prompt = provision_config.get( 147 | "do_release_upgrade_prompt", "" 148 | ) 149 | self.image_name = provision_config.get( 150 | "image_name", 151 | "autopkgtest-{}-{}-cloud.img".format( 152 | self.initial_state, self.arch 153 | ), 154 | ) 155 | provision_config_directory = os.path.dirname( 156 | os.path.abspath(provision_path) 157 | ) 158 | self.build_args = _render_build_args( 159 | provision_config.get("build_args", []), provision_config_directory 160 | ) 161 | logger.info("Using build args: {}".format(self.build_args)) 162 | 163 | self.packages = provision_config.get("packages") 164 | self.verbose = False 165 | 166 | self.backend = backends.QemuBackend( 167 | self.initial_state, 168 | self.arch, 169 | self.image_name, 170 | self.packages, 171 | self.build_args, 172 | ) 173 | 174 | @property 175 | def system_states(self): 176 | # Note: Rename from releases 177 | return self.releases 178 | 179 | @property 180 | def initial_state(self): 181 | """Return the string indicating the required initial system state.""" 182 | return self.releases[0] 183 | 184 | @property 185 | def final_state(self): 186 | """Return the string indicating the required final system state.""" 187 | return self.releases[-1] 188 | 189 | def get_adt_run_args(self, **kwargs): 190 | """Return list with the adt args for this provisioning 
backend.""" 191 | return self.backend.get_adt_run_args(**kwargs) 192 | 193 | def __repr__(self): 194 | return "{classname}(backend={backend}, distribution={dist}, releases={releases})".format( # NOQA 195 | classname=self.__class__.__name__, 196 | backend=self.backend, 197 | dist=getattr(self, "distribution", "ubuntu"), 198 | releases=self.releases, 199 | ) 200 | 201 | 202 | def _render_build_args(build_args, profile_path): 203 | """Modify build args if required and return a build args list. 204 | 205 | For instance, replaces any tokens in the strings with the relevant values. 206 | 207 | :param build_args: A list of strings. 208 | :param profile_path: String containing the path of the profile file in use. 209 | :returns: A list containing the build arg strings. 210 | 211 | """ 212 | _token_lookup = dict(PROFILE_PATH=lambda: profile_path) 213 | 214 | if not isinstance(build_args, list): 215 | raise TypeError("build_args must be a list") 216 | if not all(isinstance(s, str) for s in build_args): 217 | raise ValueError("build_args must contain strings.") 218 | 219 | new_args = [] 220 | for arg in build_args: 221 | new_args.append(_replace_placeholders(arg, _token_lookup)) 222 | return new_args 223 | 224 | 225 | def _replace_placeholders(original_string, token_lookup): 226 | token_strings = list(token_lookup.keys()) 227 | # Ensure we replace the longest tokens first so we don't confuse substrings 228 | # (i.e. do $FOOBAR before $FOO otherwise we'll get $BAR) 229 | token_strings.sort(reverse=True) 230 | 231 | for token in token_strings: 232 | result = re.sub( 233 | r"\${}".format(token), token_lookup[token](), original_string 234 | ) 235 | original_string = result 236 | 237 | return original_string 238 | -------------------------------------------------------------------------------- /upgrade_testing/configspec/_config.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import logging 20 | import os 21 | from collections import namedtuple 22 | 23 | import yaml 24 | 25 | from upgrade_testing.provisioning import ProvisionSpecification 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | ScriptStore = namedtuple("ScriptStore", ["executables", "location"]) 31 | 32 | 33 | class TestSpecification: 34 | """Wraps details about the specification. 35 | 36 | :param provision_spec: ProvisionSpecification object. 37 | 38 | i.e. the provisioning parts etc.
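A minimal construction sketch (hypothetical file name; ProvisionSpecification
is imported above and definition_reader is defined further down):

    provision = ProvisionSpecification.from_testspec(test_dict, "profile.yaml")
    spec = TestSpecification(test_dict, provision)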
39 | """ 40 | 41 | def __init__(self, details, provision_spec): 42 | self.provisioning = provision_spec 43 | self._test_source_dir = None 44 | try: 45 | self._reader(details) 46 | except KeyError as e: 47 | logger.error( 48 | "Missing required configuration detail: {}".format(str(e)) 49 | ) 50 | 51 | def _reader(self, details): 52 | self.name = details["testname"] 53 | 54 | self.scripts_location = _get_script_location_path( 55 | details, self.provisioning._provisionconfig_path 56 | ) 57 | 58 | self.pre_upgrade_scripts = ScriptStore( 59 | *_generate_script_list( 60 | details["pre_upgrade_scripts"], self.scripts_location 61 | ) 62 | ) 63 | self.post_upgrade_tests = ScriptStore( 64 | *_generate_script_list( 65 | details["post_upgrade_tests"], self.scripts_location 66 | ) 67 | ) 68 | 69 | self.scripts_data = details.get("scripts_data", None) 70 | 71 | backend_args = details.get("backend_args", []) 72 | self.backend_args = [ 73 | arg.format(scripts_location=self.scripts_location) 74 | for arg in backend_args 75 | ] 76 | 77 | @property 78 | def test_source(self): 79 | if self._test_source_dir is None: 80 | return "./" 81 | else: 82 | return self._test_source_dir 83 | 84 | def __repr__(self): 85 | return "{classname}(name={name}, provisioning={prov})".format( 86 | classname=self.__class__.__name__, 87 | name=self.name, 88 | prov=self.provisioning, 89 | ) 90 | 91 | 92 | def _get_script_location_path(provision_details, provisionfile_path): 93 | """Return the full path for a script location.""" 94 | # If script_location starts with ./ or ../ then we need to get the abs path 95 | # of the provision file and append it. 96 | location = provision_details.get("scripts_location", None) 97 | if location is None: 98 | return location 99 | if location.startswith("file://."): 100 | provisionfile_dir = os.path.dirname(provisionfile_path) 101 | full_path = os.path.abspath( 102 | os.path.join(provisionfile_dir, location.replace("file://", "")) 103 | ) 104 | return "file://{}".format(full_path) 105 | # Seems location is a full abs path already. 106 | return location 107 | 108 | 109 | def _generate_script_list(scripts_or_path, script_source_path=None): 110 | """Return a tuple containing a list of script names and a location string. 111 | 112 | Source can either be a path that contains executable files or a list of 113 | executable names. 114 | 115 | """ 116 | 117 | if isinstance(scripts_or_path, list): 118 | return _get_list_of_scripts_locations( 119 | scripts_or_path, script_source_path 120 | ) 121 | 122 | abs_path = _get_abs_script_location(scripts_or_path, script_source_path) 123 | 124 | if os.path.isdir(abs_path): 125 | return _get_list_of_scripts_in_directory(abs_path) 126 | 127 | raise ValueError( 128 | "No scripts found. {} is neither a path nor a list of scripts".format( 129 | scripts_or_path)) 130 | 131 | 132 | def _get_list_of_scripts_locations(scripts, script_source_path): 133 | """Return a tuple containing lists of scripts and their location path. 134 | 135 | :raises ValueError: If `script_source_path` is None. 136 | :raises ValueError: If a declared script is not found on the filesystem. 137 | 138 | :returns: tuple containing a list of script names and a string containing 139 | the location path. 140 | 141 | """ 142 | if script_source_path is None: 143 | raise ValueError("No script location supplied for scripts") 144 | sane_script_location = script_source_path.replace("file://", "") 145 | # scripts is already a list of scripts.
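# e.g. (hypothetical values): scripts = ["pre_check", "post_check"] and
# sane_script_location = "/home/user/profile/scripts" once the "file://"
# prefix has been stripped above.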
146 | for f in scripts: 147 | if not os.path.isfile(os.path.join(sane_script_location, f)): 148 | raise ValueError( 149 | 'Supplied script "{}" was not found at: {}'.format( 150 | f, sane_script_location 151 | ) 152 | ) 153 | return (scripts, script_source_path) 154 | 155 | 156 | def _get_abs_script_location(script_path, script_source_path): 157 | """Return absolute path for script location.""" 158 | if script_source_path is not None: 159 | return os.path.abspath( 160 | os.path.join( 161 | script_source_path.replace("file://", ""), script_path 162 | ) 163 | ) 164 | else: 165 | return script_path 166 | 167 | 168 | def _get_list_of_scripts_in_directory(abs_path): 169 | """Return tuple containing list of scripts and location path. 170 | 171 | :raises ValueError: If no executable scripts can be found at the supplied 172 | location. 173 | 174 | """ 175 | script_file_list = _get_executable_files(abs_path) 176 | if not script_file_list: 177 | raise ValueError( 178 | "No executable scripts found at location: {}".format(abs_path) 179 | ) 180 | # Update the script_location path to suit. 181 | return (script_file_list, "file://{}".format(abs_path)) 182 | 183 | 184 | def _get_executable_files(abs_path): 185 | def is_executable(path): 186 | return os.path.isfile(path) and os.access(path, os.X_OK) 187 | 188 | return [ 189 | f 190 | for f in os.listdir(abs_path) 191 | if is_executable(os.path.join(abs_path, f)) 192 | ] 193 | 194 | 195 | def definition_reader(testdef_filepath, provisiondef_filepath=None): 196 | """Produce a list of TestSpecifications from the provided testdef file. 197 | 198 | If a provisiondef file path is also given, those details are incorporated 199 | into the specification; otherwise they are collected from the testspec. 200 | Will raise an exception if the configuration is incorrect. 201 | 202 | :raises KeyError: if there are any invalid or unknown config details. 203 | 204 | """ 205 | testdef = _load_configdef(testdef_filepath) 206 | 207 | specs = [] 208 | for test in testdef: 209 | if provisiondef_filepath is None: 210 | provision_details = ProvisionSpecification.from_testspec( 211 | test, testdef_filepath 212 | ) 213 | else: 214 | # Perhaps we want to be able to pass args to the commandline 215 | # instead of writing a file? We would always fudge that and 216 | # write to a file-like object and use that instead. 217 | provision_details = ProvisionSpecification.from_provisionspec( 218 | _load_configdef(provisiondef_filepath), provisiondef_filepath 219 | ) 220 | 221 | specs.append(TestSpecification(test, provision_details)) 222 | return specs 223 | 224 | 225 | def _load_configdef(testdef_filepath): 226 | # Need a better way to confirm this.
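# Currently only a ".yaml" suffix is recognised; any other file name falls
# through to the ValueError below.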
227 | if testdef_filepath.endswith(".yaml"): 228 | return _read_yaml_config(testdef_filepath) 229 | else: 230 | raise ValueError( 231 | "Unknown configuration file format: {}".format(testdef_filepath) 232 | ) 233 | 234 | 235 | def _read_yaml_config(filepath): 236 | try: 237 | with open(filepath, "r") as f: 238 | return yaml.safe_load(f) 239 | except FileNotFoundError as e: 240 | err_msg = "Unable to open config file: {}".format(filepath) 241 | logger.error(err_msg) 242 | e.args += (err_msg,) 243 | raise 244 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/backends/_ssh.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | 19 | import errno 20 | import logging 21 | import os 22 | import socket 23 | import subprocess 24 | import time 25 | 26 | import pexpect 27 | from retrying import retry 28 | 29 | from upgrade_testing.provisioning.backends._base import ProviderBackend 30 | from upgrade_testing.provisioning.executors import SSHExecutor 31 | 32 | CACHE_DIR = "/var/cache/auto-upgrade-testing" 33 | 34 | logger = logging.getLogger(__name__) 35 | TIMEOUT_CMD = 60 36 | TIMEOUT_CONNECT = 120 37 | TIMEOUT_WAIT_FOR_DEVICE = 120 38 | 39 | 40 | class SshBackend(ProviderBackend): 41 | 42 | # We can change the Backends to require just what they need. In this case 43 | # it would be distribution, release name (, arch) 44 | def __init__( 45 | self, 46 | release, 47 | arch, 48 | image_name, 49 | build_args=[], 50 | username=None, 51 | password=None, 52 | device_ip=None, 53 | ): 54 | """Provide backend capabilities as requested in the provision spec. 55 | 56 | :param provision_spec: ProvisionSpecification object containing backend 57 | details. 
58 | 59 | """ 60 | self.release = release 61 | self.arch = arch 62 | self.image_name = image_name 63 | self.build_args = build_args 64 | self.executor = SSHExecutor() 65 | self.username = username or "ubuntu" 66 | self.password = password or "ubuntu" 67 | self.connected = False 68 | self.key_file = None 69 | self.device_ip = device_ip or "localhost" 70 | self.port = -1 71 | 72 | def available(self): 73 | """Return true if a qemu exists that matches the provided args.""" 74 | image_name = self.image_name 75 | logger.info("Checking for {}".format(image_name)) 76 | return image_name in os.listdir(CACHE_DIR) 77 | 78 | def create(self, adt_base_path): 79 | raise NotImplementedError("Cannot create ssh backend directly") 80 | 81 | def get_adt_run_args(self, **kwargs): 82 | return [ 83 | "ssh", 84 | "--port", 85 | str(self.port), 86 | "--login", 87 | self.username, 88 | "--password", 89 | self.password, 90 | "--identity", 91 | self.key_file, 92 | "--hostname", 93 | self.device_ip, 94 | "--reboot", 95 | ] 96 | 97 | @property 98 | def name(self): 99 | return "ssh" 100 | 101 | def __repr__(self): 102 | return "{classname}(release={release})".format( 103 | classname=self.__class__.__name__, release=self.release 104 | ) 105 | 106 | port_from = 22220 107 | port_to = 23000 108 | 109 | def connect(self, timeout=TIMEOUT_CONNECT): 110 | if not self.connected: 111 | self.enable_ssh() 112 | self.executor.connect( 113 | self.username, 114 | self.password, 115 | self.port, 116 | self.device_ip, 117 | timeout, 118 | ) 119 | self.connected = True 120 | 121 | def close(self): 122 | if self.connected: 123 | self.executor.close() 124 | self.connected = False 125 | 126 | def reboot(self): 127 | self.executor.reboot() 128 | 129 | def shutdown(self): 130 | self.executor.shutdown() 131 | 132 | def put(self, src, dst): 133 | self.executor.put(src, dst) 134 | 135 | def run(self, command, timeout=TIMEOUT_CMD, log_stdout=True): 136 | return self.executor.run(command, timeout, log_stdout) 137 | 138 | def run_sudo(self, command, timeout=TIMEOUT_CMD, log_stdout=True): 139 | return self.executor.run_sudo(command, timeout, log_stdout) 140 | 141 | def find_free_port(self): 142 | for port in range(self.port_from, self.port_to): 143 | try: 144 | s = socket.create_connection(("127.0.0.1", port)) 145 | except socket.error as e: 146 | if e.errno == errno.ECONNREFUSED: 147 | # This means port is not currently used, so use this one 148 | self.port = port 149 | return 150 | else: 151 | pass 152 | else: 153 | # port is already taken 154 | s.close() 155 | raise RuntimeError("Could not find free port for SSH connection.") 156 | 157 | def enable_ssh(self): 158 | """Enable ssh using public key.""" 159 | self._wait_for_device() 160 | self._get_ssh_id_path() 161 | if not self._try_public_key_login(): 162 | self._update_device_host_key() 163 | self._copy_ssh_id_to_device() 164 | self._verify_ssh_connect() 165 | 166 | def _wait_for_device(self, timeout=TIMEOUT_CONNECT): 167 | end = time.time() + timeout 168 | while time.time() < end: 169 | try: 170 | s = socket.create_connection( 171 | (self.device_ip, str(self.port)), timeout 172 | ) 173 | except ConnectionRefusedError: 174 | time.sleep(1) 175 | else: 176 | s.close() 177 | return 178 | raise TimeoutError( 179 | "Could not connect to {} " 180 | "port {}.".format(self.device_ip, self.port) 181 | ) 182 | 183 | def _update_device_host_key(self): 184 | hosts_path = os.path.expanduser("~/.ssh/known_hosts") 185 | subprocess.call( 186 | [ 187 | "ssh-keygen", 188 | "-f", 189 | hosts_path, 190 | "-R", 191 
| "[{}]:{}".format(self.device_ip, self.port), 192 | ] 193 | ) 194 | 195 | @retry( 196 | stop_max_attempt_number=20, 197 | wait_fixed=2000, 198 | retry_on_exception=lambda exception: isinstance( 199 | exception, RuntimeError 200 | ), 201 | ) 202 | def _copy_ssh_id_to_device(self): 203 | pub_path = "{}.pub".format(self.key_file) 204 | home_ssh = "/home/{u}/.ssh".format(u=self.username) 205 | authorized_keys = os.path.join(home_ssh, "authorized_keys") 206 | self._run(["mkdir", "-p", home_ssh]) 207 | self._put(pub_path, authorized_keys) 208 | self._run(["chown", "{u}:{u}".format(u=self.username), "-R", home_ssh]) 209 | self._run(["chmod", "700", home_ssh]) 210 | self._run(["chmod", "600", authorized_keys]) 211 | 212 | def _get_ssh_id_path(self): 213 | match = False 214 | for id in ["~/.ssh/id_rsa", "~/.ssh/id_autopkgtest"]: 215 | path = os.path.expanduser(id) 216 | if os.path.exists(path): 217 | match = True 218 | break 219 | if not match: 220 | subprocess.check_call( 221 | ["ssh-keygen", "-q", "-t", "rsa", "-f", path, "-N", ""] 222 | ) 223 | self.key_file = path 224 | 225 | def _try_public_key_login(self): 226 | """Try and log in using public key. If this succeeds then there is no 227 | need to do any further ssh setup. 228 | :return: True if login was successful, False otherwise 229 | """ 230 | cmd = " ".join( 231 | [ 232 | "ssh", 233 | "-p", 234 | str(self.port), 235 | "-o", 236 | "UserKnownHostsFile=/dev/null", 237 | "-o", 238 | "StrictHostKeyChecking=no", 239 | "-i", 240 | self.key_file, 241 | "-l", 242 | self.username, 243 | self.device_ip, 244 | ] 245 | ) 246 | child = pexpect.spawn(cmd) 247 | try: 248 | index = child.expect( 249 | [r"\$", "password", "denied"], timeout=TIMEOUT_CONNECT 250 | ) 251 | except (pexpect.exceptions.TIMEOUT, pexpect.exceptions.EOF): 252 | index = -1 253 | finally: 254 | child.close() 255 | return index == 0 256 | 257 | def _verify_ssh_connect(self): 258 | """Verify that an ssh connection can be established without using 259 | password. 
260 | """ 261 | for count in range(20): 262 | if self._try_public_key_login(): 263 | return 264 | else: 265 | time.sleep(1) 266 | raise RuntimeError("Could not create ssh connection") 267 | 268 | def _run(self, commands, timeout=TIMEOUT_CMD): 269 | """Run a command setting up an ssh connection using password.""" 270 | ssh_cmd = [ 271 | "ssh", 272 | "-o", 273 | "StrictHostKeyChecking=no", 274 | "-p", 275 | str(self.port), 276 | "{}@{}".format(self.username, self.device_ip), 277 | ] 278 | self._run_with_password(ssh_cmd + commands, self.password, timeout) 279 | 280 | def _put(self, src, dst, timeout=TIMEOUT_CMD): 281 | """Put file onto device setting up an ssh connection using password.""" 282 | scp_cmd = [ 283 | "scp", 284 | "-o", 285 | "StrictHostKeyChecking=no", 286 | "-P", 287 | str(self.port), 288 | src, 289 | "{}@{}:{}".format(self.username, self.device_ip, dst), 290 | ] 291 | self._run_with_password(scp_cmd, self.password, timeout) 292 | 293 | def _run_with_password(self, commands, password, timeout=TIMEOUT_CMD): 294 | """Run command expecting a password prompt to be displayed.""" 295 | command = " ".join(commands) 296 | child = pexpect.spawn(command) 297 | try: 298 | child.expect("password", timeout=timeout) 299 | except pexpect.exceptions.EOF: 300 | # No password prompt is displayed, so just continue 301 | pass 302 | else: 303 | child.sendline(password) 304 | if child.expect([pexpect.EOF, "denied"], timeout=timeout): 305 | raise PermissionError( 306 | 'Check password is correct: "{}"'.format(password) 307 | ) 308 | finally: 309 | child.close() 310 | if child.exitstatus: 311 | raise RuntimeError("Error running {}".format(command)) 312 | -------------------------------------------------------------------------------- /upgrade_testing/command_line.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Ubuntu Upgrade Testing 4 | # Copyright (C) 2014, 2015 Canonical 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | 20 | import datetime 21 | import logging 22 | import os 23 | import subprocess 24 | import sys 25 | import tempfile 26 | from argparse import ArgumentParser 27 | 28 | import junitparser 29 | import yaml 30 | 31 | from upgrade_testing.configspec import definition_reader 32 | from upgrade_testing.preparation import ( 33 | get_testbed_storage_location, 34 | prepare_test_environment, 35 | ) 36 | 37 | logger = logging.getLogger(__name__) 38 | 39 | 40 | def setup_logging(): 41 | """Ensure logging is doing something sensible.""" 42 | root = logging.getLogger() 43 | root.setLevel(logging.INFO) 44 | 45 | ch = logging.StreamHandler(sys.stdout) 46 | formatter = logging.Formatter( 47 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 48 | ) 49 | ch.setFormatter(formatter) 50 | root.addHandler(ch) 51 | 52 | 53 | def parse_args(): 54 | """ 55 | do_setup (better name) if the backend isn't setup do it. 
56 | """ 57 | parser = ArgumentParser(description="Run system tests for Upgrade Tests.") 58 | parser.add_argument( 59 | "--config", "-c", help="The config file to use for this run." 60 | ) 61 | parser.add_argument( 62 | "--provision", 63 | default=False, 64 | action="store_true", 65 | help="Provision the requested backend", 66 | ) 67 | parser.add_argument( 68 | "--verbose-provision", 69 | "-v", 70 | action="store_true", 71 | help="Provision with build output enabled.", 72 | ) 73 | parser.add_argument( 74 | "--force-provision", 75 | "-f", 76 | action="store_true", 77 | help="Provision a new image regardless of cache.", 78 | ) 79 | parser.add_argument( 80 | "--results-dir", 81 | help="Directory to store results generated during the run.", 82 | ) 83 | parser.add_argument( 84 | "--adt-args", 85 | "-a", 86 | default="", 87 | help="Arguments to pass through to the autopkgtest runner.", 88 | ) 89 | parser.add_argument( 90 | "--keep-overlay", 91 | "-k", 92 | default=False, 93 | action="store_true", 94 | help="Whether to keep the resulting overlay image", 95 | ) 96 | return parser.parse_args() 97 | 98 | 99 | def get_output_dir(args): 100 | # This will be updated to take in the directory to create the results in 101 | # and will be renamed create_... as all it will do is create the ts dir. 102 | """Return directory path that the results should be put into. 103 | 104 | If no directory is provided in the commandline args then create a temp 105 | dir. It is the responsibility of the script runner to clean up this temp 106 | dir. 107 | 108 | Within this base dir a timestamped directory will be created in which the 109 | output will reside. 110 | 111 | """ 112 | if args.results_dir is not None: 113 | base_dir = args.results_dir 114 | else: 115 | base_dir = tempfile.mkdtemp(prefix="upgrade-tests") 116 | logger.info("Creating folder for results.") 117 | 118 | ts_dir = datetime.datetime.now().strftime("%Y%m%d.%H%M%S") 119 | full_path = os.path.join(os.path.abspath(base_dir), ts_dir) 120 | 121 | logger.info("Creating results dir: {}".format(full_path)) 122 | os.makedirs(full_path, exist_ok=True) 123 | 124 | return full_path 125 | 126 | 127 | def display_results(output_dir, exit_status): 128 | artifacts_directory = os.path.join(output_dir, "artifacts", "upgrade_run") 129 | logger.info("Results can be found here: {}".format(artifacts_directory)) 130 | 131 | results_yaml = os.path.join(artifacts_directory, "runner_results.yaml") 132 | with open(results_yaml, "r") as f: 133 | results = yaml.safe_load(f) 134 | 135 | # this can be html/xml/whatever 136 | test_suite = junitparser.TestSuite("Auto Upgrade Testing") 137 | output = [] 138 | output.append("Pre script results:") 139 | for test, result in results.get("pre_script_output", {}).items(): 140 | output.append("\t{test}: {result}".format(test=test, result=result)) 141 | test_case = junitparser.TestCase(test) 142 | if result == "FAIL": 143 | test_case.result = [junitparser.Failure("Test Failed")] 144 | test_suite.add_testcase(test_case) 145 | 146 | output.append("Upgrade result: ") 147 | autopkgtest_upgrade = junitparser.TestCase("upgrade") 148 | if exit_status.returncode == 0: 149 | output.append("\tPASS") 150 | else: 151 | output.append(f"\tFAIL: {exit_status}") 152 | autopkgtest_upgrade.result = [junitparser.Failure(f"{exit_status}")] 153 | test_suite.add_testcase(autopkgtest_upgrade) 154 | 155 | output.append("Post upgrade test results:") 156 | for test, result in results.get("post_test_output", {}).items(): 157 | output.append("\t{test}: {result}".format(test=test,
result=result)) 158 | test_case = junitparser.TestCase(test) 159 | if result == "FAIL": 160 | test_case.result = [junitparser.Failure("Test Failed")] 161 | test_suite.add_testcase(test_case) 162 | 163 | xml = junitparser.JUnitXml() 164 | xml.add_testsuite(test_suite) 165 | xml.write(os.path.join(artifacts_directory, "junit.xml")) 166 | print("\n".join(output)) 167 | 168 | 169 | def execute_adt_run( 170 | testsuite, testrun_files, output_dir, adt_args="", keep_overlay=False 171 | ): 172 | """Prepare the autopkgtest to execute. 173 | 174 | Copy all the files into the expected place etc. 175 | 176 | :param testsuite: Dict containing testsuite details 177 | :param test_file_name: filepath for . . . 178 | """ 179 | # we can change 'test_source_retriever' so that it uses the testurn_files 180 | # and doesn't need to worry about cleanup. 181 | adt_run_command = get_adt_run_command( 182 | testsuite.provisioning, 183 | testrun_files, 184 | output_dir, 185 | testsuite.backend_args, 186 | adt_args, 187 | keep_overlay, 188 | ) 189 | return subprocess.run(adt_run_command) 190 | 191 | 192 | def get_adt_run_command( 193 | provisioning, 194 | testrun_files, 195 | results_dir, 196 | backend_args=[], 197 | adt_args="", 198 | keep_overlay=False, 199 | ): 200 | """Construct the adt command to run. 201 | 202 | :param provisioning: upgrade_testing.provisioning.ProvisionSpecification 203 | object to retrieve adt details from. 204 | :param testrun_files: upgrade_testing._hostprep.TestrunTempFiles object 205 | providing temp/setup directory details 206 | :param results_dir: The directory path in which to place any artifacts and 207 | results from the run. 208 | 209 | """ 210 | # Default autopkgtest hardcoded adt command 211 | adt_cmd = [ 212 | testrun_files.adt_cmd, 213 | "-B", 214 | "-U", 215 | "-d", 216 | "--user=root", 217 | testrun_files.unbuilt_dir, 218 | "--output-dir={}".format(results_dir), 219 | ] + adt_args.split() 220 | 221 | # Copy across the test scripts. 222 | scripts_dest_dir = "{testbed_location}/scripts/".format( 223 | testbed_location=get_testbed_storage_location() 224 | ) 225 | copy_cmd = "--copy={src}:{dest}".format( 226 | src=testrun_files.scripts, dest=scripts_dest_dir 227 | ) 228 | adt_cmd.append(copy_cmd) 229 | 230 | # Need to get some env vars across to the testbed. Namely tests to run and 231 | # test locations. 232 | adt_cmd.append( 233 | "--copy={config}:{testbed_location}/auto_upgrade_test_settings".format( 234 | config=testrun_files.run_config_file, 235 | testbed_location=get_testbed_storage_location(), 236 | ) 237 | ) 238 | 239 | backend_args = ( 240 | provisioning.get_adt_run_args( 241 | tmp_dir=testrun_files.testrun_tmp_dir, keep_overlay=keep_overlay 242 | ) 243 | + backend_args 244 | ) 245 | 246 | return adt_cmd + ["--"] + backend_args 247 | 248 | 249 | def main(): 250 | setup_logging() 251 | args = parse_args() 252 | 253 | try: 254 | test_def_details = definition_reader(args.config) 255 | except KeyError as e: 256 | logger.error( 257 | "Unable to parse configuration file ({}): key {} not found".format( 258 | args.config, e 259 | ) 260 | ) 261 | sys.exit(1) 262 | except ValueError as e: 263 | logger.error( 264 | "Unable to parse configuration file details from config {}.\n" 265 | "ERROR: {}".format(args.config, e) 266 | ) 267 | sys.exit(1) 268 | 269 | # For each test definition ensure that the required backend is available, 270 | # if not either error or create it (depending on args.) 
271 | for testsuite in test_def_details: 272 | # TODO: This could be improved to look something like: 273 | # testsuite.provisioning.prepare(provision=create) 274 | # Note this could raise an exception. 275 | 276 | with prepare_test_environment(testsuite) as created_files: 277 | if ( 278 | args.force_provision 279 | or not testsuite.provisioning.backend_available() 280 | ): 281 | if args.provision: 282 | logger.debug("Provisioning backend.") 283 | testsuite.provisioning.set_verbose(args.verbose_provision) 284 | testsuite.provisioning.create(created_files.adt_base_path) 285 | else: 286 | logger.error( 287 | "No available backend for test: {}".format( 288 | testsuite.name 289 | ) 290 | ) 291 | continue 292 | else: 293 | logger.info("Backend is available.") 294 | 295 | # Setup output dir 296 | output_dir = get_output_dir(args) 297 | 298 | exit_status = execute_adt_run( 299 | testsuite, 300 | created_files, 301 | output_dir, 302 | args.adt_args, 303 | args.keep_overlay, 304 | ) 305 | 306 | testsuite.provisioning.close() 307 | 308 | display_results(output_dir, exit_status) 309 | 310 | sys.exit(exit_status.returncode) 311 | 312 | 313 | if __name__ == "__main__": 314 | main() 315 | -------------------------------------------------------------------------------- /upgrade_testing/provisioning/backends/_qemu.py: -------------------------------------------------------------------------------- 1 | # 2 | # Ubuntu Upgrade Testing 3 | # Copyright (C) 2015 Canonical 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see .
17 | # 18 | 19 | import logging 20 | import os 21 | import shlex 22 | import shutil 23 | import signal 24 | import subprocess 25 | import tempfile 26 | import threading 27 | 28 | from paramiko.ssh_exception import SSHException 29 | 30 | from upgrade_testing.provisioning._util import run_command_with_logged_output 31 | from upgrade_testing.provisioning.backends._ssh import SshBackend 32 | 33 | CACHE_DIR = "/var/cache/auto-upgrade-testing" 34 | OVERLAY_DIR = os.path.join(CACHE_DIR, "overlay") 35 | QEMU_LAUNCH_OPTS = ( 36 | "{qemu} -m {ram} -smp {cpu} -pidfile {workdir}/qemu.pid -rtc base=localtime " 37 | "-cpu core2duo -enable-kvm " 38 | ) 39 | QEMU_SYSTEM_AMD64 = "qemu-system-x86_64" 40 | QEMU_SYSTEM_I386 = "qemu-system-i386" 41 | ARCH_AMD64 = "amd64" 42 | ARCH_I386 = "i386" 43 | QEMU_DISPLAY_OPTS = "-display sdl " 44 | QEMU_DISPLAY_VGA_OPTS = "-vga qxl " 45 | QEMU_SOUND_OPTS = "-soundhw all " 46 | QEMU_DISPLAY_HEADLESS = "-display none " 47 | QEMU_NET_OPTS = "-net nic,model=virtio -net user" 48 | QEMU_PORT_OPTS = ",hostfwd=tcp::{port}-:22 " 49 | QEMU_DISK_IMAGE_OPTS = "-drive file={disk_img},if=virtio " 50 | QEMU_DISK_IMAGE_OVERLAY_OPTS = ( 51 | "-drive file={overlay_img},cache=unsafe,if=virtio,index=0 " 52 | ) 53 | DEFAULT_RAM = "3072" 54 | DEFAULT_CPU = "2" 55 | TIMEOUT_REBOOT = "300" 56 | HEADLESS = True 57 | 58 | logger = logging.getLogger(__name__) 59 | 60 | 61 | class QemuBackend(SshBackend): 62 | 63 | # We can change the Backends to require just what they need. In this case 64 | # it would be distribution, release name (, arch) 65 | def __init__(self, release, arch, image_name, packages, build_args=[]): 66 | """Provide backend capabilities as requested in the provision spec. 67 | 68 | :param provision_spec: ProvisionSpecification object containing backend 69 | details. 70 | 71 | """ 72 | super().__init__(release, arch, image_name, build_args) 73 | self.release = release 74 | self.arch = arch 75 | self.image_name = image_name 76 | self.build_args = build_args 77 | self.packages = packages 78 | self.working_dir = tempfile.mkdtemp() 79 | self.qemu_runner = None 80 | self.find_free_port() 81 | 82 | def available(self): 83 | """Return true if a qemu exists that matches the provided args.""" 84 | image_name = self.image_name 85 | logger.info("Checking for {}".format(image_name)) 86 | return image_name in os.listdir(CACHE_DIR) 87 | 88 | def create(self, adt_base_path): 89 | """Create a qemu image.""" 90 | 91 | logger.info("Creating qemu image for run.") 92 | cmd = "{builder_cmd} -a {arch} -r {release} -o {output} --userdata {userdata} {verbose} {args}".format( 93 | builder_cmd=os.path.join( 94 | adt_base_path, "autopkgtest-buildvm-ubuntu-cloud" 95 | ), 96 | arch=self.arch, 97 | release=self.release, 98 | output=CACHE_DIR, 99 | userdata=self.create_custom_cloud_init(), 100 | verbose="-v" if self.verbose else "", 101 | args=" ".join(self.build_args), 102 | ) 103 | 104 | run_command_with_logged_output(cmd, shell=True) 105 | 106 | initial_image_name = "autopkgtest-{}-{}.img".format( 107 | self.release, self.arch 108 | ) 109 | initial_image_path = os.path.join(CACHE_DIR, initial_image_name) 110 | final_image_path = os.path.join(CACHE_DIR, self.image_name) 111 | os.rename(initial_image_path, final_image_path) 112 | logger.info("Image created.") 113 | 114 | def close(self): 115 | if self.qemu_runner: 116 | try: 117 | self.shutdown() 118 | except PermissionError: 119 | print( 120 | "Shutdown sudo command failed. 
" 121 | 'Check password: "{}".'.format(self.password) 122 | ) 123 | self.stop_qemu() 124 | except SSHException: 125 | self.stop_qemu() 126 | finally: 127 | self.qemu_runner.join(timeout=5) 128 | shutil.rmtree(self.working_dir) 129 | self.working_dir = None 130 | self.qemu_runner = None 131 | super().close() 132 | 133 | def reboot(self): 134 | self.close() 135 | self.connect() 136 | 137 | def stop_qemu(self): 138 | pid_file = os.path.join(self.working_dir, "qemu.pid") 139 | with open(pid_file) as f: 140 | pid = int(f.read().strip()) 141 | os.kill(pid, signal.SIGTERM) 142 | 143 | def get_adt_run_args(self, keep_overlay=False, **kwargs): 144 | if keep_overlay: 145 | self.qemu_runner = self.launch_qemu( 146 | self.image_name, 147 | kwargs.get("ram", DEFAULT_RAM), 148 | kwargs.get("cpu", DEFAULT_CPU), 149 | kwargs.get("headless", HEADLESS), 150 | port=self.port, 151 | overlay=os.path.join(OVERLAY_DIR, self.image_name), 152 | ) 153 | super().connect() 154 | return super().get_adt_run_args() 155 | return [ 156 | "qemu", 157 | "-c", 158 | DEFAULT_CPU, 159 | "--ram-size", 160 | DEFAULT_RAM, 161 | "--timeout-reboot", 162 | TIMEOUT_REBOOT, 163 | os.path.join(CACHE_DIR, self.image_name), 164 | ] 165 | 166 | def create_custom_cloud_init(self): 167 | userdata = """#cloud-config 168 | timezone: UTC 169 | password: ubuntu 170 | chpasswd: { expire: False } 171 | ssh_pwauth: True 172 | manage_etc_hosts: True 173 | apt: 174 | primary: 175 | - arches: default 176 | uri: http://archive.ubuntu.com/ubuntu 177 | proxy: 178 | package_reboot_if_required: true 179 | package_update: true 180 | package_upgrade: true 181 | packages: 182 | # linux-generic is necessary to get a graphical session 183 | - linux-generic 184 | # packages required for testing 185 | - apport-noui 186 | - eatmydata 187 | - ubuntu-release-upgrader-core 188 | # packages wanted by profile 189 | %(packages)s 190 | write_files: 191 | - content: | 192 | [Unit] 193 | Description=auto-upgrade-testing root shell on %%I 194 | ConditionPathExists=/dev/%%I 195 | 196 | [Service] 197 | ExecStart=/bin/sh 198 | StandardInput=tty-fail 199 | StandardOutput=tty 200 | StandardError=tty 201 | TTYPath=/dev/%%I 202 | SendSIGHUP=yes 203 | # ignore I/O errors on unusable tty 204 | SuccessExitStatus=0 208 SIGHUP SIGINT SIGTERM SIGPIPE 205 | 206 | [Install] 207 | WantedBy=multi-user.target 208 | path: /etc/systemd/system/auto-upgrade-testing@.service 209 | runcmd: 210 | # configure serial console for autopkgtest access 211 | - ln -sf /dev/null /etc/systemd/system/auto-upgrade-testing.service 212 | - ln -sf /etc/systemd/system/auto-upgrade-testing@.service /etc/systemd/system/multi-user.target.wants/auto-upgrade-testing@ttyS1.service 213 | - ln -sf /etc/systemd/system/auto-upgrade-testing@.service /etc/systemd/system/multi-user.target.wants/auto-upgrade-testing@hvc1.service 214 | power_state: 215 | delay: now 216 | mode: poweroff 217 | message: Image creation finished, powering off 218 | timeout: 2 219 | condition: true""" % { 220 | "packages": "\n".join([f" - {x}" for x in self.packages] or []) 221 | } 222 | userdata_path = os.path.join(self.working_dir, "user-data") 223 | with open(userdata_path, "w") as f: 224 | f.write(userdata) 225 | return userdata_path 226 | 227 | def create_overlay_image(self, overlay_img): 228 | """Create an overlay image for specified base image.""" 229 | overlay_dir = os.path.dirname(overlay_img) 230 | if os.path.isfile(overlay_img): 231 | os.remove(overlay_img) 232 | elif not os.path.isdir(overlay_dir): 233 | os.makedirs(overlay_dir) 234 | 
subprocess.check_call( 235 | [ 236 | "qemu-img", 237 | "create", 238 | "-f", 239 | "qcow2", 240 | "-b", 241 | os.path.join(CACHE_DIR, self.image_name), 242 | "-F", 243 | "qcow2", 244 | overlay_img, 245 | ] 246 | ) 247 | subprocess.check_call(["sudo", "chmod", "777", overlay_img]) 248 | 249 | @property 250 | def name(self): 251 | return "qemu" 252 | 253 | def __repr__(self): 254 | return "{classname}(release={release})".format( 255 | classname=self.__class__.__name__, release=self.release 256 | ) 257 | 258 | @staticmethod 259 | def get_architecture(): 260 | """Return architecture string for system.""" 261 | return ( 262 | subprocess.check_output(["dpkg", "--print-architecture"]) 263 | .decode() 264 | .strip() 265 | ) 266 | 267 | def get_qemu_path(self): 268 | """Return path of qemu-system executable for system.""" 269 | if self.get_architecture() == ARCH_AMD64: 270 | target = QEMU_SYSTEM_AMD64 271 | else: 272 | target = QEMU_SYSTEM_I386 273 | return subprocess.check_output(["which", target]).decode().strip() 274 | 275 | def get_disk_args(self, overlay): 276 | """Return qemu-system disk args. If overlay is specified then an overlay 277 | image at that path will be created and specified in returned arguments. 278 | If overlay is None then the base image will be used in 279 | the returned arguments. 280 | :param overlay: Path of overlay image to use, otherwise None 281 | if not needed. 282 | :return: Disk image arguments as string. 283 | """ 284 | if overlay: 285 | self.create_overlay_image(overlay) 286 | return QEMU_DISK_IMAGE_OVERLAY_OPTS.format(overlay_img=overlay) 287 | else: 288 | return QEMU_DISK_IMAGE_OPTS.format(disk_img=self.image_name) 289 | 290 | @staticmethod 291 | def get_display_args(headless): 292 | """Return qemu-system display arguments based on headless parameter. 293 | :param headless: Whether qemu-system should run in headless mode or not. 294 | :return: Display parameters for required display state. 295 | """ 296 | if headless: 297 | return QEMU_DISPLAY_HEADLESS + QEMU_DISPLAY_VGA_OPTS 298 | else: 299 | return QEMU_DISPLAY_OPTS + QEMU_DISPLAY_VGA_OPTS + QEMU_SOUND_OPTS 300 | 301 | def launch_qemu(self, img, ram, cpu, headless, port, overlay): 302 | """Boot the qemu from a different thread to stop this thread from being 303 | blocked whilst the qemu is running. 304 | """ 305 | runner = threading.Thread( 306 | target=self._launch_qemu, 307 | args=(self.working_dir, img, ram, cpu, headless, port, overlay), 308 | ) 309 | runner.start() 310 | return runner 311 | 312 | def _launch_qemu( 313 | self, 314 | working_dir, 315 | disk_image_path, 316 | ram, 317 | cpu, 318 | headless, 319 | port=None, 320 | overlay=None, 321 | ): 322 | """Launch qemu-system to boot the given disk image. 323 | :param working_dir: Working directory to use. 324 | :param disk_image_path: Path of the disk image file used for 325 | installation. 326 | :param ram: Amount of ram allocated to qemu. 327 | :param cpu: Number of cpus allocated to qemu. 328 | :param headless: Whether to run installer in headless mode or not. 329 | :param port: Host port number to enable port forwarding to qemu port 22. 330 | 331 | """ 332 | cmd = self.get_qemu_launch_command( 333 | working_dir, disk_image_path, ram, cpu, headless, port, overlay 334 | ) 335 | print(" ".join(cmd)) 336 | subprocess.check_call(cmd) 337 | 338 | def get_qemu_launch_command( 339 | self, work_dir, disk_img, ram, cpu, headless, port=None, overlay=None 340 | ): 341 | """Return command to launch qemu process using optional install parameters.
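An illustrative (not exact) command for the defaults, assembled from the
QEMU_* option templates above, might look like:

    qemu-system-x86_64 -m 3072 -smp 2 -pidfile <workdir>/qemu.pid
    -rtc base=localtime -cpu core2duo -enable-kvm
    -drive file=<overlay>,cache=unsafe,if=virtio,index=0
    -display none -vga qxl -net nic,model=virtio
    -net user,hostfwd=tcp::<port>-:22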
342 | :param work_dir: Working directory to use. 343 | :param disk_img: Path of the disk image file used for installation. 344 | :param ram: Amount of ram allocated to qemu. 345 | :param cpu: Number of cpus allocated to qemu. 346 | :param headless: Whether to run installer in headless mode or not. 347 | :return: Qemu launch command string. 348 | :param port: Host port number to enable port forwarding to qemu port 22. 349 | :param overlay: path to the overlay image to be created 350 | """ 351 | # Create command base with resource parameters 352 | cmd = QEMU_LAUNCH_OPTS.format( 353 | qemu=self.get_qemu_path(), 354 | ram=ram, 355 | cpu=cpu, 356 | disk_img=disk_img, 357 | workdir=work_dir, 358 | ) 359 | # Get disk args including overlay image if specified 360 | cmd += self.get_disk_args(overlay) 361 | # Add display parameters 362 | cmd += self.get_display_args(headless) 363 | # Add network. This must preceed the port forwarding option. 364 | cmd += QEMU_NET_OPTS 365 | # Add port forwarding if specified 366 | if port: 367 | cmd += QEMU_PORT_OPTS.format(port=port) 368 | else: 369 | # Add space to separate options 370 | cmd += " " 371 | return shlex.split(cmd) 372 | -------------------------------------------------------------------------------- /upgrade_testing/data/upgrade: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | TMP_LOCATION="/var/tmp/ubuntu-upgrade-testing" 4 | BASE_LOCATION="${ADT_ARTIFACTS}/upgrade_run_config" 5 | SCRIPTS_LOCATION="${BASE_LOCATION}/scripts" 6 | # Currently experimenting with using yaml output for run results (test 7 | # pass/fail etc.) for now, then we'll use something better 8 | TEST_RESULTS_DIR="${ADT_ARTIFACTS}/upgrade_run" 9 | TEST_RESULT_FILE="${TEST_RESULTS_DIR}/runner_results.yaml" 10 | CANARY_NAME="/tmp/upgrade_script_reboot_canary" 11 | INITIAL_TESTBED_READY_FLAG="${TMP_LOCATION}/initial_testbed_ready" 12 | 13 | # Only copy on the first run through 14 | if [ ! -d "${BASE_LOCATION}" ]; then 15 | mkdir "${BASE_LOCATION}" 16 | mv "${TMP_LOCATION}/scripts" "${BASE_LOCATION}" 17 | mv "${TMP_LOCATION}/auto_upgrade_test_settings" "${BASE_LOCATION}" 18 | fi 19 | 20 | # This is put in a known place by the wrapper script and contains the details 21 | # of above (as they will change each run). 22 | CONFIG_FILE="${BASE_LOCATION}/auto_upgrade_test_settings" 23 | # shellcheck disable=SC1090 24 | source "${CONFIG_FILE}" 25 | export TEST_RESULTS_DIR 26 | 27 | HAVE_REBOOTED=$ADT_REBOOT_MARK 28 | 29 | # Get more verbose logging from do-release-upgrade calls 30 | # made across the script. 31 | export DEBUG_UPDATE_MANAGER=1 32 | 33 | STATUS=0 34 | 35 | function upgrade_log() { 36 | local output=$1 37 | echo -e "auto-upgrade [$(date +%R:%S)]: ${output}" 38 | } 39 | 40 | # Called indirectly, through `trap` 41 | # shellcheck disable=SC2317 42 | function cleanup() { 43 | # Collect the results at exit so we cover both successful runs and 44 | # failures. 45 | collect_results 46 | cp "${CONFIG_FILE}" "${TEST_RESULTS_DIR}" 47 | upgrade_log "Cleaning up configuration files." 48 | rm -r "${BASE_LOCATION}" 49 | } 50 | 51 | function main() { 52 | # Ensure we don't have any mix-ups with multiple runs on the same testbed. 53 | trap cleanup EXIT 54 | 55 | upgrade_log "Running on ${RUNNING_BACKEND}" 56 | 57 | initial_testbed_setup 58 | 59 | if [ -z "${HAVE_REBOOTED}" ]; then 60 | upgrade_log "Beginning from the start." 
61 | create_reboot_canary 62 | 63 | output_running_system 64 | 65 | do_setup 66 | 67 | exit_if_not_running_initial_system 68 | 69 | pre_tests 70 | STATUS=$? 71 | exit_with_log_if_nonzero $STATUS "ERROR: Something went wrong during the prerun scripts." 72 | 73 | store_prereboot_details 74 | do_upgrade_and_maybe_reboot 75 | else 76 | upgrade_log "Skipping pre-tests as we have rebooted." 77 | fi 78 | 79 | # If we have rebooted we pick up from here. 80 | output_running_system 81 | exit_if_reboot_canary_exists 82 | exit_if_havent_upgraded 83 | 84 | # Check if we need to do another upgrade/reboot 85 | if need_another_upgrade; then 86 | echo "Appears we're in a multi-part upgrade. Upgrading/rebooting again." 87 | create_reboot_canary 88 | do_upgrade_and_maybe_reboot 89 | else 90 | exit_if_not_running_expected_post_system 91 | 92 | # No need to explicitly exit here as we're at the end. 93 | post_tests 94 | STATUS=$? 95 | fi 96 | check_no_apt_errors 97 | 98 | exit $STATUS 99 | } 100 | 101 | function check_no_apt_errors() { 102 | upgrade_log "Checking that the running system has a healthy apt state" 103 | apt-get check 104 | STATUS=$? 105 | if [[ "${STATUS}" == "0" ]]; then 106 | upgrade_log "apt is in a healthy state!" 107 | else 108 | upgrade_log "apt is not in a healthy state!" 109 | fi 110 | } 111 | 112 | function exit_with_log_if_nonzero() { 113 | local retcode=$1 114 | local error_message=$2 115 | if (( retcode != 0 )); then 116 | upgrade_log "ERROR: ${error_message}" 117 | exit 1 118 | fi 119 | } 120 | 121 | function exit_if_not_running_initial_system() { 122 | local running_system 123 | running_system=$(_get_running_system_name) 124 | upgrade_log "Checking that running system (${running_system}) is ${INITIAL_SYSTEM_STATE}" 125 | if [ "${INITIAL_SYSTEM_STATE}" != "${running_system}" ]; then 126 | upgrade_log "ERROR: Expected ${INITIAL_SYSTEM_STATE} got ${running_system}" 127 | # Is there a better way than just exiting here? 128 | exit 1 129 | fi 130 | } 131 | 132 | # Can we de-dupe these methods too? 133 | function exit_if_not_running_expected_post_system() { 134 | local running_system 135 | running_system=$(_get_running_system_name) 136 | upgrade_log "Checking that running system (${running_system}) is ${POST_SYSTEM_STATE}" 137 | if [ "${POST_SYSTEM_STATE}" != "${running_system}" ]; then 138 | upgrade_log "ERROR: Expected ${POST_SYSTEM_STATE} got ${running_system}" 139 | # Is there a better way than just exiting here? 140 | exit 1 141 | fi 142 | } 143 | 144 | function exit_if_havent_upgraded() { 145 | local running_system_version 146 | running_system_version=$(get_current_version) 147 | upgrade_log "Checking that an upgrade has occurred." 148 | if [ "${BEFORE_REBOOT_VERSION}" != "" ] && [ "${running_system_version}" == "${BEFORE_REBOOT_VERSION}" ]; then 149 | upgrade_log "ERROR: Still the same system version after reboot" 150 | exit 1 151 | fi 152 | } 153 | 154 | function create_reboot_canary() { 155 | touch "${CANARY_NAME}" 156 | } 157 | 158 | function store_prereboot_details() { 159 | # Store details that we'll use after a reboot. 160 | # Current running version as we may need to reboot between versions.
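# e.g. this appends a line like (hypothetical value):
#   BEFORE_REBOOT_VERSION=22.04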
161 | echo "BEFORE_REBOOT_VERSION=$(get_current_version)" >> "${CONFIG_FILE}" 162 | } 163 | 164 | function exit_if_reboot_canary_exists() { 165 | if [ -f "${CANARY_NAME}" ]; then 166 | upgrade_log "ERROR: system has not rebooted" 167 | exit 1 168 | fi 169 | } 170 | 171 | function _get_running_system_name() { 172 | lsb_release -sc 173 | } 174 | 175 | function pre_tests() { 176 | # Script setup and run. For each test: 177 | # - create a output dir for the results and make available to script 178 | # - Run script 179 | # - Log success or failure of script 180 | echo "pre_script_output:" >> "${TEST_RESULT_FILE}" 181 | success=0 182 | for test in $PRE_TESTS_TO_RUN; do 183 | 184 | local this_script_results="${TEST_RESULTS_DIR}/pre_${test}/" 185 | mkdir "${this_script_results}" 186 | export TESTRUN_RESULTS_DIR=$this_script_results 187 | 188 | local FULL_TEST_SCRIPT_PATH="${SCRIPTS_LOCATION}/${test}" 189 | upgrade_log "Running test: ${FULL_TEST_SCRIPT_PATH} -- Results: ${this_script_results}" 190 | ${FULL_TEST_SCRIPT_PATH} 191 | 192 | local test_result=$? 193 | if (( test_result != 0 )); then 194 | echo " \"${test}\": FAIL" >> "${TEST_RESULT_FILE}" 195 | success=1 196 | else 197 | echo " \"${test}\": PASS" >> "${TEST_RESULT_FILE}" 198 | fi 199 | done 200 | return $success 201 | } 202 | 203 | function post_tests() { 204 | # Script setup and run. For each test: 205 | # - create a output dir for the results and make available to script 206 | # - Run script 207 | # - Log success or failure of script 208 | echo "post_test_output:" >> "$TEST_RESULT_FILE" 209 | success=0 210 | for test in $POST_TESTS_TO_RUN; do 211 | local this_script_results="${TEST_RESULTS_DIR}/post_${test}/" 212 | mkdir "${this_script_results}" 213 | export TESTRUN_RESULTS_DIR=$this_script_results 214 | 215 | local FULL_TEST_SCRIPT_PATH="${SCRIPTS_LOCATION}/${test}" 216 | upgrade_log "Running test: ${FULL_TEST_SCRIPT_PATH} -- Results: ${this_script_results}" 217 | 218 | ${FULL_TEST_SCRIPT_PATH} 219 | 220 | local test_result=$? 221 | if (( test_result != 0 )); then 222 | echo " \"${test}\": FAIL" >> "$TEST_RESULT_FILE" 223 | success=1 224 | else 225 | echo " \"${test}\": PASS" >> "$TEST_RESULT_FILE" 226 | fi 227 | done 228 | return $success 229 | } 230 | 231 | function initial_testbed_setup() { 232 | if ! [ -f "${INITIAL_TESTBED_READY_FLAG}" ]; then 233 | export DEBIAN_FRONTEND=noninteractive 234 | upgrade_log "Making sure initial testbed is fully up to date" 235 | apt update -y && apt dist-upgrade -y 236 | if [ -f /var/run/reboot-required ]; then 237 | upgrade_log "System needs reboot before upgrading" 238 | maybe_reboot 239 | fi 240 | if [ -n "${HAVE_REBOOTED}" ]; then 241 | # Clear out reboot status to let the upgrade take over from the start 242 | upgrade_log "Clearing out reboot flag" 243 | HAVE_REBOOTED="" 244 | fi 245 | upgrade_log "Initial testbed is fully ready" 246 | touch "${INITIAL_TESTBED_READY_FLAG}" 247 | fi 248 | } 249 | 250 | function do_setup() { 251 | upgrade_log "Performing run setup." 252 | # Make sure the output results file is available and proper yaml. 253 | mkdir "${TEST_RESULTS_DIR}" 254 | echo "---" >> "${TEST_RESULT_FILE}" 255 | 256 | upgrade_log "Make sure /tmp is a tmpfs." 257 | rm -f "/etc/systemd/system/tmp.mount" 258 | } 259 | 260 | function need_another_upgrade() { 261 | # Check if we're not running the right version 262 | # If not are we able to upgrade to the right version? 
263 | local running_system 264 | running_system=$(_get_running_system_name) 265 | if [ "${POST_SYSTEM_STATE}" != "${running_system}" ]; then 266 | potential_upgrade_version=$(get_potential_upgrade_version) 267 | current_version=$(get_current_version) 268 | echo "Comparing ${potential_upgrade_version} against ${current_version}" 269 | # we can upgrade further and the upgrade target is greater than our current system. 270 | if [ "${potential_upgrade_version}" ] && version_lt "${current_version}" "${potential_upgrade_version}"; then 271 | return 0 272 | fi 273 | fi 274 | return 1 275 | } 276 | 277 | function get_current_version() { 278 | lsb_release -rs 279 | } 280 | 281 | function get_potential_upgrade_version() { 282 | # Attempt to get the version that we would upgrade to. Attempts to use 283 | # development version if needed. 284 | # Might return an empty string if there are no upgrade candidates at all. 285 | local version 286 | version=$(do-release-upgrade -p -c | awk '/New release/ {print $3}' | tr -d \') 287 | if [ ! "${version}" ]; then 288 | # Let's try for a development version 289 | version=$(do-release-upgrade -c -d | awk '/New release/ {print $3}' | tr -d \') 290 | fi 291 | echo "${version}" 292 | } 293 | 294 | # version_lte and version_lt taken from: http://stackoverflow.com/a/4024263 295 | function version_lte() { 296 | [ "$1" = "$(echo -e "$1\n$2" | sort --version-sort | head -n1)" ] 297 | } 298 | 299 | function version_lt() { 300 | ([ "$1" = "$2" ] && return 1) || version_lte "$1" "$2" 301 | } 302 | 303 | function do_upgrade_and_maybe_reboot() { 304 | initial="${INITIAL_SYSTEM_STATE}" 305 | current="$(_get_running_system_name)" 306 | target="${POST_SYSTEM_STATE}" 307 | upgrade_log "Attempting to upgrade from ${current} to ${target} (started from ${initial})" 308 | 309 | do_normal_upgrade 310 | exit_with_log_if_nonzero $STATUS "ERROR: Something went wrong with the upgrade." 311 | maybe_reboot 312 | 313 | exit_with_log_if_nonzero $STATUS "ERROR: Something went wrong with the upgrade." 314 | 315 | upgrade_log "Upgrading complete." 316 | check_no_apt_errors 317 | } 318 | 319 | 320 | function do_normal_upgrade() { 321 | upgrade_log "Starting machine upgrade." 322 | 323 | export DEBIAN_FRONTEND=noninteractive 324 | # Ensure we have do-release-upgrade 325 | apt-get update 326 | apt-get -y dist-upgrade 327 | apt-get -y install openssh-server update-manager-core 328 | 329 | kernel=$(uname -r) 330 | pre_upgrade_kernel_check=$(dpkg -l linux-*-$kernel) 331 | 332 | # Allow upgrade from lts to non-lts if there's no lts to upgrade to 333 | local version 334 | local dev_version 335 | 336 | if [ "$DO_RELEASE_UPGRADE_PROMPT" != "" ]; then 337 | upgrade_log "Prompt set explicitly to '${DO_RELEASE_UPGRADE_PROMPT}' by profile" 338 | sed -i "s/^Prompt=.*\$/Prompt=${DO_RELEASE_UPGRADE_PROMPT}/" /etc/update-manager/release-upgrades 339 | else 340 | upgrade_log "Prompt not set explicitly by profile, allow changing it if needed" 341 | if grep '^Prompt=lts' /etc/update-manager/release-upgrades; then 342 | # Check for an LTS to LTS upgrade 343 | version=$(do-release-upgrade -c | awk '/New release/ {print $3}' | tr -d \') 344 | dev_version=$(do-release-upgrade -d -c | awk '/New release/ {print $3}' | tr -d \') 345 | if [ -z "${version}" ] && [ -z "${dev_version}" ]; then 346 | upgrade_log "No LTS version available, allowing 'normal' upgrades" 347 | # No LTS release to upgrade to. Enable non-LTS upgrades.
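# i.e. after this sed /etc/update-manager/release-upgrades will contain
# "Prompt=normal" instead of "Prompt=lts".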
348 | sed -i 's/Prompt=lts/Prompt=normal/' /etc/update-manager/release-upgrades 349 | fi 350 | fi 351 | fi 352 | 353 | # Although a dist-upgrader tarball will always be in $release-proposed 354 | # the meta-release-proposed file usually doesn't get updated until u-r-u 355 | # has been SRU'ed. 356 | # Our preference is to test the dist-upgrader in -proposed and fall back to 357 | # the one referenced (-updates or release pocket) in the meta-release file. 358 | version=$(do-release-upgrade -p -c | awk '/New release/ {print $3}' | tr -d \') 359 | if [ -z "${version}" ]; then 360 | upgrade_log "Proposed version not found: falling back to devel release" 361 | do-release-upgrade -d -f DistUpgradeViewNonInteractive 362 | else 363 | upgrade_log "Proposed version found: ${version}, using it" 364 | do-release-upgrade -p -f DistUpgradeViewNonInteractive 365 | fi 366 | post_upgrade_kernel_check=$(dpkg -l linux-*-$kernel) 367 | if [[ "${pre_upgrade_kernel_check}" != "${post_upgrade_kernel_check}" ]]; then 368 | upgrade_log "Different packages are installed for the kernel booted during upgrade!" 369 | upgrade_log "Pre upgrade kernel was ${pre_upgrade_kernel_check}" 370 | upgrade_log "Post upgrade kernel was ${post_upgrade_kernel_check}" 371 | STATUS=1 372 | else 373 | upgrade_log "Kernel check okay!" 374 | fi 375 | check_no_apt_errors 376 | 377 | STATUS=$? 378 | } 379 | 380 | function maybe_reboot() { 381 | # Check if we actually want to reboot . . . 382 | reboot_function="/tmp/autopkgtest-reboot" 383 | if [ -f ${reboot_function} ]; then 384 | upgrade_log "Rebooting the system." 385 | if [ "${RUNNING_BACKEND}" = "lxc" ]; then 386 | # lxc reboot is doing something different to expected. 387 | rm "${CANARY_NAME}" 388 | fi 389 | eval $reboot_function 'upgradetests' 390 | else 391 | upgrade_log "This testbed does not support rebooting." 392 | exit 1 393 | fi 394 | } 395 | 396 | # Called indirectly, through `trap` 397 | # shellcheck disable=SC2317 398 | function collect_results() { 399 | # Move any files of interest into $TEST_RESULTS_DIR 400 | upgrade_log "Collecting system details." 401 | system_details_dir="${TEST_RESULTS_DIR}/system_details" 402 | mkdir "${system_details_dir}" 403 | cp -fr /var/log/dist-upgrade "${system_details_dir}/dist-upgrade/" 404 | cp /var/log/dpkg.log "${system_details_dir}/" 405 | cp -fr /etc/apt/ "${system_details_dir}/apt/" 406 | cp -fr /etc/update-manager/ "${system_details_dir}/update-manager/" 407 | } 408 | 409 | function output_running_system() { 410 | echo "Currently running: $(lsb_release -a)" 411 | } 412 | 413 | main "$@" 414 | --------------------------------------------------------------------------------
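Putting the pieces together, a test profile is a YAML list of test definitions whose keys match what _config.py and _provisionconfig.py read (testname, a provisioning stanza with backend/releases, scripts_location, pre_upgrade_scripts and post_upgrade_tests). The sketch below is illustrative only; the test name, release names and script names are hypothetical:

- testname: example-qemu-upgrade
  provisioning:
    backend: qemu
    releases: [focal, jammy]
    arch: amd64
  scripts_location: file://./scripts
  pre_upgrade_scripts:
    - pre_check
  post_upgrade_tests:
    - post_check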