├── debian ├── compat ├── source │ ├── format │ └── options ├── remoto.lintian-overrides ├── rules ├── gbp.conf ├── control ├── changelog └── copyright ├── remoto ├── tests │ ├── __init__.py │ ├── backends │ │ ├── __init__.py │ │ ├── test_docker.py │ │ ├── test_kubernetes.py │ │ ├── test_local.py │ │ └── test_backends.py │ ├── fake_module.py │ ├── conftest.py │ ├── test_connection.py │ ├── test_util.py │ ├── test_rsync.py │ ├── test_process.py │ └── test_log.py ├── backends │ ├── ssh.py │ ├── podman.py │ ├── openshift.py │ ├── local.py │ ├── kubernetes.py │ ├── docker.py │ └── __init__.py ├── __init__.py ├── exc.py ├── log.py ├── util.py ├── connection.py ├── file_sync.py └── process.py ├── requirements.txt ├── MANIFEST.in ├── tox.ini ├── .gitignore ├── LICENSE ├── .github └── workflows │ └── pythonpackage.yml ├── setup.py ├── .devcontainer ├── Dockerfile └── devcontainer.json ├── CHANGELOG.rst └── README.rst /debian/compat: -------------------------------------------------------------------------------- 1 | 10 2 | -------------------------------------------------------------------------------- /remoto/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | execnet 2 | -------------------------------------------------------------------------------- /remoto/tests/backends/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (native) 2 | -------------------------------------------------------------------------------- /debian/source/options: -------------------------------------------------------------------------------- 1 | extend-diff-ignore="\.egg-info$" -------------------------------------------------------------------------------- /remoto/backends/ssh.py: -------------------------------------------------------------------------------- 1 | from . import BaseConnection as SshConnection 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include setup.py 2 | include LICENSE 3 | include README.rst 4 | -------------------------------------------------------------------------------- /debian/remoto.lintian-overrides: -------------------------------------------------------------------------------- 1 | # Package has not yet been submitted to Debian. 
2 | new-package-should-close-itp-bug 3 | -------------------------------------------------------------------------------- /remoto/backends/podman.py: -------------------------------------------------------------------------------- 1 | from .docker import DockerConnection 2 | 3 | 4 | class PodmanConnection(DockerConnection): 5 | 6 | executable = 'podman' 7 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | export DH_VERBOSE=1 4 | 5 | export PYBUILD_NAME=remoto 6 | 7 | %: 8 | dh $@ --with python3 --buildsystem=pybuild 9 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27, py33, py36, py37 3 | 4 | [testenv] 5 | deps = 6 | pytest 7 | py26,py27: mock 8 | commands = py.test -v remoto/tests 9 | -------------------------------------------------------------------------------- /remoto/backends/openshift.py: -------------------------------------------------------------------------------- 1 | from .kubernetes import KubernetesConnection 2 | 3 | 4 | class OpenshiftConnection(KubernetesConnection): 5 | 6 | executable = 'oc' 7 | -------------------------------------------------------------------------------- /remoto/__init__.py: -------------------------------------------------------------------------------- 1 | from .connection import Connection 2 | from .file_sync import rsync 3 | from . import process 4 | from . import connection 5 | 6 | 7 | __version__ = '1.2.1' 8 | -------------------------------------------------------------------------------- /remoto/exc.py: -------------------------------------------------------------------------------- 1 | import execnet 2 | 3 | HostNotFound = execnet.HostNotFound 4 | RemoteError = execnet.RemoteError 5 | TimeoutError = execnet.TimeoutError 6 | DataFormatError = execnet.DataFormatError 7 | -------------------------------------------------------------------------------- /debian/gbp.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | debian-branch = xenial 3 | pristine-tar = True 4 | upstream-tag = upstream/%(version)s 5 | 6 | [dch] 7 | # Use git committer as author 8 | git-author = True 9 | # Use a simple message format 10 | multimaint-merge = False 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg 8 | *.egg-info 9 | dist 10 | build 11 | eggs 12 | parts 13 | var 14 | sdist 15 | develop-eggs 16 | .installed.cfg 17 | lib64 18 | 19 | # Installer logs 20 | pip-log.txt 21 | 22 | # Unit test / coverage reports 23 | .coverage* 24 | .tox 25 | nosetests.xml 26 | 27 | # Translations 28 | *.mo 29 | 30 | # Mr Developer 31 | .mr.developer.cfg 32 | .project 33 | .pydevproject 34 | -------------------------------------------------------------------------------- /remoto/tests/fake_module.py: -------------------------------------------------------------------------------- 1 | """ 2 | this is just a stub module to use to test the `import_module` functionality in 3 | remoto 4 | """ 5 | import sys 6 | 7 | 8 | def function(conn): 9 | return True 10 | 11 | 12 | def fails(): 13 | raise Exception('failure from fails() function') 
14 | 15 | 16 | def unexpected_fail(): 17 | sys.exit(1) 18 | 19 | 20 | def noop(): 21 | sys.exit(0) 22 | 23 | 24 | def passes(): 25 | pass 26 | 27 | 28 | def remote_interpreter(): 29 | return sys.executable 30 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: remoto 2 | Maintainer: Ken Dreyer 3 | Section: python 4 | Priority: optional 5 | Build-Depends: 6 | debhelper (>= 10), 7 | dh-python, 8 | python3, 9 | python3-execnet, 10 | python3-pytest, 11 | python3-setuptools, 12 | Standards-Version: 4.2.1 13 | X-Python-Version: >=3.7 14 | 15 | Package: python3-remoto 16 | Architecture: all 17 | Depends: ${misc:Depends}, ${python3:Depends} 18 | Description: Execute remote commands or processes 19 | A very simplistic remote-command-executor using ssh and Python in the remote 20 | end. 21 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | remoto (1.1.3) stable; urgency=medium 2 | 3 | * New upstream release 4 | 5 | -- Ceph Release Team Fri, 22 Mar 2019 06:03:44 +0000 6 | 7 | remoto (0.0.29-0ubuntu0.16.04.2) xenial; urgency=medium 8 | 9 | * d/patches: drop bundled execnet, rework patches for gbp 10 | * d/control: add extended description for lintian 11 | * d/control: Build-Depends execnet 12 | 13 | -- Ken Dreyer Mon, 03 Jul 2017 16:19:14 -0600 14 | 15 | remoto (0.0.29-0ubuntu0.16.04.1) xenial; urgency=medium 16 | 17 | * Latest upstream release 18 | 19 | -- Ken Dreyer Thu, 29 Jun 2017 11:22:45 -0600 20 | -------------------------------------------------------------------------------- /remoto/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class Capture(object): 5 | 6 | def __init__(self, *a, **kw): 7 | self.a = a 8 | self.kw = kw 9 | self.calls = [] 10 | self.return_values = kw.get('return_values', False) 11 | self.always_returns = kw.get('always_returns', False) 12 | 13 | def __call__(self, *a, **kw): 14 | self.calls.append({'args': a, 'kwargs': kw}) 15 | if self.always_returns: 16 | return self.always_returns 17 | if self.return_values: 18 | return self.return_values.pop() 19 | 20 | 21 | class Factory(object): 22 | 23 | def __init__(self, **kw): 24 | for k, v in kw.items(): 25 | setattr(self, k, v) 26 | -------------------------------------------------------------------------------- /remoto/backends/local.py: -------------------------------------------------------------------------------- 1 | from . 
import BaseConnection 2 | import socket 3 | 4 | 5 | class LocalConnection(BaseConnection): 6 | 7 | def __init__(self, **kw): 8 | # hostname gets ignored, and forced to be localhost always 9 | kw.pop('hostname', None) 10 | super(LocalConnection, self).__init__( 11 | hostname='localhost', 12 | detect_sudo=False, 13 | **kw 14 | ) 15 | 16 | def _make_connection_string(self, hostname, _needs_ssh=None, use_sudo=None): 17 | interpreter = self.interpreter 18 | if use_sudo is not None: 19 | if use_sudo: 20 | interpreter = 'sudo ' + interpreter 21 | elif self.sudo: 22 | interpreter = 'sudo ' + interpreter 23 | return 'popen//python=%s' % interpreter 24 | -------------------------------------------------------------------------------- /remoto/tests/test_connection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from remoto.connection import get 3 | 4 | 5 | base_names = [ 6 | 'ssh', 'oc', 'openshift', 'kubernetes', 'k8s', 'local', 'popen', 'localhost', 'docker', 'podman', 7 | ] 8 | 9 | capitalized_names = [n.capitalize() for n in base_names] 10 | 11 | spaced_names = [" %s " % n for n in base_names] 12 | 13 | valid_names = base_names + capitalized_names + spaced_names 14 | 15 | 16 | class TestGet(object): 17 | 18 | @pytest.mark.parametrize('name', valid_names) 19 | def test_valid_names(self, name): 20 | conn_class = get(name) 21 | assert conn_class.__name__.endswith('Connection') 22 | 23 | def test_fallback(self): 24 | conn_class = get('non-existent') 25 | assert conn_class.__name__ == 'BaseConnection' 26 | 27 | def test_custom_fallback(self): 28 | conn_class = get('non-existent', 'openshift') 29 | assert conn_class.__name__ == 'OpenshiftConnection' 30 | -------------------------------------------------------------------------------- /remoto/log.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def reporting(conn, result, timeout=None): 4 | timeout = timeout or conn.global_timeout # -1 a.k.a. wait for ever 5 | log_map = { 6 | 'debug': conn.logger.debug, 7 | 'error': conn.logger.error, 8 | 'warning': conn.logger.warning 9 | } 10 | 11 | while True: 12 | try: 13 | received = result.receive(timeout) 14 | level_received, message = list(received.items())[0] 15 | if not isinstance(message, str): 16 | message = message.decode('utf-8') 17 | log_map[level_received](message.strip('\r\n')) 18 | except EOFError: 19 | break 20 | except Exception as err: 21 | # the things we need to do here :( 22 | # because execnet magic, we cannot catch this as 23 | # `except TimeoutError` 24 | if err.__class__.__name__ == 'TimeoutError': 25 | msg = 'No data was received after %s seconds, disconnecting...' % timeout 26 | conn.logger.warning(msg) 27 | break 28 | raise 29 | -------------------------------------------------------------------------------- /remoto/backends/kubernetes.py: -------------------------------------------------------------------------------- 1 | from . 
import BaseConnection 2 | 3 | 4 | class KubernetesConnection(BaseConnection): 5 | 6 | executable = 'kubectl' 7 | remote_import_system = 'json' 8 | 9 | def __init__(self, pod_name, namespace=None, context=None, **kw): 10 | self.namespace = namespace 11 | self.context = context 12 | self.pod_name = pod_name 13 | super(KubernetesConnection, self).__init__(hostname='localhost', **kw) 14 | 15 | def command_template(self): 16 | base_command = [self.executable] 17 | if self.context: 18 | base_command.extend(['--context', self.context]) 19 | 20 | base_command.extend(['exec', '-i']) 21 | 22 | if self.namespace: 23 | base_command.extend(['-n', self.namespace]) 24 | 25 | base_command.extend([ 26 | self.pod_name, 27 | '--', 28 | '/bin/sh', 29 | '-c' 30 | ]) 31 | return base_command 32 | 33 | def cmd(self, cmd): 34 | tmpl = self.command_template() 35 | tmpl.append(' '.join(cmd)) 36 | return tmpl 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2013 Alfredo Deza 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy 5 | of this software and associated documentation files (the "Software"), to deal 6 | in the Software without restriction, including without limitation the rights 7 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the Software is 9 | furnished to do so, subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in all 12 | copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 17 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 18 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 19 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 20 | OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /remoto/util.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def admin_command(sudo, command): 4 | """ 5 | If sudo is needed, make sure the command is prepended 6 | correctly, otherwise return the command as it came. 7 | 8 | :param sudo: A boolean representing the intention of having a sudo command 9 | (or not) 10 | :param command: A list of the actual command to execute with Popen. 11 | """ 12 | if sudo: 13 | if not isinstance(command, list): 14 | command = [command] 15 | return ['sudo'] + [cmd for cmd in command] 16 | return command 17 | 18 | 19 | class RemoteError(object): 20 | 21 | def __init__(self, traceback): 22 | self.orig_traceback = traceback 23 | self.exception_line = '' 24 | self.exception_name = self.get_exception_name() 25 | 26 | def get_exception_name(self): 27 | for tb_line in reversed(self.orig_traceback.split('\n')): 28 | if tb_line: 29 | for word in tb_line.split(): 30 | if word.endswith(':'): # exception! 
31 | self.exception_line = tb_line 32 | return word.strip().strip(':') 33 | -------------------------------------------------------------------------------- /remoto/tests/test_util.py: -------------------------------------------------------------------------------- 1 | from remoto import util 2 | 3 | 4 | class TestAdminCommand(object): 5 | 6 | def test_prepend_list_if_sudo(self): 7 | result = util.admin_command(True, ['ls']) 8 | assert result == ['sudo', 'ls'] 9 | 10 | def test_skip_prepend_if_not_sudo(self): 11 | result = util.admin_command(False, ['ls']) 12 | assert result == ['ls'] 13 | 14 | def test_command_that_is_not_a_list(self): 15 | result = util.admin_command(True, 'ls') 16 | assert result == ['sudo', 'ls'] 17 | 18 | 19 | class TestRemoteError(object): 20 | 21 | def setup_method(self): 22 | self.traceback = ('\n').join([ 23 | 'Traceback (most recent call last):', 24 | ' File "", line 1, in ', 25 | "NameError: name 'foo' is not defined" 26 | ]) 27 | 28 | def test_exception_name(self): 29 | error = util.RemoteError(self.traceback) 30 | assert error.exception_name == 'NameError' 31 | 32 | def test_exception_line(self): 33 | error = util.RemoteError(self.traceback) 34 | assert error.exception_line == "NameError: name 'foo' is not defined" 35 | -------------------------------------------------------------------------------- /.github/workflows/pythonpackage.yml: -------------------------------------------------------------------------------- 1 | name: Python package 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | strategy: 10 | max-parallel: 4 11 | matrix: 12 | python-version: [2.7, 3.5, 3.6, 3.7] 13 | 14 | steps: 15 | - uses: actions/checkout@v1 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v1 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install --upgrade setuptools 24 | python setup.py develop 25 | - name: Lint with flake8 26 | run: | 27 | pip install flake8 28 | # stop the build if there are Python syntax errors or undefined names 29 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 30 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 31 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 32 | - name: Test with pytest 33 | run: | 34 | pip install pytest 35 | pip install mock 36 | pytest 37 | -------------------------------------------------------------------------------- /remoto/tests/test_rsync.py: -------------------------------------------------------------------------------- 1 | try: 2 | from unittest.mock import Mock, patch 3 | except ImportError: 4 | from mock import Mock, patch 5 | from remoto import file_sync 6 | 7 | 8 | class TestRsync(object): 9 | 10 | def make_fake_sync(self): 11 | fake_sync = Mock() 12 | fake_sync.return_value = fake_sync 13 | fake_sync.targets = [] 14 | fake_sync.add_target = lambda gw, destination: fake_sync.targets.append(destination) 15 | return fake_sync 16 | 17 | @patch('remoto.file_sync.Connection', Mock()) 18 | def test_rsync_fallback_to_host_list(self): 19 | fake_sync = self.make_fake_sync() 20 | with patch('remoto.file_sync._RSync', fake_sync): 21 | file_sync.rsync('host1', '/source', '/destination') 22 | 23 | # should've added just one target 24 | assert len(fake_sync.targets) == 1 25 | 26 | @patch('remoto.file_sync.Connection', Mock()) 27 | def test_rsync_use_host_list(self): 28 | fake_sync = self.make_fake_sync() 29 | with patch('remoto.file_sync._RSync', fake_sync): 30 | file_sync.rsync( 31 | ['host1', 'host2'], '/source', '/destination') 32 | 33 | # should've added just one target 34 | assert len(fake_sync.targets) == 2 35 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | module_file = open("remoto/__init__.py").read() 4 | metadata = dict(re.findall(r"__([a-z]+)__\s*=\s*['\"]([^'\"]*)['\"]", module_file)) 5 | long_description = open('README.rst').read() 6 | install_requires = [] 7 | 8 | from setuptools import setup, find_packages 9 | 10 | 11 | setup( 12 | name = 'remoto', 13 | description = 'Execute remote commands or processes.', 14 | packages = find_packages(), 15 | author = 'Alfredo Deza', 16 | author_email = 'contact@deza.pe', 17 | version = metadata['version'], 18 | url = 'http://github.com/alfredodeza/remoto', 19 | license = "MIT", 20 | zip_safe = False, 21 | keywords = "remote, commands, unix, ssh, socket, execute, terminal", 22 | install_requires=[ 23 | 'execnet', 24 | ] + install_requires, 25 | long_description = long_description, 26 | classifiers = [ 27 | 'Development Status :: 4 - Beta', 28 | 'Intended Audience :: Developers', 29 | 'License :: OSI Approved :: MIT License', 30 | 'Topic :: Utilities', 31 | 'Operating System :: MacOS :: MacOS X', 32 | 'Operating System :: POSIX', 33 | 'Programming Language :: Python :: 2.6', 34 | 'Programming Language :: Python :: 2.7', 35 | 'Programming Language :: Python :: 3.3', 36 | ] 37 | ) 38 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0 2 | Upstream-Name: remoto 3 | Source: https://github.com/alfredodeza/remoto 4 | 5 | Files: * 6 | Copyright: 2013 Alfredo Deza 7 | License: Expat 8 | 9 | Files: debian/* 10 | Copyright: 2017 Red Hat, Inc. 
11 | License: Expat 12 | 13 | License: Expat 14 | Permission is hereby granted, free of charge, to any person obtaining a 15 | copy of this software and associated documentation files (the "Software"), 16 | to deal in the Software without restriction, including without limitation 17 | the rights to use, copy, modify, merge, publish, distribute, sublicense, 18 | and/or sell copies of the Software, and to permit persons to whom the 19 | Software is furnished to do so, subject to the following conditions: 20 | . 21 | The above copyright notice and this permission notice shall be included in 22 | all copies or substantial portions of the Software. 23 | . 24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 25 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 26 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 27 | THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 28 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 29 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 30 | DEALINGS IN THE SOFTWARE. 31 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.217.4/containers/python-3/.devcontainer/base.Dockerfile 2 | 3 | # [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster 4 | ARG VARIANT="3.10-bullseye" 5 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} 6 | 7 | # Define the path to the virtualenv to work with 8 | ARG VENV_PATH="/home/vscode/venv" 9 | 10 | # Copy the requirements over, create the virtualenv, and install the dependencies onto the virtualenv 11 | # Note it uses '~/venv/' on purpose because vscode automatically detects that and activates it for you 12 | # when the container starts 13 | COPY requirements.txt /tmp/pip-tmp/ 14 | RUN su vscode -c "python -m venv /home/vscode/venv" \ 15 | && ${VENV_PATH}/bin/pip --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ 16 | && rm -rf /tmp/pip-tmp 17 | 18 | # [Optional] Uncomment this section to install additional OS packages. 19 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 20 | # && apt-get -y install --no-install-recommends 21 | 22 | # [Optional] Uncomment this line to install global node packages. 23 | # RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 -------------------------------------------------------------------------------- /remoto/backends/docker.py: -------------------------------------------------------------------------------- 1 | from . import BaseConnection 2 | 3 | 4 | class DockerConnection(BaseConnection): 5 | """ 6 | This connection class allows to (optionally) define a remote hostname 7 | to connect that holds a given container:: 8 | 9 | >>> conn = DockerConnection(hostname='srv-1', container_id='asdf-lkjh') 10 | 11 | Either ``container_id`` or ``container_name`` can be provided to connect to 12 | a given container. 13 | 14 | .. 
note:: ``hostname`` defaults to 'localhost' when undefined 15 | """ 16 | 17 | executable = 'docker' 18 | remote_import_system = 'json' 19 | 20 | def __init__(self, hostname=None, container_id=None, container_name=None, user=None, **kw): 21 | self.hostname = hostname or 'localhost' 22 | self.identifier = container_id or container_name 23 | if not self.identifier: 24 | raise TypeError('Either container_id or container_name must be provided') 25 | self.user = user 26 | super(DockerConnection, self).__init__(hostname=self.hostname, **kw) 27 | 28 | def command_template(self): 29 | if self.user: 30 | prefix = [ 31 | self.executable, 'exec', '-i', 32 | '-u', self.user, 33 | self.identifier, '/bin/sh', '-c' 34 | ] 35 | else: 36 | prefix = [ 37 | self.executable, 'exec', '-i', 38 | self.identifier, '/bin/sh', '-c' 39 | ] 40 | return prefix 41 | 42 | def cmd(self, cmd): 43 | tmpl = self.command_template() 44 | tmpl.append(' '.join(cmd)) 45 | return tmpl 46 | -------------------------------------------------------------------------------- /remoto/connection.py: -------------------------------------------------------------------------------- 1 | import logging 2 | # compatibility for older clients that rely on the previous ``Connection`` class 3 | from remoto.backends import BaseConnection as Connection # noqa 4 | from remoto.backends import ssh, openshift, kubernetes, local, podman, docker, needs_ssh 5 | 6 | 7 | logger = logging.getLogger('remoto') 8 | 9 | 10 | def get(name, fallback='ssh'): 11 | """ 12 | Retrieve the matching backend class from a string. If no backend can be 13 | matched, it raises an error. 14 | 15 | >>> get('ssh') 16 | 17 | >>> get() 18 | 19 | >>> get('non-existent') 20 | 21 | >>> get('non-existent', 'openshift') 22 | 23 | """ 24 | mapping = { 25 | 'ssh': ssh.SshConnection, 26 | 'oc': openshift.OpenshiftConnection, 27 | 'openshift': openshift.OpenshiftConnection, 28 | 'kubernetes': kubernetes.KubernetesConnection, 29 | 'k8s': kubernetes.KubernetesConnection, 30 | 'local': local.LocalConnection, 31 | 'popen': local.LocalConnection, 32 | 'localhost': local.LocalConnection, 33 | 'docker': docker.DockerConnection, 34 | 'podman': podman.PodmanConnection, 35 | } 36 | if not name: 37 | # fallsback to just plain local/ssh 38 | name = 'ssh' 39 | 40 | name = name.strip().lower() 41 | connection_class = mapping.get(name) 42 | if not connection_class: 43 | logger.warning('no connection backend found for: "%s"' % name) 44 | if fallback: 45 | logger.info('falling back to "%s"' % fallback) 46 | # this assumes that ``fallback`` is a valid mapping name 47 | return mapping.get(fallback) 48 | return connection_class 49 | -------------------------------------------------------------------------------- /remoto/tests/backends/test_docker.py: -------------------------------------------------------------------------------- 1 | from pytest import raises 2 | from remoto.backends import docker 3 | 4 | 5 | class TestDockerConnection(object): 6 | 7 | def test_missing_container_identifier(self): 8 | with raises(TypeError): 9 | docker.DockerConnection(hostname='node1') 10 | 11 | def test_defaults_to_localhost_name(self): 12 | conn = docker.DockerConnection(container_name='container-1') 13 | assert conn.hostname == 'localhost' 14 | 15 | def test_defaults_to_localhost_id(self): 16 | conn = docker.DockerConnection(container_id='asdf-lkjh') 17 | assert conn.hostname == 'localhost' 18 | 19 | 20 | class TestCommandTemplate(object): 21 | 22 | def test_with_user(self): 23 | conn = 
docker.DockerConnection(container_id='asdf-lkjh', user='root') 24 | tmpl = conn.command_template() 25 | assert tmpl == [ 26 | 'docker', 'exec', '-i', 27 | '-u', 'root', 28 | 'asdf-lkjh', '/bin/sh', '-c'] 29 | 30 | def test_no_user(self): 31 | conn = docker.DockerConnection(container_id='asdf-lkjh') 32 | tmpl = conn.command_template() 33 | assert tmpl == [ 34 | 'docker', 'exec', '-i', 35 | 'asdf-lkjh', '/bin/sh', '-c' 36 | ] 37 | 38 | 39 | class TestCommand(object): 40 | 41 | def test_user_conn_appends(self): 42 | conn = docker.DockerConnection(container_id='asdf-lkjh', user='root') 43 | result = conn.cmd(['ceph', '--version']) 44 | assert result == [ 45 | 'docker', 'exec', '-i', '-u', 'root', 46 | 'asdf-lkjh', '/bin/sh', '-c', 'ceph --version' 47 | ] 48 | 49 | def test_default_appends(self): 50 | conn = docker.DockerConnection(container_id='asdf-lkjh') 51 | result = conn.cmd(['ceph', 'health']) 52 | assert result == [ 53 | 'docker', 'exec', '-i', 54 | 'asdf-lkjh', '/bin/sh', '-c', 'ceph health' 55 | ] 56 | -------------------------------------------------------------------------------- /remoto/file_sync.py: -------------------------------------------------------------------------------- 1 | import execnet 2 | from remoto.backends import basic_remote_logger 3 | from remoto.backends import BaseConnection as Connection 4 | 5 | 6 | class _RSync(execnet.RSync): 7 | """ 8 | Inherits from ``execnet.RSync`` so that we can log nicely with the user 9 | logger instance (if any) back with the ``_report_send_file`` method 10 | """ 11 | 12 | def __init__(self, sourcedir, callback=None, verbose=True, logger=None): 13 | self.logger = logger 14 | super(_RSync, self).__init__(sourcedir, callback, verbose) 15 | 16 | def _report_send_file(self, gateway, modified_rel_path): 17 | if self._verbose: 18 | self.logger.info("syncing file: %s" % modified_rel_path) 19 | 20 | 21 | def rsync(hosts, source, destination, logger=None, sudo=False): 22 | """ 23 | Grabs the hosts (or single host), creates the connection object for each 24 | and set the rsync execnet engine to push the files. 25 | 26 | It assumes that all of the destinations for the different hosts is the 27 | same. This deviates from what execnet does because it has the flexibility 28 | to push to different locations. 29 | """ 30 | logger = logger or basic_remote_logger() 31 | sync = _RSync(source, logger=logger) 32 | 33 | # setup_targets 34 | if not isinstance(hosts, list): 35 | hosts = [hosts] 36 | 37 | for host in hosts: 38 | conn = Connection( 39 | host, 40 | logger, 41 | sudo, 42 | ) 43 | sync.add_target(conn.gateway, destination) 44 | 45 | return sync.send() 46 | 47 | 48 | def rsync_conn(conn, source, destination, logger=None): 49 | """ 50 | Reuses the passed connection to rsync ``source`` to ``destination``. Uses 51 | execnet under the hood. 
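    A minimal usage sketch (the connection and paths here are only
    illustrative)::

        >>> conn = Connection('hostname')
        >>> rsync_conn(conn, '/tmp/source-dir', '/tmp/destination-dir')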
52 | """ 53 | logger = logger or basic_remote_logger() 54 | sync = _RSync(source, logger=logger) 55 | sync.add_target(conn.gateway, destination) 56 | 57 | return sync.send() 58 | -------------------------------------------------------------------------------- /remoto/tests/backends/test_kubernetes.py: -------------------------------------------------------------------------------- 1 | from remoto.backends import kubernetes 2 | 3 | 4 | class TestCommandTemplate(object): 5 | 6 | def test_using_podname_only(self): 7 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf') 8 | tmpl = conn.command_template() 9 | assert tmpl == ['kubectl', 'exec', '-i', 'rook-ceph-asdf', '--', '/bin/sh', '-c'] 10 | 11 | def test_using_namespace(self): 12 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph') 13 | tmpl = conn.command_template() 14 | assert tmpl == [ 15 | 'kubectl', 'exec', '-i', '-n', 'rook-ceph', 16 | 'rook-ceph-asdf', '--', '/bin/sh', '-c' 17 | ] 18 | 19 | def test_using_context(self): 20 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf', context='4') 21 | tmpl = conn.command_template() 22 | assert tmpl == [ 23 | 'kubectl', '--context', '4', 'exec', '-i', 24 | 'rook-ceph-asdf', '--', '/bin/sh', '-c' 25 | ] 26 | 27 | def test_using_context_and_namespace(self): 28 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph', context='4') 29 | tmpl = conn.command_template() 30 | assert tmpl == [ 31 | 'kubectl', '--context', '4', 'exec', '-i', '-n', 'rook-ceph', 32 | 'rook-ceph-asdf', '--', '/bin/sh', '-c' 33 | ] 34 | 35 | 36 | class TestCommand(object): 37 | 38 | def test_podname_conn_appends(self): 39 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph') 40 | result = conn.cmd(['ceph', '--version']) 41 | assert result == [ 42 | 'kubectl', 'exec', '-i', '-n', 'rook-ceph', 43 | 'rook-ceph-asdf', '--', '/bin/sh', '-c', 'ceph --version' 44 | ] 45 | 46 | def test_namespace_appends(self): 47 | conn = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph') 48 | result = conn.cmd(['ceph', 'health']) 49 | assert result == [ 50 | 'kubectl', 'exec', '-i', '-n', 'rook-ceph', 51 | 'rook-ceph-asdf', '--', '/bin/sh', '-c', 'ceph health' 52 | ] 53 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.217.4/containers/python-3 3 | { 4 | "name": "Python 3", 5 | "build": { 6 | "dockerfile": "Dockerfile", 7 | "context": "..", 8 | "args": { 9 | // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6 10 | // Append -bullseye or -buster to pin to an OS version. 11 | // Use -bullseye variants on local on arm64/Apple Silicon. 12 | "VARIANT": "3.10-bullseye", 13 | // Options 14 | "NODE_VERSION": "none" 15 | } 16 | }, 17 | 18 | // Set remote *default* container specific settings.json values on container create. 
19 | "settings": { 20 | "python.defaultInterpreterPath": "/home/vscode/venv/bin/python", 21 | "python.linting.enabled": true, 22 | "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", 23 | "python.linting.banditEnabled": true, 24 | "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", 25 | "python.linting.flake8Enabled": true, 26 | // disabled, pylint is too aggressive 27 | //"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" 28 | //"python.linting.pylintEnabled": true, 29 | }, 30 | 31 | // Add the IDs of extensions you want installed when the container is created. 32 | // any personal preferences for extensions can be added in settings.json with remote.containers.defaultExtensions 33 | // see https://code.visualstudio.com/docs/remote/containers#_always-installed-extensions 34 | "extensions": [ 35 | "ms-python.python", 36 | "ms-python.vscode-pylance" 37 | ], 38 | 39 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 40 | // "forwardPorts": [], 41 | 42 | // Use 'postCreateCommand' to run commands after the container is created 43 | // this ensures that the project is ready to be developed on with changes 44 | // being immediately available and testable. This can't be done in the Dockerfile 45 | "postCreateCommand": "/home/vscode/venv/bin/python setup.py develop", 46 | 47 | // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 48 | "remoteUser": "vscode" 49 | } 50 | -------------------------------------------------------------------------------- /remoto/tests/test_process.py: -------------------------------------------------------------------------------- 1 | try: 2 | from unittest.mock import Mock, patch 3 | except ImportError: 4 | from mock import Mock, patch 5 | from remoto import process 6 | import subprocess 7 | 8 | class TestExtendPath(object): 9 | 10 | def setup_method(self): 11 | self.path = '/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin' 12 | 13 | def test_no_environment_sets_path(self): 14 | fake_conn = Mock() 15 | fake_conn.gateway.remote_exec.return_value = fake_conn 16 | fake_conn.receive.return_value = {} 17 | result = process.extend_env(fake_conn, {}) 18 | assert result['env']['PATH'] == self.path 19 | 20 | def test_custom_path_does_not_get_overridden(self): 21 | fake_conn = Mock() 22 | fake_conn.gateway.remote_exec.return_value = fake_conn 23 | fake_conn.receive.return_value = {'PATH': '/home/alfredo/bin'} 24 | result = process.extend_env(fake_conn, {}) 25 | new_path = result['env']['PATH'] 26 | assert new_path.endswith(self.path) 27 | assert '/home/alfredo/bin' in new_path 28 | 29 | def test_custom_env_var_extends_existing_env(self): 30 | fake_conn = Mock() 31 | fake_conn.gateway.remote_exec.return_value = fake_conn 32 | fake_conn.receive.return_value = {'PATH': '/home/alfredo/bin'} 33 | result = process.extend_env(fake_conn, {'extend_env': {'CEPH_VOLUME_DEBUG': '1'}}) 34 | new_path = result['env']['PATH'] 35 | assert result['env']['PATH'].endswith(self.path) 36 | assert result['env']['CEPH_VOLUME_DEBUG'] == '1' 37 | 38 | def test_extend_env_gets_removed(self): 39 | fake_conn = Mock() 40 | fake_conn.gateway.remote_exec.return_value = fake_conn 41 | fake_conn.receive.return_value = {'PATH': '/home/alfredo/bin'} 42 | result = process.extend_env(fake_conn, {'extend_env': {'CEPH_VOLUME_DEBUG': '1'}}) 43 | assert result.get('extend_env') is None 44 | 45 | @patch('subprocess.Popen') 46 | def 
test_remote_check_command_encoding_returns_bytes(self,fake_popen): 47 | fake_conn = Mock() 48 | fake_conn.gateway.remote_exec.return_value = fake_conn 49 | fake_conn.receive.return_value = {} 50 | 51 | test_str = "test string" 52 | fake_comm = Mock() 53 | fake_comm.communicate.return_value = iter([test_str,'']) 54 | fake_popen.return_value = fake_comm 55 | 56 | process._remote_check(fake_conn,cmd='', stdin=test_str) 57 | 58 | assert isinstance(fake_comm.communicate.call_args[0][0],bytes) 59 | 60 | -------------------------------------------------------------------------------- /remoto/tests/test_log.py: -------------------------------------------------------------------------------- 1 | from pytest import raises 2 | from remoto import log 3 | from remoto.exc import TimeoutError 4 | try: 5 | from unittest.mock import Mock 6 | except ImportError: 7 | from mock import Mock 8 | 9 | 10 | class TestReporting: 11 | 12 | def test_reporting_when_channel_is_empty(self): 13 | conn = Mock() 14 | result = Mock() 15 | result.receive.side_effect = EOFError 16 | log.reporting(conn, result) 17 | 18 | def test_write_debug_statements(self): 19 | conn = Mock() 20 | result = Mock() 21 | result.receive.side_effect = [{'debug': 'a debug message'}, EOFError] 22 | log.reporting(conn, result) 23 | assert conn.logger.debug.called is True 24 | assert conn.logger.info.called is False 25 | 26 | def test_write_info_statements(self): 27 | conn = Mock() 28 | result = Mock() 29 | result.receive.side_effect = [{'error': 'an error message'}, EOFError] 30 | log.reporting(conn, result) 31 | assert conn.logger.debug.called is False 32 | assert conn.logger.error.called is True 33 | 34 | def test_strip_new_lines(self): 35 | conn = Mock() 36 | result = Mock() 37 | result.receive.side_effect = [{'error': 'an error message\n\n'}, EOFError] 38 | log.reporting(conn, result) 39 | message = conn.logger.error.call_args[0][0] 40 | assert message == 'an error message' 41 | 42 | def test_strip_new_line(self): 43 | conn = Mock() 44 | result = Mock() 45 | result.receive.side_effect = [{'error': 'an error message\n'}, EOFError] 46 | log.reporting(conn, result) 47 | message = conn.logger.error.call_args[0][0] 48 | assert message == 'an error message' 49 | 50 | def test_strip_new_line_and_carriage_return(self): 51 | conn = Mock() 52 | result = Mock() 53 | result.receive.side_effect = [{'error': 'an error message\r\n'}, EOFError] 54 | log.reporting(conn, result) 55 | message = conn.logger.error.call_args[0][0] 56 | assert message == 'an error message' 57 | 58 | def test_strip_return(self): 59 | conn = Mock() 60 | result = Mock() 61 | result.receive.side_effect = [{'error': 'an error message\r'}, EOFError] 62 | log.reporting(conn, result) 63 | message = conn.logger.error.call_args[0][0] 64 | assert message == 'an error message' 65 | 66 | def test_timeout_error(self): 67 | conn = Mock() 68 | result = Mock() 69 | result.receive.side_effect = TimeoutError 70 | log.reporting(conn, result) 71 | message = conn.logger.warning.call_args[0][0] 72 | assert 'No data was received after ' in message 73 | 74 | def test_raises_other_errors(self): 75 | conn = Mock() 76 | result = Mock() 77 | result.receive.side_effect = OSError 78 | with raises(OSError): 79 | log.reporting(conn, result) 80 | -------------------------------------------------------------------------------- /remoto/tests/backends/test_local.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from remoto.backends import local 3 | 4 | 5 | class 
TestLocalConnection(object): 6 | 7 | def test_hostname_gets_ignored(self): 8 | conn = local.LocalConnection(hostname='node1') 9 | assert conn.hostname == 'localhost' 10 | 11 | def test_defaults_to_localhost_name(self): 12 | conn = local.LocalConnection() 13 | assert conn.hostname == 'localhost' 14 | 15 | 16 | class TestMakeConnectionstring(object): 17 | 18 | def test_makes_sudo_python_no_ssh(self): 19 | conn = local.LocalConnection(sudo=True, eager=False, interpreter='python') 20 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: False) 21 | assert conn_string == 'popen//python=sudo python' 22 | 23 | def test_makes_sudo_python_with_ssh(self): 24 | conn = local.LocalConnection(sudo=True, eager=False, interpreter='python') 25 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: True) 26 | assert conn_string == 'popen//python=sudo python' 27 | 28 | def test_makes_sudo_python_with_ssh_options_ignored(self): 29 | conn = local.LocalConnection( 30 | sudo=True, eager=False, 31 | interpreter='python', ssh_options='-F vagrant_ssh_config') 32 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: True) 33 | assert conn_string == 'popen//python=sudo python' 34 | 35 | def test_makes_python_no_ssh(self): 36 | conn = local.LocalConnection(sudo=False, eager=False, interpreter='python') 37 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: False) 38 | assert conn_string == 'popen//python=python' 39 | 40 | def test_makes_sudo_python_with_forced_sudo(self): 41 | conn = local.LocalConnection(sudo=True, eager=False, interpreter='python') 42 | conn_string = conn._make_connection_string( 43 | 'srv1', 44 | _needs_ssh=lambda x: False, use_sudo=True 45 | ) 46 | assert conn_string == 'popen//python=sudo python' 47 | 48 | def test_does_not_make_sudo_python_with_forced_sudo(self): 49 | conn = local.LocalConnection(sudo=True, eager=False, interpreter='python') 50 | conn_string = conn._make_connection_string( 51 | 'srv1', 52 | _needs_ssh=lambda x: False, use_sudo=False 53 | ) 54 | assert conn_string == 'popen//python=python' 55 | 56 | def test_detects_python3(self, monkeypatch): 57 | monkeypatch.setattr(sys, 'version_info', (3, 5, 1)) 58 | conn = local.LocalConnection(sudo=True, eager=False) 59 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: False) 60 | assert conn_string == 'popen//python=sudo python3' 61 | 62 | def test_detects_python2(self, monkeypatch): 63 | monkeypatch.setattr(sys, 'version_info', (2, 7, 11)) 64 | conn = local.LocalConnection(sudo=False, eager=False) 65 | conn_string = conn._make_connection_string('srv1', _needs_ssh=lambda x: True) 66 | assert conn_string == 'popen//python=python2' 67 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | 1.2.1 2 | ----- 3 | 18-May-2021 4 | 5 | * Prevent deadlock situations by avoiding ``stderr.read()`` and 6 | ``stdout.read()`` in favor of using ``communicate()`` 7 | 8 | 9 | 1.2.0 10 | ----- 11 | 20-Apr-2020 12 | 13 | * Allow to specify ``python_executable`` for remote execution, helping virtualenvs set remotely. 
14 | 15 | 16 | 1.1.4 17 | ----- 18 | 29-Apr-2019 19 | 20 | * Add ``needs_ssh`` to the ``connection`` module to fix a backwards incompatible change 21 | 22 | 23 | 1.1.3 24 | ----- 25 | 22-Mar-2019 26 | 27 | * No code changes - adding packaging files for Debian 28 | 29 | 30 | 1.1.2 31 | ----- 32 | 13-Mar-2019 33 | 34 | * Try a few different executables (not only ``python``) to check for a working 35 | one, in order of preference, starting with ``python3`` and ultimately falling 36 | back to the connection interpreter 37 | 38 | 39 | 1.1.1 40 | ----- 41 | 13-Mar-2019 42 | 43 | * Fix an issue with remote Python interpreters that might not be ``python``, 44 | like in distros that use ``python3`` or similar. 45 | 46 | 47 | 1.1.0 48 | ----- 49 | 26-Feb-2019 50 | 51 | * Allow to specify ``--context`` to kubernetes connections 52 | * When a remote exception happens using the ``JsonModuleExecute``, include both 53 | stderr and stdout. 54 | 55 | 56 | 1.0.0 57 | ----- 58 | 13-Feb-2019 59 | 60 | * Create other connection backends aside from ssh and local: kubernetes, 61 | podman, docker, and openshift. 62 | * Adds new remote function/module execution model for non-native (for execnet) backends, so that 63 | modules will work in backends like kubernetes. 64 | * Create a helper (``remoto.connection.get()``) for retrieving connection 65 | backends based on strings 66 | * Increase the test coverage. 67 | * Allow using ``localhost``, ``127.0.0.1``, and ``127.0.1.1`` to detect local 68 | connections (before the full hostname was required, as returned by 69 | ``socket.gethostname()``) 70 | * No longer require creating ``logging`` loggers to pass in to connection 71 | classes, it will create a basic one when undefined. 72 | 73 | 74 | 0.0.35 75 | ------ 76 | 8-Jan-2019 77 | 78 | * Fix the botched 0.0.34 version which had stale commits from 0.0.32 - No code 79 | changes. 80 | 81 | 82 | 0.0.34 83 | ------ 84 | 12-Dec-2018 85 | 86 | * Allow ``ssh_options`` to extend ssh flags in the ``Connection()`` object 87 | 88 | 89 | 0.0.33 90 | ------ 91 | 17-Jul-2018 92 | 93 | * ``extend_env`` needs to be removed from ``**kw`` **only** when present. 94 | 95 | 96 | 0.0.32 97 | ------ 98 | 16-Jul-2018 99 | 100 | * ``extend_env`` needs to be removed from ``**kw`` as it is being passed onto 101 | subprocess, which renders it invalid 102 | 103 | 104 | 0.0.31 105 | ------ 106 | 10-Jul-2018 107 | 108 | * Extend environment variables, do not overwrite 109 | 110 | 111 | 0.0.30 112 | ------ 113 | 05-Jul-2016 114 | 115 | * Fix test issue with py3 116 | * Remove vendored execnet 117 | * Include tests when building 118 | * Strip carriage-returns from messages in logs 119 | 120 | 0.0.29 121 | ------ 122 | 17-May-2016 123 | * Catch possible errors when remotes are missing the right Python interpreter 124 | 125 | 0.0.28 126 | ------ 127 | 11-May-2016 128 | * Avoid needless list comprehension that caused issues with Python 3 129 | * Do not bare return when clients expect a three item tuple always 130 | * Fix an issue where ``process.check`` would need to raise exit but the 131 | response had an error. 132 | 133 | 22-Dec-2015 134 | 0.0.27 135 | ------ 136 | 22-Dec-2015 137 | * Fix a problem where stderr/stdout variables would be undefined on certain 138 | conditions when running a remote command. 139 | 140 | 0.0.26 141 | ------ 142 | 15-Dec-2015 143 | * Fix (issue 19) where stdout and stderr would be prematurely ended and not 144 | fully logged. 
145 | 146 | 0.0.25 147 | ------ 148 | 21-Apr-2015 149 | * Fix (issue 15) where a child process could finish but output would not be 150 | flushed to stdout/stderr. 151 | 152 | 0.0.24 153 | ------ 154 | * Ship the ``LICENSE`` file and ``tests`` directory as part of the 155 | distribution. 156 | 157 | 0.0.23 158 | ------ 159 | * Output the exact same order of remote ``stdout`` and ``stderr`` 160 | 161 | 0.0.22 162 | ------ 163 | * Create a better detection mechanism for remote ``sudo`` needs 164 | 165 | 0.0.21 166 | ------ 167 | * Do not override remote environment variables to set the ``$PATH`` 168 | 169 | 0.0.20 170 | ------ 171 | * Fix unneeded ssh connection when using FQDN hosts 172 | 173 | 0.0.19 174 | ------ 175 | * Fix ``vendor.py`` to really include the proper tag for ``execnet`` 176 | 177 | 0.0.18 178 | ------ 179 | * Use execnet 1.2post2 that fixes a problem with ``None`` globals (see issue 180 | #1) 181 | 182 | 0.0.17 183 | ------ 184 | * add some imports to init so that they are easier to use 185 | * make vendor libraries optional 186 | 187 | 0.0.16 188 | ------ 189 | * spit stdout before stderr as errors should be read last 190 | 191 | 0.0.15 192 | ------ 193 | * eat typeerror when closing the connection (execnet) 194 | 195 | 0.0.14 196 | ------ 197 | * Use new execnet 1.2.0 198 | * use new connection defaults for execent 199 | 200 | 0.0.13 201 | ------ 202 | * Add a ``sync`` function to be able to synchronize directories between hosts. 203 | 204 | 0.0.12 205 | ------ 206 | * Map ``stderr`` to ``WARNING`` log level 207 | * Do not spit out ``remoto``'s own tracebacks when raising remote errors 208 | because some exception occurred just do it for non-remoto exceptions 209 | * Use version 1.1.1 of execnet with patches. 210 | 211 | 0.0.11 212 | ------ 213 | * Catch more TypeError problems when closing the connections. 214 | 215 | 0.0.10 216 | ------ 217 | * Allow configuration to raise on non-zero exit status 218 | 219 | 0.0.9 220 | ----- 221 | * If the exit status is non-zero on the remote end, raise an exception 222 | 223 | 0.0.8 224 | ----- 225 | * Raise RuntimeError on remote exceptions so others can actually 226 | catch that. 227 | 228 | 0.0.7 229 | ----- 230 | * Patches execnet to allow local popen with sudo python 231 | 232 | 0.0.6 233 | ----- 234 | * Add a global timeout option 235 | * All processes use PATH variables passed to Popen 236 | * Do not mangle commands if they need sudo 237 | * Allow sudo python 238 | 239 | 0.0.5 240 | ----- 241 | * Allow more than one thread to be started in the connection 242 | * log at debug level the name of the function to be remotely 243 | executed 244 | 245 | 0.0.4 246 | ----- 247 | * Create a way to execute functions remotely 248 | 249 | 0.0.3 250 | ----- 251 | * If the hostname passed in to the connection matches the local 252 | hostname, then do a local connection (not an ssh one) 253 | 254 | 0.0.2 255 | ----- 256 | * Allow a context manager for running one-off commands with the connection 257 | object. 258 | * ``process.run`` can now take in a timeout value so that it does not hang in 259 | remote processes 260 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | remoto 2 | ====== 3 | A very simplistic remote-command-executor using connections to hosts (``ssh``, 4 | local, containers, and several others are supported) and Python in the remote 5 | end. 
6 | 7 | All the heavy lifting is done by execnet, while this minimal API provides the 8 | bare minimum to handle easy logging and connections from the remote end. 9 | 10 | ``remoto`` is a bit opinionated as it was conceived to replace helpers and 11 | remote utilities for ``ceph-deploy``, a tool to run remote commands to configure 12 | and set up the distributed file system Ceph. `ceph-medic 13 | `_ uses remoto as well to inspect Ceph 14 | clusters. 15 | 16 | 17 | Example Usage 18 | ------------- 19 | The usage aims to be extremely straightforward, with a very minimal set of 20 | helpers and utilities for remote processes and logging output. 21 | 22 | The most basic example will use the ``run`` helper to execute a command on the 23 | remote end. It does require a logging object, which needs to provide, at 24 | the very least, both ``error`` and ``debug`` methods. Those are called for 25 | ``stderr`` and ``stdout`` respectively. 26 | 27 | This is how it would look with a basic logger passed in:: 28 | 29 | >>> conn = remoto.Connection('hostname') 30 | >>> run(conn, ['ls', '-a']) 31 | INFO:hostname:Running command: ls -a 32 | DEBUG:hostname:. 33 | DEBUG:hostname:.. 34 | DEBUG:hostname:.bash_history 35 | DEBUG:hostname:.bash_logout 36 | DEBUG:hostname:.bash_profile 37 | DEBUG:hostname:.bashrc 38 | DEBUG:hostname:.lesshst 39 | DEBUG:hostname:.pki 40 | DEBUG:hostname:.ssh 41 | DEBUG:hostname:.vim 42 | DEBUG:hostname:.viminfo 43 | 44 | The ``run`` helper will display the ``stderr`` and ``stdout`` as ``ERROR`` and 45 | ``DEBUG`` respectively. 46 | 47 | For other types of usage (like checking exit status codes, or raising on 48 | them), ``remoto`` provides helpers too. 49 | 50 | Remote Commands 51 | =============== 52 | 53 | ``process.run`` 54 | --------------- 55 | Calling remote commands can be done in a few different ways. The simplest 56 | one is with ``process.run``:: 57 | 58 | >>> from remoto.process import run 59 | >>> from remoto import connection 60 | >>> Connection = connection.get('ssh') 61 | >>> conn = Connection('myhost') 62 | >>> run(conn, ['whoami']) 63 | INFO:myhost:Running command: whoami 64 | DEBUG:myhost:root 65 | 66 | Note, however, that you are not capturing results or information from the remote 67 | end. The intention here is only to be able to run a command and log its output. 68 | It is a *fire and forget* call. 69 | 70 | 71 | ``process.check`` 72 | ----------------- 73 | This callable allows the caller to deal with the ``stderr``, ``stdout``, and 74 | exit code. It returns them in a 3-item tuple:: 75 | 76 | >>> from remoto.process import check 77 | >>> check(conn, ['ls', '/nonexistent/path']) 78 | ([], ['ls: cannot access /nonexistent/path: No such file or directory'], 2) 79 | 80 | Note that the ``stdout`` and ``stderr`` items are returned as lists with the ``\n`` 81 | characters removed. 82 | 83 | This is useful if you need to process the information back locally, as opposed 84 | to just firing and forgetting (while logging, like ``process.run``). 85 | 86 | 87 | Remote Functions 88 | ================ 89 | There are two supported ways to execute functions on the remote side. The 90 | library that ``remoto`` uses to connect (``execnet``) only supports a few 91 | backends *natively*, and ``remoto`` has extended this ability for other backend 92 | connections like kubernetes. 93 | 94 | The remote function capabilities are provided by ``LegacyModuleExecute`` and 95 | ``JsonModuleExecute``.
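For instance, a connection for one of the extended backends can be retrieved by name (a sketch; the pod name and namespace below are illustrative, and a reachable cluster is assumed)::

    >>> from remoto import connection
    >>> KubernetesConnection = connection.get('kubernetes')
    >>> conn = KubernetesConnection('pod-name', namespace='my-namespace')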
By default, both ``ssh`` and ``local`` connections will 96 | use the legacy execution class, and everything else will use the ``json`` 97 | class. The ``ssh`` and ``local`` connections can still be forced to use the new 98 | module execution by setting:: 99 | 100 | conn.remote_import_system = 'json' 101 | 102 | 103 | ``json`` 104 | -------- 105 | The default module for ``docker``, ``kubernetes``, ``podman``, and 106 | ``openshift``. It does not require any magic in the module to be executed; 107 | however, it is worth noting that the library *will* add the following bit of 108 | magic when sending the module to the remote end for execution:: 109 | 110 | 111 | if __name__ == '__main__': 112 | import json, traceback 113 | obj = {'return': None, 'exception': None} 114 | try: 115 | obj['return'] = function_name(*a) 116 | except Exception: 117 | obj['exception'] = traceback.format_exc() 118 | try: 119 | print(json.dumps(obj).decode('utf-8')) 120 | except AttributeError: 121 | print(json.dumps(obj)) 122 | 123 | This allows the system to execute ``function_name`` (replaced by the real 124 | function to be executed with its arguments), grab any results, serialize them 125 | with ``json``, and send them back for local processing. 126 | 127 | 128 | If you had a function in a module named ``foo`` that looks like this:: 129 | 130 | import os 131 | 132 | def listdir(path): 133 | return os.listdir(path) 134 | 135 | To be able to execute that ``listdir`` function remotely, you would need to pass 136 | the module to the connection object and then call that function:: 137 | 138 | >>> import foo 139 | >>> conn = Connection('hostname') 140 | >>> remote_foo = conn.import_module(foo) 141 | >>> remote_foo.listdir('.') 142 | ['.bash_logout', 143 | '.profile', 144 | '.veewee_version', 145 | '.lesshst', 146 | 'python', 147 | '.vbox_version', 148 | 'ceph', 149 | '.cache', 150 | '.ssh'] 151 | 152 | Note that functions to be executed remotely **cannot** accept objects as 153 | arguments, just normal Python data structures, like tuples, lists, and 154 | dictionaries. Ints and strings are also safe to use. 155 | 156 | 157 | ``legacy`` 158 | ---------- 159 | When using the ``legacy`` execution model (the default for ``local`` and 160 | ``ssh`` connections), modules are required to add the following to the end of 161 | that module:: 162 | 163 | if __name__ == '__channelexec__': 164 | for item in channel: 165 | channel.send(eval(item)) 166 | 167 | This piece of code is fully compatible with the ``json`` execution model, and 168 | would not cause conflicts. 169 | 170 | 171 | Automatic detection for ssh connections 172 | --------------------------------------- 173 | Whether a connection needs to be remote (via SSH) or not is inferred 174 | automatically by comparing the hostname of the current host with the hostname 175 | of the host being connected to. 176 | 177 | If the local host has the same hostname as the remote host, a local connection 178 | (via `Popen`) will be opened and used instead of `ssh`, avoiding the need to 179 | ssh into the same host. 180 | 181 | Automatic detection for using `sudo` 182 | ------------------------------------ 183 | This magical detection can be enabled by using the `detect_sudo` flag in the 184 | `Connection` class. It is disabled by default. 185 | 186 | When enabled, it will prefix any command with `sudo`, as in the sketch below.
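A minimal sketch (the hostname is illustrative, and the connecting user is assumed to be allowed to run `sudo`)::

    >>> import remoto
    >>> from remoto.process import run
    >>> conn = remoto.Connection('hostname', detect_sudo=True)
    >>> run(conn, ['whoami'])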
This is useful for 187 | libraries that need super user permissions and want to avoid passing `sudo` 188 | everywhere, which can be non-trivial if dealing with `root` users that are 189 | connecting via SSH. 190 | -------------------------------------------------------------------------------- /remoto/process.py: -------------------------------------------------------------------------------- 1 | import traceback 2 | from .log import reporting 3 | from .util import admin_command, RemoteError 4 | 5 | 6 | def _remote_run(channel, cmd, **kw): 7 | import subprocess 8 | import sys 9 | from select import select 10 | stop_on_nonzero = kw.pop('stop_on_nonzero', True) 11 | 12 | process = subprocess.Popen( 13 | cmd, 14 | stdout=subprocess.PIPE, 15 | stderr=subprocess.PIPE, 16 | close_fds=True, 17 | **kw 18 | ) 19 | 20 | while True: 21 | reads, _, _ = select( 22 | [process.stdout.fileno(), process.stderr.fileno()], 23 | [], [] 24 | ) 25 | 26 | for descriptor in reads: 27 | if descriptor == process.stdout.fileno(): 28 | read = process.stdout.readline() 29 | if read: 30 | channel.send({'debug': read}) 31 | sys.stdout.flush() 32 | 33 | if descriptor == process.stderr.fileno(): 34 | read = process.stderr.readline() 35 | if read: 36 | channel.send({'warning': read}) 37 | sys.stderr.flush() 38 | 39 | if process.poll() is not None: 40 | # ensure we do not have anything pending in stdout or stderr 41 | # unfortunately, we cannot abstract this repetitive loop into its 42 | # own function because execnet does not allow for non-global (or 43 | # even nested functions). This must be repeated here. 44 | while True: 45 | err_read = out_read = None 46 | for descriptor in reads: 47 | if descriptor == process.stdout.fileno(): 48 | out_read = process.stdout.readline() 49 | if out_read: 50 | channel.send({'debug': out_read}) 51 | sys.stdout.flush() 52 | 53 | if descriptor == process.stderr.fileno(): 54 | err_read = process.stderr.readline() 55 | if err_read: 56 | channel.send({'warning': err_read}) 57 | sys.stderr.flush() 58 | # At this point we have gone through all the possible 59 | # descriptors and `read` was empty, so we now can break out of 60 | # this since all stdout/stderr has been properly flushed to 61 | # logging 62 | if not err_read and not out_read: 63 | break 64 | 65 | break 66 | 67 | returncode = process.wait() 68 | if returncode != 0: 69 | if stop_on_nonzero: 70 | raise RuntimeError( 71 | "command returned non-zero exit status: %s" % returncode 72 | ) 73 | else: 74 | channel.send({'warning': "command returned non-zero exit status: %s" % returncode}) 75 | 76 | 77 | def extend_env(conn, arguments): 78 | """ 79 | get the remote environment's env so we can explicitly add the path without 80 | wiping out everything 81 | """ 82 | # retrieve the remote environment variables for the host 83 | try: 84 | result = conn.gateway.remote_exec("import os; channel.send(os.environ.copy())") 85 | env = result.receive() 86 | except Exception: 87 | conn.logger.exception('failed to retrieve the remote environment variables') 88 | env = {} 89 | 90 | # get the $PATH and extend it (do not overwrite) 91 | path = env.get('PATH', '') 92 | env['PATH'] = path + '/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin' 93 | arguments['env'] = env 94 | if arguments.get('extend_env'): 95 | for key, value in arguments['extend_env'].items(): 96 | arguments['env'][key] = value 97 | arguments.pop('extend_env') 98 | return arguments 99 | 100 | 101 | def run(conn, command, exit=False, timeout=None, **kw): 102 | """ 103 | A 
real-time-logging implementation of a remote subprocess.Popen call where 104 | a command is just executed on the remote end and no other handling is done. 105 | 106 | :param conn: A connection oject 107 | :param command: The command to pass in to the remote subprocess.Popen 108 | :param exit: If this call should close the connection at the end 109 | :param timeout: How many seconds to wait after no remote data is received 110 | (defaults to wait for ever) 111 | """ 112 | stop_on_error = kw.pop('stop_on_error', True) 113 | if not kw.get('env'): 114 | # get the remote environment's env so we can explicitly add 115 | # the path without wiping out everything 116 | kw = extend_env(conn, kw) 117 | 118 | command = conn.cmd(command) 119 | 120 | timeout = timeout or conn.global_timeout 121 | conn.logger.info('Running command: %s' % ' '.join(admin_command(conn.sudo, command))) 122 | result = conn.execute(_remote_run, cmd=command, **kw) 123 | try: 124 | reporting(conn, result, timeout) 125 | except Exception: 126 | remote_trace = traceback.format_exc() 127 | remote_error = RemoteError(remote_trace) 128 | if remote_error.exception_name == 'RuntimeError': 129 | conn.logger.error(remote_error.exception_line) 130 | else: 131 | for tb_line in remote_trace.split('\n'): 132 | conn.logger.error(tb_line) 133 | if stop_on_error: 134 | raise RuntimeError( 135 | 'Failed to execute command: %s' % ' '.join(command) 136 | ) 137 | if exit: 138 | conn.exit() 139 | 140 | 141 | def _remote_check(channel, cmd, **kw): 142 | import subprocess 143 | stdin = kw.pop('stdin', None) 144 | process = subprocess.Popen( 145 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, **kw 146 | ) 147 | 148 | if stdin: 149 | if not isinstance(stdin, bytes): 150 | stdin = stdin.encode('utf-8', errors='ignore') 151 | stdout_stream, stderr_stream = process.communicate(stdin) 152 | else: 153 | stdout_stream, stderr_stream = process.communicate() 154 | 155 | try: 156 | stdout_stream = stdout_stream.decode('utf-8') 157 | stderr_stream = stderr_stream.decode('utf-8') 158 | except AttributeError: 159 | pass 160 | 161 | stdout = stdout_stream.splitlines() 162 | stderr = stderr_stream.splitlines() 163 | channel.send((stdout, stderr, process.wait())) 164 | 165 | 166 | def check(conn, command, exit=False, timeout=None, **kw): 167 | """ 168 | Execute a remote command with ``subprocess.Popen`` but report back the 169 | results in a tuple with three items: stdout, stderr, and exit status. 170 | 171 | This helper function *does not* provide any logging as it is the caller's 172 | responsibility to do so. 173 | """ 174 | command = conn.cmd(command) 175 | 176 | stop_on_error = kw.pop('stop_on_error', True) 177 | timeout = timeout or conn.global_timeout 178 | if not kw.get('env'): 179 | # get the remote environment's env so we can explicitly add 180 | # the path without wiping out everything 181 | kw = extend_env(conn, kw) 182 | 183 | conn.logger.info('Running command: %s' % ' '.join(admin_command(conn.sudo, command))) 184 | result = conn.execute(_remote_check, cmd=command, **kw) 185 | response = None 186 | try: 187 | response = result.receive(timeout) 188 | except Exception as err: 189 | # the things we need to do here :( 190 | # because execnet magic, we cannot catch this as 191 | # `except TimeoutError` 192 | if err.__class__.__name__ == 'TimeoutError': 193 | msg = 'No data was received after %s seconds, disconnecting...' 
% timeout 194 | conn.logger.warning(msg) 195 | # there is no stdout, stderr, or exit code but make the exit code 196 | # an error condition (non-zero) regardless 197 | return [], [], -1 198 | else: 199 | remote_trace = traceback.format_exc() 200 | remote_error = RemoteError(remote_trace) 201 | if remote_error.exception_name == 'RuntimeError': 202 | conn.logger.error(remote_error.exception_line) 203 | else: 204 | for tb_line in remote_trace.split('\n'): 205 | conn.logger.error(tb_line) 206 | if stop_on_error: 207 | raise RuntimeError( 208 | 'Failed to execute command: %s' % ' '.join(command) 209 | ) 210 | if exit: 211 | conn.exit() 212 | return response 213 | -------------------------------------------------------------------------------- /remoto/backends/__init__.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import json 3 | import socket 4 | import sys 5 | import execnet 6 | import logging 7 | from remoto.process import check 8 | 9 | 10 | class BaseConnection(object): 11 | """ 12 | Base class for Connection objects. Provides a generic interface to execnet 13 | for setting up the connection 14 | """ 15 | executable = '' 16 | remote_import_system = 'legacy' 17 | 18 | def __init__(self, hostname, logger=None, sudo=False, threads=1, eager=True, 19 | detect_sudo=False, use_ssh=False, interpreter=None, ssh_options=None): 20 | self.sudo = sudo 21 | self.hostname = hostname 22 | self.ssh_options = ssh_options 23 | self.logger = logger or basic_remote_logger() 24 | self.remote_module = None 25 | self.channel = None 26 | self.use_ssh = use_ssh 27 | self.global_timeout = None # wait for ever 28 | 29 | self.interpreter = interpreter or 'python%s' % sys.version_info[0] 30 | 31 | if eager: 32 | try: 33 | if detect_sudo: 34 | self.sudo = self._detect_sudo() 35 | self.gateway = self._make_gateway(hostname) 36 | except OSError: 37 | self.logger.error( 38 | "Can't communicate with remote host, possibly because " 39 | "%s is not installed there" % self.interpreter 40 | ) 41 | raise 42 | 43 | def _make_gateway(self, hostname): 44 | self.group = execnet.Group() 45 | gateway = self.group.makegateway( 46 | self._make_connection_string(hostname) 47 | ) 48 | gateway.reconfigure(py2str_as_py3str=False, py3str_as_py2str=False) 49 | return gateway 50 | 51 | def _detect_sudo(self, _execnet=None): 52 | """ 53 | ``sudo`` detection has to create a different connection to the remote 54 | host so that we can reliably ensure that ``getuser()`` will return the 55 | right information. 
56 | 57 | After getting the user info it closes the connection and returns 58 | a boolean 59 | """ 60 | exc = _execnet or execnet 61 | gw = exc.makegateway( 62 | self._make_connection_string(self.hostname, use_sudo=False) 63 | ) 64 | 65 | channel = gw.remote_exec( 66 | 'import getpass; channel.send(getpass.getuser())' 67 | ) 68 | 69 | result = channel.receive() 70 | gw.exit() 71 | 72 | if result == 'root': 73 | return False 74 | self.logger.debug('connection detected need for sudo') 75 | return True 76 | 77 | def _make_connection_string(self, hostname, _needs_ssh=None, use_sudo=None): 78 | _needs_ssh = _needs_ssh or needs_ssh 79 | interpreter = self.interpreter 80 | if use_sudo is not None: 81 | if use_sudo: 82 | interpreter = 'sudo ' + interpreter 83 | elif self.sudo: 84 | interpreter = 'sudo ' + interpreter 85 | 86 | if _needs_ssh(hostname) or self.use_ssh: 87 | if self.ssh_options: 88 | return 'ssh=%s %s//python=%s' % ( 89 | self.ssh_options, hostname, interpreter 90 | ) 91 | else: 92 | return 'ssh=%s//python=%s' % (hostname, interpreter) 93 | return 'popen//python=%s' % interpreter 94 | 95 | def __enter__(self): 96 | return self 97 | 98 | def __exit__(self, exc_type, exc_val, exc_tb): 99 | self.group.terminate(timeout=1.0) 100 | return False 101 | 102 | def cmd(self, cmd): 103 | """ 104 | In the base connection class, this method just returns the ``cmd`` 105 | as-is. Other implementations will end up doing transformations to the 106 | command by prefixing it with other flags needed. See 107 | :class:`KubernetesConnection` for an example 108 | """ 109 | return cmd 110 | 111 | def execute(self, function, **kw): 112 | return self.gateway.remote_exec(function, **kw) 113 | 114 | def exit(self): 115 | self.group.terminate(timeout=1.0) 116 | 117 | def import_module(self, module, python_executable=None): 118 | """ 119 | Allows remote execution of a local module. Depending on the 120 | ``remote_import_system`` attribute it may use execnet's implementation 121 | or remoto's own based on JSON. 122 | 123 | .. note:: It is not possible to use execnet's remote execution model on 124 | connections that aren't SSH or Local. 125 | """ 126 | if self.remote_import_system is not None: 127 | if self.remote_import_system == 'json': 128 | self.remote_module = JsonModuleExecute(self, module, self.logger, 129 | python_executable=python_executable) 130 | else: 131 | self.remote_module = LegacyModuleExecute(self.gateway, module, self.logger) 132 | else: 133 | self.remote_module = LegacyModuleExecute(self.gateway, module, self.logger) 134 | return self.remote_module 135 | 136 | def has_connection(self): 137 | if self.gateway: 138 | return self.gateway.hasreceiver() 139 | return False 140 | 141 | 142 | class LegacyModuleExecute(object): 143 | """ 144 | This (now legacy) class, is the way ``execnet`` does its remote module 145 | execution: it sends it over a channel, and does a send/receive for 146 | exchanging information. This only works when there is native support in 147 | execnet for a given connection. This currently means it would only work for 148 | ssh and local (Popen) connections, and will not work for anything like 149 | kubernetes or containers. 
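    Typically this class is not instantiated directly: ``BaseConnection.import_module``
    creates it when the legacy import system is in use. A minimal sketch, only for
    illustration (``my_module`` stands for a module that ends with the
    ``__channelexec__`` block described in the README, and ``function_name`` for any
    function defined in it)::

        conn = BaseConnection('hostname')  # remote_import_system defaults to 'legacy'
        remote_module = conn.import_module(my_module)
        remote_module.function_name('argument')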
150 | """ 151 | 152 | def __init__(self, gateway, module, logger=None): 153 | self.channel = gateway.remote_exec(module) 154 | self.module = module 155 | self.logger = logger 156 | 157 | def __getattr__(self, name): 158 | if not hasattr(self.module, name): 159 | msg = "module %s does not have attribute %s" % (str(self.module), name) 160 | raise AttributeError(msg) 161 | docstring = self._get_func_doc(getattr(self.module, name)) 162 | 163 | def wrapper(*args): 164 | arguments = self._convert_args(args) 165 | if docstring: 166 | self.logger.debug(docstring) 167 | self.channel.send("%s(%s)" % (name, arguments)) 168 | try: 169 | return self.channel.receive() 170 | except Exception as error: 171 | # Error will come as a string of a traceback, remove everything 172 | # up to the actual exception since we do get garbage otherwise 173 | # that points to non-existent lines in the compiled code 174 | exc_line = str(error) 175 | for tb_line in reversed(str(error).split('\n')): 176 | if tb_line: 177 | exc_line = tb_line 178 | break 179 | raise RuntimeError(exc_line) 180 | 181 | return wrapper 182 | 183 | def _get_func_doc(self, func): 184 | try: 185 | return getattr(func, 'func_doc').strip() 186 | except AttributeError: 187 | return '' 188 | 189 | def _convert_args(self, args): 190 | if args: 191 | if len(args) > 1: 192 | arguments = str(args).rstrip(')').lstrip('(') 193 | else: 194 | arguments = str(args).rstrip(',)').lstrip('(') 195 | else: 196 | arguments = '' 197 | return arguments 198 | 199 | 200 | dump_template = """ 201 | if __name__ == '__main__': 202 | import json, traceback 203 | obj = {'return': None, 'exception': None} 204 | try: 205 | obj['return'] = %s%s 206 | except Exception: 207 | obj['exception'] = traceback.format_exc() 208 | try: 209 | print(json.dumps(obj).decode('utf-8')) 210 | except AttributeError: 211 | print(json.dumps(obj)) 212 | """ 213 | 214 | 215 | class JsonModuleExecute(object): 216 | """ 217 | This remote execution class allows to ship Python code over to the remote 218 | node, load it via ``stdin`` and call any function with arguments. The 219 | resulting response is dumped over JSON so that it can get printed to 220 | ``stdout``, then captured locally, loaded into regular Python and returned. 221 | 222 | If the remote end generates an exception with a traceback, that is captured 223 | as well and raised accordingly. 
224 | """ 225 | 226 | def __init__(self, conn, module, logger=None, python_executable=None): 227 | self.conn = conn 228 | self.module = module 229 | self._module_source = inspect.getsource(module) 230 | self.logger = logger 231 | self.python_executable = python_executable 232 | 233 | def __getattr__(self, name): 234 | if not hasattr(self.module, name): 235 | msg = "module %s does not have attribute %s" % (str(self.module), name) 236 | raise AttributeError(msg) 237 | docstring = self._get_func_doc(getattr(self.module, name)) 238 | 239 | def wrapper(*args): 240 | if docstring: 241 | self.logger.debug(docstring) 242 | if len(args): 243 | source = self._module_source + dump_template % (name, repr(args)) 244 | else: 245 | source = self._module_source + dump_template % (name, '()') 246 | 247 | # check python interpreter 248 | if self.python_executable is None: 249 | self.python_executable = get_python_executable(self.conn) 250 | 251 | out, err, code = check(self.conn, [self.python_executable], stdin=source.encode('utf-8')) 252 | if not out: 253 | if not err: 254 | err = [ 255 | 'Traceback (most recent call last):', 256 | ' File "", in ', 257 | 'Exception: error calling "%s"' % name 258 | ] 259 | if code: 260 | raise Exception('Unexpected remote exception: \n%s\n%s' % ('\n'.join(out), '\n'.join(err))) 261 | # at this point, there was no stdout, and the exit code was 0, 262 | # we must return so that we don't fail trying to serialize back 263 | # the JSON 264 | return 265 | response = json.loads(out[0]) 266 | if response['exception']: 267 | raise Exception(response['exception']) 268 | return response['return'] 269 | 270 | return wrapper 271 | 272 | def _get_func_doc(self, func): 273 | try: 274 | return getattr(func, 'func_doc').strip() 275 | except AttributeError: 276 | return '' 277 | 278 | 279 | def basic_remote_logger(): 280 | logging.basicConfig() 281 | logger = logging.getLogger(socket.gethostname()) 282 | logger.setLevel(logging.DEBUG) 283 | return logger 284 | 285 | 286 | def needs_ssh(hostname, _socket=None): 287 | """ 288 | Obtains remote hostname of the socket and cuts off the domain part 289 | of its FQDN. 290 | """ 291 | if hostname.lower() in ['localhost', '127.0.0.1', '127.0.1.1']: 292 | return False 293 | _socket = _socket or socket 294 | fqdn = _socket.getfqdn() 295 | if hostname == fqdn: 296 | return False 297 | local_hostname = _socket.gethostname() 298 | local_short_hostname = local_hostname.split('.')[0] 299 | if local_hostname == hostname or local_short_hostname == hostname: 300 | return False 301 | return True 302 | 303 | 304 | def get_python_executable(conn): 305 | """ 306 | Try to determine the remote Python version so that it can be used 307 | when executing. 
Avoids the problem of different Python versions, or distros 308 | that do not use ``python`` but do ``python3`` 309 | """ 310 | # executables in order of preference: 311 | executables = ['python3', 'python', 'python2.7'] 312 | for executable in executables: 313 | conn.logger.debug('trying to determine remote python executable with %s' % executable) 314 | out, err, code = check(conn, ['which', executable]) 315 | if code: 316 | conn.logger.warning('skipping %s, was not found in path' % executable) 317 | else: 318 | try: 319 | return out[0].strip() 320 | except IndexError: 321 | conn.logger.warning('could not parse stdout: %s' % out) 322 | 323 | # if all fails, we just return whatever the main connection had 324 | conn.logger.info('Falling back to using interpreter: %s' % conn.interpreter) 325 | return conn.interpreter 326 | -------------------------------------------------------------------------------- /remoto/tests/backends/test_backends.py: -------------------------------------------------------------------------------- 1 | import sys 2 | try: 3 | from unittest.mock import Mock, patch 4 | except ImportError: 5 | from mock import Mock, patch 6 | import pytest 7 | from remoto import backends 8 | from remoto.backends import local 9 | from remoto.tests import fake_module 10 | from remoto.tests.conftest import Capture, Factory 11 | 12 | 13 | class FakeSocket(object): 14 | 15 | def __init__(self, gethostname, getfqdn=None): 16 | self.gethostname = lambda: gethostname 17 | self.getfqdn = lambda: getfqdn or gethostname 18 | 19 | 20 | class TestJsonModuleExecute(object): 21 | 22 | def test_execute_returns_casted_boolean(self): 23 | conn = local.LocalConnection() 24 | conn.remote_import_system = 'json' 25 | remote_fake_module = conn.import_module(fake_module) 26 | assert remote_fake_module.function(None) is True 27 | 28 | def test_execute_can_raise_remote_exceptions(self): 29 | conn = local.LocalConnection() 30 | conn.remote_import_system = 'json' 31 | remote_fake_module = conn.import_module(fake_module) 32 | with pytest.raises(Exception) as error: 33 | assert remote_fake_module.fails() 34 | assert 'Exception: failure from fails() function' in str(error.value) 35 | 36 | def test_execute_can_raise_unexpected_remote_exceptions(self): 37 | conn = local.LocalConnection() 38 | conn.remote_import_system = 'json' 39 | remote_fake_module = conn.import_module(fake_module) 40 | with pytest.raises(Exception) as error: 41 | remote_fake_module.unexpected_fail() 42 | assert 'error calling "unexpected_fail"' in str(error.value) 43 | assert 'Unexpected remote exception' in str(error.value) 44 | 45 | def test_execute_noop(self): 46 | conn = local.LocalConnection() 47 | conn.remote_import_system = 'json' 48 | remote_fake_module = conn.import_module(fake_module) 49 | assert remote_fake_module.noop() is None 50 | 51 | def test_execute_passes_is_none(self): 52 | conn = local.LocalConnection() 53 | conn.remote_import_system = 'json' 54 | remote_fake_module = conn.import_module(fake_module) 55 | assert remote_fake_module.passes() is None 56 | 57 | def test_execute_wrong_interpreter(self): 58 | conn = local.LocalConnection() 59 | conn.remote_import_system = 'json' 60 | remote_fake_module = conn.import_module(fake_module) 61 | remote_fake_module.python_executable = 'python9' 62 | with pytest.raises(Exception) as error: 63 | remote_fake_module.passes() 64 | assert 'Failed to execute command: python9' in str(error.value) 65 | 66 | def test_fallback_interpreter(self, monkeypatch, capsys): 67 | monkeypatch.setattr(backends, 
'check', lambda *a, **kw: ('', '', 1)) 68 | conn = local.LocalConnection() 69 | conn.remote_import_system = 'json' 70 | remote_fake_module = conn.import_module(fake_module) 71 | try: 72 | remote_fake_module.passes() 73 | except Exception: 74 | pass 75 | assert remote_fake_module.python_executable is not None 76 | 77 | def test_python_executable(self): 78 | python_executable = sys.executable 79 | conn = local.LocalConnection() 80 | conn.remote_import_system = 'json' 81 | remote_fake_module = conn.import_module(fake_module, 82 | python_executable=python_executable) 83 | assert remote_fake_module.remote_interpreter() == python_executable 84 | 85 | def test_wrong_python_executable(self): 86 | python_executable = '/path/to/python' 87 | conn = local.LocalConnection() 88 | conn.remote_import_system = 'json' 89 | remote_fake_module = conn.import_module(fake_module, 90 | python_executable=python_executable) 91 | with pytest.raises(Exception) as error: 92 | remote_fake_module.remote_interpreter() 93 | assert 'Failed to execute command: {}'.format(python_executable) in str(error.value) 94 | 95 | 96 | class TestNeedsSsh(object): 97 | 98 | def test_short_hostname_matches(self): 99 | socket = FakeSocket('foo.example.org') 100 | assert backends.needs_ssh('foo', socket) is False 101 | 102 | def test_long_hostname_matches(self): 103 | socket = FakeSocket('foo.example.org') 104 | assert backends.needs_ssh('foo.example.org', socket) is False 105 | 106 | def test_hostname_does_not_match(self): 107 | socket = FakeSocket('foo') 108 | assert backends.needs_ssh('meh', socket) is True 109 | 110 | def test_fqdn_hostname_matches_short_hostname(self): 111 | socket = FakeSocket('foo', getfqdn='foo.example.org') 112 | assert backends.needs_ssh('foo.example.org', socket) is False 113 | 114 | @pytest.mark.parametrize('hostname', ['localhost', '127.0.0.1', '127.0.1.1']) 115 | def test_local_hostname(self, hostname): 116 | assert backends.needs_ssh(hostname) is False 117 | 118 | 119 | class FakeGateway(object): 120 | def __init__(self, connected=True): 121 | self.connected = connected 122 | 123 | def remote_exec(self, module): 124 | pass 125 | 126 | def hasreceiver(self): 127 | return self.connected 128 | 129 | class TestLegacyRemoteModule(object): 130 | 131 | def setup(self): 132 | self.conn = backends.BaseConnection('localhost', sudo=True, eager=False) 133 | self.gateway = FakeGateway() 134 | self.conn.gateway = self.gateway 135 | 136 | def test_importing_it_sets_it_as_remote_module(self): 137 | self.conn.import_module(fake_module) 138 | assert fake_module == self.conn.remote_module.module 139 | 140 | def test_importing_it_returns_the_module_too(self): 141 | remote_foo = self.conn.import_module(fake_module) 142 | assert remote_foo.module == fake_module 143 | 144 | def test_execute_the_remote_module_send(self): 145 | stub_channel = Factory(send=Capture(), receive=Capture()) 146 | self.conn.gateway.channel = self.conn.gateway 147 | remote_foo = self.conn.import_module(fake_module) 148 | remote_foo.channel = stub_channel 149 | remote_foo.function('argument') 150 | assert stub_channel.send.calls[0]['args'][0] == "function('argument')" 151 | 152 | def test_execute_the_remote_module_receive(self): 153 | stub_channel = Factory(receive=Capture(return_values=[True]), send=Capture()) 154 | self.conn.gateway.channel = self.conn.gateway 155 | remote_foo = self.conn.import_module(fake_module) 156 | remote_foo.channel = stub_channel 157 | assert remote_foo.function('argument') is True 158 | 159 | def test_has_connection(self): 160 | 
assert self.conn.has_connection() is True 161 | 162 | # Disconnect the gateway 163 | self.gateway.connected = False 164 | assert self.conn.has_connection() is False 165 | 166 | 167 | class TestLegacyModuleExecuteArgs(object): 168 | 169 | def setup(self): 170 | self.remote_module = backends.LegacyModuleExecute(FakeGateway(), None) 171 | 172 | def test_single_argument(self): 173 | assert self.remote_module._convert_args(('foo',)) == "'foo'" 174 | 175 | def test_more_than_one_argument(self): 176 | args = ('foo', 'bar', 1) 177 | assert self.remote_module._convert_args(args) == "'foo', 'bar', 1" 178 | 179 | def test_dictionary_as_argument(self): 180 | args = ({'some key': 1},) 181 | assert self.remote_module._convert_args(args) == "{'some key': 1}" 182 | 183 | 184 | class TestLegacyModuleExecuteGetAttr(object): 185 | 186 | def setup(self): 187 | self.remote_module = backends.LegacyModuleExecute(FakeGateway(), None) 188 | 189 | def test_raise_attribute_error(self): 190 | with pytest.raises(AttributeError) as err: 191 | self.remote_module.foo() 192 | assert err.value.args[0] == 'module None does not have attribute foo' 193 | 194 | 195 | class TestMakeConnectionString(object): 196 | 197 | def test_makes_sudo_python_no_ssh(self): 198 | conn = backends.BaseConnection('localhost', sudo=True, eager=False, interpreter='python') 199 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: False) 200 | assert conn_string == 'popen//python=sudo python' 201 | 202 | def test_makes_sudo_python_with_ssh(self): 203 | conn = backends.BaseConnection('localhost', sudo=True, eager=False, interpreter='python') 204 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: True) 205 | assert conn_string == 'ssh=localhost//python=sudo python' 206 | 207 | def test_makes_sudo_python_with_ssh_options(self): 208 | conn = backends.BaseConnection( 209 | 'localhost', sudo=True, eager=False, 210 | interpreter='python', ssh_options='-F vagrant_ssh_config') 211 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: True) 212 | assert conn_string == 'ssh=-F vagrant_ssh_config localhost//python=sudo python' 213 | 214 | def test_makes_python_no_ssh(self): 215 | conn = backends.BaseConnection('localhost', sudo=False, eager=False, interpreter='python') 216 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: False) 217 | assert conn_string == 'popen//python=python' 218 | 219 | def test_makes_python_with_ssh(self): 220 | conn = backends.BaseConnection('localhost', sudo=False, eager=False, interpreter='python') 221 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: True) 222 | assert conn_string == 'ssh=localhost//python=python' 223 | 224 | def test_ssh_is_forced(self): 225 | conn = backends.BaseConnection('localhost', sudo=False, eager=False, use_ssh=True, interpreter='python') 226 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: False) 227 | assert conn_string == 'ssh=localhost//python=python' 228 | 229 | def test_makes_sudo_python_with_forced_ssh(self): 230 | conn = backends.BaseConnection('localhost', sudo=True, eager=False, interpreter='python') 231 | conn_string = conn._make_connection_string( 232 | 'localhost', _needs_ssh=lambda x: False, use_sudo=True 233 | ) 234 | assert conn_string == 'popen//python=sudo python' 235 | 236 | def test_does_not_make_sudo_python_with_forced_sudo(self): 237 | conn = backends.BaseConnection('localhost', sudo=True, eager=False, interpreter='python') 
238 | conn_string = conn._make_connection_string( 239 | 'localhost', _needs_ssh=lambda x: False, use_sudo=False 240 | ) 241 | assert conn_string == 'popen//python=python' 242 | 243 | def test_detects_python3(self): 244 | with patch.object(sys, 'version_info', (3, 5, 1)): 245 | conn = backends.BaseConnection('localhost', sudo=True, eager=False) 246 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: False) 247 | assert conn_string == 'popen//python=sudo python3' 248 | 249 | def test_detects_python2(self): 250 | with patch.object(sys, 'version_info', (2, 7, 11)): 251 | conn = backends.BaseConnection('localhost', sudo=False, eager=False) 252 | conn_string = conn._make_connection_string('localhost', _needs_ssh=lambda x: True) 253 | assert conn_string == 'ssh=localhost//python=python2' 254 | 255 | 256 | class TestDetectSudo(object): 257 | 258 | def setup(self): 259 | self.execnet = Mock() 260 | self.execnet.return_value = self.execnet 261 | self.execnet.makegateway.return_value = self.execnet 262 | self.execnet.remote_exec.return_value = self.execnet 263 | 264 | def test_does_not_need_sudo(self): 265 | self.execnet.receive.return_value = 'root' 266 | conn = backends.BaseConnection('localhost', sudo=True, eager=False) 267 | assert conn._detect_sudo(_execnet=self.execnet) is False 268 | 269 | def test_does_need_sudo(self): 270 | self.execnet.receive.return_value = 'alfredo' 271 | conn = backends.BaseConnection('localhost', sudo=True, eager=False) 272 | assert conn._detect_sudo(_execnet=self.execnet) is True 273 | 274 | 275 | class TestGetPythonExecutable(object): 276 | 277 | def test_non_zero(self, monkeypatch): 278 | monkeypatch.setattr(backends, 'check', lambda *a, **kw: ([], [], 1)) 279 | conn = local.LocalConnection() 280 | result = backends.get_python_executable(conn) 281 | assert result == conn.interpreter 282 | 283 | def test_no_stdout(self, monkeypatch): 284 | monkeypatch.setattr(backends, 'check', lambda *a, **kw: ([], [], 0)) 285 | conn = local.LocalConnection() 286 | result = backends.get_python_executable(conn) 287 | assert result == conn.interpreter 288 | 289 | def test_which(self, monkeypatch): 290 | monkeypatch.setattr(backends, 'check', lambda *a, **kw: (['/usr/bin/python17'], [], 0)) 291 | conn = local.LocalConnection() 292 | result = backends.get_python_executable(conn) 293 | assert result == '/usr/bin/python17' 294 | --------------------------------------------------------------------------------