├── .gitattributes
├── .gitignore
├── .readthedocs.yml
├── LICENSE
├── MANIFEST.in
├── README.md
├── docs
│   ├── Makefile
│   ├── make.bat
│   ├── requirements.txt
│   └── source
│       ├── Exceptions.rst
│       ├── Job.rst
│       ├── PipelineNode.rst
│       ├── PipelineNodeFactory.rst
│       ├── Shell.rst
│       ├── Stream.rst
│       ├── StreamFactory.rst
│       ├── _static
│       │   └── css
│       │       └── custom.css
│       ├── conf.py
│       └── index.rst
├── examples
│   ├── MountContext.py
│   └── MountImage.py
├── pylintrc
├── run_tests.py
├── setup.cfg
├── setup.py
├── shtk
│   ├── Job.py
│   ├── PipelineNode.py
│   ├── PipelineNodeFactory.py
│   ├── Shell.py
│   ├── Stream.py
│   ├── StreamFactory.py
│   ├── _AsyncUtil.py
│   ├── __init__.py
│   ├── _version.py
│   ├── test
│   │   ├── Job
│   │   │   ├── Job.py
│   │   │   └── __init__.py
│   │   ├── Operators
│   │   │   ├── Operators.py
│   │   │   └── __init__.py
│   │   ├── PipelineNode
│   │   │   ├── PipelineChannel.py
│   │   │   ├── PipelineProcess.py
│   │   │   └── __init__.py
│   │   ├── PipelineNodeFactory
│   │   │   ├── PipelineChannelFactory.py
│   │   │   ├── PipelineProcessFactory.py
│   │   │   └── __init__.py
│   │   ├── Shell
│   │   │   ├── Shell.py
│   │   │   └── __init__.py
│   │   ├── Stream
│   │   │   ├── FileStream.py
│   │   │   ├── ManualStream.py
│   │   │   ├── NullStream.py
│   │   │   ├── PipeStream.py
│   │   │   ├── Stream.py
│   │   │   └── __init__.py
│   │   ├── StreamFactory
│   │   │   ├── FileStreamFactory.py
│   │   │   ├── ManualStreamFactory.py
│   │   │   ├── NullStreamFactory.py
│   │   │   ├── PipeStreamFactory.py
│   │   │   └── __init__.py
│   │   ├── __init__.py
│   │   ├── coveragerc
│   │   ├── echo_env.py
│   │   ├── test_util.py
│   │   └── util
│   │       ├── __init__.py
│   │       └── util.py
│   └── util.py
├── test_exception.py
├── test_interactive.py
└── versioneer.py
/.gitattributes: -------------------------------------------------------------------------------- 1 | shtk/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | venv-*/ 113 | ENV/ 114 | env.bak/ 115 | venv.bak/ 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | 135 | # pytype static type analyzer 136 | .pytype/ 137 | 138 | # Cython debug symbols 139 | cython_debug/ 140 | 141 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Build documentation in the docs/ directory with Sphinx 9 | sphinx: 10 | configuration: docs/source/conf.py 11 | 12 | # Optionally build your docs in additional formats such as PDF 13 | formats: 14 | - pdf 15 | 16 | # Optionally set the version of Python and requirements required to build your docs 17 | python: 18 | version: 3.7 19 | install: 20 | - requirements: docs/requirements.txt 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, Jon Roose 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include versioneer.py 2 | include shtk/_version.py 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Python Shell Toolkit (SHTK) 2 | 3 | Python SHTK is a Python module that makes it easier to replace shell scripts 4 | with Python scripts. Python has a number of syntax advantages 5 | over traditional shell scripting languages such as BASH, including: 6 | * Classes 7 | * Modules 8 | * With statements 9 | * Try/Except statements 10 | * Async and await for coroutines 11 | 12 | The module and package oriented structure of Python's toolchain enables broad 13 | code re-use and redistribution. Python also benefits from a wide selection of 14 | built-in modules, and expands itself via the wide assortment of packages 15 | that can be quickly installed using its built-in package manager. 16 | 17 | Finally, built-in automated test harnesses and long-standing code-quality 18 | integrations make it easy to review, document, test, and maintain its 19 | libraries. 20 | 21 | SHTK is written with the assumption that you want to run more than one command. 22 | Towards this end, improvements over Python's built-in subprocess library 23 | include: 24 | * Much shorter code -- designed to be as close to BASH as possible 25 | * Easy piping of stdout to other commands' stdin 26 | * Easy redirects to files 27 | * Shell objects to track and manage cwd and environment variables 28 | * An evaluate() function that returns the text a command wrote to stdout 29 | * Optional NonzeroExitCodeException raised in response to non-zero exit codes 30 | * Connects commands to sys.stdin, sys.stdout, and sys.stderr by default 31 | 32 | The author's primary intended use cases for Python SHTK include replacing BASH 33 | scripts that automate builds of disk images, Docker containers, and system 34 | configurations. 35 | 36 | ## Installation 37 | Using pip, you can install shtk as follows: 38 | ``` 39 | pip3 install shtk 40 | ``` 41 | 42 | Or you can install the module from source as follows: 43 | ``` 44 | pip3 install . 45 | ``` 46 | 47 | ## Tests 48 | To run the automated tests, run the following commands from the project's root 49 | directory: 50 | 51 | ``` 52 | pip3 install coverage 53 | python3 run_tests.py 54 | ``` 55 | 56 | ## Documentation 57 | The documentation is publicly available at https://shtk.readthedocs.org 58 | 59 | To build the documentation from source, run the following, which generates 60 | documentation in ./docs/build/html/index.html 61 | 62 | ``` 63 | cd docs 64 | make html 65 | cd ..
66 | ``` 67 | 68 | ## Examples 69 | 70 | ``` 71 | import shtk 72 | 73 | sh = shtk.Shell.get_shell() 74 | 75 | ls = sh.command('ls') 76 | wc = sh.command('wc') 77 | cat = sh.command('cat') 78 | sleep = sh.command('sleep') 79 | touch = sh.command('touch') 80 | 81 | #touch tmp.txt 82 | sh(touch('tmp.txt')) 83 | 84 | #cat tmp.txt 85 | sh(cat('tmp.txt')) 86 | 87 | #cat tmp.txt | wc -l 88 | sh(cat('tmp.txt') | wc('-l')) 89 | 90 | #wc -l < tmp.txt 91 | sh(wc('-l').stdin('tmp.txt')) 92 | 93 | #ls | wc -l > /dev/null 94 | sh(ls | wc('-l').stdout(None)) 95 | 96 | #ls | wc -l > tmp.txt 97 | sh(ls | wc('-l').stdout('tmp.txt')) 98 | 99 | #ls | wc -l >> tmp.txt 100 | sh(ls | wc('-l').stdout('tmp.txt', mode='a')) 101 | 102 | with open('test_file1.txt', 'w') as fout: 103 | msg = """ 104 | abc 105 | xyz 106 | The quick brown fox jumps over the lazy dog. 107 | """.lstrip() 108 | print(msg, file=fout) 109 | 110 | try: 111 | # ls test_file2.txt 2> /dev/null | wc -l 112 | sh( 113 | ls('test_file2.txt').stderr('/dev/null') | wc('-l') 114 | ) 115 | except shtk.NonzeroExitCodeException: 116 | print("Caught a failure") 117 | 118 | sh( 119 | ls('test_file1.txt') 120 | ) 121 | 122 | #echo $(ls | wc -l) 123 | print(sh.evaluate(ls | wc('-l')).strip()) 124 | 125 | ``` 126 | 127 | More examples can be found in the source code's examples directory, but they're 128 | still under construction. 129 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | #@python3 ./build_shtk_rst.py > source/shtk.rst 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx-rtd-theme 2 | 3 | -------------------------------------------------------------------------------- /docs/source/Exceptions.rst: -------------------------------------------------------------------------------- 1 | Exceptions 2 | ========== 3 | 4 | shtk defines custom exceptions to indicate when certain events occur. 5 | 6 | shtk.NonzeroExitCodeException 7 | ----------------------------- 8 | 9 | .. autoclass:: shtk.NonzeroExitCodeException 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | :inherited-members: 14 | 15 | -------------------------------------------------------------------------------- /docs/source/Job.rst: -------------------------------------------------------------------------------- 1 | Jobs 2 | ==== 3 | 4 | Jobs represent and control running pipelines. They are returned by 5 | shtk.Shell.run() and used by other internal methods of shtk.Shell, such as 6 | shtk.Shell.evaluate(). Provided functionality includes starting, killing, and 7 | awaiting completion of process pipelines. 8 | 9 | The Job.run() method is also responsible for instantiating 10 | shtk.PipelineNodeFactory templates to create shtk.PipelineNode instances, as 11 | well as instantiating StreamFactory templates to create shtk.Stream instances. 12 | 13 | shtk.Job 14 | -------- 15 | 16 | .. autoclass:: shtk.Job 17 | :members: 18 | :undoc-members: 19 | :show-inheritance: 20 | :inherited-members: 21 | -------------------------------------------------------------------------------- /docs/source/PipelineNode.rst: -------------------------------------------------------------------------------- 1 | Pipeline Nodes 2 | ============== 3 | shtk.PipelineNodeFactory subclasses are templates used to define the properties 4 | of shtk.PipelineNode instances. shtk.PipelineNode instances are nodes within a 5 | directed acyclic graph (DAG) that represents a process pipeline. 6 | 7 | The leaf nodes of this DAG are always PipelineProcess instances representing an 8 | individual process that is part of the pipeline. 9 | 10 | PipelineNode instances can be used to communicate with, start, and stop 11 | individual processes within a process pipeline. 12 | 13 | PipelineNode classes are typically instantiated by other elements of SHTK, 14 | rather than being manually instantiated by the end user. 15 | 16 | shtk.PipelineNode 17 | ----------------- 18 | 19 | .. autoclass:: shtk.PipelineNode 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :inherited-members: 24 | 25 | shtk.PipelineChannel 26 | -------------------- 27 | 28 | .. autoclass:: shtk.PipelineChannel 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :inherited-members: 33 | 34 | shtk.PipelineProcess 35 | -------------------- 36 | 37 | ..
autoclass:: shtk.PipelineProcess 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :inherited-members: 42 | 43 | -------------------------------------------------------------------------------- /docs/source/PipelineNodeFactory.rst: -------------------------------------------------------------------------------- 1 | Pipeline Node Factories 2 | ======================= 3 | shtk.PipelineNodeFactory subclasses are templates used to define the properties 4 | of shtk.PipelineNode instances. shtk.PipelineNode instances are nodes within a 5 | directed acyclic graph (DAG) that represents a process pipeline. 6 | 7 | shtk.PipelineNodeFactory instances are useful because they enable (1) running 8 | process pipelines multiple times, (2) displaying the commands that will be run 9 | as part of the pipeline prior to executing them, and (3) composing 10 | partial pipelines into more complex process pipelines. 11 | 12 | PipelineNodeFactory instances are usually constructed by a call to 13 | shtk.Shell.command(), or by using the pipe operator to connect multiple 14 | PipelineProcessFactory instances together into a single process pipeline. 15 | 16 | shtk.PipelineNodeFactory 17 | ------------------------ 18 | 19 | .. autoclass:: shtk.PipelineNodeFactory 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :inherited-members: 24 | 25 | shtk.PipelineChannelFactory 26 | --------------------------- 27 | 28 | .. autoclass:: shtk.PipelineChannelFactory 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :inherited-members: 33 | 34 | shtk.PipelineProcessFactory 35 | --------------------------- 36 | 37 | .. autoclass:: shtk.PipelineProcessFactory 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :inherited-members: 42 | 43 | -------------------------------------------------------------------------------- /docs/source/Shell.rst: -------------------------------------------------------------------------------- 1 | Shells 2 | ====== 3 | 4 | Shells are used to abstract away the inner workings of shtk, and are meant to 5 | be the primary interface end users interact with. shtk.Shell instances serve 6 | as the standard mechanism for building and running command pipelines. 7 | 8 | shtk.Shell 9 | ---------- 10 | 11 | .. autoclass:: shtk.Shell 12 | :members: 13 | :undoc-members: 14 | :show-inheritance: 15 | :inherited-members: 16 | -------------------------------------------------------------------------------- /docs/source/Stream.rst: -------------------------------------------------------------------------------- 1 | Streams 2 | ======= 3 | shtk.Stream instances are pairs of file-like objects (one for reading data from 4 | the process, one for writing data to the process) used for communication with 5 | running processes. If a stream is one-way (e.g. FileStream), then the 6 | underlying file-like object's reader or writer may be a handle to os.devnull. 7 | 8 | shtk.Stream instances are usually constructed internally within SHTK, rather 9 | than being directly instantiated by the end user. 10 | 11 | shtk.Stream 12 | ----------- 13 | 14 | .. autoclass:: shtk.Stream 15 | :members: 16 | :undoc-members: 17 | :show-inheritance: 18 | :inherited-members: 19 | 20 | shtk.FileStream 21 | --------------- 22 | 23 | .. autoclass:: shtk.FileStream 24 | :members: 25 | :undoc-members: 26 | :show-inheritance: 27 | :inherited-members: 28 | 29 | shtk.ManualStream 30 | ----------------- 31 | 32 | ..
autoclass:: shtk.ManualStream 33 | :members: 34 | :undoc-members: 35 | :show-inheritance: 36 | :inherited-members: 37 | 38 | shtk.NullStream 39 | --------------- 40 | 41 | .. autoclass:: shtk.NullStream 42 | :members: 43 | :undoc-members: 44 | :show-inheritance: 45 | :inherited-members: 46 | 47 | shtk.PipeStream 48 | --------------- 49 | 50 | .. autoclass:: shtk.PipeStream 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | :inherited-members: 55 | -------------------------------------------------------------------------------- /docs/source/StreamFactory.rst: -------------------------------------------------------------------------------- 1 | Stream Factories 2 | ================ 3 | shtk.StreamFactory subclasses are templates used to define the properties of 4 | shtk.Stream instances. shtk.Stream instances are pairs of file-like objects 5 | (one for reading data from the process, one for writing data to the process) 6 | used for communication with running processes. 7 | 8 | shtk.StreamFactory instances are typically constructed via calls to 9 | shtk.PipelineNodeFactory.stdin(), shtk.PipelineNodeFactory.stdout(), and 10 | shtk.PipelineNodeFactory.stderr(). 11 | 12 | shtk.StreamFactory 13 | ------------------ 14 | 15 | .. autoclass:: shtk.StreamFactory 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | :inherited-members: 20 | 21 | shtk.FileStreamFactory 22 | ---------------------- 23 | 24 | .. autoclass:: shtk.FileStreamFactory 25 | :members: 26 | :undoc-members: 27 | :show-inheritance: 28 | :inherited-members: 29 | 30 | shtk.ManualStreamFactory 31 | ------------------------ 32 | 33 | .. autoclass:: shtk.ManualStreamFactory 34 | :members: 35 | :undoc-members: 36 | :show-inheritance: 37 | :inherited-members: 38 | 39 | shtk.NullStreamFactory 40 | ---------------------- 41 | 42 | .. autoclass:: shtk.NullStreamFactory 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | :inherited-members: 47 | 48 | shtk.PipeStreamFactory 49 | ---------------------- 50 | 51 | .. autoclass:: shtk.PipeStreamFactory 52 | :members: 53 | :undoc-members: 54 | :show-inheritance: 55 | :inherited-members: 56 | 57 | -------------------------------------------------------------------------------- /docs/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .rst-content .section ul li > ul { 2 | margin-top: 0px; 3 | } 4 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
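# Note (added for clarity): the sys.path.insert below prepends the repository
# root (two levels up from docs/source) so sphinx.ext.autodoc can import the
# shtk package in-place, without it being pip-installed.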
12 | 13 | import os 14 | import sys 15 | sys.path.insert(0, os.path.abspath('../..')) 16 | 17 | import sphinx_rtd_theme 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'SHTK' 23 | copyright = '2021, Jon Roose' 24 | author = 'Jon Roose' 25 | 26 | # The full version, including alpha/beta/rc tags 27 | release = 'v0.9.2' 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | 'sphinx.ext.napoleon', 37 | 'sphinx.ext.autodoc', 38 | 'sphinx_rtd_theme' 39 | ] 40 | 41 | # Add any paths that contain templates here, relative to this directory. 42 | templates_path = ['_templates'] 43 | 44 | # List of patterns, relative to source directory, that match files and 45 | # directories to ignore when looking for source files. 46 | # This pattern also affects html_static_path and html_extra_path. 47 | exclude_patterns = [] 48 | 49 | 50 | # -- Options for HTML output ------------------------------------------------- 51 | 52 | # The theme to use for HTML and HTML Help pages. See the documentation for 53 | # a list of builtin themes. 54 | # 55 | html_theme = 'sphinx_rtd_theme' 56 | 57 | # Add any paths that contain custom static files (such as style sheets) here, 58 | # relative to this directory. They are copied after the builtin static files, 59 | # so a file named "default.css" will overwrite the builtin "default.css". 60 | html_static_path = ['_static'] 61 | 62 | html_css_files = [ 63 | 'css/custom.css' 64 | ] 65 | 66 | napoleon_google_docstring = True 67 | napoleon_numpy_docstring = False 68 | autodoc_member_order = 'bysource' 69 | 70 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. SHTK documentation master file, created by 2 | sphinx-quickstart on Sun Jan 24 21:28:10 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to SHTK's documentation! 7 | ================================ 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | Shell 13 | Job 14 | StreamFactory 15 | Stream 16 | PipelineNodeFactory 17 | PipelineNode 18 | Exceptions 19 | 20 | -------------------------------------------------------------------------------- /examples/MountContext.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Context managers and other utilities for mounting partitions from Python. 4 | """ 5 | 6 | import contextlib 7 | import os 8 | import pathlib 9 | import time 10 | 11 | import parted 12 | 13 | import shtk 14 | 15 | def find_partition(parted_disk, name=None, number=None): 16 | """ 17 | Finds a partition within a pyparted disk 18 | 19 | Returns the first partition matching all requirements whose value is not 20 | None. If no partitions match all requirements, then None is returned.
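For example, find_partition(disk, number=1) (where disk is a hypothetical parted.Disk) returns the first partition whose number equals 1, or None when no partition matches.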
21 | 22 | Args: 23 | parted_disk (parted.Disk): a Disk object built by pyparted 24 | name (None or str): the name of the partition 25 | number (None or int): the partition number 26 | 27 | Returns: 28 | None or parted.Partition: 29 | The first matching partition (or None) 30 | """ 31 | 32 | def match(expected, observed): 33 | if expected is None: 34 | return None 35 | return expected == observed 36 | 37 | for partition in parted_disk.partitions: 38 | tests = [] 39 | 40 | tests.append(match(name, partition.name)) 41 | tests.append(match(number, partition.number)) 42 | 43 | if not any(test is False for test in tests): 44 | return partition 45 | 46 | return None 47 | 48 | class LoopbackContext: 49 | """ 50 | Context manager that calls losetup to create a temporary loopback device 51 | 52 | Creates a new loopback device using losetup upon __enter__(). Deletes the 53 | loopback device upon __exit__(). 54 | 55 | Attributes: 56 | filepath (str or pathlib.Path): The path of the source file 57 | offset (int): The offset (in bytes) from the beginning of the source 58 | file 59 | sizelimit (int): The maximum size (in bytes) of the new loopback device 60 | device (None or str): The filepath of the new loopback device (None 61 | prior to __enter__() and after __exit__()) 62 | 63 | """ 64 | def __init__(self, filepath, offset, sizelimit): 65 | """ 66 | filepath (str): The path of the source file 67 | offset (int): The offset (in bytes) from the beginning of the source 68 | file 69 | sizelimit (int): The maximum size (in bytes) of the new loopback device 70 | """ 71 | 72 | self.filepath = pathlib.Path(filepath) 73 | self.offset = int(offset) 74 | self.sizelimit = int(sizelimit) 75 | self.device = None 76 | 77 | def __enter__(self): 78 | """ 79 | Calls the losetup command to create a new loopback device. 80 | 81 | Returns: 82 | LoopbackContext: 83 | self, with the device attribute set to the new loopback device's path 84 | """ 85 | shell = shtk.Shell.get_shell() 86 | losetup = shell.command('losetup') 87 | self.device = shell.evaluate( 88 | losetup( 89 | '--find', '--show', 90 | '--offset', str(self.offset), 91 | '--sizelimit', str(self.sizelimit), 92 | self.filepath 93 | ) 94 | ).strip() 95 | 96 | return self 97 | 98 | def __exit__(self, *exc): 99 | """ 100 | Calls the losetup command to delete the new loopback device 101 | 102 | Args: 103 | exc (list): list of unused exception information (exc_type, 104 | exc_val, exc_tb) 105 | 106 | Returns: 107 | None: 108 | Does not suppress exceptions 109 | """ 110 | 111 | shell = shtk.Shell.get_shell() 112 | losetup = shell.command('losetup') 113 | shell( 114 | losetup('-d', self.device) 115 | ) 116 | 117 | @classmethod 118 | def from_partition(cls, filepath, name=None, number=None): 119 | """ 120 | Factory method creating a LoopbackContext from a partition table 121 | 122 | Args: 123 | filepath (str or pathlib.Path): the location of the partition table 124 | name (str or None): the name of the partition to mount. Note that 125 | this must be the partition label (or similar). Filesystem 126 | labels won't work. 127 | number (int or None): the number of the partition to mount. Note 128 | that disk numbers may differ from the order of the partitions 129 | within the partition table. 130 | 131 | Raises: 132 | RuntimeError: 133 | When no matching partition is found 134 | 135 | Returns: 136 | LoopbackContext: 137 | A new instance of LoopbackContext bound to the first matching 138 | partition.
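Example (hypothetical image path):: with LoopbackContext.from_partition('disk.img', number=1) as ctx: print(ctx.device) # e.g. /dev/loop0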
139 | """ 140 | 141 | device = parted.getDevice(filepath) 142 | disk = parted.newDisk(device) 143 | part = find_partition(disk, name=name, number=number) 144 | if part is None: 145 | raise RuntimeError("Failed to find matching partition within partition table") 146 | offset = part.geometry.start * part.geometry.device.sectorSize 147 | return cls(pathlib.Path(filepath), offset=offset, sizelimit=part.getLength('B')) 148 | 149 | class MountContext: 150 | """ 151 | Context manager that calls mount and umount to temporarily mount a device 152 | 153 | Attributes: 154 | source (str or pathlib.Path): the source filepath for the mount command 155 | target (str or pathlib.Path): the destination directory for the mount 156 | command 157 | args (list of str): the arguments passed to the mount command 158 | missing_ok (boolean): whether the target directory should be created if 159 | it doesn't exist. 160 | cleanup_missing (boolean): whether the target directory should be 161 | removed if it was created by MountContext as a result of 162 | missing_ok. 163 | rmdir_target (boolean): whether the target diretory should be deleted 164 | upon unmounting (set upon mounting) 165 | """ 166 | def __init__(self, 167 | source, target, types=None, options=None, bind=None, rbind=None, 168 | missing_ok=False, cleanup_missing_target=True, umount_recursive=False, 169 | ): 170 | """ 171 | Constructor for MountContext 172 | 173 | Args: 174 | source (str): the source filepath for the mount command 175 | target (str): the destination directory for the mount command 176 | types (list of str): allowable filesystem types for the mount 177 | options (list of str): extra arguments passed to the mount command 178 | bind (boolean): adds '--bind' to the mount command's args when True 179 | rbind (boolean): adds '--rbind' to the mount command's args when 180 | True. Implicitly sets umount_recursive. 181 | missing_ok (boolean): create the target directory if it doesn't 182 | exist 183 | umount_recursive (boolean): force a recursive unmount 184 | cleanup_missing_target (boolean): whether we should delete the 185 | target after use if we created it while mounting (see 186 | missing_ok). 187 | """ 188 | if os.geteuid() != 0: 189 | raise RuntimeError("You must be root to mount volumes") 190 | 191 | self.source = pathlib.Path(source) 192 | self.target = pathlib.Path(target) 193 | self.missing_ok = missing_ok 194 | self.cleanup_missing_target = cleanup_missing_target 195 | self.rmdir_target = False 196 | 197 | self.args = [] 198 | 199 | if bind: 200 | self.args.append("--bind") 201 | 202 | if rbind: 203 | self.args.append("--rbind") 204 | 205 | if rbind or umount_recursive: 206 | self.umount_recursive = True 207 | else: 208 | self.umount_recursive = False 209 | 210 | if types: 211 | self.args.extend(('--types', ",".join(str(x) for x in types))) 212 | 213 | if options: 214 | self.args.extend(("--options", ",".join(str(x) for x in options))) 215 | 216 | self.args.append(str(source)) 217 | self.args.append(str(target)) 218 | 219 | def __enter__(self): 220 | """ 221 | Calls the losetup command to create a new loopback device. 
222 | 223 | Returns: 224 | MountContext: 225 | self 226 | """ 227 | shell = shtk.Shell.get_shell() 228 | mount = shell.command('mount') 229 | 230 | if not self.target.exists(): 231 | if self.missing_ok: 232 | self.rmdir_target = self.cleanup_missing_target 233 | self.target.mkdir(exist_ok=True) 234 | else: 235 | raise RuntimeError(f"Directory {self.target!s} does not exist") 236 | else: 237 | self.rmdir_target = False 238 | 239 | if not self.target.is_dir(): 240 | raise RuntimeError(f"{self.target!s} is not a directory") 241 | 242 | shell( 243 | mount(*self.args) 244 | ) 245 | 246 | if '--rbind' in self.args: 247 | time.sleep(0.5) # Wait for make-rslave to propagate 248 | shell( 249 | mount('--make-rslave', self.target) 250 | ) 251 | 252 | return self 253 | 254 | def __exit__(self, *exc): 255 | """ 256 | Calls the umount command to unmount the target mount path 257 | 258 | Args: 259 | exc (list): list of unused exception information (exc_type, 260 | exc_val, exc_tb) 261 | 262 | Returns: 263 | None: 264 | Does not suppress exceptions 265 | """ 266 | shell = shtk.Shell.get_shell() 267 | umount = shell.command('umount') 268 | if self.umount_recursive: 269 | shell( 270 | umount('--recursive', self.target) 271 | ) 272 | else: 273 | shell( 274 | umount(self.target) 275 | ) 276 | if self.rmdir_target: 277 | os.rmdir(self.target) 278 | 279 | class ManyMountContext(contextlib.ExitStack): 280 | """ 281 | A subclass of contextlib.ExitStack() with a helper method for mounting 282 | """ 283 | def mount(self, source, target, part_name=None, part_number=None, **kwargs): 284 | """ 285 | Creates a new MountContext and adds it to the context stack 286 | 287 | If part_name or part_number are provided, source will be assumed to be 288 | a partition table image, rather than a raw partition. An individual 289 | partition will be found and bound to a loopback device using 290 | LoopbackContext.from_partition(source, name=part_name, number=part_number). 291 | 292 | All additional keyword arguments are forwarded to MountContext(). 293 | 294 | Args: 295 | source (str): the source filepath for the mount command 296 | target (str): the destination directory for the mount command 297 | part_name (str): the name of the partition to mount 298 | part_number (int): the number of the partition to mount 299 | """ 300 | 301 | if part_name is not None or part_number is not None: 302 | loop = LoopbackContext.from_partition(source, name=part_name, number=part_number) 303 | loop = self.enter_context(loop) 304 | self.enter_context( 305 | MountContext( 306 | source=loop.device, 307 | target=target, 308 | **kwargs 309 | ) 310 | ) 311 | else: 312 | self.enter_context(MountContext(source = source, target = target, **kwargs)) 313 | 314 | class ChrootMountContext(ManyMountContext): 315 | """ 316 | A subclass of ManyMountContext that mounts essential system directories 317 | 318 | Upon entering the context, the directories target/{proc,sys,dev} are 319 | mounted sequentially via the mount command (proc and sysfs mounts for proc 320 | and sys, and an --rbind mount for dev). The umount command will unmount 321 | them upon __exit__(). 322 | 323 | If the directories target/{proc,sys,dev} do not exist, they will be created only if the instance is initialized with missing_ok=True.
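Example (hypothetical chroot path):: with ChrootMountContext('/mnt/rootfs', missing_ok=True): ... # target/proc, target/sys, and target/dev are mounted here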
324 | 325 | Attributes: 326 | target (str): target directory to bind directories within 327 | source (str): source directory to bind-mount from 328 | missing_ok (boolean): when true, missing directories under target will 329 | be created automatically 330 | """ 331 | 332 | def __init__(self, target, source='/', missing_ok=False): 333 | super().__init__() 334 | 335 | self.source = pathlib.Path(source) 336 | self.target = pathlib.Path(target) 337 | self.missing_ok = missing_ok 338 | 339 | def __enter__(self): 340 | """ 341 | Mounts the subdirectories target/{proc,sys,dev} using a 342 | MountContext and adds each context to the internal exit stack. 343 | 344 | Returns: 345 | ChrootMountContext: 346 | self 347 | """ 348 | super().__enter__() 349 | 350 | self.mount( 351 | source = self.source / 'proc', 352 | target = self.target / 'proc', 353 | types = ['proc'], 354 | missing_ok = self.missing_ok 355 | ) 356 | 357 | self.mount( 358 | source = self.source / 'sys', 359 | target = self.target / 'sys', 360 | types = ['sysfs'], 361 | missing_ok = self.missing_ok 362 | ) 363 | 364 | self.mount( 365 | source = self.source / 'dev', 366 | target = self.target / 'dev', 367 | rbind = True, 368 | missing_ok = self.missing_ok 369 | ) 370 | 371 | return self 372 | 373 | if __name__ == "__main__": 374 | import sys 375 | with ManyMountContext() as mnt: 376 | src = sys.argv[1] 377 | dst = sys.argv[2] 378 | mnt.mount(src, dst, missing_ok=True, part_number=1) 379 | mnt.enter_context(ChrootMountContext(dst)) 380 | print("Press Control+C to exit") 381 | while True: 382 | time.sleep(60) # Wait for control+c 383 | -------------------------------------------------------------------------------- /examples/MountImage.py: -------------------------------------------------------------------------------- 1 | """ 2 | This example file is intended to demonstrate how to use SHTK to mount a Debian 3 | or Ubuntu image and install vim inside it via chroot. All mount points are 4 | automatically unmounted after success or failure of installation. 5 | 6 | Args: 7 | image_path (str): Path of the image to mount 8 | mount_path (str): Path of the directory to mount on 9 | """ 10 | 11 | import contextlib 12 | import pathlib 13 | import sys 14 | 15 | import shtk 16 | 17 | class Mount: 18 | """ 19 | Manages a mount. Works so long as Python doesn't segfault or similar. 20 | 21 | Args: 22 | src_path (str or pathlib.Path): The device to mount from 23 | dst_path (str or pathlib.Path): The directory to mount on 24 | 25 | Raises: 26 | shtk.NonzeroExitCodeException: 27 | If the mount or unmount returns a non-zero exit code. 28 | """ 29 | def __init__(self, src_path, dst_path, options=()): 30 | self.src_path = str(src_path) 31 | self.dst_path = str(dst_path) 32 | self.options = list(options) 33 | 34 | def __enter__(self): 35 | sh = shtk.Shell.get_shell() 36 | mount = sh.command('mount', user='root') 37 | sh(mount(*self.options, "--", self.src_path, self.dst_path)) 38 | return self 39 | 40 | def __exit__(self, exc_type, exc_val, exc_tb): 41 | sh = shtk.Shell.get_shell() 42 | umount = sh.command('umount', user='root') 43 | sh(umount('-l', self.dst_path)) 44 | 45 | @contextlib.contextmanager 46 | def PrepChroot(image_path, mount_path): 47 | """ 48 | Mounts an image and prepares it for chroot usage 49 | 50 | Args: 51 | image_path (pathlib.Path or str): The image file to mount. 52 | mount_path (pathlib.Path or str): The directory on which to mount 53 | the image.
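Yields: contextlib.ExitStack: The stack of Mount contexts; every mount is unwound (unmounted) when the enclosing with-block exits.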
54 | 55 | Raises: 56 | shtk.NonzeroExitCodeException: 57 | If any mount or unmount returns a non-zero exit code. 58 | """ 59 | image_path = pathlib.Path(image_path) 60 | mount_path = pathlib.Path(mount_path) 61 | with contextlib.ExitStack() as stack: 62 | stack.enter_context(Mount(image_path, mount_path, options=('-o', 'loop'))) 63 | stack.enter_context(Mount('none', mount_path / 'proc', options=('-t', 'proc'))) 64 | stack.enter_context(Mount('none', mount_path / 'sys', options=('-t', 'sysfs'))) 65 | stack.enter_context(Mount('/dev', mount_path / 'dev', options=('--rbind',))) 66 | stack.enter_context(Mount('devpts', mount_path / 'dev' / 'pts', options=('-t', 'devpts'))) 67 | stack.enter_context(Mount('/run', mount_path / 'run', options=('--rbind',))) 68 | 69 | yield stack 70 | 71 | def main(image_path, mount_path): 72 | """ 73 | Mounts an image and runs `apt -y install vim` inside a chroot 74 | 75 | Args: 76 | image_path (pathlib.Path or str): The image file to mount. 77 | mount_path (pathlib.Path or str): The directory on which to mount 78 | the image. 79 | 80 | Raises: 81 | shtk.NonzeroExitCodeException: 82 | If any mount or unmount returns a non-zero exit code. 83 | """ 84 | with shtk.Shell() as sh: 85 | with PrepChroot(image_path, mount_path): 86 | chroot = sh.command('chroot', user='root') 87 | sh(chroot(mount_path, 'apt', '-y', 'install', 'vim')) 88 | 89 | if __name__ == "__main__": 90 | main(sys.argv[1], sys.argv[2]) 91 | -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist= 7 | 8 | # Specify a score threshold to be exceeded before program exits with error. 9 | fail-under=10.0 10 | 11 | # Add files or directories to the blacklist. They should be base names, not 12 | # paths. 13 | ignore=CVS 14 | 15 | # Add files or directories matching the regex patterns to the blacklist. The 16 | # regex matches against base names, not paths. 17 | ignore-patterns=shtk/test 18 | 19 | # Python code to execute, usually for sys.path manipulation such as 20 | # pygtk.require(). 21 | #init-hook= 22 | 23 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 24 | # number of processors available to use. 25 | jobs=1 26 | 27 | # Control the amount of potential inferred values when inferring a single 28 | # object. This can help the performance when dealing with large functions or 29 | # complex, nested conditions. 30 | limit-inference-results=100 31 | 32 | # List of plugins (as comma separated values of python module names) to load, 33 | # usually to register additional checkers. 34 | load-plugins= 35 | 36 | # Pickle collected data for later comparisons. 37 | persistent=yes 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
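# For example, confidence=HIGH,INFERENCE would show only messages reported # with HIGH or INFERENCE confidence.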
52 | confidence= 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 63 | disable=print-statement, 64 | parameter-unpacking, 65 | unpacking-in-except, 66 | old-raise-syntax, 67 | backtick, 68 | long-suffix, 69 | old-ne-operator, 70 | old-octal-literal, 71 | import-star-module-level, 72 | non-ascii-bytes-literal, 73 | raw-checker-failed, 74 | bad-inline-option, 75 | locally-disabled, 76 | file-ignored, 77 | suppressed-message, 78 | useless-suppression, 79 | deprecated-pragma, 80 | use-symbolic-message-instead, 81 | apply-builtin, 82 | basestring-builtin, 83 | buffer-builtin, 84 | cmp-builtin, 85 | coerce-builtin, 86 | execfile-builtin, 87 | file-builtin, 88 | long-builtin, 89 | raw_input-builtin, 90 | reduce-builtin, 91 | standarderror-builtin, 92 | unicode-builtin, 93 | xrange-builtin, 94 | coerce-method, 95 | delslice-method, 96 | getslice-method, 97 | setslice-method, 98 | no-absolute-import, 99 | old-division, 100 | dict-iter-method, 101 | dict-view-method, 102 | next-method-called, 103 | metaclass-assignment, 104 | indexing-exception, 105 | raising-string, 106 | reload-builtin, 107 | oct-method, 108 | hex-method, 109 | nonzero-method, 110 | cmp-method, 111 | input-builtin, 112 | round-builtin, 113 | intern-builtin, 114 | unichr-builtin, 115 | map-builtin-not-iterating, 116 | zip-builtin-not-iterating, 117 | range-builtin-not-iterating, 118 | filter-builtin-not-iterating, 119 | using-cmp-argument, 120 | eq-without-hash, 121 | div-method, 122 | idiv-method, 123 | rdiv-method, 124 | exception-message-attribute, 125 | invalid-str-codec, 126 | sys-max-int, 127 | bad-python3-import, 128 | deprecated-string-function, 129 | deprecated-str-translate-call, 130 | deprecated-itertools-function, 131 | deprecated-types-field, 132 | next-method-defined, 133 | dict-items-not-iterating, 134 | dict-keys-not-iterating, 135 | dict-values-not-iterating, 136 | deprecated-operator-function, 137 | deprecated-urllib-function, 138 | xreadlines-attribute, 139 | deprecated-sys-function, 140 | exception-escape, 141 | comprehension-escape, 142 | no-else-return, 143 | too-few-public-methods, 144 | too-many-arguments, 145 | wildcard-import, 146 | too-many-branches, 147 | too-many-instance-attributes, 148 | unspecified-encoding, 149 | consider-using-with 150 | 151 | # Enable the message, report, category or checker with the given id(s). You can 152 | # either give multiple identifier separated by comma (,) or put this option 153 | # multiple time (only on the command line, not in the configuration file where 154 | # it should appear only once). See also the "--disable" option for examples. 155 | enable=c-extension-no-member 156 | 157 | 158 | [REPORTS] 159 | 160 | # Python expression which should return a score less than or equal to 10. 
You 161 | # have access to the variables 'error', 'warning', 'refactor', and 'convention' 162 | # which contain the number of messages in each category, as well as 'statement' 163 | # which is the total number of statements analyzed. This score is used by the 164 | # global evaluation report (RP0004). 165 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 166 | 167 | # Template used to display messages. This is a python new-style format string 168 | # used to format the message information. See doc for all details. 169 | #msg-template= 170 | 171 | # Set the output format. Available formats are text, parseable, colorized, json 172 | # and msvs (visual studio). You can also give a reporter class, e.g. 173 | # mypackage.mymodule.MyReporterClass. 174 | output-format=text 175 | 176 | # Tells whether to display a full report or only the messages. 177 | reports=no 178 | 179 | # Activate the evaluation score. 180 | score=yes 181 | 182 | 183 | [REFACTORING] 184 | 185 | # Maximum number of nested blocks for function / method body 186 | max-nested-blocks=5 187 | 188 | # Complete name of functions that never returns. When checking for 189 | # inconsistent-return-statements if a never returning function is called then 190 | # it will be considered as an explicit return statement and no message will be 191 | # printed. 192 | never-returning-functions=sys.exit 193 | 194 | 195 | [LOGGING] 196 | 197 | # The type of string formatting that logging methods do. `old` means using % 198 | # formatting, `new` is for `{}` formatting. 199 | logging-format-style=old 200 | 201 | # Logging modules to check that the string format arguments are in logging 202 | # function parameter format. 203 | logging-modules=logging 204 | 205 | 206 | [SIMILARITIES] 207 | 208 | # Ignore comments when computing similarities. 209 | ignore-comments=yes 210 | 211 | # Ignore docstrings when computing similarities. 212 | ignore-docstrings=yes 213 | 214 | # Ignore imports when computing similarities. 215 | ignore-imports=no 216 | 217 | # Minimum lines number of a similarity. 218 | min-similarity-lines=4 219 | 220 | 221 | [FORMAT] 222 | 223 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 224 | expected-line-ending-format= 225 | 226 | # Regexp for a line that is allowed to be longer than the limit. 227 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$ 228 | 229 | # Number of spaces of indent required inside a hanging or continued line. 230 | indent-after-paren=4 231 | 232 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 233 | # tab). 234 | indent-string=' ' 235 | 236 | # Maximum number of characters on a single line. 237 | max-line-length=100 238 | 239 | # Maximum number of lines in a module. 240 | max-module-lines=1000 241 | 242 | # Allow the body of a class to be on the same line as the declaration if body 243 | # contains single statement. 244 | single-line-class-stmt=no 245 | 246 | # Allow the body of an if to be on the same line as the test if there is no 247 | # else. 248 | single-line-if-stmt=no 249 | 250 | 251 | [MISCELLANEOUS] 252 | 253 | # List of note tags to take in consideration, separated by a comma. 254 | notes=FIXME, 255 | XXX, 256 | TODO 257 | 258 | # Regular expression of note tags to take in consideration. 259 | #notes-rgx= 260 | 261 | 262 | [SPELLING] 263 | 264 | # Limits count of emitted suggestions for spelling mistakes. 265 | max-spelling-suggestions=4 266 | 267 | # Spelling dictionary name. Available dictionaries: none.
To make it work, 268 | # install the python-enchant package. 269 | spelling-dict= 270 | 271 | # List of comma separated words that should not be checked. 272 | spelling-ignore-words= 273 | 274 | # A path to a file that contains the private dictionary; one word per line. 275 | spelling-private-dict-file= 276 | 277 | # Tells whether to store unknown words to the private dictionary (see the 278 | # --spelling-private-dict-file option) instead of raising a message. 279 | spelling-store-unknown-words=no 280 | 281 | 282 | [STRING] 283 | 284 | # This flag controls whether inconsistent-quotes generates a warning when the 285 | # character used as a quote delimiter is used inconsistently within a module. 286 | check-quote-consistency=no 287 | 288 | # This flag controls whether the implicit-str-concat should generate a warning 289 | # on implicit string concatenation in sequences defined over several lines. 290 | check-str-concat-over-line-jumps=no 291 | 292 | 293 | [VARIABLES] 294 | 295 | # List of additional names supposed to be defined in builtins. Remember that 296 | # you should avoid defining new builtins when possible. 297 | additional-builtins= 298 | 299 | # Tells whether unused global variables should be treated as a violation. 300 | allow-global-unused-variables=yes 301 | 302 | # List of strings which can identify a callback function by name. A callback 303 | # name must start or end with one of those strings. 304 | callbacks=cb_, 305 | _cb 306 | 307 | # A regular expression matching the name of dummy variables (i.e. expected to 308 | # not be used). 309 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 310 | 311 | # Argument names that match this expression will be ignored. Default to name 312 | # with leading underscore. 313 | ignored-argument-names=_.*|^ignored_|^unused_ 314 | 315 | # Tells whether we should check for unused import in __init__ files. 316 | init-import=no 317 | 318 | # List of qualified module names which can have objects that can redefine 319 | # builtins. 320 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 321 | 322 | 323 | [BASIC] 324 | 325 | # Naming style matching correct argument names. 326 | argument-naming-style=snake_case 327 | 328 | # Regular expression matching correct argument names. Overrides argument- 329 | # naming-style. 330 | #argument-rgx= 331 | 332 | # Naming style matching correct attribute names. 333 | attr-naming-style=snake_case 334 | 335 | # Regular expression matching correct attribute names. Overrides attr-naming- 336 | # style. 337 | #attr-rgx= 338 | 339 | # Bad variable names which should always be refused, separated by a comma. 340 | bad-names=foo, 341 | bar, 342 | baz, 343 | toto, 344 | tutu, 345 | tata 346 | 347 | # Bad variable names regexes, separated by a comma. If names match any regex, 348 | # they will always be refused 349 | bad-names-rgxs= 350 | 351 | # Naming style matching correct class attribute names. 352 | class-attribute-naming-style=any 353 | 354 | # Regular expression matching correct class attribute names. Overrides class- 355 | # attribute-naming-style. 356 | #class-attribute-rgx= 357 | 358 | # Naming style matching correct class names. 359 | class-naming-style=PascalCase 360 | 361 | # Regular expression matching correct class names. Overrides class-naming- 362 | # style. 363 | #class-rgx= 364 | 365 | # Naming style matching correct constant names. 366 | const-naming-style=UPPER_CASE 367 | 368 | # Regular expression matching correct constant names. 
Overrides const-naming- 369 | # style. 370 | #const-rgx= 371 | 372 | # Minimum line length for functions/classes that require docstrings, shorter 373 | # ones are exempt. 374 | docstring-min-length=-1 375 | 376 | # Naming style matching correct function names. 377 | function-naming-style=snake_case 378 | 379 | # Regular expression matching correct function names. Overrides function- 380 | # naming-style. 381 | #function-rgx= 382 | 383 | # Good variable names which should always be accepted, separated by a comma. 384 | good-names=i, 385 | j, 386 | k, 387 | ex, 388 | Run, 389 | Job, 390 | util, 391 | setup, 392 | shtk, 393 | rc, 394 | cd, 395 | _, 396 | _default_shell 397 | 398 | # Good variable names regexes, separated by a comma. If names match any regex, 399 | # they will always be accepted 400 | good-names-rgxs= 401 | 402 | # Include a hint for the correct naming format with invalid-name. 403 | include-naming-hint=no 404 | 405 | # Naming style matching correct inline iteration names. 406 | inlinevar-naming-style=any 407 | 408 | # Regular expression matching correct inline iteration names. Overrides 409 | # inlinevar-naming-style. 410 | #inlinevar-rgx= 411 | 412 | # Naming style matching correct method names. 413 | method-naming-style=snake_case 414 | 415 | # Regular expression matching correct method names. Overrides method-naming- 416 | # style. 417 | #method-rgx= 418 | 419 | # Naming style matching correct module names. 420 | module-naming-style=PascalCase 421 | 422 | # Regular expression matching correct module names. Overrides module-naming- 423 | # style. 424 | #module-rgx= 425 | 426 | # Colon-delimited sets of names that determine each other's naming style when 427 | # the name regexes allow several styles. 428 | name-group= 429 | 430 | # Regular expression which should only match function or class names that do 431 | # not require a docstring. 432 | no-docstring-rgx=^_ 433 | 434 | # List of decorators that produce properties, such as abc.abstractproperty. Add 435 | # to this list to register other decorators that produce valid properties. 436 | # These decorators are taken in consideration only for invalid-name. 437 | property-classes=abc.abstractproperty 438 | 439 | # Naming style matching correct variable names. 440 | variable-naming-style=snake_case 441 | 442 | # Regular expression matching correct variable names. Overrides variable- 443 | # naming-style. 444 | #variable-rgx= 445 | 446 | 447 | [TYPECHECK] 448 | 449 | # List of decorators that produce context managers, such as 450 | # contextlib.contextmanager. Add to this list to register other decorators that 451 | # produce valid context managers. 452 | contextmanager-decorators=contextlib.contextmanager 453 | 454 | # List of members which are set dynamically and missed by pylint inference 455 | # system, and so shouldn't trigger E1101 when accessed. Python regular 456 | # expressions are accepted. 457 | generated-members= 458 | 459 | # Tells whether missing members accessed in mixin class should be ignored. A 460 | # mixin class is detected if its name ends with "mixin" (case insensitive). 461 | ignore-mixin-members=yes 462 | 463 | # Tells whether to warn about missing members when the owner of the attribute 464 | # is inferred to be None. 465 | ignore-none=yes 466 | 467 | # This flag controls whether pylint should warn about no-member and similar 468 | # checks whenever an opaque object is returned when inferring. 
The inference 469 | # can return multiple potential results while evaluating a Python object, but 470 | # some branches might not be evaluated, which results in partial inference. In 471 | # that case, it might be useful to still emit no-member and other checks for 472 | # the rest of the inferred objects. 473 | ignore-on-opaque-inference=yes 474 | 475 | # List of class names for which member attributes should not be checked (useful 476 | # for classes with dynamically set attributes). This supports the use of 477 | # qualified names. 478 | ignored-classes=optparse.Values,thread._local,_thread._local 479 | 480 | # List of module names for which member attributes should not be checked 481 | # (useful for modules/projects where namespaces are manipulated during runtime 482 | # and thus existing member attributes cannot be deduced by static analysis). It 483 | # supports qualified module names, as well as Unix pattern matching. 484 | ignored-modules= 485 | 486 | # Show a hint with possible names when a member name was not found. The aspect 487 | # of finding the hint is based on edit distance. 488 | missing-member-hint=yes 489 | 490 | # The minimum edit distance a name should have in order to be considered a 491 | # similar match for a missing member name. 492 | missing-member-hint-distance=1 493 | 494 | # The total number of similar names that should be taken in consideration when 495 | # showing a hint for a missing member. 496 | missing-member-max-choices=1 497 | 498 | # List of decorators that change the signature of a decorated function. 499 | signature-mutators= 500 | 501 | 502 | [DESIGN] 503 | 504 | # Maximum number of arguments for function / method. 505 | max-args=5 506 | 507 | # Maximum number of attributes for a class (see R0902). 508 | max-attributes=7 509 | 510 | # Maximum number of boolean expressions in an if statement (see R0916). 511 | max-bool-expr=5 512 | 513 | # Maximum number of branch for function / method body. 514 | max-branches=12 515 | 516 | # Maximum number of locals for function / method body. 517 | max-locals=15 518 | 519 | # Maximum number of parents for a class (see R0901). 520 | max-parents=7 521 | 522 | # Maximum number of public methods for a class (see R0904). 523 | max-public-methods=20 524 | 525 | # Maximum number of return / yield for function / method body. 526 | max-returns=6 527 | 528 | # Maximum number of statements in function / method body. 529 | max-statements=50 530 | 531 | # Minimum number of public methods for a class (see R0903). 532 | min-public-methods=2 533 | 534 | 535 | [CLASSES] 536 | 537 | # List of method names used to declare (i.e. assign) instance attributes. 538 | defining-attr-methods=__init__, 539 | __new__, 540 | setUp, 541 | __post_init__ 542 | 543 | # List of member names, which should be excluded from the protected access 544 | # warning. 545 | exclude-protected=_asdict, 546 | _fields, 547 | _replace, 548 | _source, 549 | _make 550 | 551 | # List of valid names for the first argument in a class method. 552 | valid-classmethod-first-arg=cls 553 | 554 | # List of valid names for the first argument in a metaclass class method. 555 | valid-metaclass-classmethod-first-arg=cls 556 | 557 | 558 | [IMPORTS] 559 | 560 | # List of modules that can be imported at any level, not just the top level 561 | # one. 562 | allow-any-import-level= 563 | 564 | # Allow wildcard imports from modules that define __all__. 565 | allow-wildcard-with-all=no 566 | 567 | # Analyse import fallback blocks. 
This can be used to support both Python 2 and 568 | # 3 compatible code, which means that the block might have code that exists 569 | # only in one or another interpreter, leading to false positives when analysed. 570 | analyse-fallback-blocks=no 571 | 572 | # Deprecated modules which should not be used, separated by a comma. 573 | deprecated-modules=optparse,tkinter.tix 574 | 575 | # Create a graph of external dependencies in the given file (report RP0402 must 576 | # not be disabled). 577 | ext-import-graph= 578 | 579 | # Create a graph of every (i.e. internal and external) dependencies in the 580 | # given file (report RP0402 must not be disabled). 581 | import-graph= 582 | 583 | # Create a graph of internal dependencies in the given file (report RP0402 must 584 | # not be disabled). 585 | int-import-graph= 586 | 587 | # Force import order to recognize a module as part of the standard 588 | # compatibility libraries. 589 | known-standard-library= 590 | 591 | # Force import order to recognize a module as part of a third party library. 592 | known-third-party=enchant 593 | 594 | # Couples of modules and preferred modules, separated by a comma. 595 | preferred-modules= 596 | 597 | 598 | [EXCEPTIONS] 599 | 600 | # Exceptions that will emit a warning when being caught. Defaults to 601 | # "BaseException, Exception". 602 | overgeneral-exceptions=BaseException, 603 | Exception 604 | -------------------------------------------------------------------------------- /run_tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Custom test harness for shtk""" 3 | 4 | import os 5 | import unittest 6 | import coverage 7 | import pathlib 8 | 9 | def main(): 10 | testsdir = pathlib.Path('./shtk/test').resolve() 11 | packagedir = testsdir.parent 12 | 13 | covdir = pathlib.Path("./coverage").resolve() 14 | covdir.mkdir(exist_ok=True) 15 | 16 | covhtml = covdir / "html" 17 | covhtml.mkdir(exist_ok=True) 18 | 19 | covdata = covdir / "datafile" 20 | 21 | print((packagedir / "*").resolve()) 22 | print((testsdir / "*").resolve()) 23 | 24 | cov = coverage.Coverage( 25 | data_file = str(covdata.resolve()), 26 | include = str((packagedir / "*").resolve()), 27 | omit = str((testsdir / "*").resolve()), 28 | config_file = str((testsdir / "coveragerc").resolve()) 29 | ) 30 | 31 | cov.start() 32 | suite = unittest.TestSuite() 33 | import shtk 34 | from shtk.test.test_util import test_registry 35 | 36 | for test in test_registry['all']: 37 | suite.addTest(test()) 38 | 39 | runner = unittest.TextTestRunner(verbosity=2) 40 | runner.run(suite) 41 | cov.stop() 42 | cov.save() 43 | cov.html_report(directory=str(covhtml)) 44 | 45 | main() 46 | 47 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = shtk 3 | description = Shell ToolKit (SHTK) 4 | long_description = file: README.md, LICENSE 5 | long_description_content_type = text/markdown 6 | keywords = shell subprocess 7 | author = Jon Roose 8 | author_email = jroose@gmail.com 9 | maintainer = Jon Roose 10 | maintainer_email = jroose@gmail.com 11 | license = BSD 3-Clause License 12 | classifiers = 13 | License :: OSI Approved :: BSD License 14 | Programming Language :: Python :: 3 15 | 16 | [options.extras_require] 17 | test = 18 | coverage 19 | 20 | [versioneer] 21 | VCS = git 22 | versionfile_source = shtk/_version.py 23 | tag_prefix = v 24 | 25 | 26 | 
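run_tests.py above wires coverage measurement around a registry-driven unittest suite. A condensed sketch of that pattern follows (it requires the `coverage` package, declared as the `test` extra in setup.cfg, and adds the `if __name__` guard that the script itself omits; paths and the registry import mirror the harness above):

```python
#!/usr/bin/env python3
"""Sketch of the coverage-wrapped unittest harness used by run_tests.py."""

import pathlib
import unittest

import coverage

def run_suite():
    covdir = pathlib.Path("./coverage").resolve()
    covdir.mkdir(exist_ok=True)

    cov = coverage.Coverage(
        data_file=str(covdir / "datafile"),
        include="shtk/*",    # measure the package ...
        omit="shtk/test/*",  # ... but not the tests themselves
    )

    cov.start()
    # Importing the test package registers each test in test_registry.
    from shtk.test.test_util import test_registry

    suite = unittest.TestSuite()
    for test in test_registry['all']:
        suite.addTest(test())

    unittest.TextTestRunner(verbosity=2).run(suite)
    cov.stop()
    cov.save()
    cov.html_report(directory=str(covdir / "html"))

if __name__ == "__main__":
    run_suite()
```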
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Setup script for SHTK 4 | """ 5 | 6 | from setuptools import setup, find_packages 7 | import versioneer 8 | 9 | setup( 10 | version=versioneer.get_version(), 11 | cmdclass=versioneer.get_cmdclass(), 12 | packages=find_packages(include=['shtk'], exclude=['shtk.tests', 'shtk.tests.*']) 13 | ) 14 | -------------------------------------------------------------------------------- /shtk/Job.py: -------------------------------------------------------------------------------- 1 | """Declares the Job class used to run and track subprocess pipelines.""" 2 | import asyncio 3 | import io 4 | import os 5 | import shlex 6 | import signal 7 | 8 | from .util import export 9 | from .PipelineNode import PipelineNode 10 | 11 | __all__ = [] 12 | 13 | @export 14 | class NonzeroExitCodeException(Exception): 15 | """ 16 | Raised when a process within an SHTK job exits with a nonzero return code. 17 | 18 | Args: 19 | processes (list): The PipelineProcess instances to include in the error 20 | message 21 | """ 22 | def __init__(self, processes): 23 | self.processes = processes 24 | message = io.StringIO() 25 | print("One or more of the following processes returned non-zero return code:", file=message) 26 | for proc in self.processes: 27 | rc = proc.proc.returncode 28 | args = [] 29 | for arg in proc.args: 30 | args.append(shlex.quote(str(arg))) 31 | 32 | print(f" [{rc:3}] {' '.join(args)}", file=message) 33 | 34 | self.message = message.getvalue() 35 | message.close() 36 | 37 | super().__init__(self.message) 38 | 39 | 40 | @export 41 | class Job: 42 | """ 43 | Instantiates PipelienNodeFactory instances to run subprocesses. 44 | 45 | Job objects instantiate a PipelineNodeFactory template to create 46 | PipelineNode's that run and track the progress of a command pipeline. 47 | 48 | Args: 49 | pipeline_factory (PipelineNodeFactory): The command pipeline template 50 | that will be instantiated by the Job instance. 51 | cwd (str or Pathlib.Path): The current working directory in which to 52 | run the pipeline processes. 53 | env (dict): The environment variables to pass to processes run within 54 | the pipleine 55 | event_loop (None or asyncio.AbstractEventLoop): The event loop to use 56 | for asyncio based processing. If None is passed a new event loop 57 | is created from asyncio.new_event_loop() instead. 58 | user (None, int, or str): The user that will be used to setreuid() any 59 | child processes. The behavior is the same as that of the user arg 60 | to subprocess.Popen(). 61 | group (None, int, or str): The user that will be used to setregid() any 62 | child processes. The behavior is the same as that of the group arg 63 | to subprocess.Popen(). 64 | close_fds (bool): If true, close_fds will be passed to the equivalent 65 | of subprocess.Popen() (Default value = True). 66 | 67 | Attributes: 68 | cwd (str or Pathlib.Path): The current working directory in which to 69 | run the pipeline processes. 70 | environment (dict): The environment variables to pass to processes run 71 | within the pipleine 72 | event_loop (None or asyncio.AbstractEventLoop): The event loop to use 73 | for asyncio based processing. If None is passed a new event loop 74 | is created from asyncio.new_event_loop() instead. 75 | pipeline (None, PipelineNode): The running PipelineNode generated by 76 | PipelineFactory.build in run(). 
77 | pipeline_factory (PipelineNodeFactory): The command pipeline template 78 | that will be instantiated by the Job instance. 79 | user (None, int, or str): The user that will be used to setreuid() any 80 | child processes. The behavior is the same as that of the user arg 81 | to subprocess.Popen(). 82 | group (None, int, or str): The user that will be used to setregid() any 83 | child processes. The behavior is the same as that of the group arg 84 | to subprocess.Popen(). 85 | close_fds (bool): If true, close_fds will be passed to the equivalent 86 | of subprocess.Popen(). 87 | 88 | 89 | """ 90 | def __init__( 91 | self, pipeline_factory, cwd=None, env=None, event_loop=None, 92 | user=None, group=None, close_fds=True 93 | ): 94 | if env is None: 95 | self.environment = {} 96 | else: 97 | self.environment = dict(env) 98 | 99 | self.pipeline_factory = pipeline_factory 100 | self.pipeline = None 101 | self.user = user 102 | self.group = group 103 | self.close_fds = close_fds 104 | 105 | if event_loop is None: 106 | self.event_loop = asyncio.new_event_loop() 107 | else: 108 | self.event_loop = event_loop 109 | 110 | if cwd is not None: 111 | self.cwd = cwd 112 | else: 113 | self.cwd = os.getcwd() 114 | 115 | @property 116 | def stdin(self): 117 | """ 118 | Returns the pipeline's stdin_stream.writer(), or None 119 | 120 | Returns: 121 | Pipeline's stdin_stream.writer(), or None if no pipeline is 122 | running. 123 | """ 124 | if self.pipeline is not None: 125 | return self.pipeline.stdin_stream.writer() 126 | else: 127 | return None 128 | 129 | @property 130 | def stdout(self): 131 | """ 132 | Returns the pipeline's stdout_stream.reader(), or None 133 | 134 | Returns: 135 | Pipeline's stdout_stream.reader(), or None if no pipeline is 136 | running. 137 | """ 138 | if self.pipeline is not None: 139 | return self.pipeline.stdout_stream.reader() 140 | else: 141 | return None 142 | 143 | @property 144 | def stderr(self): 145 | """ 146 | Returns the pipeline's stderr_stream.reader(), or None 147 | 148 | Returns: 149 | Pipeline's stderr_stream.reader(), or None if no pipeline is 150 | running. 151 | """ 152 | if self.pipeline is not None: 153 | return self.pipeline.stderr_stream.reader() 154 | else: 155 | return None 156 | 157 | async def run_async(self, stdin_factory, stdout_factory, stderr_factory): 158 | """ 159 | Creates and runs a new pipeline 160 | 161 | Instantiates and runs a pipeline based on the PipelineNodeFactory 162 | provided to the Job's constructor. 163 | 164 | Args: 165 | stdin_factory (StreamFactory): the StreamFactory to instantiate to 166 | create the job's default stdin stream. 167 | stdout_factory (StreamFactory): the StreamFactory to instantiate to 168 | create the job's default stdout stream. 169 | stderr_factory (StreamFactory): the StreamFactory to instantiate to 170 | create the job's default stderr stream. 171 | """ 172 | 173 | if self.pipeline is not None: 174 | raise RuntimeError("Jobs can only be run once. 
Create a new Job to run the pipeline again.")
175 |
176 |         stdin_stream = stdin_factory.build(self)
177 |         stdout_stream = stdout_factory.build(self)
178 |         stderr_stream = stderr_factory.build(self)
179 |
180 |         self.pipeline = await self.pipeline_factory.build(
181 |             self,
182 |             stdin_stream=stdin_stream,
183 |             stdout_stream=stdout_stream,
184 |             stderr_stream=stderr_stream
185 |         )
186 |
187 |         stdin_stream.close()
188 |         stdout_stream.close()
189 |         stderr_stream.close()
190 |
191 |         if self.pipeline.stdin_stream is not None:
192 |             self.pipeline.stdin_stream.close()
193 |
194 |         if self.pipeline.stdout_stream is not None:
195 |             self.pipeline.stdout_stream.close()
196 |
197 |         if self.pipeline.stderr_stream is not None:
198 |             self.pipeline.stderr_stream.close()
199 |
200 |     def run(self, stdin_factory, stdout_factory, stderr_factory):
201 |         """
202 |         Creates and runs a new pipeline
203 |
204 |         Synchronous wrapper for run_async().
205 |
206 |         Args:
207 |             stdin_factory (StreamFactory): the StreamFactory to instantiate to
208 |                 create the job's default stdin stream.
209 |             stdout_factory (StreamFactory): the StreamFactory to instantiate to
210 |                 create the job's default stdout stream.
211 |             stderr_factory (StreamFactory): the StreamFactory to instantiate to
212 |                 create the job's default stderr stream.
213 |
214 |         """
215 |         return self.event_loop.run_until_complete(
216 |             self.run_async(
217 |                 stdin_factory=stdin_factory,
218 |                 stdout_factory=stdout_factory,
219 |                 stderr_factory=stderr_factory
220 |             )
221 |         )
222 |
223 |     async def wait_async(self, pipeline_node=None, exceptions=True):
224 |         """
225 |         Waits for all processes in the pipeline to complete
226 |
227 |         Waits for all processes in the pipeline to complete and checks the
228 |         return codes of each command.
229 |
230 |         Args:
231 |             pipeline_node (PipelineNode or None): The pipeline node to wait for
232 |             exceptions (Boolean): When true, raises an exception when processes
233 |                 exit with non-zero return codes
234 |
235 |         Returns:
236 |             A tuple of exit codes from the completed processes
237 |
238 |         Raises:
239 |             NonzeroExitCodeException: When a process returns a non-zero return code
240 |             RuntimeError: When called on a Job that has not invoked Job.run()
241 |
242 |         """
243 |
244 |         if pipeline_node is None:
245 |             if self.pipeline is None:
246 |                 raise RuntimeError("Cannot wait for a Job that has not yet called Job.run()")
247 |             pipeline_node = self.pipeline
248 |         elif not isinstance(pipeline_node, PipelineNode):
249 |             raise ValueError("Argument pipeline_node must be a PipelineNode instance or None")
250 |
251 |         ret = await pipeline_node.wait_async()
252 |
253 |         if exceptions:
254 |             if any(rc != 0 for rc in ret):
255 |                 raise NonzeroExitCodeException(pipeline_node.flatten_children())
256 |
257 |         return tuple(ret)
258 |
259 |     def wait(self, pipeline_node=None, exceptions=True):
260 |         """
261 |         Synchronous wrapper for the wait_async() method.
262 |
263 |         Waits for all processes in the pipeline to complete and checks the
264 |         return codes of each command.
265 |
266 |         Args:
267 |             pipeline_node (PipelineNode or None): The pipeline node to wait for
268 |             exceptions (Boolean): When true, raises an exception when processes
269 |                 exit with non-zero return codes
270 |
271 |         Returns:
272 |             A tuple of exit codes from the completed processes
273 |
274 |         Raises:
275 |             NonzeroExitCodeException: When a process returns a non-zero return code
276 |
277 |         """
278 |
279 |         return self.event_loop.run_until_complete(
280 |             self.wait_async(pipeline_node=pipeline_node, exceptions=exceptions)
281 |         )
282 |
283 |     def send_signal(self, signum):
284 |         """
285 |         Sends a signal to all child ProcessNode processes.
286 |
287 |         Args:
288 |             signum (int): the signal to send.
289 |         """
290 |         self.pipeline.send_signal(signum)
291 |
292 |     def terminate(self):
293 |         """
294 |         Sends a signal.SIGTERM to all child ProcessNode processes.
295 |         """
296 |         self.send_signal(signal.SIGTERM)
297 |
298 |     def kill(self):
299 |         """
300 |         Sends a signal.SIGKILL to all child ProcessNode processes.
301 |         """
302 |         self.send_signal(signal.SIGKILL)
303 |
--------------------------------------------------------------------------------
/shtk/PipelineNode.py:
--------------------------------------------------------------------------------
1 | """
2 | PipelineNode instances are used to track and manage subprocesses run by shtk
3 | Shells.
4 | """
5 |
6 | import abc
7 | import asyncio
8 | import signal
9 | import sys
10 |
11 | from .util import export
12 |
13 | __all__ = []
14 |
15 | @export
16 | class PipelineNode(abc.ABC):
17 |     """
18 |     Abstract base class for subprocess management nodes
19 |
20 |     Attributes:
21 |         children (list of PipelineNode): children of this node
22 |         stdin_stream (None or Stream): Stream to use for stdin
23 |         stdout_stream (None or Stream): Stream to use for stdout
24 |         stderr_stream (None or Stream): Stream to use for stderr
25 |     """
26 |     def __init__(self, event_loop):
27 |         self.children = []
28 |         self.stdin_stream = None
29 |         self.stderr_stream = None
30 |         self.stdout_stream = None
31 |         self.event_loop = event_loop
32 |
33 |     @classmethod
34 |     async def create(cls, *args, **kwargs):
35 |         """
36 |         Instantiates and runs the node
37 |
38 |         Args:
39 |             *args: passed to the constructor
40 |             **kwargs: passed to the constructor
41 |
42 |         Returns:
43 |             PipelineNode:
44 |                 The instantiated and run node.
45 |         """
46 |         instance = cls(*args, **kwargs)
47 |         await instance.run()
48 |         return instance
49 |
50 |     async def run(self):
51 |         """
52 |         Runs the node by closing the stream endpoints the parent no longer needs.
53 |         """
54 |
55 |         self.stdin_stream.close_reader()
56 |         self.stdout_stream.close_writer()
57 |         self.stderr_stream.close_writer()
58 |
59 |     @staticmethod
60 |     async def _get_return_code(rc_list, idx, coro):
61 |         rc_list[idx] = await coro
62 |
63 |     def flatten_children(self):
64 |         """
65 |         Flattens the PipelineNode DAG into a list of PipelineProcess objects
66 |         using a depth-first search.
67 |
68 |         Returns:
69 |             list of PipelineProcess:
70 |                 All child PipelineProcess nodes
71 |         """
72 |
73 |         ret = []
74 |         if len(self.children) > 0:
75 |             for child in self.children:
76 |                 ret.extend(PipelineNode.flatten_children(child))
77 |         else:
78 |             ret.append(self)
79 |
80 |         return ret
81 |
82 |     def send_signal(self, signum):
83 |         """
84 |         Sends a signal to all child ProcessNode processes.
85 |
86 |         Args:
87 |             signum (int): the signal to send.
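Job.run()/wait() plus the signal helpers above support launch-then-signal workflows. A minimal sketch, assuming a POSIX `sleep` on $PATH; a Shell (defined in shtk/Shell.py, later in this listing) is used because it owns the event loop a Job needs:

```python
from shtk.Shell import Shell

# Shells must be entered as context managers so they create an event loop.
with Shell() as sh:
    sleep = sh.command('sleep')

    # wait=False returns immediately; the list holds one Job per pipeline.
    job = sh.run(sleep('10'), wait=False)[0]

    job.terminate()                    # sends SIGTERM to every child process
    print(job.wait(exceptions=False))  # e.g. (-15,) after SIGTERM
```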
88 | """ 89 | 90 | poll_result = self.poll() 91 | 92 | for child, rc in zip(self.flatten_children(), poll_result): 93 | if rc is None: 94 | try: 95 | child.proc.send_signal(signum) 96 | except ProcessLookupError: 97 | pass 98 | 99 | def terminate(self): 100 | """ 101 | Sends a signal.SIGTERM to all child ProcessNode processes. 102 | """ 103 | self.send_signal(signal.SIGTERM) 104 | 105 | def kill(self): 106 | """ 107 | Sends a signal.SIGKILL to all child ProcessNode processes. 108 | """ 109 | self.send_signal(signal.SIGKILL) 110 | 111 | async def poll_async(self, ret): 112 | """ 113 | Gets the return codes of all child ProcessNodes 114 | 115 | Args: 116 | ret (list of [int, None]): a list that will be modified to contain 117 | a collection of return codes from flattened child ProcessNodes. 118 | Child processes that have exited will be represented by their 119 | return code. Child processes that have not exited will be 120 | represented by None. 121 | """ 122 | 123 | ret.clear() 124 | 125 | tasks = [] 126 | for it_child, child in enumerate(self.flatten_children()): 127 | ret.append(None) 128 | coro = self._get_return_code(ret, it_child, child.proc.wait()) 129 | task = self.event_loop.create_task(coro) 130 | tasks.append(task) 131 | 132 | try: 133 | for task in tasks: 134 | await task 135 | except asyncio.CancelledError: 136 | for task in tasks: 137 | try: 138 | if not task.done(): 139 | task.cancel() 140 | await task 141 | except asyncio.CancelledError: 142 | pass 143 | else: 144 | return ret 145 | 146 | def poll(self, timeout=1e-6): 147 | """ 148 | Synchronous wrapper for poll_async(). Gets the return codes of all 149 | child ProcessNodes. 150 | 151 | Returns: 152 | list of (int or None): A list containing return codes from 153 | flattened child ProcessNodes. Child processes that have exited 154 | will be represented by their integer return code. Child 155 | processes that have not exited will be represented by None. 156 | """ 157 | 158 | ret = [] 159 | 160 | try: 161 | self.event_loop.run_until_complete( 162 | asyncio.wait_for( 163 | self.poll_async(ret), 164 | timeout=timeout 165 | ) 166 | ) 167 | except asyncio.TimeoutError: 168 | pass 169 | 170 | return ret 171 | 172 | async def wait_async(self): 173 | """ 174 | Waits for and retrieves the return codes of all child ProcessNodes. 175 | 176 | Returns: 177 | list of int: 178 | A list of return codes from a flattened collection of child 179 | processes. 180 | """ 181 | 182 | return await self.poll_async([]) 183 | 184 | def wait(self): 185 | """ 186 | Synchronous wrapper for wait_async(). 187 | 188 | Returns: 189 | list of int: 190 | A list of return codes from a flattened collection of child 191 | processes. 
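The practical difference between poll() and wait(), as a sketch; `node` is assumed to be an already-running PipelineNode, for example the `job.pipeline` attribute of a running Job:

```python
# poll() returns quickly: children that have not exited show up as None.
status = node.poll()    # e.g. [None] while the process is still running

# wait() blocks until every child has exited and returns their codes.
codes = node.wait()     # e.g. [0]
```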
192 | """ 193 | return self.event_loop.run_until_complete(self.wait_async()) 194 | 195 | @abc.abstractmethod 196 | def __repr__(self): 197 | pass 198 | 199 | @abc.abstractmethod 200 | def __str__(self): 201 | pass 202 | 203 | @export 204 | class PipelineChannel(PipelineNode): 205 | """ 206 | Represents a pipeline of commands 207 | 208 | Args: 209 | left (PipelineNode): A PipelineNode whose stdout is (usually) fed to 210 | right 211 | right (PipelineNode): A PipelineNode whose stdin is (usually) 212 | read from left 213 | 214 | Attributes: 215 | left (PipelineNode): The left PipelineNode 216 | right (PipelineNode): The right PipelineNode 217 | """ 218 | def __init__(self, event_loop, left, right): 219 | super().__init__(event_loop) 220 | 221 | self.left = left 222 | self.right = right 223 | 224 | self.stdin_stream = self.left.stdin_stream 225 | self.stdout_stream = self.right.stdout_stream 226 | self.stderr_stream = self.right.stderr_stream 227 | 228 | self.children.extend((self.left, self.right)) 229 | 230 | def __repr__(self): 231 | return f"{self.left!r} | {self.right!r}" 232 | 233 | def __str__(self): 234 | return f"{self.left!s} | {self.right!s}" 235 | 236 | @export 237 | class PipelineProcess(PipelineNode): 238 | """ 239 | An interface representing subprocesses. 240 | 241 | Args: 242 | cwd (str or pathlib.Path): The current working directory 243 | args (list of str or pathlib.Path): The arguments for the process 244 | (including the base command). 245 | env (dict of str): The environment variables for the process 246 | stdin_stream (Stream): The Stream whose .reader() is used as stdin 247 | stdout_stream (Stream): The Stream whose .writer() is used as stdout 248 | stderr_stream (Stream): The Stream whose .writer() is used as stderr 249 | user (None, int, or str): The user to pass to 250 | asyncio.create_subprocess_exec(). Requires Python >= 3.9. 251 | group (None, int, or str): The group to pass to 252 | asyncio.create_subprocess_exec(). Requires Python >= 3.9. 253 | close_fds (bool): If true, close_fds will be passed to the equivalent 254 | of subprocess.Popen(). 
255 | 256 | Raises: 257 | AssertionError: When len(args) <= 0 258 | """ 259 | def __init__( 260 | self, event_loop, cwd, args, env, stdin_stream, stdout_stream, 261 | stderr_stream, user=None, group=None, close_fds=True 262 | ): 263 | super().__init__(event_loop) 264 | 265 | self.cwd = cwd 266 | self.args = args 267 | self.environment = dict(env) 268 | self.proc = None 269 | self.wait_future = None 270 | self.user = user 271 | self.group = group 272 | self.close_fds = close_fds 273 | 274 | self.stdin_stream = stdin_stream 275 | self.stdout_stream = stdout_stream 276 | self.stderr_stream = stderr_stream 277 | 278 | assert len(self.args) > 0 279 | 280 | async def run(self): 281 | """ 282 | Runs the process using asyncio.create_subprocess_exec() 283 | """ 284 | 285 | extra_kwargs = {} 286 | if self.user is not None: 287 | extra_kwargs['user'] = self.user 288 | if (sys.version_info.major, sys.version_info.minor) < (3, 9): 289 | raise NotImplementedError("Running subprocesses as a different user requires Python version >= 3.9") #pylint: disable=line-too-long 290 | 291 | if self.group is not None: 292 | extra_kwargs['group'] = self.group 293 | if (sys.version_info.major, sys.version_info.minor) < (3, 9): 294 | raise NotImplementedError("Running subprocesses as a different group requires Python version >= 3.9") #pylint: disable=line-too-long 295 | 296 | proc_start = asyncio.create_subprocess_exec( 297 | *self.args, 298 | stdin=self.stdin_stream.reader(), 299 | stdout=self.stdout_stream.writer(), 300 | stderr=self.stderr_stream.writer(), 301 | cwd=self.cwd, 302 | env=self.environment, 303 | restore_signals=True, 304 | close_fds=self.close_fds, 305 | **extra_kwargs 306 | ) 307 | 308 | self.proc = await proc_start 309 | 310 | await super().run() 311 | 312 | def __repr__(self): 313 | return f"PipelineProcess(cwd={self.cwd!r}, args={self.args!r}, env={self.environment!r}, stdin_stream={self.stdin_stream!r}, stdout_stream={self.stdout_stream!r}, stderr_stream={self.stderr_stream!r})" #pylint: disable=line-too-long 314 | 315 | def __str__(self): 316 | return f"PipelineProcess(args={self.args!r})" 317 | -------------------------------------------------------------------------------- /shtk/PipelineNodeFactory.py: -------------------------------------------------------------------------------- 1 | """ 2 | PipelineNodeFactory instances are templates used to instantiate associated 3 | PipelineNode classes. They allow a pipeline configuration to be run 4 | independently multiple times. 
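The template/instance split described here means one factory can be instantiated and run any number of times. A minimal sketch, assuming `echo` is on $PATH:

```python
from shtk.Shell import Shell

with Shell() as sh:
    echo = sh.command('echo')       # a PipelineProcessFactory template

    greet = echo('hello', 'world')  # calling a template returns an extended copy

    # The same template can be run repeatedly; each run builds fresh nodes:
    sh.run(greet)
    sh.run(greet)
```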
5 | """ 6 | 7 | import abc 8 | import asyncio 9 | import contextlib 10 | import pathlib 11 | 12 | from .PipelineNode import * #pylint: disable=unused-wildcard-import 13 | from .StreamFactory import StreamFactory, FileStreamFactory, NullStreamFactory, PipeStreamFactory 14 | from .util import export 15 | 16 | __all__ = [] 17 | 18 | @export 19 | class PipelineNodeFactory(abc.ABC): 20 | """ 21 | Abstract base class defining a template for building PipelineNode's 22 | 23 | Args: 24 | stdin_factory (None or StreamFactory): Template for stdin Stream's 25 | (Default value: None) 26 | stdout_factory (None or StreamFactory): Template for stdout Stream's 27 | (Default value: None) 28 | stderr_factory (None or StreamFactory): Template for stderr Stream's 29 | (Default value: None) 30 | 31 | Attributes: 32 | stdin_factory (None or StreamFactory): Template for stdin Stream's 33 | (Default value: None) 34 | stdout_factory (None or StreamFactory): Template for stdout Stream's 35 | (Default value: None) 36 | stderr_factory (None or StreamFactory): Template for stderr Stream's 37 | (Default value: None) 38 | children (list of PipelineNodeFactory): templates for children 39 | """ 40 | def __init__(self, stdin_factory=None, stdout_factory=None, stderr_factory=None): 41 | self.stdin_factory = stdin_factory 42 | self.stdout_factory = stdout_factory 43 | self.stderr_factory = stderr_factory 44 | self.children = [] 45 | 46 | @staticmethod 47 | def _create_stream_factory(arg, mode): 48 | if isinstance(arg, StreamFactory): 49 | return arg 50 | elif isinstance(arg, str): 51 | return FileStreamFactory(arg, mode) 52 | elif isinstance(arg, pathlib.Path): 53 | return FileStreamFactory(arg, mode) 54 | elif arg is None: 55 | return NullStreamFactory() 56 | else: 57 | raise TypeError( 58 | f"Argument `arg` must be instance of StreamFactory or str, not {type(arg)}" 59 | ) 60 | 61 | def stdin(self, arg, mode='r'): 62 | """ 63 | Sets the stdin stream factory (in-place) 64 | 65 | Args: 66 | arg (str, pathlib.Path, StreamFactory, or None): If arg is an str or 67 | pathlib.Path, it is treated as a filename and stdin will be read 68 | from that file. 69 | 70 | If arg is a StreamFactory it is used directly to create streams 71 | for stdin. 72 | 73 | If None, stdin reads from os.devnull 74 | mode: The mode in which to open the file, if opened. Only relevant 75 | if arg is a str or pathlib.Path. Must be one of ('r', 'rb'). 76 | (Default value = 'r') 77 | 78 | Returns: 79 | PipelineNodeFactory: 80 | Altered self 81 | 82 | """ 83 | acceptable_modes = ('r', 'rb') 84 | if mode not in acceptable_modes: 85 | raise ValueError(f"Argument `mode` must be one of {acceptable_modes}") 86 | 87 | self.stdin_factory = self._create_stream_factory(arg, mode) 88 | 89 | return self 90 | 91 | def stdout(self, arg, mode='w'): 92 | """ 93 | Sets the stdout stream factory (in-place) 94 | 95 | Args: 96 | arg (str, pathlib.Path, StreamFactory, or None): If arg is an str or 97 | pathlib.Path, it is treated as a filename and stdout will write 98 | to that file. 99 | 100 | If arg is a StreamFactory it is used directly to create streams 101 | for stdout. 102 | 103 | If None, stdout writes to os.devnull 104 | mode: The mode in which to open the file, if opened. Only relevant 105 | if arg is a str or pathlib.Path. Must be one of ('w', 'wb', 'a', 106 | 'ab'). 
(Default value = 'w')
107 |
108 |         Returns:
109 |             PipelineNodeFactory:
110 |                 Altered self
111 |         """
112 |         acceptable_modes = ('w', 'a', 'wb', 'ab')
113 |         if mode not in acceptable_modes:
114 |             raise ValueError(f"Argument `mode` must be one of {acceptable_modes}")
115 |
116 |         self.stdout_factory = self._create_stream_factory(arg, mode)
117 |
118 |         return self
119 |
120 |     def stderr(self, arg, mode='w'):
121 |         """
122 |         Sets the stderr stream factory (in-place)
123 |
124 |         Args:
125 |             arg (str, pathlib.Path, StreamFactory, or None): If arg is a str or
126 |                 pathlib.Path, it is treated as a filename and stderr will write
127 |                 to that file.
128 |
129 |                 If arg is a StreamFactory it is used directly to create streams
130 |                 for stderr.
131 |
132 |                 If None, stderr writes to os.devnull
133 |             mode: The mode in which to open the file, if opened. Only relevant
134 |                 if arg is a str or pathlib.Path. Must be one of ('w', 'wb', 'a',
135 |                 'ab'). (Default value = 'w')
136 |
137 |         Returns:
138 |             PipelineNodeFactory:
139 |                 Altered self
140 |         """
141 |         acceptable_modes = ('w', 'a', 'wb', 'ab')
142 |         if mode not in acceptable_modes:
143 |             raise ValueError(f"Argument `mode` must be one of {acceptable_modes}")
144 |
145 |         self.stderr_factory = self._create_stream_factory(arg, mode)
146 |
147 |         return self
148 |
149 |     def __or__(self, other):
150 |         """
151 |         Shorthand to create a PipelineChannelFactory(self, other)
152 |
153 |         Args:
154 |             other (PipelineNodeFactory): the child process to pipe stdout to
155 |
156 |         Returns:
157 |             PipelineChannelFactory:
158 |                 The constructed PipelineChannelFactory instance
159 |         """
160 |         return PipelineChannelFactory(self, other)
161 |
162 |     async def build(self, job, stdin_stream=None, stdout_stream=None, stderr_stream=None):
163 |         """
164 |         Creates and executes PipelineNode's and self-defined StreamFactories
165 |
166 |         If self.std{in,out,err}_factory is defined, it is passed to the child as
167 |         the preferred stream. Otherwise the std{in,out,err}_stream parameters
168 |         are used.
169 |
170 |         Args:
171 |             job (Job): job from which to pull environment variables, current
172 |                 working directory, etc.
173 | stdin_stream (Stream): Stream instance to pass to PipelineNode as stdin 174 | stdout_stream (Stream): Stream instance to pass to PipelineNode as stdout 175 | stderr_stream (Stream): Stream instance to pass to PipelineNode as stderr 176 | 177 | Returns: 178 | PipelineNode: 179 | The constructed PipelineNode instance 180 | 181 | """ 182 | 183 | need_to_close = [] 184 | 185 | if self.stdin_factory is not None: 186 | stdin_stream = self.stdin_factory.build(job) 187 | need_to_close.append(stdin_stream.close_reader) 188 | elif stdin_stream is None: 189 | raise ValueError("stdin_stream must not be None when not overriden by stdin()") 190 | 191 | if self.stdout_factory is not None: 192 | stdout_stream = self.stdout_factory.build(job) 193 | need_to_close.append(stdout_stream.close_writer) 194 | elif stdout_stream is None: 195 | raise ValueError("stdout_stream must not be None when not overriden by stdout()") 196 | 197 | if self.stderr_factory is not None: 198 | stderr_stream = self.stderr_factory.build(job) 199 | need_to_close.append(stderr_stream.close_writer) 200 | elif stderr_stream is None: 201 | raise ValueError("stderr_stream must not be None when not overriden by stderr()") 202 | 203 | ret = await self.build_inner(job, stdin_stream, stdout_stream, stderr_stream) 204 | 205 | for closer in need_to_close: 206 | closer() 207 | 208 | return ret 209 | 210 | @abc.abstractmethod 211 | async def build_inner(self, job, stdin_stream, stdout_stream, stderr_stream): 212 | """ 213 | Abstract method used for instantiating PipelineNodes. This method 214 | is wrapped by build() which handles stream management prior to passing 215 | them to build_inner(). 216 | 217 | Args: 218 | job (Job): The job from which to pull the current working directory 219 | and environment variables for subprocesses. 220 | stdin_stream (Stream): The Stream instance to be used as the 221 | PipelineNode's stdin_stream. 222 | stdout_stream (Stream): The Stream instance to be used as the 223 | PipelineNode's stdout_stream. 224 | stderr_stream (Stream): The Stream instance to be used as the 225 | PipelineNode's stderr_stream. 226 | 227 | Returns: 228 | PipelineNode: 229 | An instantiated PipelineNode. 230 | """ 231 | 232 | @export 233 | class PipelineChannelFactory(PipelineNodeFactory): 234 | """ 235 | PipelineChannelFactory is a template for creating PipelineChannel. 236 | 237 | PipelineChannelFactory creates PipelineChannel instances representing a 238 | chain of subprocesses with each feeding stdout to the next subprocess's 239 | stdin. 240 | 241 | Args: 242 | left (PipelineNodeFactory): A PipelineNodeFactory that will create a 243 | series of processes that should write to stdout. 244 | right (PipelineNodeFactory): A PipelineNodeFactory that will create a 245 | series of processes that should read from stdin. 246 | 247 | Attributes: 248 | left (PipelineNodeFactory): A PipelineNodeFactory that will create a 249 | series of processes that should write to stdout. 250 | right (PipelineNodeFactory): A PipelineNodeFactory that will create a 251 | series of processes that should read from stdin. 252 | children (list of PipelineNodeFactory): [left, right] 253 | pipe_stream (PipeStreamFactory): The PipeStreamFactory that will be 254 | used to redirect left's stdout to right's stdin. 
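A sketch of how PipelineChannelFactory instances are normally produced, via the `|` operator on factories; `/etc/hostname` is just an illustrative input file:

```python
from shtk.Shell import Shell

with Shell() as sh:
    cat = sh.command('cat')
    wc = sh.command('wc')

    # `|` builds a PipelineChannelFactory; nothing runs until the shell does.
    pipeline = cat('/etc/hostname') | wc('-c')

    print(sh.evaluate(pipeline))   # stdout of the last command in the chain
```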
255 |     """
256 |     def __init__(self, left, right, **kwargs):
257 |         super().__init__(**kwargs)
258 |
259 |         if not isinstance(left, PipelineNodeFactory):
260 |             raise TypeError("Argument `left` must be instance of PipelineNodeFactory")
261 |
262 |         if not isinstance(right, PipelineNodeFactory):
263 |             raise TypeError("Argument `right` must be instance of PipelineNodeFactory")
264 |
265 |         self.left = left
266 |         self.right = right
267 |         self.children = [left, right]
268 |         self.pipe_stream = PipeStreamFactory()
269 |
270 |     async def build_inner(self, job, stdin_stream, stdout_stream, stderr_stream):
271 |         """Instantiates a PipelineChannel"""
274 |         with contextlib.closing(self.pipe_stream.build(job)) as pipe_stream:
275 |             left_task = asyncio.create_task(self.left.build(
276 |                 job=job,
277 |                 stdin_stream=stdin_stream,
278 |                 stdout_stream=pipe_stream,
279 |                 stderr_stream=stderr_stream
280 |             ))
281 |
282 |             right_task = asyncio.create_task(self.right.build(
283 |                 job,
284 |                 stdin_stream=pipe_stream,
285 |                 stdout_stream=stdout_stream,
286 |                 stderr_stream=stderr_stream
287 |             ))
288 |
289 |             return await PipelineChannel.create(
290 |                 job.event_loop,
291 |                 left=await left_task,
292 |                 right=await right_task
293 |             )
294 |
295 | @export
296 | class PipelineProcessFactory(PipelineNodeFactory):
297 |     """
298 |     Template for a PipelineProcess which runs a command as a subprocess
299 |
300 |     Args:
301 |         *args (list of str or pathlib.Path): The command to run and its
302 |             arguments for the instantiated PipelineProcess instances.
303 |         env (dict): The environment variables to use for the
304 |             instantiated PipelineProcess instances (Default value = None).
305 |         cwd (str or pathlib.Path): The current working directory for the
306 |             instantiated PipelineProcess instances (Default value = None).
307 |         close_fds (bool): If true, close_fds will be passed to the equivalent
308 |             of subprocess.Popen().
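A sketch of extending a PipelineProcessFactory template with extra environment variables and a stdout redirection (the output filename is illustrative):

```python
from shtk.Shell import Shell

with Shell() as sh:
    env_cmd = sh.command('env')

    # Keyword arguments merge extra environment variables into a copy of the
    # template; .stdout() redirects output to a file relative to the job cwd.
    sh.run(env_cmd(GREETING='hello').stdout('env_dump.txt'))
```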
309 | """ 310 | def __init__(self, *args, env=None, cwd=None, close_fds=True): 311 | super().__init__() 312 | self.args = args 313 | self.cwd = cwd 314 | self.close_fds = close_fds 315 | 316 | if env is None: 317 | self.environment = {} 318 | else: 319 | self.environment = dict(env) 320 | 321 | def __call__(self, *args, **env): 322 | """ 323 | Appends arguments and/or environment variables to a copy of self 324 | 325 | Args: 326 | *args: command arguments to append to the template 327 | **env: environment variables to add to the template 328 | 329 | Returns: 330 | PipelineProcessFactory: 331 | A copy of self with the extra args and envs 332 | """ 333 | return PipelineProcessFactory( 334 | *self.args, *args, 335 | env=dict(self.environment, **env), 336 | cwd=self.cwd, 337 | close_fds=self.close_fds 338 | ) 339 | 340 | async def build_inner(self, job, stdin_stream, stdout_stream, stderr_stream): 341 | """Instantiates a PipelineProcess""" 342 | env = dict(job.environment, **self.environment) 343 | 344 | cwd = self.cwd or job.cwd 345 | 346 | return await PipelineProcess.create( 347 | job.event_loop, 348 | cwd=cwd, 349 | env=env, 350 | args=self.args, 351 | stdin_stream=stdin_stream, 352 | stdout_stream=stdout_stream, 353 | stderr_stream=stderr_stream, 354 | user=job.user, 355 | group=job.group, 356 | close_fds=job.close_fds 357 | ) 358 | -------------------------------------------------------------------------------- /shtk/Shell.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shells are the primary way of interacting with shtk. You can use them to 3 | define and run a series of commands called a Pipeline as subprocesses of the 4 | Python script. 5 | """ 6 | 7 | import asyncio 8 | import atexit 9 | import collections 10 | import contextlib 11 | import json 12 | import os 13 | import os.path 14 | import pathlib 15 | import shlex 16 | import subprocess 17 | import sys 18 | import threading 19 | 20 | from .Job import Job 21 | from .util import export, which, Pipe # pylint: disable=unused-import 22 | from .PipelineNodeFactory import PipelineProcessFactory 23 | from .StreamFactory import ManualStreamFactory, PipeStreamFactory 24 | from ._AsyncUtil import AsyncHelper 25 | 26 | __all__ = [] 27 | 28 | @export 29 | class Shell: # pylint: disable=too-many-arguments, too-many-instance-attributes 30 | """ 31 | A shell object tracks pre-requisite information (e.g. cwd and environment 32 | variables) necessary to run commands as subprocesses. A shell is also a 33 | context manager that exposes environment variables and other info to 34 | subshells and subprocesses, while also setting itself as the default shell 35 | within managed code. 36 | 37 | Args: 38 | cwd (str, pathlib.Path): Current working directory for subprocesses. 39 | env (bool): If provided Shell will use these key-value pairs as 40 | environment variables. Otherwise Shell inherits from the currently 41 | active Shell, or _default_shell. 42 | umask (int): Controls the default umask for subprocesses 43 | stdin (file-like): Default stdin stream for subprocesses (defaults to 44 | sys.stdin) 45 | stdout (file-like): Default stdout stream for subprocesses (defaults to 46 | sys.stdout) 47 | stderr (file-like): Default stderr stream for subprocesses (defaults to 48 | sys.stderr) 49 | exceptions (bool): Whether exceptions should be raised when non-zero 50 | exit codes are returned by subprocesses. 51 | user (None, int, str): Run subprocesses as the given user. 
If None,
52 |             run as same user as this process. If int, run as user with
53 |             uid=user. If str, run as user with name=user. Requires Python >=
54 |             3.9.
55 |         group (None, int, str): Run subprocesses as the given group. If None,
56 |             run as same group as this process. If int, run as group with
57 |             gid=group. If str, run as group with name=group. Requires Python
58 |             >= 3.9.
59 |     """
60 |     _thread_vars = collections.defaultdict(dict)
61 |
62 |     def __init__(
63 |             self, cwd=None, env=None, umask=None, stdin=None, stdout=None,
64 |             stderr=None, exceptions=True, user=None, group=None
65 |     ):
66 |         self.lock = threading.RLock()
67 |         self.exceptions = exceptions
68 |         self.event_loop = None
69 |
70 |         with self.lock:
71 |
72 |             if env is None:
73 |                 self.environment = dict(Shell.get_shell().environment)
74 |             else:
75 |                 self.environment = dict(env)
76 |
82 |             if cwd is None:
83 |                 cwd = Shell.get_shell().cwd
84 |
85 |             if user is None and Shell.get_shell() is not None:
86 |                 user = Shell.get_shell().user
87 |
88 |             if group is None and Shell.get_shell() is not None:
89 |                 group = Shell.get_shell().group
90 |
91 |             self.user = user
92 |             self.group = group
93 |
94 |             self.cwd = pathlib.Path(cwd)
95 |             self.pwd = None
96 |
97 |             if umask is None:
98 |                 umask_grep = subprocess.check_output([
99 |                     'grep', 'Umask', f"/proc/{os.getpid()}/status"
100 |                 ])
101 |                 umask = int(umask_grep.split()[-1], 8)
102 |             self.umask = umask
103 |
104 |             if stdin is None:
105 |                 stdin = sys.stdin
106 |             self.stdin = stdin
107 |
108 |             if stdout is None:
109 |                 stdout = sys.stdout
110 |             self.stdout = stdout
111 |
112 |             if stderr is None:
113 |                 stderr = sys.stderr
114 |             self.stderr = stderr
115 |
116 |     def command(self, name):
117 |         """
118 |         Creates a PipelineProcessFactory suitable for executing a command
119 |
120 |         Args:
121 |             name (str or pathlib.Path): Name or path of the command to run. If
122 |                 an absolute or relative path is provided (must contain a '/'
123 |                 character) then the command will be loaded from the specified
124 |                 location. Otherwise the locations specified by the $PATH
125 |                 environment variable will be checked for suitable executable
126 |                 and readable files with the appropriate name.
127 |
128 |                 If name is a str, the name will be passed through
129 |                 os.path.expanduser prior to lookup.
130 |
131 |         Returns:
132 |             PipelineProcessFactory:
133 |                 A PipelineProcessFactory node representing the command to be
134 |                 executed.
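A sketch of the lookup behavior described above; `no-such-tool` is a deliberately unresolvable name:

```python
from shtk.Shell import Shell

with Shell() as sh:
    ls = sh.command('ls')    # resolved via $PATH to an absolute, executable path

    try:
        sh.command('no-such-tool')
    except RuntimeError as exc:
        print(exc)           # -> no-such-tool: command not found
```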
135 | 136 | Raises: 137 | RuntimeError: command cannot be found 138 | RuntimeError: command filepath is not readable 139 | RuntimeError: command filepath is not executable 140 | """ 141 | 142 | if isinstance(name, str): 143 | name = os.path.expanduser(name) 144 | 145 | if isinstance(name, pathlib.Path) or '/' in name: 146 | path = pathlib.Path(name) 147 | if path.is_absolute(): 148 | command_path = path.resolve() 149 | else: 150 | command_path = (self.cwd / path).resolve() 151 | else: 152 | command_path = which(name, path=self.environment['PATH']) 153 | 154 | if command_path is None or not command_path.is_file(): 155 | raise RuntimeError(f"{name}: command not found") 156 | 157 | if not os.access(command_path, os.R_OK): 158 | raise RuntimeError(f"{command_path}: is not readable") 159 | 160 | if not os.access(command_path, os.X_OK): 161 | raise RuntimeError(f"{command_path}: is not executable") 162 | 163 | return PipelineProcessFactory(command_path) 164 | 165 | def cd(self, path): 166 | """ 167 | Changes the default current working directory for subprocesses built by the Shell 168 | 169 | Args: 170 | path (str or pathlib.Path): Changes directory to provided path such 171 | that managed subprocesses will use this directory as their 172 | default current working directory. If '-' is provided, returns 173 | to previous working directory. 174 | 175 | Raises: 176 | RuntimeError: raised if path is not a directory 177 | """ 178 | with self.lock: 179 | if path == '-': 180 | path = self.pwd 181 | 182 | path = pathlib.Path(path) 183 | if not path.is_absolute(): 184 | new_cwd = (self.cwd / path).resolve() 185 | else: 186 | new_cwd = path.resolve() 187 | 188 | if not new_cwd.is_dir(): 189 | raise RuntimeError(f"{new_cwd!s} is not a directory") 190 | 191 | self.pwd = self.cwd 192 | self.cwd = new_cwd 193 | 194 | @contextlib.contextmanager 195 | def cd_manager(self, new_wd): 196 | """ 197 | Contextmanager for Shell.cd() returns to previous dir after exit 198 | 199 | Args: 200 | new_wd (str or pathlib.Path): directory to change to 201 | 202 | Yields: 203 | pathlib.Path: The new self.cwd 204 | """ 205 | old_wd = self.cwd 206 | self.cd(new_wd) 207 | yield self.cwd 208 | self.cd(old_wd) 209 | 210 | def export(self, **env): 211 | """ 212 | Sets environment variables passed as keyword arguments 213 | 214 | Args: 215 | **env (dict): List of key-value pairs that will set as environment 216 | variables for the Shell() 217 | """ 218 | with self.lock: 219 | for key, value in env.items(): 220 | self.environment[key] = value 221 | 222 | def getenv(self, name): 223 | """ 224 | Gets the value of an environment variable within the Shell 225 | 226 | Args: 227 | name (str): Name of the environment variable to evaluate 228 | 229 | Returns: 230 | str: The value of the named environment variable 231 | 232 | """ 233 | return self.environment[name] 234 | 235 | @classmethod 236 | def _get_thread_vars(cls): 237 | thread_id = threading.get_ident() 238 | return cls._thread_vars[thread_id] 239 | 240 | @classmethod 241 | def get_shell(cls): 242 | """ 243 | Gets the current active shell from the shell stack 244 | 245 | Returns: 246 | Shell: The most recently entered shell context 247 | """ 248 | tvars = cls._get_thread_vars() 249 | if ('shell_stack' not in tvars) or (len(tvars['shell_stack']) == 0): 250 | return None 251 | return tvars['shell_stack'][-1] 252 | 253 | def __enter__(self): 254 | tvars = self._get_thread_vars() 255 | tvars.setdefault('shell_stack', []) 256 | tvars['shell_stack'].append(self) 257 | 258 | if self.event_loop 
is not None: 259 | raise RuntimeError(f"{self.__class__} is not re-entrant.") 260 | 261 | self.event_loop = asyncio.new_event_loop() 262 | 263 | return self 264 | 265 | def __exit__(self, exc_type, exc_val, exc_tb): 266 | self.event_loop.run_until_complete( 267 | self.event_loop.shutdown_asyncgens() 268 | ) 269 | self.event_loop.close() 270 | self.event_loop = None 271 | 272 | tvars = self._get_thread_vars() 273 | tvars['shell_stack'].pop() 274 | 275 | def __call__(self, *pipeline_factories, exceptions=None, wait=True, close_fds=True): 276 | """ 277 | Executes a series of PipelineNodeFactory nodes as subprocesses 278 | 279 | Args: 280 | *pipeline_factories: The PipelineNodeFactory nodes to execute 281 | exceptions: Whether or not to raise exceptions for non-zero return 282 | codes (Default value = None) 283 | wait: Whether the call should block waiting for the subprocesses to 284 | exit (Default value = True) 285 | close_fds (bool): If true, close_fds will be passed to the equivalent 286 | of subprocess.Popen() (Default value = True). 287 | 288 | Returns: 289 | list of int: The return codes of the subprocesses after exiting 290 | """ 291 | return self.run(*pipeline_factories, exceptions=exceptions, wait=wait, close_fds=close_fds) 292 | 293 | def run(self, *pipeline_factories, exceptions=None, wait=True, close_fds=True): 294 | """ 295 | Executes a series of PipelineNodeFactory nodes as subprocesses 296 | 297 | Args: 298 | *pipeline_factories: The PipelineNodeFactory nodes to execute. If 299 | multiple arguments are provided, then the commands will run in 300 | parallel. 301 | exceptions: Whether or not to raise exceptions for non-zero exit 302 | codes (Default value = None, meaning inherited) 303 | wait: Whether the call should block waiting for the subprocesses to 304 | exit (Default value = True) 305 | close_fds (bool): If true, close_fds will be passed to the equivalent 306 | of subprocess.Popen() (Default value = True). 307 | 308 | Returns: 309 | list of Job: 310 | Job instances representing individual pipelines. 
The length of
311 |             the list will always be equal to len(pipeline_factories).
312 |         """
313 |         async def run_and_wait(*jobs, exceptions=None, wait=True):
314 |             run_tasks = []
315 |             for job in jobs:
316 |                 run_tasks.append(
317 |                     self.event_loop.create_task(
318 |                         job.run_async(
319 |                             stdin_factory=ManualStreamFactory(fileobj_r=self.stdin),
320 |                             stdout_factory=ManualStreamFactory(fileobj_w=self.stdout),
321 |                             stderr_factory=ManualStreamFactory(fileobj_w=self.stderr)
322 |                         )
323 |                     )
324 |                 )
325 |
326 |             for run_task in run_tasks:
327 |                 await run_task
328 |
329 |             if wait:
330 |                 for job in jobs:
331 |                     await job.wait_async(exceptions=exceptions)
332 |
333 |         if exceptions is None:
334 |             exceptions = self.exceptions
335 |
336 |         ret = []
337 |         for pipeline_factory in pipeline_factories:
338 |             job = Job(
339 |                 pipeline_factory,
340 |                 env=self.environment,
341 |                 cwd=self.cwd,
342 |                 event_loop=self.event_loop,
343 |                 user=self.user,
344 |                 group=self.group,
345 |                 close_fds=close_fds
346 |             )
347 |             self.event_loop.run_until_complete(
348 |                 run_and_wait(job, exceptions=exceptions, wait=wait)
349 |             )
350 |             ret.append(job)
351 |
352 |         return ret
353 |
354 |     def evaluate(self, pipeline_factory, *, exceptions=None):
355 |         """
356 |         Executes a PipelineNodeFactory and returns the stdout text
357 |
358 |         Args:
359 |             pipeline_factory (PipelineNodeFactory): the pipeline to instantiate
360 |                 and execute
361 |             exceptions: Whether or not to raise exceptions when subprocesses
362 |                 return non-zero return codes (Default value = None)
363 |
364 |         Returns:
365 |             str or bytes:
366 |                 The string of text that the final subprocess writes to
367 |                 stdout
368 |         """
369 |         if exceptions is None:
370 |             exceptions = self.exceptions
371 |
372 |         with Pipe() as stdout_pipe:
373 |             job = Job(
374 |                 pipeline_factory,
375 |                 env=self.environment,
376 |                 cwd=self.cwd,
377 |                 event_loop=self.event_loop,
378 |                 user=self.user,
379 |                 group=self.group
380 |             )
381 |
382 |             job.run(
383 |                 stdin_factory=ManualStreamFactory(fileobj_r=self.stdin),
384 |                 stdout_factory=ManualStreamFactory(fileobj_w=stdout_pipe.writer),
385 |                 stderr_factory=ManualStreamFactory(fileobj_w=self.stderr)
386 |             )
387 |
388 |             stdout_pipe.close_writer()
389 |
390 |             ret = stdout_pipe.read()
391 |
392 |             job.wait(exceptions=exceptions)
393 |
394 |         return ret
395 |
396 |     def _read_env(self, fd_env, pipe_w):
397 |         os.close(pipe_w)
398 |         with os.fdopen(fd_env, 'rb') as env_fin:
399 |             self.environment = json.load(env_fin)
400 |
401 |     def source(self, filepath, *, exceptions=None):
402 |         """
403 |         Use /bin/sh to source a file, then import the resulting environment as
404 |         the shtk.Shell's new environment.
405 |
406 |         This method uses (approximately) the following kludge:
407 |
408 |         .. highlight:: python
409 |         .. code-block:: python
410 |
411 |             q = shlex.quote
412 |             executable = str(sys.executable or 'python3')
413 |             kludge = '''
414 |                 import json, os, sys;
415 |                 fout = os.fdopen(int(sys.argv[1]), "w");
416 |                 json.dump(dict(os.environ), fout);
417 |                 fout.close();
418 |             '''
419 |             args = (".", q(path), '&&', q(executable), '-c', q(kludge), str(int(pipe_fd)))
420 |             pipeline_factory = shcmd('-c', " ".join(args))
421 |             ...
422 |
423 |         Args:
424 |             filepath (str or pathlib.Path): path to file to be sourced. It
425 |                 will be converted to an absolute path before sourcing for
426 |                 compatibility with picky shells (e.g. dash).
427 |             exceptions (bool): Whether or not to raise exceptions when subprocesses
428 |                 return non-zero return codes (Default value = None)
429 |
430 |         Returns:
431 |             None:
432 |                 The sourced environment replaces this Shell's environment;
433 |                 nothing is returned.
434 |         """
435 |         if exceptions is None:
436 |             exceptions = self.exceptions
437 |
438 |         filepath = pathlib.Path(filepath)
439 |         if not filepath.is_absolute():
440 |             filepath = self.cwd / filepath
441 |         filepath = str(filepath.resolve())
442 |
443 |         shcmd = self.command('/bin/sh')
444 |         executable = str(sys.executable or 'python3')
445 |
446 |         pipe_r, pipe_w = os.pipe2(os.O_CLOEXEC)
447 |         os.set_inheritable(pipe_r, False)
448 |         os.set_inheritable(pipe_w, True)
449 |
450 |         quote = shlex.quote
451 |         python_kludge = '; '.join((
452 |             'import json, os, sys',
453 |             'fout=os.fdopen(int(sys.argv[1]), "w")',
454 |             'json.dump(dict(os.environ), fout)',
455 |             'fout.close()'
456 |         ))
457 |         args = []
458 |         args.extend(('.', quote(filepath)))
459 |         args.append('&&')
460 |         args.extend((quote(executable), '-c'))
461 |         args.append(quote(python_kludge))
462 |         args.append(str(int(pipe_w)))
463 |
464 |         job = Job(
465 |             shcmd('-c', " ".join(args)),
466 |             env=self.environment,
467 |             cwd=self.cwd,
468 |             event_loop=self.event_loop,
469 |             user=self.user,
470 |             group=self.group,
471 |             close_fds=False
472 |         )
473 |
474 |         event_loop = self.event_loop or asyncio.new_event_loop()
475 |         try:
476 |             event_loop.run_until_complete(
477 |                 job.run_async(
478 |                     ManualStreamFactory(fileobj_r=self.stdin),
479 |                     ManualStreamFactory(fileobj_w=self.stdout),
480 |                     ManualStreamFactory(fileobj_w=self.stderr)
481 |                 )
482 |             )
483 |
484 |             async_helper = AsyncHelper(event_loop)
485 |             task = async_helper.create_task(job.wait_async(exceptions=exceptions))
486 |             async_helper.add_reader(pipe_r, self._read_env, pipe_w)
487 |             async_helper.run()
488 |
489 |             exc = task.exception()
490 |             if exc is not None:
491 |                 raise exc
492 |         finally:
493 |             if self.event_loop is None:
494 |                 event_loop.close()
495 |
496 | _default_shell = Shell(env=os.environ, cwd=os.getcwd())
497 | _default_shell.__enter__()
498 | atexit.register(_default_shell.__exit__, None, None, None)
499 |
--------------------------------------------------------------------------------
/shtk/Stream.py:
--------------------------------------------------------------------------------
1 | """
2 | shtk uses Stream instances to track and manage file-like objects used for
3 | input and output streams of subprocesses.
4 | """
5 |
6 | import os
7 | import pathlib
8 | import grp
9 | import pwd
10 |
11 | from .util import export
12 |
13 | __all__ = []
14 |
15 | @export
16 | class Stream:
17 |     """
18 |     Base class for other Stream classes.
19 |
20 |     Wraps file-like objects to couple readers and writers to the same streams
21 |     (where it makes sense) and more tightly control closure of the stream.
22 |     Also functions as a context manager (yielding self) that calls self.close()
23 |     upon exit.
24 |
25 |     Args:
26 |         fileobj_r (file-like or None): A file-like object suitable for reading.
27 |         fileobj_w (file-like or None): A file-like object suitable for writing.
28 |
29 |     Attributes:
30 |         fileobj_r (file-like or None): A file-like object suitable for reading.
31 |         fileobj_w (file-like or None): A file-like object suitable for writing.
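A minimal sketch of the Stream context-manager contract, using the PipeStream subclass defined below:

```python
from shtk.Stream import PipeStream

# PipeStream couples both ends of an os.pipe2() in one Stream object.
with PipeStream() as stream:
    stream.writer().write('ping\n')
    stream.close_writer()          # closing the writer signals EOF to the reader
    print(stream.reader().read())  # -> 'ping\n'
```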
32 | """ 33 | def __init__(self, fileobj_r=None, fileobj_w=None): 34 | self.fileobj_r = fileobj_r 35 | self.fileobj_w = fileobj_w 36 | 37 | def reader(self): 38 | """ 39 | Returns fileobj_r 40 | 41 | Returns: 42 | file-like: 43 | self.fileobj_r 44 | """ 45 | return self.fileobj_r 46 | 47 | def writer(self): 48 | """ 49 | Returns fileobj_w 50 | 51 | Returns: 52 | file-like: 53 | self.fileobj_w 54 | """ 55 | return self.fileobj_w 56 | 57 | def close_reader(self): 58 | """Closes self.fileobj_r if it's not None, then set it to None""" 59 | if self.fileobj_r is not None: 60 | self.fileobj_r.close() 61 | self.fileobj_r = None 62 | 63 | def close_writer(self): 64 | """Closes self.fileobj_w if it's not None, then set it to None""" 65 | if self.fileobj_w is not None: 66 | self.fileobj_w.close() 67 | self.fileobj_w = None 68 | 69 | def close(self): 70 | """Calls self.close_reader() and self.close_writer()""" 71 | self.close_reader() 72 | self.close_writer() 73 | 74 | def __enter__(self): 75 | return self 76 | 77 | def __exit__(self, exc_type, exc_val, exc_tb): 78 | self.close() 79 | 80 | @export 81 | class PipeStream(Stream): 82 | """ 83 | Creates an os.pipe2() suitable for communicating between processes 84 | 85 | Args: 86 | binary (boolean): Whether the streams should be opened in binary mode 87 | (Default value = False). 88 | flags (int): Flags to pass to os.pipe2 in addition to os.O_CLOEXEC 89 | (Default value = 0). 90 | user (None, int, str): The user that will own the pipe. If user is an 91 | int, the file will be chown'd to the user whose uid=user. If user 92 | is an str, the file will be chown'd to the user whose name=user. 93 | group (None, int, str): The group that will own the pipe. If group is 94 | an int, the file will be chown'd to the group whose gid=group. If 95 | group is an str, the file will be chown'd to the group whose 96 | name=group. 97 | """ 98 | def __init__(self, binary=False, flags=0): 99 | self.pipe_r, self.pipe_w = os.pipe2(os.O_CLOEXEC | flags) 100 | 101 | os.set_inheritable(self.pipe_r, True) 102 | os.set_inheritable(self.pipe_w, True) 103 | 104 | if binary: 105 | fileobj_r = os.fdopen(self.pipe_r, 'rb') 106 | fileobj_w = os.fdopen(self.pipe_w, 'wb') 107 | else: 108 | fileobj_r = os.fdopen(self.pipe_r, 'r') 109 | fileobj_w = os.fdopen(self.pipe_w, 'w') 110 | 111 | super().__init__(fileobj_r=fileobj_r, fileobj_w=fileobj_w) 112 | 113 | @export 114 | class FileStream(Stream): 115 | """ 116 | Opens a file for reading or writing 117 | 118 | Args: 119 | path (str or pathlib.Path): The path of the file to open. 120 | mode (str): Mode passed to open() when opening the file. If mode 121 | contains 'r' then the file will be opened for reading. If the mode 122 | contains 'w' or 'a' it will be opened for writing. 123 | user (None, int, str): The user that will own the file (if 'w' in 124 | mode). If user is an int, the file will be chown'd to the user 125 | whose uid=user. If user is an str, the file will be chown'd to the 126 | user whose name=user. 127 | group (None, int, str): The group that will own the file (if 'w' in 128 | mode). If group is an int, the file will be chown'd to the group 129 | whose gid=group. If group is an str, the file will be chown'd to the 130 | group whose name=group. 
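A sketch of read- and write-mode FileStreams; the temporary path is illustrative:

```python
from shtk.Stream import FileStream

# Write-mode FileStreams only populate fileobj_w; read mode only fileobj_r.
with FileStream('/tmp/shtk_example.txt', 'w') as out_stream:
    out_stream.writer().write('hello\n')

with FileStream('/tmp/shtk_example.txt', 'r') as in_stream:
    print(in_stream.reader().read())   # -> 'hello\n'
```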
131 | """ 132 | def __init__(self, path, mode, user=None, group=None): 133 | self.path = pathlib.Path(path) 134 | 135 | if 'r' in mode: 136 | fileobj_r = open(self.path.resolve(), mode) 137 | else: 138 | fileobj_r = None 139 | 140 | if 'w' in mode or 'a' in mode: 141 | fileobj_w = open(self.path.resolve(), mode) 142 | else: 143 | fileobj_w = None 144 | 145 | if user is not None: 146 | if isinstance(user, str): 147 | uid = pwd.getpwnam(user).pw_uid 148 | elif isinstance(user, int): 149 | uid = user 150 | else: 151 | raise ValueError("argument user must be int, str, or none") 152 | else: 153 | uid = os.getuid() 154 | 155 | if group is not None: 156 | if isinstance(group, str): 157 | gid = grp.getgrnam(group).gr_gid 158 | elif isinstance(group, int): 159 | gid = group 160 | else: 161 | raise ValueError("argument group must be int, str, or none") 162 | else: 163 | gid = os.getgid() 164 | 165 | if user is not None or group is not None: 166 | # Only chown the writable files that we create 167 | if (fileobj_w is not None) and ('w' in mode): 168 | os.fchown(fileobj_w.fileno(), uid, gid) 169 | 170 | super().__init__(fileobj_r, fileobj_w) 171 | 172 | @export 173 | class NullStream(Stream): 174 | """ 175 | Opens os.devnull for both reading and writing 176 | """ 177 | 178 | def __init__(self): 179 | fileobj_r = open(os.devnull, 'r', encoding=None) 180 | fileobj_w = open(os.devnull, 'w', encoding=None) 181 | 182 | super().__init__(fileobj_r=fileobj_r, fileobj_w=fileobj_w) 183 | 184 | @export 185 | class ManualStream(Stream): 186 | """ 187 | Uses provided file-like objects for fileobj_r and fileobj_w. 188 | 189 | Note: 190 | The files will not be manually closed even when close_reader() or 191 | close_writer() are called. Closing the files is the responsibility of 192 | the caller. 193 | 194 | Args: 195 | fileobj_r (file-like): The file-like object to use for self.fileobj_r. 196 | fileobj_w (file-like): The file-like object to use for self.fileobj_w. 197 | 198 | Attributes: 199 | close_r (boolean): Whether the reader should be closed when 200 | close_reader() is called. 201 | close_w (boolean): Whether the writer should be closed when 202 | close_writer() is called. 203 | """ 204 | def __init__(self, fileobj_r=None, fileobj_w=None): 205 | self.close_r = fileobj_r is None 206 | self.close_w = fileobj_w is None 207 | 208 | super().__init__(fileobj_r=fileobj_r, fileobj_w=fileobj_w) 209 | 210 | def close_reader(self): 211 | """ 212 | Close the reader only if it wasn't provided at instantiation. 213 | """ 214 | if self.close_r: 215 | super().close_reader() 216 | 217 | def close_writer(self): 218 | """ 219 | Close the writer only if it wasn't provided at instantiation. 220 | """ 221 | if self.close_w: 222 | super().close_writer() 223 | -------------------------------------------------------------------------------- /shtk/StreamFactory.py: -------------------------------------------------------------------------------- 1 | """ 2 | StreamFactory instances are templates that instantiate corresponding instances 3 | of a corresponding Stream class. 
4 | """ 5 | 6 | import abc 7 | import pathlib 8 | 9 | from .Stream import * # pylint: disable=unused-wildcard-import 10 | from .util import export # pylint: disable=unused-import 11 | 12 | __all__ = [] 13 | 14 | @export 15 | class StreamFactory(abc.ABC): 16 | """ 17 | Base class for templates creating associated Stream instances 18 | """ 19 | 20 | @abc.abstractmethod 21 | def build(self, job): 22 | """ 23 | Instantiates the Stream instance 24 | 25 | Args: 26 | job (Job): job to use for current working directory and environment 27 | variables. 28 | 29 | Returns: 30 | Stream: 31 | The constructed Stream instance. 32 | """ 33 | 34 | @export 35 | class PipeStreamFactory(StreamFactory): 36 | """ 37 | Creates a template for PipeStream instances 38 | 39 | Args: 40 | flags (int): flags to pass to PipeStream constructor (Default value = 41 | 0). 42 | 43 | Attributes: 44 | flags (int): flags to pass to PipeStream constructor 45 | """ 46 | def __init__(self, flags=0): 47 | super().__init__() 48 | self.flags = flags 49 | 50 | def build(self, job): 51 | """ 52 | Instantiates the PipeStream instance 53 | 54 | Args: 55 | job (Job): job to use for current working directory and environment 56 | variables. 57 | 58 | Returns: 59 | PipeStream: 60 | The constructed PipeStream instance. 61 | """ 62 | return PipeStream(flags=self.flags) 63 | 64 | @export 65 | class FileStreamFactory(StreamFactory): 66 | """ 67 | Creates a template for FileStream instances 68 | 69 | Args: 70 | partial_path (str or pathlib.Path): The absolute or job.cwd relative 71 | path to the file. 72 | mode (str): The mode to pass to open() when instantiating the 73 | FileStream. 74 | 75 | Attributes: 76 | partial_path (pathlib.Path): The absolute or job.cwd relative 77 | path to the file. 78 | mode (str): The mode to pass to open() when instantiating the 79 | FileStream. 80 | """ 81 | def __init__(self, partial_path, mode): 82 | super().__init__() 83 | self.partial_path = pathlib.Path(partial_path) 84 | self.mode = mode 85 | 86 | def build(self, job): 87 | """ 88 | Instantiates the FileStream instance 89 | 90 | Args: 91 | job (Job): job to use for current working directory and environment 92 | variables. 93 | 94 | Returns: 95 | FileStream: 96 | The constructed FileStream instance. 97 | """ 98 | 99 | if self.partial_path.is_absolute(): 100 | return FileStream(self.partial_path, self.mode, user=job.user, group=job.group) 101 | else: 102 | return FileStream( 103 | job.cwd / self.partial_path, 104 | mode=self.mode, 105 | user=job.user, 106 | group=job.group 107 | ) 108 | 109 | @export 110 | class NullStreamFactory(StreamFactory): 111 | """ 112 | Creates a template for NullStream instances 113 | """ 114 | def build(self, job): 115 | """ 116 | Instantiates the NullStream instance 117 | 118 | Args: 119 | job (Job): job to use for current working directory and environment 120 | variables. 121 | 122 | Returns: 123 | NullStream: 124 | The constructed NullStream instance. 125 | """ 126 | return NullStream() 127 | 128 | @export 129 | class ManualStreamFactory(StreamFactory): 130 | """ 131 | Creates a template for ManualStream instances 132 | 133 | Args: 134 | fileobj_r (file-like or None): A file-like object suitable for reading. 135 | None implies os.devnull should be used (Default value = None). 136 | fileobj_w (file-like or None): A file-like object suitable for writing. 137 | None implies os.devnull should be used (Default value = None). 138 | 139 | Attributes: 140 | fileobj_r (file-like or None): A file-like object suitable for reading. 
141 | None implies os.devnull should be used. 142 | fileobj_w (file-like or None): A file-like object suitable for writing. 143 | None implies os.devnull should be used. 144 | """ 145 | def __init__(self, fileobj_r=None, fileobj_w=None): 146 | self.fileobj_r = fileobj_r 147 | self.fileobj_w = fileobj_w 148 | 149 | def build(self, job): 150 | """ 151 | Instantiates the ManualStream instance 152 | 153 | Args: 154 | job (Job): job to use for current working directory and environment 155 | variables. 156 | 157 | Returns: 158 | ManualStream: 159 | The constructed ManualStream instance. 160 | """ 161 | return ManualStream(fileobj_r=self.fileobj_r, fileobj_w=self.fileobj_w) 162 | -------------------------------------------------------------------------------- /shtk/_AsyncUtil.py: -------------------------------------------------------------------------------- 1 | "Tools to make using asyncio a little less complicated" 2 | 3 | class AsyncHelper: 4 | "Helps manage running programs while communicating with them" 5 | def __init__(self, event_loop): 6 | self.event_loop = event_loop 7 | self.task_count = 0 8 | self.completed_tasks = set() 9 | 10 | def mark_completed(self, task_id): 11 | "Mark a task as completed" 12 | num_prior = len(self.completed_tasks) 13 | self.completed_tasks.add(task_id) 14 | num_post = len(self.completed_tasks) 15 | if num_post == num_prior: 16 | raise ValueError(f"Task {task_id} completed twice") 17 | 18 | if num_post == self.task_count: 19 | self.event_loop.stop() 20 | 21 | async def _manage_task(self, task_id, coro): 22 | "Wait on a task, then mark it completed" 23 | try: 24 | ret = await coro 25 | self.mark_completed(task_id) 26 | return ret 27 | except: 28 | self.event_loop.stop() 29 | raise 30 | 31 | def create_task(self, coro, *, name=None): 32 | "Run a task" 33 | task_id = self.task_count 34 | self.task_count += 1 35 | return self.event_loop.create_task(self._manage_task(task_id, coro), name=name) 36 | 37 | def _manage_reader(self, task_id, fd_read, callback, *args): 38 | "Run a callback, then mark its task completed" 39 | try: 40 | ret = callback(fd_read, *args) 41 | self.event_loop.remove_reader(fd_read) 42 | self.mark_completed(task_id) 43 | return ret 44 | except: 45 | self.event_loop.stop() 46 | raise 47 | 48 | def add_reader(self, fd_read, callback, *args): 49 | "Add a reader callback" 50 | task_id = self.task_count 51 | self.task_count += 1 52 | return self.event_loop.add_reader( 53 | fd_read, self._manage_reader, task_id, fd_read, 54 | callback, *args 55 | ) 56 | 57 | def run(self): 58 | "Run the event loop until all tasks are completed" 59 | return self.event_loop.run_forever() 60 | -------------------------------------------------------------------------------- /shtk/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | SHTK is a package of resources for running, managing, and communicating with 3 | subprocess commands. 4 | """ 5 | 6 | from .Job import * 7 | from .PipelineNode import * 8 | from .PipelineNodeFactory import * 9 | from .Shell import * 10 | from .Stream import * 11 | from .StreamFactory import * 12 | 13 | from . 
import _version 14 | __version__ = _version.get_versions()['version'] 15 | -------------------------------------------------------------------------------- /shtk/test/Job/Job.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import pathlib 4 | import signal 5 | import sys 6 | import time 7 | import unittest 8 | 9 | from ...Job import Job, NonzeroExitCodeException 10 | from ...Stream import NullStream, FileStream 11 | from ...StreamFactory import NullStreamFactory, FileStreamFactory, PipeStreamFactory, ManualStreamFactory 12 | from ...PipelineNodeFactory import PipelineProcessFactory, PipelineChannelFactory 13 | from ...util import which, export, Pipe 14 | 15 | from ..test_util import register, TmpDirMixin 16 | 17 | __all__ = [] 18 | 19 | @export 20 | @register() 21 | class TestWait(TmpDirMixin): 22 | def runTest(self): 23 | cwd = pathlib.Path(self.tmpdir.name) 24 | input_file = cwd / "input.txt" 25 | output_file = cwd / "output.txt" 26 | message = "Hello World!" 27 | 28 | with open(input_file.resolve(), 'w') as fout: 29 | fout.write(message) 30 | 31 | stdin_factory = FileStreamFactory(input_file, 'r') 32 | stdout_factory = FileStreamFactory(output_file, 'w') 33 | null_factory = NullStreamFactory() 34 | 35 | cat1 = PipelineProcessFactory(which('cat')).stdin(stdin_factory) 36 | cat2 = PipelineProcessFactory(which('cat')).stdout(stdout_factory) 37 | false = PipelineProcessFactory(which('false')) 38 | 39 | job = Job(cat1 | cat2 | false) 40 | job.run( 41 | stdin_factory = null_factory, 42 | stdout_factory = null_factory, 43 | stderr_factory = null_factory 44 | ) 45 | 46 | self.assertEqual(job.wait(job.pipeline.left.left, exceptions=False), (0,)) 47 | self.assertEqual(job.wait(job.pipeline.left.right, exceptions=False), (0,)) 48 | self.assertEqual(job.wait(job.pipeline.right, exceptions=False), (1,)) 49 | 50 | self.assertTrue(output_file.exists()) 51 | 52 | with open(output_file.resolve(), 'r') as fin: 53 | observed = fin.read() 54 | 55 | self.assertEqual(message, observed) 56 | 57 | @export 58 | @register() 59 | class TestBuildWithFileStdinStdout(TmpDirMixin): 60 | def runTest(self): 61 | cwd = pathlib.Path(self.tmpdir.name) 62 | input_file = cwd / "input.txt" 63 | output_file = cwd / "output.txt" 64 | message = "Hello World!" 65 | 66 | with open(input_file.resolve(), 'w') as fout: 67 | fout.write(message) 68 | 69 | stdin_factory = FileStreamFactory(input_file, 'r') 70 | stdout_factory = FileStreamFactory(output_file, 'w') 71 | null_factory = NullStreamFactory() 72 | 73 | 74 | cat = PipelineProcessFactory(which('cat')) 75 | 76 | job = Job(cat) 77 | 78 | job.run( 79 | stdin_factory = stdin_factory, 80 | stdout_factory = stdout_factory, 81 | stderr_factory = null_factory 82 | ) 83 | 84 | return_codes = job.wait() 85 | 86 | self.assertEqual(return_codes, (0,)) 87 | 88 | self.assertTrue(output_file.exists()) 89 | with open(output_file.resolve(), 'r') as fin: 90 | observed = fin.read() 91 | 92 | self.assertEqual(message, observed) 93 | 94 | @export 95 | @register() 96 | class TestBuildWithPipeStdinStdout(TmpDirMixin): 97 | def runTest(self): 98 | cwd = pathlib.Path(self.tmpdir.name) 99 | input_file = cwd / "input.txt" 100 | output_file = cwd / "output.txt" 101 | message = "Hello World!" 
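
        # The pattern under test: a ManualStreamFactory wraps each end of a
        # util.Pipe, so the Job reads stdin from stdin_pipe and writes stdout
        # to stdout_pipe. The parent must close stdout_pipe's writer before
        # read(), otherwise read() would block waiting for EOF.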
102 | 103 | cat = PipelineProcessFactory(which('cat')) 104 | 105 | with Pipe() as stdout_pipe: 106 | with Pipe() as stdin_pipe: 107 | job = Job(cat) 108 | job.run( 109 | stdin_factory = ManualStreamFactory(fileobj_r=stdin_pipe.reader), 110 | stdout_factory = ManualStreamFactory(fileobj_w=stdout_pipe.writer), 111 | stderr_factory = NullStreamFactory() 112 | ) 113 | 114 | stdin_pipe.write(message) 115 | 116 | stdout_pipe.close_writer() 117 | 118 | observed = stdout_pipe.read() 119 | 120 | return_codes = job.wait() 121 | 122 | self.assertEqual(return_codes, (0,)) 123 | 124 | self.assertEqual(message, observed) 125 | 126 | @export 127 | @register() 128 | class TestBuildWithNonexistentFileNoException(TmpDirMixin): 129 | def runTest(self): 130 | cwd = pathlib.Path(self.tmpdir.name) 131 | input_file = cwd / "input.txt" 132 | 133 | cat = PipelineProcessFactory(which('cat')) 134 | 135 | with Pipe() as stderr_pipe: 136 | job = Job(cat(input_file)) 137 | 138 | self.assertIsNone(job.stdin) 139 | self.assertIsNone(job.stdout) 140 | self.assertIsNone(job.stderr) 141 | 142 | job.run( 143 | stdin_factory = NullStreamFactory(), 144 | stdout_factory = NullStreamFactory(), 145 | stderr_factory = ManualStreamFactory(fileobj_w=stderr_pipe.writer) 146 | ) 147 | 148 | stderr_pipe.close_writer() 149 | 150 | return_codes = job.wait(exceptions=False) 151 | 152 | observed = stderr_pipe.read() 153 | 154 | self.assertEqual(return_codes, (1,)) 155 | self.assertIn('No such file or directory', observed) 156 | 157 | @export 158 | @register() 159 | class TestBuildWithNonexistentFileWithException(TmpDirMixin): 160 | def runTest(self): 161 | cwd = pathlib.Path(self.tmpdir.name) 162 | input_file = cwd / "non\\existent" 163 | 164 | cat = PipelineProcessFactory(which('cat')) 165 | 166 | with Pipe() as stderr_pipe: 167 | job = Job(cat(input_file)) 168 | 169 | self.assertIsNone(job.stdin) 170 | self.assertIsNone(job.stdout) 171 | self.assertIsNone(job.stderr) 172 | 173 | job.run( 174 | stdin_factory = NullStreamFactory(), 175 | stdout_factory = NullStreamFactory(), 176 | stderr_factory = ManualStreamFactory(fileobj_w=stderr_pipe.writer) 177 | ) 178 | 179 | stderr_pipe.close_writer() 180 | 181 | observed = stderr_pipe.read() 182 | 183 | self.assertIn('No such file or directory', observed) 184 | 185 | with self.assertRaises(NonzeroExitCodeException): 186 | job.wait() 187 | 188 | @export 189 | @register() 190 | class TestJobTerminate(TmpDirMixin): 191 | def runTest(self): 192 | cwd = pathlib.Path(self.tmpdir.name) 193 | 194 | job = Job( 195 | PipelineChannelFactory( 196 | PipelineProcessFactory( 197 | which('sleep') 198 | )('1'), 199 | PipelineProcessFactory( 200 | which('sleep') 201 | )('10') 202 | ) 203 | ) 204 | 205 | job.run( 206 | stdin_factory = NullStreamFactory(), 207 | stdout_factory = NullStreamFactory(), 208 | stderr_factory = NullStreamFactory() 209 | ) 210 | 211 | time.sleep(1.1) 212 | 213 | job.terminate() 214 | 215 | return_codes = job.wait(exceptions=False) 216 | 217 | self.assertEqual(return_codes, (0, -signal.SIGTERM)) 218 | 219 | @export 220 | @register() 221 | class TestJobKill(TmpDirMixin): 222 | def runTest(self): 223 | cwd = pathlib.Path(self.tmpdir.name) 224 | 225 | job = Job( 226 | PipelineChannelFactory( 227 | PipelineProcessFactory( 228 | which('sleep') 229 | )('1'), 230 | PipelineProcessFactory( 231 | which('sleep') 232 | )('10') 233 | ) 234 | ) 235 | 236 | job.run( 237 | stdin_factory = NullStreamFactory(), 238 | stdout_factory = NullStreamFactory(), 239 | stderr_factory = NullStreamFactory() 240 | ) 241 
| 242 | time.sleep(1.1) 243 | 244 | job.kill() 245 | 246 | return_codes = job.wait(exceptions=False) 247 | 248 | self.assertEqual(return_codes, (0, -signal.SIGKILL)) 249 | -------------------------------------------------------------------------------- /shtk/test/Job/__init__.py: -------------------------------------------------------------------------------- 1 | from . import Job 2 | -------------------------------------------------------------------------------- /shtk/test/Operators/Operators.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jroose/shtk/bfbb2f25aac4c470c30286f0a4f39f8a766c75c2/shtk/test/Operators/Operators.py -------------------------------------------------------------------------------- /shtk/test/Operators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jroose/shtk/bfbb2f25aac4c470c30286f0a4f39f8a766c75c2/shtk/test/Operators/__init__.py -------------------------------------------------------------------------------- /shtk/test/PipelineNode/PipelineChannel.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import contextlib 3 | import os 4 | import pathlib 5 | import signal 6 | import sys 7 | import time 8 | import unittest 9 | 10 | from ...Stream import Stream, NullStream, FileStream, PipeStream, ManualStream 11 | from ...PipelineNode import PipelineProcess, PipelineChannel 12 | from ...util import which, export, Pipe 13 | 14 | from ..test_util import register, TmpDirMixin 15 | 16 | __all__ = [] 17 | 18 | @export 19 | @register() 20 | class TestCreateAndWait(TmpDirMixin): 21 | def runTest(self): 22 | cwd = pathlib.Path(self.tmpdir.name) 23 | cat = which('cat') 24 | message = "Hello World!" 
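
        # Wiring: in_pipe -> cat1 -> p2 -> cat2 -> out_pipe. PipelineChannel
        # links the two PipelineProcess nodes; p2 is the PipeStream carrying
        # cat1's stdout into cat2's stdin, and both processes share a single
        # NullStream for stderr.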
25 | event_loop = asyncio.new_event_loop() 26 | 27 | async def run_and_wait(): 28 | with Pipe() as out_pipe, ManualStream(fileobj_w=out_pipe.writer) as p3, NullStream() as null_stream: 29 | with Pipe() as in_pipe, ManualStream(fileobj_r=in_pipe.reader) as p1, PipeStream() as p2: 30 | cat1 = await PipelineProcess.create( 31 | event_loop, 32 | cwd = cwd.resolve(), 33 | env = {}, 34 | args = [cat], 35 | stdin_stream = p1, 36 | stdout_stream = p2, 37 | stderr_stream = null_stream 38 | ) 39 | 40 | cat2 = await PipelineProcess.create( 41 | event_loop, 42 | cwd = cwd.resolve(), 43 | env = {}, 44 | args = [cat], 45 | stdin_stream = p2, 46 | stdout_stream = p3, 47 | stderr_stream = null_stream 48 | ) 49 | 50 | channel = await PipelineChannel.create( 51 | event_loop, 52 | left = cat1, 53 | right = cat2 54 | ) 55 | 56 | in_pipe.write(message) 57 | in_pipe.writer.close() 58 | 59 | out_pipe.close_writer() 60 | 61 | stdout_result = out_pipe.read() 62 | 63 | returncodes = await channel.wait_async() 64 | 65 | return channel, returncodes, stdout_result 66 | 67 | channel, returncodes, stdout_result = event_loop.run_until_complete(run_and_wait()) 68 | 69 | processes = [ 70 | channel.left.proc, 71 | channel.right.proc 72 | ] 73 | 74 | self.assertEqual([rc for rc in returncodes], [0, 0]) 75 | self.assertEqual([p.returncode for p in processes], [0, 0]) 76 | self.assertEqual(stdout_result, message) 77 | 78 | @export 79 | @register() 80 | class TestTerminate(TmpDirMixin): 81 | def runTest(self): 82 | cwd = pathlib.Path(self.tmpdir.name) 83 | cat = which('cat') 84 | sleep = which('sleep') 85 | event_loop = asyncio.new_event_loop() 86 | 87 | async def run_and_wait(): 88 | with NullStream() as null_stream, PipeStream() as p2: 89 | sleep1 = await PipelineProcess.create( 90 | event_loop, 91 | cwd = cwd.resolve(), 92 | env = {}, 93 | args = [sleep, "1"], 94 | stdin_stream = null_stream, 95 | stdout_stream = p2, 96 | stderr_stream = null_stream 97 | ) 98 | 99 | cat2 = await PipelineProcess.create( 100 | event_loop, 101 | cwd = cwd.resolve(), 102 | env = {}, 103 | args = [cat], 104 | stdin_stream = p2, 105 | stdout_stream = null_stream, 106 | stderr_stream = null_stream 107 | ) 108 | 109 | channel = await PipelineChannel.create( 110 | event_loop, 111 | left = sleep1, 112 | right = cat2 113 | ) 114 | 115 | return channel 116 | 117 | channel = event_loop.run_until_complete(run_and_wait()) 118 | channel.terminate() 119 | returncodes = channel.wait() 120 | 121 | processes = [ 122 | channel.left.proc, 123 | channel.right.proc 124 | ] 125 | 126 | EXPRC = -signal.SIGTERM 127 | self.assertEqual([rc for rc in returncodes], [EXPRC, EXPRC]) 128 | self.assertEqual([p.returncode for p in processes], [EXPRC, EXPRC]) 129 | 130 | @export 131 | @register() 132 | class TestKill(TmpDirMixin): 133 | def runTest(self): 134 | cwd = pathlib.Path(self.tmpdir.name) 135 | cat = which('cat') 136 | sleep = which('sleep') 137 | event_loop = asyncio.new_event_loop() 138 | 139 | async def run_and_wait(): 140 | with NullStream() as null_stream, PipeStream() as p2: 141 | sleep1 = await PipelineProcess.create( 142 | event_loop, 143 | cwd = cwd.resolve(), 144 | env = {}, 145 | args = [sleep, "1"], 146 | stdin_stream = null_stream, 147 | stdout_stream = p2, 148 | stderr_stream = null_stream 149 | ) 150 | 151 | cat2 = await PipelineProcess.create( 152 | event_loop, 153 | cwd = cwd.resolve(), 154 | env = {}, 155 | args = [cat], 156 | stdin_stream = p2, 157 | stdout_stream = null_stream, 158 | stderr_stream = null_stream 159 | ) 160 | 161 | channel = await 
PipelineChannel.create( 162 | event_loop, 163 | left = sleep1, 164 | right = cat2 165 | ) 166 | 167 | return channel 168 | 169 | channel = event_loop.run_until_complete(run_and_wait()) 170 | channel.kill() 171 | returncodes = channel.wait() 172 | 173 | processes = [ 174 | channel.left.proc, 175 | channel.right.proc 176 | ] 177 | 178 | EXPRC = -signal.SIGKILL 179 | self.assertEqual([rc for rc in returncodes], [EXPRC, EXPRC]) 180 | self.assertEqual([p.returncode for p in processes], [EXPRC, EXPRC]) 181 | 182 | @export 183 | @register() 184 | class TestKillPartial(TmpDirMixin): 185 | def runTest(self): 186 | cwd = pathlib.Path(self.tmpdir.name) 187 | test_file = 'tmp.txt' 188 | cat = which('cat') 189 | sleep = which('sleep') 190 | touch = which('touch') 191 | event_loop = asyncio.new_event_loop() 192 | 193 | async def run_and_wait(): 194 | with NullStream() as null_stream, PipeStream() as p2: 195 | sleep1 = await PipelineProcess.create( 196 | event_loop, 197 | cwd = cwd.resolve(), 198 | env = {}, 199 | args = [sleep, "1"], 200 | stdin_stream = null_stream, 201 | stdout_stream = p2, 202 | stderr_stream = null_stream 203 | ) 204 | 205 | touch2 = await PipelineProcess.create( 206 | event_loop, 207 | cwd = cwd.resolve(), 208 | env = {}, 209 | args = [touch, test_file], 210 | stdin_stream = p2, 211 | stdout_stream = null_stream, 212 | stderr_stream = null_stream 213 | ) 214 | 215 | channel = await PipelineChannel.create( 216 | event_loop, 217 | left = sleep1, 218 | right = touch2 219 | ) 220 | 221 | time.sleep(0.1) 222 | 223 | return channel 224 | 225 | channel = event_loop.run_until_complete(run_and_wait()) 226 | channel.kill() 227 | returncodes = channel.wait() 228 | 229 | processes = [ 230 | channel.left.proc, 231 | channel.right.proc 232 | ] 233 | 234 | EXPRC = -signal.SIGKILL 235 | self.assertEqual([rc for rc in returncodes], [EXPRC, 0]) 236 | self.assertEqual([p.returncode for p in processes], [EXPRC, 0]) 237 | -------------------------------------------------------------------------------- /shtk/test/PipelineNode/PipelineProcess.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import importlib 3 | import importlib.resources 4 | import inspect 5 | import os 6 | import pathlib 7 | import pwd 8 | import signal 9 | import sys 10 | import time 11 | import unittest 12 | 13 | from ...Stream import NullStream, PipeStream, ManualStream 14 | from ...PipelineNode import PipelineProcess 15 | from ...util import which, export, Pipe 16 | from ...Job import Job 17 | 18 | from ..test_util import register, TmpDirMixin 19 | 20 | __all__ = [] 21 | 22 | @export 23 | @register() 24 | class TestConstruct(TmpDirMixin): 25 | def runTest(self): 26 | cwd = pathlib.Path(self.tmpdir.name).resolve() 27 | args = [which('touch'), "tmp.txt"] 28 | event_loop = asyncio.new_event_loop() 29 | 30 | with NullStream() as null_stream: 31 | process = PipelineProcess( 32 | event_loop, 33 | cwd = cwd, 34 | env = {}, 35 | args = args, 36 | stdin_stream = null_stream, 37 | stdout_stream = null_stream, 38 | stderr_stream = null_stream 39 | ) 40 | 41 | self.assertEqual(process.cwd, cwd) 42 | self.assertEqual(process.args, args) 43 | 44 | str(process) 45 | repr(process) 46 | 47 | @export 48 | @register() 49 | class TestCreateAndWaitAsync(TmpDirMixin): 50 | def runTest(self): 51 | cwd = pathlib.Path(self.tmpdir.name) 52 | test_file = cwd / 'tmp.txt' 53 | args = [which('touch'), test_file] 54 | event_loop = asyncio.new_event_loop() 55 | 56 | async def run_and_wait(): 57 | with 
NullStream() as null_stream: 58 | process = await PipelineProcess.create( 59 | event_loop, 60 | cwd = cwd.resolve(), 61 | env = {}, 62 | args = args, 63 | stdin_stream = null_stream, 64 | stdout_stream = null_stream, 65 | stderr_stream = null_stream 66 | ) 67 | 68 | await process.wait_async() 69 | 70 | return process 71 | 72 | process = event_loop.run_until_complete(run_and_wait()) 73 | 74 | self.assertEqual(process.proc.returncode, 0) 75 | self.assertTrue(test_file.exists()) 76 | 77 | @export 78 | @register() 79 | class TestCreatePoll(TmpDirMixin): 80 | def runTest(self): 81 | cwd = pathlib.Path(self.tmpdir.name) 82 | test_file = cwd / 'tmp.txt' 83 | args = [which('touch'), test_file] 84 | event_loop = asyncio.new_event_loop() 85 | 86 | async def run_and_wait(event_loop): 87 | with NullStream() as null_stream: 88 | process = await PipelineProcess.create( 89 | event_loop, 90 | cwd = cwd.resolve(), 91 | env = {}, 92 | args = args, 93 | stdin_stream = null_stream, 94 | stdout_stream = null_stream, 95 | stderr_stream = null_stream 96 | ) 97 | 98 | time.sleep(0.1) 99 | 100 | return process 101 | 102 | process = event_loop.run_until_complete(run_and_wait(event_loop)) 103 | 104 | poll_rc = process.poll() 105 | poll2_rc = process.poll() 106 | 107 | self.assertEqual(process.proc.returncode, 0) 108 | self.assertEqual(poll_rc, [process.proc.returncode]) 109 | self.assertEqual(poll2_rc, [process.proc.returncode]) 110 | self.assertTrue(test_file.exists()) 111 | 112 | @export 113 | @register() 114 | class TestCreateTerminatePoll(TmpDirMixin): 115 | def runTest(self): 116 | cwd = pathlib.Path(self.tmpdir.name) 117 | args = [which('sleep'), "1"] 118 | event_loop = asyncio.new_event_loop() 119 | 120 | async def run_and_wait(event_loop): 121 | with NullStream() as null_stream: 122 | process = await PipelineProcess.create( 123 | event_loop, 124 | cwd = cwd.resolve(), 125 | env = {}, 126 | args = args, 127 | stdin_stream = null_stream, 128 | stdout_stream = null_stream, 129 | stderr_stream = null_stream 130 | ) 131 | 132 | return process 133 | 134 | process = event_loop.run_until_complete(run_and_wait(event_loop)) 135 | 136 | process.terminate() 137 | 138 | poll_rc = process.poll(0.1) 139 | 140 | self.assertEqual(process.proc.returncode, -signal.SIGTERM) 141 | self.assertEqual(poll_rc, [-signal.SIGTERM]) 142 | 143 | self.assertEqual(process.wait(), [-signal.SIGTERM]) 144 | 145 | @export 146 | @register() 147 | class TestCreateKillPoll(TmpDirMixin): 148 | def runTest(self): 149 | cwd = pathlib.Path(self.tmpdir.name) 150 | args = [which('sleep'), "1"] 151 | event_loop = asyncio.new_event_loop() 152 | 153 | async def run_and_wait(event_loop): 154 | with NullStream() as null_stream: 155 | process = await PipelineProcess.create( 156 | event_loop, 157 | cwd = cwd.resolve(), 158 | env = {}, 159 | args = args, 160 | stdin_stream = null_stream, 161 | stdout_stream = null_stream, 162 | stderr_stream = null_stream 163 | ) 164 | 165 | return process 166 | 167 | process = event_loop.run_until_complete(run_and_wait(event_loop)) 168 | 169 | process.kill() 170 | 171 | poll_rc = process.poll(0.1) 172 | 173 | self.assertEqual(process.proc.returncode, -signal.SIGKILL) 174 | self.assertEqual(poll_rc, [-signal.SIGKILL]) 175 | 176 | self.assertEqual(process.wait(), [-signal.SIGKILL]) 177 | 178 | @export 179 | @register() 180 | class TestCreatePollFail(TmpDirMixin): 181 | def runTest(self): 182 | cwd = pathlib.Path(self.tmpdir.name) 183 | args = [which('sleep'), "1"] 184 | event_loop = asyncio.new_event_loop() 185 | 186 | async 
def run_and_wait(event_loop): 187 | with NullStream() as null_stream: 188 | process = await PipelineProcess.create( 189 | event_loop, 190 | cwd = cwd.resolve(), 191 | env = {}, 192 | args = args, 193 | stdin_stream = null_stream, 194 | stdout_stream = null_stream, 195 | stderr_stream = null_stream 196 | ) 197 | 198 | return process 199 | 200 | process = event_loop.run_until_complete(run_and_wait(event_loop)) 201 | 202 | poll_rc = process.poll() 203 | 204 | self.assertEqual(process.proc.returncode, None) 205 | self.assertEqual(poll_rc, [None]) 206 | 207 | self.assertEqual(process.wait(), [0]) 208 | 209 | @export 210 | @register() 211 | class TestCreateWithDifferentUser(TmpDirMixin): 212 | def setUp(self): 213 | super().setUp() 214 | 215 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 216 | raise unittest.SkipTest("Python version is less than 3.9") 217 | 218 | if os.getuid() != 0: 219 | raise unittest.SkipTest("Not running as root") 220 | 221 | def unless_key_error(fun): 222 | try: 223 | return fun() 224 | except KeyError: 225 | return None 226 | 227 | self.uid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_uid) 228 | 229 | if self.uid is None: 230 | raise unittest.SkipTest("No user exists with name 'nobody'") 231 | 232 | def runTest(self): 233 | cwd = pathlib.Path(self.tmpdir.name) 234 | args = [which('id'), '-u'] 235 | event_loop = asyncio.new_event_loop() 236 | 237 | async def run_and_wait(event_loop): 238 | with NullStream() as null_stream, PipeStream(None) as stdout_stream: 239 | process = await PipelineProcess.create( 240 | event_loop, 241 | cwd = cwd.resolve(), 242 | env = {}, 243 | args = args, 244 | stdin_stream = null_stream, 245 | stdout_stream = stdout_stream, 246 | stderr_stream = null_stream, 247 | user = 'nobody' 248 | ) 249 | stdout_stream.close_writer() 250 | 251 | await process.wait_async() 252 | return stdout_stream.reader().read() 253 | 254 | observed_uid = event_loop.run_until_complete(run_and_wait(event_loop)) 255 | self.assertEqual(observed_uid.strip(), str(self.uid)) 256 | 257 | @export 258 | @register() 259 | class TestCreateWithDifferentGroup(TmpDirMixin): 260 | def setUp(self): 261 | super().setUp() 262 | 263 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 264 | raise unittest.SkipTest("Python version is less than 3.9") 265 | 266 | if os.getuid() != 0: 267 | raise unittest.SkipTest("Not running as root") 268 | 269 | def unless_key_error(fun): 270 | try: 271 | return fun() 272 | except KeyError: 273 | return None 274 | 275 | self.gid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_gid) 276 | 277 | if self.gid is None: 278 | raise unittest.SkipTest("No group exists with name 'nobody'") 279 | 280 | def runTest(self): 281 | cwd = pathlib.Path(self.tmpdir.name) 282 | args = [which('id'), '-g'] 283 | event_loop = asyncio.new_event_loop() 284 | 285 | async def run_and_wait(event_loop): 286 | with NullStream() as null_stream, PipeStream(None) as stdout_stream: 287 | process = await PipelineProcess.create( 288 | event_loop, 289 | cwd = cwd.resolve(), 290 | env = {}, 291 | args = args, 292 | stdin_stream = null_stream, 293 | stdout_stream = stdout_stream, 294 | stderr_stream = null_stream, 295 | group = self.gid 296 | ) 297 | stdout_stream.close_writer() 298 | 299 | await process.wait_async() 300 | return stdout_stream.reader().read() 301 | 302 | observed_gid = event_loop.run_until_complete(run_and_wait(event_loop)) 303 | self.assertEqual(observed_gid.strip(), str(self.gid)) 304 | 305 | 306 | @export 307 | @register() 308 | 
class TestCreateWait(TmpDirMixin): 309 | def runTest(self): 310 | cwd = pathlib.Path(self.tmpdir.name) 311 | test_file = cwd / 'tmp.txt' 312 | args = [which('touch'), test_file] 313 | event_loop = asyncio.new_event_loop() 314 | 315 | async def run_and_wait(event_loop): 316 | with NullStream() as null_stream: 317 | process = await PipelineProcess.create( 318 | event_loop, 319 | cwd = cwd.resolve(), 320 | env = {}, 321 | args = args, 322 | stdin_stream = null_stream, 323 | stdout_stream = null_stream, 324 | stderr_stream = null_stream 325 | ) 326 | 327 | return process 328 | 329 | process = event_loop.run_until_complete(run_and_wait(event_loop)) 330 | 331 | wait_rc = process.wait() 332 | 333 | self.assertEqual(process.proc.returncode, 0) 334 | self.assertEqual(wait_rc, [process.proc.returncode]) 335 | self.assertTrue(test_file.exists()) 336 | 337 | @export 338 | @register() 339 | class TestEnvironmentVariableExists(TmpDirMixin): 340 | def runTest(self): 341 | cwd = pathlib.Path(self.tmpdir.name) 342 | from .. import test_util 343 | with importlib.resources.path(test_util.__package__, 'echo_env.py') as echo_env: 344 | args = [which('python3'), echo_env] 345 | message = 'Hello World!' 346 | event_loop = asyncio.new_event_loop() 347 | 348 | async def run_and_wait(): 349 | with Pipe() as pipe: 350 | with NullStream() as null_stream, ManualStream(fileobj_w=pipe.writer) as stdout_stream: 351 | process = await PipelineProcess.create( 352 | event_loop, 353 | cwd = cwd.resolve(), 354 | env = { 355 | 'A': 'wrong output', 356 | 'MESSAGE': message, 357 | 'Z': 'wrong output' 358 | }, 359 | args = [which('python3'), echo_env, "MESSAGE"], 360 | stdin_stream = null_stream, 361 | stdout_stream = stdout_stream, 362 | stderr_stream = null_stream 363 | ) 364 | pipe.close_writer() 365 | observed = pipe.read() 366 | 367 | await process.wait_async() 368 | 369 | return process, observed 370 | 371 | process, observed = event_loop.run_until_complete(run_and_wait()) 372 | 373 | self.assertEqual(process.proc.returncode, 0) 374 | self.assertEqual(message, observed) 375 | -------------------------------------------------------------------------------- /shtk/test/PipelineNode/__init__.py: -------------------------------------------------------------------------------- 1 | from . import PipelineProcess 2 | from . import PipelineChannel 3 | -------------------------------------------------------------------------------- /shtk/test/PipelineNodeFactory/PipelineChannelFactory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import pathlib 4 | import sys 5 | import unittest 6 | 7 | from ...Stream import NullStream, FileStream 8 | from ...StreamFactory import NullStreamFactory, FileStreamFactory 9 | from ...PipelineNodeFactory import PipelineProcessFactory 10 | from ...Job import Job 11 | from ...util import which, export 12 | 13 | from ..test_util import register, TmpDirMixin 14 | 15 | __all__ = [] 16 | 17 | async def build_and_wait(factory, *args, **kwargs): 18 | obj = await factory.build(*args, **kwargs) 19 | return await obj.wait_async() 20 | 21 | @export 22 | @register() 23 | class TestBuildWithStdinStdout(TmpDirMixin): 24 | def runTest(self): 25 | cwd = pathlib.Path(self.tmpdir.name) 26 | input_file = cwd / "input.txt" 27 | output_file = cwd / "output.txt" 28 | message = "Hello World!" 
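
        # (ppf1 | ppf2) uses the overloaded | operator to build a
        # PipelineChannelFactory; the chained .stdin()/.stdout()/.stderr()
        # calls then attach redirections to the channel as a whole, with
        # stdin feeding the first cat and stdout draining the last.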
29 | 30 | with open(input_file.resolve(), 'w') as fout: 31 | fout.write(message) 32 | 33 | null_stream = NullStreamFactory() 34 | 35 | ppf1 = PipelineProcessFactory(which('cat'), cwd='./') 36 | ppf2 = PipelineProcessFactory(which('cat'), cwd='./') 37 | 38 | ppf_channel = (ppf1 | ppf2).stdin(input_file.resolve()).stdout(output_file.resolve()).stderr(null_stream) 39 | job = Job(ppf_channel, cwd=cwd) 40 | 41 | return_codes = job.event_loop.run_until_complete(build_and_wait( 42 | ppf_channel, 43 | job 44 | )) 45 | 46 | self.assertEqual(return_codes, [0, 0]) 47 | 48 | self.assertTrue(output_file.exists()) 49 | with open(output_file.resolve(), 'r') as fin: 50 | observed = fin.read() 51 | 52 | self.assertEqual(message, observed) 53 | -------------------------------------------------------------------------------- /shtk/test/PipelineNodeFactory/PipelineProcessFactory.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import pathlib 4 | import sys 5 | import unittest 6 | 7 | from ...Stream import NullStream, FileStream 8 | from ...StreamFactory import NullStreamFactory, FileStreamFactory 9 | from ...PipelineNodeFactory import PipelineProcessFactory 10 | from ...util import which, export 11 | from ...Job import Job 12 | 13 | from ..test_util import register, TmpDirMixin 14 | 15 | __all__ = [] 16 | 17 | async def build_and_wait(factory, *args, **kwargs): 18 | obj = await factory.build(*args, **kwargs) 19 | return await obj.wait_async() 20 | 21 | @export 22 | @register() 23 | class TestBuild(TmpDirMixin): 24 | def runTest(self): 25 | cwd = pathlib.Path(self.tmpdir.name) 26 | tmp_file = cwd / "tmp.txt" 27 | 28 | null_stream = NullStream() 29 | 30 | ppf = PipelineProcessFactory(which('touch'), cwd='./') 31 | ppf = ppf(tmp_file.resolve()) 32 | job = Job(None, cwd=cwd) 33 | 34 | return_codes = job.event_loop.run_until_complete(build_and_wait( 35 | ppf, 36 | job, 37 | stdin_stream=null_stream, 38 | stdout_stream=null_stream, 39 | stderr_stream=null_stream 40 | )) 41 | 42 | self.assertEqual(return_codes, [0]) 43 | self.assertTrue(tmp_file.exists()) 44 | 45 | @export 46 | @register() 47 | class TestBuildWithExplicitStreamsStdinStdout(TmpDirMixin): 48 | def runTest(self): 49 | cwd = pathlib.Path(self.tmpdir.name) 50 | input_file = cwd / "input.txt" 51 | output_file = cwd / "output.txt" 52 | message = "Hello World!" 
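
        # Here concrete Stream objects (FileStream/NullStream) are passed
        # straight to build() as stdin_stream/stdout_stream/stderr_stream,
        # bypassing the StreamFactory templates exercised by the other tests
        # in this file.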
53 | 54 | with open(input_file.resolve(), 'w') as fout: 55 | fout.write(message) 56 | 57 | stdin_stream = FileStream(input_file.resolve(), 'r') 58 | stdout_stream = FileStream(output_file.resolve(), 'w') 59 | null_stream = NullStream() 60 | 61 | ppf = PipelineProcessFactory(which('cat'), cwd='./') 62 | job = Job(None, cwd=cwd) 63 | 64 | return_codes = job.event_loop.run_until_complete(build_and_wait( 65 | ppf, 66 | job, 67 | stdin_stream=stdin_stream, 68 | stdout_stream=stdout_stream, 69 | stderr_stream=null_stream 70 | )) 71 | 72 | self.assertEqual(return_codes, [0]) 73 | 74 | self.assertTrue(output_file.exists()) 75 | with open(output_file.resolve(), 'r') as fin: 76 | observed = fin.read() 77 | 78 | self.assertEqual(message, observed) 79 | 80 | @export 81 | @register() 82 | class TestBuildWithExplicitStreamsStderr(TmpDirMixin): 83 | def runTest(self): 84 | cwd = pathlib.Path(self.tmpdir.name) 85 | input_file = cwd / "input.txt" 86 | output_file = cwd / "output.txt" 87 | 88 | stderr_stream = FileStream(output_file.resolve(), 'w') 89 | null_stream = NullStream() 90 | 91 | ppf = PipelineProcessFactory(which('cat'), input_file.resolve(), cwd='./') 92 | job = Job(None, cwd=cwd) 93 | 94 | return_codes = job.event_loop.run_until_complete(build_and_wait( 95 | ppf, 96 | job, 97 | stdin_stream=null_stream, 98 | stdout_stream=null_stream, 99 | stderr_stream=stderr_stream 100 | )) 101 | 102 | self.assertEqual(return_codes, [1]) 103 | 104 | with open(output_file.resolve(), 'r') as fin: 105 | observed = fin.read() 106 | 107 | self.assertTrue('No such file or directory' in observed) 108 | 109 | @export 110 | @register() 111 | class TestBuildWithStreamFactoryStdinStdout(TmpDirMixin): 112 | def runTest(self): 113 | cwd = pathlib.Path(self.tmpdir.name) 114 | input_file = cwd / "input.txt" 115 | output_file = cwd / "output.txt" 116 | message = "Hello World!" 
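
        # Same cat round-trip as above, but the redirections are given as
        # FileStreamFactory templates; each factory's build(job) produces the
        # live FileStream when the pipeline is constructed.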
117 | 118 | with open(input_file.resolve(), 'w') as fout: 119 | fout.write(message) 120 | 121 | stdin_stream = FileStreamFactory(input_file.resolve(), 'r') 122 | stdout_stream = FileStreamFactory(output_file.resolve(), 'w') 123 | null_stream = None 124 | 125 | ppf = PipelineProcessFactory(which('cat'), cwd='./') 126 | job = Job(ppf, cwd=cwd) 127 | 128 | return_codes = job.event_loop.run_until_complete(build_and_wait( 129 | ppf.stdin(stdin_stream).stdout(stdout_stream).stderr(null_stream), 130 | job 131 | )) 132 | 133 | self.assertEqual(return_codes, [0]) 134 | 135 | self.assertTrue(output_file.exists()) 136 | with open(output_file.resolve(), 'r') as fin: 137 | observed = fin.read() 138 | 139 | self.assertEqual(message, observed) 140 | 141 | @export 142 | @register() 143 | class TestBuildWithStreamFactoryStderr(TmpDirMixin): 144 | def runTest(self): 145 | cwd = pathlib.Path(self.tmpdir.name) 146 | input_file = cwd / "input.txt" 147 | output_file = cwd / "output.txt" 148 | 149 | stderr_stream = FileStreamFactory(output_file.resolve(), 'w') 150 | null_stream = None 151 | 152 | ppf = PipelineProcessFactory(which('cat'), input_file.resolve(), cwd='./') 153 | job = Job(ppf, cwd=cwd) 154 | 155 | return_codes = job.event_loop.run_until_complete(build_and_wait( 156 | ppf.stdin(null_stream).stdout(null_stream).stderr(stderr_stream), 157 | job 158 | )) 159 | 160 | self.assertEqual(return_codes, [1]) 161 | 162 | with open(output_file.resolve(), 'r') as fin: 163 | observed = fin.read() 164 | 165 | self.assertTrue('No such file or directory' in observed) 166 | 167 | @export 168 | @register() 169 | class TestBuildWithFilePathStdinStdout(TmpDirMixin): 170 | def runTest(self): 171 | cwd = pathlib.Path(self.tmpdir.name) 172 | input_file = cwd / "input.txt" 173 | output_file = cwd / "output.txt" 174 | message = "Hello World!" 175 | 176 | with open(input_file.resolve(), 'w') as fout: 177 | fout.write(message) 178 | 179 | stdin = str(input_file.resolve()) 180 | stdout = str(output_file.resolve()) 181 | null = None 182 | 183 | ppf = PipelineProcessFactory(which('cat'), cwd='./') 184 | job = Job(ppf, cwd=cwd) 185 | 186 | return_codes = job.event_loop.run_until_complete(build_and_wait( 187 | ppf.stdin(stdin).stdout(stdout).stderr(null), 188 | job 189 | )) 190 | 191 | self.assertEqual(return_codes, [0]) 192 | 193 | self.assertTrue(output_file.exists()) 194 | with open(output_file.resolve(), 'r') as fin: 195 | observed = fin.read() 196 | 197 | self.assertEqual(message, observed) 198 | 199 | @export 200 | @register() 201 | class TestBuildWithFilePathStderr(TmpDirMixin): 202 | def runTest(self): 203 | cwd = pathlib.Path(self.tmpdir.name) 204 | input_file = cwd / "input.txt" 205 | output_file = cwd / "output.txt" 206 | 207 | stderr = str(output_file.resolve()) 208 | null = None 209 | 210 | ppf = PipelineProcessFactory(which('cat'), input_file.resolve(), cwd='./') 211 | job = Job(ppf, cwd=cwd) 212 | 213 | return_codes = job.event_loop.run_until_complete(build_and_wait( 214 | ppf.stdin(null).stdout(null).stderr(stderr), 215 | job 216 | )) 217 | 218 | self.assertEqual(return_codes, [1]) 219 | 220 | with open(output_file.resolve(), 'r') as fin: 221 | observed = fin.read() 222 | 223 | self.assertTrue('No such file or directory' in observed) 224 | 225 | @export 226 | @register() 227 | class TestBuildWithPathlibStdinStdout(TmpDirMixin): 228 | def runTest(self): 229 | cwd = pathlib.Path(self.tmpdir.name) 230 | input_file = cwd / "input.txt" 231 | output_file = cwd / "output.txt" 232 | message = "Hello World!" 
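
        # stdin and stdout are plain pathlib.Path objects here; .stdin() and
        # .stdout() accept raw paths as redirection targets, just as the
        # preceding test passes str paths.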
233 | 234 | with open(input_file.resolve(), 'w') as fout: 235 | fout.write(message) 236 | 237 | stdin = input_file.resolve() 238 | stdout = output_file.resolve() 239 | null = None 240 | 241 | ppf = PipelineProcessFactory(which('cat'), cwd='./') 242 | job = Job(ppf, cwd=cwd) 243 | 244 | return_codes = job.event_loop.run_until_complete(build_and_wait( 245 | ppf.stdin(stdin).stdout(stdout).stderr(null), 246 | job 247 | )) 248 | 249 | self.assertEqual(return_codes, [0]) 250 | 251 | self.assertTrue(output_file.exists()) 252 | with open(output_file.resolve(), 'r') as fin: 253 | observed = fin.read() 254 | 255 | self.assertEqual(message, observed) 256 | 257 | @export 258 | @register() 259 | class TestBuildWithPathlibStderr(TmpDirMixin): 260 | def runTest(self): 261 | cwd = pathlib.Path(self.tmpdir.name) 262 | input_file = cwd / "input.txt" 263 | output_file = cwd / "output.txt" 264 | 265 | stderr = output_file.resolve() 266 | null = None 267 | 268 | ppf = PipelineProcessFactory(which('cat'), input_file.resolve(), cwd='./') 269 | job = Job(ppf, cwd=cwd) 270 | 271 | return_codes = job.event_loop.run_until_complete(build_and_wait( 272 | ppf.stdin(null).stdout(null).stderr(stderr), 273 | job 274 | )) 275 | 276 | self.assertEqual(return_codes, [1]) 277 | 278 | with open(output_file.resolve(), 'r') as fin: 279 | observed = fin.read() 280 | 281 | self.assertTrue('No such file or directory' in observed) 282 | -------------------------------------------------------------------------------- /shtk/test/PipelineNodeFactory/__init__.py: -------------------------------------------------------------------------------- 1 | from . import PipelineProcessFactory 2 | from . import PipelineChannelFactory 3 | -------------------------------------------------------------------------------- /shtk/test/Shell/Shell.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import grp 3 | import importlib.resources 4 | import os 5 | import pathlib 6 | import pwd 7 | import random 8 | import sys 9 | import unittest 10 | 11 | from ...Job import NonzeroExitCodeException 12 | from ...Shell import Shell 13 | from ...StreamFactory import NullStreamFactory 14 | from ...util import which, export 15 | 16 | from ..test_util import register, TmpDirMixin 17 | 18 | __all__ = [] 19 | 20 | async def build_and_wait(factory, *args, **kwargs): 21 | obj = await factory.build(*args, **kwargs) 22 | return await obj.wait() 23 | 24 | @export 25 | @register() 26 | class TestRunCommand(TmpDirMixin): 27 | def runTest(self): 28 | cwd = pathlib.Path(self.tmpdir.name) 29 | input_file = "input.txt" 30 | output_file = "output.txt" 31 | 32 | message = "Hello World!" 33 | 34 | with Shell(cwd=cwd) as sh: 35 | cat = sh.command('cat') 36 | 37 | with (cwd / input_file).open('w') as fout: 38 | fout.write(message) 39 | 40 | job = sh( 41 | cat.stdin(str(input_file)).stdout(str(output_file)).stderr(None), 42 | wait=False 43 | )[0] 44 | 45 | return_codes = job.wait() 46 | self.assertEqual(return_codes, (0,)) 47 | 48 | self.assertTrue(os.path.exists(str(cwd / output_file))) 49 | with (cwd / output_file).open('r') as fin: 50 | observed = fin.read() 51 | 52 | self.assertEqual(message, observed) 53 | 54 | @export 55 | @register() 56 | class TestRunCommandAndWait(TmpDirMixin): 57 | def runTest(self): 58 | cwd = pathlib.Path(self.tmpdir.name) 59 | input_file = "input.txt" 60 | output_file = "output.txt" 61 | 62 | message = "Hello World!" 
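
        # Unlike TestRunCommand above, no wait=False is passed, so sh(...)
        # blocks until the job finishes; the output file can be read back
        # immediately without an explicit job.wait().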
63 | 64 | with Shell(cwd=cwd) as sh: 65 | cat = sh.command('cat') 66 | 67 | with (cwd / input_file).open('w') as fout: 68 | fout.write(message) 69 | 70 | job = sh( 71 | cat.stdin(str(input_file)).stdout(str(output_file)).stderr(None) 72 | )[0] 73 | 74 | self.assertTrue(os.path.exists(str(cwd / output_file))) 75 | with (cwd / output_file).open('r') as fin: 76 | observed = fin.read() 77 | 78 | self.assertEqual(message, observed) 79 | 80 | @export 81 | @register() 82 | class TestCommandDoesntExist(TmpDirMixin): 83 | def runTest(self): 84 | cwd = pathlib.Path(self.tmpdir.name) 85 | 86 | with Shell(cwd=cwd) as sh: 87 | with self.assertRaises(RuntimeError): 88 | sh.command('./DOES NOT EXIST') 89 | 90 | @export 91 | @register() 92 | class TestCommandNotExecutable(TmpDirMixin): 93 | def runTest(self): 94 | cwd = pathlib.Path(self.tmpdir.name) 95 | tmpfile = cwd / "notexecutable.sh" 96 | 97 | tmpfile.touch(mode=0o600) 98 | 99 | with Shell(cwd=cwd) as sh: 100 | with self.assertRaises(RuntimeError): 101 | sh.command(f"./{tmpfile.name}") 102 | 103 | @export 104 | @register() 105 | class TestCommandNotReadable(TmpDirMixin): 106 | def runTest(self): 107 | cwd = pathlib.Path(self.tmpdir.name) 108 | tmpfile = cwd / "notreadable.sh" 109 | 110 | tmpfile.touch(mode=0o300) 111 | 112 | with Shell(cwd=cwd) as sh: 113 | if os.getuid() != 0: 114 | with self.assertRaises(RuntimeError): 115 | sh.command(f"./{tmpfile.name}") 116 | else: 117 | sh.command(f"./{tmpfile.name}") 118 | 119 | @export 120 | @register() 121 | class TestNoEnvironment(TmpDirMixin): 122 | def runTest(self): 123 | with Shell(env={}) as sh: 124 | self.assertEqual(len(sh.environment), 0) 125 | 126 | @export 127 | @register() 128 | class TestWithEnvironment(TmpDirMixin): 129 | def runTest(self): 130 | num_existing = len(os.environ) 131 | message = 'Hello World!' 132 | MESSAGE = 'MESSAGE' 133 | os.environ[MESSAGE] = message 134 | with Shell(env=os.environ, cwd=os.getcwd()) as sh: 135 | self.assertEqual(message, sh.getenv(MESSAGE)) 136 | self.assertEqual(num_existing + 1, len(sh.environment)) 137 | 138 | @export 139 | @register() 140 | class TestChangeDirectory(TmpDirMixin): 141 | def runTest(self): 142 | cwd = pathlib.Path(self.tmpdir.name) 143 | input_file = "input.txt" 144 | output_file = "output.txt" 145 | 146 | message = "Hello World!" 147 | 148 | with Shell(cwd=cwd) as sh: 149 | cat = sh.command('cat') 150 | 151 | with (cwd / input_file).open('w') as fout: 152 | fout.write(message) 153 | 154 | job = sh( 155 | cat.stdin(str(input_file)).stdout(str(output_file)).stderr(None), 156 | wait=False 157 | )[0] 158 | 159 | return_codes = job.wait() 160 | self.assertEqual(return_codes, (0,)) 161 | 162 | self.assertTrue(os.path.exists(str(cwd / output_file))) 163 | with (cwd / output_file).open('r') as fin: 164 | observed = fin.read() 165 | 166 | self.assertEqual(message, observed) 167 | 168 | @export 169 | @register() 170 | class TestEvaluate(TmpDirMixin): 171 | def runTest(self): 172 | cwd = pathlib.Path(self.tmpdir.name) 173 | input_file = "input.txt" 174 | output_file = "output.txt" 175 | 176 | message = "Hello World!" 
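
        # sh.evaluate() runs the pipeline and returns the captured stdout
        # text (here, the file contents echoed by cat) rather than sending
        # it to a redirection target.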
177 | 178 | with Shell(cwd=cwd) as sh: 179 | cat = sh.command('cat') 180 | 181 | with (cwd / input_file).open('w') as fout: 182 | fout.write(message) 183 | 184 | observed = sh.evaluate( 185 | cat.stdin(str(input_file)).stderr(None) 186 | ) 187 | 188 | self.assertEqual(message, observed) 189 | 190 | @export 191 | @register() 192 | class TestEnvironmentSet(TmpDirMixin): 193 | def runTest(self): 194 | cwd = pathlib.Path(self.tmpdir.name) 195 | message = "Hello World!" 196 | 197 | from .. import test_util 198 | with importlib.resources.path(test_util.__package__, 'echo_env.py') as echo_env: 199 | with Shell(cwd=cwd) as sh: 200 | sh.export( 201 | MESSAGE = message 202 | ) 203 | 204 | python3 = sh.command('python3') 205 | 206 | observed = sh.evaluate( 207 | python3(echo_env, "MESSAGE") 208 | ) 209 | 210 | self.assertEqual(message, observed) 211 | 212 | @export 213 | @register() 214 | class TestEnvironmentSetGet(TmpDirMixin): 215 | def runTest(self): 216 | cwd = pathlib.Path(self.tmpdir.name) 217 | message = "Hello World!" 218 | 219 | from .. import test_util 220 | 221 | with Shell(cwd=cwd) as sh: 222 | sh.export( 223 | MESSAGE = message 224 | ) 225 | 226 | observed = sh.getenv('MESSAGE') 227 | 228 | self.assertEqual(message, observed) 229 | 230 | @export 231 | @register() 232 | class TestChangeDirectoryManager(TmpDirMixin): 233 | def runTest(self): 234 | cwd = pathlib.Path(self.tmpdir.name) 235 | input_file = cwd / "input.txt" 236 | output_file = cwd / "output.txt" 237 | 238 | message = "Hello World!" 239 | 240 | os.chdir("/") 241 | 242 | with Shell(cwd=os.getcwd()) as sh: 243 | cat = sh.command(which('cat')) 244 | 245 | with input_file.open('w') as fout: 246 | fout.write(message) 247 | 248 | old_cwd = sh.cwd 249 | self.assertNotEqual(old_cwd, cwd) 250 | with sh.cd_manager(cwd) as target_cwd: 251 | self.assertEqual(cwd, target_cwd) 252 | job = sh( 253 | cat.stdin(input_file.name).stdout(output_file.name).stderr(None), 254 | wait=False 255 | )[0] 256 | self.assertEqual(sh.cwd, cwd) 257 | self.assertEqual(sh.pwd, old_cwd) 258 | self.assertEqual(sh.cwd, old_cwd) 259 | self.assertEqual(sh.pwd, cwd) 260 | 261 | return_codes = job.wait() 262 | self.assertEqual(return_codes, (0,)) 263 | 264 | self.assertTrue(os.path.exists(str(output_file))) 265 | with output_file.open('r') as fin: 266 | observed = fin.read() 267 | 268 | self.assertEqual(message, observed) 269 | 270 | @export 271 | @register() 272 | class TestRunCommandDefaultShell(TmpDirMixin): 273 | def runTest(self): 274 | cwd = pathlib.Path(self.tmpdir.name) 275 | input_file = cwd / "input.txt" 276 | output_file = cwd / "output.txt" 277 | 278 | message = "Hello World!" 
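
        # Shell.get_shell() returns the currently active Shell without a
        # context manager; a default Shell is created and entered at import
        # time (see Shell.py) and its cleanup is registered with atexit.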
279 | 280 | sh = Shell.get_shell() 281 | 282 | cat = sh.command('cat') 283 | 284 | with input_file.open('w') as fout: 285 | fout.write(message) 286 | 287 | job = sh( 288 | cat.stdin(str(input_file)).stdout(str(output_file)).stderr(None), 289 | wait=False 290 | )[0] 291 | 292 | return_codes = job.wait() 293 | self.assertEqual(return_codes, (0,)) 294 | 295 | self.assertTrue(os.path.exists(str(output_file))) 296 | with output_file.open('r') as fin: 297 | observed = fin.read() 298 | 299 | self.assertEqual(message, observed) 300 | 301 | @export 302 | @register() 303 | class TestRunAsDifferentUser(TmpDirMixin): 304 | def setUp(self): 305 | super().setUp() 306 | 307 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 308 | raise unittest.SkipTest("Python version is less than 3.9") 309 | 310 | if os.getuid() != 0: 311 | raise unittest.SkipTest("Not running as root") 312 | 313 | def unless_key_error(fun): 314 | try: 315 | return fun() 316 | except KeyError: 317 | return None 318 | 319 | self.uid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_uid) 320 | 321 | if self.uid is None: 322 | raise unittest.SkipTest("No user exists with name 'nobody'") 323 | 324 | def runTest(self): 325 | cwd = pathlib.Path(self.tmpdir.name) 326 | outfile = cwd / "result.txt" 327 | 328 | with Shell(cwd=cwd, user=self.uid) as sh: 329 | cmd_id = sh.command(which('id')) 330 | sh.run(cmd_id('-u').stdout(outfile)) 331 | with outfile.open('r') as fin: 332 | observed_uid = fin.read() 333 | self.assertEqual(observed_uid.strip(), str(self.uid)) 334 | self.assertEqual(outfile.owner(), "nobody") 335 | 336 | @export 337 | @register() 338 | class TestRunAsDifferentGroup(TmpDirMixin): 339 | def setUp(self): 340 | super().setUp() 341 | 342 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 343 | raise unittest.SkipTest("Python version is less than 3.9") 344 | 345 | if os.getuid() != 0: 346 | raise unittest.SkipTest("Not running as root") 347 | 348 | def unless_key_error(fun): 349 | try: 350 | return fun() 351 | except KeyError: 352 | return None 353 | 354 | self.gid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_gid) 355 | 356 | if self.gid is None: 357 | raise unittest.SkipTest("No group exists with name 'nobody'") 358 | 359 | def runTest(self): 360 | cwd = pathlib.Path(self.tmpdir.name) 361 | outfile = cwd / "result.txt" 362 | 363 | with Shell(cwd=cwd, group=self.gid) as sh: 364 | cmd_id = sh.command(which('id')) 365 | sh.run(cmd_id('-g').stdout(outfile)) 366 | with outfile.open('r') as fin: 367 | observed_gid = fin.read() 368 | self.assertEqual(observed_gid.strip(), str(self.gid)) 369 | self.assertEqual(grp.getgrnam(outfile.group()).gr_gid, self.gid) 370 | 371 | @export 372 | @register() 373 | class TestEvaluateAsDifferentUser(TmpDirMixin): 374 | def setUp(self): 375 | super().setUp() 376 | 377 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 378 | raise unittest.SkipTest("Python version is less than 3.9") 379 | 380 | if os.getuid() != 0: 381 | raise unittest.SkipTest("Not running as root") 382 | 383 | def unless_key_error(fun): 384 | try: 385 | return fun() 386 | except KeyError: 387 | return None 388 | 389 | self.uid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_uid) 390 | 391 | if self.uid is None: 392 | raise unittest.SkipTest("No user exists with name 'nobody'") 393 | 394 | def runTest(self): 395 | cwd = pathlib.Path(self.tmpdir.name) 396 | 397 | with Shell(cwd=cwd, user=self.uid) as sh: 398 | cmd_id = sh.command(which('id')) 399 | observed_uid = 
sh.evaluate(cmd_id('-u')) 400 | self.assertEqual(observed_uid.strip(), str(self.uid)) 401 | 402 | @export 403 | @register() 404 | class TestEvaluateAsDifferentGroup(TmpDirMixin): 405 | def setUp(self): 406 | super().setUp() 407 | 408 | if ((sys.version_info.major, sys.version_info.minor) < (3, 9)): 409 | raise unittest.SkipTest("Python version is less than 3.9") 410 | 411 | if os.getuid() != 0: 412 | raise unittest.SkipTest("Not running as root") 413 | 414 | def unless_key_error(fun): 415 | try: 416 | return fun() 417 | except KeyError: 418 | return None 419 | 420 | self.gid = unless_key_error(lambda: pwd.getpwnam('nobody').pw_gid) 421 | 422 | if self.gid is None: 423 | raise unittest.SkipTest("No group exists with name 'nobody'") 424 | 425 | def runTest(self): 426 | cwd = pathlib.Path(self.tmpdir.name) 427 | 428 | with Shell(cwd=cwd, group=self.gid) as sh: 429 | cmd_id = sh.command(which('id')) 430 | observed_gid = sh.evaluate(cmd_id('-g')) 431 | self.assertEqual(observed_gid.strip(), str(self.gid)) 432 | 433 | @export 434 | @register() 435 | class TestShellSourceSuccess(TmpDirMixin): 436 | def setUp(self): 437 | super().setUp() 438 | 439 | def runTest(self): 440 | cwd = pathlib.Path(self.tmpdir.name) 441 | input_file = cwd / 'test.sh' 442 | exp_val = random.randint(1024,1024*1024*1024) 443 | 444 | with input_file.open('w') as fout: 445 | print(f""" 446 | TEST={exp_val!s} 447 | export TEST 448 | """.strip(), file=fout) 449 | 450 | with Shell(cwd=cwd) as sh: 451 | sh.source(input_file) 452 | self.assertEqual(sh.environment.get('TEST'), str(exp_val)) 453 | 454 | @export 455 | @register() 456 | class TestShellRelativeSourceSuccess(TmpDirMixin): 457 | def setUp(self): 458 | super().setUp() 459 | 460 | def runTest(self): 461 | cwd = pathlib.Path(self.tmpdir.name) 462 | input_file = 'test.sh' 463 | exp_val = random.randint(1024,1024*1024*1024) 464 | 465 | with (cwd / input_file).open('w') as fout: 466 | print(f""" 467 | TEST={exp_val!s} 468 | export TEST 469 | """.strip(), file=fout) 470 | 471 | with Shell(cwd=cwd) as sh: 472 | sh.source(input_file) 473 | self.assertEqual(sh.environment.get('TEST'), str(exp_val)) 474 | 475 | @export 476 | @register() 477 | class TestShellSourceFailure(TmpDirMixin): 478 | def setUp(self): 479 | super().setUp() 480 | 481 | def runTest(self): 482 | cwd = pathlib.Path(self.tmpdir.name) 483 | input_file = cwd / 'test.sh' 484 | exp_val = random.randint(1024,1024*1024*1024) 485 | 486 | with open(os.devnull, 'wb') as devnull: 487 | with Shell(cwd=cwd, exceptions=True, stderr=devnull, stdout=devnull) as sh: 488 | with self.assertRaises(NonzeroExitCodeException): 489 | sh.source(str(input_file)) 490 | -------------------------------------------------------------------------------- /shtk/test/Shell/__init__.py: -------------------------------------------------------------------------------- 1 | from . import Shell 2 | -------------------------------------------------------------------------------- /shtk/test/Stream/FileStream.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import pathlib 4 | import sys 5 | import unittest 6 | 7 | from ...Stream import FileStream 8 | from ...util import which, export 9 | 10 | from ..test_util import register, TmpDirMixin 11 | 12 | __all__ = [] 13 | 14 | @export 15 | @register() 16 | class TestRead(TmpDirMixin): 17 | def runTest(self): 18 | message = "Hello World!" 
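
        # Round-trip check: write the message with plain open(), then confirm
        # FileStream in mode='r' exposes it through reader() and that close()
        # really closes the underlying file object.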
--------------------------------------------------------------------------------
/shtk/test/Stream/FileStream.py:
--------------------------------------------------------------------------------
import asyncio
import os
import pathlib
import sys
import unittest

from ...Stream import FileStream
from ...util import which, export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name).resolve() / "tmp_r.txt"

        with open(path, 'w') as fout:
            fout.write(message)

        stream = FileStream(path, mode='r')
        reader = stream.reader()

        self.assertEqual(reader.read(), message)

        stream.close()

        self.assertTrue(reader.closed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name).resolve() / "tmp_w.txt"

        stream = FileStream(path, mode='w')
        writer = stream.writer()

        self.assertEqual(writer.write(message), len(message))

        stream.close()

        self.assertTrue(writer.closed)

        with open(path, 'r') as fin:
            observed = fin.read()

        self.assertEqual(message, observed)
--------------------------------------------------------------------------------
/shtk/test/Stream/ManualStream.py:
--------------------------------------------------------------------------------
import asyncio
import os
import pathlib
import sys
import unittest

from ...Stream import ManualStream
from ...util import which, export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name).resolve() / "tmp_r.txt"

        with path.open('w') as fout:
            fout.write(message)

        fin = path.open('r')
        stream = ManualStream(fileobj_r=fin)
        reader = stream.reader()
        writer = stream.writer()

        self.assertEqual(reader.read(), message)

        stream.close()

        # close() must not close a file object supplied by the caller
        self.assertFalse(reader.closed)
        #self.assertTrue(writer.closed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name).resolve() / "tmp_w.txt"

        with path.open('w') as fout:
            stream = ManualStream(fileobj_w=fout)
            reader = stream.reader()
            writer = stream.writer()

            self.assertEqual(writer.write(message), len(message))

            stream.close()

            # close() must not close a file object supplied by the caller
            #self.assertTrue(reader.closed)
            self.assertFalse(writer.closed)

        with open(path, 'r') as fin:
            observed = fin.read()

        self.assertEqual(message, observed)
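--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# FileStream, tested above, owns the file it opens: reader()/writer() expose
# plain file objects, and close() closes them. A round trip outside the test
# harness:
import pathlib
import tempfile

from shtk.Stream import FileStream

with tempfile.TemporaryDirectory() as tmpdir:
    path = pathlib.Path(tmpdir) / "note.txt"

    out = FileStream(path, mode='w')
    out.writer().write("a line of text")
    out.close()

    src = FileStream(path, mode='r')
    print(src.reader().read())  # -> a line of text
    src.close()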
--------------------------------------------------------------------------------
/shtk/test/Stream/NullStream.py:
--------------------------------------------------------------------------------
import asyncio
import os
import pathlib
import sys
import unittest

from ...Stream import NullStream
from ...util import which, export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        stream = NullStream()
        reader = stream.reader()

        self.assertEqual(reader.read(), "")

        stream.close()

        self.assertTrue(reader.closed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        stream = NullStream()
        writer = stream.writer()

        message = "Hello World!"
        self.assertEqual(writer.write(message), len(message))

        stream.close()

        self.assertTrue(writer.closed)
--------------------------------------------------------------------------------
/shtk/test/Stream/PipeStream.py:
--------------------------------------------------------------------------------
import asyncio
import os
import pathlib
import sys
import unittest

from ...Stream import PipeStream
from ...util import which, export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestWriteRead(TmpDirMixin):
    def runTest(self):
        expected = "Hello World!"
        ps = PipeStream(None)

        writer = ps.writer()
        reader = ps.reader()

        writer.write(expected)

        ps.close_writer()
        self.assertTrue(writer.closed)

        observed = reader.read()
        self.assertEqual(expected, observed)

        ps.close()
        self.assertTrue(reader.closed)
--------------------------------------------------------------------------------
/shtk/test/Stream/Stream.py:
--------------------------------------------------------------------------------
import asyncio
import os
import pathlib
import sys
import unittest

from ...Stream import Stream
from ...util import which, export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name) / 'tmp_r.txt'

        with open(path, 'w') as fout:
            fout.write(message)

        fileobj_r = open(path, 'r')
        stream = Stream(fileobj_r=fileobj_r)
        reader = stream.reader()

        self.assertEqual(reader.read(), message)

        stream.close()

        self.assertTrue(reader.closed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        message = "Hello World!"
        path = pathlib.Path(self.tmpdir.name) / 'tmp_w.txt'

        fileobj_w = open(path, 'w')
        stream = Stream(fileobj_w=fileobj_w)
        writer = stream.writer()
        writer.write(message)
        stream.close()
        self.assertTrue(writer.closed)

        with open(path, 'r') as fin:
            self.assertEqual(fin.read(), message)
--------------------------------------------------------------------------------
/shtk/test/Stream/__init__.py:
--------------------------------------------------------------------------------
from . import Stream
from . import PipeStream
from . import ManualStream
from . import NullStream
from . import FileStream
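--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# PipeStream, tested above, wraps an OS pipe: write on one end, close the
# writer so the reader sees EOF, then drain the other end.
from shtk.Stream import PipeStream

ps = PipeStream(None)
ps.writer().write("ping")
ps.close_writer()           # signals EOF to the reader
print(ps.reader().read())   # -> ping
ps.close()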
--------------------------------------------------------------------------------
/shtk/test/StreamFactory/FileStreamFactory.py:
--------------------------------------------------------------------------------
import unittest
import pathlib

from ...Job import Job
from ...StreamFactory import FileStreamFactory
from ...util import export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        cwd = pathlib.Path(self.tmpdir.name)
        tmpfile = (cwd / "tmp.txt").resolve()
        message = "Hello World!"

        job = Job(None, cwd=cwd)

        with open(tmpfile, 'w') as fout:
            fout.write(message)

        with FileStreamFactory(tmpfile, mode='r').build(job) as fs:
            observed = fs.reader().read()

        self.assertEqual(message, observed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        cwd = pathlib.Path(self.tmpdir.name)
        tmpfile = (cwd / "tmp.txt").resolve()
        message = "Hello World!"

        job = Job(None, cwd=cwd)

        with FileStreamFactory(tmpfile, mode='w').build(job) as fs:
            fs.writer().write(message)

        with open(tmpfile, 'r') as fin:
            observed = fin.read()

        self.assertEqual(message, observed)

@export
@register()
class TestAppend(TmpDirMixin):
    def runTest(self):
        cwd = pathlib.Path(self.tmpdir.name)
        tmpfile = (cwd / "tmp.txt").resolve()
        message = "Hello World!"

        job = Job(None, cwd=cwd)

        with open(tmpfile, 'w') as fout:
            fout.write(message)

        with FileStreamFactory(tmpfile, mode='a').build(job) as fs:
            fs.writer().write(message)

        with open(tmpfile, 'r') as fin:
            observed = fin.read()

        self.assertEqual(message * 2, observed)
--------------------------------------------------------------------------------
/shtk/test/StreamFactory/ManualStreamFactory.py:
--------------------------------------------------------------------------------
import unittest
import pathlib

from ...StreamFactory import ManualStreamFactory
from ...util import export

from ..test_util import register, TmpDirMixin

__all__ = []

@export
@register()
class TestRead(TmpDirMixin):
    def runTest(self):
        cwd = pathlib.Path(self.tmpdir.name)
        tmpfile = (cwd / "tmp.txt").resolve()
        message = "Hello World!"

        with tmpfile.open('w') as fout:
            fout.write(message)

        with tmpfile.open('r') as fin:
            with ManualStreamFactory(fileobj_r=fin).build(None) as fs:
                observed = fs.reader().read()

        self.assertEqual(message, observed)

@export
@register()
class TestWrite(TmpDirMixin):
    def runTest(self):
        cwd = pathlib.Path(self.tmpdir.name)
        tmpfile = (cwd / "tmp.txt").resolve()
        message = "Hello World!"

        with tmpfile.open('w') as fout:
            with ManualStreamFactory(fileobj_w=fout).build(None) as fs:
                fs.writer().write(message)

        with tmpfile.open('r') as fin:
            observed = fin.read()

        self.assertEqual(message, observed)
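--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# StreamFactory objects are blueprints: build(job) turns them into concrete
# Streams usable as context managers, as the tests above show. A minimal
# sketch, assuming a Job may be constructed with a null shell argument just
# as the tests do:
import pathlib
import tempfile

from shtk.Job import Job
from shtk.StreamFactory import FileStreamFactory

with tempfile.TemporaryDirectory() as tmpdir:
    cwd = pathlib.Path(tmpdir)
    job = Job(None, cwd=cwd)

    with FileStreamFactory(cwd / "out.txt", mode='w').build(job) as fs:
        fs.writer().write("built by a factory")

    print((cwd / "out.txt").read_text())  # -> built by a factory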
--------------------------------------------------------------------------------
/shtk/test/StreamFactory/NullStreamFactory.py:
--------------------------------------------------------------------------------
import unittest
import pathlib

from ...StreamFactory import NullStreamFactory
from ...util import export

from ..test_util import register

__all__ = []

@export
@register()
class TestRead(unittest.TestCase):
    def runTest(self):
        with NullStreamFactory().build(None) as fs:
            observed = fs.reader().read()

        self.assertEqual("", observed)

@export
@register()
class TestWrite(unittest.TestCase):
    def runTest(self):
        message = "Hello World!"

        with NullStreamFactory().build(None) as fs:
            fs.writer().write(message)
--------------------------------------------------------------------------------
/shtk/test/StreamFactory/PipeStreamFactory.py:
--------------------------------------------------------------------------------
import unittest

from ...StreamFactory import PipeStreamFactory
from ...util import export

from ..test_util import register

__all__ = []

@export
@register()
class TestBuild(unittest.TestCase):
    def runTest(self):
        message = "Hello World!"

        psf = PipeStreamFactory()

        with psf.build(None) as ps:
            ps.writer().write(message)
            ps.close_writer()
            observed = ps.reader().read()

        self.assertEqual(message, observed)
--------------------------------------------------------------------------------
/shtk/test/StreamFactory/__init__.py:
--------------------------------------------------------------------------------
from . import FileStreamFactory
from . import PipeStreamFactory
from . import NullStreamFactory
from . import ManualStreamFactory
--------------------------------------------------------------------------------
/shtk/test/__init__.py:
--------------------------------------------------------------------------------
from . import util
from . import PipelineNode
from . import Stream
from . import StreamFactory
from . import PipelineNodeFactory
from . import Job
from . import Shell
--------------------------------------------------------------------------------
/shtk/test/coveragerc:
--------------------------------------------------------------------------------
[report]
exclude_lines =
    pass
--------------------------------------------------------------------------------
/shtk/test/echo_env.py:
--------------------------------------------------------------------------------
import os
import sys

if __name__ == "__main__":
    sys.stdout.write(os.getenv(sys.argv[1]))
--------------------------------------------------------------------------------
/shtk/test/test_util.py:
--------------------------------------------------------------------------------
"""Helpful tools for creating automated tests"""

import collections
import itertools
import os
import pathlib
import sys
import tempfile
import unittest

from ..util import export

__all__ = ["test_registry"]

test_registry = collections.defaultdict(lambda: [])

@export
def register(*tags):
    """Registers a unit test for the custom test harness"""
    tags = list(tags)
    def register_inner(test_case):
        for tag in itertools.chain(("all",), tags):
            test_registry[tag].append(test_case)
        return test_case
    return register_inner

@export
class TmpDirMixin(unittest.TestCase):
    """Unittest mixin that creates and manages a temporary directory"""
    def setUp(self):
        """Create the temporary directory and chdir into it"""
        super().setUp()
        self.old_cwd = os.getcwd()
        self.tmpdir = tempfile.TemporaryDirectory()
        os.chdir(self.tmpdir.__enter__())

    def tearDown(self):
        """Restore the old working directory and delete the temporary directory"""
        os.chdir(self.old_cwd)
        self.tmpdir.__exit__(None, None, None)
        super().tearDown()
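--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# New tests plug into the harness above by combining register() with
# TmpDirMixin, which runs each test from inside a fresh temporary directory:
import os

from shtk.test.test_util import register, TmpDirMixin

@register("demo")  # registered under the "demo" tag in addition to "all"
class TestCwdIsTemporary(TmpDirMixin):
    def runTest(self):
        # setUp() has already chdir'd into the temporary directory
        self.assertEqual(
            os.path.realpath(os.getcwd()),
            os.path.realpath(self.tmpdir.name))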
--------------------------------------------------------------------------------
/shtk/test/util/__init__.py:
--------------------------------------------------------------------------------
from . import util
--------------------------------------------------------------------------------
/shtk/test/util/util.py:
--------------------------------------------------------------------------------
import os
import pathlib
import sys
import unittest

from ...util import which, export

from ..test_util import register

__all__ = []

@export
@register()
class TestWhich(unittest.TestCase):
    def runTest(self):
        self.assertIsNotNone(which('sh'))
        self.assertIsNone(which('NAME OF AN EXECUTABLE THAT DOES NOT EXIST'))
--------------------------------------------------------------------------------
/shtk/util.py:
--------------------------------------------------------------------------------
"""
A set of common utilities used by other pieces of SHTK.
"""

import pathlib
import os
import os.path
import inspect

__all__ = ["export"]

def export(obj):
    """
    A helpful decorator used to control __all__ in shtk's modules

    Args:
        obj: the object whose name should be added to __all__

    Returns:
        obj

    """
    inspect.getmodule(obj).__all__.append(obj.__name__)
    return obj

@export
def which(name, path=None):
    """
    Searches dirs in path.split(os.pathsep) for an executable file named name

    Args:
        name (str): The name of the program to search for
        path (str): List of directories to search separated by os.pathsep
            (default value is None, meaning os.environ["PATH"])

    Returns:
        None or pathlib.Path:
            The first found path that meets the requirements or None if a file
            meeting the requirements is not found.

    """
    if path is None:
        path = os.environ['PATH']

    for single_path in path.split(os.pathsep):
        exe_file = pathlib.Path(os.path.expanduser(single_path), name)
        if os.path.isfile(exe_file) and os.access(exe_file, os.X_OK):
            return exe_file

    return None
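--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# which() walks the directories of PATH (or an explicit os.pathsep-separated
# string) and returns the first executable match as a pathlib.Path, or None:
from shtk.util import which

print(which('sh'))                           # e.g. /bin/sh on most systems
print(which('sh', path='/does/not/exist'))   # -> None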
72 | """ 73 | def __init__(self, flags = 0, binary=False): 74 | self.flags = flags 75 | self.binary = binary 76 | self.reader = None 77 | self.writer = None 78 | 79 | def __enter__(self): 80 | """ 81 | Enters the context manager and creates the pipe 82 | 83 | Returns: 84 | The Pipe object (self) 85 | """ 86 | fd_r, fd_w = os.pipe2(self.flags) 87 | 88 | if self.binary: 89 | self.reader, self.writer = os.fdopen(fd_r, 'rb'), os.fdopen(fd_w, 'wb') 90 | else: 91 | self.reader, self.writer = os.fdopen(fd_r, 'r'), os.fdopen(fd_w, 'w') 92 | 93 | return self 94 | 95 | def __exit__(self, exc_type, exc_tb, exc_value): 96 | """ 97 | Closes the reader and writer (if they're still open). 98 | """ 99 | self.close() 100 | 101 | def write(self, *args, **kwargs): 102 | """ 103 | Write to the write side of hte pipe 104 | """ 105 | return self.writer.write(*args, **kwargs) 106 | 107 | def read(self, *args, **kwargs): 108 | """ 109 | Read from the read side of the pipe 110 | 111 | Returns: 112 | Data read from the pipe 113 | """ 114 | return self.reader.read(*args, **kwargs) 115 | 116 | def close_writer(self): 117 | """ 118 | Closes the writer, if it's not already closed. 119 | """ 120 | if not self.writer.closed: 121 | self.writer.close() 122 | 123 | def close_reader(self): 124 | """ 125 | Closes the reader, if it's not already closed 126 | """ 127 | if not self.reader.closed: 128 | self.reader.close() 129 | 130 | def close(self): 131 | """ 132 | Closes both the reader and writer if they're not already closed. 133 | """ 134 | self.close_writer() 135 | self.close_reader() 136 | -------------------------------------------------------------------------------- /test_exception.py: -------------------------------------------------------------------------------- 1 | import shtk 2 | 3 | sh = shtk.Shell() 4 | 5 | ls = sh.command('ls') 6 | wc = sh.command('wc') 7 | cat = sh.command('cat') 8 | sleep = sh.command('sleep') 9 | 10 | sh(cat('/nonexistent.txt')) 11 | -------------------------------------------------------------------------------- /test_interactive.py: -------------------------------------------------------------------------------- 1 | from shtk import Shell, Job, PipeStreamFactory 2 | import os 3 | import sys 4 | import time 5 | 6 | class Parted: 7 | def __init__(self, img_path): 8 | self.img_path = img_path 9 | 10 | sh = Shell.get_shell() 11 | parted = sh.command('parted') 12 | 13 | psf_input = PipeStreamFactory(flags=os.O_NONBLOCK) 14 | psf_output = PipeStreamFactory(flags=os.O_NONBLOCK) 15 | 16 | self.job = sh(parted(self.img_path).stdin(psf_input).stdout(psf_output), wait=False)[0] 17 | time.sleep(1) 18 | 19 | reader = self.job.pipeline.stdout_stream.reader() 20 | 21 | time.sleep(0.1) 22 | 23 | self._write('unit s\n') 24 | print(self._read()) 25 | 26 | def _write(self, msg): 27 | writer = self.job.pipeline.stdin_stream.writer() 28 | return os.write(writer.fileno(), msg.encode('utf-8')) 29 | 30 | def _read(self): 31 | reader = self.job.pipeline.stdout_stream.reader() 32 | 33 | result = "" 34 | while 1: 35 | result += os.read(reader.fileno(), 1024*1024).decode('utf-8') 36 | if result.endswith("(parted) "): 37 | break 38 | time.sleep(0.0001) 39 | 40 | return result 41 | 42 | def __del__(self): 43 | if hasattr(self, 'job') and self.job: 44 | writer = self.job.pipeline.stdin_stream.close() 45 | reader = self.job.pipeline.stdout_stream.close() 46 | 47 | def describe(self): 48 | self._write("p\n") 49 | time.sleep(0.1) 50 | return self._read() 51 | 52 | def create_image(self, byte_size): 53 | sh = 
--------------------------------------------------------------------------------
/test_exception.py:
--------------------------------------------------------------------------------
import shtk

sh = shtk.Shell()

ls = sh.command('ls')
wc = sh.command('wc')
cat = sh.command('cat')
sleep = sh.command('sleep')

sh(cat('/nonexistent.txt'))
--------------------------------------------------------------------------------
/test_interactive.py:
--------------------------------------------------------------------------------
from shtk import Shell, Job, PipeStreamFactory
import os
import sys
import time

class Parted:
    def __init__(self, img_path):
        self.img_path = img_path

        sh = Shell.get_shell()
        parted = sh.command('parted')

        psf_input = PipeStreamFactory(flags=os.O_NONBLOCK)
        psf_output = PipeStreamFactory(flags=os.O_NONBLOCK)

        self.job = sh(parted(self.img_path).stdin(psf_input).stdout(psf_output), wait=False)[0]
        time.sleep(1)

        reader = self.job.pipeline.stdout_stream.reader()

        time.sleep(0.1)

        self._write('unit s\n')
        print(self._read())

    def _write(self, msg):
        writer = self.job.pipeline.stdin_stream.writer()
        return os.write(writer.fileno(), msg.encode('utf-8'))

    def _read(self):
        reader = self.job.pipeline.stdout_stream.reader()

        result = ""
        while True:
            result += os.read(reader.fileno(), 1024*1024).decode('utf-8')
            if result.endswith("(parted) "):
                break
            time.sleep(0.0001)

        return result

    def __del__(self):
        if hasattr(self, 'job') and self.job:
            self.job.pipeline.stdin_stream.close()
            self.job.pipeline.stdout_stream.close()

    def describe(self):
        self._write("p\n")
        time.sleep(0.1)
        return self._read()

    def create_image(self, byte_size):
        sh = Shell.get_shell()
        truncate = sh.command('truncate')
        sh(
            truncate('-s', f">{int(byte_size)}", self.img_path)
        )


with Shell() as sh:
    xxd = sh.command('xxd')
    head = sh.command('head')
    wc = sh.command('wc')

    parted = Parted(sys.argv[1])
    print(parted.describe())
--------------------------------------------------------------------------------
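--------------------------------------------------------------------------------
Usage sketch (illustration only, not a repository file):
--------------------------------------------------------------------------------
# test_interactive.py above drives a long-running child through non-blocking
# PipeStreamFactory ends. A stripped-down version of the same pattern,
# assuming 'cat' is on PATH (cat simply echoes its stdin back):
import os
import time

from shtk import Shell, PipeStreamFactory

sh = Shell.get_shell()
cat = sh.command('cat')

stdin_psf = PipeStreamFactory(flags=os.O_NONBLOCK)
stdout_psf = PipeStreamFactory(flags=os.O_NONBLOCK)

job = sh(cat.stdin(stdin_psf).stdout(stdout_psf), wait=False)[0]

writer = job.pipeline.stdin_stream.writer()
reader = job.pipeline.stdout_stream.reader()

os.write(writer.fileno(), b"echoed back\n")
time.sleep(0.1)  # crude: give the child time to respond before polling
print(os.read(reader.fileno(), 4096).decode('utf-8'))

job.pipeline.stdin_stream.close()   # EOF on stdin lets cat exit
job.pipeline.stdout_stream.close()
job.wait()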