├── tests
├── __init__.py
├── conftest.py
├── test_cwd.py
├── test_homedir.py
├── test_touch.py
├── test_cd.py
├── test_umask.py
├── test_reljoin.py
├── test_environ.py
├── test_mkdir.py
├── test_run.py
├── test_sync.py
├── test_getdirsize.py
├── test_cp.py
├── test_lsarchive.py
├── test_rm.py
├── test_chown.py
├── test_unarchive.py
├── test_mv.py
├── test_atomic.py
├── test_chmod.py
├── test_backup.py
├── test_read.py
├── utils.py
├── test_write.py
├── test_ls.py
├── test_command.py
└── test_archive.py
├── requirements.txt
├── docs
├── authors.rst
├── changelog.rst
├── devguide.rst
├── contributing.rst
├── license.rst
├── api.rst
├── versioning.rst
├── installation.rst
├── index.rst
├── Makefile
└── conf.py
├── setup.py
├── AUTHORS.rst
├── MANIFEST.in
├── tox.ini
├── pyproject.toml
├── .readthedocs.yaml
├── src
└── shelmet
│ ├── __init__.py
│ ├── types.py
│ ├── path.py
│ └── fileio.py
├── LICENSE.rst
├── .github
└── workflows
│ └── main.yml
├── .gitignore
├── CHANGELOG.rst
├── CONTRIBUTING.rst
├── setup.cfg
├── tasks.py
├── DEVGUIDE.rst
├── pylintrc
└── README.rst
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -e .[dev]
2 |
--------------------------------------------------------------------------------
/docs/authors.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../AUTHORS.rst
2 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CHANGELOG.rst
2 |
--------------------------------------------------------------------------------
/docs/devguide.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../DEVGUIDE.rst
2 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/license.rst:
--------------------------------------------------------------------------------
1 | License
2 | =======
3 |
4 | .. include:: ../LICENSE.rst
5 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Setuptools entry point.

Intentionally empty: package metadata and options are supplied declaratively
(setup.cfg and pyproject.toml exist at the repo root — presumably they carry
the configuration; confirm there).
"""

from setuptools import setup


setup()
7 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | .. _api:
2 |
3 | API Reference
4 | =============
5 |
6 |
7 | .. automodule:: shelmet
8 | :members:
9 | :imported-members:
10 |
--------------------------------------------------------------------------------
/docs/versioning.rst:
--------------------------------------------------------------------------------
1 | Versioning
2 | ==========
3 |
4 | This project follows `Semantic Versioning`_.
5 |
6 |
7 | .. _Semantic Versioning: http://semver.org/
8 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | shelmet requires Python >= 3.6.
5 |
6 | To install from `PyPI <https://pypi.org/project/shelmet/>`_:
7 |
8 | ::
9 |
10 | pip install shelmet
11 |
--------------------------------------------------------------------------------
/AUTHORS.rst:
--------------------------------------------------------------------------------
1 | Authors
2 | =======
3 |
4 |
5 | Lead
6 | ----
7 |
8 | - Derrick Gilland, dgilland@gmail.com, `dgilland@github <https://github.com/dgilland>`_
9 |
10 |
11 | Contributors
12 | ------------
13 |
14 | None
15 |
--------------------------------------------------------------------------------
/tests/test_cwd.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 |
4 | import shelmet as sh
5 |
6 |
def test_cwd__returns_current_working_directory():
    """``sh.cwd`` should mirror ``os.getcwd()`` and return it as a ``Path``."""
    current = sh.cwd()
    assert isinstance(current, Path)
    assert str(current) == os.getcwd()
11 |
--------------------------------------------------------------------------------
/tests/test_homedir.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 |
4 | import shelmet as sh
5 |
6 |
def test_homedir__returns_user_home_directory():
    """``sh.homedir`` should return the expanded user home directory as a ``Path``."""
    home_path = sh.homedir()
    assert isinstance(home_path, Path)
    assert str(home_path) == os.path.expanduser("~")
11 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | graft src
2 | graft tests
3 | graft docs
4 |
5 | include AUTHORS.rst
6 | include CONTRIBUTING.rst
7 | include CHANGELOG.rst
8 | include LICENSE.rst
9 | include README.rst
10 | include requirements.txt
11 | include tox.ini
12 | include pylintrc
13 | include tasks.py
14 |
15 | global-exclude *.py[cod] __pycache__ *.so
16 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py37, py38, py39, py310, py311
3 | isolated_build = true
4 |
5 | [gh-actions]
6 | python =
7 | 3.7: py37
8 | 3.8: py38
9 | 3.9: py39
10 | 3.10: py310
11 | 3.11: py311
12 |
13 | [testenv]
14 | passenv = *
15 | extras = dev
16 | commands =
17 | {posargs:inv ci}
18 | setenv =
19 | TOX_ENV_SITE_PACKAGES_DIR = {envsitepackagesdir}
20 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=46.4",
4 | "wheel",
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
8 |
9 | [tool.black]
10 | line-length = 100
11 | include = '\.pyi?$'
12 | exclude = '''
13 | /(
14 | \.git
15 | | \.mypy_cache
16 | | \.tox
17 | | \.venv
18 | | \.cache
19 | | _build
20 | | build
21 | | dist
22 | )/
23 | '''
24 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. shelmet documentation master file
2 |
3 | .. include:: ../README.rst
4 |
5 | Guide
6 | =====
7 |
8 | .. toctree::
9 | :maxdepth: 3
10 |
11 | installation
12 | api
13 | devguide
14 |
15 |
16 | Project Info
17 | ============
18 |
19 | .. toctree::
20 | :maxdepth: 1
21 |
22 | license
23 | versioning
24 | changelog
25 | authors
26 | contributing
27 |
28 |
29 | Indices and Tables
30 | ==================
31 |
32 | - :ref:`genindex`
33 | - :ref:`modindex`
34 | - :ref:`search`
35 |
--------------------------------------------------------------------------------
/tests/test_touch.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 |
10 | parametrize = pytest.mark.parametrize
11 |
12 |
@parametrize(
    "paths",
    [
        param(["a"]),
        param(["a", "b", "c/d/e"]),
    ],
)
def test_touch(tmp_path: Path, paths: t.List[str]):
    """``sh.touch`` should create an empty file at every given path."""
    files = [tmp_path / rel for rel in paths]
    sh.touch(*files)
    assert all(file.is_file() for file in files)
25 |
--------------------------------------------------------------------------------
/tests/test_cd.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 |
10 | parametrize = pytest.mark.parametrize
11 |
12 |
@parametrize(
    "path",
    [
        param(""),
        param("a"),
        param("a/b"),
        param("a/b/c"),
    ],
)
def test_cd__changes_cwd(tmp_path: Path, path: str):
    """``sh.cd`` should switch into the target directory for the duration of the
    context and restore the original working directory on exit."""
    starting_cwd = os.getcwd()
    target = tmp_path / path
    target.mkdir(parents=True, exist_ok=True)

    with sh.cd(target):
        assert os.getcwd() == str(target)
    assert os.getcwd() == starting_cwd
30 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file for Sphinx projects
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 |
4 | # Required
5 | version: 2
6 |
7 | # Set the OS, Python version and other tools you might need
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.11"
12 |
13 | # Build documentation in the "docs/" directory with Sphinx
14 | sphinx:
15 | configuration: docs/conf.py
16 | fail_on_warning: true
17 |
18 | # Declare the Python requirements required to build documentation
19 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
20 | python:
21 | install:
22 | - requirements: requirements.txt
23 |
--------------------------------------------------------------------------------
/src/shelmet/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | The shelmet package.
3 |
4 | A shell power-up for working with the file system and running subprocess commands.
5 | """
6 |
7 | __version__ = "0.6.0"
8 |
9 | from .archiving import ArchiveError, UnsafeArchiveError, archive, backup, lsarchive, unarchive
10 | from .command import Command, cmd, run
11 | from .fileio import (
12 | atomicdir,
13 | atomicfile,
14 | read,
15 | readbytes,
16 | readchunks,
17 | readlines,
18 | readtext,
19 | write,
20 | writebytes,
21 | writelines,
22 | writetext,
23 | )
24 | from .filesystem import (
25 | chmod,
26 | chown,
27 | cp,
28 | dirsync,
29 | environ,
30 | fsync,
31 | getdirsize,
32 | mkdir,
33 | mv,
34 | rm,
35 | rmdir,
36 | rmfile,
37 | touch,
38 | umask,
39 | )
40 | from .path import Ls, cd, cwd, homedir, ls, lsdirs, lsfiles, reljoin, walk, walkdirs, walkfiles
41 |
--------------------------------------------------------------------------------
/tests/test_umask.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import shelmet as sh
4 |
5 |
def test_umask(tmp_path: Path):
    """``sh.umask`` should apply the given umask inside the context and restore
    the previous umask afterwards, observable via modes of newly created files."""
    mode = 0o644  # -rw-rw-r-- (user,group = read-write, other = read)
    umask = 0o77  # g-rw,o-rw (disallow read-write for group and other)
    expected_mode_with_umask = 0o600  # -rw------- (user = read-write, group,other = no-access)

    def touch_and_get_mode(name: str) -> int:
        # Create a file requesting `mode`; the effective mode reflects the active umask.
        file = tmp_path / name
        file.touch(mode=mode)
        return file.stat().st_mode & 0o777

    assert oct(touch_and_get_mode("before")) == oct(mode)
    with sh.umask(umask):
        assert oct(touch_and_get_mode("with")) == oct(expected_mode_with_umask)
    assert oct(touch_and_get_mode("after")) == oct(mode)
27 |
--------------------------------------------------------------------------------
/tests/test_reljoin.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 |
10 | parametrize = pytest.mark.parametrize
11 |
12 |
@parametrize(
    "paths, expected",
    [
        # String segments: leading/trailing separators on later segments are dropped.
        param(["a"], "a"),
        param(["a/"], "a"),
        param(["a", "b", "c/d"], "a/b/c/d"),
        param(["a", "/b", "/c/d"], "a/b/c/d"),
        # Only a leading separator on the FIRST segment is preserved.
        param(["/a", "b", "c/d"], "/a/b/c/d"),
        param(["/a/", "/b/", "/c/d/"], "/a/b/c/d"),
        # Path objects behave the same as strings.
        param([Path("a")], "a"),
        param([Path("a/")], "a"),
        param([Path("a"), Path("b"), Path("c/d")], "a/b/c/d"),
        param([Path("a"), Path("/b"), Path("/c/d")], "a/b/c/d"),
        param([Path("/a"), Path("b"), Path("c/d")], "/a/b/c/d"),
        # Mixed str/Path inputs are supported.
        param(["a", Path("b"), "c/d"], "a/b/c/d"),
        param([Path("a"), "b", Path("c/d")], "a/b/c/d"),
    ],
)
def test_reljoin(paths: t.Sequence[t.Union[Path, str]], expected: str):
    """``sh.reljoin`` joins segments as relative paths: unlike ``os.path.join``,
    an absolute later segment does not discard the earlier ones."""
    assert sh.reljoin(*paths) == expected
33 |
--------------------------------------------------------------------------------
/LICENSE.rst:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Derrick Gilland
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 |
--------------------------------------------------------------------------------
/tests/test_environ.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 | from pytest import param
5 |
6 | import shelmet as sh
7 |
8 | from .utils import is_subdict
9 |
10 |
11 | parametrize = pytest.mark.parametrize
12 |
13 |
@parametrize(
    "env",
    [
        param({"a": "1"}),
        param({"a": "1", "b": "2"}),
    ],
)
def test_environ__extends_envvars_and_restore_original(env: dict):
    """``sh.environ`` should overlay the given variables onto ``os.environ`` and
    restore the original environment when the context exits."""
    saved_environ = os.environ.copy()

    with sh.environ(env) as envvars:
        # The yielded mapping and the live environment both contain the overlay.
        assert is_subdict(env, envvars)
        assert is_subdict(env, dict(os.environ))
        assert os.environ != saved_environ
    assert os.environ == saved_environ
29 |
30 |
@parametrize(
    "env",
    [
        param({"a": "1"}),
        param({"a": "1", "b": "2"}),
    ],
)
def test_environ__replaces_envvars_and_restores_original(env: dict):
    """With ``replace=True``, ``sh.environ`` should swap out the ENTIRE
    environment for the given mapping and restore the original on exit."""
    saved_environ = os.environ.copy()

    with sh.environ(env, replace=True) as envvars:
        assert envvars == env
        assert os.environ == env
    assert os.environ == saved_environ
45 |
--------------------------------------------------------------------------------
/tests/test_mkdir.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 |
10 | parametrize = pytest.mark.parametrize
11 |
12 |
@parametrize(
    # Fixed: argnames was "paths," — the stray trailing comma was harmless
    # (pytest discards empty names) but inconsistent with every other
    # parametrize in the suite.
    "paths",
    [
        param(["a"]),
        param(["a", "a/b", "a/b/c", "d/e/f/g/h"]),
        param([Path("a")]),
        param([Path("a"), Path("a/b"), Path("a/b/c"), Path("d/e/f/g/h")]),
    ],
)
def test_mkdir(tmp_path: Path, paths: t.List[t.Union[str, Path]]):
    """``sh.mkdir`` should create every given directory, including missing parents."""
    targets = [tmp_path / path for path in paths]
    sh.mkdir(*targets)

    for target in targets:
        assert target.is_dir()
28 |
29 |
def test_mkdir__sets_mode(tmp_path: Path):
    """``sh.mkdir`` should apply the requested permission mode to each created directory."""
    dirs = [tmp_path / "test1", tmp_path / "test2", tmp_path / "1" / "2" / "3"]
    mode = 0o755

    sh.mkdir(*dirs, mode=mode)
    for directory in dirs:
        assert oct(directory.stat().st_mode & 0o777) == oct(mode)
38 |
39 |
def test_mkdir__raises_if_exist_not_ok(tmp_path: Path):
    """``sh.mkdir`` should raise ``FileExistsError`` for an existing target when
    ``exist_ok=False``."""
    with pytest.raises(FileExistsError):
        sh.mkdir(tmp_path, exist_ok=False)
43 |
--------------------------------------------------------------------------------
/tests/test_run.py:
--------------------------------------------------------------------------------
1 | import typing as t
2 | from unittest import mock
3 |
4 | import pytest
5 |
6 | import shelmet as sh
7 |
8 |
9 | parametrize = pytest.mark.parametrize
10 |
11 |
@pytest.fixture()
def mock_command() -> t.Generator[mock.MagicMock, None, None]:
    """Patch ``sh.command.Command`` with a spec'd MagicMock so tests can assert
    on how ``run`` constructs and invokes it, without spawning subprocesses."""
    with mock.patch.object(sh.command, "Command", spec=sh.command.Command) as _mock_command:
        yield _mock_command
16 |
17 |
def test_run__creates_command_and_call_run(mock_command):
    """``sh.run`` should forward all positional and keyword arguments verbatim
    to ``Command`` and then call ``run()`` on the resulting instance."""
    args = ["ls", "-la"]
    kwargs: t.Dict[str, t.Any] = {
        "stdin": None,
        "input": "test",
        "stdout": None,
        "stderr": None,
        "capture_output": False,
        "combine_output": True,
        "cwd": "/",
        "timeout": 10,
        "check": False,
        "encoding": "utf-8",
        "errors": "ignore",
        "text": False,
        "env": {"A": "B"},
        "replace_env": True,
    }
    popen_kwargs = {"umask": 1}

    sh.run(*args, **kwargs, **popen_kwargs)

    # Command(...) received exactly what run(...) was given.
    assert mock_command.called
    assert mock_command.call_args == mock.call(*args, **kwargs, **popen_kwargs)

    # And the constructed command's run() was invoked with no extra arguments.
    run_method: mock.MagicMock = mock_command.return_value.run
    assert run_method.called
    assert run_method.call_args == mock.call()
46 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Main
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | test:
7 | name: Test
8 | runs-on: ubuntu-latest
9 | strategy:
10 | matrix:
11 | python-version: [ "3.7", "3.8", "3.9", "3.10", "3.11" ]
12 |
13 | steps:
14 | - name: Checkout
15 | uses: actions/checkout@v3
16 |
17 | - name: Set up Python ${{ matrix.python-version }}
18 | uses: actions/setup-python@v4
19 | with:
20 | python-version: ${{ matrix.python-version }}
21 |
22 | - name: Install dependencies
23 | run: |
24 | pip install --upgrade pip setuptools
25 | pip install --upgrade tox-gh-actions coveralls
26 |
27 | - name: Run tests
28 | run: |
29 | tox
30 |
31 | - name: Send coverage report
32 | run: |
33 | coveralls
34 | env:
35 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
36 | COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
37 | COVERALLS_PARALLEL: true
38 | COVERALLS_SERVICE_NAME: github
39 |
40 | coveralls:
41 | name: Finish Coveralls
42 | needs: test
43 | runs-on: ubuntu-latest
44 | container: python:3-slim
45 |
46 | steps:
47 | - name: Finished
48 | run: |
49 | pip install --upgrade coveralls
50 | coveralls --finish
51 | env:
52 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
53 |
--------------------------------------------------------------------------------
/tests/test_sync.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from unittest import mock
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 | from .utils import patch_os_fsync
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
def test_dirsync(tmp_path: Path):
    """``sh.dirsync`` should trigger ``os.fsync`` for the given directory."""
    target = tmp_path / "test"
    target.mkdir()

    with patch_os_fsync() as os_fsync:
        sh.dirsync(target)
    assert os_fsync.called
23 |
24 |
def test_fsync__syncs_on_file_object(tmp_path: Path):
    """``sh.fsync`` on a file object should flush it and fsync its descriptor."""
    file = tmp_path / "test.txt"

    with file.open("w") as fp:
        fp.write("test")
        fd = fp.fileno()
        with mock.patch.object(fp, "flush") as mock_flush, patch_os_fsync() as mock_os_fsync:
            sh.fsync(fp)

    assert mock_flush.called
    assert mock_os_fsync.called
    assert mock_os_fsync.call_args[0][0] == fd
37 |
38 |
def test_fsync__syncs_on_fileno(tmp_path: Path):
    """``sh.fsync`` should accept a raw integer descriptor and fsync it directly."""
    file = tmp_path / "test.txt"
    file.write_text("test")

    with file.open() as fp:
        fd = fp.fileno()
        with patch_os_fsync() as mock_os_fsync:
            sh.fsync(fd)

    assert mock_os_fsync.called
    assert mock_os_fsync.call_args[0][0] == fd
50 |
51 |
@parametrize(
    "arg",
    [
        param(1.1),
        # bool is rejected even though it subclasses int — True is not a valid fd here.
        param(True),
        param([]),
        param({}),
        param(set()),
    ],
)
def test_fsync__raises_on_invalid_arg_type(arg):
    """``sh.fsync`` should raise ``ValueError`` for values that are neither file
    objects nor plain integer file descriptors."""
    with pytest.raises(ValueError):
        sh.fsync(arg)
65 |
--------------------------------------------------------------------------------
/src/shelmet/types.py:
--------------------------------------------------------------------------------
1 | """The types module contains common type annotation definitions."""
2 |
3 | from pathlib import Path
4 | import typing as t
5 |
6 | from typing_extensions import Literal
7 |
8 |
def _get_literal_args(literal_type) -> tuple:  # pragma: no cover
    """Backwards compatible method to get arguments passed to ``Literal`` in lieu of
    ``typing.get_args``.

    Resolution order covers the three Python versions this code supports:
    ``typing.get_args`` (3.8+), the ``__args__`` attribute (3.7), and the
    ``__values__`` attribute (3.6).
    """
    if hasattr(t, "get_args"):
        # Python 3.8+
        # pylint: disable=no-member
        return t.get_args(literal_type)  # type: ignore
    elif hasattr(literal_type, "__args__"):
        # Python 3.7
        return literal_type.__args__
    else:
        # Python 3.6
        return literal_type.__values__
22 |
23 |
# Path-like argument: plain string or pathlib.Path.
StrPath = t.Union[str, Path]
# Predicate filter: receives a Path, returns whether to keep it (presumably
# consumed by the ls/walk helpers — names match; confirm in shelmet.path).
LsFilterFn = t.Callable[[Path], bool]
# A single filter spec: pattern string, compiled regex, or predicate function.
LsFilterable = t.Union[str, t.Pattern, LsFilterFn]
# One filterable or an iterable of filterables.
LsFilter = t.Union[LsFilterable, t.Iterable[LsFilterable]]
# Standard-stream target: integer file descriptor or file-like object.
StdFile = t.Union[int, t.IO[t.Any]]
# Arguments accepted by command runners: str/bytes/None scalars or iterables thereof.
RunArgs = t.Union[str, bytes, None, t.Iterable[t.Union[str, bytes, None]]]
# Every accepted spelling of read-only and write-only open() modes (text/binary),
# including character-order permutations.
ReadOnlyTextMode = Literal["r", "rt", "tr"]
ReadOnlyBinMode = Literal["rb", "br"]
WriteOnlyTextMode = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"]
WriteOnlyBinMode = Literal["wb", "bw", "ab", "ba", "xb", "bx"]

# Runtime tuples of the Literal values above, usable for membership checks.
READ_ONLY_TEXT_MODES = _get_literal_args(ReadOnlyTextMode)
READ_ONLY_BIN_MODES = _get_literal_args(ReadOnlyBinMode)
READ_ONLY_MODES = READ_ONLY_TEXT_MODES + READ_ONLY_BIN_MODES
WRITE_ONLY_TEXT_MODES = _get_literal_args(WriteOnlyTextMode)
WRITE_ONLY_BIN_MODES = _get_literal_args(WriteOnlyBinMode)
WRITE_ONLY_MODES = WRITE_ONLY_TEXT_MODES + WRITE_ONLY_BIN_MODES
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 | junit.xml
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 |
62 | # Sphinx documentation
63 | docs/_build/
64 |
65 | # PyBuilder
66 | .pybuilder/
67 | target/
68 |
69 | # IPython
70 | profile_default/
71 | ipython_config.py
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # PEP 582
77 | __pypackages__/
78 |
79 | # Environments
80 | .env
81 | .venv
82 | env/
83 | venv/
84 | ENV/
85 | env.bak/
86 | venv.bak/
87 |
88 | # mypy
89 | .mypy_cache/
90 | .dmypy.json
91 | dmypy.json
92 |
93 | # Pyre type checker
94 | .pyre/
95 |
96 | # pytype static type analyzer
97 | .pytype/
98 |
99 | # Mr Developer
100 | .mr.developer.cfg
101 | .project
102 | .pydevproject
103 | .idea
104 | .DS_Store
105 |
--------------------------------------------------------------------------------
/tests/test_getdirsize.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 | from .utils import Dir, File
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
@parametrize(
    "files, pattern, expected_size",
    [
        # Single file, no pattern: result is that file's size.
        param([File("a", size=10)], None, 10),
        # Nested tree, no pattern: all files at every depth are summed (= 175).
        param(
            [
                File("a", size=10),
                File("b/1", size=5),
                File("b/2", size=5),
                File("b/3", size=3),
                File("b/4", size=2),
                File("b/c/5", size=100),
                File("d", size=50),
            ],
            None,
            175,
        ),
        # Non-recursive glob: only top-level *.json counted (123 + 38 = 161).
        param(
            [
                File("a.json", size=123),
                File("b.txt", size=17),
                File("c.json", size=38),
                File("d", size=173),
            ],
            "*.json",
            161,
        ),
        # Recursive glob: *.py at any depth, including top level (123+17+38+173 = 351).
        param(
            [
                File("1/a.py", size=123),
                File("1/2/b.py", size=17),
                File("1/2/3/c.py", size=38),
                File("d.py", size=173),
                File("foo.txt", size=12),
                File("1/bar.txt", size=293),
                File("1/2/baz.txt", size=314),
                File("1/2/3/qux.txt", size=83),
            ],
            "**/*.py",
            351,
        ),
    ],
)
def test_getdirsize(
    tmp_path: Path, files: t.List[File], pattern: t.Optional[str], expected_size: int
):
    """``sh.getdirsize`` should total file sizes under a directory, optionally
    restricted to a glob ``pattern``."""
    Dir(tmp_path, *files).mkdir()
    kwargs = {}
    # Only pass `pattern` when given so the default behavior is also exercised.
    if pattern:
        kwargs["pattern"] = pattern
    assert sh.getdirsize(tmp_path, **kwargs) == expected_size
66 |
--------------------------------------------------------------------------------
/CHANGELOG.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 |
5 | v0.6.0 (2021-03-29)
6 | -------------------
7 |
8 | - Change return type for ``ls``, ``lsfiles``, ``lsdirs``, ``walk``, ``walkfiles``, and ``walkdirs`` to an iterable class, ``Ls``. Previously, these functions were generators.
9 | - Add option to backup to an archive file in ``backup``.
10 | - Add functions:
11 |
12 | - ``archive``
13 | - ``chmod``
14 | - ``chown``
15 | - ``lsarchive``
16 | - ``unarchive``
17 |
18 |
19 | v0.5.0 (2021-03-04)
20 | -------------------
21 |
22 | - Import all utility functions into ``shelmet`` namespace.
23 | - Remove ``shelmet.sh`` catch-all submodule in favor of splitting it into smaller submodules, ``shelmet.filesystem`` and ``shelmet.path``. Recommend using ``import shelmet as sh`` as primary usage pattern instead of importing submodules. **breaking change**
24 | - Add functions:
25 |
26 | - ``backup``
27 | - ``read``
28 | - ``readbytes``
29 | - ``readchunks``
30 | - ``readlines``
31 | - ``readtext``
32 | - ``write``
33 | - ``writebytes``
34 | - ``writelines``
35 | - ``writetext``
36 |
37 |
38 | v0.4.0 (2021-01-26)
39 | -------------------
40 |
41 | - Rename ``sh.command`` to ``sh.cmd``. **breaking change**
42 | - Add methods to ``sh.Command`` / ``sh.command``:
43 |
44 | - ``Command.and_``
45 | - ``Command.or_``
46 | - ``Command.after``
47 |
48 |
49 | v0.3.0 (2020-12-24)
50 | -------------------
51 |
52 | - Add to ``sh`` module:
53 |
54 | - ``Command``
55 | - ``command``
56 | - ``cwd``
57 | - ``homedir``
58 | - ``run``
59 |
60 |
61 | v0.2.0 (2020-11-30)
62 | -------------------
63 |
64 | - Add to ``sh`` module:
65 |
66 | - ``atomicdir``
67 |
68 | - Rename ``atomic_write`` to ``atomicfile``. **breaking change**
69 |
70 |
71 | v0.1.0 (2020-11-16)
72 | -------------------
73 |
74 | - First release.
75 | - Add ``sh`` module:
76 |
77 | - ``atomic_write``
78 | - ``cd``
79 | - ``cp``
80 | - ``dirsync``
81 | - ``environ``
82 | - ``fsync``
83 | - ``getdirsize``
84 | - ``ls``
85 | - ``lsdirs``
86 | - ``lsfiles``
87 | - ``mkdir``
88 | - ``mv``
89 | - ``reljoin``
90 | - ``rm``
91 | - ``rmdir``
92 | - ``rmfile``
93 | - ``touch``
94 | - ``umask``
95 | - ``walk``
96 | - ``walkdirs``
97 | - ``walkfiles``
98 |
--------------------------------------------------------------------------------
/tests/test_cp.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import pytest
4 |
5 | import shelmet as sh
6 |
7 | from .utils import Dir, File
8 |
9 |
def test_cp__raises_when_copying_dir_to_existing_file(tmp_path: Path):
    """Copying a DIRECTORY onto an existing FILE should fail with ``FileExistsError``."""
    src_dir = tmp_path / "src"
    src_dir.mkdir()

    dst_file = tmp_path / "dst" / "dst.txt"
    dst_file.parent.mkdir()
    dst_file.touch()

    with pytest.raises(FileExistsError):
        sh.cp(src_dir, dst_file)
20 |
21 |
def test_cp__copies_file_to_file(tmp_path: Path):
    """``sh.cp`` should copy a file to a new destination path, creating parent dirs."""
    source = File("test.txt", text="test")
    Dir(tmp_path / "src", source).mkdir()

    destination = tmp_path / "dst" / "target.txt"
    sh.cp(source.path, destination)

    assert destination.is_file()
    assert destination.read_text() == source.text
32 |
33 |
def test_cp__copies_file_to_existing_dir(tmp_path: Path):
    """Copying a file into an existing directory should place it there under its own name."""
    source = File("test.txt", text="test")
    Dir(tmp_path / "src", source).mkdir()

    target_dir = Dir(tmp_path / "dst")
    target_dir.mkdir()
    sh.cp(source.path, target_dir.path)

    copied = target_dir.path / source.path.name
    assert copied.is_file()
    assert copied.read_text() == source.text
46 |
47 |
def test_cp__copies_dir_to_new_dir(tmp_path: Path):
    """Copying a directory to a non-existent destination should recreate its full tree."""
    src_dir = Dir(
        tmp_path / "src",
        File("1.txt", text="1"),
        File("2.txt", text="2"),
        File("a/a1.txt", text="a1"),
        File("a/a2.txt", text="a2"),
    )
    src_dir.mkdir()
    dst_path = tmp_path / "dst"

    sh.cp(src_dir.path, dst_path)

    # Every source file should exist under the destination with identical contents.
    for copied in src_dir.repath(dst_path).files:
        assert copied.path.is_file()
        assert copied.path.read_text() == copied.text
65 |
66 |
def test_cp__copies_and_merge_dir_to_existing_dir(tmp_path: Path):
    """Copying a directory onto an existing one should merge contents: copied
    files appear alongside the destination's pre-existing files."""
    src_dir = Dir(
        tmp_path / "src",
        File("1.txt", text="1"),
        File("2.txt", text="2"),
        File("a/a1.txt", text="a1"),
        File("a/a2.txt", text="a2"),
    )
    src_dir.mkdir()

    dst_dir = Dir(
        tmp_path / "dst",
        File("11.txt", text="11"),
        File("22.txt", text="22"),
        File("a/b1.txt", text="b1"),
        File("a/b2.txt", text="b2"),
    )
    dst_dir.mkdir()

    sh.cp(src_dir.path, dst_dir.path)

    # Both the relocated source files and the original destination files survive.
    expected_files = src_dir.repath(dst_dir.path).files + dst_dir.files
    for file in expected_files:
        assert file.path.is_file()
        assert file.path.read_text() == file.text
94 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given.
5 |
6 | You can contribute in many ways:
7 |
8 |
9 | Types of Contributions
10 | ----------------------
11 |
12 | Report Bugs
13 | +++++++++++
14 |
15 | Report bugs at https://github.com/dgilland/shelmet.
16 |
17 | If you are reporting a bug, please include:
18 |
19 | - Your operating system name and version.
20 | - Any details about your local setup that might be helpful in troubleshooting.
21 | - Detailed steps to reproduce the bug.
22 |
23 |
24 | Fix Bugs
25 | ++++++++
26 |
27 | Look through the GitHub issues for bugs. Anything tagged with "bug" is open to whoever wants to implement it.
28 |
29 |
30 | Implement Features
31 | ++++++++++++++++++
32 |
33 | Look through the GitHub issues for features. Anything tagged with "enhancement" or "help wanted" is open to whoever wants to implement it.
34 |
35 |
36 | Write Documentation
37 | +++++++++++++++++++
38 |
39 | shelmet could always use more documentation, whether as part of the official shelmet docs, in docstrings, or even on the web in blog posts, articles, and such.
40 |
41 |
42 | Submit Feedback
43 | +++++++++++++++
44 |
45 | The best way to send feedback is to file an issue at https://github.com/dgilland/shelmet.
46 |
47 | If you are proposing a feature:
48 |
49 | - Explain in detail how it would work.
50 | - Keep the scope as narrow as possible, to make it easier to implement.
51 | - Remember that this is a volunteer-driven project, and that contributions are welcome :)
52 |
53 |
54 | Get Started!
55 | ------------
56 |
57 | Ready to contribute? Here's how to set up ``shelmet`` for local development.
58 |
59 | 1. Fork the ``shelmet`` repo on GitHub.
60 | 2. Clone your fork locally::
61 |
62 | $ git clone git@github.com:your_username_here/shelmet.git
63 |
64 | 3. Install Python dependencies into a virtualenv::
65 |
66 | $ cd shelmet
67 | $ pip install -r requirements.txt
68 |
69 | 4. Create a branch for local development::
70 |
71 | $ git checkout -b name-of-your-bugfix-or-feature
72 |
73 | Now you can make your changes locally.
74 |
75 | 5. Autoformat code::
76 |
77 | $ inv fmt
78 |
79 | 6. When you're done making changes, check that your changes pass all unit tests by testing with ``tox`` across all supported Python versions::
80 |
81 | $ tox
82 |
83 | 7. Add yourself to ``AUTHORS.rst``.
84 |
85 | 8. Commit your changes and push your branch to GitHub::
86 |
87 |    $ git add .
88 |    $ git commit -m "Your detailed description of your changes."
89 |    $ git push origin name-of-your-bugfix-or-feature
90 |
91 | 9. Submit a pull request through GitHub.
92 |
93 |
94 | Pull Request Guidelines
95 | -----------------------
96 |
97 | Before you submit a pull request, check that it meets these guidelines:
98 |
99 | 1. The pull request should include tests.
100 | 2. The pull request should work for all versions of Python that this project supports.
101 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = shelmet
3 | version = attr: shelmet.__version__
4 | author = Derrick Gilland
5 | author_email = dgilland@gmail.com
6 | url = https://github.com/dgilland/shelmet
7 | description = A shell power-up for working with the file system and running subprocess commands.
8 | long_description = file: README.rst, CHANGELOG.rst, LICENSE.rst
9 | keywords = shelmet shell subprocess process file system path os utility
10 | license = MIT License
11 | classifiers =
12 | Development Status :: 4 - Beta
13 | Intended Audience :: Developers
14 | License :: OSI Approved :: MIT License
15 | Operating System :: Unix
16 | Programming Language :: Python
17 | Programming Language :: Python :: 3
18 | Programming Language :: Python :: 3.7
19 | Programming Language :: Python :: 3.8
20 | Programming Language :: Python :: 3.9
21 | Programming Language :: Python :: 3.10
22 | Programming Language :: Python :: 3.11
23 | Topic :: Software Development :: Libraries
24 | Topic :: Software Development :: Libraries :: Python Modules
25 |
26 | [options]
27 | package_dir =
28 | = src
29 | packages = find:
30 | python_requires = >=3.7
31 | install_requires =
32 | typing-extensions>=3.7.4
33 |
34 | [options.packages.find]
35 | where = src
36 |
37 | [options.extras_require]
38 | dev =
39 | autodocsumm
40 | black
41 | build
42 | coverage
43 | docformatter
44 | flake8
45 | flake8-black
46 | flake8-bugbear
47 | flake8-isort
48 | furo
49 | importlib_metadata<5; python_version=="3.7"
50 | invoke
51 | isort
52 | mypy
53 | pylint
54 | pytest
55 | pytest-cov
56 | pytest-freezegun
57 | sphinx
58 | sphinx-autodoc-typehints
59 | tox
60 | twine
61 | wheel
62 |
63 |
64 | [bdist_wheel]
65 | python_tag = py3
66 |
67 | [flake8]
68 | exclude = .tox,venv,env
69 | max_line_length = 100
70 | max_complexity = 10
71 | # F401 - `module` imported but unused
72 | # F811 - redefinition of unused `name` from line `N`
73 | # E203 - whitespace before ':'
74 | # W503 - line break before binary operator
75 | ignore = F401,F811,E203,W503
76 |
77 | [mypy]
78 | show_column_numbers = True
79 | show_error_context = False
80 | ignore_missing_imports = True
81 | warn_return_any = False
82 | strict_optional = True
83 | warn_no_return = True
84 | warn_redundant_casts = False
85 | warn_unused_ignores = False
86 |
87 | [tool:isort]
88 | line_length = 100
89 | multi_line_output = 3
90 | lines_after_imports = 2
91 | combine_as_imports = true
92 | include_trailing_comma = true
93 | force_sort_within_sections = true
94 |
95 | [tool:pytest]
96 | junit_family = xunit2
97 | addopts =
98 | --verbose
99 | --doctest-modules
100 | --no-cov-on-fail
101 | --cov-fail-under=100
102 | --cov-report=term-missing
103 | --cov-report=xml:build/coverage/coverage.xml
104 | --cov-report=html:build/coverage
105 | --junitxml=build/testresults/junit.xml
106 |
107 | [coverage:run]
108 | omit =
109 | */tests/*
110 | */test_*
111 |
--------------------------------------------------------------------------------
/tests/test_lsarchive.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 | from .utils import ARCHIVE_EXTENSIONS, Dir, File, create_archive, create_archive_source
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
@pytest.fixture(params=ARCHIVE_EXTENSIONS)
def arc_ext(request) -> str:
    """Parametrized fixture yielding every supported archive extension."""
    return request.param
19 |
20 |
@parametrize(
    "source, expected",
    [
        param(
            Dir(
                "a",
                Dir("b"),
                Dir("c", File("c1", text="c1"), File("c2", text="c2")),
                Dir("d", Dir("e", Dir("f", File("f1", text="f1")), File("e1", text="e1"))),
                File("a1", text="a1"),
                File("a2", text="a2"),
            ),
            {
                Path(member)
                for member in (
                    "a",
                    "a/b",
                    "a/c",
                    "a/c/c1",
                    "a/c/c2",
                    "a/d",
                    "a/d/e",
                    "a/d/e/f",
                    "a/d/e/f/f1",
                    "a/d/e/e1",
                    "a/a1",
                    "a/a2",
                )
            },
        )
    ],
)
def test_lsarchive__returns_list_of_archive_members(
    tmp_path: Path, arc_ext: str, source: Dir, expected: t.Set[Path]
):
    """``sh.lsarchive`` lists all members of an archive whose format is inferred from the
    file extension."""
    archive_file = tmp_path / f"archive{arc_ext}"
    src_dir = create_archive_source(tmp_path, source)
    create_archive(archive_file, src_dir.items[0].path)

    members = sh.lsarchive(archive_file)
    assert set(members) == expected
59 |
60 |
@parametrize(
    "source, expected",
    [
        param(
            Dir(
                "a",
                Dir("b"),
                Dir("c", File("c1", text="c1"), File("c2", text="c2")),
                Dir("d", Dir("e", Dir("f", File("f1", text="f1")), File("e1", text="e1"))),
                File("a1", text="a1"),
                File("a2", text="a2"),
            ),
            {
                Path(member)
                for member in (
                    "a",
                    "a/b",
                    "a/c",
                    "a/c/c1",
                    "a/c/c2",
                    "a/d",
                    "a/d/e",
                    "a/d/e/f",
                    "a/d/e/f/f1",
                    "a/d/e/e1",
                    "a/a1",
                    "a/a2",
                )
            },
        )
    ],
)
def test_lsarchive__returns_list_of_archive_members_with_explicit_extension_format(
    tmp_path: Path, arc_ext: str, source: Dir, expected: t.Set[Path]
):
    """When the archive file has no extension, an explicit ``ext`` selects the format."""
    archive_file = tmp_path / "archive"
    src_dir = create_archive_source(tmp_path, source)
    create_archive(archive_file, src_dir.items[0].path, ext=arc_ext)

    members = sh.lsarchive(archive_file, ext=arc_ext)
    assert set(members) == expected
99 |
--------------------------------------------------------------------------------
/tests/test_rm.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 | from .utils import Dir, File
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
@parametrize(
    "sources",
    [
        param([File("1.txt")], id="one_file"),
        param([File("1.txt"), File("2.txt"), File("3.txt")], id="many_files"),
        param([Dir("1")], id="one_dir_with_no_files"),
        param([Dir("1", File("1.txt"), File("2.txt"))], id="one_dir_with_files"),
        param(
            [Dir("1"), Dir("2"), Dir("3/4"), Dir("5/6/7")],
            id="many_dirs_with_no_files",
        ),
        param(
            [
                Dir("1", File("1.txt")),
                Dir("2", File("2.txt"), Dir("2.1")),
                Dir("3/4", File("3.txt"), File("4.txt")),
                Dir("5/6/7"),
            ],
            id="many_dirs_with_files",
        ),
    ],
)
def test_rm(tmp_path: Path, sources: t.Sequence[t.Union[File, Dir]]):
    """``sh.rm`` removes files and directories alike in a single call."""
    # NOTE: the parametrized sources mix File and Dir, so the annotation allows both
    # (it previously claimed Sequence[File] only).
    base_dir = Dir(tmp_path, *sources)
    base_dir.mkdir()

    for src in sources:
        assert src.path.exists()

    sh.rm(*(src.path for src in sources))

    for src in sources:
        assert not src.path.exists()
48 |
49 |
@parametrize(
    "sources",
    [
        param([Dir("1")], id="one_dir_with_no_files"),
        param([Dir("1", File("1.txt"), File("2.txt"))], id="one_dir_with_files"),
        param(
            [Dir("1"), Dir("2"), Dir("3/4"), Dir("5/6/7")],
            id="many_dirs_with_no_files",
        ),
        param(
            [
                Dir("1", File("1.txt")),
                Dir("2", File("2.txt"), Dir("2.1")),
                Dir("3/4", File("3.txt"), File("4.txt")),
                Dir("5/6/7"),
            ],
            id="many_dirs_with_files",
        ),
    ],
)
def test_rmdir(tmp_path: Path, sources: t.Sequence[Dir]):
    """``sh.rmdir`` removes each given directory, including non-empty ones."""
    # NOTE: every parametrized source here is a Dir, so the annotation says Dir
    # (it previously claimed Sequence[File]).
    base_dir = Dir(tmp_path, *sources)
    base_dir.mkdir()

    for src in sources:
        assert src.path.exists()

    sh.rmdir(*(src.path for src in sources))

    for src in sources:
        assert not src.path.exists()
81 |
82 |
def test_rmdir__raises_on_file(tmp_path: Path):
    """``sh.rmdir`` refuses to remove a regular file."""
    file_path = tmp_path / "test.txt"
    file_path.touch()

    with pytest.raises(NotADirectoryError):
        sh.rmdir(file_path)
89 |
90 |
@parametrize(
    "sources",
    [
        param([File("1.txt")], id="one_file"),
        param([File("1.txt"), File("2.txt"), File("3.txt")], id="many_files"),
    ],
)
def test_rmfile(tmp_path: Path, sources: t.Sequence[File]):
    """``sh.rmfile`` deletes each file it is given."""
    Dir(tmp_path, *sources).mkdir()
    assert all(src.path.exists() for src in sources)

    sh.rmfile(*(src.path for src in sources))

    assert not any(src.path.exists() for src in sources)
109 |
110 |
def test_rmfile__raises_on_dir(tmp_path: Path):
    """``sh.rmfile`` refuses to remove a directory."""
    dir_path = tmp_path / "test"
    dir_path.mkdir()

    with pytest.raises(OSError):
        sh.rmfile(dir_path)
117 |
118 |
@parametrize(
    "rm_fn",
    [
        param(sh.rm),
        param(sh.rmdir),
        param(sh.rmfile),
    ],
)
def test_rm__ignores_missing_sources(tmp_path: Path, rm_fn: t.Callable):
    """All removal helpers silently skip paths that do not exist."""
    missing_paths = (tmp_path / name for name in ("1", "2", "3"))
    rm_fn(*missing_paths)
129 |
--------------------------------------------------------------------------------
/tests/test_chown.py:
--------------------------------------------------------------------------------
1 | import grp
2 | import os
3 | from pathlib import Path
4 | import pwd
5 | import typing as t
6 | from unittest import mock
7 | from uuid import uuid4
8 |
9 | import pytest
10 |
11 | import shelmet as sh
12 |
13 | from .utils import Dir, File
14 |
15 |
16 | parametrize = pytest.mark.parametrize
17 |
18 |
@pytest.fixture()
def os_user() -> pwd.struct_passwd:
    """Return the passwd entry of the user running the test suite."""
    uid = os.getuid()
    return pwd.getpwuid(uid)
22 |
23 |
@pytest.fixture()
def os_group() -> grp.struct_group:
    """Return the group entry of the group running the test suite."""
    gid = os.getgid()
    return grp.getgrgid(gid)
27 |
28 |
@pytest.fixture()
def test_file(tmp_path: Path) -> Path:
    """Create an empty file under ``tmp_path`` and return its path."""
    path = tmp_path / "test_file.txt"
    path.touch()
    return path
34 |
35 |
@pytest.fixture()
def test_dir(tmp_path: Path) -> Path:
    """Create a directory under ``tmp_path`` and return its path."""
    path = tmp_path / "test_dir"
    path.mkdir()
    return path
41 |
42 |
@pytest.fixture()
def mock_os_chown() -> t.Generator[mock.MagicMock, None, None]:
    """Patch ``os.chown`` so ownership changes can be asserted without requiring root."""
    with mock.patch("os.chown") as patched_chown:
        yield patched_chown
47 |
48 |
def chown_call(
    path: t.Union[str, Path, int], user: int = -1, group: int = -1, follow_symlinks: bool = True
) -> tuple:
    """Build the ``mock.call`` expected for a single ``os.chown`` invocation.

    String paths are normalized to ``Path`` to match how the paths are passed along.
    """
    normalized = Path(path) if isinstance(path, str) else path
    return mock.call(normalized, user, group, follow_symlinks=follow_symlinks)
55 |
56 |
def test_chown__changes_ownership_given_uid(tmp_path: Path, mock_os_chown: mock.MagicMock):
    """A numeric ``user`` is forwarded to ``os.chown`` unchanged."""
    expected = chown_call(tmp_path, user=1)
    sh.chown(tmp_path, user=1)
    assert mock_os_chown.call_args == expected
60 |
61 |
def test_chown__changes_ownership_given_gid(tmp_path: Path, mock_os_chown: mock.MagicMock):
    """A numeric ``group`` is forwarded to ``os.chown`` unchanged."""
    expected = chown_call(tmp_path, group=1)
    sh.chown(tmp_path, group=1)
    assert mock_os_chown.call_args == expected
65 |
66 |
def test_chown__changes_ownership_given_user_name(
    tmp_path: Path, mock_os_chown: mock.MagicMock, os_user: pwd.struct_passwd
):
    """A user name is resolved to its uid before calling ``os.chown``."""
    sh.chown(tmp_path, user=os_user.pw_name)
    expected = chown_call(tmp_path, user=os_user.pw_uid)
    assert mock_os_chown.call_args == expected
72 |
73 |
def test_chown__changes_ownership_given_group_name(
    tmp_path: Path, mock_os_chown: mock.MagicMock, os_group: grp.struct_group
):
    """A group name is resolved to its gid before calling ``os.chown``."""
    sh.chown(tmp_path, group=os_group.gr_name)
    expected = chown_call(tmp_path, group=os_group.gr_gid)
    assert mock_os_chown.call_args == expected
79 |
80 |
def test_chown__changes_ownership_given_file_descriptor(mock_os_chown: mock.MagicMock):
    """An integer file descriptor is passed through to ``os.chown`` as-is."""
    sh.chown(1, user=2, group=3)
    expected = chown_call(1, user=2, group=3)
    assert mock_os_chown.call_args == expected
84 |
85 |
def test_chown__changes_ownership_without_following_symlinks(
    tmp_path: Path, mock_os_chown: mock.MagicMock
):
    """``follow_symlinks=False`` is propagated to ``os.chown``."""
    sh.chown(tmp_path, user=1, group=2, follow_symlinks=False)
    expected = chown_call(tmp_path, user=1, group=2, follow_symlinks=False)
    assert mock_os_chown.call_args == expected
91 |
92 |
def test_chown__changes_ownership_recursively(tmp_path: Path, mock_os_chown: mock.MagicMock):
    """With ``recursive=True``, ownership changes apply to the root and every descendant."""
    tree = Dir(
        tmp_path,
        Dir("a", File("1.txt"), File("2.txt"), File("3.txt")),
        Dir(
            "b",
            Dir("c", File("4.txt"), Dir("d", File("5.txt"))),
            File("6.txt"),
            File("7.txt"),
            File("8.txt"),
        ),
        File("9.txt"),
        File("10.txt"),
    )
    tree.mkdir()

    sh.chown(tree.path, user=1, group=2, recursive=True)

    recorded_calls = mock_os_chown.call_args_list
    for path in (tree.path, *sh.walk(tree.path)):
        assert chown_call(path, user=1, group=2) in recorded_calls
113 |
114 |
def test_chown__raises_when_missing_user_and_group():
    """Calling ``sh.chown`` without ``user`` or ``group`` is a ``ValueError``."""
    with pytest.raises(ValueError):
        sh.chown("path")
118 |
119 |
def test_chown__raises_when_user_name_invalid():
    """An unknown user name raises ``LookupError``."""
    bogus_user = uuid4().hex
    with pytest.raises(LookupError):
        sh.chown("path", user=bogus_user)
123 |
124 |
def test_chown__raises_when_group_name_invalid():
    """An unknown group name raises ``LookupError``."""
    bogus_group = uuid4().hex
    with pytest.raises(LookupError):
        sh.chown("path", group=bogus_group)
128 |
--------------------------------------------------------------------------------
/tests/test_unarchive.py:
--------------------------------------------------------------------------------
1 | from contextlib import ExitStack
2 | from pathlib import Path
3 | import typing as t
4 | from unittest import mock
5 |
6 | import pytest
7 | from pytest import param
8 |
9 | import shelmet as sh
10 |
11 | from .utils import (
12 | ARCHIVE_EXTENSIONS,
13 | Dir,
14 | File,
15 | create_archive,
16 | create_archive_source,
17 | create_unsafe_archive,
18 | is_same_dir,
19 | )
20 |
21 |
22 | parametrize = pytest.mark.parametrize
23 |
24 |
def _test_unarchive(tmp_path: Path, archive_file: Path, source: t.Union[File, Dir], ext: str = ""):
    """Archive ``source``, unarchive it into a fresh destination, and verify the round trip."""
    src_dir = create_archive_source(tmp_path, source)
    create_archive(archive_file, src_dir.path, ext=ext)
    dst_path = tmp_path / "dst"

    sh.unarchive(archive_file, dst_path, ext=ext)

    assert dst_path.is_dir()
    assert is_same_dir(src_dir.path, dst_path / "src")
34 |
35 |
@pytest.fixture(params=ARCHIVE_EXTENSIONS)
def arc_ext(request) -> str:
    """Parametrized fixture yielding every supported archive extension."""
    return request.param
40 |
41 |
@pytest.fixture(params=[".tar", ".zip"])
def rep_ext(request) -> str:
    """Parametrized fixture yielding a representative sample of archive extensions."""
    return request.param
46 |
47 |
@parametrize(
    "source", [param(Dir("a", Dir("b"), File("1.txt", text="1"), File("2.txt", text="2")))]
)
def test_unarchive__unarchives(tmp_path: Path, arc_ext: str, source: t.Union[File, Dir]):
    """Archives named with a supported extension are extracted based on that extension."""
    archive_file = tmp_path / f"archive{arc_ext}"
    _test_unarchive(tmp_path, archive_file, source)
54 |
55 |
def test_unarchive__unarchives_with_explicit_extension_format(tmp_path: Path, arc_ext: str):
    """An explicit ``ext`` argument selects the format when the filename has no extension."""
    source = Dir("a", Dir("b"), File("1.txt", text="1"), File("2.txt", text="2"))
    _test_unarchive(tmp_path, tmp_path / "archive", source, ext=arc_ext)
60 |
61 |
def test_unarchive__raises_when_file_extension_not_supported():
    """An unrecognized archive extension raises ``NotImplementedError``."""
    with pytest.raises(NotImplementedError) as exc_info:
        sh.unarchive("test.txt")

    assert "format not supported" in str(exc_info.value)
66 |
67 |
def test_unarchive__raises_when_extraction_fails(tmp_path: Path, rep_ext: str):
    """Failures inside tarfile/zipfile extraction surface as ``sh.ArchiveError``."""
    archive_file = tmp_path / f"archive{rep_ext}"
    src_dir = create_archive_source(tmp_path, File("1.txt", text="1"))
    create_archive(archive_file, src_dir.path)

    with ExitStack() as stack:
        # Break both backends so the test is independent of the archive format used.
        stack.enter_context(mock.patch("tarfile.TarFile.extractall", side_effect=Exception))
        stack.enter_context(mock.patch("zipfile.ZipFile.extractall", side_effect=Exception))

        with pytest.raises(sh.ArchiveError):
            sh.unarchive(archive_file, tmp_path / "dst")
79 |
80 |
def test_unarchive__unarchives_trusted_archive_outside_target(tmp_path: Path):
    """With ``trusted=True``, members pointing outside the target directory are extracted."""
    src_dir = create_archive_source(tmp_path, File("1.txt", text="1"))
    unsafe_archive_file = tmp_path / "unsafe.tar"
    unsafe_dest = tmp_path / "unsafe"
    create_unsafe_archive(unsafe_archive_file, src_dir.path, unsafe_dest)

    dst_path = tmp_path / "dst"
    sh.unarchive(unsafe_archive_file, dst_path, trusted=True)

    # Nothing lands in the requested destination; everything went to the escape path.
    assert not dst_path.exists()
    assert unsafe_dest.exists()
    assert is_same_dir(src_dir.path, unsafe_dest / "src")
94 |
95 |
def test_unarchive__raises_when_untrusted_archive_would_extract_outside_target(
    tmp_path: Path, rep_ext: str
):
    """Untrusted archives whose members escape the target raise ``UnsafeArchiveError``."""
    src_dir = create_archive_source(tmp_path, File("1.txt", text="1"))
    unsafe_archive_file = tmp_path / f"unsafe{rep_ext}"
    unsafe_dest = tmp_path / "unsafe"
    create_unsafe_archive(unsafe_archive_file, src_dir.path, unsafe_dest)

    dst_path = tmp_path / "dst"
    with pytest.raises(sh.UnsafeArchiveError) as exc_info:
        sh.unarchive(unsafe_archive_file, dst_path)

    assert "destination is outside the target directory" in str(exc_info.value)
    # Neither the requested destination nor the escape path may have been created.
    assert not dst_path.exists()
    assert not unsafe_dest.exists()
112 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS ?=
6 | SPHINXBUILD ?= sphinx-build
7 | PAPER ?=
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Internal variables.
12 | PAPEROPT_a4 = -D latex_elements.papersize=a4
13 | PAPEROPT_letter = -D latex_elements.papersize=letter
14 | # $(O) is meant as a shortcut for $(SPHINXOPTS)
15 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(O) $(SOURCEDIR)
16 | # the i18n builder cannot share the environment and doctrees with the others
17 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(O) $(SOURCEDIR)
18 |
.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html to make standalone HTML files"
	@echo "  dirhtml to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle to make pickle files"
	@echo "  json to make JSON files"
	@echo "  htmlhelp to make HTML files and an HTML help project"
	@echo "  qthelp to make HTML files and a qthelp project"
	@echo "  applehelp to make an Apple Help Book"
	@echo "  devhelp to make HTML files and a Devhelp project"
	@echo "  epub to make an epub"
	@echo "  latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  lualatexpdf to make LaTeX files and run them through lualatex"
	@echo "  xelatexpdf to make LaTeX files and run them through xelatex"
	@echo "  text to make text files"
	@echo "  man to make manual pages"
	@echo "  texinfo to make Texinfo files"
	@echo "  info to make Texinfo files and run them through makeinfo"
	@echo "  gettext to make PO message catalogs"
	@echo "  changes to make an overview of all changed/added/deprecated items"
	@echo "  xml to make Docutils-native XML files"
	@echo "  pseudoxml to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck to check all external links for integrity"
	@echo "  doctest to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage to run coverage check of the documentation (if enabled)"
	@echo "  dummy to check syntax errors of document sources"
49 |
# Remove all build output (doctrees included).
.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

# Build LaTeX sources, then compile them to PDF with pdflatex.
.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

# Build LaTeX sources, then compile them to PDF with platex/dvipdfmx (Japanese docs).
.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

# Build LaTeX sources, then compile them to PDF with lualatex.
.PHONY: lualatexpdf
lualatexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through lualatex..."
	$(MAKE) PDFLATEX=lualatex -C $(BUILDDIR)/latex all-pdf
	@echo "lualatex finished; the PDF files are in $(BUILDDIR)/latex."

# Build LaTeX sources, then compile them to PDF with xelatex.
.PHONY: xelatexpdf
xelatexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through xelatex..."
	$(MAKE) PDFLATEX=xelatex -C $(BUILDDIR)/latex all-pdf
	@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."

# Build Texinfo sources, then compile them to Info files with makeinfo.
.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

# Extract translatable messages into PO catalogs.
.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale

# Catch-all target: route all unknown targets to Sphinx
.PHONY: Makefile
%: Makefile
	$(SPHINXBUILD) -b "$@" $(ALLSPHINXOPTS) "$(BUILDDIR)/$@"
97 |
--------------------------------------------------------------------------------
/tasks.py:
--------------------------------------------------------------------------------
1 | """
2 | This module provides the CLI interface for invoke tasks.
3 |
4 | All tasks can be executed from this file's directory using:
5 |
6 |     $ inv <task>
7 |
8 | Where <task> is a function defined below with the @task decorator.
9 | """
10 |
11 | from functools import partial
12 | import os
13 |
14 | from invoke import Exit, UnexpectedExit, run as _run, task
15 |
16 |
17 | PACKAGE_NAME = "shelmet"
18 | PACKAGE_SOURCE = f"src/{PACKAGE_NAME}"
19 | TEST_TARGETS = f"{PACKAGE_SOURCE} tests"
20 | LINT_TARGETS = f"{TEST_TARGETS} tasks.py"
21 | EXIT_EXCEPTIONS = (Exit, UnexpectedExit, SystemExit)
22 |
23 |
24 | # Set pty=True to enable colored output when available.
25 | run = partial(_run, pty=True)
26 |
27 |
@task
def black(ctx, quiet=False):
    """Autoformat code using black."""
    cmd = f"black {LINT_TARGETS}"
    run(cmd, hide=quiet)
32 |
33 |
@task
def isort(ctx, quiet=False):
    """Autoformat Python imports."""
    cmd = f"isort {LINT_TARGETS}"
    run(cmd, hide=quiet)
38 |
39 |
@task
def docformatter(ctx):
    """Autoformat docstrings using docformatter."""
    options = "--in-place --pre-summary-newline --wrap-descriptions 100 --wrap-summaries 100"
    run(f"docformatter -r {LINT_TARGETS} {options}")
47 |
48 |
@task
def fmt(ctx):
    """Autoformat code and docstrings."""
    # Run docformatter first since the other formatters may re-wrap its output.
    formatters = [
        ("docformatter", lambda: docformatter(ctx)),
        ("isort", lambda: isort(ctx, quiet=True)),
        ("black", lambda: black(ctx, quiet=True)),
    ]
    for name, formatter in formatters:
        print(f"Running {name}")
        formatter()
60 |
61 |
@task
def flake8(ctx):
    """Check code for PEP8 violations using flake8."""
    cmd = f"flake8 --format=pylint {LINT_TARGETS}"
    run(cmd)
66 |
67 |
@task
def pylint(ctx):
    """Check code for static errors using pylint."""
    cmd = f"pylint {LINT_TARGETS}"
    run(cmd)
72 |
73 |
@task
def mypy(ctx):
    """Check code using mypy type checker."""
    cmd = f"mypy {LINT_TARGETS}"
    run(cmd)
78 |
79 |
@task
def lint(ctx):
    """Run all linters and fail at the end if any of them failed."""
    linters = {"flake8": flake8, "pylint": pylint, "mypy": mypy}
    failed_names = []

    print(f"Preparing to run linters: {', '.join(linters)}\n")

    for name, linter in linters.items():
        print(f"Running {name}")
        try:
            linter(ctx)
        except EXIT_EXCEPTIONS:
            # Record the failure but keep going so all linters get a chance to report.
            failed_names.append(name)
            print("FAILED\n")
        else:
            print("PASSED\n")

    if failed_names:
        raise Exit(f"ERROR: linters failed: {', '.join(failed_names)}")
102 |
103 |
@task(help={"args": "Override default pytest arguments"})
def test(ctx, args=f"{TEST_TARGETS} --cov={PACKAGE_NAME}"):
    """Run unit tests using pytest."""
    site_packages_dir = os.getenv("TOX_ENV_SITE_PACKAGES_DIR")
    if site_packages_dir:
        # Re-path package source to match tox env so that we generate proper coverage report.
        tox_pkg_src = os.path.join(site_packages_dir, os.path.basename(PACKAGE_SOURCE))
        args = args.replace(PACKAGE_SOURCE, tox_pkg_src)

    run(f"pytest {args}")
114 |
115 |
@task
def ci(ctx):
    """Run linters and tests."""
    steps = [
        ("Building package", build),
        ("Building docs", docs),
        ("Checking linters", lint),
        ("Running unit tests", test),
    ]
    for message, step in steps:
        print(message)
        step(ctx)
130 |
131 |
@task
def docs(ctx, serve=False, bind="127.0.0.1", port=8000):
    """Build docs."""
    run("rm -rf docs/_build")
    run("sphinx-build -q -W -b html docs docs/_build/html")

    if not serve:
        return

    print(f"Serving docs on {bind} port {port} (http://{bind}:{port}/) ...")
    run(f"python -m http.server -b {bind} --directory docs/_build/html {port}", hide=True)
141 |
142 |
@task
def build(ctx):
    """Build Python package."""
    # Clean out previous build artifacts before producing fresh distributions.
    for cmd in ("rm -rf dist build docs/_build", "python -m build"):
        run(cmd)
148 |
149 |
@task
def clean(ctx):
    """Remove temporary files related to development."""
    commands = (
        "find . -type f -name '*.py[cod]' -delete -o -type d -name __pycache__ -delete",
        "rm -rf .tox .coverage .cache .pytest_cache .mypy_cache **/.egg* **/*.egg* dist build",
    )
    for command in commands:
        run(command)
155 |
156 |
@task(pre=[build])
def release(ctx):
    """Release Python package."""
    upload_cmd = "twine upload dist/*"
    run(upload_cmd)
161 |
--------------------------------------------------------------------------------
/DEVGUIDE.rst:
--------------------------------------------------------------------------------
1 | Developer Guide
2 | ===============
3 |
4 | This guide provides an overview of the tooling this project uses and how to execute developer workflows using the developer CLI.
5 |
6 |
7 | Python Environments
8 | -------------------
9 |
10 | This Python project is tested against different Python versions. For local development, it is a good idea to have those versions installed so that tests can be run against each.
11 |
12 | There are libraries that can help with this. Which tools to use is largely a matter of preference, but below are a few recommendations.
13 |
14 | For managing multiple Python versions:
15 |
16 | - pyenv_
17 | - OS package manager (e.g. apt, yum, homebrew, etc)
18 | - Build from source
19 |
20 | For managing Python virtualenvs:
21 |
22 | - pyenv-virtualenv_
23 | - pew_
24 | - python-venv_
25 |
26 |
27 | Tooling
28 | -------
29 |
30 | The following tools are used by this project:
31 |
32 | ============= ========================== ==================
33 | Tool Description Configuration
34 | ============= ========================== ==================
35 | black_ Code formatter ``pyproject.toml``
36 | isort_ Import statement formatter ``setup.cfg``
37 | docformatter_ Docstring formatter ``setup.cfg``
38 | flake8_ Code linter ``setup.cfg``
39 | pylint_ Code linter ``pylintrc``
40 | mypy_ Type checker ``setup.cfg``
41 | pytest_ Test framework ``setup.cfg``
42 | tox_ Test environment manager ``tox.ini``
43 | invoke_ CLI task execution library ``tasks.py``
44 | ============= ========================== ==================
45 |
46 |
47 | Workflows
48 | ---------
49 |
50 | The following workflows use developer CLI commands via `invoke`_ and are defined in ``tasks.py``.
51 |
52 | Autoformat Code
53 | +++++++++++++++
54 |
55 | To run all autoformatters:
56 |
57 | ::
58 |
59 | inv fmt
60 |
61 | This is the same as running each autoformatter individually:
62 |
63 | ::
64 |
65 | inv black
66 | inv isort
67 | inv docformatter
68 |
69 |
70 | Lint
71 | ++++
72 |
73 | To run all linters:
74 |
75 | ::
76 |
77 | inv lint
78 |
79 | This is the same as running each linter individually:
80 |
81 | ::
82 |
83 | inv flake8
84 | inv pylint
85 | inv mypy
86 |
87 |
88 | Test
89 | ++++
90 |
91 | To run all unit tests:
92 |
93 | ::
94 |
95 |     inv test
96 |
97 |
98 | To run the full CI suite (build, docs, linters, and unit tests):
99 |
100 | ::
101 |
102 |     inv ci
103 |
104 |
105 | Test on All Supported Python Versions
106 | +++++++++++++++++++++++++++++++++++++
107 |
108 | To run tests on all supported Python versions:
109 |
110 | ::
111 |
112 | tox
113 |
114 | This requires that the supported versions are available on the PATH.
115 |
116 |
117 | Build Package
118 | +++++++++++++
119 |
120 | To build the package:
121 |
122 | ::
123 |
124 | inv build
125 |
126 | This will output the source and binary distributions under ``dist/``.
127 |
128 |
129 | Build Docs
130 | ++++++++++
131 |
132 | To build documentation:
133 |
134 | ::
135 |
136 | inv docs
137 |
138 | This will output the documentation under ``docs/_build/``.
139 |
140 |
141 | Serve Docs
142 | ++++++++++
143 |
144 | To serve docs over HTTP:
145 |
146 | ::
147 |
148 |    inv docs -s|--serve [-b|--bind 127.0.0.1] [-p|--port 8000]
149 |
150 | inv docs -s
151 | inv docs -s -p 8080
152 | inv docs -s -b 0.0.0.0 -p 8080
153 |
154 |
155 | Delete Build Files
156 | ++++++++++++++++++
157 |
158 | To remove all build and temporary files:
159 |
160 | ::
161 |
162 | inv clean
163 |
164 | This will remove Python bytecode files, egg files, build output folders, caches, and tox folders.
165 |
166 |
167 | Release Package
168 | +++++++++++++++
169 |
170 | To release a new version of the package to https://pypi.org:
171 |
172 | ::
173 |
174 | inv release
175 |
176 |
177 | CI/CD
178 | -----
179 |
180 | This project uses `GitHub Actions <https://github.com/features/actions>`_ for CI/CD:
181 |
182 | - https://github.com/dgilland/shelmet/actions
183 |
184 |
185 | .. _pyenv: https://github.com/pyenv/pyenv
186 | .. _pyenv-virtualenv: https://github.com/pyenv/pyenv-virtualenv
187 | .. _pew: https://github.com/berdario/pew
188 | .. _python-venv: https://docs.python.org/3/library/venv.html
189 | .. _black: https://black.readthedocs.io
190 | .. _isort: https://pycqa.github.io/isort/
191 | .. _docformatter: https://github.com/myint/docformatter
192 | .. _flake8: https://flake8.pycqa.org
193 | .. _pylint: https://www.pylint.org/
194 | .. _mypy: http://mypy-lang.org/
195 | .. _pytest: https://docs.pytest.org
196 | .. _tox: https://tox.readthedocs.io
197 | .. _invoke: http://docs.pyinvoke.org
198 |
--------------------------------------------------------------------------------
/pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 |
3 | # A comma-separated list of package or module names from where C extensions may
4 | # be loaded. Extensions are loading into the active Python interpreter and may
5 | # run arbitrary code.
6 | extension-pkg-whitelist=
7 |
8 | # Specify a score threshold to be exceeded before program exits with error.
9 | fail-under=10
10 |
11 | # Add files or directories to the blacklist. They should be base names, not
12 | # paths.
13 | ignore=CVS
14 |
15 | # Add files or directories matching the regex patterns to the blacklist. The
16 | # regex matches against base names, not paths.
17 | #ignore-patterns=
18 |
19 | # Python code to execute, usually for sys.path manipulation such as
20 | # pygtk.require().
21 | #init-hook=
22 |
23 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
24 | # number of processors available to use.
25 | #jobs=0
26 | # Temporarily disable multiprocessing since it duplicates violations due to bug: https://github.com/PyCQA/pylint/issues/3584
27 | jobs=1
28 |
29 | # Control the amount of potential inferred values when inferring a single
30 | # object. This can help the performance when dealing with large functions or
31 | # complex, nested conditions.
32 | limit-inference-results=100
33 |
34 | # List of plugins (as comma separated values of python module names) to load,
35 | # usually to register additional checkers.
36 | load-plugins=
37 |
38 | # Pickle collected data for later comparisons.
39 | persistent=yes
40 |
41 | # When enabled, pylint would attempt to guess common misconfiguration and emit
42 | # user-friendly hints instead of false-positive error messages.
43 | suggestion-mode=yes
44 |
45 | # Allow loading of arbitrary C extensions. Extensions are imported into the
46 | # active Python interpreter and may run arbitrary code.
47 | unsafe-load-any-extension=no
48 |
49 |
50 | [MESSAGES CONTROL]
51 |
52 | # Only show warnings with the listed confidence levels. Leave empty to show
53 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
54 | confidence=
55 |
56 | # Enable the message, report, category or checker with the given id(s). You can
57 | # either give multiple identifier separated by comma (,) or put this option
58 | # multiple time (only on the command line, not in the configuration file where
59 | # it should appear only once). See also the "--disable" option for examples.
60 | # NOTE: Only check for errors. Style/other conventions are not being checked.
61 | enable=E,F
62 |
63 | # Disable the message, report, category or checker with the given id(s). You
64 | # can either give multiple identifiers separated by comma (,) or put this
65 | # option multiple times (only on the command line, not in the configuration
66 | # file where it should appear only once). You can also use "--disable=all" to
67 | # disable everything first and then reenable specific checks. For example, if
68 | # you want to run only the similarities checker, you can use "--disable=all
69 | # --enable=similarities". If you want to run only the classes checker, but have
70 | # no Warning level messages displayed, use "--disable=all --enable=classes
71 | # --disable=W".
72 | # TODO: drop unsubscriptable-object once https://github.com/PyCQA/pylint/issues/3882 is resolved
73 | disable=C,R,W,unsubscriptable-object
74 |
75 |
76 | [REPORTS]
77 |
78 | # Python expression which should return a score less than or equal to 10. You
79 | # have access to the variables 'error', 'warning', 'refactor', and 'convention'
80 | # which contain the number of messages in each category, as well as 'statement'
81 | # which is the total number of statements analyzed. This score is used by the
82 | # global evaluation report (RP0004).
83 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
84 |
85 | # Template used to display messages. This is a python new-style format string
86 | # used to format the message information. See doc for all details.
87 | #msg-template=
88 |
89 | # Set the output format. Available formats are text, parseable, colorized, json
90 | # and msvs (visual studio). You can also give a reporter class, e.g.
91 | # mypackage.mymodule.MyReporterClass.
92 | output-format=text
93 |
94 | # Tells whether to display a full report or only the messages.
95 | reports=no
96 |
97 | # Activate the evaluation score.
98 | # NOTE: Since we're only checking errors, we don't really need to worry about a score.
99 | score=no
100 |
101 |
102 | ########################################################################################
103 | # NOTE: All other sections of pylintrc have been removed since we are only interested in
104 | # checking pylint errors and not other style conventions. Those could be added from
105 | # https://github.com/PyCQA/pylint/blob/master/pylintrc or by borrowing from
106 | # $ pylint --generate-rcfile
107 | ########################################################################################
108 |
--------------------------------------------------------------------------------
/tests/test_mv.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import os
3 | from pathlib import Path
4 | import typing as t
5 | from unittest import mock
6 |
7 | import pytest
8 | from pytest import param
9 |
10 | import shelmet as sh
11 |
12 | from .utils import Dir, File
13 |
14 |
15 | parametrize = pytest.mark.parametrize
16 |
17 |
18 | @parametrize(
19 | "src_file, dst_file, expected",
20 | [
21 | param(
22 | File("src.txt", text="src"),
23 | File("dst.txt"),
24 | File("dst.txt", text="src"),
25 | id="to_new_file",
26 | ),
27 | param(
28 | File("src.txt", text="src"),
29 | File("dst.txt", text="dst"),
30 | File("dst.txt", text="src"),
31 | id="to_overwite_existing_file",
32 | ),
33 | ],
34 | )
35 | def test_mv__moves_file_to_file(tmp_path: Path, src_file: File, dst_file: File, expected: File):
36 | base_dir = Dir(tmp_path, src_file)
37 | base_dir.mkdir()
38 |
39 | expected_file_path = base_dir.path / expected.path
40 | expected_file_text = expected.text
41 |
42 | dst_target_path = base_dir.path / dst_file.path
43 | if dst_file.text:
44 | dst_target_path.write_text(dst_file.text)
45 |
46 | sh.mv(src_file.path, dst_target_path)
47 |
48 | assert not src_file.path.exists()
49 | assert expected_file_path.exists()
50 | assert expected_file_path.read_text() == expected_file_text
51 |
52 |
53 | @parametrize(
54 | "src_file, dst_dir, expected",
55 | [
56 | param(
57 | File("src.txt", text="src"),
58 | Dir("dst"),
59 | File("dst/src.txt", text="src"),
60 | id="to_new_file_under_destination",
61 | ),
62 | ],
63 | )
64 | def test_mv__moves_file_to_dir(tmp_path: Path, src_file: File, dst_dir: Dir, expected: File):
65 | base_dir = Dir(tmp_path, src_file)
66 | base_dir.mkdir()
67 |
68 | expected_file_path = base_dir.path / expected.path
69 | expected_file_text = expected.text
70 |
71 | dst_target_path = base_dir.path / dst_dir.path
72 | dst_target_path.mkdir()
73 |
74 | sh.mv(src_file.path, dst_target_path)
75 |
76 | assert not src_file.path.exists()
77 | assert expected_file_path.exists()
78 | assert expected_file_path.read_text() == expected_file_text
79 |
80 |
81 | @parametrize(
82 | "src_files, dst, expected",
83 | [
84 | param([File("1.txt", text="1")], "dst", "dst", id="to_new_dir"),
85 | param(
86 | [File("1.txt", text="1")],
87 | Dir("dst", File("2.txt")),
88 | "dst/src",
89 | id="to_new_dir_under_destination",
90 | ),
91 | param(
92 | [File("1.txt", text="1")],
93 | Dir("dst", Dir("src")),
94 | "dst/src",
95 | id="to_new_dir_overwriting_existing_dir_under_destination",
96 | ),
97 | ],
98 | )
99 | def test_mv__moves_dir(
100 | tmp_path: Path, src_files: t.List[File], dst: t.Union[Dir, str], expected: str
101 | ):
102 | src_dir = Dir(tmp_path / "src", *src_files)
103 | src_dir.mkdir()
104 |
105 | if isinstance(dst, Dir):
106 | dst_dir = Dir(tmp_path / dst.path, *dst.files)
107 | dst_dir.mkdir()
108 | else:
109 | dst_dir = Dir(tmp_path / dst)
110 |
111 | sh.mv(src_dir.path, dst_dir.path)
112 |
113 | expected_dst_dir = Dir(tmp_path / expected)
114 | assert not src_dir.path.exists()
115 | assert expected_dst_dir.path.exists()
116 |
117 | for src_file in src_files:
118 | dst_file_path = expected_dst_dir.path / src_file.path.name
119 | assert dst_file_path.read_text() == src_file.text
120 |
121 |
122 | def test_mv__allows_same_file_as_destination(tmp_path: Path):
123 | src_file = File(tmp_path / "src.txt", text="src")
124 | src_file.write()
125 | sh.mv(src_file.path, src_file.path)
126 | assert src_file.path.exists()
127 | assert src_file.path.read_text() == src_file.text
128 |
129 |
130 | def test_mv__works_across_file_systems(tmp_path: Path):
131 | src_file = File(tmp_path / "src.txt", text="src")
132 | src_file.write()
133 |
134 | dst_file = File(tmp_path / "dst.txt")
135 | _os_rename = os.rename
136 |
137 | def mock_os_rename(src, dst):
138 | if str(src) == str(src_file.path) and str(dst) == str(dst_file.path):
139 | raise OSError(errno.EXDEV, "mock error from move across file systems")
140 | return _os_rename(src, dst)
141 |
142 | with mock.patch("os.rename", side_effect=mock_os_rename):
143 | sh.mv(src_file.path, dst_file.path)
144 |
145 | assert dst_file.path.exists()
146 | assert dst_file.path.read_text() == src_file.text
147 | assert not src_file.path.exists()
148 |
149 |
150 | def test_mv__raises_when_source_dir_exists_in_destination_and_is_not_empty(tmp_path: Path):
151 | src_dir = Dir(tmp_path / "src", File("src.txt", text="src"))
152 | src_dir.mkdir()
153 | dst_dir = Dir(tmp_path / "dst", File("src/other.txt", text="other"))
154 | dst_dir.mkdir()
155 |
156 | with pytest.raises(OSError):
157 | sh.mv(src_dir.path, dst_dir.path)
158 |
--------------------------------------------------------------------------------
/tests/test_atomic.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 |
4 | import pytest
5 | from pytest import param
6 |
7 | import shelmet as sh
8 |
9 | from .utils import Dir, File, patch_os_fsync
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
15 | @parametrize(
16 | "opts",
17 | [
18 | param({}),
19 | param({"overwrite": False}),
20 | param({"skip_sync": True}),
21 | param({"overwrite": False, "skip_sync": True}),
22 | ],
23 | )
24 | def test_atomicdir(tmp_path: Path, opts: t.Dict[str, t.Any]):
25 | dir_path = tmp_path / "test"
26 | files = [
27 | File("1.txt", text="1"),
28 | File("2.txt", text="2"),
29 | File("3.txt", text="3"),
30 | File("a/a.txt", text="a"),
31 | File("b/b.txt", text="b"),
32 | File("c/c.txt", text="c"),
33 | ]
34 |
35 | with sh.atomicdir(dir_path, **opts) as stage_path:
36 | assert stage_path.exists()
37 | assert not dir_path.exists()
38 |
39 | stage_dir = Dir(stage_path, *files)
40 | stage_dir.mkdir()
41 |
42 | assert not dir_path.exists()
43 |
44 | assert dir_path.exists()
45 |
46 | created_files = stage_dir.repath(dir_path).files
47 | for file in created_files:
48 | assert file.path.exists()
49 | assert file.path.read_text() == file.text
50 |
51 |
52 | def test_atomicdir__syncs_dir(tmp_path: Path):
53 | dir = tmp_path / "test"
54 |
55 | with patch_os_fsync() as mocked_os_fsync:
56 | with sh.atomicdir(dir):
57 | pass
58 |
59 | assert mocked_os_fsync.called
60 | assert mocked_os_fsync.call_count == 2
61 |
62 |
63 | def test_atomicdir__skips_sync_when_disabled(tmp_path: Path):
64 | dir = tmp_path / "test"
65 |
66 | with patch_os_fsync() as mocked_os_fsync:
67 | with sh.atomicdir(dir, skip_sync=True):
68 | pass
69 |
70 | assert not mocked_os_fsync.called
71 |
72 |
73 | def test_atomicdir__overwrites_when_enabled(tmp_path: Path):
74 | dir = Dir(tmp_path / "test", File("1"), File("2"), File("3"))
75 | dir.mkdir()
76 |
77 | assert list(dir.path.iterdir())
78 |
79 | with sh.atomicdir(dir.path):
80 | pass
81 |
82 | assert not list(dir.path.iterdir())
83 |
84 |
85 | def test_atomicdir__does_not_overwrite_when_disabled(tmp_path: Path):
86 | dir = Dir(tmp_path / "test", File("1"), File("2"), File("3"))
87 | dir.mkdir()
88 |
89 | with pytest.raises(FileExistsError):
90 | with sh.atomicdir(dir.path, overwrite=False):
91 | pass
92 |
93 |
94 | def test_atomicdir__fails_if_path_is_file(tmp_path: Path):
95 | already_exists_file = tmp_path / "test"
96 | already_exists_file.write_text("")
97 |
98 | with pytest.raises(FileExistsError):
99 | with sh.atomicdir(already_exists_file):
100 | pass
101 |
102 |
103 | @parametrize(
104 | "opts",
105 | [
106 | param({}),
107 | param({"overwrite": False}),
108 | param({"skip_sync": True}),
109 | param({"overwrite": False, "skip_sync": True}),
110 | ],
111 | )
112 | def test_atomicfile(tmp_path: Path, opts: t.Dict[str, t.Any]):
113 | file = tmp_path / "test.txt"
114 | text = "test"
115 |
116 | with sh.atomicfile(file, **opts) as fp:
117 | assert not file.exists()
118 | fp.write(text)
119 | assert not file.exists()
120 |
121 | assert file.exists()
122 | assert file.read_text() == text
123 |
124 |
125 | def test_atomicfile__syncs_new_file_and_dir(tmp_path: Path):
126 | file = tmp_path / "test.txt"
127 |
128 | with patch_os_fsync() as mocked_os_fsync:
129 | with sh.atomicfile(file) as fp:
130 | fp.write("test")
131 |
132 | assert mocked_os_fsync.called
133 | assert mocked_os_fsync.call_count == 2
134 |
135 |
136 | def test_atomicfile__skips_sync_when_disabled(tmp_path: Path):
137 | file = tmp_path / "test.txt"
138 |
139 | with patch_os_fsync() as mocked_os_fsync:
140 | with sh.atomicfile(file, skip_sync=True) as fp:
141 | fp.write("test")
142 |
143 | assert not mocked_os_fsync.called
144 |
145 |
146 | def test_atomicfile__does_not_overwrite_when_disabled(tmp_path: Path):
147 | file = tmp_path / "test.txt"
148 | file.write_text("")
149 |
150 | with pytest.raises(FileExistsError):
151 | with sh.atomicfile(file, overwrite=False):
152 | pass
153 |
154 |
155 | def test_atomicfile__fails_if_path_is_dir(tmp_path: Path):
156 | already_exists_dir = tmp_path
157 | with pytest.raises(IsADirectoryError):
158 | with sh.atomicfile(already_exists_dir):
159 | pass
160 |
161 | will_exist_dir = tmp_path / "test"
162 | with pytest.raises(IsADirectoryError):
163 | with sh.atomicfile(will_exist_dir) as fp:
164 | will_exist_dir.mkdir()
165 | fp.write("test")
166 |
167 |
168 | @parametrize(
169 | "mode",
170 | [
171 | param("r"),
172 | param("r+"),
173 | param("rb"),
174 | param("rb+"),
175 | param("a"),
176 | param("a+"),
177 | param("ab"),
178 | param("ab+"),
179 | param("x"),
180 | param("x+"),
181 | param(True),
182 | ],
183 | )
184 | def test_atomicfile__raises_when_mode_invalid(tmp_path: Path, mode: t.Any):
185 | with pytest.raises(ValueError):
186 | with sh.atomicfile(tmp_path / "test.txt", mode):
187 | pass
188 |
--------------------------------------------------------------------------------
/tests/test_chmod.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import stat
3 | import typing as t
4 |
5 | import pytest
6 | from pytest import param
7 |
8 | import shelmet as sh
9 |
10 | from .utils import Dir, File
11 |
12 |
13 | parametrize = pytest.mark.parametrize
14 |
15 |
16 | @pytest.fixture()
17 | def test_file(tmp_path: Path) -> Path:
18 | test_file = tmp_path / "test_file.txt"
19 | test_file.touch()
20 | return test_file
21 |
22 |
23 | @pytest.fixture()
24 | def test_dir(tmp_path: Path) -> Path:
25 | test_dir = tmp_path / "test_dir"
26 | test_dir.mkdir()
27 | return test_dir
28 |
29 |
30 | @parametrize(
31 | "starting_mode, desired_mode, expected_mode",
32 | [
33 | param(None, 0o777, "-rwxrwxrwx"),
34 | param(None, 0o666, "-rw-rw-rw-"),
35 | param(None, "777", "-rwxrwxrwx"),
36 | param(None, "666", "-rw-rw-rw-"),
37 | param(None, "+rwx", "-rwxrwxrwx"),
38 | param(None, "u+rw,go-rwx", "-rw-------"),
39 | param(None, "=rw", "-rw-rw-rw-"),
40 | param(None, "a=rw", "-rw-rw-rw-"),
41 | param(None, "ug=rw,o=r", "-rw-rw-r--"),
42 | param(None, "u=rwx,g=rw,o=r", "-rwxrw-r--"),
43 | param(None, "=rw,o+t", "-rw-rw-rwT"),
44 | param(None, "=rw,a+t", "-rw-rw-rwT"),
45 | param(None, "=rw,+t", "-rw-rw-rwT"),
46 | param(None, "=rw,+x,o+t", "-rwxrwxrwt"),
47 | param(None, "=rw,+x,a+t", "-rwxrwxrwt"),
48 | param(None, "=rw,+x,+t", "-rwxrwxrwt"),
49 | param(None, "+rw,u+s", "-rwSrw-rw-"),
50 | param(None, "+rw,u+x,u+s", "-rwsrw-rw-"),
51 | param(None, "+rw,g+s", "-rw-rwSrw-"),
52 | param(None, "+rw,g+x,g+s", "-rw-rwsrw-"),
53 | param(0o777, "u=rwx,g=rw,o=r", "-rwxrw-r--"),
54 | param(0, "u=rwx,g=rw,o=r", "-rwxrw-r--"),
55 | param(0o600, "g=u", "-rw-rw----"),
56 | param(0o740, "g+u", "-rwxrwx---"),
57 | param(0o700, "o=u", "-rwx---rwx"),
58 | param(0o604, "o+u", "-rw----rw-"),
59 | param(0o060, "u=g", "-rw-rw----"),
60 | param(0o470, "u+g", "-rwxrwx---"),
61 | param(0o070, "o=g", "----rwxrwx"),
62 | param(0o064, "o+g", "----rw-rw-"),
63 | param(0o006, "u=o", "-rw----rw-"),
64 | param(0o407, "u+o", "-rwx---rwx"),
65 | param(0o007, "g=o", "----rwxrwx"),
66 | param(0o046, "g+o", "----rw-rw-"),
67 | param(0o600, "go=u", "-rw-rw-rw-"),
68 | param(0o060, "uo=g", "-rw-rw-rw-"),
69 | param(0o006, "ug=o", "-rw-rw-rw-"),
70 | param(0o600, "a=u", "-rw-rw-rw-"),
71 | param(0o060, "a=g", "-rw-rw-rw-"),
72 | param(0o006, "a=o", "-rw-rw-rw-"),
73 | param(0o777, "o=", "-rwxrwx---"),
74 | ],
75 | )
76 | def test_chmod__sets_file_mode(
77 | test_file: Path,
78 | starting_mode: t.Optional[int],
79 | desired_mode: t.Union[int, str],
80 | expected_mode: str,
81 | ):
82 | if starting_mode is not None:
83 | test_file.chmod(starting_mode)
84 |
85 | sh.chmod(test_file, desired_mode)
86 |
87 | filemode = stat.filemode(test_file.stat().st_mode)
88 | assert filemode == expected_mode
89 |
90 |
91 | def test_chmod__sets_dir_mode(test_dir: Path):
92 | sh.chmod(test_dir, "+rw")
93 |
94 | filemode = stat.filemode(test_dir.stat().st_mode)
95 | assert filemode == "drwxrwxrwx"
96 |
97 |
98 | def test_chmod__accepts_fileno(test_file: Path):
99 | with test_file.open() as fp:
100 | fd = fp.fileno()
101 | sh.chmod(fd, "+rwx")
102 |
103 | filemode = stat.filemode(test_file.stat().st_mode)
104 | assert filemode == "-rwxrwxrwx"
105 |
106 |
107 | @parametrize(
108 | "items, mode, expected_file_mode, expected_dir_mode",
109 | [
110 | param(
111 | [
112 | Dir("a", File("1.txt"), File("2.txt"), File("3.txt")),
113 | Dir(
114 | "b",
115 | Dir("c", File("4.txt"), Dir("d", File("5.txt"))),
116 | File("6.txt"),
117 | File("7.txt"),
118 | File("8.txt"),
119 | ),
120 | File("9.txt"),
121 | File("10.txt"),
122 | ],
123 | "766",
124 | "-rwxrw-rw-",
125 | "drwxrw-rw-",
126 | ),
127 | param(
128 | [
129 | Dir("a", File("1.txt"), File("2.txt"), File("3.txt")),
130 | Dir(
131 | "b",
132 | Dir("c", File("4.txt"), Dir("d", File("5.txt"))),
133 | File("6.txt"),
134 | File("7.txt"),
135 | File("8.txt"),
136 | ),
137 | File("9.txt"),
138 | File("10.txt"),
139 | ],
140 | "go-rwx",
141 | "-rw-------",
142 | "drwx------",
143 | ),
144 | ],
145 | )
146 | def test_chmod__recursively_sets_mode(
147 | tmp_path: Path,
148 | items: t.List[t.Union[Dir, File]],
149 | mode: t.Union[int, str],
150 | expected_file_mode: int,
151 | expected_dir_mode: int,
152 | ):
153 | test_dir = Dir(tmp_path / "test_dir", *items)
154 | test_dir.mkdir()
155 |
156 | sh.chmod(test_dir.path, mode, recursive=True)
157 |
158 | for path in (test_dir.path, *sh.walk(test_dir.path)):
159 | expected_mode = expected_dir_mode if path.is_dir() else expected_file_mode
160 | path_mode = stat.filemode(path.stat().st_mode)
161 |
162 | assert (
163 | path_mode == expected_mode
164 | ), f"Expected mode of {path} to be {expected_mode!r}, not {path_mode!r}"
165 |
166 |
167 | @parametrize(
168 | "mode, exception",
169 | [
170 | param(None, TypeError),
171 | param("", ValueError),
172 | param("u=Z", ValueError),
173 | param("u=rwxg", ValueError),
174 | param("rw", ValueError),
175 | param("u=t", ValueError),
176 | ],
177 | )
178 | def test_chmod__raises_when_mode_invalid(
179 | test_file: Path, mode: t.Any, exception: t.Type[Exception]
180 | ):
181 | with pytest.raises(exception):
182 | sh.chmod(test_file, mode)
183 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 | import os
16 | import sys
17 |
18 |
19 | sys.path.insert(0, os.path.abspath(".."))
20 |
21 |
22 | # -- Project information -----------------------------------------------------
23 |
24 | from email import message_from_string
25 | from pkg_resources import get_distribution
26 |
27 | dist = get_distribution("shelmet")
28 |
29 | if hasattr(dist, "_parsed_pkg_info"):
30 | pkg_info = dict(dist._parsed_pkg_info)
31 | else:
32 | pkg_info = dict(message_from_string("\n".join(dist._get_metadata("PKG-INFO"))))
33 |
34 | project = pkg_info["Name"]
35 | author = pkg_info["Author"]
36 | description = pkg_info["Summary"]
37 | copyright = "2020, " + author
38 |
39 | # The short X.Y version
40 | version = pkg_info["Version"]
41 | # The full version, including alpha/beta/rc tags
42 | release = version
43 |
44 |
45 | # -- General configuration ---------------------------------------------------
46 |
47 | # If your documentation needs a minimal Sphinx version, state it here.
48 | #
49 | # needs_sphinx = '1.0'
50 |
51 | # Add any Sphinx extension module names here, as strings. They can be
52 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
53 | # ones.
54 | extensions = [
55 | "sphinx.ext.autodoc",
56 | "sphinx.ext.doctest",
57 | "sphinx.ext.coverage",
58 | "sphinx.ext.viewcode",
59 | "sphinx.ext.napoleon",
60 | "sphinx_autodoc_typehints",
61 | "autodocsumm",
62 | ]
63 |
64 | # Add any paths that contain templates here, relative to this directory.
65 | templates_path = ["_templates"]
66 |
67 | # The suffix(es) of source filenames.
68 | # You can specify multiple suffix as a list of string:
69 | #
70 | # source_suffix = ['.rst', '.md']
71 | source_parsers = {}
72 | source_suffix = [".rst"]
73 |
74 | # The master toctree document.
75 | master_doc = "index"
76 |
77 | # The language for content autogenerated by Sphinx. Refer to documentation
78 | # for a list of supported languages.
79 | #
80 | # This is also used if you do content translation via gettext catalogs.
81 | # Usually you set "language" from the command line for these cases.
82 | language = "en"
83 |
84 | # List of patterns, relative to source directory, that match files and
85 | # directories to ignore when looking for source files.
86 | # This pattern also affects html_static_path and html_extra_path.
87 | exclude_patterns = ["_build"]
88 |
89 | # The name of the Pygments (syntax highlighting) style to use.
90 | pygments_style = "sphinx"
91 |
92 | # If true, `todo` and `todoList` produce output, else they produce nothing.
93 | todo_include_todos = False
94 |
95 |
96 | # -- Options for HTML output -------------------------------------------------
97 |
98 | # The theme to use for HTML and HTML Help pages. See the documentation for
99 | # a list of builtin themes.
100 | #
101 | html_theme = "furo"
102 |
103 | # Theme options are theme-specific and customize the look and feel of a theme
104 | # further. For a list of options available for each theme, see the
105 | # documentation.
106 | #
107 | # html_theme_options = {}
108 |
109 | # Add any paths that contain custom static files (such as style sheets) here,
110 | # relative to this directory. They are copied after the builtin static files,
111 | # so a file named "default.css" will overwrite the builtin "default.css".
112 | # html_static_path = ['_static']
113 |
114 | # Custom sidebar templates, must be a dictionary that maps document names
115 | # to template names.
116 | #
117 | # The default sidebars (for documents that don't match any pattern) are
118 | # defined by theme itself. Builtin themes are using these templates by
119 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
120 | # 'searchbox.html']``.
121 | #
122 | # html_sidebars = {}
123 |
124 |
125 | # -- Options for HTMLHelp output ---------------------------------------------
126 |
127 | # Output file base name for HTML help builder.
128 | htmlhelp_basename = project + "doc"
129 |
130 |
131 | # -- Options for LaTeX output ------------------------------------------------
132 |
133 | latex_elements = {
134 | # The paper size ('letterpaper' or 'a4paper').
135 | #
136 | #'papersize': 'letterpaper',
137 | # The font size ('10pt', '11pt' or '12pt').
138 | #
139 | #'pointsize': '10pt',
140 | # Additional stuff for the LaTeX preamble.
141 | #
142 | #'preamble': '',
143 | # Latex figure (float) alignment
144 | #
145 | #'figure_align': 'htbp',
146 | }
147 |
148 |
149 | # Grouping the document tree into LaTeX files. List of tuples
150 | # (source start file, target name, title,
151 | # author, documentclass [howto, manual, or own class]).
152 | latex_documents = [
153 | (master_doc, project + ".tex", project + " Documentation", author, "manual"),
154 | ]
155 |
156 |
157 | # -- Options for manual page output ------------------------------------------
158 |
159 | # One entry per manual page. List of tuples
160 | # (source start file, name, description, authors, manual section).
161 | man_pages = [(master_doc, project, project + " Documentation", [author], 1)]
162 |
163 |
164 | # -- Options for Texinfo output ----------------------------------------------
165 |
166 | # Grouping the document tree into Texinfo files. List of tuples
167 | # (source start file, target name, title, author,
168 | # dir menu entry, description, category)
169 | texinfo_documents = [
170 | (
171 | master_doc,
172 | project,
173 | project + " Documentation",
174 | author,
175 | project,
176 | description,
177 | "Miscellaneous",
178 | ),
179 | ]
180 |
181 |
182 | # -- Extension configuration -------------------------------------------------
183 |
184 | # Configure autodocsumm
185 | autodoc_default_options = {"autosummary": True, "autosummary-nosignatures": True}
186 |
--------------------------------------------------------------------------------
/tests/test_backup.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timezone
2 | from pathlib import Path
3 | import re
4 | import typing as t
5 |
6 | import pytest
7 | from pytest import param
8 |
9 | import shelmet as sh
10 |
11 | from .utils import ARCHIVE_EXTENSIONS, Dir, File, is_same_dir, is_same_file
12 |
13 |
14 | parametrize = pytest.mark.parametrize
15 |
16 |
17 | T_WRITE_FILE = t.Callable[[t.Union[str, Path], str], Path]
18 | DEFAULT_TS_PATTERN = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+"
19 | DEFAULT_TS_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z"
20 |
21 |
22 | @pytest.fixture()
23 | def write_file(tmp_path: Path) -> T_WRITE_FILE:
24 | def _write_file(path, contents):
25 | file = tmp_path / path
26 | file.write_text(contents)
27 | return file
28 |
29 | return _write_file
30 |
31 |
32 | @pytest.fixture()
33 | def src_file(tmp_path: Path) -> Path:
34 | src_file = tmp_path / "src_file.txt"
35 | src_file.write_text("test")
36 | return src_file
37 |
38 |
39 | @pytest.fixture(params=ARCHIVE_EXTENSIONS)
40 | def arc_ext(request) -> str:
41 | return request.param
42 |
43 |
44 | def test_backup__backs_up_file(src_file: Path):
45 | backup_file = sh.backup(src_file)
46 | assert backup_file != src_file
47 | assert backup_file.parent == src_file.parent
48 | assert is_same_file(src_file, backup_file)
49 |
50 |
51 | def test_backup__backs_up_directory(tmp_path: Path):
52 | src_dir = Dir(
53 | tmp_path / "src",
54 | File("1.txt", text="1"),
55 | File("2.txt", text="2"),
56 | File("a/a1.txt", text="a1"),
57 | File("a/a2.txt", text="a2"),
58 | )
59 | src_dir.mkdir()
60 |
61 | backup_dir = sh.backup(src_dir.path)
62 | assert backup_dir.is_dir()
63 | assert is_same_dir(src_dir.path, backup_dir)
64 |
65 |
66 | def test_backup__backs_up_directory_as_archive(tmp_path: Path, arc_ext: str):
67 | src_dir = Dir(
68 | tmp_path / "src",
69 | File("1.txt", text="1"),
70 | File("2.txt", text="2"),
71 | File("a/a1.txt", text="a1"),
72 | File("a/a2.txt", text="a2"),
73 | )
74 | src_dir.mkdir()
75 |
76 | backup_archive = sh.backup(src_dir.path, ext=arc_ext)
77 | assert backup_archive.is_file()
78 |
79 | dst_path = tmp_path / "dst"
80 | sh.unarchive(backup_archive, dst_path)
81 | assert is_same_dir(src_dir.path, dst_path / "src")
82 |
83 |
84 | def test_backup__customizes_backup_parent_directory(tmp_path: Path, src_file: Path):
85 | dir = tmp_path / "a" / "b" / "c"
86 | dir.mkdir(parents=True)
87 | backup_file = sh.backup(src_file, dir=dir)
88 |
89 | assert backup_file.parent == dir
90 | assert is_same_file(src_file, backup_file)
91 |
92 |
93 | def test_backup__can_overwrite_destination(src_file: Path):
94 | dst = src_file.parent / f"{src_file.name}~"
95 | dst.touch()
96 |
97 | backup_file = sh.backup(src_file, suffix="~", timestamp=None, overwrite=True)
98 | assert backup_file == dst
99 | assert is_same_file(src_file, backup_file)
100 |
101 |
102 | @pytest.mark.freeze_time
103 | def test_backup__appends_local_timestamp_using_strftime(src_file: Path):
104 | now = datetime.now()
105 | expected_name = f"{src_file.name}.{now.strftime(DEFAULT_TS_FORMAT)}~"
106 |
107 | backup_file = sh.backup(src_file)
108 | assert backup_file.name == expected_name
109 |
110 |
111 | @pytest.mark.freeze_time
112 | def test_backup__appends_utc_timestamp_using_strftime(freezer, src_file: Path):
113 | utcnow = datetime.now(timezone.utc)
114 | expected_name = f"{src_file.name}.{utcnow.strftime(DEFAULT_TS_FORMAT)}~"
115 |
116 | backup_file = sh.backup(src_file, utc=True)
117 | assert backup_file.name == expected_name
118 |
119 |
@parametrize(
    "filename, args, pattern",
    [
        param("test.txt", {}, re.compile(rf"test\.txt\.{DEFAULT_TS_PATTERN}~")),
        param("test.txt", {"timestamp": "%Y-%d-%m"}, re.compile(r"test\.txt\.\d{4}-\d{2}-\d{2}~")),
        param("test.txt", {"epoch": True}, re.compile(r"test\.txt\.\d+\.\d+~")),
        param("test.txt", {"timestamp": None}, re.compile(r"test\.txt~")),
        param(
            "test.txt", {"prefix": "bak."}, re.compile(rf"bak\.test\.txt\.{DEFAULT_TS_PATTERN}~")
        ),
        # Fixed: the "." in ".bak" was previously unescaped and matched any character.
        param("test.txt", {"suffix": ".bak"}, re.compile(rf"test\.txt\.{DEFAULT_TS_PATTERN}\.bak")),
        param("test.txt", {"suffix": ""}, re.compile(rf"test\.txt\.{DEFAULT_TS_PATTERN}")),
        param("test.txt", {"hidden": True}, re.compile(rf"\.test\.txt\.{DEFAULT_TS_PATTERN}~")),
        param(
            "test.txt",
            {"hidden": True, "prefix": "."},
            re.compile(rf"\.test\.txt\.{DEFAULT_TS_PATTERN}~"),
        ),
        param(
            "test.txt",
            {"hidden": True, "prefix": "BACKUP_", "suffix": ".BAK"},
            re.compile(rf"\.BACKUP_test\.txt\.{DEFAULT_TS_PATTERN}\.BAK"),
        ),
        param(
            "test.txt",
            {"namer": lambda src: src.parent / f"{src.name}.bak"},
            re.compile(r"test\.txt\.bak"),
        ),
    ],
)
def test_backup__customizes_filename(
    write_file: T_WRITE_FILE, filename: str, args: t.Dict[str, t.Any], pattern: t.Pattern
):
    """prefix/suffix/hidden/timestamp/namer options shape the backup filename."""
    src_file = write_file(filename, "test")
    backup_file = sh.backup(src_file, **args)

    assert pattern.fullmatch(backup_file.name), (
        f"Backup of {src_file.name!r} with name {backup_file.name!r}"
        f" did not match pattern {pattern!r}"
    )
160 |
161 |
@parametrize(
    "timestamp",
    [param(True), param(False), param(b""), param(10), param({}), param([])],
)
def test_backup__raises_when_timestamp_is_invalid(src_file: Path, timestamp: t.Any):
    """Values that are neither a strftime string nor None are rejected."""
    with pytest.raises(ValueError):
        sh.backup(src_file, timestamp=timestamp)


def test_backup__raises_when_destination_is_same_as_source(src_file):
    """A backup must never resolve to its own source path."""
    with pytest.raises(FileExistsError):
        sh.backup(src_file, prefix="", suffix="", timestamp=None)


def test_backup__raises_when_destination_exists(src_file):
    """Without overwrite=True, an existing destination is an error."""
    dst = src_file.parent / f"{src_file.name}~"
    dst.touch()

    with pytest.raises(FileExistsError):
        sh.backup(src_file, suffix="~", timestamp=None)
188 |
--------------------------------------------------------------------------------
/tests/test_read.py:
--------------------------------------------------------------------------------
1 | import os
2 | from pathlib import Path
3 | import typing as t
4 | from uuid import uuid4
5 |
6 | import pytest
7 | from pytest import param
8 |
9 | import shelmet as sh
10 |
11 |
12 | parametrize = pytest.mark.parametrize
13 |
14 |
def lines_as_text(items: t.Iterable) -> t.List[str]:
    """Render *items* joined by the platform newline, split back into lines
    with their endings kept."""
    joined = os.linesep.join(map(str, items))
    return joined.splitlines(keepends=True)


def lines_as_bytes(items: t.Iterable) -> t.List[bytes]:
    """Binary variant of :func:`lines_as_text`."""
    joined = os.linesep.join(map(str, items))
    return joined.encode().splitlines(keepends=True)


def repeat_items(items: t.List[str], times: int) -> t.List[str]:
    """Return a list where each individual item is repeated *times* times."""
    return [times * item for item in items]
27 |
28 |
@pytest.fixture()
def write_text(tmp_path: Path) -> t.Callable[[str], Path]:
    """Factory fixture: write text to a uniquely named file under tmp_path."""

    def _write_text(content: str) -> Path:
        target = tmp_path / uuid4().hex
        target.write_text(content)
        return target

    return _write_text


@pytest.fixture()
def write_bytes(tmp_path: Path) -> t.Callable[[bytes], Path]:
    """Factory fixture: write bytes to a uniquely named file under tmp_path."""

    def _write_bytes(content: bytes) -> Path:
        target = tmp_path / uuid4().hex
        target.write_bytes(content)
        return target

    return _write_bytes
47 |
48 |
@pytest.fixture(params=["r", "rt", "tr", "rb", "br"])
def valid_read_only_mode(request):
    """All read-only mode spellings that sh.read* helpers accept."""
    return request.param


@pytest.fixture(params=["r+", "rb+", "w", "w+", "wb", "wb+", "x", "a", "a+"])
def invalid_read_only_mode(request):
    """Mode strings that permit writing and must be rejected by sh.read*."""
    return request.param
77 |
78 |
def test_read__returns_text_file_contents(write_text: t.Callable[[str], Path]):
    """sh.read defaults to text mode and returns the whole file."""
    test_file = write_text("some text")
    assert sh.read(test_file) == "some text"


def test_read__returns_binary_file_contents(write_bytes: t.Callable[[bytes], Path]):
    """sh.read in "rb" mode returns raw bytes."""
    test_file = write_bytes(b"some data")
    assert sh.read(test_file, "rb") == b"some data"


def test_read__accepts_valid_mode(tmp_path: Path, valid_read_only_mode):
    """Every read-only mode spelling works without error."""
    test_file = tmp_path / "test_file"
    test_file.touch()
    sh.read(test_file, valid_read_only_mode)


def test_read__raises_when_mode_invalid(
    write_text: t.Callable[[str], Path], invalid_read_only_mode: str
):
    """Writable modes raise ValueError."""
    test_file = write_text("test")
    with pytest.raises(ValueError):
        sh.read(test_file, invalid_read_only_mode)


def test_readtext__returns_text_file_contents(write_text: t.Callable[[str], Path]):
    """sh.readtext reads the whole file as text."""
    test_file = write_text("some text")
    assert sh.readtext(test_file) == "some text"


def test_readbytes__returns_binary_file_contents(write_bytes: t.Callable[[bytes], Path]):
    """sh.readbytes reads the whole file as bytes."""
    test_file = write_bytes(b"some data")
    assert sh.readbytes(test_file) == b"some data"
115 |
116 |
@parametrize(
    "chunks, size, sep",
    [
        param(repeat_items(["1", "2", "3", "4"], 1), 1, None),
        param(repeat_items(["1", "2", "3", "4"], 2), 2, None),
        param(repeat_items(["1", "2", "3", "4"], 50), 50, None),
        param(repeat_items(["1", "2", "3", "4"], 1), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 1), 4, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 2, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 4, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 5, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 25, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 1, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 25, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 50, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 100, ";|&"),
    ],
)
def test_readchunks__yields_text_chunks_by_size(
    write_text: t.Callable[[str], Path], chunks: list, size: int, sep: str
):
    """sh.readchunks yields exactly the expected text chunks, in order."""
    content = (sep or "").join(chunks)
    test_file = write_text(content)

    # Compare whole lists so a missing or extra chunk fails the test; the
    # previous enumerate-loop silently passed when nothing was yielded.
    assert list(sh.readchunks(test_file, size=size, sep=sep)) == chunks
145 |
146 |
@parametrize(
    "chunks, size, sep",
    [
        param(repeat_items(["1", "2", "3", "4"], 1), 1, None),
        param(repeat_items(["1", "2", "3", "4"], 2), 2, None),
        param(repeat_items(["1", "2", "3", "4"], 50), 50, None),
        param(repeat_items(["1", "2", "3", "4"], 1), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 1), 4, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 2, "|"),
        param(repeat_items(["1", "2", "3", "4"], 2), 4, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 1, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 5, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 25, "|"),
        param(repeat_items(["1", "2", "3", "4"], 50), 1, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 25, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 50, ";|&"),
        param(repeat_items(["1", "2", "3", "4"], 50), 100, ";|&"),
    ],
)
def test_readchunks__yields_binary_chunks_by_size(
    write_bytes: t.Callable[[bytes], Path], chunks: list, size: int, sep: str
):
    """sh.readchunks in binary mode yields exactly the expected chunks."""
    content = (sep or "").join(chunks)
    test_file = write_bytes(content.encode())
    bin_sep: t.Optional[bytes] = sep.encode() if sep else None

    # Full-list comparison also catches missing/extra chunks, which the
    # previous enumerate-loop could not.
    actual = [chunk.decode() for chunk in sh.readchunks(test_file, "rb", size=size, sep=bin_sep)]
    assert actual == chunks


def test_readchunks__raises_when_mode_invalid(
    write_text: t.Callable[[str], Path], invalid_read_only_mode: str
):
    """Writable modes raise ValueError."""
    test_file = write_text("test")
    with pytest.raises(ValueError):
        sh.readchunks(test_file, invalid_read_only_mode)
184 |
185 |
def test_readlines__yields_each_line_from_text_file(write_text: t.Callable[[str], Path]):
    """sh.readlines yields exactly the file's lines, endings included."""
    lines = lines_as_text(range(10))
    test_file = write_text("".join(lines))

    # Full-list comparison fails on missing/extra lines; the previous
    # enumerate-loop passed even if nothing was yielded.
    assert list(sh.readlines(test_file)) == lines


def test_readlines__yields_each_line_from_binary_file(write_bytes: t.Callable[[bytes], Path]):
    """sh.readlines in "rb" mode yields exactly the file's byte lines."""
    lines = lines_as_bytes(range(10))
    test_file = write_bytes(b"".join(lines))

    assert list(sh.readlines(test_file, "rb")) == lines


def test_readlines__raises_when_mode_invalid(
    write_text: t.Callable[[str], Path], invalid_read_only_mode: str
):
    """Writable modes raise ValueError."""
    test_file = write_text("test")
    with pytest.raises(ValueError):
        sh.readlines(test_file, invalid_read_only_mode)
208 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from contextlib import contextmanager
2 | import filecmp
3 | from pathlib import Path
4 | import tarfile
5 | import typing as t
6 | from unittest import mock
7 | import zipfile
8 |
9 | import shelmet as sh
10 |
11 |
# fcntl is POSIX-only; on platforms without it the hasattr() check below
# simply evaluates to False.
try:
    import fcntl
except ImportError:  # pragma: no cover
    fcntl = None  # type: ignore


# True where a full disk flush goes through fcntl(F_FULLFSYNC) (e.g. macOS)
# rather than os.fsync. Used by patch_os_fsync() to pick the patch target.
USES_FCNTL_FULLSYNC = hasattr(fcntl, "F_FULLFSYNC")

# Maps recognized tar extensions to the tarfile compression token used in the
# open() mode string (e.g. "w:gz"); "" means an uncompressed tar.
TAR_COMPRESSIONS = {
    ".tar": "",
    ".tar.gz": "gz",
    ".tgz": "gz",
    ".taz": "gz",
    ".tar.bz2": "bz2",
    ".tb2": "bz2",
    ".tbz": "bz2",
    ".tbz2": "bz2",
    ".tz2": "bz2",
    ".tar.xz": "xz",
    ".txz": "xz",
}
# List order matters: extract_archive() matches extensions first-to-last.
TAR_EXTENSIONS = list(TAR_COMPRESSIONS.keys())
# Zip-container formats, including office/java formats that are zips inside.
ZIP_EXTENSIONS = [
    ".docx",
    ".egg",
    ".jar",
    ".odg",
    ".odp",
    ".ods",
    ".odt",
    ".pptx",
    ".xlsx",
    ".zip",
]
ARCHIVE_EXTENSIONS = TAR_EXTENSIONS + ZIP_EXTENSIONS
47 |
48 |
class File:
    """Declarative description of a file used to build test fixtures on disk."""

    def __init__(self, path: t.Union[Path, str], text: t.Optional[str] = None, size: int = 0):
        self.path = Path(path)
        self.text = text
        self.size = size

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(path={self.path!r})"

    def clone(self) -> "File":
        """Return an independent copy of this file description."""
        return self.__class__(self.path, text=self.text, size=self.size)

    def write(self) -> None:
        """Create the file on disk, creating parent directories as needed."""
        self.path.parent.mkdir(parents=True, exist_ok=True)

        if self.text is not None:
            self.path.write_text(self.text)
            return
        if self.size > 0:
            # Seek-and-write a single byte to produce a file of exactly
            # self.size bytes without materializing the content.
            with self.path.open("wb") as handle:
                handle.seek(self.size - 1)
                handle.write(b"\0")
            return
        self.path.touch()
72 |
73 |
class Dir:
    """Declarative description of a directory tree for building test fixtures."""

    def __init__(self, path: t.Union[Path, str], *items: t.Union[File, "Dir"]):
        self.path = Path(path)
        self.items = list(items)

    @property
    def files(self) -> t.List[File]:
        """Direct child File entries."""
        return [entry for entry in self.items if isinstance(entry, File)]

    @property
    def dirs(self) -> t.List["Dir"]:
        """Direct child Dir entries."""
        return [entry for entry in self.items if isinstance(entry, Dir)]

    def __repr__(self) -> str:
        name = self.__class__.__name__
        return f"{name}(path={self.path!r}, files={self.files}, dirs={self.dirs})"

    def clone(self) -> "Dir":
        """Deep-copy this directory description."""
        return self.__class__(self.path, *(entry.clone() for entry in self.items))

    def mkdir(self) -> None:
        """Materialize the tree on disk.

        NOTE: child item paths are rebased under ``self.path`` in place, so a
        given description should only be materialized once.
        """
        self.path.mkdir(parents=True, exist_ok=True)

        for sub in self.dirs:
            sub.path = self.path / sub.path
            sub.mkdir()

        for entry in self.files:
            entry.path = self.path / entry.path
            entry.write()

    def repath(self, root: Path) -> "Dir":
        """Return a copy of this tree with paths re-rooted at *root*."""
        moved = []
        for entry in self.items:
            target = root / entry.path.relative_to(self.path)
            if isinstance(entry, File):
                entry = entry.clone()
                entry.path = target
            else:
                entry = entry.repath(target)
            moved.append(entry)
        return Dir(root, *moved)
117 |
118 |
@contextmanager
def patch_os_fsync() -> t.Iterator[mock.MagicMock]:
    """Patch whichever call this platform uses to flush files to disk.

    macOS flushes via fcntl(F_FULLFSYNC) rather than os.fsync, so the patch
    target depends on USES_FCNTL_FULLSYNC.
    """
    target = "fcntl.fcntl" if USES_FCNTL_FULLSYNC else "os.fsync"
    with mock.patch(target) as mocked:
        yield mocked
128 |
129 |
def is_same_file(file1: Path, file2: Path) -> bool:
    """Return whether two files compare equal via filecmp (shallow compare)."""
    return filecmp.cmp(file1, file2)


def is_same_dir(dir1: Path, dir2: Path) -> bool:
    """Return whether two directory trees have identical structure and files."""
    return _is_same_dir(filecmp.dircmp(dir1, dir2))


def _is_same_dir(dcmp: filecmp.dircmp) -> bool:
    """Recursively check a dircmp result for any differences."""
    if dcmp.diff_files or dcmp.left_only or dcmp.right_only:
        return False
    return all(_is_same_dir(sub) for sub in dcmp.subdirs.values())
147 |
148 |
def is_subdict(subset: dict, superset: dict) -> bool:
    """Return whether one dict is recursively contained within another."""
    if isinstance(subset, dict):
        for key, val in subset.items():
            if key not in superset or not is_subdict(val, superset[key]):
                return False
        return True

    # Lists only match when lengths are equal and elements match pairwise.
    if isinstance(subset, list) and isinstance(superset, list) and len(subset) == len(superset):
        return all(is_subdict(item, superset[i]) for i, item in enumerate(subset))

    # Anything else is a plain value compared by equality.
    return subset == superset
161 |
162 |
def extract_archive(archive_file: Path, dst: Path, ext: str = "") -> None:
    """Extract *archive_file* into *dst*, dispatching on its extension."""
    if not ext:
        # Use the first known extension the filename ends with; leave ext
        # empty (and fall through to ValueError) when none matches.
        for candidate in list(TAR_EXTENSIONS) + list(ZIP_EXTENSIONS):
            if archive_file.name.endswith(candidate):
                ext = candidate
                break

    if ext in TAR_EXTENSIONS:
        _extract_tar(archive_file, dst, ext=ext)
    elif ext in ZIP_EXTENSIONS:
        _extract_zip(archive_file, dst)
    else:
        raise ValueError(f"unrecognized archive extension: {ext}")


def create_archive(archive_file: Path, *sources: Path, ext: str = "") -> None:
    """Create an archive of *sources*, dispatching on extension."""
    ext = ext or "".join(archive_file.suffixes)

    if ext in TAR_EXTENSIONS:
        _create_tar(archive_file, *sources, ext=ext)
    elif ext in ZIP_EXTENSIONS:
        _create_zip(archive_file, *sources)
    else:
        raise ValueError(f"unrecognized archive extension: {ext}")


def create_unsafe_archive(archive_file: Path, *sources: Path) -> None:
    """Create an archive whose members would escape the extraction directory.

    NOTE(review): despite the ``*sources`` signature, the underlying helpers
    take exactly ``(src, parent_path)`` — verify call sites pass two values.
    """
    ext = "".join(archive_file.suffixes)

    if ext in TAR_EXTENSIONS:
        _create_unsafe_tar(archive_file, *sources)
    elif ext in ZIP_EXTENSIONS:
        _create_unsafe_zip(archive_file, *sources)
    else:
        raise ValueError(f"unrecognized archive extension: {ext}")
203 |
204 |
def _extract_tar(archive_file: Path, dst: Path, ext: str = "") -> None:
    """Extract a (possibly compressed) tar archive into *dst*."""
    ext = ext or "".join(archive_file.suffixes)
    read_mode = f"r:{TAR_COMPRESSIONS[ext]}"
    with tarfile.open(archive_file, read_mode, format=tarfile.PAX_FORMAT) as arc:
        arc.extractall(dst)
212 |
213 |
214 | def _extract_zip(archive_file: Path, dst: Path) -> None:
215 | with zipfile.ZipFile(archive_file) as arc:
216 | arc.extractall(dst)
217 |
218 |
def _create_tar(archive_file: Path, *sources: Path, ext: str = "") -> None:
    """Create a (possibly compressed) tar archive containing *sources*."""
    ext = ext or "".join(archive_file.suffixes)
    write_mode = f"w:{TAR_COMPRESSIONS[ext]}"
    with tarfile.open(archive_file, write_mode, format=tarfile.PAX_FORMAT) as archive:
        for source in sources:
            archive.add(source, arcname=source.name)
227 |
228 |
def _create_zip(archive_file: Path, *sources: Path) -> None:
    """Create a zip archive containing each source tree, with member paths
    relative to each source's parent directory."""
    with zipfile.ZipFile(archive_file, "w") as archive:
        for src in sources:
            # cd so archive.write() records paths relative to src's parent.
            with sh.cd(src.parent):
                items = [src.relative_to(src.parent), *sh.walk(src.name)]
                for item in items:
                    archive.write(item)


def _create_unsafe_tar(archive_file: Path, src: Path, parent_path: Path) -> None:
    """Create a tar whose member names are prefixed with *parent_path*, so
    they would extract outside the destination (e.g. absolute or ".." paths).

    NOTE(review): addfile() is called without a fileobj, so only member
    headers are written — presumably the sources are empty files; verify.
    """
    ext = "".join(archive_file.suffixes)
    compression = TAR_COMPRESSIONS[ext]
    mode = f"w:{compression}"
    with tarfile.open(archive_file, mode, format=tarfile.PAX_FORMAT) as archive:
        with sh.cd(src.parent):
            items = [src.relative_to(src.parent)] + list(sh.walk(src.name))
            for item in items:
                member = archive.gettarinfo(str(item))
                # Rewrite the member name so it escapes the extraction root.
                member.name = str(parent_path / member.name)
                archive.addfile(member)


def _create_unsafe_zip(archive_file: Path, src: Path, parent_path: Path) -> None:
    """Zip counterpart of _create_unsafe_tar: member filenames are prefixed
    with *parent_path* so they would extract outside the destination."""
    with zipfile.ZipFile(archive_file, "w") as archive:
        with sh.cd(src.parent):
            items = [src.relative_to(src.parent)] + list(sh.walk(src.name))
            for item in items:
                member = zipfile.ZipInfo.from_file(str(item))
                member.filename = str(parent_path / member.filename)
                data = item.read_text() if item.is_file() else ""
                archive.writestr(member, data=data)
260 |
261 |
def create_archive_source(tmp_path: Path, *sources: t.Union[Dir, File]) -> Dir:
    """Clone *sources* into a freshly created ``src`` directory under tmp_path."""
    # Clone first so the callers' descriptions are not mutated by the
    # path-rebasing that Dir.mkdir performs.
    cloned = tuple(source.clone() for source in sources)
    src_dir = Dir(tmp_path / "src", *cloned)
    src_dir.mkdir()
    return src_dir
267 |
--------------------------------------------------------------------------------
/tests/test_write.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import typing as t
3 | from unittest import mock
4 |
5 | import pytest
6 | from pytest import param
7 |
8 | import shelmet as sh
9 |
10 |
11 | parametrize = pytest.mark.parametrize
12 |
13 |
@pytest.fixture(
    params=["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx", "wb", "bw", "ab", "ba", "xb", "bx"]
)
def valid_write_only_mode(request) -> str:
    """Every write-only mode spelling (text and binary) that sh.write accepts."""
    return request.param


@pytest.fixture(params=["wb", "bw", "ab", "ba", "xb", "bx"])
def valid_write_only_bin_mode(request) -> str:
    """Binary write-only mode spellings."""
    return request.param


@pytest.fixture(params=["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"])
def valid_write_only_text_mode(request) -> str:
    """Text write-only mode spellings."""
    return request.param
66 |
67 |
@pytest.fixture(params=["r", "r+", "rb", "rb+", "w+", "wb+", "a+", "ab+"])
def invalid_write_only_mode(request) -> str:
    """Modes that permit reading and must be rejected by the write helpers."""
    return request.param


@pytest.fixture(
    params=["r", "r+", "rb", "rb+", "w+", "wb+", "a+", "ab+", "w", "wt", "a", "x"]
)
def invalid_write_only_bin_mode(request) -> str:
    """Readable or text modes that sh.writebytes must reject."""
    return request.param


@pytest.fixture(
    params=["r", "r+", "rb", "rb+", "w+", "wb+", "a+", "ab+", "wb", "ab", "xb"]
)
def invalid_write_only_text_mode(request) -> str:
    """Readable or binary modes that sh.writetext must reject."""
    return request.param
121 |
122 |
@pytest.fixture()
def mock_atomicfile():
    """Patch shelmet's atomicfile so atomic writes can be inspected."""
    with mock.patch("shelmet.fileio.atomicfile") as patched:
        yield patched
127 |
128 |
@parametrize(
    "mode, contents",
    [
        param("w", "abcd"),
        param("a", "abcd"),
        param("x", "abcd"),
        param("wb", b"abcd"),
        param("ab", b"abcd"),
        param("xb", b"abcd"),
    ],
)
def test_write(tmp_path: Path, mode: str, contents: t.Union[str, bytes]):
    """sh.write stores the contents exactly for every write-only mode."""
    target = tmp_path / "test_file"
    sh.write(target, contents, mode)

    raw = target.read_bytes()
    expected = contents.encode() if isinstance(contents, str) else contents
    assert raw == expected


def test_write__accepts_valid_mode(tmp_path: Path, valid_write_only_mode: str):
    """Every valid write-only mode spelling is accepted without error."""
    empty: t.Union[str, bytes] = b"" if "b" in valid_write_only_mode else ""
    sh.write(tmp_path / "test_file", empty, valid_write_only_mode)
154 |
155 |
@parametrize(
    "mode, contents, expected_mode, expected_overwrite",
    [
        param("w", "test", "w", True),
        param("wb", b"test", "wb", True),
        param("x", "test", "w", False),
        param("xb", b"test", "wb", False),
    ],
)
def test_write__can_atomically_create_file(
    tmp_path: Path,
    mock_atomicfile: mock.MagicMock,
    mode: str,
    contents: t.Union[str, bytes],
    expected_mode: str,
    expected_overwrite: bool,
):
    """atomic=True delegates to atomicfile with the translated mode/overwrite."""
    target = tmp_path / "test_file"
    sh.write(target, contents, mode, atomic=True)

    assert mock_atomicfile.called
    expected_call = mock.call(target, expected_mode, overwrite=expected_overwrite)
    assert mock_atomicfile.call_args == expected_call

    # Re-enter the mocked context manager to inspect what was written to it.
    args, kwargs = mock_atomicfile.call_args
    with mock_atomicfile(*args, **kwargs) as fp:
        assert fp.write.called
        assert fp.write.call_args == mock.call(contents)


def test_write__raises_when_mode_invalid(tmp_path: Path, invalid_write_only_mode: str):
    """Readable modes raise ValueError."""
    with pytest.raises(ValueError):
        sh.write(tmp_path / "test_file", "", invalid_write_only_mode)
189 |
190 |
@parametrize(
    "mode, contents",
    [
        param("wb", b"abcd"),
        param("ab", b"abcd"),
        param("xb", b"abcd"),
    ],
)
def test_writebytes(tmp_path: Path, mode: str, contents: bytes):
    """sh.writebytes stores the exact bytes for every binary write mode."""
    file = tmp_path / "test_file"
    sh.writebytes(file, contents, mode)

    actual_contents = file.read_bytes()
    assert contents == actual_contents


def test_writebytes__accepts_valid_mode(tmp_path: Path, valid_write_only_bin_mode: str):
    """Every binary write-only mode spelling is accepted.

    Fixed: this previously called ``sh.write``, leaving ``sh.writebytes``'s
    own mode validation untested.
    """
    sh.writebytes(tmp_path / "test_file", b"", valid_write_only_bin_mode)


def test_writebytes__raises_when_mode_invalid(tmp_path: Path, invalid_write_only_bin_mode: str):
    """Readable or text modes raise ValueError."""
    file = tmp_path / "test_file"
    with pytest.raises(ValueError):
        sh.writebytes(file, b"", invalid_write_only_bin_mode)
215 |
216 |
@parametrize(
    "mode, contents",
    [
        param("w", "abcd"),
        param("a", "abcd"),
        param("x", "abcd"),
    ],
)
def test_writetext(tmp_path: Path, mode: str, contents: str):
    """sh.writetext stores the exact text for every text write mode."""
    file = tmp_path / "test_file"
    sh.writetext(file, contents, mode)

    actual_contents = file.read_text()
    assert contents == actual_contents


def test_writetext__accepts_valid_mode(tmp_path: Path, valid_write_only_text_mode: str):
    """Every text write-only mode spelling is accepted.

    Fixed: this previously called ``sh.write``, leaving ``sh.writetext``'s
    own mode validation untested.
    """
    sh.writetext(tmp_path / "test_file", "", valid_write_only_text_mode)


def test_writetext__raises_when_mode_invalid(tmp_path: Path, invalid_write_only_text_mode: str):
    """Readable or binary modes raise ValueError."""
    file = tmp_path / "test_file"
    with pytest.raises(ValueError):
        sh.writetext(file, "", invalid_write_only_text_mode)
241 |
242 |
@parametrize(
    "mode, items",
    [
        # Exact duplicate rows from the original parameter list were removed;
        # each mode/items combination appeared twice.
        param("w", ["a", "b", "c"]),
        param("a", ["a", "b", "c"]),
        param("x", ["a", "b", "c"]),
        param("wb", [b"a", b"b", b"c"]),
        param("ab", [b"a", b"b", b"c"]),
        param("xb", [b"a", b"b", b"c"]),
    ],
)
def test_writelines(tmp_path: Path, mode: str, items: t.List[t.AnyStr]):
    """sh.writelines writes each item on its own line."""
    file = tmp_path / "test_file"
    sh.writelines(file, items, mode)

    read_mode = "rb" if "b" in mode else "r"
    with open(file, read_mode) as fp:
        lines = fp.readlines()

    # Assert the count too so missing/extra lines fail the test.
    assert len(lines) == len(items)
    for i, line in enumerate(lines):
        assert items[i] == line.strip()


def test_writelines__accepts_valid_mode(tmp_path: Path, valid_write_only_mode: str):
    """Every write-only mode spelling is accepted."""
    contents: t.Union[str, bytes] = b"" if "b" in valid_write_only_mode else ""
    sh.writelines(tmp_path / "test_file", [contents], valid_write_only_mode)  # type: ignore
278 |
279 |
@parametrize(
    "mode, items, ending",
    [
        # Exact duplicate rows from the original parameter list were removed;
        # each mode/items/ending combination appeared twice.
        param("w", ["a", "b", "c"], "|"),
        param("a", ["a", "b", "c"], "|"),
        param("x", ["a", "b", "c"], "|"),
        param("wb", [b"a", b"b", b"c"], b"|"),
        param("ab", [b"a", b"b", b"c"], b"|"),
        param("xb", [b"a", b"b", b"c"], b"|"),
    ],
)
def test_writelines__uses_custom_ending(
    tmp_path: Path, mode: str, items: t.List[t.AnyStr], ending: t.AnyStr
):
    """``ending=`` joins the written items with a custom terminator."""
    file = tmp_path / "test_file"
    sh.writelines(file, items, mode, ending=ending)

    read_mode = "rb" if "b" in mode else "r"
    with open(file, read_mode) as fp:
        contents = fp.read()

    actual_items = contents.rstrip(ending).split(ending)
    # Assert the count too so missing/extra items fail the test.
    assert len(actual_items) == len(items)
    for i, actual_item in enumerate(actual_items):
        assert items[i] == actual_item
313 |
314 |
@parametrize(
    "mode, items, expected_mode, expected_overwrite",
    [
        param("w", ["test"], "w", True),
        param("wb", [b"test"], "wb", True),
        param("x", ["test"], "w", False),
        param("xb", [b"test"], "wb", False),
    ],
)
def test_writelines__can_atomically_create_file(
    tmp_path: Path,
    mock_atomicfile: mock.MagicMock,
    mode: str,
    items: t.Union[t.List[str], t.List[bytes]],
    expected_mode: str,
    expected_overwrite: bool,
):
    """atomic=True routes writelines through atomicfile with translated args."""
    target = tmp_path / "test_file"
    sh.writelines(target, items, mode, atomic=True)

    assert mock_atomicfile.called
    expected_call = mock.call(target, expected_mode, overwrite=expected_overwrite)
    assert mock_atomicfile.call_args == expected_call

    # Re-enter the mocked context manager to inspect the written lines.
    args, kwargs = mock_atomicfile.call_args
    with mock_atomicfile(*args, **kwargs) as fp:
        assert fp.writelines.called
        written = [line.strip() for line in fp.writelines.call_args[0][0]]
        assert items == written


def test_writelines__raises_when_mode_invalid(tmp_path: Path, invalid_write_only_mode: str):
    """Readable modes raise ValueError."""
    with pytest.raises(ValueError):
        sh.writelines(tmp_path / "test_file", [], invalid_write_only_mode)
349 |
--------------------------------------------------------------------------------
/tests/test_ls.py:
--------------------------------------------------------------------------------
1 | import fnmatch
2 | from pathlib import Path
3 | import re
4 | import typing as t
5 | from unittest import mock
6 |
7 | import pytest
8 | from pytest import param
9 |
10 | import shelmet as sh
11 | from shelmet.types import LsFilter
12 |
13 | from .utils import Dir, File
14 |
15 |
16 | parametrize = pytest.mark.parametrize
17 |
18 |
def _sample_tree() -> t.List[t.Union[Dir, File]]:
    """Build a fresh copy of the directory tree shared by the test_ls params.

    This must be a factory rather than a shared constant: Dir.mkdir rebases
    its items' paths in place, so each parametrized case needs its own
    Dir/File objects. Calling it once per param row (at collection time)
    matches the original behavior of repeating the literal list.
    """
    return [
        Dir("x/xx", File("x1.txt")),
        Dir("y/yy", File("y1.txt"), File("y2.txt")),
        Dir("z/zz"),
        File("a.txt"),
        File("b.txt"),
        File("c.txt"),
    ]


@parametrize(
    "items, kwargs, expected_contents",
    [
        param(
            _sample_tree(),
            {},
            {Path("x"), Path("y"), Path("z"), Path("a.txt"), Path("b.txt"), Path("c.txt")},
        ),
        param(
            _sample_tree(),
            {"recursive": True},
            {
                Path("x"),
                Path("x/xx"),
                Path("x/xx/x1.txt"),
                Path("y"),
                Path("y/yy"),
                Path("y/yy/y1.txt"),
                Path("y/yy/y2.txt"),
                Path("z"),
                Path("z/zz"),
                Path("a.txt"),
                Path("b.txt"),
                Path("c.txt"),
            },
        ),
        param(
            _sample_tree(),
            {"recursive": True, "only_files": True},
            {
                Path("x/xx/x1.txt"),
                Path("y/yy/y1.txt"),
                Path("y/yy/y2.txt"),
                Path("a.txt"),
                Path("b.txt"),
                Path("c.txt"),
            },
        ),
        param(
            _sample_tree(),
            {"recursive": True, "only_dirs": True},
            {Path("x"), Path("x/xx"), Path("y"), Path("y/yy"), Path("z"), Path("z/zz")},
        ),
        param(
            _sample_tree(),
            {"include": "*.txt"},
            {Path("a.txt"), Path("b.txt"), Path("c.txt")},
        ),
        param(
            _sample_tree(),
            {"exclude": "*.txt"},
            {Path("x"), Path("y"), Path("z")},
        ),
        param(
            _sample_tree(),
            {"include": "*.txt", "recursive": True},
            {
                Path("x/xx/x1.txt"),
                Path("y/yy/y1.txt"),
                Path("y/yy/y2.txt"),
                Path("a.txt"),
                Path("b.txt"),
                Path("c.txt"),
            },
        ),
        param(
            _sample_tree(),
            {"exclude": "*.txt", "recursive": True},
            {
                Path("x"),
                Path("x/xx"),
                Path("y"),
                Path("y/yy"),
                Path("z"),
                Path("z/zz"),
            },
        ),
        param(
            _sample_tree(),
            {"include": ["*.txt"], "only_dirs": True, "recursive": True},
            set(),
        ),
    ],
)
def test_ls(
    tmp_path: Path,
    items: t.List[t.Union[Dir, File]],
    kwargs: dict,
    expected_contents: t.Set[Path],
):
    """sh.ls lists directory contents subject to the given filter kwargs."""
    src = Dir(tmp_path, *items)
    src.mkdir()
    with sh.cd(tmp_path):
        contents = set(sh.ls("", **kwargs))
    assert contents == expected_contents
177 |
178 |
@parametrize(
    "include",
    [
        param("*_include", id="str"),
        param(["foo", "*_include"], id="str_list"),
        param(re.compile(fnmatch.translate("*_include")), id="regex"),
        param(
            [re.compile(fnmatch.translate("foo")), re.compile(fnmatch.translate("*_include"))],
            id="regex_list",
        ),
        param(lambda p: p.name.endswith("_include"), id="callable"),
        param(
            [lambda p: p.name == "foo", lambda p: p.name.endswith("_include")], id="callable_list"
        ),
    ],
)
def test_ls__includes_on_multiple_types(tmp_path: Path, include: LsFilter):
    """include= accepts strings, regexes, callables, and lists thereof."""
    entries: t.List[t.Union[Dir, File]] = [
        Dir("a_dir_include"),
        Dir("b_dir"),
        Dir("c_dir_include"),
        File("d_file_include"),
        File("e_file"),
        File("f_file_include"),
    ]
    expected = {
        Path("a_dir_include"),
        Path("c_dir_include"),
        Path("d_file_include"),
        Path("f_file_include"),
    }
    Dir(tmp_path, *entries).mkdir()
    with sh.cd(tmp_path):
        listed = set(sh.ls("", include=include))
    assert listed == expected
215 |
216 |
@parametrize(
    "items, kwargs, expected_contents",
    [
        param(
            [
                Dir("a_dir_include"),
                Dir("b_dir"),
                Dir("c_dir_include"),
                File("d_file_include"),
                File("e_file"),
                File("f_file_include"),
            ],
            {"include": "*_include", "only_files": True},
            {Path("d_file_include"), Path("f_file_include")},
        ),
        param(
            [
                Dir("a_dir_include"),
                Dir("b_dir"),
                Dir("c_dir_include"),
                File("d_file_include"),
                File("e_file"),
                File("f_file_include"),
            ],
            {"include": "*_include", "only_dirs": True},
            {Path("a_dir_include"), Path("c_dir_include")},
        ),
    ],
)
def test_ls__uses_only_files_and_only_dirs_in_include(
    tmp_path: Path,
    items: t.List[t.Union[Dir, File]],
    kwargs: dict,
    expected_contents: t.Set[Path],
):
    # The only_files/only_dirs flags should narrow which included entries are yielded.
    Dir(tmp_path, *items).mkdir()
    with sh.cd(tmp_path):
        result = set(sh.ls("", **kwargs))
    assert result == expected_contents
257 |
258 |
@parametrize(
    "exclude",
    [
        param("*_exclude", id="str"),
        param(["foo", "*_exclude"], id="str_list"),
        param(re.compile(fnmatch.translate("*_exclude")), id="regex"),
        param(
            [re.compile(fnmatch.translate("foo")), re.compile(fnmatch.translate("*_exclude"))],
            id="regex_list",
        ),
        param(lambda p: p.name.endswith("_exclude"), id="callable"),
        param(
            [lambda p: p.name == "foo", lambda p: p.name.endswith("_exclude")], id="callable_list"
        ),
    ],
)
def test_ls__excludes_on_multiple_types(tmp_path: Path, exclude: LsFilter):
    # Entries ending in "_exclude" must be dropped for every supported filter type.
    entries: t.List[t.Union[Dir, File]] = [
        Dir("a_dir_exclude"),
        Dir("b_dir"),
        Dir("c_dir_exclude"),
        File("d_file_exclude"),
        File("e_file"),
        File("f_file_exclude"),
    ]
    Dir(tmp_path, *entries).mkdir()
    with sh.cd(tmp_path):
        listed = set(sh.ls("", exclude=exclude))
    assert listed == {Path("b_dir"), Path("e_file")}
290 |
291 |
def test_ls__does_not_recurse_into_excluded_dirs(tmp_path: Path):
    # Even with recursion enabled, the contents of an excluded directory must never appear.
    tree: t.List[t.Union[Dir, File]] = [
        Dir("a_dir_excluded", File("a1.txt")),
        Dir("b_dir", File("b2.txt")),
    ]
    Dir(tmp_path, *tree).mkdir()
    with sh.cd(tmp_path):
        listed = set(sh.ls("", exclude="*_excluded", recursive=True))
    assert listed == {Path("b_dir"), Path("b_dir/b2.txt")}
307 |
308 |
@parametrize(
    "path, kwargs, expected",
    [
        param(".", {}, "Ls(path='.', recursive=False)"),
        param("/foo/bar/baz", {}, "Ls(path='/foo/bar/baz', recursive=False)"),
        param(".", {"recursive": True}, "Ls(path='.', recursive=True)"),
    ],
)
def test_ls__has_repr(path, kwargs, expected):
    # The repr should expose the path and recursive flag without consuming the listing.
    assert repr(sh.ls(path, **kwargs)) == expected
320 |
321 |
def test_ls__raises_when_both_only_files_and_only_dirs_are_true():
    # only_files and only_dirs are mutually exclusive options.
    with pytest.raises(ValueError):
        tuple(sh.ls(only_files=True, only_dirs=True))
325 |
326 |
def test_ls__raises_when_include_is_invalid_type():
    # A bool is not a valid filter; it should be rejected rather than silently ignored.
    with pytest.raises(TypeError):
        tuple(sh.ls(include=True))
330 |
331 |
@parametrize(
    "fn, expected_kwargs",
    [
        param(sh.lsfiles, {"only_files": True}),
        param(sh.lsdirs, {"only_dirs": True}),
        param(sh.walk, {"recursive": True, "only_files": False, "only_dirs": False}),
        param(sh.walkfiles, {"recursive": True, "only_files": True, "only_dirs": False}),
        param(sh.walkdirs, {"recursive": True, "only_dirs": True, "only_files": False}),
    ],
)
def test_ls_aliases(tmp_path: Path, fn: t.Callable, expected_kwargs: dict):
    """Each ls/walk alias should delegate to ``ls`` with the expected keyword arguments."""
    # Build the expectation in a fresh dict instead of mutating ``expected_kwargs`` in place:
    # parametrize reuses the same dict object across runs, so in-place mutation leaks state
    # between test executions.
    expected = {**expected_kwargs, "include": None, "exclude": None}

    with mock.patch.object(sh.path, "ls") as mocked_ls:
        list(fn(tmp_path))

    assert mocked_ls.called
    assert mocked_ls.call_args[1] == expected
351 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | shelmet
2 | *******
3 |
4 | |version| |build| |coveralls| |license|
5 |
6 |
7 | A shell power-up for working with the file system and running subprocess commands.
8 |
9 |
10 | Links
11 | =====
12 |
13 | - Project: https://github.com/dgilland/shelmet
14 | - Documentation: https://shelmet.readthedocs.io
15 | - PyPI: https://pypi.python.org/pypi/shelmet/
16 | - Github Actions: https://github.com/dgilland/shelmet/actions
17 |
18 |
19 | Features
20 | ========
21 |
22 | - Run and define subprocess commands
23 |
24 | - ``run``
25 | - ``cmd``
26 |
27 | - Interact with files
28 |
  - ``atomicfile``, ``atomicdir``
30 | - ``read``, ``readchunks``, ``readlines``, ``readtext``, ``readbytes``
31 | - ``write``, ``writechunks``, ``writelines``, ``writetext``, ``writebytes``
32 | - ``fsync``, ``dirsync``
33 |
34 | - Execute core shell operations
35 |
36 | - ``cp``, ``mv``, ``mkdir``, ``touch``
37 | - ``rm``, ``rmfile``, ``rmdir``
38 | - ``ls``, ``lsfiles``, ``lsdirs``
39 | - ``walk``, ``walkfiles``, ``walkdirs``
40 |
41 | - Archive and backup files
42 |
43 | - ``archive``, ``unarchive``, ``lsarchive``
44 | - ``backup``
45 |
46 | - Other utilities
47 |
48 | - ``cd``
49 | - ``environ``
50 | - ``cwd``, ``homedir``
51 | - and more!
52 |
53 | - 100% test coverage
54 | - Fully type-annotated
55 | - Python 3.6+
56 |
57 |
58 | Quickstart
59 | ==========
60 |
61 | Install using pip:
62 |
63 |
64 | ::
65 |
66 | pip3 install shelmet
67 |
68 |
69 | Import the ``sh`` module:
70 |
71 | .. code-block:: python
72 |
73 | import shelmet as sh
74 |
75 |
76 | Run system commands:
77 |
78 | .. code-block:: python
79 |
80 | # sh.run() is a wrapper around subprocess.run() that defaults to output capture, text-mode,
81 | # exception raising on non-zero exit codes, environment variable extension instead of
82 | # replacement, and support for passing command arguments as a variable number of strings
83 | # instead of just a list of strings.
84 | result = sh.run("ps", "aux")
85 | print(result.stdout)
86 | print(result.stderr)
87 |
88 | # stdout and stderr can be combined with...
89 | result = sh.run("some", "command", combine_output=True)
90 |
91 | # or not captured at all...
92 | sh.run("...", capture_output=False)
93 |
94 |
95 | Create reusable run commands that support chained commands like "pipe" ``|`` , "and" ``&&``, "or" ``||``, and "after" ``;``:
96 |
97 | .. code-block:: python
98 |
99 | # sh.cmd() returns a sh.Command object that can be used to execute a fixed command.
100 | ps_aux = sh.cmd("ps", "aux")
101 |
    # And has the option to pipe its output into another command automatically.
103 | grep_ps = ps_aux.pipe("grep", "-i", check=False)
104 | print(grep_ps.shell_cmd)
105 | # ps aux | grep -i
106 |
107 | search_result_1 = grep_ps.run("search term 1")
108 | print(search_result_1.stdout)
109 |
110 | search_result_2 = grep_ps.run("search term 2")
111 | print(search_result_2.stdout)
112 |
113 | # Equivalent to: mkdir foo && echo 'success' || echo 'failure'
114 | sh.cmd("mkdir", "foo").and_("echo", "success").or_("echo", "failure").run()
115 |
116 |
117 | Perform file system operations:
118 |
119 | .. code-block:: python
120 |
121 | # Make directories and sub-directories. Behaves like "$ mkdir -p"
122 | sh.mkdir("a", "b", "c", "d/e/f/g")
123 |
124 | # Context-manager to change working directory temporarily. Behaves like "$ cd".
125 | with sh.cd("d/e/f/g"):
126 | sh.touch("1.txt", "2.txt", "3.txt")
127 |
128 | # Move files or directories. Works across file-systems. Behaves like "$ mv".
129 | sh.mv("1.txt", "11.txt")
130 |
131 | # Copy files or directories. Behaves like "$ cp -r"
132 | sh.cp("2.txt", "22.txt")
133 |
134 | # List top-level directory contents.
135 | # NOTE: sh.ls() and its siblings return iterables.
136 | list(sh.ls())
137 |
138 | # Limit to files.
139 | list(sh.lsfiles())
140 |
141 | # Limit to directories.
142 | list(sh.lsdirs())
143 |
144 | # Remove files.
145 | sh.rmfile("11.txt", "22.txt", "3.txt")
146 | # Or use sh.rm which handles both files and directories.
147 | sh.rm("11.txt", "22.txt", "3.txt")
148 |
149 | # Recursively walk current directory.
150 | # NOTE: sh.walk() and its siblings return iterables.
151 | list(sh.walk())
152 |
153 | # Or just a specified directory.
154 | list(sh.walk("d"))
155 |
    # Or just its files or directories.
157 | list(sh.walkfiles())
158 | list(sh.walkdirs())
159 |
160 | # Remove directories.
161 | sh.rmdir("a", "b", "c", "d")
162 | # Or use sh.rm which handles both files and directories.
163 | sh.rm("a", "b", "c", "d")
164 |
165 |
166 | Perform file IO:
167 |
168 | .. code-block:: python
169 |
170 | sh.write("test.txt", "some text\n")
171 | sh.write("test.txt", " some more text\n", "a")
172 |
173 | sh.write("test.bin", b"some bytes")
174 | sh.write("test.bin", b" some more bytes", "ab")
175 |
176 | sh.writelines("output.txt", ["1", "2", "3"]) # -> "1\n2\n3\n"
177 | sh.writelines("output.txt", (str(i) for i in range(5))) # -> "0\n1\n2\n3\n4\n"
178 |
179 | # Write to a file atomically. See sh.atomicfile for more details.
180 | sh.write("test.txt", "content", atomic=True)
181 | sh.writelines("test.txt", ["content"], atomic=True)
182 |
183 | text = sh.read("test.txt") # -> "some text\nsome more text\n"
184 | data = sh.read("text.bin", "rb") # -> b"some bytes some more bytes"
185 |
186 | for line in sh.readlines("test.txt"):
187 | print(line)
188 |
189 | for chunk in sh.readchunks("test.txt", size=1024):
190 | print(chunk)
191 |
192 | sh.write("test.txt", "a|b|c|d")
193 | items = list(sh.readchunks("test.txt", sep="|"))
194 | print(items) # -> ["a", "b", "c", "d"]
195 |
196 | sh.write("test.txt", b"a|b|c|d", "wb")
197 | assert "".join(sh.readchunks("test.txt", "rb", sep=b"|")) == b"a|b|c|d"
198 |
199 |
200 | Backup files:
201 |
202 | .. code-block:: python
203 |
204 | # Create backup as copy of file.
205 | backup_file = sh.backup("a.txt")
206 | print(backup_file) # a.txt.2021-02-24T16:19:20.276491~
207 | sh.backup("a.txt", utc=True) # a.txt.2021-02-24T11:19:20.276491Z~
208 | sh.backup("a.txt", epoch=True) # a.txt.1614878783.56201
209 | sh.backup("a.txt", suffix=".bak") # a.txt.2021-02-24T16:19:20.276491.bak
210 | sh.backup("a.txt", suffix=".bak", timestamp=False) # a.txt.bak
211 | sh.backup("a.txt", prefix="BACKUP_", suffix="") # BACKUP_a.txt.2021-02-24T16:19:20.276491
212 |
213 | # Create backup as copy of directory.
214 | sh.backup("path/to/dir") # path/to/dir.2021-02-24T16:19:20.276491~
215 |
216 | # Create backup as archive of file or directory.
217 | sh.backup("b/c", ext=".tar.gz") # b/c.2021-02-24T16:19:20.276491.tar.gz
218 | sh.backup("b/c", ext=".tar.bz2") # b/c.2021-02-24T16:19:20.276491.tar.bz2
219 | sh.backup("b/c", ext=".tar.xz") # b/c.2021-02-24T16:19:20.276491.tar.xz
220 | sh.backup("b/c", ext=".zip") # b/c.2021-02-24T16:19:20.276491.zip
221 |
222 | from functools import partial
223 | import itertools
224 |
225 | counter = itertools.count(1)
226 | backup = partial(sh.backup, namer=lambda src: f"{src.name}-{next(counter)}~")
227 | backup("test.txt") # test.txt-1~
228 | backup("test.txt") # test.txt-2~
229 | backup("test.txt") # test.txt-3~
230 |
231 |
232 | Archive files:
233 |
234 | .. code-block:: python
235 |
236 | # Create tar, tar-gz, tar-bz2, tar-xz, or zip archives.
237 | sh.archive("archive.tar.gz", "/path/to/foo", "/path/to/bar")
238 |
239 | # Archive type is inferred from extension in filename but can be explicitly set.
240 | sh.archive("archive", "path", ext=".tbz")
241 |
242 | # Files can be filtered with ls, lsfiles, lsdirs, walk, walkfiles, and walkdirs functions.
243 | sh.archive(
244 | "archive.tgz",
245 | sh.walk("path", include="*.py"),
246 | sh.walk("other/path", exclude="*.log"),
247 | )
248 |
249 | # Archive paths can be customized with root and repath arguments.
250 | # root changes the base path for archive members.
251 | sh.archive("archive.txz", "/a/b/c/1", "/a/b/d/2", root="/a/b")
252 | # -> archive members will be "c/1/*" and "d/2/*"
253 | # -> without root, they would be "b/c/1/*" and "b/d/2/*"
254 |
255 | # repath renames paths.
256 | sh.archive("archive.zip", "/a/b/c", "/a/b/d", repath={"/a/b/c": "foo/bar"})
257 | # -> archive members: "foo/bar/*" and "b/d/*"
258 |
259 | # repath also works with ls* and walk* by matching on the base path.
260 | sh.archive(
261 | "log-dump.taz",
262 | sh.walk("path/to/logs", include="*.log*"),
263 | repath={"path/to/logs": "logs"},
264 | )
265 |
266 |
267 | Get list of archive contents:
268 |
269 | .. code-block:: python
270 |
271 | # Get list of archive contents as PurePath objects.
272 | listing = sh.lsarchive("archive.tgz")
273 |
274 | # Use an explicit extension when archive doesn't have one but is supported.
275 | listing = sh.lsarchive("archive", ext=".tgz")
276 |
277 |
278 | Unarchive tar and zip based archives:
279 |
280 | .. code-block:: python
281 |
282 | # Extract tar, tar-gz, tar-bz2, tar-xz, or zip archives to directory.
283 | sh.unarchive("archive.tgz", "out/dir")
284 |
285 | # Potentially unsafe archives will raise an exception if the extraction path falls outside
286 | # the destination, e.g., when the archive contains absolute paths.
287 | try:
288 | sh.unarchive("unsafe-archive.tz2", "out")
289 | except sh.ArchiveError:
290 | pass
291 |
292 | # But if an archive can be trusted...
    sh.unarchive("unsafe-archive.tz2", "out", trusted=True)
294 |
295 |
296 | Write to a new file atomically where content is written to a temporary file and then moved once finished:
297 |
298 | .. code-block:: python
299 |
300 | import os
301 |
302 | with sh.atomicfile("path/to/atomic.txt") as fp:
303 | # Writes are sent to a temporary file in the same directory as the destination.
304 | print(fp.name) # will be something like "path/to/.atomic.txt_XZKVqrlk.tmp"
305 | fp.write("some text")
306 | fp.write("some more text")
307 |
308 | # File doesn't exist yet.
309 | assert not os.path.exists("path/to/atomic.txt")
310 |
311 | # Exiting context manager will result in the temporary file being atomically moved to
312 | # destination. This will also result in a lower-level fsync on the destination file and
313 | # directory.
314 | assert os.path.exists("path/to/atomic.txt")
315 |
316 | # File mode, sync skipping, and overwrite flag can be specified to change the default
317 | # behavior which is...
318 | with sh.atomicfile("file.txt", "w", skip_sync=False, overwrite=True) as fp:
319 | pass
320 |
321 | # Additional parameters to open() can be passed as keyword arguments.
322 | with sh.atomicfile("file.txt", "w", **open_kwargs) as fp:
323 | pass
324 |
    # To write to a file atomically without a context manager
326 | sh.write("file.txt", "content", atomic=True)
327 |
328 |
329 | Create a new directory atomically where its contents are written to a temporary directory and then moved once finished:
330 |
331 | .. code-block:: python
332 |
333 | with sh.atomicdir("path/to/atomic_dir") as atomic_dir:
334 | # Yielded path is temporary directory within the same parent directory as the destination.
335 | # path will be something like "path/to/.atomic_dir_QGLDfPwz_tmp"
336 | some_file = atomic_dir / "file.txt"
337 |
338 | # file written to "path/to/.atomic_dir_QGLDfPwz_tmp/file.txt"
339 | some_file.write_text("contents")
340 |
341 | some_dir = atomic_dir / "dir"
342 | some_dir.mkdir() # directory created at "path/to/.atomic_dir_QGLDfPwz_tmp/dir/"
343 |
344 | # Directory doesn't exist yet.
345 | assert not os.path.exists("path/to/atomic_dir")
346 |
    # Exiting context manager will atomically move the temporary directory to the destination.
348 | assert os.path.exists("path/to/atomic_dir")
349 |
350 | # Sync skipping and overwrite flag can be specified to change the default behavior which is...
351 | with sh.atomicdir("atomic_dir", skip_sync=False, overwrite=True) as atomic_dir:
352 | pass
353 |
354 |
355 | Temporarily change environment variables:
356 |
357 | .. code-block:: python
358 |
359 | # Extend existing environment.
360 | with sh.environ({"KEY1": "val1", "KEY2": "val2"}) as new_environ:
361 | # Do something while environment changed.
362 | # Environment variables include all previous ones and {"KEY1": "val1", "KEY2": "val2"}.
363 | pass
364 |
365 | # Replace the entire environment with a new one.
366 | with sh.environ({"KEY": "val"}, replace=True):
367 | # Environment variables are replaced and are now just {"KEY": "val"}.
368 | pass
369 |
370 |
371 | For more details, please see the full documentation at https://shelmet.readthedocs.io.
372 |
373 |
374 |
375 | .. |version| image:: https://img.shields.io/pypi/v/shelmet.svg?style=flat-square
376 | :target: https://pypi.python.org/pypi/shelmet/
377 |
378 | .. |build| image:: https://img.shields.io/github/actions/workflow/status/dgilland/shelmet/main.yml?branch=master&style=flat-square
379 | :target: https://github.com/dgilland/shelmet/actions
380 |
381 | .. |coveralls| image:: https://img.shields.io/coveralls/dgilland/shelmet/master.svg?style=flat-square
382 | :target: https://coveralls.io/r/dgilland/shelmet
383 |
384 | .. |license| image:: https://img.shields.io/pypi/l/shelmet.svg?style=flat-square
385 | :target: https://pypi.python.org/pypi/shelmet/
386 |
--------------------------------------------------------------------------------
/src/shelmet/path.py:
--------------------------------------------------------------------------------
1 | """The path module contains utilities for working with OS paths."""
2 |
3 | from contextlib import contextmanager
4 | import fnmatch
5 | import os
6 | from pathlib import Path
7 | import typing as t
8 | from typing import Iterable
9 |
10 | from .types import LsFilter, LsFilterable, LsFilterFn, StrPath
11 |
12 |
class Ls:
    """
    Iterable over a directory's contents, yielding each entry as a ``Path`` object.

    Args:
        path: Directory to list.
        recursive: Whether to descend into subdirectories. Defaults to ``False``.
        only_files: Limit results to files only. Mutually exclusive with ``only_dirs``.
        only_dirs: Limit results to directories only. Mutually exclusive with ``only_files``.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter matches
            it and it satisfies ``only_files``/``only_dirs`` (if set). A directory that is not
            itself included may still have contents yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """

    def __init__(
        self,
        path: StrPath = ".",
        *,
        recursive: bool = False,
        only_files: bool = False,
        only_dirs: bool = False,
        include: t.Optional[LsFilter] = None,
        exclude: t.Optional[LsFilter] = None,
    ):
        if only_files and only_dirs:
            raise ValueError("only_files and only_dirs cannot both be true")

        include_filters: t.List[t.Callable[[Path], bool]] = []
        exclude_filters: t.List[t.Callable[[Path], bool]] = []

        if include:
            # Normalize a single filterable into a one-element list. Strings/bytes are
            # themselves iterable but represent a single glob pattern, so wrap them too.
            if isinstance(include, (str, bytes)) or not isinstance(include, Iterable):
                includes: t.Iterable = [include]
            else:
                includes = include
            # Fold the only_* constraints into every include filter so a path must satisfy
            # both the filterable and the only_* restriction to count as included.
            include_filters = [
                _make_ls_filter(only_files=only_files, only_dirs=only_dirs, filterable=item)
                for item in includes
            ]
        elif only_files or only_dirs:
            # No explicit includes: the only_* restriction becomes the sole include filter.
            include_filters = [_make_ls_filter(only_files=only_files, only_dirs=only_dirs)]

        if exclude:
            if isinstance(exclude, (str, bytes)) or not isinstance(exclude, Iterable):
                excludes: t.Iterable = [exclude]
            else:
                excludes = exclude
            exclude_filters = [_make_ls_filter(filterable=item) for item in excludes]

        self.path = path
        self.recursive = recursive
        self.include_filters = include_filters
        self.exclude_filters = exclude_filters

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(path={self.path!r}, recursive={self.recursive})"

    def __str__(self) -> str:
        """Return file system representation of path."""
        return str(self.path)

    # Make class PathLike for os.fspath compatibility.
    __fspath__ = __str__

    def __iter__(self) -> t.Iterator[Path]:
        """Lazily yield the contents of :attr:`path`."""
        yield from _ls(
            self.path,
            recursive=self.recursive,
            include_filters=self.include_filters,
            exclude_filters=self.exclude_filters,
        )
96 |
97 |
def _ls(
    path: StrPath = ".",
    *,
    recursive: bool = False,
    include_filters: t.Optional[t.List[LsFilterFn]] = None,
    exclude_filters: t.Optional[t.List[LsFilterFn]] = None,
) -> t.Generator[Path, None, None]:
    """Yield the entries of *path*, applying include/exclude filters and optional recursion."""
    # Subdirectories are collected during the scan and recursed into afterwards, so this
    # level's scandir handle is closed before deeper handles are opened.
    subdirs: t.List[str] = []

    with os.scandir(Path(path)) as scanner:
        while True:
            try:
                entry = next(scanner)
            except StopIteration:
                break
            except OSError:  # pragma: no cover
                return

            entry_path = Path(entry.path)
            excluded = bool(exclude_filters) and any(
                filter_fn(entry_path) for filter_fn in exclude_filters
            )

            if not excluded:
                # No include filters means everything not excluded is yielded.
                if not include_filters or any(
                    filter_fn(entry_path) for filter_fn in include_filters
                ):
                    yield entry_path

            # Excluded directories are pruned entirely; symlinked directories are skipped to
            # avoid following cycles.
            if recursive and not excluded and entry_path.is_dir() and not entry_path.is_symlink():
                subdirs.append(entry.path)

    for subdir in subdirs:
        yield from _ls(
            subdir,
            recursive=recursive,
            include_filters=include_filters,
            exclude_filters=exclude_filters,
        )
139 |
140 |
def _make_ls_filter(
    only_files: bool = False, only_dirs: bool = False, filterable: t.Optional[LsFilterable] = None
) -> LsFilterFn:
    """Build a path predicate that combines the only_* restrictions with an optional filterable."""
    filter_fn: t.Optional[LsFilterFn] = _make_ls_filterable_fn(filterable) if filterable else None

    def _ls_filter(path: Path) -> bool:
        # The only_* restriction is checked first; a path failing it is rejected outright.
        if only_files and path.is_dir():
            return False
        if only_dirs and path.is_file():
            return False
        # With no filterable, passing the only_* checks is sufficient.
        return filter_fn(path) if filter_fn else True

    return _ls_filter
159 |
160 |
def _make_ls_filterable_fn(filterable: LsFilterable) -> LsFilterFn:
    """Normalize a glob string, compiled regex, or callable into a ``Path -> bool`` predicate."""
    if isinstance(filterable, str):
        # Glob pattern matched against the whole path.
        def _glob_fn(path: Path) -> bool:
            return fnmatch.fnmatch(path, filterable)  # type: ignore

        return _glob_fn

    if isinstance(filterable, t.Pattern):
        # Compiled regex matched from the start of the string form of the path.
        def _regex_fn(path: Path) -> bool:
            return bool(filterable.match(str(path)))  # type: ignore

        return _regex_fn

    if callable(filterable):
        def _callable_fn(path: Path) -> bool:
            return filterable(path)  # type: ignore

        return _callable_fn

    raise TypeError(
        f"ls filter must be one of str, re.compile() or callable, not {type(filterable)!r}"
    )
185 |
186 |
@contextmanager
def cd(path: StrPath) -> t.Iterator[None]:
    """
    Context manager that changes the working directory on enter and restores it on exit.

    Args:
        path: Directory to change to. A falsey value (e.g. ``""``) leaves the working
            directory untouched.
    """
    original = os.getcwd()
    if path:
        os.chdir(path)

    try:
        yield
    finally:
        # Restore the original working directory even when the body raises.
        if path:
            os.chdir(original)
205 |
206 |
def cwd() -> Path:
    """Return the current working directory as a ``Path`` object."""
    return Path(os.getcwd())
210 |
211 |
def homedir() -> Path:
    """Return current user's home directory as ``Path`` object."""
    # Return annotation added for consistency with the rest of this module, which is fully
    # type-annotated; behavior is unchanged.
    return Path.home()
215 |
216 |
def ls(
    path: StrPath = ".",
    *,
    recursive: bool = False,
    only_files: bool = False,
    only_dirs: bool = False,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return an iterable that lists a directory's contents as ``Path`` objects.

    Args:
        path: Directory to list.
        recursive: Whether to descend into subdirectories. Defaults to ``False``.
        only_files: Limit results to files only. Mutually exclusive with ``only_dirs``.
        only_dirs: Limit results to directories only. Mutually exclusive with ``only_files``.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter matches
            it and it satisfies ``only_files``/``only_dirs`` (if set). A directory that is not
            itself included may still have contents yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """
    # Thin wrapper: all option handling and validation lives in the Ls class.
    return Ls(
        path,
        include=include,
        exclude=exclude,
        recursive=recursive,
        only_files=only_files,
        only_dirs=only_dirs,
    )
252 |
253 |
def lsfiles(
    path: StrPath = ".",
    *,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return an iterable that lists only the files of a directory as ``Path`` objects.

    See Also:
        Only the top level of ``path`` is listed. Use :func:`.walkfiles` to recursively yield
        all files under a directory.

    Args:
        path: Directory to list.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter
            matches it. A directory that is not itself included may still have contents
            yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """
    return ls(path, include=include, exclude=exclude, only_files=True)
279 |
280 |
def lsdirs(
    path: StrPath = ".",
    *,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return an iterable that lists only the directories of a directory as ``Path`` objects.

    See Also:
        Only the top level of ``path`` is listed. Use :func:`.walkdirs` to recursively yield
        all directories under a directory.

    Args:
        path: Directory to list.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter
            matches it. A directory that is not itself included may still have contents
            yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """
    return ls(path, include=include, exclude=exclude, only_dirs=True)
306 |
307 |
def reljoin(*paths: StrPath) -> str:
    """
    Join paths like ``os.path.join`` except that every path is treated as relative to the
    previous one, so an absolute path in the middle extends the existing result instead of
    becoming the new root.

    Args:
        *paths: Paths to join together.
    """
    # Path() normalizes each segment (e.g. strips trailing separators) before joining.
    joined = os.sep.join(str(Path(p)) for p in paths)
    return os.path.normpath(joined)
319 |
320 |
def walk(
    path: StrPath = ".",
    *,
    only_files: bool = False,
    only_dirs: bool = False,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return an iterable that recursively lists all directory contents as ``Path`` objects.

    See Also:
        This function is recursive and lists the entire tree under ``path``. Use :func:`.ls`
        to list only the top-level contents of a directory.

    Args:
        path: Directory to walk.
        only_files: Limit results to files only. Mutually exclusive with ``only_dirs``.
        only_dirs: Limit results to directories only. Mutually exclusive with ``only_files``.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter matches
            it and it satisfies ``only_files``/``only_dirs`` (if set). A directory that is not
            itself included may still have contents yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """
    # walk() is simply ls() with recursion forced on.
    options = {
        "recursive": True,
        "only_files": only_files,
        "only_dirs": only_dirs,
        "include": include,
        "exclude": exclude,
    }
    return ls(path, **options)
358 |
359 |
def walkfiles(
    path: StrPath = ".",
    *,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return an iterable that recursively lists only files as ``Path`` objects.

    See Also:
        This function is recursive and lists every file under ``path``. Use :func:`.lsfiles`
        to list only the top-level files in a directory.

    Args:
        path: Directory to walk.
        include: Select paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path is yielded when any include filter
            matches it. A directory that is not itself included may still have contents
            yielded when they match an include filter.
        exclude: Reject paths using a glob-pattern string, compiled regex, callable, or an
            iterable containing any of those. A path matching any exclude filter is never
            yielded, and an excluded directory's entire contents are skipped.
    """
    return walk(path, include=include, exclude=exclude, only_files=True)
385 |
386 |
def walkdirs(
    path: StrPath = ".",
    *,
    include: t.Optional[LsFilter] = None,
    exclude: t.Optional[LsFilter] = None,
) -> Ls:
    """
    Return iterable that recursively lists only directories in directory as ``Path`` objects.

    See Also:
        This function is recursive and will list all directories in a directory. Use
        :func:`.lsdirs` to list only the top-level directories in a directory.

    Args:
        path: Directory to walk.
        include: Include paths by filtering on a glob-pattern string, compiled regex, callable, or
            iterable containing any of those types. Path is included if any of the filters return
            ``True``. If path is a directory and is not included, its contents are still eligible
            for inclusion if they match one of the include filters.
        exclude: Exclude paths by filtering on a glob-pattern string, compiled regex, callable, or
            iterable containing any of those types. Path is not yielded if any of the filters return
            ``True``. If the path is a directory and is excluded, then all of its contents will be
            excluded.
    """
    return walk(path, only_dirs=True, include=include, exclude=exclude)
412 |
--------------------------------------------------------------------------------
/tests/test_command.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import subprocess
4 | import typing as t
5 | from unittest import mock
6 |
7 | import pytest
8 | from pytest import param
9 |
10 | import shelmet as sh
11 |
12 |
# Shorthand alias so parametrized tests below read as ``@parametrize(...)``.
parametrize = pytest.mark.parametrize
14 |
15 |
@pytest.fixture()
def mock_subprocess_run() -> t.Generator[mock.MagicMock, None, None]:
    """Yield a ``MagicMock`` that replaces ``subprocess.run`` for the duration of a test."""
    with mock.patch("subprocess.run") as patched_run:
        yield patched_run
20 |
21 |
22 | def run_call_args(
23 | args,
24 | stdin=None,
25 | input=None,
26 | stdout=subprocess.PIPE,
27 | stderr=subprocess.PIPE,
28 | cwd=None,
29 | timeout=None,
30 | check=True,
31 | encoding=None,
32 | errors=None,
33 | text=True,
34 | env=None,
35 | **popen_kwargs,
36 | ):
37 | popen_kwargs.pop("capture_output", None)
38 | popen_kwargs.pop("combine_output", None)
39 | popen_kwargs.pop("replace_env", None)
40 | return mock.call(
41 | args,
42 | stdin=stdin,
43 | input=input,
44 | stdout=stdout,
45 | stderr=stderr,
46 | cwd=cwd,
47 | timeout=timeout,
48 | check=check,
49 | encoding=encoding,
50 | errors=errors,
51 | universal_newlines=text,
52 | env=env,
53 | **popen_kwargs,
54 | )
55 |
56 |
def test_command__returns_command_object_with_defaults():
    """``sh.cmd`` should build a ``Command`` whose attributes all carry their documented defaults."""
    args = ["ls", "-la"]
    cmd = sh.cmd(*args)

    expected_defaults = {
        "args": args,
        "stdin": None,
        "input": None,
        "stdout": subprocess.PIPE,
        "stderr": subprocess.PIPE,
        "capture_output": True,
        "cwd": None,
        "timeout": None,
        "check": True,
        "encoding": None,
        "errors": None,
        "text": True,
        "env": None,
        "replace_env": False,
        "popen_kwargs": {},
        "parent": None,
        "parents": [],
        "shell_cmd": "ls -la",
    }
    for attr, expected in expected_defaults.items():
        assert getattr(cmd, attr) == expected, f"unexpected default for {attr!r}"
79 |
80 |
@parametrize(
    "args, exception, match",
    [
        param([], TypeError, "Command(): requires at least one non-empty positional argument"),
        param(
            [None, None],
            TypeError,
            "Command(): requires at least one non-empty positional argument",
        ),
        param(
            [5],
            TypeError,
            "Command(): requires all positional arguments to be either string or bytes",
        ),
    ],
)
def test_command__raises_on_bad_args(args: list, exception: t.Type[Exception], match: str):
    """``sh.cmd`` should reject empty, all-``None``, or non-string arguments with ``TypeError``."""
    with pytest.raises(exception, match=re.escape(match)):
        sh.cmd(*args)
100 |
101 |
@parametrize(
    "cmd, expected_repr",
    [
        param(sh.cmd("ls", "-la"), "Command(args=['ls', '-la'])"),
        param(
            sh.cmd("ls", "-la").pipe("grep", "foo"),
            "Command(args=['grep', 'foo'], parents=[PipeCommand(args=['ls', '-la'])])",
        ),
        param(
            sh.cmd("cmd1").pipe("cmd2").pipe("cmd3"),
            (
                "Command(args=['cmd3'],"
                " parents=[PipeCommand(args=['cmd1']), PipeCommand(args=['cmd2'])])"
            ),
        ),
    ],
)
def test_command_repr(cmd, expected_repr):
    """``repr`` of a command should show its own args plus any chained parent commands."""
    assert repr(cmd) == expected_repr
121 |
122 |
@parametrize(
    "args, kwargs, expected_call",
    [
        param(["ls"], {}, run_call_args(["ls"]), id="single_arg"),
        param(["ls", "-l", "-a"], {}, run_call_args(["ls", "-l", "-a"]), id="multiple_args"),
        param(
            [["ls", "-l", "-a"]], {}, run_call_args(["ls", "-l", "-a"]), id="single_list_of_args"
        ),
        param(
            [["ls"], ["-l", "-a"]],
            {},
            run_call_args(["ls", "-l", "-a"]),
            id="multiple_lists_of_args",
        ),
        param(
            ["ls", None, "-l", None, "-a"],
            {},
            run_call_args(["ls", "-l", "-a"]),
            id="none_valued_args_discarded",
        ),
        param(
            ["ls"],
            {"capture_output": False},
            run_call_args(["ls"], stdout=None, stderr=None),
            id="no_capture_output",
        ),
        param(["ls"], {"stdout": None}, run_call_args(["ls"], stdout=None), id="no_capture_stdout"),
        param(["ls"], {"stderr": None}, run_call_args(["ls"], stderr=None), id="no_capture_stderr"),
        param(
            ["ls"],
            {"combine_output": True},
            run_call_args(["ls"], stderr=subprocess.STDOUT),
            id="combine_output",
        ),
        param(["ls"], {"text": False}, run_call_args(["ls"], text=False), id="no_text"),
        param(["ls"], {"input": "test"}, run_call_args(["ls"], input="test"), id="input_as_str"),
        param(
            ["ls"],
            {"input": b"test", "text": False},
            run_call_args(["ls"], input=b"test", text=False),
            id="input_as_bytes",
        ),
        param(
            ["ls"],
            {"input": b"test"},
            run_call_args(["ls"], input="test"),
            id="coerce_input_to_str",
        ),
        param(
            ["ls"],
            {"input": "test", "text": False},
            run_call_args(["ls"], input=b"test", text=False),
            id="coerce_input_to_bytes",
        ),
        param(
            ["ls"],
            {"stdin": subprocess.DEVNULL},
            run_call_args(["ls"], stdin=subprocess.DEVNULL),
            id="set_stdin",
        ),
        param(["ls"], {"cwd": "."}, run_call_args(["ls"], cwd="."), id="set_cwd"),
        param(["ls"], {"timeout": 10}, run_call_args(["ls"], timeout=10), id="set_timeout"),
        param(
            ["ls"],
            {"encoding": "utf-8"},
            run_call_args(["ls"], encoding="utf-8"),
            id="set_encoding",
        ),
        param(
            ["ls"],
            {"errors": "strict"},
            run_call_args(["ls"], errors="strict"),
            id="set_errors",
        ),
    ],
)
def test_command_run__passes_arguments_to_subprocess_run(
    mock_subprocess_run: mock.MagicMock, args: list, kwargs: dict, expected_call: tuple
):
    """``sh.run`` should translate its arguments into the equivalent ``subprocess.run`` call."""
    sh.run(*args, **kwargs)
    assert mock_subprocess_run.call_args == expected_call
204 |
205 |
def test_command_run__extends_env(mock_subprocess_run: mock.MagicMock):
    """By default, ``env`` entries are merged on top of the current process environment."""
    extra_env = {"a": "1", "b": "2"}
    expected_call = run_call_args(["ls"], env={**os.environ, **extra_env})
    sh.run("ls", env=extra_env)
    assert mock_subprocess_run.call_args == expected_call
211 |
212 |
def test_command_run__replaces_env(mock_subprocess_run: mock.MagicMock):
    """With ``replace_env=True``, the given ``env`` becomes the entire child environment."""
    replacement_env = {"a": "1", "b": "2"}
    expected_call = run_call_args(["ls"], env=replacement_env)
    sh.run("ls", env=replacement_env, replace_env=True)
    assert mock_subprocess_run.call_args == expected_call
218 |
219 |
@parametrize(
    "cmd, extra_args, overrides, expected_call",
    [
        param(sh.cmd("ls"), ["-l", "-a"], {}, run_call_args(["ls", "-l", "-a"])),
        param(
            sh.cmd("ls"),
            [],
            {"env": {"A": "B"}, "replace_env": True},
            run_call_args(["ls"], env={"A": "B"}),
        ),
        param(sh.cmd("ls"), [], {"cwd": "/tmp"}, run_call_args(["ls"], cwd="/tmp")),
        param(
            sh.cmd("ls"),
            [],
            {"stdin": subprocess.PIPE, "stdout": None, "stderr": None},
            run_call_args(["ls"], stdin=subprocess.PIPE, stdout=None, stderr=None),
        ),
        param(
            sh.cmd("ls"),
            [],
            {"capture_output": False},
            run_call_args(["ls"], stdout=None, stderr=None),
        ),
        param(
            sh.cmd("ls"),
            [],
            {"input": "test", "timeout": 10},
            run_call_args(["ls"], input="test", timeout=10),
        ),
        param(
            sh.cmd("ls"),
            [],
            {"encoding": "utf-8", "errors": "ignore", "text": False},
            run_call_args(["ls"], encoding="utf-8", errors="ignore", text=False),
        ),
    ],
)
def test_command_run__overrides_defaults(
    mock_subprocess_run: mock.MagicMock,
    cmd: sh.Command,
    extra_args: list,
    overrides: dict,
    expected_call: tuple,
):
    """Arguments passed to ``Command.run`` should override the command's stored defaults."""
    cmd.run(*extra_args, **overrides)
    assert mock_subprocess_run.called
    assert mock_subprocess_run.call_args == expected_call
267 |
268 |
@parametrize(
    "cmd, mock_side_effect, expected_call_args_list",
    [
        param(
            sh.cmd("cmd1").pipe("cmd2").pipe("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=0),
                subprocess.CompletedProcess(args=["cmd2"], returncode=0),
                subprocess.CompletedProcess(args=["cmd3"], returncode=0),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="pipe_run_3",
        ),
        param(
            sh.cmd("cmd1").pipe("cmd2").pipe("cmd3"),
            [
                subprocess.CalledProcessError(cmd=["cmd1"], returncode=1),
                subprocess.CalledProcessError(cmd=["cmd2"], returncode=1),
                subprocess.CompletedProcess(args=["cmd3"], returncode=0),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            # Fixed copy-paste duplicate: this case was also labeled "pipe_run_3" which collided
            # with the case above; renamed to match the or_run_3_errors/after_run_3_errors pattern.
            id="pipe_run_3_errors",
        ),
        param(
            sh.cmd("cmd1").and_("cmd2").and_("cmd3"),
            [subprocess.CompletedProcess(args=["cmd1"], returncode=1)],
            [["cmd1"]],
            id="and_run_1",
        ),
        param(
            sh.cmd("cmd1").and_("cmd2").and_("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=0),
                subprocess.CompletedProcess(args=["cmd2"], returncode=1),
            ],
            [["cmd1"], ["cmd2"]],
            id="and_run_2",
        ),
        param(
            sh.cmd("cmd1").and_("cmd2").and_("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=0),
                subprocess.CompletedProcess(args=["cmd2"], returncode=0),
                subprocess.CompletedProcess(args=["cmd3"], returncode=0),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="and_run_3",
        ),
        param(
            sh.cmd("cmd1").or_("cmd2").or_("cmd3"),
            [subprocess.CompletedProcess(args=["cmd1"], returncode=0)],
            [["cmd1"]],
            id="or_run_1",
        ),
        param(
            sh.cmd("cmd1").or_("cmd2").or_("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=1),
                subprocess.CompletedProcess(args=["cmd2"], returncode=0),
            ],
            [["cmd1"], ["cmd2"]],
            id="or_run_2",
        ),
        param(
            sh.cmd("cmd1").or_("cmd2").or_("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=1),
                subprocess.CompletedProcess(args=["cmd2"], returncode=1),
                subprocess.CompletedProcess(args=["cmd3"], returncode=0),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="or_run_3",
        ),
        param(
            sh.cmd("cmd1").or_("cmd2").or_("cmd3"),
            [
                subprocess.CalledProcessError(cmd=["cmd1"], returncode=1),
                subprocess.CalledProcessError(cmd=["cmd2"], returncode=1),
                subprocess.CompletedProcess(args=["cmd3"], returncode=1),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="or_run_3_errors",
        ),
        param(
            sh.cmd("cmd1").after("cmd2").after("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=1),
                subprocess.CompletedProcess(args=["cmd2"], returncode=1),
                subprocess.CompletedProcess(args=["cmd3"], returncode=1),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="after_run_3_failures",
        ),
        param(
            sh.cmd("cmd1").after("cmd2").after("cmd3"),
            [
                subprocess.CalledProcessError(cmd=["cmd1"], returncode=1),
                subprocess.CalledProcessError(cmd=["cmd2"], returncode=1),
                subprocess.CompletedProcess(args=["cmd3"], returncode=1),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="after_run_3_errors",
        ),
        param(
            sh.cmd("cmd1").after("cmd2").after("cmd3"),
            [
                subprocess.CompletedProcess(args=["cmd1"], returncode=0),
                subprocess.CompletedProcess(args=["cmd2"], returncode=0),
                subprocess.CompletedProcess(args=["cmd3"], returncode=0),
            ],
            [["cmd1"], ["cmd2"], ["cmd3"]],
            id="after_run_3_successes",
        ),
    ],
)
def test_command_run__calls_parent_command_run(
    mock_subprocess_run: mock.MagicMock,
    cmd: sh.Command,
    mock_side_effect: list,
    expected_call_args_list: list,
):
    """
    Running a chained command should execute its parents according to the chain operator:
    ``pipe``/``after`` always continue, ``and_`` stops on failure, ``or_`` stops on success.
    """
    if mock_side_effect:
        mock_subprocess_run.side_effect = mock_side_effect

    result = cmd.run()
    assert len(mock_subprocess_run.call_args_list) == len(expected_call_args_list)
    # The returned result corresponds to the last command actually executed in the chain.
    assert result.args == expected_call_args_list[-1]

    for i, call_args in enumerate(expected_call_args_list):
        call_cmd = mock_subprocess_run.call_args_list[i]
        assert call_args == call_cmd[0][0]
400 |
401 |
def test_command_run__pipes_parent_stdout_to_child(mock_subprocess_run: mock.MagicMock):
    """A piped command should receive its parent's captured stdout as its ``input``."""
    parent_stdout = "cmd1_stdout"
    mock_subprocess_run().stdout = parent_stdout

    piped_cmd = sh.cmd("cmd1").pipe("cmd2")
    piped_cmd.run()

    child_call = mock_subprocess_run.call_args_list[-1]
    assert child_call[0][0] == ["cmd2"]
    assert child_call[1]["input"] == parent_stdout
412 |
413 |
def test_command_pipe__sets_parent():
    """Each ``pipe`` call should link the new command back to the command it was piped from."""
    chain = [sh.cmd("cmd1")]
    for name in ("cmd2", "cmd3", "cmd4"):
        chain.append(chain[-1].pipe(name))

    for parent_cmd, child_cmd in zip(chain, chain[1:]):
        assert child_cmd.parent.command is parent_cmd
423 |
424 |
def test_command_pipe__returns_child_command():
    """``pipe`` should return a new child command that carries all of the options it was given."""
    parent_cmd = sh.cmd("parent")
    expected_attrs = {
        "stdin": None,
        "input": b"test",
        "stdout": None,
        "stderr": None,
        "capture_output": False,
        "cwd": "/",
        "timeout": 10,
        "check": False,
        "encoding": "utf-8",
        "errors": "ignore",
        "text": False,
        "env": {"A": "B"},
        "replace_env": True,
    }
    extra_popen_kwargs = {"umask": 1}
    child = parent_cmd.pipe("child", "cmd", **expected_attrs, **extra_popen_kwargs)

    assert child.args == ["child", "cmd"]
    for attr, expected in expected_attrs.items():
        assert getattr(child, attr) == expected, f"unexpected value for {attr!r}"
    assert child.popen_kwargs == extra_popen_kwargs
    assert child.parent.command is parent_cmd
450 |
451 |
@parametrize(
    "cmd, expected_shell_cmd",
    [
        param(sh.cmd("ls"), "ls"),
        param(sh.cmd("ps").pipe("grep", "foo bar"), "ps | grep 'foo bar'"),
        param(
            sh.cmd("ps").pipe("grep", "foo bar").pipe("grep", "test"),
            "ps | grep 'foo bar' | grep test",
        ),
        param(sh.cmd("cmd1").and_("cmd2", "a b").and_("cmd3"), "cmd1 && cmd2 'a b' && cmd3"),
        param(sh.cmd("cmd1").or_("cmd2", "a b").or_("cmd3"), "cmd1 || cmd2 'a b' || cmd3"),
        param(sh.cmd("cmd1").after("cmd2", "a b").after("cmd3"), "cmd1 ; cmd2 'a b' ; cmd3"),
        param(
            sh.cmd("cmd1").pipe("cmd2", "a b").and_("cmd3").or_("cmd4").after("cmd5"),
            "cmd1 | cmd2 'a b' && cmd3 || cmd4 ; cmd5",
        ),
    ],
)
def test_command_shell_cmd__returns_full_chained_command(cmd: sh.Command, expected_shell_cmd: str):
    """``shell_cmd`` should render the full chain with shell quoting and chain operators."""
    assert cmd.shell_cmd == expected_shell_cmd
472 |
--------------------------------------------------------------------------------
/tests/test_archive.py:
--------------------------------------------------------------------------------
1 | from contextlib import ExitStack
2 | from pathlib import Path
3 | import typing as t
4 | from unittest import mock
5 |
6 | import pytest
7 | from pytest import param
8 |
9 | import shelmet as sh
10 | from shelmet.archiving import ArchiveSource
11 |
12 | from .utils import (
13 | ARCHIVE_EXTENSIONS,
14 | Dir,
15 | File,
16 | create_archive_source,
17 | extract_archive,
18 | is_same_dir,
19 | )
20 |
21 |
# Shorthand alias so parametrized tests below read as ``@parametrize(...)``.
parametrize = pytest.mark.parametrize
23 |
24 |
def _test_archive(
    tmp_path: Path,
    archive_file: Path,
    *sources: t.Union[Dir, File],
    iteratee: t.Callable[[Path], t.Union[str, Path, sh.Ls]] = lambda p: p,
    ext: str = "",
    skip_extraction: bool = False,
):
    """Create ``sources`` on disk, archive them, and optionally verify a round-trip extraction."""
    src_dir = create_archive_source(tmp_path, *sources)
    archive_sources = (iteratee(item.path) for item in src_dir.items)
    sh.archive(archive_file, *archive_sources, ext=ext)
    assert archive_file.is_file()

    if skip_extraction:
        return

    dst_path = tmp_path / "dst"
    # With multiple top-level sources the archive contents are nested under the source dir name.
    extracted_src_path = dst_path / src_dir.path.name if len(src_dir.items) > 1 else dst_path

    extract_archive(archive_file, dst_path, ext=ext)

    assert dst_path.is_dir()
    assert extracted_src_path.is_dir()
    assert is_same_dir(src_dir.path, extracted_src_path)
51 |
52 |
@pytest.fixture(params=ARCHIVE_EXTENSIONS)
def arc_ext(request) -> str:
    """Fixture parametrized over every supported archive extension."""
    return request.param
57 |
58 |
@pytest.fixture(params=[".tar", ".zip"])
def rep_ext(request) -> str:
    """Fixture parametrized over a representative sample (tar and zip) of archive extensions."""
    return request.param
63 |
64 |
@parametrize(
    "sources",
    [
        param([File("1.txt", text="1")]),
        param([Dir("a", Dir("b"), File("1.txt", text="1"), File("2.txt", text="2"))]),
        param(
            [
                Dir(
                    "root",
                    Dir(
                        "a",
                        Dir(
                            "aa",
                            Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                            File("aa1.txt", text="aa1"),
                        ),
                        File("a1.txt", text="a1"),
                        File("a2.txt", text="a2"),
                    ),
                    Dir("b"),
                    Dir("c"),
                    Dir("d"),
                    File("1.txt", text="1"),
                    File("2.txt", text="2"),
                    File("3.txt", text="3"),
                )
            ]
        ),
        param(
            [
                Dir(
                    "a",
                    Dir(
                        "aa",
                        Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                        File("aa1.txt", text="aa1"),
                    ),
                    File("a1.txt", text="a1"),
                    File("a2.txt", text="a2"),
                ),
                Dir("b"),
                Dir("c"),
                Dir("d"),
                File("1.txt", text="1"),
                File("2.txt", text="2"),
                File("3.txt", text="3"),
            ]
        ),
    ],
)
def test_archive__archives_path_sources(
    tmp_path: Path, arc_ext: str, sources: t.List[t.Union[Dir, File]]
):
    """Archiving plain path sources should round-trip through extraction for every format."""
    archive_file = tmp_path / f"archive{arc_ext}"
    _test_archive(tmp_path, archive_file, *sources)
120 |
121 |
@parametrize(
    "sources",
    [
        param(
            [
                Dir(
                    "root",
                    Dir(
                        "a",
                        Dir(
                            "aa",
                            Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                            File("aa1.txt", text="aa1"),
                        ),
                        File("a1.txt", text="a1"),
                        File("a2.txt", text="a2"),
                    ),
                    Dir("b"),
                    Dir("c"),
                    Dir("d"),
                    File("1.txt", text="1"),
                    File("2.txt", text="2"),
                    File("3.txt", text="3"),
                ),
            ]
        ),
        param(
            [
                Dir(
                    "a",
                    Dir(
                        "aa",
                        Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                        File("aa1.txt", text="aa1"),
                    ),
                    File("a1.txt", text="a1"),
                    File("a2.txt", text="a2"),
                ),
                Dir("b"),
                Dir("c"),
                Dir("d"),
            ]
        ),
    ],
)
def test_archive__archives_ls_sources(tmp_path: Path, arc_ext: str, sources: t.List[Dir]):
    """``sh.walk`` iterables should be accepted as archive sources and round-trip correctly."""
    archive_file = tmp_path / f"archive{arc_ext}"
    _test_archive(tmp_path, archive_file, *sources, iteratee=sh.walk)
170 |
171 |
@parametrize(
    "sources, ls_func, expected_listing",
    [
        param(
            [
                Dir(
                    "root",
                    Dir(
                        "a",
                        Dir(
                            "aa",
                            Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                            File("aa1.txt", text="aa1"),
                        ),
                        File("a1.txt", text="a1"),
                        File("a2.txt", text="a2"),
                    ),
                    Dir("b"),
                    Dir("c"),
                    Dir("d"),
                    File("1.txt", text="1"),
                    File("2.txt", text="2"),
                    File("3.txt", text="3"),
                ),
            ],
            sh.ls,
            {
                Path("root"),
                Path("root/a"),
                Path("root/b"),
                Path("root/c"),
                Path("root/d"),
                Path("root/1.txt"),
                Path("root/2.txt"),
                Path("root/3.txt"),
            },
        ),
        param(
            [
                Dir(
                    "root",
                    Dir(
                        "a",
                        Dir(
                            "aa",
                            Dir("aaa", File("aaa1.txt", text="aaa1"), Dir("aaaa")),
                            File("aa1.txt", text="aa1"),
                        ),
                        File("a1.txt", text="a1"),
                        File("a2.txt", text="a2"),
                    ),
                    Dir("b"),
                    Dir("c"),
                    Dir("d"),
                    File("1.txt", text="1"),
                    File("2.txt", text="2"),
                    File("3.txt", text="3"),
                ),
            ],
            sh.walkfiles,
            {
                Path("root"),
                Path("root/1.txt"),
                Path("root/2.txt"),
                Path("root/3.txt"),
                Path("root/a/a1.txt"),
                Path("root/a/a2.txt"),
                Path("root/a/aa/aa1.txt"),
                Path("root/a/aa/aaa/aaa1.txt"),
            },
        ),
    ],
)
def test_archive__archives_filtered_ls_sources(
    tmp_path: Path,
    arc_ext: str,
    sources: t.List[Dir],
    ls_func: t.Callable[[Path], sh.Ls],
    expected_listing: t.Set[Path],
):
    """Non-recursive or filtered listings should archive only the paths they yield."""
    archive_file = tmp_path / f"archive{arc_ext}"
    _test_archive(tmp_path, archive_file, *sources, iteratee=ls_func, skip_extraction=True)

    listing = set(sh.lsarchive(archive_file))
    assert listing == expected_listing
257 |
258 |
def test_archive__allows_extra_leading_file_extension_suffixes(tmp_path: Path, arc_ext: str):
    """Extra dotted suffixes before the archive extension should not break format detection."""
    contents = Dir("a", Dir("b"), File("1.txt", text="1"), File("2.txt", text="2"))
    archive_file = tmp_path / f"archive.foo.bar.baz{arc_ext}"
    _test_archive(tmp_path, archive_file, contents)
263 |
264 |
def test_archive__archives_with_explicit_extension_format(tmp_path: Path, arc_ext: str):
    """An explicit ``ext`` argument should select the format when the file name has no extension."""
    contents = Dir("a", Dir("b"), File("1.txt", text="1"), File("2.txt", text="2"))
    archive_file = tmp_path / "archive"
    _test_archive(tmp_path, archive_file, contents, ext=arc_ext)
269 |
270 |
@parametrize(
    "source, root, expected_listing",
    [
        param(
            Dir("a", File("1.txt", text="1"), File("2.txt", text="2"), File("3.txt", text="3")),
            Path("a"),
            {Path("1.txt"), Path("2.txt"), Path("3.txt")},
        )
    ],
)
def test_archive__uses_custom_root_path_inside_archive(
    tmp_path: Path,
    rep_ext: str,
    source: t.Union[File, Dir],
    root: Path,
    expected_listing: t.Set[Path],
):
    """Passing ``root`` should make archive member paths relative to that root."""
    src_dir = create_archive_source(tmp_path, source)
    root = src_dir.path / root

    archive_file = tmp_path / f"archive{rep_ext}"
    sh.archive(archive_file, *(item.path for item in src_dir.items), root=root)
    assert archive_file.is_file()

    listing = set(sh.lsarchive(archive_file))
    assert listing == expected_listing
297 |
298 |
@parametrize(
    "sources, paths, root, repath, expected_listing",
    [
        param(
            [Dir("a", File("1.txt"), File("2.txt"), File("3.txt"))],
            ["a"],
            None,
            "abc",
            {Path("abc"), Path("abc/1.txt"), Path("abc/2.txt"), Path("abc/3.txt")},
        ),
        param(
            [Dir("a", File("1.txt"), File("2.txt"), File("3.txt"))],
            ["a"],
            None,
            {"a": "abc"},
            {Path("abc"), Path("abc/1.txt"), Path("abc/2.txt"), Path("abc/3.txt")},
        ),
        param(
            [
                Dir(
                    "a",
                    Dir("aa1", Dir("aaa1", File("aaa1.txt")), Dir("aaa2", File("aaa2.txt"))),
                    Dir("aa2"),
                ),
                Dir("b"),
                Dir("c"),
            ],
            ["a", "b", "c"],
            None,
            {"a": "1", Path("b"): "2"},
            {
                Path("1"),
                Path("1/aa1"),
                Path("1/aa1/aaa1"),
                Path("1/aa1/aaa1/aaa1.txt"),
                Path("1/aa1/aaa2"),
                Path("1/aa1/aaa2/aaa2.txt"),
                Path("1/aa2"),
                Path("2"),
                # NOTE(review): "c" is not in the repath mapping, so with root=None it appears to
                # keep its path relative to the default root, i.e. prefixed with the source dir
                # name "src" — confirm against sh.archive's root-resolution behavior.
                Path("src/c"),
            },
        ),
        param(
            [
                Dir(
                    "a",
                    Dir("aa1", Dir("aaa1", File("aaa1.txt")), Dir("aaa2", File("aaa2.txt"))),
                    Dir("aa2"),
                ),
                Dir("b"),
                Dir("c"),
            ],
            ["a", "b", "c"],
            ".",
            {"a": "1", Path("b"): "2"},
            {
                Path("1"),
                Path("1/aa1"),
                Path("1/aa1/aaa1"),
                Path("1/aa1/aaa1/aaa1.txt"),
                Path("1/aa1/aaa2"),
                Path("1/aa1/aaa2/aaa2.txt"),
                Path("1/aa2"),
                Path("2"),
                Path("c"),
            },
        ),
        param(
            [
                Dir(
                    "a",
                    Dir("aa1", Dir("aaa1", File("aaa1.txt")), Dir("aaa2", File("aaa2.txt"))),
                    Dir("aa2"),
                ),
                Dir("b"),
                Dir("c"),
            ],
            [Path("a"), sh.ls("b"), sh.walk("c")],
            ".",
            {"a": "1", "b": "2", "c": "3"},
            {
                Path("1"),
                Path("1/aa1"),
                Path("1/aa1/aaa1"),
                Path("1/aa1/aaa1/aaa1.txt"),
                Path("1/aa1/aaa2"),
                Path("1/aa1/aaa2/aaa2.txt"),
                Path("1/aa2"),
                Path("2"),
                Path("3"),
            },
        ),
    ],
)
def test_archive__repaths_paths_inside_archive(
    tmp_path: Path,
    rep_ext: str,
    sources: t.List[t.Union[File, Dir]],
    paths: t.List[t.Union[str, Path, sh.Ls]],
    root: t.Optional[Path],
    repath: t.Optional[t.Union[str, dict]],
    expected_listing: t.Set[Path],
):
    """``repath`` (string or mapping) should rename source paths inside the archive."""
    src_dir = create_archive_source(tmp_path, *sources)
    archive_file = tmp_path / f"archive{rep_ext}"

    with sh.cd(src_dir.path):
        sh.archive(archive_file, *paths, root=root, repath=repath)

    assert archive_file.is_file()

    listing = set(sh.lsarchive(archive_file))
    assert listing == expected_listing
412 |
413 |
@parametrize(
    "source, root, expected_listing",
    [
        param(
            Dir("a", File("1.txt", text="1"), File("2.txt", text="2"), File("3.txt", text="3")),
            None,
            {Path("a"), Path("a/1.txt"), Path("a/2.txt"), Path("a/3.txt")},
        ),
        param(
            Dir("a", File("1.txt", text="1"), File("2.txt", text="2"), File("3.txt", text="3")),
            Path("a"),
            {Path("1.txt"), Path("2.txt"), Path("3.txt")},
        ),
    ],
)
def test_archive__archives_relative_paths(
    tmp_path: Path,
    rep_ext: str,
    source: t.Union[File, Dir],
    root: t.Optional[Path],
    expected_listing: t.Set[Path],
):
    """Relative source paths (resolved against the CWD) should archive correctly."""
    src_dir = create_archive_source(tmp_path, source)
    archive_file = tmp_path / f"archive{rep_ext}"

    with sh.cd(src_dir.path):
        items = [item.path.relative_to(src_dir.path) for item in src_dir.items]
        sh.archive(archive_file, *items, root=root)

    assert archive_file.is_file()

    listing = set(sh.lsarchive(archive_file))
    assert listing == expected_listing
447 |
448 |
def test_archive__raises_when_sources_are_not_subpaths_of_root_path(tmp_path: Path, rep_ext: str):
    """Source paths that fall outside of ``root`` should be rejected with a ``ValueError``."""
    archive_file = tmp_path / f"archive{rep_ext}"
    with pytest.raises(ValueError, match="paths must be a subpath of the root"):
        sh.archive(archive_file, tmp_path, root="bad-root")
454 |
455 |
def test_archive__raises_when_file_extension_not_supported(tmp_path: Path):
    """An unrecognized archive extension should raise ``NotImplementedError``."""
    with pytest.raises(NotImplementedError, match="format not supported"):
        sh.archive(tmp_path / "test.txt")
460 |
461 |
def test_archive__raises_when_add_fails(tmp_path: Path, rep_ext: str):
    """Failures while adding members should surface as ``sh.ArchiveError``."""
    src_dir = create_archive_source(tmp_path, File("1.txt", text="1"))

    # Patch both backends so the test works regardless of which format rep_ext selects.
    with mock.patch("tarfile.TarFile.add", side_effect=Exception), mock.patch(
        "zipfile.ZipFile.write", side_effect=Exception
    ):
        with pytest.raises(sh.ArchiveError):
            sh.archive(tmp_path / f"archive{rep_ext}", src_dir.path)
471 |
472 |
@parametrize(
    "paths, repath, expected_error",
    [
        param(["a"], True, "repath must be a string or dict"),
        param(
            ["a", "b"],
            "abc",
            "repath must be a dict when there is more than one archive source path",
        ),
    ],
)
def test_archive__raises_when_repath_is_bad_type(
    tmp_path: Path, paths: list, repath: t.Any, expected_error: str
):
    """Invalid ``repath`` values should raise ``TypeError`` with a descriptive message."""
    with pytest.raises(TypeError) as exc_info:
        sh.archive(tmp_path / "archive.tar", *paths, repath=repath)
    assert expected_error in str(exc_info.value)
490 |
491 |
@parametrize(
    "source, expected",
    [
        param(ArchiveSource("a"), f"ArchiveSource(source='a', path='{sh.cwd() / 'a'}')"),
        param(ArchiveSource(Path("a")), f"ArchiveSource(source='a', path='{sh.cwd() / 'a'}')"),
        param(
            ArchiveSource(sh.ls("a")),
            f"ArchiveSource(source=Ls(path='a', recursive=False), path='{sh.cwd() / 'a'}')",
        ),
        param(
            ArchiveSource(Path("a").absolute()),
            f"ArchiveSource(source='{sh.cwd() / 'a'}', path='{sh.cwd() / 'a'}')",
        ),
    ],
)
def test_archive_source__has_repr(source, expected):
    """``ArchiveSource.__repr__`` should show the original source and its absolute path."""
    assert repr(source) == expected
509 |
--------------------------------------------------------------------------------
/src/shelmet/fileio.py:
--------------------------------------------------------------------------------
1 | """The fileio module contains utilities for file IO."""
2 |
3 | from contextlib import contextmanager
4 | import errno
5 | from functools import partial
6 | import io
7 | import os
8 | from pathlib import Path
9 | import typing as t
10 |
11 | from .filesystem import _candidate_temp_pathname, dirsync, fsync, mkdir, rm
12 | from .types import (
13 | READ_ONLY_MODES,
14 | WRITE_ONLY_BIN_MODES,
15 | WRITE_ONLY_MODES,
16 | WRITE_ONLY_TEXT_MODES,
17 | ReadOnlyBinMode,
18 | ReadOnlyTextMode,
19 | StrPath,
20 | WriteOnlyBinMode,
21 | WriteOnlyTextMode,
22 | )
23 |
24 |
# Default chunk size (in bytes) for chunked file IO; matches ``io.DEFAULT_BUFFER_SIZE``.
DEFAULT_CHUNK_SIZE = io.DEFAULT_BUFFER_SIZE
26 |
27 |
@contextmanager
def atomicdir(dir: StrPath, *, skip_sync: bool = False, overwrite: bool = True) -> t.Iterator[Path]:
    """
    Context-manager that is used to atomically create a directory and its contents.

    This context-manager will create a temporary directory in the same directory as the destination
    and yield the temporary directory as a ``pathlib.Path`` object. All atomic file system updates
    to the directory should then be done within the context-manager. Once the context-manager exits,
    the temporary directory will be passed to :func:`dirsync` (unless ``skip_sync=True``) and then
    moved to the destination followed by :func:`dirsync` on the destination. If the
    destination directory exists, it will be overwritten unless ``overwrite=False``.

    Args:
        dir: Directory path to create.
        skip_sync: Whether to skip calling :func:`dirsync` on the directory. Skipping this can help
            with performance at the cost of durability.
        overwrite: Whether to replace an existing destination. When ``False``, a
            ``FileExistsError`` is raised if the destination exists at the time the directory is to
            be moved to its destination.

    Raises:
        FileExistsError: If the destination is an existing file, or if it exists as a directory
            when ``overwrite=False``.
    """
    dst = Path(dir).absolute()
    if dst.is_file():
        raise FileExistsError(errno.EEXIST, f"Atomic directory target must not be a file: {dst}")

    # Stage all writes in a sibling temp directory so the final os.rename() swap is atomic.
    tmp_dir = _candidate_temp_pathname(path=dst, prefix="_", suffix="_tmp")
    mkdir(tmp_dir)

    try:
        yield Path(tmp_dir)

        # Flush the staged contents to disk before publishing them at the destination.
        if not skip_sync:
            dirsync(tmp_dir)

        if overwrite:
            rm(dst)
        elif dst.exists():
            raise FileExistsError(
                errno.EEXIST,
                f"Atomic directory target must not exist when overwrite disabled: {dst}",
            )

        os.rename(tmp_dir, dst)

        # Sync the destination after the rename (unless durability was traded for speed).
        if not skip_sync:
            dirsync(dst)
    finally:
        # In case something went wrong that prevented moving tmp_dir to dst.
        rm(tmp_dir)
75 |
76 |
@contextmanager
def atomicfile(
    file: StrPath,
    mode: str = "w",
    *,
    skip_sync: bool = False,
    overwrite: bool = True,
    **open_kwargs: t.Any,
) -> t.Iterator[t.IO]:
    """
    Context-manager similar to ``open()`` that is used to perform an atomic file write operation by
    first writing to a temporary location in the same directory as the destination and then renaming
    the file to the destination after all write operations are finished.

    This context-manager will open a temporary file for writing in the same directory as the
    destination and yield a file object just like ``open()`` does. All file operations while the
    context-manager is opened will be performed on the temporary file. Once the context-manager
    exits, the temporary file will flushed and fsync'd (unless ``skip_sync=True``). If the
    destination file exists, it will be overwritten unless ``overwrite=False``.

    Args:
        file: File path to write to.
        mode: File open mode. Must be a write mode containing ``"w"``; ``"x"`` is rejected.
        skip_sync: Whether to skip calling ``fsync`` on file. Skipping this can help with
            performance at the cost of durability.
        overwrite: Whether to replace an existing destination file. When ``False``, a
            ``FileExistsError`` is raised if the destination exists when the temporary file is to
            be moved into place.
        **open_kwargs: Additional keyword arguments to ``open()`` when creating the temporary write
            file.

    Raises:
        ValueError: When `mode` contains ``"x"`` or is not a write mode.
        IsADirectoryError: When the destination is an existing directory.
        FileExistsError: When ``overwrite=False`` and the destination file already exists.
    """
    if isinstance(mode, str) and "x" in mode:
        # "x" cannot apply here: the temp file is always newly created, so exclusive-create
        # semantics for the destination are provided via overwrite=False instead.
        raise ValueError(
            "Atomic file write mode 'x' is not supported. Use 'overwrite=False' instead."
        )

    if not isinstance(mode, str) or "w" not in mode:
        raise ValueError(f"Invalid atomic write mode: {mode}")

    # Resolve to an absolute path up front so the rename below is unaffected by any cwd change
    # made inside the context block.
    dst = Path(file).absolute()
    if dst.is_dir():
        raise IsADirectoryError(errno.EISDIR, f"Atomic file target must not be a directory: {dst}")

    # Ensure the parent exists and stage the write in a sibling temp file so the final
    # rename/link stays on the same filesystem (cross-filesystem rename is not atomic).
    mkdir(dst.parent)
    tmp_file = _candidate_temp_pathname(path=dst, prefix="_", suffix=".tmp")

    try:
        with open(tmp_file, mode, **open_kwargs) as fp:
            yield fp
            if not skip_sync:
                # fsync while still open so data reaches disk before the rename publishes it.
                fsync(fp)

        if overwrite:
            os.rename(tmp_file, dst)
        else:
            # os.link raises FileExistsError if dst exists, giving exclusive-create semantics;
            # the temp name is then removed, leaving only dst.
            os.link(tmp_file, dst)
            rm(tmp_file)

        if not skip_sync:
            # Sync the parent directory so the rename/link itself is durable.
            dirsync(dst.parent)
    finally:
        # In case something went wrong that prevented moving tmp_file to dst.
        rm(tmp_file)
140 |
141 |
# Typing overloads for read(): a read-only text mode narrows the return to ``str``, a
# read-only binary mode narrows it to ``bytes``; the final overload is the fallback for
# arbitrary mode strings.
@t.overload
def read(file: StrPath, mode: ReadOnlyTextMode, **open_kwargs: t.Any) -> str:
    ... # pragma: no cover


@t.overload
def read(file: StrPath, mode: ReadOnlyBinMode, **open_kwargs: t.Any) -> bytes:
    ... # pragma: no cover


@t.overload
def read(file: StrPath, mode: str = "r", **open_kwargs: t.Any) -> t.Union[str, bytes]:
    ... # pragma: no cover
155 |
156 |
def read(file: StrPath, mode: str = "r", **open_kwargs: t.Any) -> t.Union[str, bytes]:
    """
    Return the entire contents of `file`.

    Args:
        file: File to read.
        mode: File open mode. Must be a read-only mode.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized read-only mode.
    """
    if mode not in READ_ONLY_MODES:
        raise ValueError(f"Invalid read-only mode: {mode}")

    fp = open(file, mode, **open_kwargs)
    with fp:
        contents = fp.read()
    return contents
171 |
172 |
def readbytes(file: StrPath, **open_kwargs: t.Any) -> bytes:
    """
    Return binary contents of file.

    Shorthand for :func:`read` with ``mode="rb"``.

    Args:
        file: File to read.
        **open_kwargs: Additional keyword arguments to pass to ``open``.
    """
    contents: bytes = read(file, "rb", **open_kwargs)
    return contents
184 |
185 |
def readtext(file: StrPath, **open_kwargs: t.Any) -> str:
    """
    Return text contents of file.

    Shorthand for :func:`read` with ``mode="r"`` (the default behavior of :func:`read`).

    Args:
        file: File to read.
        **open_kwargs: Additional keyword arguments to pass to ``open``.
    """
    contents: str = read(file, "r", **open_kwargs)
    return contents
197 |
198 |
# Typing overloads for readchunks(): text modes yield ``str`` chunks with a ``str``
# separator, binary modes yield ``bytes`` chunks with a ``bytes`` separator; the final
# overload is the fallback for arbitrary mode strings.
@t.overload
def readchunks(
    file: StrPath,
    mode: ReadOnlyTextMode,
    *,
    size: int = ...,
    sep: t.Optional[str] = ...,
    **open_kwargs: t.Any,
) -> t.Generator[str, None, None]:
    ... # pragma: no cover


@t.overload
def readchunks(
    file: StrPath,
    mode: ReadOnlyBinMode,
    *,
    size: int = ...,
    sep: t.Optional[bytes] = ...,
    **open_kwargs: t.Any,
) -> t.Generator[bytes, None, None]:
    ... # pragma: no cover


@t.overload
def readchunks(
    file: StrPath,
    mode: str = "r",
    *,
    size: int = ...,
    sep: t.Optional[t.Union[str, bytes]] = ...,
    **open_kwargs: t.Any,
) -> t.Generator[t.Union[str, bytes], None, None]:
    ... # pragma: no cover
233 |
234 |
def readchunks(
    file: StrPath,
    mode: str = "r",
    *,
    size: int = DEFAULT_CHUNK_SIZE,
    sep: t.Optional[t.Union[str, bytes]] = None,
    **open_kwargs: t.Any,
) -> t.Generator[t.Union[str, bytes], None, None]:
    """
    Yield contents of file as chunks.

    Without `sep`, chunks are yielded in blocks of `size`. With `sep`, chunks are yielded as if
    from ``contents.split(sep)``; `size` then only controls how much is read from the file per
    read operation while contents are buffered until a separator is encountered.

    Args:
        file: File to read.
        mode: File open mode.
        size: Size of chunks to read from file at a time and chunk size to yield when `sep` not
            given.
        sep: Separator to split chunks by in lieu of splitting by size.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized read-only mode.
    """
    if mode not in READ_ONLY_MODES:
        raise ValueError(f"Invalid read-only mode: {mode}")
    # Validate eagerly, then delegate to a generator function so the error raises at call time
    # rather than on first iteration.
    return _readchunks(file, mode, size=size, sep=sep, **open_kwargs)
263 |
264 |
def _readchunks(file, mode="r", *, size=DEFAULT_CHUNK_SIZE, sep=None, **open_kwargs):
    # Internal generator behind readchunks(). An empty value of the mode's type (str vs bytes)
    # serves both as the separator buffer seed and as the EOF sentinel from fp.read().
    buffer = b"" if "b" in mode else ""
    eof = buffer

    with open(file, mode, **open_kwargs) as fp:
        try:
            for block in iter(lambda: fp.read(size), eof):
                if not sep:
                    # No separator: pass size-delineated blocks straight through.
                    yield block
                    continue
                buffer += block
                # Emit every complete separator-delineated piece accumulated so far.
                while sep in buffer:
                    piece, buffer = buffer.split(sep, 1)
                    yield piece
            # EOF reached: flush whatever remains in the buffer.
            if buffer:
                yield buffer
        except GeneratorExit:  # pragma: no cover
            # Catch GeneratorExit to ensure contextmanager closes file when exiting generator early.
            pass
293 |
294 |
# Typing overloads for readlines(): text modes yield ``str`` lines, binary modes yield
# ``bytes`` lines; the final overload is the fallback for arbitrary mode strings.
@t.overload
def readlines(
    file: StrPath, mode: ReadOnlyTextMode, *, limit: int = ..., **open_kwargs: t.Any
) -> t.Generator[str, None, None]:
    ... # pragma: no cover


@t.overload
def readlines(
    file: StrPath, mode: ReadOnlyBinMode, *, limit: int = ..., **open_kwargs: t.Any
) -> t.Generator[bytes, None, None]:
    ... # pragma: no cover


@t.overload
def readlines(
    file: StrPath, mode: str = "r", *, limit: int = ..., **open_kwargs: t.Any
) -> t.Generator[t.Union[str, bytes], None, None]:
    ... # pragma: no cover
314 |
315 |
def readlines(
    file: StrPath, mode: str = "r", *, limit: int = -1, **open_kwargs: t.Any
) -> t.Generator[t.Union[str, bytes], None, None]:
    """
    Yield each line of a file.

    Note:
        Line-endings are included in the yielded values.

    Args:
        file: File to read.
        mode: File open mode.
        limit: Maximum length of each line to yield. For example, ``limit=10`` will yield the first
            10 characters of each line.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized read-only mode.
    """
    if mode not in READ_ONLY_MODES:
        raise ValueError(f"Invalid read-only mode: {mode}")
    # Validate eagerly, then delegate to a generator function so the error raises at call time
    # rather than on first iteration.
    return _readlines(file, mode, limit=limit, **open_kwargs)
335 |
336 |
337 | def _readlines(file, mode="r", *, limit=-1, **open_kwargs):
338 | sentinel = ""
339 | if "b" in mode:
340 | sentinel = b""
341 |
342 | with open(file, mode, **open_kwargs) as fp:
343 | try:
344 | yield from iter(lambda: fp.readline(limit), sentinel)
345 | except GeneratorExit: # pragma: no cover
346 | # Catch GeneratorExit to ensure contextmanager closes file when exiting generator early.
347 | pass
348 |
349 |
# Typing overloads for write(): ``str`` contents pair with write-only text modes, ``bytes``
# contents pair with write-only binary modes; the final overload is the fallback for
# arbitrary mode strings.
@t.overload
def write(
    file: StrPath,
    contents: str,
    mode: WriteOnlyTextMode,
    *,
    atomic: bool = ...,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover


@t.overload
def write(
    file: StrPath,
    contents: bytes,
    mode: WriteOnlyBinMode,
    *,
    atomic: bool = ...,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover


@t.overload
def write(
    file: StrPath,
    contents: t.Union[str, bytes],
    mode: str = "w",
    *,
    atomic: bool = ...,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover
384 |
385 |
def write(
    file: StrPath,
    contents: t.Union[str, bytes],
    mode: str = "w",
    *,
    atomic: bool = False,
    **open_kwargs: t.Any,
) -> None:
    """
    Write contents to file.

    Args:
        file: File to write.
        contents: Contents to write.
        mode: File open mode. Must be a write-only mode.
        atomic: Whether to write the file to a temporary location in the same directory before
            moving it to the destination.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized write-only mode.
    """
    if mode not in WRITE_ONLY_MODES:
        raise ValueError(f"Invalid write-only mode: {mode}")

    if atomic:
        # atomicfile rejects "x" in the mode itself; exclusive-create semantics are expressed
        # through its overwrite flag instead, so translate before the mode is rewritten.
        opener = partial(atomicfile, overwrite="x" not in mode)  # type: ignore
        mode = mode.replace("x", "w")
    else:
        opener = open

    with opener(file, mode, **open_kwargs) as fp:
        fp.write(contents)
416 |
417 |
def writetext(
    file: StrPath, contents: str, mode: str = "w", *, atomic: bool = False, **open_kwargs: t.Any
) -> None:
    """
    Write text contents to file.

    Like :func:`write` but restricted to write-only text modes.

    Args:
        file: File to write.
        contents: Contents to write.
        mode: File open mode. Must be a write-only text mode.
        atomic: Whether to write the file to a temporary location in the same directory before
            moving it to the destination.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized write-only text mode.
    """
    # Reject binary modes up front; write() itself would accept them.
    if mode not in WRITE_ONLY_TEXT_MODES:
        raise ValueError(f"Invalid write-only text-mode: {mode}")
    write(file, contents, mode, atomic=atomic, **open_kwargs)
435 |
436 |
def writebytes(
    file: StrPath, contents: bytes, mode: str = "wb", *, atomic: bool = False, **open_kwargs: t.Any
) -> None:
    """
    Write binary contents to file.

    Like :func:`write` but restricted to write-only binary modes.

    Args:
        file: File to write.
        contents: Contents to write.
        mode: File open mode. Must be a write-only binary mode.
        atomic: Whether to write the file to a temporary location in the same directory before
            moving it to the destination.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized write-only binary mode.
    """
    # Reject text modes up front; write() itself would accept them.
    if mode not in WRITE_ONLY_BIN_MODES:
        raise ValueError(f"Invalid write-only binary-mode: {mode}")
    write(file, contents, mode, atomic=atomic, **open_kwargs)
454 |
455 |
# Typing overloads for writelines(): ``str`` items pair with write-only text modes and a
# ``str`` line ending, ``bytes`` items pair with write-only binary modes and a ``bytes``
# line ending; the final overload is the fallback for arbitrary mode strings.
@t.overload
def writelines(
    file: StrPath,
    items: t.Iterable[str],
    mode: WriteOnlyTextMode,
    *,
    ending: t.Optional[str] = None,
    atomic: bool = False,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover


@t.overload
def writelines(
    file: StrPath,
    items: t.Iterable[bytes],
    mode: WriteOnlyBinMode,
    *,
    ending: t.Optional[bytes] = None,
    atomic: bool = False,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover


@t.overload
def writelines(
    file: StrPath,
    items: t.Union[t.Iterable[str], t.Iterable[bytes]],
    mode: str = "w",
    *,
    ending: t.Optional[t.Union[str, bytes]] = None,
    atomic: bool = False,
    **open_kwargs: t.Any,
) -> None:
    ... # pragma: no cover
493 |
494 |
def writelines(
    file: StrPath,
    items: t.Union[t.Iterable[str], t.Iterable[bytes]],
    mode: str = "w",
    *,
    ending: t.Optional[t.Union[str, bytes]] = None,
    atomic: bool = False,
    **open_kwargs: t.Any,
) -> None:
    """
    Write lines to file.

    Args:
        file: File to write.
        items: Items to write.
        mode: File open mode. Must be a write-only mode.
        ending: Line ending to use. Defaults to newline.
        atomic: Whether to write the file to a temporary location in the same directory before
            moving it to the destination.
        **open_kwargs: Additional keyword arguments to pass to ``open``.

    Raises:
        ValueError: When `mode` is not a recognized write-only mode.
    """
    if mode not in WRITE_ONLY_MODES:
        raise ValueError(f"Invalid write-only mode: {mode}")

    if ending is None:
        # Default line ending matches the mode's str/bytes type.
        ending = b"\n" if "b" in mode else "\n"

    if atomic:
        # atomicfile rejects "x" in the mode itself; exclusive-create semantics are expressed
        # through its overwrite flag instead, so translate before the mode is rewritten.
        opener = partial(atomicfile, overwrite="x" not in mode)  # type: ignore
        mode = mode.replace("x", "w")
    else:
        opener = open

    with opener(file, mode, **open_kwargs) as fp:
        fp.writelines(item + ending for item in items)  # type: ignore
533 |
--------------------------------------------------------------------------------